/*
 * libjingle
 * Copyright 2013, Google Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *  1. Redistributions of source code must retain the above copyright notice,
 *     this list of conditions and the following disclaimer.
 *  2. Redistributions in binary form must reproduce the above copyright notice,
 *     this list of conditions and the following disclaimer in the documentation
 *     and/or other materials provided with the distribution.
 *  3. The name of the author may not be used to endorse or promote products
 *     derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

// Hints for future visitors:
// This entire file is an implementation detail of the org.webrtc Java package,
// the most interesting bits of which are org.webrtc.PeerConnection{,Factory}.
// The layout of this file is roughly:
// - various helper C++ functions & classes that wrap Java counterparts and
//   expose a C++ interface that can be passed to the C++ PeerConnection APIs
// - implementations of methods declared "static" in the Java package (named
//   things like Java_org_webrtc_OMG_Can_This_Name_Be_Any_Longer, prescribed by
//   the JNI spec).
//
// Lifecycle notes: objects are owned where they will be called; in other words
// FooObservers are owned by C++-land, and user-callable objects (e.g.
// PeerConnection and VideoTrack) are owned by Java-land.
// When this file allocates C++ RefCountInterfaces it AddRef()s an artificial
// ref simulating the jlong held in Java-land, and then Release()s the ref in
// the respective free call.  Sometimes this AddRef is implicit in the
// construction of a scoped_refptr<> which is then .release()d.
// Any persistent (non-local) references from C++ to Java must be global or weak
// (in which case they must be checked before use)!
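//
// A minimal sketch of that ownership round trip (illustrative only; CreateFoo,
// FreeFoo and FooInterface are hypothetical names, but the shape matches the
// Create*/free natives defined later in this file):
//   jlong CreateFoo(JNIEnv* jni, jclass) {
//     scoped_refptr<FooInterface> foo = ...;       // implicit AddRef
//     return jlongFromPointer(foo.release());      // the jlong owns one ref
//   }
//   void FreeFoo(JNIEnv* jni, jclass, jlong j_p) {
//     reinterpret_cast<FooInterface*>(j_p)->Release();  // balance that ref
//   }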
//
// Exception notes: pretty much all JNI calls can throw Java exceptions, so each
// call through a JNIEnv* pointer needs to be followed by an ExceptionCheck()
// call.  In this file this is done in CHECK_EXCEPTION, making for much easier
// debugging in case of failure (the alternative is to wait for control to
// return to the Java frame that called code in this file, at which point it's
// impossible to tell which JNI call broke).
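//
// For example (sketch; j_foo_class and j_foo_ctor are hypothetical names), the
// pattern used throughout this file is:
//   jobject j_foo = jni->NewObject(j_foo_class, j_foo_ctor);
//   CHECK_EXCEPTION(jni) << "error during NewObject";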

#include <jni.h>
#undef JNIEXPORT
#define JNIEXPORT __attribute__((visibility("default")))

#include <asm/unistd.h>
#include <sys/prctl.h>
#include <sys/syscall.h>
#include <unistd.h>
#include <limits>
#include <map>

#include "talk/app/webrtc/mediaconstraintsinterface.h"
#include "talk/app/webrtc/peerconnectioninterface.h"
#include "talk/app/webrtc/videosourceinterface.h"
#include "talk/media/base/videocapturer.h"
#include "talk/media/base/videorenderer.h"
#include "talk/media/devices/videorendererfactory.h"
#include "talk/media/webrtc/webrtcvideocapturer.h"
#include "talk/media/webrtc/webrtcvideodecoderfactory.h"
#include "talk/media/webrtc/webrtcvideoencoderfactory.h"
#include "third_party/icu/source/common/unicode/unistr.h"
#include "third_party/libyuv/include/libyuv/convert.h"
#include "third_party/libyuv/include/libyuv/convert_from.h"
#include "third_party/libyuv/include/libyuv/video_common.h"
#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/messagequeue.h"
#include "webrtc/base/ssladapter.h"
#include "webrtc/common_video/interface/texture_video_frame.h"
#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
#include "webrtc/system_wrappers/interface/compile_assert.h"
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/video_engine/include/vie_base.h"
#include "webrtc/voice_engine/include/voe_base.h"

#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
#include <android/log.h>
#include "webrtc/modules/video_capture/video_capture_internal.h"
#include "webrtc/modules/video_render/video_render_internal.h"
#include "webrtc/system_wrappers/interface/logcat_trace_context.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
using webrtc::CodecSpecificInfo;
using webrtc::DecodedImageCallback;
using webrtc::EncodedImage;
using webrtc::I420VideoFrame;
using webrtc::LogcatTraceContext;
using webrtc::RTPFragmentationHeader;
using webrtc::TextureVideoFrame;
using webrtc::TickTime;
using webrtc::VideoCodec;
#endif

using icu::UnicodeString;
using rtc::Bind;
using rtc::Thread;
using rtc::ThreadManager;
using rtc::scoped_ptr;
using webrtc::AudioSourceInterface;
using webrtc::AudioTrackInterface;
using webrtc::AudioTrackVector;
using webrtc::CreateSessionDescriptionObserver;
using webrtc::DataBuffer;
using webrtc::DataChannelInit;
using webrtc::DataChannelInterface;
using webrtc::DataChannelObserver;
using webrtc::IceCandidateInterface;
using webrtc::NativeHandle;
using webrtc::MediaConstraintsInterface;
using webrtc::MediaSourceInterface;
using webrtc::MediaStreamInterface;
using webrtc::MediaStreamTrackInterface;
using webrtc::PeerConnectionFactoryInterface;
using webrtc::PeerConnectionInterface;
using webrtc::PeerConnectionObserver;
using webrtc::SessionDescriptionInterface;
using webrtc::SetSessionDescriptionObserver;
using webrtc::StatsObserver;
using webrtc::StatsReport;
using webrtc::VideoRendererInterface;
using webrtc::VideoSourceInterface;
using webrtc::VideoTrackInterface;
using webrtc::VideoTrackVector;
using webrtc::kVideoCodecVP8;

// Abort the process if |jni| has a Java exception pending.
// This macro uses the comma operator to execute ExceptionDescribe
// and ExceptionClear, ignoring their return values and sending ""
// to the error stream.
#define CHECK_EXCEPTION(jni)    \
  CHECK(!jni->ExceptionCheck()) \
      << (jni->ExceptionDescribe(), jni->ExceptionClear(), "")

// Helper that calls ptr->Release() and aborts the process with a useful
// message if that didn't actually delete *ptr because of extra refcounts.
#define CHECK_RELEASE(ptr) \
  CHECK_EQ(0, (ptr)->Release()) << "Unexpected refcount."

namespace {

static JavaVM* g_jvm = NULL;  // Set in JNI_OnLoad().

static pthread_once_t g_jni_ptr_once = PTHREAD_ONCE_INIT;
// Key for per-thread JNIEnv* data.  Non-NULL in threads attached to |g_jvm| by
// AttachCurrentThreadIfNeeded(), NULL in unattached threads and threads that
// were attached by the JVM because of a Java->native call.
static pthread_key_t g_jni_ptr;

#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
// Set in PeerConnectionFactory_initializeAndroidGlobals().
static bool factory_static_initialized = false;
#endif


// Return thread ID as a string.
static std::string GetThreadId() {
  char buf[21];  // Big enough to hold a kuint64max plus terminating NULL.
  CHECK_LT(snprintf(buf, sizeof(buf), "%llu", syscall(__NR_gettid)),
           sizeof(buf))
      << "Thread id is bigger than uint64??";
  return std::string(buf);
}

// Return the current thread's name.
static std::string GetThreadName() {
  char name[17];
  CHECK_EQ(0, prctl(PR_GET_NAME, name)) << "prctl(PR_GET_NAME) failed";
  name[16] = '\0';
  return std::string(name);
}

// Return a |JNIEnv*| usable on this thread or NULL if this thread is detached.
static JNIEnv* GetEnv() {
  void* env = NULL;
  jint status = g_jvm->GetEnv(&env, JNI_VERSION_1_6);
  CHECK(((env != NULL) && (status == JNI_OK)) ||
        ((env == NULL) && (status == JNI_EDETACHED)))
      << "Unexpected GetEnv return: " << status << ":" << env;
  return reinterpret_cast<JNIEnv*>(env);
}

static void ThreadDestructor(void* prev_jni_ptr) {
  // This function only runs on threads where |g_jni_ptr| is non-NULL, meaning
  // we were responsible for originally attaching the thread, so are responsible
  // for detaching it now.  However, because some JVM implementations (notably
  // Oracle's http://goo.gl/eHApYT) also use the pthread_key_create mechanism,
  // the JVM's accounting info for this thread may already be wiped out by the
  // time this is called. Thus it may appear we are already detached even though
  // it was our responsibility to detach!  Oh well.
  if (!GetEnv())
    return;

  CHECK(GetEnv() == prev_jni_ptr)
      << "Detaching from another thread: " << prev_jni_ptr << ":" << GetEnv();
  jint status = g_jvm->DetachCurrentThread();
  CHECK(status == JNI_OK) << "Failed to detach thread: " << status;
  CHECK(!GetEnv()) << "Detaching was a successful no-op???";
}

static void CreateJNIPtrKey() {
  CHECK(!pthread_key_create(&g_jni_ptr, &ThreadDestructor))
      << "pthread_key_create";
}

// Return a |JNIEnv*| usable on this thread.  Attaches to |g_jvm| if necessary.
static JNIEnv* AttachCurrentThreadIfNeeded() {
  JNIEnv* jni = GetEnv();
  if (jni)
    return jni;
  CHECK(!pthread_getspecific(g_jni_ptr))
      << "TLS has a JNIEnv* but not attached?";

  char* name = strdup((GetThreadName() + " - " + GetThreadId()).c_str());
  JavaVMAttachArgs args;
  args.version = JNI_VERSION_1_6;
  args.name = name;
  args.group = NULL;
  // Deal with difference in signatures between Oracle's jni.h and Android's.
#ifdef _JAVASOFT_JNI_H_  // Oracle's jni.h violates the JNI spec!
  void* env = NULL;
#else
  JNIEnv* env = NULL;
#endif
  CHECK(!g_jvm->AttachCurrentThread(&env, &args)) << "Failed to attach thread";
  free(name);
  CHECK(env) << "AttachCurrentThread handed back NULL!";
  jni = reinterpret_cast<JNIEnv*>(env);
  CHECK(!pthread_setspecific(g_jni_ptr, jni)) << "pthread_setspecific";
  return jni;
}

// Return a |jlong| that will correctly convert back to |ptr|.  This is needed
// because the alternative (of silently passing a 32-bit pointer to a vararg
// function expecting a 64-bit param) picks up garbage in the high 32 bits.
static jlong jlongFromPointer(void* ptr) {
  COMPILE_ASSERT(sizeof(intptr_t) <= sizeof(jlong),
                 Time_to_rethink_the_use_of_jlongs);
  // Going through intptr_t to be obvious about the definedness of the
  // conversion from pointer to integral type.  intptr_t to jlong is a standard
  // widening by the COMPILE_ASSERT above.
  jlong ret = reinterpret_cast<intptr_t>(ptr);
  assert(reinterpret_cast<void*>(ret) == ptr);
  return ret;
}
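
// Example round trip (sketch; |FooInterface| stands in for any refcounted type
// handed across the boundary as a jlong):
//   jlong handle = jlongFromPointer(foo);              // sent up to Java-land
//   // ...later, when Java passes the handle back into a native method:
//   reinterpret_cast<FooInterface*>(handle)->Release();  // the "free" call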

// Android's FindClass() is trickier than usual because the app-specific
// ClassLoader is not consulted when there is no app-specific frame on the
// stack.  Consequently, we only look up classes once in JNI_OnLoad.
// http://developer.android.com/training/articles/perf-jni.html#faq_FindClass
class ClassReferenceHolder {
 public:
  explicit ClassReferenceHolder(JNIEnv* jni) {
    LoadClass(jni, "java/nio/ByteBuffer");
    LoadClass(jni, "org/webrtc/AudioTrack");
    LoadClass(jni, "org/webrtc/DataChannel");
    LoadClass(jni, "org/webrtc/DataChannel$Buffer");
    LoadClass(jni, "org/webrtc/DataChannel$Init");
    LoadClass(jni, "org/webrtc/DataChannel$State");
    LoadClass(jni, "org/webrtc/IceCandidate");
#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
    LoadClass(jni, "android/graphics/SurfaceTexture");
    LoadClass(jni, "android/opengl/EGLContext");
    LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder");
    LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
    LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder");
    LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo");
#endif
    LoadClass(jni, "org/webrtc/MediaSource$State");
    LoadClass(jni, "org/webrtc/MediaStream");
    LoadClass(jni, "org/webrtc/MediaStreamTrack$State");
    LoadClass(jni, "org/webrtc/PeerConnection$IceConnectionState");
    LoadClass(jni, "org/webrtc/PeerConnection$IceGatheringState");
    LoadClass(jni, "org/webrtc/PeerConnection$SignalingState");
    LoadClass(jni, "org/webrtc/SessionDescription");
    LoadClass(jni, "org/webrtc/SessionDescription$Type");
    LoadClass(jni, "org/webrtc/StatsReport");
    LoadClass(jni, "org/webrtc/StatsReport$Value");
    LoadClass(jni, "org/webrtc/VideoRenderer$I420Frame");
    LoadClass(jni, "org/webrtc/VideoTrack");
  }

  ~ClassReferenceHolder() {
    CHECK(classes_.empty()) << "Must call FreeReferences() before dtor!";
  }

  void FreeReferences(JNIEnv* jni) {
    for (std::map<std::string, jclass>::const_iterator it = classes_.begin();
         it != classes_.end(); ++it) {
      jni->DeleteGlobalRef(it->second);
    }
    classes_.clear();
  }

  jclass GetClass(const std::string& name) {
    std::map<std::string, jclass>::iterator it = classes_.find(name);
    CHECK(it != classes_.end()) << "Unexpected GetClass() call for: " << name;
    return it->second;
  }

 private:
  void LoadClass(JNIEnv* jni, const std::string& name) {
    jclass localRef = jni->FindClass(name.c_str());
    CHECK_EXCEPTION(jni) << "error during FindClass: " << name;
    CHECK(localRef) << name;
    jclass globalRef = reinterpret_cast<jclass>(jni->NewGlobalRef(localRef));
    CHECK_EXCEPTION(jni) << "error during NewGlobalRef: " << name;
    CHECK(globalRef) << name;
    bool inserted = classes_.insert(std::make_pair(name, globalRef)).second;
    CHECK(inserted) << "Duplicate class name: " << name;
  }

  std::map<std::string, jclass> classes_;
};

// Allocated in JNI_OnLoad(), freed in JNI_OnUnLoad().
static ClassReferenceHolder* g_class_reference_holder = NULL;

// JNIEnv-helper methods that CHECK success: no Java exception thrown and found
// object/class/method/field is non-null.
jmethodID GetMethodID(
    JNIEnv* jni, jclass c, const std::string& name, const char* signature) {
  jmethodID m = jni->GetMethodID(c, name.c_str(), signature);
  CHECK_EXCEPTION(jni) << "error during GetMethodID: " << name << ", "
                       << signature;
  CHECK(m) << name << ", " << signature;
  return m;
}

jmethodID GetStaticMethodID(
    JNIEnv* jni, jclass c, const char* name, const char* signature) {
  jmethodID m = jni->GetStaticMethodID(c, name, signature);
  CHECK_EXCEPTION(jni) << "error during GetStaticMethodID: " << name << ", "
                       << signature;
  CHECK(m) << name << ", " << signature;
  return m;
}

jfieldID GetFieldID(
    JNIEnv* jni, jclass c, const char* name, const char* signature) {
  jfieldID f = jni->GetFieldID(c, name, signature);
  CHECK_EXCEPTION(jni) << "error during GetFieldID";
  CHECK(f) << name << ", " << signature;
  return f;
}

// Returns a global reference guaranteed to be valid for the lifetime of the
// process.
jclass FindClass(JNIEnv* jni, const char* name) {
  return g_class_reference_holder->GetClass(name);
}

jclass GetObjectClass(JNIEnv* jni, jobject object) {
  jclass c = jni->GetObjectClass(object);
  CHECK_EXCEPTION(jni) << "error during GetObjectClass";
  CHECK(c) << "GetObjectClass returned NULL";
  return c;
}

jobject GetObjectField(JNIEnv* jni, jobject object, jfieldID id) {
  jobject o = jni->GetObjectField(object, id);
  CHECK_EXCEPTION(jni) << "error during GetObjectField";
  CHECK(o) << "GetObjectField returned NULL";
  return o;
}

jstring GetStringField(JNIEnv* jni, jobject object, jfieldID id) {
  return static_cast<jstring>(GetObjectField(jni, object, id));
}

jlong GetLongField(JNIEnv* jni, jobject object, jfieldID id) {
  jlong l = jni->GetLongField(object, id);
  CHECK_EXCEPTION(jni) << "error during GetLongField";
  return l;
}

jint GetIntField(JNIEnv* jni, jobject object, jfieldID id) {
  jint i = jni->GetIntField(object, id);
  CHECK_EXCEPTION(jni) << "error during GetIntField";
  return i;
}

bool GetBooleanField(JNIEnv* jni, jobject object, jfieldID id) {
  jboolean b = jni->GetBooleanField(object, id);
  CHECK_EXCEPTION(jni) << "error during GetBooleanField";
  return b;
}

jobject NewGlobalRef(JNIEnv* jni, jobject o) {
  jobject ret = jni->NewGlobalRef(o);
  CHECK_EXCEPTION(jni) << "error during NewGlobalRef";
  CHECK(ret);
  return ret;
}

void DeleteGlobalRef(JNIEnv* jni, jobject o) {
  jni->DeleteGlobalRef(o);
  CHECK_EXCEPTION(jni) << "error during DeleteGlobalRef";
}

// Given a jweak reference, allocate a (strong) local reference scoped to the
// lifetime of this object if the weak reference is still valid, or NULL
// otherwise.
class WeakRef {
 public:
  WeakRef(JNIEnv* jni, jweak ref)
      : jni_(jni), obj_(jni_->NewLocalRef(ref)) {
    CHECK_EXCEPTION(jni) << "error during NewLocalRef";
  }
  ~WeakRef() {
    if (obj_) {
      jni_->DeleteLocalRef(obj_);
      CHECK_EXCEPTION(jni_) << "error during DeleteLocalRef";
    }
  }
  jobject obj() { return obj_; }

 private:
  JNIEnv* const jni_;
  jobject const obj_;
};

// Scope Java local references to the lifetime of this object.  Use in all C++
// callbacks (i.e. entry points that don't originate in a Java callstack
// through a "native" method call).
class ScopedLocalRefFrame {
 public:
  explicit ScopedLocalRefFrame(JNIEnv* jni) : jni_(jni) {
    CHECK(!jni_->PushLocalFrame(0)) << "Failed to PushLocalFrame";
  }
  ~ScopedLocalRefFrame() {
    jni_->PopLocalFrame(NULL);
  }

 private:
  JNIEnv* jni_;
};

// Scoped holder for global Java refs.
template<class T>  // T is jclass, jobject, jintArray, etc.
class ScopedGlobalRef {
 public:
  ScopedGlobalRef(JNIEnv* jni, T obj)
      : obj_(static_cast<T>(jni->NewGlobalRef(obj))) {}
  ~ScopedGlobalRef() {
    DeleteGlobalRef(AttachCurrentThreadIfNeeded(), obj_);
  }
  T operator*() const {
    return obj_;
  }
 private:
  T obj_;
};

// Java references to "null" can only be distinguished as such in C++ by
// creating a local reference, so this helper wraps that logic.
static bool IsNull(JNIEnv* jni, jobject obj) {
  ScopedLocalRefFrame local_ref_frame(jni);
  return jni->NewLocalRef(obj) == NULL;
}

// Return the (singleton) Java Enum object corresponding to |index|;
// |state_class_fragment| is something like "MediaSource$State".
jobject JavaEnumFromIndex(
    JNIEnv* jni, const std::string& state_class_fragment, int index) {
  std::string state_class_name = "org/webrtc/" + state_class_fragment;
  jclass state_class = FindClass(jni, state_class_name.c_str());
  jmethodID state_values_id = GetStaticMethodID(
      jni, state_class, "values", ("()[L" + state_class_name  + ";").c_str());
  jobjectArray state_values = static_cast<jobjectArray>(
      jni->CallStaticObjectMethod(state_class, state_values_id));
  CHECK_EXCEPTION(jni) << "error during CallStaticObjectMethod";
  jobject ret = jni->GetObjectArrayElement(state_values, index);
  CHECK_EXCEPTION(jni) << "error during GetObjectArrayElement";
  return ret;
}

// Given a UTF-8 encoded |native| string return a new (UTF-16) jstring.
static jstring JavaStringFromStdString(JNIEnv* jni, const std::string& native) {
  UnicodeString ustr(UnicodeString::fromUTF8(native));
  jstring jstr = jni->NewString(ustr.getBuffer(), ustr.length());
  CHECK_EXCEPTION(jni) << "error during NewString";
  return jstr;
}

// Given a (UTF-16) jstring return a new UTF-8 native string.
static std::string JavaToStdString(JNIEnv* jni, const jstring& j_string) {
  const jchar* jchars = jni->GetStringChars(j_string, NULL);
  CHECK_EXCEPTION(jni) << "Error during GetStringChars";
  UnicodeString ustr(jchars, jni->GetStringLength(j_string));
  CHECK_EXCEPTION(jni) << "Error during GetStringLength";
  jni->ReleaseStringChars(j_string, jchars);
  CHECK_EXCEPTION(jni) << "Error during ReleaseStringChars";
  std::string ret;
  return ustr.toUTF8String(ret);
}

static DataChannelInit JavaDataChannelInitToNative(
    JNIEnv* jni, jobject j_init) {
  DataChannelInit init;

  jclass j_init_class = FindClass(jni, "org/webrtc/DataChannel$Init");
  jfieldID ordered_id = GetFieldID(jni, j_init_class, "ordered", "Z");
  jfieldID max_retransmit_time_id =
      GetFieldID(jni, j_init_class, "maxRetransmitTimeMs", "I");
  jfieldID max_retransmits_id =
      GetFieldID(jni, j_init_class, "maxRetransmits", "I");
  jfieldID protocol_id =
      GetFieldID(jni, j_init_class, "protocol", "Ljava/lang/String;");
  jfieldID negotiated_id = GetFieldID(jni, j_init_class, "negotiated", "Z");
  jfieldID id_id = GetFieldID(jni, j_init_class, "id", "I");

  init.ordered = GetBooleanField(jni, j_init, ordered_id);
  init.maxRetransmitTime = GetIntField(jni, j_init, max_retransmit_time_id);
  init.maxRetransmits = GetIntField(jni, j_init, max_retransmits_id);
  init.protocol = JavaToStdString(
      jni, GetStringField(jni, j_init, protocol_id));
  init.negotiated = GetBooleanField(jni, j_init, negotiated_id);
  init.id = GetIntField(jni, j_init, id_id);

  return init;
}

class ConstraintsWrapper;

// Adapter between the C++ PeerConnectionObserver interface and the Java
// PeerConnection.Observer interface.  Wraps an instance of the Java interface
// and dispatches C++ callbacks to Java.
class PCOJava : public PeerConnectionObserver {
 public:
  PCOJava(JNIEnv* jni, jobject j_observer)
      : j_observer_global_(jni, j_observer),
        j_observer_class_(jni, GetObjectClass(jni, *j_observer_global_)),
        j_media_stream_class_(jni, FindClass(jni, "org/webrtc/MediaStream")),
        j_media_stream_ctor_(GetMethodID(
            jni, *j_media_stream_class_, "<init>", "(J)V")),
        j_audio_track_class_(jni, FindClass(jni, "org/webrtc/AudioTrack")),
        j_audio_track_ctor_(GetMethodID(
            jni, *j_audio_track_class_, "<init>", "(J)V")),
        j_video_track_class_(jni, FindClass(jni, "org/webrtc/VideoTrack")),
        j_video_track_ctor_(GetMethodID(
            jni, *j_video_track_class_, "<init>", "(J)V")),
        j_data_channel_class_(jni, FindClass(jni, "org/webrtc/DataChannel")),
        j_data_channel_ctor_(GetMethodID(
            jni, *j_data_channel_class_, "<init>", "(J)V")) {
  }

  virtual ~PCOJava() {}

  virtual void OnIceCandidate(const IceCandidateInterface* candidate) OVERRIDE {
    ScopedLocalRefFrame local_ref_frame(jni());
    std::string sdp;
    CHECK(candidate->ToString(&sdp)) << "got so far: " << sdp;
    jclass candidate_class = FindClass(jni(), "org/webrtc/IceCandidate");
    jmethodID ctor = GetMethodID(jni(), candidate_class,
        "<init>", "(Ljava/lang/String;ILjava/lang/String;)V");
    jstring j_mid = JavaStringFromStdString(jni(), candidate->sdp_mid());
    jstring j_sdp = JavaStringFromStdString(jni(), sdp);
    jobject j_candidate = jni()->NewObject(
        candidate_class, ctor, j_mid, candidate->sdp_mline_index(), j_sdp);
    CHECK_EXCEPTION(jni()) << "error during NewObject";
    jmethodID m = GetMethodID(jni(), *j_observer_class_,
                              "onIceCandidate", "(Lorg/webrtc/IceCandidate;)V");
    jni()->CallVoidMethod(*j_observer_global_, m, j_candidate);
    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
  }

  virtual void OnError() OVERRIDE {
    ScopedLocalRefFrame local_ref_frame(jni());
    jmethodID m = GetMethodID(jni(), *j_observer_class_, "onError", "()V");
    jni()->CallVoidMethod(*j_observer_global_, m);
    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
  }

  virtual void OnSignalingChange(
      PeerConnectionInterface::SignalingState new_state) OVERRIDE {
    ScopedLocalRefFrame local_ref_frame(jni());
    jmethodID m = GetMethodID(
        jni(), *j_observer_class_, "onSignalingChange",
        "(Lorg/webrtc/PeerConnection$SignalingState;)V");
    jobject new_state_enum =
        JavaEnumFromIndex(jni(), "PeerConnection$SignalingState", new_state);
    jni()->CallVoidMethod(*j_observer_global_, m, new_state_enum);
    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
  }

  virtual void OnIceConnectionChange(
      PeerConnectionInterface::IceConnectionState new_state) OVERRIDE {
    ScopedLocalRefFrame local_ref_frame(jni());
    jmethodID m = GetMethodID(
        jni(), *j_observer_class_, "onIceConnectionChange",
        "(Lorg/webrtc/PeerConnection$IceConnectionState;)V");
    jobject new_state_enum = JavaEnumFromIndex(
        jni(), "PeerConnection$IceConnectionState", new_state);
    jni()->CallVoidMethod(*j_observer_global_, m, new_state_enum);
    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
  }

  virtual void OnIceGatheringChange(
      PeerConnectionInterface::IceGatheringState new_state) OVERRIDE {
    ScopedLocalRefFrame local_ref_frame(jni());
    jmethodID m = GetMethodID(
        jni(), *j_observer_class_, "onIceGatheringChange",
        "(Lorg/webrtc/PeerConnection$IceGatheringState;)V");
    jobject new_state_enum = JavaEnumFromIndex(
        jni(), "PeerConnection$IceGatheringState", new_state);
    jni()->CallVoidMethod(*j_observer_global_, m, new_state_enum);
    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
  }

  virtual void OnAddStream(MediaStreamInterface* stream) OVERRIDE {
    ScopedLocalRefFrame local_ref_frame(jni());
    jobject j_stream = jni()->NewObject(
        *j_media_stream_class_, j_media_stream_ctor_, (jlong)stream);
    CHECK_EXCEPTION(jni()) << "error during NewObject";

    AudioTrackVector audio_tracks = stream->GetAudioTracks();
    for (size_t i = 0; i < audio_tracks.size(); ++i) {
      AudioTrackInterface* track = audio_tracks[i];
      jstring id = JavaStringFromStdString(jni(), track->id());
      jobject j_track = jni()->NewObject(
          *j_audio_track_class_, j_audio_track_ctor_, (jlong)track, id);
      CHECK_EXCEPTION(jni()) << "error during NewObject";
      jfieldID audio_tracks_id = GetFieldID(jni(),
                                            *j_media_stream_class_,
                                            "audioTracks",
                                            "Ljava/util/LinkedList;");
      jobject audio_tracks = GetObjectField(jni(), j_stream, audio_tracks_id);
      jmethodID add = GetMethodID(jni(),
                                  GetObjectClass(jni(), audio_tracks),
                                  "add",
                                  "(Ljava/lang/Object;)Z");
      jboolean added = jni()->CallBooleanMethod(audio_tracks, add, j_track);
      CHECK_EXCEPTION(jni()) << "error during CallBooleanMethod";
      CHECK(added);
    }

    VideoTrackVector video_tracks = stream->GetVideoTracks();
    for (size_t i = 0; i < video_tracks.size(); ++i) {
      VideoTrackInterface* track = video_tracks[i];
      jstring id = JavaStringFromStdString(jni(), track->id());
      jobject j_track = jni()->NewObject(
          *j_video_track_class_, j_video_track_ctor_, (jlong)track, id);
      CHECK_EXCEPTION(jni()) << "error during NewObject";
      jfieldID video_tracks_id = GetFieldID(jni(),
                                            *j_media_stream_class_,
                                            "videoTracks",
                                            "Ljava/util/LinkedList;");
      jobject video_tracks = GetObjectField(jni(), j_stream, video_tracks_id);
      jmethodID add = GetMethodID(jni(),
                                  GetObjectClass(jni(), video_tracks),
                                  "add",
                                  "(Ljava/lang/Object;)Z");
      jboolean added = jni()->CallBooleanMethod(video_tracks, add, j_track);
      CHECK_EXCEPTION(jni()) << "error during CallBooleanMethod";
      CHECK(added);
    }
    streams_[stream] = jni()->NewWeakGlobalRef(j_stream);
    CHECK_EXCEPTION(jni()) << "error during NewWeakGlobalRef";

    jmethodID m = GetMethodID(jni(), *j_observer_class_, "onAddStream",
                              "(Lorg/webrtc/MediaStream;)V");
    jni()->CallVoidMethod(*j_observer_global_, m, j_stream);
    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
  }

  virtual void OnRemoveStream(MediaStreamInterface* stream) OVERRIDE {
    ScopedLocalRefFrame local_ref_frame(jni());
    NativeToJavaStreamsMap::iterator it = streams_.find(stream);
    CHECK(it != streams_.end()) << "unexpected stream: " << std::hex << stream;

    WeakRef s(jni(), it->second);
    streams_.erase(it);
    if (!s.obj())
      return;

    jmethodID m = GetMethodID(jni(), *j_observer_class_, "onRemoveStream",
                              "(Lorg/webrtc/MediaStream;)V");
    jni()->CallVoidMethod(*j_observer_global_, m, s.obj());
    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
  }

  virtual void OnDataChannel(DataChannelInterface* channel) OVERRIDE {
    ScopedLocalRefFrame local_ref_frame(jni());
    jobject j_channel = jni()->NewObject(
        *j_data_channel_class_, j_data_channel_ctor_, (jlong)channel);
    CHECK_EXCEPTION(jni()) << "error during NewObject";

    jmethodID m = GetMethodID(jni(), *j_observer_class_, "onDataChannel",
                              "(Lorg/webrtc/DataChannel;)V");
    jni()->CallVoidMethod(*j_observer_global_, m, j_channel);

    // Channel is now owned by Java object, and will be freed from
    // DataChannel.dispose().  Important that this be done _after_ the
    // CallVoidMethod above as Java code might call back into native code and be
    // surprised to see a refcount of 2.
    int bumped_count = channel->AddRef();
    CHECK(bumped_count == 2) << "Unexpected refcount OnDataChannel";

    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
  }

  virtual void OnRenegotiationNeeded() OVERRIDE {
    ScopedLocalRefFrame local_ref_frame(jni());
    jmethodID m =
        GetMethodID(jni(), *j_observer_class_, "onRenegotiationNeeded", "()V");
    jni()->CallVoidMethod(*j_observer_global_, m);
    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
  }

  void SetConstraints(ConstraintsWrapper* constraints) {
    CHECK(!constraints_.get()) << "constraints already set!";
    constraints_.reset(constraints);
  }

  const ConstraintsWrapper* constraints() { return constraints_.get(); }

 private:
  JNIEnv* jni() {
    return AttachCurrentThreadIfNeeded();
  }

  const ScopedGlobalRef<jobject> j_observer_global_;
  const ScopedGlobalRef<jclass> j_observer_class_;
  const ScopedGlobalRef<jclass> j_media_stream_class_;
  const jmethodID j_media_stream_ctor_;
  const ScopedGlobalRef<jclass> j_audio_track_class_;
  const jmethodID j_audio_track_ctor_;
  const ScopedGlobalRef<jclass> j_video_track_class_;
  const jmethodID j_video_track_ctor_;
  const ScopedGlobalRef<jclass> j_data_channel_class_;
  const jmethodID j_data_channel_ctor_;
  typedef std::map<void*, jweak> NativeToJavaStreamsMap;
  NativeToJavaStreamsMap streams_;  // C++ -> Java streams.
  scoped_ptr<ConstraintsWrapper> constraints_;
};

// Wrapper for a Java MediaConstraints object.  Copies all needed data so when
// the constructor returns the Java object is no longer needed.
class ConstraintsWrapper : public MediaConstraintsInterface {
 public:
  ConstraintsWrapper(JNIEnv* jni, jobject j_constraints) {
    PopulateConstraintsFromJavaPairList(
        jni, j_constraints, "mandatory", &mandatory_);
    PopulateConstraintsFromJavaPairList(
        jni, j_constraints, "optional", &optional_);
  }

  virtual ~ConstraintsWrapper() {}

  // MediaConstraintsInterface.
  virtual const Constraints& GetMandatory() const OVERRIDE {
    return mandatory_;
  }

  virtual const Constraints& GetOptional() const OVERRIDE {
    return optional_;
  }

 private:
  // Helper for translating a List<Pair<String, String>> to a Constraints.
  static void PopulateConstraintsFromJavaPairList(
      JNIEnv* jni, jobject j_constraints,
      const char* field_name, Constraints* field) {
    jfieldID j_id = GetFieldID(jni,
        GetObjectClass(jni, j_constraints), field_name, "Ljava/util/List;");
    jobject j_list = GetObjectField(jni, j_constraints, j_id);
    jmethodID j_iterator_id = GetMethodID(jni,
        GetObjectClass(jni, j_list), "iterator", "()Ljava/util/Iterator;");
    jobject j_iterator = jni->CallObjectMethod(j_list, j_iterator_id);
    CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
    jmethodID j_has_next = GetMethodID(jni,
        GetObjectClass(jni, j_iterator), "hasNext", "()Z");
    jmethodID j_next = GetMethodID(jni,
        GetObjectClass(jni, j_iterator), "next", "()Ljava/lang/Object;");
    while (jni->CallBooleanMethod(j_iterator, j_has_next)) {
      CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
      jobject entry = jni->CallObjectMethod(j_iterator, j_next);
      CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
      jmethodID get_key = GetMethodID(jni,
          GetObjectClass(jni, entry), "getKey", "()Ljava/lang/String;");
      jstring j_key = reinterpret_cast<jstring>(
          jni->CallObjectMethod(entry, get_key));
      CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
      jmethodID get_value = GetMethodID(jni,
          GetObjectClass(jni, entry), "getValue", "()Ljava/lang/String;");
      jstring j_value = reinterpret_cast<jstring>(
          jni->CallObjectMethod(entry, get_value));
      CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
      field->push_back(Constraint(JavaToStdString(jni, j_key),
                                  JavaToStdString(jni, j_value)));
    }
    CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
  }

  Constraints mandatory_;
  Constraints optional_;
};

static jobject JavaSdpFromNativeSdp(
    JNIEnv* jni, const SessionDescriptionInterface* desc) {
  std::string sdp;
  CHECK(desc->ToString(&sdp)) << "got so far: " << sdp;
  jstring j_description = JavaStringFromStdString(jni, sdp);

  jclass j_type_class = FindClass(
      jni, "org/webrtc/SessionDescription$Type");
  jmethodID j_type_from_canonical = GetStaticMethodID(
      jni, j_type_class, "fromCanonicalForm",
      "(Ljava/lang/String;)Lorg/webrtc/SessionDescription$Type;");
  jstring j_type_string = JavaStringFromStdString(jni, desc->type());
  jobject j_type = jni->CallStaticObjectMethod(
      j_type_class, j_type_from_canonical, j_type_string);
  CHECK_EXCEPTION(jni) << "error during CallObjectMethod";

  jclass j_sdp_class = FindClass(jni, "org/webrtc/SessionDescription");
  jmethodID j_sdp_ctor = GetMethodID(
      jni, j_sdp_class, "<init>",
      "(Lorg/webrtc/SessionDescription$Type;Ljava/lang/String;)V");
  jobject j_sdp = jni->NewObject(
      j_sdp_class, j_sdp_ctor, j_type, j_description);
  CHECK_EXCEPTION(jni) << "error during NewObject";
  return j_sdp;
}

template <class T>  // T is one of {Create,Set}SessionDescriptionObserver.
class SdpObserverWrapper : public T {
 public:
  SdpObserverWrapper(JNIEnv* jni, jobject j_observer,
                     ConstraintsWrapper* constraints)
      : constraints_(constraints),
        j_observer_global_(jni, j_observer),
        j_observer_class_(jni, GetObjectClass(jni, j_observer)) {
  }

  virtual ~SdpObserverWrapper() {}

  // Can't mark OVERRIDE because of templating.
  virtual void OnSuccess() {
    ScopedLocalRefFrame local_ref_frame(jni());
    jmethodID m = GetMethodID(jni(), *j_observer_class_, "onSetSuccess", "()V");
    jni()->CallVoidMethod(*j_observer_global_, m);
    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
  }

  // Can't mark OVERRIDE because of templating.
  virtual void OnSuccess(SessionDescriptionInterface* desc) {
    ScopedLocalRefFrame local_ref_frame(jni());
    jmethodID m = GetMethodID(
        jni(), *j_observer_class_, "onCreateSuccess",
        "(Lorg/webrtc/SessionDescription;)V");
    jobject j_sdp = JavaSdpFromNativeSdp(jni(), desc);
    jni()->CallVoidMethod(*j_observer_global_, m, j_sdp);
    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
  }

 protected:
  // Common implementation for failure of Set & Create types, distinguished by
  // |op| being "Set" or "Create".
  void OnFailure(const std::string& op, const std::string& error) {
    jmethodID m = GetMethodID(jni(), *j_observer_class_, "on" + op + "Failure",
                              "(Ljava/lang/String;)V");
    jstring j_error_string = JavaStringFromStdString(jni(), error);
    jni()->CallVoidMethod(*j_observer_global_, m, j_error_string);
    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
  }

  JNIEnv* jni() {
    return AttachCurrentThreadIfNeeded();
  }

 private:
  scoped_ptr<ConstraintsWrapper> constraints_;
  const ScopedGlobalRef<jobject> j_observer_global_;
  const ScopedGlobalRef<jclass> j_observer_class_;
};

class CreateSdpObserverWrapper
    : public SdpObserverWrapper<CreateSessionDescriptionObserver> {
 public:
  CreateSdpObserverWrapper(JNIEnv* jni, jobject j_observer,
                           ConstraintsWrapper* constraints)
      : SdpObserverWrapper(jni, j_observer, constraints) {}

  virtual void OnFailure(const std::string& error) OVERRIDE {
    ScopedLocalRefFrame local_ref_frame(jni());
    SdpObserverWrapper::OnFailure(std::string("Create"), error);
  }
};

class SetSdpObserverWrapper
    : public SdpObserverWrapper<SetSessionDescriptionObserver> {
 public:
  SetSdpObserverWrapper(JNIEnv* jni, jobject j_observer,
                        ConstraintsWrapper* constraints)
      : SdpObserverWrapper(jni, j_observer, constraints) {}

  virtual void OnFailure(const std::string& error) OVERRIDE {
    ScopedLocalRefFrame local_ref_frame(jni());
    SdpObserverWrapper::OnFailure(std::string("Set"), error);
  }
};

// Adapter for a Java DataChannel$Observer presenting a C++ DataChannelObserver
// and dispatching the callback from C++ back to Java.
class DataChannelObserverWrapper : public DataChannelObserver {
 public:
  DataChannelObserverWrapper(JNIEnv* jni, jobject j_observer)
      : j_observer_global_(jni, j_observer),
        j_observer_class_(jni, GetObjectClass(jni, j_observer)),
        j_buffer_class_(jni, FindClass(jni, "org/webrtc/DataChannel$Buffer")),
        j_on_state_change_mid_(GetMethodID(jni, *j_observer_class_,
                                           "onStateChange", "()V")),
        j_on_message_mid_(GetMethodID(jni, *j_observer_class_, "onMessage",
                                      "(Lorg/webrtc/DataChannel$Buffer;)V")),
        j_buffer_ctor_(GetMethodID(jni, *j_buffer_class_,
                                   "<init>", "(Ljava/nio/ByteBuffer;Z)V")) {
  }

  virtual ~DataChannelObserverWrapper() {}

  virtual void OnStateChange() OVERRIDE {
    ScopedLocalRefFrame local_ref_frame(jni());
    jni()->CallVoidMethod(*j_observer_global_, j_on_state_change_mid_);
    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
  }

  virtual void OnMessage(const DataBuffer& buffer) OVERRIDE {
    ScopedLocalRefFrame local_ref_frame(jni());
    jobject byte_buffer =
        jni()->NewDirectByteBuffer(const_cast<char*>(buffer.data.data()),
                                   buffer.data.length());
    jobject j_buffer = jni()->NewObject(*j_buffer_class_, j_buffer_ctor_,
                                        byte_buffer, buffer.binary);
    jni()->CallVoidMethod(*j_observer_global_, j_on_message_mid_, j_buffer);
    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
  }

 private:
  JNIEnv* jni() {
    return AttachCurrentThreadIfNeeded();
  }

  const ScopedGlobalRef<jobject> j_observer_global_;
  const ScopedGlobalRef<jclass> j_observer_class_;
  const ScopedGlobalRef<jclass> j_buffer_class_;
  const jmethodID j_on_state_change_mid_;
  const jmethodID j_on_message_mid_;
  const jmethodID j_buffer_ctor_;
};

// Adapter for a Java StatsObserver presenting a C++ StatsObserver and
// dispatching the callback from C++ back to Java.
class StatsObserverWrapper : public StatsObserver {
 public:
  StatsObserverWrapper(JNIEnv* jni, jobject j_observer)
      : j_observer_global_(jni, j_observer),
        j_observer_class_(jni, GetObjectClass(jni, j_observer)),
        j_stats_report_class_(jni, FindClass(jni, "org/webrtc/StatsReport")),
        j_stats_report_ctor_(GetMethodID(
            jni, *j_stats_report_class_, "<init>",
            "(Ljava/lang/String;Ljava/lang/String;D"
            "[Lorg/webrtc/StatsReport$Value;)V")),
        j_value_class_(jni, FindClass(
            jni, "org/webrtc/StatsReport$Value")),
        j_value_ctor_(GetMethodID(
            jni, *j_value_class_, "<init>",
            "(Ljava/lang/String;Ljava/lang/String;)V")) {
  }

  virtual ~StatsObserverWrapper() {}

  virtual void OnComplete(const std::vector<StatsReport>& reports) OVERRIDE {
    ScopedLocalRefFrame local_ref_frame(jni());
    jobjectArray j_reports = ReportsToJava(jni(), reports);
    jmethodID m = GetMethodID(jni(), *j_observer_class_, "onComplete",
                              "([Lorg/webrtc/StatsReport;)V");
    jni()->CallVoidMethod(*j_observer_global_, m, j_reports);
    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
  }

 private:
  jobjectArray ReportsToJava(
      JNIEnv* jni, const std::vector<StatsReport>& reports) {
    jobjectArray reports_array = jni->NewObjectArray(
        reports.size(), *j_stats_report_class_, NULL);
    for (int i = 0; i < reports.size(); ++i) {
      ScopedLocalRefFrame local_ref_frame(jni);
      const StatsReport& report = reports[i];
      jstring j_id = JavaStringFromStdString(jni, report.id);
      jstring j_type = JavaStringFromStdString(jni, report.type);
      jobjectArray j_values = ValuesToJava(jni, report.values);
      jobject j_report = jni->NewObject(*j_stats_report_class_,
                                        j_stats_report_ctor_,
                                        j_id,
                                        j_type,
                                        report.timestamp,
                                        j_values);
      jni->SetObjectArrayElement(reports_array, i, j_report);
    }
    return reports_array;
  }

  jobjectArray ValuesToJava(JNIEnv* jni, const StatsReport::Values& values) {
    jobjectArray j_values = jni->NewObjectArray(
        values.size(), *j_value_class_, NULL);
    for (int i = 0; i < values.size(); ++i) {
      ScopedLocalRefFrame local_ref_frame(jni);
      const StatsReport::Value& value = values[i];
      jstring j_name = JavaStringFromStdString(jni, value.name);
      jstring j_value = JavaStringFromStdString(jni, value.value);
      jobject j_element_value =
          jni->NewObject(*j_value_class_, j_value_ctor_, j_name, j_value);
      jni->SetObjectArrayElement(j_values, i, j_element_value);
    }
    return j_values;
  }

  JNIEnv* jni() {
    return AttachCurrentThreadIfNeeded();
  }

  const ScopedGlobalRef<jobject> j_observer_global_;
  const ScopedGlobalRef<jclass> j_observer_class_;
  const ScopedGlobalRef<jclass> j_stats_report_class_;
  const jmethodID j_stats_report_ctor_;
  const ScopedGlobalRef<jclass> j_value_class_;
  const jmethodID j_value_ctor_;
};

// Adapter presenting a cricket::VideoRenderer as a
// webrtc::VideoRendererInterface.
class VideoRendererWrapper : public VideoRendererInterface {
 public:
  static VideoRendererWrapper* Create(cricket::VideoRenderer* renderer) {
    if (renderer)
      return new VideoRendererWrapper(renderer);
    return NULL;
  }

  virtual ~VideoRendererWrapper() {}

  virtual void SetSize(int width, int height) OVERRIDE {
    ScopedLocalRefFrame local_ref_frame(AttachCurrentThreadIfNeeded());
    const bool kNotReserved = false;  // What does this param mean??
    renderer_->SetSize(width, height, kNotReserved);
  }

  virtual void RenderFrame(const cricket::VideoFrame* frame) OVERRIDE {
    ScopedLocalRefFrame local_ref_frame(AttachCurrentThreadIfNeeded());
    renderer_->RenderFrame(frame);
  }

 private:
  explicit VideoRendererWrapper(cricket::VideoRenderer* renderer)
      : renderer_(renderer) {}

  scoped_ptr<cricket::VideoRenderer> renderer_;
};

// Wrapper for texture object in TextureVideoFrame.
class NativeHandleImpl : public NativeHandle {
 public:
  NativeHandleImpl() :
    ref_count_(0), texture_object_(NULL), texture_id_(-1) {}
  virtual ~NativeHandleImpl() {}
  virtual int32_t AddRef() {
    return ++ref_count_;
  }
  virtual int32_t Release() {
    return --ref_count_;
  }
  virtual void* GetHandle() {
    return texture_object_;
  }
  int GetTextureId() {
    return texture_id_;
  }
  void SetTextureObject(void *texture_object, int texture_id) {
    texture_object_ = reinterpret_cast<jobject>(texture_object);
    texture_id_ = texture_id;
  }
  int32_t ref_count() {
    return ref_count_;
  }

 private:
  int32_t ref_count_;
  jobject texture_object_;
  int32_t texture_id_;
};

// Wrapper dispatching webrtc::VideoRendererInterface to a Java VideoRenderer
// instance.
class JavaVideoRendererWrapper : public VideoRendererInterface {
 public:
  JavaVideoRendererWrapper(JNIEnv* jni, jobject j_callbacks)
      : j_callbacks_(jni, j_callbacks),
        j_set_size_id_(GetMethodID(
            jni, GetObjectClass(jni, j_callbacks), "setSize", "(II)V")),
        j_render_frame_id_(GetMethodID(
            jni, GetObjectClass(jni, j_callbacks), "renderFrame",
            "(Lorg/webrtc/VideoRenderer$I420Frame;)V")),
        j_frame_class_(jni,
                       FindClass(jni, "org/webrtc/VideoRenderer$I420Frame")),
        j_i420_frame_ctor_id_(GetMethodID(
            jni, *j_frame_class_, "<init>", "(II[I[Ljava/nio/ByteBuffer;)V")),
        j_texture_frame_ctor_id_(GetMethodID(
            jni, *j_frame_class_, "<init>",
            "(IILjava/lang/Object;I)V")),
        j_byte_buffer_class_(jni, FindClass(jni, "java/nio/ByteBuffer")) {
    CHECK_EXCEPTION(jni);
  }

  virtual ~JavaVideoRendererWrapper() {}

  virtual void SetSize(int width, int height) OVERRIDE {
    ScopedLocalRefFrame local_ref_frame(jni());
    jni()->CallVoidMethod(*j_callbacks_, j_set_size_id_, width, height);
    CHECK_EXCEPTION(jni());
  }

  virtual void RenderFrame(const cricket::VideoFrame* frame) OVERRIDE {
    ScopedLocalRefFrame local_ref_frame(jni());
    if (frame->GetNativeHandle() != NULL) {
      jobject j_frame = CricketToJavaTextureFrame(frame);
      jni()->CallVoidMethod(*j_callbacks_, j_render_frame_id_, j_frame);
      CHECK_EXCEPTION(jni());
    } else {
      jobject j_frame = CricketToJavaI420Frame(frame);
      jni()->CallVoidMethod(*j_callbacks_, j_render_frame_id_, j_frame);
      CHECK_EXCEPTION(jni());
    }
  }

 private:
  // Return a VideoRenderer.I420Frame referring to the data in |frame|.
  jobject CricketToJavaI420Frame(const cricket::VideoFrame* frame) {
    jintArray strides = jni()->NewIntArray(3);
    jint* strides_array = jni()->GetIntArrayElements(strides, NULL);
    strides_array[0] = frame->GetYPitch();
    strides_array[1] = frame->GetUPitch();
    strides_array[2] = frame->GetVPitch();
    jni()->ReleaseIntArrayElements(strides, strides_array, 0);
    jobjectArray planes = jni()->NewObjectArray(3, *j_byte_buffer_class_, NULL);
    jobject y_buffer = jni()->NewDirectByteBuffer(
        const_cast<uint8*>(frame->GetYPlane()),
        frame->GetYPitch() * frame->GetHeight());
    jobject u_buffer = jni()->NewDirectByteBuffer(
        const_cast<uint8*>(frame->GetUPlane()), frame->GetChromaSize());
    jobject v_buffer = jni()->NewDirectByteBuffer(
        const_cast<uint8*>(frame->GetVPlane()), frame->GetChromaSize());
    jni()->SetObjectArrayElement(planes, 0, y_buffer);
    jni()->SetObjectArrayElement(planes, 1, u_buffer);
    jni()->SetObjectArrayElement(planes, 2, v_buffer);
    return jni()->NewObject(
        *j_frame_class_, j_i420_frame_ctor_id_,
        frame->GetWidth(), frame->GetHeight(), strides, planes);
  }

  // Return a VideoRenderer.I420Frame referring to the texture object in
  // |frame|.
  jobject CricketToJavaTextureFrame(const cricket::VideoFrame* frame) {
    NativeHandleImpl* handle =
        reinterpret_cast<NativeHandleImpl*>(frame->GetNativeHandle());
    jobject texture_object = reinterpret_cast<jobject>(handle->GetHandle());
    int texture_id = handle->GetTextureId();
    return jni()->NewObject(
        *j_frame_class_, j_texture_frame_ctor_id_,
        frame->GetWidth(), frame->GetHeight(), texture_object, texture_id);
  }

  JNIEnv* jni() {
    return AttachCurrentThreadIfNeeded();
  }

  ScopedGlobalRef<jobject> j_callbacks_;
  jmethodID j_set_size_id_;
  jmethodID j_render_frame_id_;
  ScopedGlobalRef<jclass> j_frame_class_;
  jmethodID j_i420_frame_ctor_id_;
  jmethodID j_texture_frame_ctor_id_;
  ScopedGlobalRef<jclass> j_byte_buffer_class_;
};
1197 
1198 #if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
1199 // TODO(fischman): consider pulling MediaCodecVideoEncoder out of this file and
1200 // into its own .h/.cc pair, if/when the JNI helper stuff above is extracted
1201 // from this file.
1202 
1203 //#define TRACK_BUFFER_TIMING
1204 #define TAG "MediaCodecVideo"
1205 #ifdef TRACK_BUFFER_TIMING
1206 #define ALOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, TAG, __VA_ARGS__)
1207 #else
1208 #define ALOGV(...)
1209 #endif
1210 #define ALOGD(...) __android_log_print(ANDROID_LOG_DEBUG, TAG, __VA_ARGS__)
1211 #define ALOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)
1212 
1213 // Color formats supported by encoder - should mirror supportedColorList
1214 // from MediaCodecVideoEncoder.java
1215 enum COLOR_FORMATTYPE {
1216   COLOR_FormatYUV420Planar = 0x13,
1217   COLOR_FormatYUV420SemiPlanar = 0x15,
1218   COLOR_QCOM_FormatYUV420SemiPlanar = 0x7FA30C00,
1219   // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
1220   // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
1221   // This format is presumably similar to COLOR_FormatYUV420SemiPlanar,
1222   // but requires some (16, 32?) byte alignment.
1223   COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04
1224 };
1225 
1226 // Arbitrary interval to poll the codec for new outputs.
1227 enum { kMediaCodecPollMs = 10 };
1228 // Media codec maximum output buffer ready timeout.
1229 enum { kMediaCodecTimeoutMs = 500 };
1230 // Interval to print codec statistics (bitrate, fps, encoding/decoding time).
1231 enum { kMediaCodecStatisticsIntervalMs = 3000 };
1232 
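// Current time in milliseconds, used for the statistics and latency bookkeeping below.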
1233 static int64_t GetCurrentTimeMs() {
1234   return TickTime::Now().Ticks() / 1000000LL;
1235 }
1236 
1237 // MediaCodecVideoEncoder is a webrtc::VideoEncoder implementation that uses
1238 // Android's MediaCodec SDK API behind the scenes to implement (hopefully)
1239 // HW-backed video encode.  This C++ class is implemented as a very thin shim,
1240 // delegating all of the interesting work to org.webrtc.MediaCodecVideoEncoder.
1241 // MediaCodecVideoEncoder is created, operated, and destroyed on a single
1242 // thread, currently the libjingle Worker thread.
1243 class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
1244                                public rtc::MessageHandler {
1245  public:
1246   virtual ~MediaCodecVideoEncoder();
1247   explicit MediaCodecVideoEncoder(JNIEnv* jni);
1248 
1249   // webrtc::VideoEncoder implementation.  Everything trampolines to
1250   // |codec_thread_| for execution.
1251   virtual int32_t InitEncode(const webrtc::VideoCodec* codec_settings,
1252                              int32_t /* number_of_cores */,
1253                              uint32_t /* max_payload_size */) OVERRIDE;
1254   virtual int32_t Encode(
1255       const webrtc::I420VideoFrame& input_image,
1256       const webrtc::CodecSpecificInfo* /* codec_specific_info */,
1257       const std::vector<webrtc::VideoFrameType>* frame_types) OVERRIDE;
1258   virtual int32_t RegisterEncodeCompleteCallback(
1259       webrtc::EncodedImageCallback* callback) OVERRIDE;
1260   virtual int32_t Release() OVERRIDE;
1261   virtual int32_t SetChannelParameters(uint32_t /* packet_loss */,
1262                                        int /* rtt */) OVERRIDE;
1263   virtual int32_t SetRates(uint32_t new_bit_rate, uint32_t frame_rate) OVERRIDE;
1264 
1265   // rtc::MessageHandler implementation.
1266   virtual void OnMessage(rtc::Message* msg) OVERRIDE;
1267 
1268  private:
1269   // CHECK-fail if not running on |codec_thread_|.
1270   void CheckOnCodecThread();
1271 
1272   // Release() and InitEncode() in an attempt to restore the codec to an
1273   // operable state.  Necessary after all manner of OMX-layer errors.
1274   void ResetCodec();
1275 
1276   // Implementation of webrtc::VideoEncoder methods above, all running on the
1277   // codec thread exclusively.
1278   //
1279   // If width==0 then this is assumed to be a re-initialization and the
1280   // previously-current values are reused instead of the passed parameters
1281   // (makes it easier to reason about thread-safety).
1282   int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps);
1283   int32_t EncodeOnCodecThread(
1284       const webrtc::I420VideoFrame& input_image,
1285       const std::vector<webrtc::VideoFrameType>* frame_types);
1286   int32_t RegisterEncodeCompleteCallbackOnCodecThread(
1287       webrtc::EncodedImageCallback* callback);
1288   int32_t ReleaseOnCodecThread();
1289   int32_t SetRatesOnCodecThread(uint32_t new_bit_rate, uint32_t frame_rate);
1290 
1291   // Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members.
1292   int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info);
1293   jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info);
1294   bool GetOutputBufferInfoIsKeyFrame(JNIEnv* jni, jobject j_output_buffer_info);
1295   jlong GetOutputBufferInfoPresentationTimestampUs(
1296       JNIEnv* jni,
1297       jobject j_output_buffer_info);
1298 
1299   // Deliver any outputs pending in the MediaCodec to our |callback_| and return
1300   // true on success.
1301   bool DeliverPendingOutputs(JNIEnv* jni);
1302 
1303   // Valid all the time since RegisterEncodeCompleteCallback() Invoke()s to
1304   // |codec_thread_| synchronously.
1305   webrtc::EncodedImageCallback* callback_;
1306 
1307   // State that is constant for the lifetime of this object once the ctor
1308   // returns.
1309   scoped_ptr<Thread> codec_thread_;  // Thread on which to operate MediaCodec.
1310   ScopedGlobalRef<jclass> j_media_codec_video_encoder_class_;
1311   ScopedGlobalRef<jobject> j_media_codec_video_encoder_;
1312   jmethodID j_init_encode_method_;
1313   jmethodID j_dequeue_input_buffer_method_;
1314   jmethodID j_encode_method_;
1315   jmethodID j_release_method_;
1316   jmethodID j_set_rates_method_;
1317   jmethodID j_dequeue_output_buffer_method_;
1318   jmethodID j_release_output_buffer_method_;
1319   jfieldID j_color_format_field_;
1320   jfieldID j_info_index_field_;
1321   jfieldID j_info_buffer_field_;
1322   jfieldID j_info_is_key_frame_field_;
1323   jfieldID j_info_presentation_timestamp_us_field_;
1324 
1325   // State that is valid only between InitEncode() and the next Release().
1326   // Touched only on codec_thread_ so no explicit synchronization necessary.
1327   int width_;   // Frame width in pixels.
1328   int height_;  // Frame height in pixels.
1329   bool inited_;
1330   enum libyuv::FourCC encoder_fourcc_; // Encoder color space format.
1331   int last_set_bitrate_kbps_;  // Last-requested bitrate in kbps.
1332   int last_set_fps_;  // Last-requested frame rate.
1333   int64_t current_timestamp_us_;  // Current frame timestamps in us.
1334   int frames_received_;  // Number of frames received by encoder.
1335   int frames_dropped_;  // Number of frames dropped by encoder.
1336   int frames_resolution_update_;  // Number of frames with new codec resolution.
1337   int frames_in_queue_;  // Number of frames in encoder queue.
1338   int64_t start_time_ms_;  // Start time for statistics.
1339   int current_frames_;  // Number of frames in the current statistics interval.
1340   int current_bytes_;  // Encoded bytes in the current statistics interval.
1341   int current_encoding_time_ms_;  // Overall encoding time in the current statistics interval.
1342   int64_t last_input_timestamp_ms_;  // Timestamp of last received yuv frame.
1343   int64_t last_output_timestamp_ms_;  // Timestamp of last encoded frame.
1344   std::vector<int32_t> timestamps_;  // Video frames timestamp queue.
1345   std::vector<int64_t> render_times_ms_;  // Video frames render time queue.
1346   std::vector<int64_t> frame_rtc_times_ms_;  // Time when video frame is sent to
1347                                              // encoder input.
1348   // Frame size in bytes fed to MediaCodec.
1349   int yuv_size_;
1350   // True only between a callback_->Encoded() call returning a positive value
1351   // and the next Encode() call, which is then ignored.
1352   bool drop_next_input_frame_;
1353   // Global references; must be deleted in Release().
1354   std::vector<jobject> input_buffers_;
1355 };
1356 
1357 MediaCodecVideoEncoder::~MediaCodecVideoEncoder() {
1358   // Call Release() to ensure no more callbacks to us after we are deleted.
1359   Release();
1360 }
1361 
1362 MediaCodecVideoEncoder::MediaCodecVideoEncoder(JNIEnv* jni)
1363   : callback_(NULL),
1364     inited_(false),
1365     codec_thread_(new Thread()),
1366     j_media_codec_video_encoder_class_(
1367         jni,
1368         FindClass(jni, "org/webrtc/MediaCodecVideoEncoder")),
1369     j_media_codec_video_encoder_(
1370         jni,
1371         jni->NewObject(*j_media_codec_video_encoder_class_,
1372                        GetMethodID(jni,
1373                                    *j_media_codec_video_encoder_class_,
1374                                    "<init>",
1375                                    "()V"))) {
1376   ScopedLocalRefFrame local_ref_frame(jni);
1377   // It would be nice to avoid spinning up a new thread per MediaCodec, and
1378   // instead re-use e.g. the PeerConnectionFactory's |worker_thread_|, but bug
1379   // 2732 means that deadlocks abound.  This class synchronously trampolines
1380   // to |codec_thread_|, so if anything else can be coming to _us_ from
1381   // |codec_thread_|, or from any thread holding the |_sendCritSect| described
1382   // in the bug, we have a problem.  For now work around that with a dedicated
1383   // thread.
1384   codec_thread_->SetName("MediaCodecVideoEncoder", NULL);
1385   CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoEncoder";
1386 
1387   jclass j_output_buffer_info_class =
1388       FindClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
1389   j_init_encode_method_ = GetMethodID(jni,
1390                                       *j_media_codec_video_encoder_class_,
1391                                       "initEncode",
1392                                       "(IIII)[Ljava/nio/ByteBuffer;");
1393   j_dequeue_input_buffer_method_ = GetMethodID(
1394       jni, *j_media_codec_video_encoder_class_, "dequeueInputBuffer", "()I");
1395   j_encode_method_ = GetMethodID(
1396       jni, *j_media_codec_video_encoder_class_, "encode", "(ZIIJ)Z");
1397   j_release_method_ =
1398       GetMethodID(jni, *j_media_codec_video_encoder_class_, "release", "()V");
1399   j_set_rates_method_ = GetMethodID(
1400       jni, *j_media_codec_video_encoder_class_, "setRates", "(II)Z");
1401   j_dequeue_output_buffer_method_ =
1402       GetMethodID(jni,
1403                   *j_media_codec_video_encoder_class_,
1404                   "dequeueOutputBuffer",
1405                   "()Lorg/webrtc/MediaCodecVideoEncoder$OutputBufferInfo;");
1406   j_release_output_buffer_method_ = GetMethodID(
1407       jni, *j_media_codec_video_encoder_class_, "releaseOutputBuffer", "(I)Z");
1408 
1409   j_color_format_field_ =
1410       GetFieldID(jni, *j_media_codec_video_encoder_class_, "colorFormat", "I");
1411   j_info_index_field_ =
1412       GetFieldID(jni, j_output_buffer_info_class, "index", "I");
1413   j_info_buffer_field_ = GetFieldID(
1414       jni, j_output_buffer_info_class, "buffer", "Ljava/nio/ByteBuffer;");
1415   j_info_is_key_frame_field_ =
1416       GetFieldID(jni, j_output_buffer_info_class, "isKeyFrame", "Z");
1417   j_info_presentation_timestamp_us_field_ = GetFieldID(
1418       jni, j_output_buffer_info_class, "presentationTimestampUs", "J");
1419   CHECK_EXCEPTION(jni) << "MediaCodecVideoEncoder ctor failed";
1420 }
1421 
1422 int32_t MediaCodecVideoEncoder::InitEncode(
1423     const webrtc::VideoCodec* codec_settings,
1424     int32_t /* number_of_cores */,
1425     uint32_t /* max_payload_size */) {
1426   // Factory should guard against other codecs being used with us.
1427   CHECK(codec_settings->codecType == kVideoCodecVP8) << "Unsupported codec";
1428 
1429   return codec_thread_->Invoke<int32_t>(
1430       Bind(&MediaCodecVideoEncoder::InitEncodeOnCodecThread,
1431            this,
1432            codec_settings->width,
1433            codec_settings->height,
1434            codec_settings->startBitrate,
1435            codec_settings->maxFramerate));
1436 }
1437 
1438 int32_t MediaCodecVideoEncoder::Encode(
1439     const webrtc::I420VideoFrame& frame,
1440     const webrtc::CodecSpecificInfo* /* codec_specific_info */,
1441     const std::vector<webrtc::VideoFrameType>* frame_types) {
1442   return codec_thread_->Invoke<int32_t>(Bind(
1443       &MediaCodecVideoEncoder::EncodeOnCodecThread, this, frame, frame_types));
1444 }
1445 
1446 int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallback(
1447     webrtc::EncodedImageCallback* callback) {
1448   return codec_thread_->Invoke<int32_t>(
1449       Bind(&MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread,
1450            this,
1451            callback));
1452 }
1453 
1454 int32_t MediaCodecVideoEncoder::Release() {
1455   return codec_thread_->Invoke<int32_t>(
1456       Bind(&MediaCodecVideoEncoder::ReleaseOnCodecThread, this));
1457 }
1458 
1459 int32_t MediaCodecVideoEncoder::SetChannelParameters(uint32_t /* packet_loss */,
1460                                                      int /* rtt */) {
1461   return WEBRTC_VIDEO_CODEC_OK;
1462 }
1463 
1464 int32_t MediaCodecVideoEncoder::SetRates(uint32_t new_bit_rate,
1465                                          uint32_t frame_rate) {
1466   return codec_thread_->Invoke<int32_t>(
1467       Bind(&MediaCodecVideoEncoder::SetRatesOnCodecThread,
1468            this,
1469            new_bit_rate,
1470            frame_rate));
1471 }
1472 
1473 void MediaCodecVideoEncoder::OnMessage(rtc::Message* msg) {
1474   JNIEnv* jni = AttachCurrentThreadIfNeeded();
1475   ScopedLocalRefFrame local_ref_frame(jni);
1476 
1477   // We only ever send one message to |this| directly (not through a Bind()'d
1478   // functor), so expect no ID/data.
1479   CHECK(!msg->message_id) << "Unexpected message!";
1480   CHECK(!msg->pdata) << "Unexpected message!";
1481   CheckOnCodecThread();
1482   if (!inited_) {
1483     return;
1484   }
1485 
1486   // It would be nice to recover from a failure here if one happened, but it's
1487   // unclear how to signal such a failure to the app, so instead we stay silent
1488   // about it and let the next app-called API method reveal the borkedness.
1489   DeliverPendingOutputs(jni);
1490   codec_thread_->PostDelayed(kMediaCodecPollMs, this);
1491 }
1492 
1493 void MediaCodecVideoEncoder::CheckOnCodecThread() {
1494   CHECK(codec_thread_ == ThreadManager::Instance()->CurrentThread())
1495       << "Running on wrong thread!";
1496 }
1497 
1498 void MediaCodecVideoEncoder::ResetCodec() {
1499   ALOGE("ResetCodec");
1500   if (Release() != WEBRTC_VIDEO_CODEC_OK ||
1501       codec_thread_->Invoke<int32_t>(Bind(
1502           &MediaCodecVideoEncoder::InitEncodeOnCodecThread, this,
1503           width_, height_, 0, 0)) != WEBRTC_VIDEO_CODEC_OK) {
1504     // TODO(fischman): wouldn't it be nice if there was a way to gracefully
1505     // degrade to a SW encoder at this point?  There isn't one AFAICT :(
1506     // https://code.google.com/p/webrtc/issues/detail?id=2920
1507   }
1508 }
1509 
1510 int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
1511     int width, int height, int kbps, int fps) {
1512   CheckOnCodecThread();
1513   JNIEnv* jni = AttachCurrentThreadIfNeeded();
1514   ScopedLocalRefFrame local_ref_frame(jni);
1515 
1516   ALOGD("InitEncodeOnCodecThread %d x %d. Bitrate: %d kbps. Fps: %d",
1517       width, height, kbps, fps);
1518   if (kbps == 0) {
1519     kbps = last_set_bitrate_kbps_;
1520   }
1521   if (fps == 0) {
1522     fps = last_set_fps_;
1523   }
1524 
1525   width_ = width;
1526   height_ = height;
1527   last_set_bitrate_kbps_ = kbps;
1528   last_set_fps_ = fps;
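  // I420 frame size: full-resolution Y plane plus two quarter-resolution chroma planes.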
1529   yuv_size_ = width_ * height_ * 3 / 2;
1530   frames_received_ = 0;
1531   frames_dropped_ = 0;
1532   frames_resolution_update_ = 0;
1533   frames_in_queue_ = 0;
1534   current_timestamp_us_ = 0;
1535   start_time_ms_ = GetCurrentTimeMs();
1536   current_frames_ = 0;
1537   current_bytes_ = 0;
1538   current_encoding_time_ms_ = 0;
1539   last_input_timestamp_ms_ = -1;
1540   last_output_timestamp_ms_ = -1;
1541   timestamps_.clear();
1542   render_times_ms_.clear();
1543   frame_rtc_times_ms_.clear();
1544   drop_next_input_frame_ = false;
1545   // We enforce no extra stride/padding in the format creation step.
1546   jobjectArray input_buffers = reinterpret_cast<jobjectArray>(
1547       jni->CallObjectMethod(*j_media_codec_video_encoder_,
1548                             j_init_encode_method_,
1549                             width_,
1550                             height_,
1551                             kbps,
1552                             fps));
1553   CHECK_EXCEPTION(jni);
1554   if (IsNull(jni, input_buffers))
1555     return WEBRTC_VIDEO_CODEC_ERROR;
1556 
1557   inited_ = true;
1558   switch (GetIntField(jni, *j_media_codec_video_encoder_,
1559       j_color_format_field_)) {
1560     case COLOR_FormatYUV420Planar:
1561       encoder_fourcc_ = libyuv::FOURCC_YU12;
1562       break;
1563     case COLOR_FormatYUV420SemiPlanar:
1564     case COLOR_QCOM_FormatYUV420SemiPlanar:
1565     case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m:
1566       encoder_fourcc_ = libyuv::FOURCC_NV12;
1567       break;
1568     default:
1569       LOG(LS_ERROR) << "Wrong color format.";
1570       return WEBRTC_VIDEO_CODEC_ERROR;
1571   }
1572   size_t num_input_buffers = jni->GetArrayLength(input_buffers);
1573   CHECK(input_buffers_.empty())
1574       << "Unexpected double InitEncode without Release";
1575   input_buffers_.resize(num_input_buffers);
1576   for (size_t i = 0; i < num_input_buffers; ++i) {
1577     input_buffers_[i] =
1578         jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
1579     int64 yuv_buffer_capacity =
1580         jni->GetDirectBufferCapacity(input_buffers_[i]);
1581     CHECK_EXCEPTION(jni);
1582     CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity";
1583   }
1584   CHECK_EXCEPTION(jni);
1585 
1586   codec_thread_->PostDelayed(kMediaCodecPollMs, this);
1587   return WEBRTC_VIDEO_CODEC_OK;
1588 }
1589 
1590 int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
1591     const webrtc::I420VideoFrame& frame,
1592     const std::vector<webrtc::VideoFrameType>* frame_types) {
1593   CheckOnCodecThread();
1594   JNIEnv* jni = AttachCurrentThreadIfNeeded();
1595   ScopedLocalRefFrame local_ref_frame(jni);
1596 
1597   if (!inited_) {
1598     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
1599   }
1600   frames_received_++;
1601   if (!DeliverPendingOutputs(jni)) {
1602     ResetCodec();
1603     // Continue as if everything's fine.
1604   }
1605 
1606   if (drop_next_input_frame_) {
1607     ALOGV("Encoder drop frame - failed callback.");
1608     drop_next_input_frame_ = false;
1609     return WEBRTC_VIDEO_CODEC_OK;
1610   }
1611 
1612   CHECK(frame_types->size() == 1) << "Unexpected stream count";
1613   if (frame.width() != width_ || frame.height() != height_) {
1614     frames_resolution_update_++;
1615     ALOGD("Unexpected frame resolution change from %d x %d to %d x %d",
1616         width_, height_, frame.width(), frame.height());
1617     if (frames_resolution_update_ > 3) {
1618       // Reset codec if we received more than 3 frames with new resolution.
1619       width_ = frame.width();
1620       height_ = frame.height();
1621       frames_resolution_update_ = 0;
1622       ResetCodec();
1623     }
1624     return WEBRTC_VIDEO_CODEC_OK;
1625   }
1626   frames_resolution_update_ = 0;
1627 
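  // Treat any requested frame type other than a delta frame as a key frame request.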
1628   bool key_frame = frame_types->front() != webrtc::kDeltaFrame;
1629 
1630   // Check if we accumulated too many frames in encoder input buffers
1631   // or the encoder latency exceeds 70 ms and drop frame if so.
1632   if (frames_in_queue_ > 0 && last_input_timestamp_ms_ >= 0) {
1633     int encoder_latency_ms = last_input_timestamp_ms_ -
1634         last_output_timestamp_ms_;
1635     if (frames_in_queue_ > 2 || encoder_latency_ms > 70) {
1636       ALOGV("Drop frame - encoder is behind by %d ms. Q size: %d",
1637           encoder_latency_ms, frames_in_queue_);
1638       frames_dropped_++;
1639       return WEBRTC_VIDEO_CODEC_OK;
1640     }
1641   }
1642 
1643   int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_,
1644                                                 j_dequeue_input_buffer_method_);
1645   CHECK_EXCEPTION(jni);
1646   if (j_input_buffer_index == -1) {
1647     // Video codec falls behind - no input buffer available.
1648     ALOGV("Encoder drop frame - no input buffers available");
1649     frames_dropped_++;
1650     return WEBRTC_VIDEO_CODEC_OK;  // TODO(fischman): see webrtc bug 2887.
1651   }
1652   if (j_input_buffer_index == -2) {
1653     ResetCodec();
1654     return WEBRTC_VIDEO_CODEC_ERROR;
1655   }
1656 
1657   ALOGV("Encode frame # %d. Buffer # %d. TS: %lld.",
1658       frames_received_, j_input_buffer_index, current_timestamp_us_ / 1000);
1659 
1660   jobject j_input_buffer = input_buffers_[j_input_buffer_index];
1661   uint8* yuv_buffer =
1662       reinterpret_cast<uint8*>(jni->GetDirectBufferAddress(j_input_buffer));
1663   CHECK_EXCEPTION(jni);
1664   CHECK(yuv_buffer) << "Indirect buffer??";
1665   CHECK(!libyuv::ConvertFromI420(
1666           frame.buffer(webrtc::kYPlane), frame.stride(webrtc::kYPlane),
1667           frame.buffer(webrtc::kUPlane), frame.stride(webrtc::kUPlane),
1668           frame.buffer(webrtc::kVPlane), frame.stride(webrtc::kVPlane),
1669           yuv_buffer, width_,
1670           width_, height_,
1671           encoder_fourcc_))
1672       << "ConvertFromI420 failed";
1673   last_input_timestamp_ms_ = current_timestamp_us_ / 1000;
1674   frames_in_queue_++;
1675 
1676   // Save input image timestamps for later output
1677   timestamps_.push_back(frame.timestamp());
1678   render_times_ms_.push_back(frame.render_time_ms());
1679   frame_rtc_times_ms_.push_back(GetCurrentTimeMs());
1680 
1681   bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
1682                                               j_encode_method_,
1683                                               key_frame,
1684                                               j_input_buffer_index,
1685                                               yuv_size_,
1686                                               current_timestamp_us_);
1687   CHECK_EXCEPTION(jni);
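  // Advance the synthetic presentation timestamp by one nominal frame duration.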
1688   current_timestamp_us_ += 1000000 / last_set_fps_;
1689 
1690   if (!encode_status || !DeliverPendingOutputs(jni)) {
1691     ResetCodec();
1692     return WEBRTC_VIDEO_CODEC_ERROR;
1693   }
1694 
1695   return WEBRTC_VIDEO_CODEC_OK;
1696 }
1697 
1698 int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread(
1699     webrtc::EncodedImageCallback* callback) {
1700   CheckOnCodecThread();
1701   JNIEnv* jni = AttachCurrentThreadIfNeeded();
1702   ScopedLocalRefFrame local_ref_frame(jni);
1703   callback_ = callback;
1704   return WEBRTC_VIDEO_CODEC_OK;
1705 }
1706 
1707 int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() {
1708   if (!inited_) {
1709     return WEBRTC_VIDEO_CODEC_OK;
1710   }
1711   CheckOnCodecThread();
1712   JNIEnv* jni = AttachCurrentThreadIfNeeded();
1713   ALOGD("EncoderRelease: Frames received: %d. Frames dropped: %d.",
1714       frames_received_, frames_dropped_);
1715   ScopedLocalRefFrame local_ref_frame(jni);
1716   for (size_t i = 0; i < input_buffers_.size(); ++i)
1717     jni->DeleteGlobalRef(input_buffers_[i]);
1718   input_buffers_.clear();
1719   jni->CallVoidMethod(*j_media_codec_video_encoder_, j_release_method_);
1720   CHECK_EXCEPTION(jni);
1721   rtc::MessageQueueManager::Clear(this);
1722   inited_ = false;
1723   return WEBRTC_VIDEO_CODEC_OK;
1724 }
1725 
1726 int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate,
1727                                                       uint32_t frame_rate) {
1728   CheckOnCodecThread();
1729   if (last_set_bitrate_kbps_ == new_bit_rate &&
1730       last_set_fps_ == frame_rate) {
1731     return WEBRTC_VIDEO_CODEC_OK;
1732   }
1733   JNIEnv* jni = AttachCurrentThreadIfNeeded();
1734   ScopedLocalRefFrame local_ref_frame(jni);
1735   if (new_bit_rate > 0) {
1736     last_set_bitrate_kbps_ = new_bit_rate;
1737   }
1738   if (frame_rate > 0) {
1739     last_set_fps_ = frame_rate;
1740   }
1741   bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
1742                                        j_set_rates_method_,
1743                                        last_set_bitrate_kbps_,
1744                                        last_set_fps_);
1745   CHECK_EXCEPTION(jni);
1746   if (!ret) {
1747     ResetCodec();
1748     return WEBRTC_VIDEO_CODEC_ERROR;
1749   }
1750   return WEBRTC_VIDEO_CODEC_OK;
1751 }
1752 
1753 int MediaCodecVideoEncoder::GetOutputBufferInfoIndex(
1754     JNIEnv* jni,
1755     jobject j_output_buffer_info) {
1756   return GetIntField(jni, j_output_buffer_info, j_info_index_field_);
1757 }
1758 
1759 jobject MediaCodecVideoEncoder::GetOutputBufferInfoBuffer(
1760     JNIEnv* jni,
1761     jobject j_output_buffer_info) {
1762   return GetObjectField(jni, j_output_buffer_info, j_info_buffer_field_);
1763 }
1764 
1765 bool MediaCodecVideoEncoder::GetOutputBufferInfoIsKeyFrame(
1766     JNIEnv* jni,
1767     jobject j_output_buffer_info) {
1768   return GetBooleanField(jni, j_output_buffer_info, j_info_is_key_frame_field_);
1769 }
1770 
1771 jlong MediaCodecVideoEncoder::GetOutputBufferInfoPresentationTimestampUs(
1772     JNIEnv* jni,
1773     jobject j_output_buffer_info) {
1774   return GetLongField(
1775       jni, j_output_buffer_info, j_info_presentation_timestamp_us_field_);
1776 }
1777 
1778 bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
1779   while (true) {
1780     jobject j_output_buffer_info = jni->CallObjectMethod(
1781         *j_media_codec_video_encoder_, j_dequeue_output_buffer_method_);
1782     CHECK_EXCEPTION(jni);
1783     if (IsNull(jni, j_output_buffer_info)) {
1784       break;
1785     }
1786 
1787     int output_buffer_index =
1788         GetOutputBufferInfoIndex(jni, j_output_buffer_info);
1789     if (output_buffer_index == -1) {
1790       ResetCodec();
1791       return false;
1792     }
1793 
1794     // Get frame timestamps from a queue.
1795     last_output_timestamp_ms_ =
1796         GetOutputBufferInfoPresentationTimestampUs(jni, j_output_buffer_info) /
1797         1000;
1798     int32_t timestamp = timestamps_.front();
1799     timestamps_.erase(timestamps_.begin());
1800     int64_t render_time_ms = render_times_ms_.front();
1801     render_times_ms_.erase(render_times_ms_.begin());
1802     int64_t frame_encoding_time_ms = GetCurrentTimeMs() -
1803         frame_rtc_times_ms_.front();
1804     frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
1805     frames_in_queue_--;
1806 
1807     // Extract payload and key frame flag.
1808     int32_t callback_status = 0;
1809     jobject j_output_buffer =
1810         GetOutputBufferInfoBuffer(jni, j_output_buffer_info);
1811     bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info);
1812     size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer);
1813     uint8* payload = reinterpret_cast<uint8_t*>(
1814         jni->GetDirectBufferAddress(j_output_buffer));
1815     CHECK_EXCEPTION(jni);
1816 
1817     ALOGV("Encoder got output buffer # %d. Size: %d. TS: %lld. Latency: %lld."
1818         " EncTime: %lld",
1819         output_buffer_index, payload_size, last_output_timestamp_ms_,
1820         last_input_timestamp_ms_ - last_output_timestamp_ms_,
1821         frame_encoding_time_ms);
1822 
1823     // Calculate and print encoding statistics - every 3 seconds.
1824     current_frames_++;
1825     current_bytes_ += payload_size;
1826     current_encoding_time_ms_ += frame_encoding_time_ms;
1827     int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
1828     if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
1829         current_frames_ > 0) {
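      // Bitrate in kbps (bytes * 8 / elapsed ms); fps rounded to the nearest integer.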
1830       ALOGD("Encoder bitrate: %d, target: %d kbps, fps: %d,"
1831           " encTime: %d for last %d ms",
1832           current_bytes_ * 8 / statistic_time_ms,
1833           last_set_bitrate_kbps_,
1834           (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms,
1835           current_encoding_time_ms_ / current_frames_, statistic_time_ms);
1836       start_time_ms_ = GetCurrentTimeMs();
1837       current_frames_ = 0;
1838       current_bytes_ = 0;
1839       current_encoding_time_ms_ = 0;
1840     }
1841 
1842     // Callback - return encoded frame.
1843     if (callback_) {
1844       scoped_ptr<webrtc::EncodedImage> image(
1845           new webrtc::EncodedImage(payload, payload_size, payload_size));
1846       image->_encodedWidth = width_;
1847       image->_encodedHeight = height_;
1848       image->_timeStamp = timestamp;
1849       image->capture_time_ms_ = render_time_ms;
1850       image->_frameType = (key_frame ? webrtc::kKeyFrame : webrtc::kDeltaFrame);
1851       image->_completeFrame = true;
1852 
1853       webrtc::CodecSpecificInfo info;
1854       memset(&info, 0, sizeof(info));
1855       info.codecType = kVideoCodecVP8;
1856       info.codecSpecific.VP8.pictureId = webrtc::kNoPictureId;
1857       info.codecSpecific.VP8.tl0PicIdx = webrtc::kNoTl0PicIdx;
1858       info.codecSpecific.VP8.keyIdx = webrtc::kNoKeyIdx;
1859 
1860       // Generate a header describing a single fragment.
1861       webrtc::RTPFragmentationHeader header;
1862       memset(&header, 0, sizeof(header));
1863       header.VerifyAndAllocateFragmentationHeader(1);
1864       header.fragmentationOffset[0] = 0;
1865       header.fragmentationLength[0] = image->_length;
1866       header.fragmentationPlType[0] = 0;
1867       header.fragmentationTimeDiff[0] = 0;
1868 
1869       callback_status = callback_->Encoded(*image, &info, &header);
1870     }
1871 
1872     // Return output buffer back to the encoder.
1873     bool success = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
1874                                           j_release_output_buffer_method_,
1875                                           output_buffer_index);
1876     CHECK_EXCEPTION(jni);
1877     if (!success) {
1878       ResetCodec();
1879       return false;
1880     }
1881 
1882     if (callback_status > 0) {
1883       drop_next_input_frame_ = true;
1884     // Theoretically could handle callback_status<0 here, but unclear what that
1885     // would mean for us.
1886     }
1887   }
1888 
1889   return true;
1890 }
1891 
1892 // Simplest-possible implementation of an encoder factory, churns out
1893 // MediaCodecVideoEncoders on demand (or errors, if that's not possible).
1894 class MediaCodecVideoEncoderFactory
1895     : public cricket::WebRtcVideoEncoderFactory {
1896  public:
1897   MediaCodecVideoEncoderFactory();
1898   virtual ~MediaCodecVideoEncoderFactory();
1899 
1900   // WebRtcVideoEncoderFactory implementation.
1901   virtual webrtc::VideoEncoder* CreateVideoEncoder(webrtc::VideoCodecType type)
1902       OVERRIDE;
1903   virtual void AddObserver(Observer* observer) OVERRIDE;
1904   virtual void RemoveObserver(Observer* observer) OVERRIDE;
1905   virtual const std::vector<VideoCodec>& codecs() const OVERRIDE;
1906   virtual void DestroyVideoEncoder(webrtc::VideoEncoder* encoder) OVERRIDE;
1907 
1908  private:
1909   // Empty if platform support is lacking, const after ctor returns.
1910   std::vector<VideoCodec> supported_codecs_;
1911 };
1912 
1913 MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() {
1914   JNIEnv* jni = AttachCurrentThreadIfNeeded();
1915   ScopedLocalRefFrame local_ref_frame(jni);
1916   jclass j_encoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoEncoder");
1917   bool is_platform_supported = jni->CallStaticBooleanMethod(
1918       j_encoder_class,
1919       GetStaticMethodID(jni, j_encoder_class, "isPlatformSupported", "()Z"));
1920   CHECK_EXCEPTION(jni);
1921   if (!is_platform_supported)
1922     return;
1923 
1924   // Wouldn't it be nice if MediaCodec exposed the maximum capabilities of the
1925   // encoder?  Sure would be.  Too bad it doesn't.  So we hard-code some
1926   // reasonable defaults.
1927   supported_codecs_.push_back(
1928       VideoCodec(kVideoCodecVP8, "VP8", 1280, 1280, 30));
1929 }
1930 
1931 MediaCodecVideoEncoderFactory::~MediaCodecVideoEncoderFactory() {}
1932 
1933 webrtc::VideoEncoder* MediaCodecVideoEncoderFactory::CreateVideoEncoder(
1934     webrtc::VideoCodecType type) {
1935   if (type != kVideoCodecVP8 || supported_codecs_.empty())
1936     return NULL;
1937   return new MediaCodecVideoEncoder(AttachCurrentThreadIfNeeded());
1938 }
1939 
1940 // Since the available codec list is never going to change, we ignore the
1941 // Observer-related interface here.
1942 void MediaCodecVideoEncoderFactory::AddObserver(Observer* observer) {}
1943 void MediaCodecVideoEncoderFactory::RemoveObserver(Observer* observer) {}
1944 
1945 const std::vector<MediaCodecVideoEncoderFactory::VideoCodec>&
1946 MediaCodecVideoEncoderFactory::codecs() const {
1947   return supported_codecs_;
1948 }
1949 
1950 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder(
1951     webrtc::VideoEncoder* encoder) {
1952   delete encoder;
1953 }
1954 
1955 class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
1956                                public rtc::MessageHandler {
1957  public:
1958   explicit MediaCodecVideoDecoder(JNIEnv* jni);
1959   virtual ~MediaCodecVideoDecoder();
1960 
1961   static int SetAndroidObjects(JNIEnv* jni, jobject render_egl_context);
1962 
1963   virtual int32_t InitDecode(const VideoCodec* codecSettings,
1964       int32_t numberOfCores) OVERRIDE;
1965 
1966   virtual int32_t
1967   Decode(const EncodedImage& inputImage, bool missingFrames,
1968          const RTPFragmentationHeader* fragmentation,
1969          const CodecSpecificInfo* codecSpecificInfo = NULL,
1970          int64_t renderTimeMs = -1) OVERRIDE;
1971 
1972   virtual int32_t RegisterDecodeCompleteCallback(
1973       DecodedImageCallback* callback) OVERRIDE;
1974 
1975   virtual int32_t Release() OVERRIDE;
1976 
1977   virtual int32_t Reset() OVERRIDE;
1978   // rtc::MessageHandler implementation.
1979   virtual void OnMessage(rtc::Message* msg) OVERRIDE;
1980 
1981  private:
1982   // CHECK-fail if not running on |codec_thread_|.
1983   void CheckOnCodecThread();
1984 
1985   int32_t InitDecodeOnCodecThread();
1986   int32_t ReleaseOnCodecThread();
1987   int32_t DecodeOnCodecThread(const EncodedImage& inputImage);
1988   // Deliver any outputs pending in the MediaCodec to our |callback_| and return
1989   // true on success.
1990   bool DeliverPendingOutputs(JNIEnv* jni, int dequeue_timeout_us);
1991 
1992 
1993   bool key_frame_required_;
1994   bool inited_;
1995   bool use_surface_;
1996   VideoCodec codec_;
1997   I420VideoFrame decoded_image_;
1998   NativeHandleImpl native_handle_;
1999   DecodedImageCallback* callback_;
2000   int frames_received_;  // Number of frames received by decoder.
2001   int frames_decoded_;  // Number of frames decoded by decoder
2002   int64_t start_time_ms_;  // Start time for statistics.
2003   int current_frames_;  // Number of frames in the current statistics interval.
2004   int current_bytes_;  // Encoded bytes in the current statistics interval.
2005   int current_decoding_time_ms_;  // Overall decoding time in the current statistics interval.
2006   uint32_t max_pending_frames_;  // Maximum number of pending input frames
2007   std::vector<int32_t> timestamps_;
2008   std::vector<int64_t> ntp_times_ms_;
2009   std::vector<int64_t> frame_rtc_times_ms_;  // Time when video frame is sent to
2010                                              // decoder input.
2011 
2012   // State that is constant for the lifetime of this object once the ctor
2013   // returns.
2014   scoped_ptr<Thread> codec_thread_;  // Thread on which to operate MediaCodec.
2015   ScopedGlobalRef<jclass> j_media_codec_video_decoder_class_;
2016   ScopedGlobalRef<jobject> j_media_codec_video_decoder_;
2017   jmethodID j_init_decode_method_;
2018   jmethodID j_release_method_;
2019   jmethodID j_dequeue_input_buffer_method_;
2020   jmethodID j_queue_input_buffer_method_;
2021   jmethodID j_dequeue_output_buffer_method_;
2022   jmethodID j_release_output_buffer_method_;
2023   // MediaCodecVideoDecoder fields.
2024   jfieldID j_input_buffers_field_;
2025   jfieldID j_output_buffers_field_;
2026   jfieldID j_color_format_field_;
2027   jfieldID j_width_field_;
2028   jfieldID j_height_field_;
2029   jfieldID j_stride_field_;
2030   jfieldID j_slice_height_field_;
2031   jfieldID j_surface_texture_field_;
2032   jfieldID j_textureID_field_;
2033   // MediaCodecVideoDecoder.DecoderOutputBufferInfo fields.
2034   jfieldID j_info_index_field_;
2035   jfieldID j_info_offset_field_;
2036   jfieldID j_info_size_field_;
2037   jfieldID j_info_presentation_timestamp_us_field_;
2038 
2039   // Global references; must be deleted in Release().
2040   std::vector<jobject> input_buffers_;
2041   jobject surface_texture_;
2042 
2043   // Render EGL context.
2044   static jobject render_egl_context_;
2045 };
2046 
2047 jobject MediaCodecVideoDecoder::render_egl_context_ = NULL;
2048 
2049 int MediaCodecVideoDecoder::SetAndroidObjects(JNIEnv* jni,
2050     jobject render_egl_context) {
2051   if (render_egl_context_) {
2052     jni->DeleteGlobalRef(render_egl_context_);
2053   }
2054   if (IsNull(jni, render_egl_context)) {
2055     render_egl_context_ = NULL;
2056   } else {
2057     render_egl_context_ = jni->NewGlobalRef(render_egl_context);
2058   }
2059   ALOGD("VideoDecoder EGL context set.");
2060   return 0;
2061 }
2062 
2063 MediaCodecVideoDecoder::MediaCodecVideoDecoder(JNIEnv* jni)
2064   : key_frame_required_(true),
2065     inited_(false),
2066     codec_thread_(new Thread()),
2067     j_media_codec_video_decoder_class_(
2068         jni,
2069         FindClass(jni, "org/webrtc/MediaCodecVideoDecoder")),
2070     j_media_codec_video_decoder_(
2071         jni,
2072         jni->NewObject(*j_media_codec_video_decoder_class_,
2073                        GetMethodID(jni,
2074                                    *j_media_codec_video_decoder_class_,
2075                                    "<init>",
2076                                    "()V"))) {
2077   ScopedLocalRefFrame local_ref_frame(jni);
2078   codec_thread_->SetName("MediaCodecVideoDecoder", NULL);
2079   CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoDecoder";
2080 
2081   j_init_decode_method_ = GetMethodID(
2082       jni, *j_media_codec_video_decoder_class_, "initDecode",
2083       "(IIZLandroid/opengl/EGLContext;)Z");
2084   j_release_method_ =
2085       GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V");
2086   j_dequeue_input_buffer_method_ = GetMethodID(
2087       jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I");
2088   j_queue_input_buffer_method_ = GetMethodID(
2089       jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJ)Z");
2090   j_dequeue_output_buffer_method_ = GetMethodID(
2091       jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer",
2092       "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo;");
2093   j_release_output_buffer_method_ = GetMethodID(
2094       jni, *j_media_codec_video_decoder_class_, "releaseOutputBuffer", "(IZ)Z");
2095 
2096   j_input_buffers_field_ = GetFieldID(
2097       jni, *j_media_codec_video_decoder_class_,
2098       "inputBuffers", "[Ljava/nio/ByteBuffer;");
2099   j_output_buffers_field_ = GetFieldID(
2100       jni, *j_media_codec_video_decoder_class_,
2101       "outputBuffers", "[Ljava/nio/ByteBuffer;");
2102   j_color_format_field_ = GetFieldID(
2103       jni, *j_media_codec_video_decoder_class_, "colorFormat", "I");
2104   j_width_field_ = GetFieldID(
2105       jni, *j_media_codec_video_decoder_class_, "width", "I");
2106   j_height_field_ = GetFieldID(
2107       jni, *j_media_codec_video_decoder_class_, "height", "I");
2108   j_stride_field_ = GetFieldID(
2109       jni, *j_media_codec_video_decoder_class_, "stride", "I");
2110   j_slice_height_field_ = GetFieldID(
2111       jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I");
2112   j_textureID_field_ = GetFieldID(
2113       jni, *j_media_codec_video_decoder_class_, "textureID", "I");
2114   j_surface_texture_field_ = GetFieldID(
2115       jni, *j_media_codec_video_decoder_class_, "surfaceTexture",
2116       "Landroid/graphics/SurfaceTexture;");
2117 
2118   jclass j_decoder_output_buffer_info_class = FindClass(jni,
2119       "org/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo");
2120   j_info_index_field_ = GetFieldID(
2121       jni, j_decoder_output_buffer_info_class, "index", "I");
2122   j_info_offset_field_ = GetFieldID(
2123       jni, j_decoder_output_buffer_info_class, "offset", "I");
2124   j_info_size_field_ = GetFieldID(
2125       jni, j_decoder_output_buffer_info_class, "size", "I");
2126   j_info_presentation_timestamp_us_field_ = GetFieldID(
2127       jni, j_decoder_output_buffer_info_class, "presentationTimestampUs", "J");
2128 
2129   CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed";
2130   use_surface_ = true;
2131   if (render_egl_context_ == NULL)
2132     use_surface_ = false;
2133   memset(&codec_, 0, sizeof(codec_));
2134 }
2135 
2136 MediaCodecVideoDecoder::~MediaCodecVideoDecoder() {
2137   // Call Release() to ensure no more callbacks to us after we are deleted.
2138   Release();
2139 }
2140 
2141 int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst,
2142     int32_t numberOfCores) {
2143   if (inst == NULL) {
2144     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
2145   }
2146   int ret_val = Release();
2147   if (ret_val < 0) {
2148     return ret_val;
2149   }
2150   // Save VideoCodec instance for later.
2151   if (&codec_ != inst) {
2152     codec_ = *inst;
2153   }
2154   codec_.maxFramerate = (codec_.maxFramerate >= 1) ? codec_.maxFramerate : 1;
2155 
2156   // Always start with a complete key frame.
2157   key_frame_required_ = true;
2158   frames_received_ = 0;
2159   frames_decoded_ = 0;
2160 
2161   // Call Java init.
2162   return codec_thread_->Invoke<int32_t>(
2163       Bind(&MediaCodecVideoDecoder::InitDecodeOnCodecThread, this));
2164 }
2165 
2166 int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
2167   CheckOnCodecThread();
2168   JNIEnv* jni = AttachCurrentThreadIfNeeded();
2169   ScopedLocalRefFrame local_ref_frame(jni);
2170   ALOGD("InitDecodeOnCodecThread: %d x %d. fps: %d",
2171       codec_.width, codec_.height, codec_.maxFramerate);
2172 
2173   bool success = jni->CallBooleanMethod(*j_media_codec_video_decoder_,
2174                                        j_init_decode_method_,
2175                                        codec_.width,
2176                                        codec_.height,
2177                                        use_surface_,
2178                                        render_egl_context_);
2179   CHECK_EXCEPTION(jni);
2180   if (!success) {
2181     return WEBRTC_VIDEO_CODEC_ERROR;
2182   }
2183   inited_ = true;
2184 
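  // When rendering to a surface, allow one decoded frame to remain pending in
  // the codec; otherwise outputs must be drained before the next input is queued.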
2185   max_pending_frames_ = 0;
2186   if (use_surface_) {
2187     max_pending_frames_ = 1;
2188   }
2189   start_time_ms_ = GetCurrentTimeMs();
2190   current_frames_ = 0;
2191   current_bytes_ = 0;
2192   current_decoding_time_ms_ = 0;
2193   timestamps_.clear();
2194   ntp_times_ms_.clear();
2195   frame_rtc_times_ms_.clear();
2196 
2197   jobjectArray input_buffers = (jobjectArray)GetObjectField(
2198       jni, *j_media_codec_video_decoder_, j_input_buffers_field_);
2199   size_t num_input_buffers = jni->GetArrayLength(input_buffers);
2200   input_buffers_.resize(num_input_buffers);
2201   for (size_t i = 0; i < num_input_buffers; ++i) {
2202     input_buffers_[i] =
2203         jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
2204     CHECK_EXCEPTION(jni);
2205   }
2206 
2207   if (use_surface_) {
2208     jobject surface_texture = GetObjectField(
2209         jni, *j_media_codec_video_decoder_, j_surface_texture_field_);
2210     surface_texture_ = jni->NewGlobalRef(surface_texture);
2211   }
2212   codec_thread_->PostDelayed(kMediaCodecPollMs, this);
2213 
2214   return WEBRTC_VIDEO_CODEC_OK;
2215 }
2216 
2217 int32_t MediaCodecVideoDecoder::Release() {
2218   return codec_thread_->Invoke<int32_t>(
2219         Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this));
2220 }
2221 
2222 int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() {
2223   if (!inited_) {
2224     return WEBRTC_VIDEO_CODEC_OK;
2225   }
2226   CheckOnCodecThread();
2227   JNIEnv* jni = AttachCurrentThreadIfNeeded();
2228   ALOGD("DecoderRelease: Frames received: %d.", frames_received_);
2229   ScopedLocalRefFrame local_ref_frame(jni);
2230   for (size_t i = 0; i < input_buffers_.size(); i++) {
2231     jni->DeleteGlobalRef(input_buffers_[i]);
2232   }
2233   input_buffers_.clear();
2234   if (use_surface_) {
2235     // Before deleting the texture object, make sure it is no longer referenced
2236     // by any TextureVideoFrame.
2237     int32_t waitTimeoutUs = 3000000;  // 3 second wait
2238     while (waitTimeoutUs > 0 && native_handle_.ref_count() > 0) {
2239       ALOGD("Current Texture RefCnt: %d", native_handle_.ref_count());
2240       usleep(30000);
2241       waitTimeoutUs -= 30000;
2242     }
2243     ALOGD("TextureRefCnt: %d", native_handle_.ref_count());
2244     jni->DeleteGlobalRef(surface_texture_);
2245   }
2246   jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_);
2247   CHECK_EXCEPTION(jni);
2248   rtc::MessageQueueManager::Clear(this);
2249   inited_ = false;
2250   return WEBRTC_VIDEO_CODEC_OK;
2251 }
2252 
2253 
2254 void MediaCodecVideoDecoder::CheckOnCodecThread() {
2255   CHECK(codec_thread_ == ThreadManager::Instance()->CurrentThread())
2256       << "Running on wrong thread!";
2257 }
2258 
2259 int32_t MediaCodecVideoDecoder::Decode(
2260     const EncodedImage& inputImage,
2261     bool missingFrames,
2262     const RTPFragmentationHeader* fragmentation,
2263     const CodecSpecificInfo* codecSpecificInfo,
2264     int64_t renderTimeMs) {
2265   if (!inited_) {
2266     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
2267   }
2268   if (callback_ == NULL) {
2269     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
2270   }
2271   if (inputImage._buffer == NULL && inputImage._length > 0) {
2272     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
2273   }
2274   // Check if encoded frame dimension has changed.
2275   if ((inputImage._encodedWidth * inputImage._encodedHeight > 0) &&
2276       (inputImage._encodedWidth != codec_.width ||
2277       inputImage._encodedHeight != codec_.height)) {
2278     codec_.width = inputImage._encodedWidth;
2279     codec_.height = inputImage._encodedHeight;
2280     InitDecode(&codec_, 1);
2281   }
2282 
2283   // Always start with a complete key frame.
2284   if (key_frame_required_) {
2285     if (inputImage._frameType != webrtc::kKeyFrame) {
2286       return WEBRTC_VIDEO_CODEC_ERROR;
2287     }
2288     if (!inputImage._completeFrame) {
2289       return WEBRTC_VIDEO_CODEC_ERROR;
2290     }
2291     key_frame_required_ = false;
2292   }
2293   if (inputImage._length == 0) {
2294     return WEBRTC_VIDEO_CODEC_ERROR;
2295   }
2296 
2297   return codec_thread_->Invoke<int32_t>(Bind(
2298       &MediaCodecVideoDecoder::DecodeOnCodecThread, this, inputImage));
2299 }
2300 
2301 int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
2302     const EncodedImage& inputImage) {
2303   static uint8_t yVal_ = 0x7f;
2304 
2305   CheckOnCodecThread();
2306   JNIEnv* jni = AttachCurrentThreadIfNeeded();
2307   ScopedLocalRefFrame local_ref_frame(jni);
2308 
2309   // Try to drain the decoder and wait until output is not too
2310   // much behind the input.
2311   if (frames_received_ > frames_decoded_ + max_pending_frames_) {
2312     ALOGV("Wait for output...");
2313     if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs * 1000)) {
2314       Reset();
2315       return WEBRTC_VIDEO_CODEC_ERROR;
2316     }
2317     if (frames_received_ > frames_decoded_ + max_pending_frames_) {
2318       ALOGE("Output buffer dequeue timeout");
2319       Reset();
2320       return WEBRTC_VIDEO_CODEC_ERROR;
2321     }
2322   }
2323 
2324   // Get input buffer.
2325   int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_decoder_,
2326                                                 j_dequeue_input_buffer_method_);
2327   CHECK_EXCEPTION(jni);
2328   if (j_input_buffer_index < 0) {
2329     ALOGE("dequeueInputBuffer error");
2330     Reset();
2331     return WEBRTC_VIDEO_CODEC_ERROR;
2332   }
2333 
2334   // Copy encoded data to Java ByteBuffer.
2335   jobject j_input_buffer = input_buffers_[j_input_buffer_index];
2336   uint8* buffer =
2337       reinterpret_cast<uint8*>(jni->GetDirectBufferAddress(j_input_buffer));
2338   CHECK(buffer) << "Indirect buffer??";
2339   int64 buffer_capacity = jni->GetDirectBufferCapacity(j_input_buffer);
2340   CHECK_EXCEPTION(jni);
2341   if (buffer_capacity < inputImage._length) {
2342     ALOGE("Input frame size %d is bigger than buffer size %d.",
2343         inputImage._length, buffer_capacity);
2344     Reset();
2345     return WEBRTC_VIDEO_CODEC_ERROR;
2346   }
2347   ALOGV("Decoder frame in # %d. Buffer # %d. Size: %d",
2348       frames_received_, j_input_buffer_index, inputImage._length);
2349   memcpy(buffer, inputImage._buffer, inputImage._length);
2350 
2351   // Save input image timestamps for later output.
2352   frames_received_++;
2353   current_bytes_ += inputImage._length;
2354   timestamps_.push_back(inputImage._timeStamp);
2355   ntp_times_ms_.push_back(inputImage.ntp_time_ms_);
2356   frame_rtc_times_ms_.push_back(GetCurrentTimeMs());
2357 
2358   // Feed input to decoder.
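  // Presentation timestamp is synthesized from the frame count and the nominal frame rate.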
2359   jlong timestamp_us = (frames_received_ * 1000000) / codec_.maxFramerate;
2360   bool success = jni->CallBooleanMethod(*j_media_codec_video_decoder_,
2361                                         j_queue_input_buffer_method_,
2362                                         j_input_buffer_index,
2363                                         inputImage._length,
2364                                         timestamp_us);
2365   CHECK_EXCEPTION(jni);
2366   if (!success) {
2367     ALOGE("queueInputBuffer error");
2368     Reset();
2369     return WEBRTC_VIDEO_CODEC_ERROR;
2370   }
2371 
2372   // Try to drain the decoder
2373   if (!DeliverPendingOutputs(jni, 0)) {
2374     ALOGE("DeliverPendingOutputs error");
2375     Reset();
2376     return WEBRTC_VIDEO_CODEC_ERROR;
2377   }
2378 
2379   return WEBRTC_VIDEO_CODEC_OK;
2380 }
2381 
2382 bool MediaCodecVideoDecoder::DeliverPendingOutputs(
2383     JNIEnv* jni, int dequeue_timeout_us) {
2384   if (frames_received_ <= frames_decoded_) {
2385     // No need to query for output buffers - decoder is drained.
2386     return true;
2387   }
2388   // Get decoder output.
2389   jobject j_decoder_output_buffer_info = jni->CallObjectMethod(
2390       *j_media_codec_video_decoder_,
2391       j_dequeue_output_buffer_method_,
2392       dequeue_timeout_us);
2393 
2394   CHECK_EXCEPTION(jni);
2395   if (IsNull(jni, j_decoder_output_buffer_info)) {
2396     return true;
2397   }
2398 
2399   // Extract output buffer info from Java DecoderOutputBufferInfo.
2400   int output_buffer_index =
2401       GetIntField(jni, j_decoder_output_buffer_info, j_info_index_field_);
2402   if (output_buffer_index < 0) {
2403     ALOGE("dequeueOutputBuffer error : %d", output_buffer_index);
2404     Reset();
2405     return false;
2406   }
2407   int output_buffer_offset =
2408       GetIntField(jni, j_decoder_output_buffer_info, j_info_offset_field_);
2409   int output_buffer_size =
2410       GetIntField(jni, j_decoder_output_buffer_info, j_info_size_field_);
2411   CHECK_EXCEPTION(jni);
2412 
2413   // Get decoded video frame properties.
2414   int color_format = GetIntField(jni, *j_media_codec_video_decoder_,
2415       j_color_format_field_);
2416   int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_);
2417   int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_);
2418   int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_);
2419   int slice_height = GetIntField(jni, *j_media_codec_video_decoder_,
2420       j_slice_height_field_);
2421   int texture_id = GetIntField(jni, *j_media_codec_video_decoder_,
2422       j_textureID_field_);
2423 
2424   // Extract data from Java ByteBuffer and create output yuv420 frame -
2425   // for non surface decoding only.
2426   if (!use_surface_) {
2427     if (output_buffer_size < width * height * 3 / 2) {
2428       ALOGE("Insufficient output buffer size: %d", output_buffer_size);
2429       Reset();
2430       return false;
2431     }
2432     jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField(
2433         jni, *j_media_codec_video_decoder_, j_output_buffers_field_));
2434     jobject output_buffer =
2435         jni->GetObjectArrayElement(output_buffers, output_buffer_index);
2436     uint8_t* payload = reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(
2437         output_buffer));
2438     CHECK_EXCEPTION(jni);
2439     payload += output_buffer_offset;
2440 
2441     // Create yuv420 frame.
2442     if (color_format == COLOR_FormatYUV420Planar) {
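      // Planar output: Y, U and V are contiguous; U starts at stride * slice_height
      // and V one quarter plane (stride * slice_height / 4) later.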
2443       decoded_image_.CreateFrame(
2444           stride * slice_height, payload,
2445           (stride * slice_height) / 4, payload + (stride * slice_height),
2446           (stride * slice_height) / 4, payload + (5 * stride * slice_height / 4),
2447           width, height,
2448           stride, stride / 2, stride / 2);
2449     } else {
2450       // All other supported formats are nv12.
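      // NV12 stores Y followed by interleaved U/V; convert to I420 so the rest of
      // the pipeline sees planar data.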
2451       decoded_image_.CreateEmptyFrame(width, height, width,
2452           width / 2, width / 2);
2453       libyuv::NV12ToI420(
2454           payload, stride,
2455           payload + stride * slice_height, stride,
2456           decoded_image_.buffer(webrtc::kYPlane),
2457           decoded_image_.stride(webrtc::kYPlane),
2458           decoded_image_.buffer(webrtc::kUPlane),
2459           decoded_image_.stride(webrtc::kUPlane),
2460           decoded_image_.buffer(webrtc::kVPlane),
2461           decoded_image_.stride(webrtc::kVPlane),
2462           width, height);
2463     }
2464   }
2465 
2466   // Get frame timestamps from a queue.
2467   int32_t timestamp = timestamps_.front();
2468   timestamps_.erase(timestamps_.begin());
2469   int64_t ntp_time_ms = ntp_times_ms_.front();
2470   ntp_times_ms_.erase(ntp_times_ms_.begin());
2471   int64_t frame_decoding_time_ms = GetCurrentTimeMs() -
2472       frame_rtc_times_ms_.front();
2473   frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
2474 
2475   ALOGV("Decoder frame out # %d. %d x %d. %d x %d. Color: 0x%x. Size: %d."
2476       " DecTime: %lld", frames_decoded_, width, height, stride, slice_height,
2477       color_format, output_buffer_size, frame_decoding_time_ms);
2478 
2479   // Return output buffer back to codec.
2480   bool success = jni->CallBooleanMethod(
2481       *j_media_codec_video_decoder_,
2482       j_release_output_buffer_method_,
2483       output_buffer_index,
2484       use_surface_);
2485   CHECK_EXCEPTION(jni);
2486   if (!success) {
2487     ALOGE("releaseOutputBuffer error");
2488     Reset();
2489     return false;
2490   }
2491 
2492   // Calculate and print decoding statistics - every 3 seconds.
2493   frames_decoded_++;
2494   current_frames_++;
2495   current_decoding_time_ms_ += frame_decoding_time_ms;
2496   int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
2497   if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
2498       current_frames_ > 0) {
2499     ALOGD("Decoder bitrate: %d kbps, fps: %d, decTime: %d for last %d ms",
2500         current_bytes_ * 8 / statistic_time_ms,
2501         (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms,
2502         current_decoding_time_ms_ / current_frames_, statistic_time_ms);
2503     start_time_ms_ = GetCurrentTimeMs();
2504     current_frames_ = 0;
2505     current_bytes_= 0;
2506     current_decoding_time_ms_ = 0;
2507   }
2508 
2509   // Callback - output decoded frame.
2510   int32_t callback_status = WEBRTC_VIDEO_CODEC_OK;
2511   if (use_surface_) {
2512     native_handle_.SetTextureObject(surface_texture_, texture_id);
2513     TextureVideoFrame texture_image(
2514         &native_handle_, width, height, timestamp, 0);
2515     texture_image.set_ntp_time_ms(ntp_time_ms);
2516     callback_status = callback_->Decoded(texture_image);
2517   } else {
2518     decoded_image_.set_timestamp(timestamp);
2519     decoded_image_.set_ntp_time_ms(ntp_time_ms);
2520     callback_status = callback_->Decoded(decoded_image_);
2521   }
2522   if (callback_status > 0) {
2523     ALOGE("callback error");
2524   }
2525 
2526   return true;
2527 }
2528 
RegisterDecodeCompleteCallback(DecodedImageCallback * callback)2529 int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback(
2530     DecodedImageCallback* callback) {
2531   callback_ = callback;
2532   return WEBRTC_VIDEO_CODEC_OK;
2533 }
2534 
Reset()2535 int32_t MediaCodecVideoDecoder::Reset() {
2536   ALOGD("DecoderReset");
2537   if (!inited_) {
2538     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
2539   }
2540   return InitDecode(&codec_, 1);
2541 }
2542 
OnMessage(rtc::Message * msg)2543 void MediaCodecVideoDecoder::OnMessage(rtc::Message* msg) {
2544   JNIEnv* jni = AttachCurrentThreadIfNeeded();
2545   ScopedLocalRefFrame local_ref_frame(jni);
2546   if (!inited_) {
2547     return;
2548   }
2549   // We only ever send one message to |this| directly (not through a Bind()'d
2550   // functor), so expect no ID/data.
2551   CHECK(!msg->message_id) << "Unexpected message!";
2552   CHECK(!msg->pdata) << "Unexpected message!";
2553   CheckOnCodecThread();
2554 
2555   DeliverPendingOutputs(jni, 0);
2556   codec_thread_->PostDelayed(kMediaCodecPollMs, this);
2557 }

class MediaCodecVideoDecoderFactory
    : public cricket::WebRtcVideoDecoderFactory {
 public:
  MediaCodecVideoDecoderFactory();
  virtual ~MediaCodecVideoDecoderFactory();
  // WebRtcVideoDecoderFactory implementation.
  virtual webrtc::VideoDecoder* CreateVideoDecoder(
      webrtc::VideoCodecType type) OVERRIDE;

  virtual void DestroyVideoDecoder(webrtc::VideoDecoder* decoder) OVERRIDE;

 private:
  bool is_platform_supported_;
};

MediaCodecVideoDecoderFactory::MediaCodecVideoDecoderFactory() {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  jclass j_decoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoDecoder");
  is_platform_supported_ = jni->CallStaticBooleanMethod(
      j_decoder_class,
      GetStaticMethodID(jni, j_decoder_class, "isPlatformSupported", "()Z"));
  CHECK_EXCEPTION(jni);
}

MediaCodecVideoDecoderFactory::~MediaCodecVideoDecoderFactory() {}

webrtc::VideoDecoder* MediaCodecVideoDecoderFactory::CreateVideoDecoder(
    webrtc::VideoCodecType type) {
  if (type != kVideoCodecVP8 || !is_platform_supported_) {
    return NULL;
  }
  return new MediaCodecVideoDecoder(AttachCurrentThreadIfNeeded());
}

void MediaCodecVideoDecoderFactory::DestroyVideoDecoder(
    webrtc::VideoDecoder* decoder) {
  delete decoder;
}

#endif  // #if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)

}  // anonymous namespace

// Convenience macro defining JNI-accessible methods in the org.webrtc package.
// Eliminates unnecessary boilerplate and line-wraps, reducing visual clutter.
#define JOW(rettype, name) extern "C" rettype JNIEXPORT JNICALL \
  Java_org_webrtc_##name
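// As an illustration (using DataChannel_close, defined below):
//   JOW(void, DataChannel_close)(JNIEnv* jni, jobject j_dc) { ... }
// expands to
//   extern "C" void JNIEXPORT JNICALL Java_org_webrtc_DataChannel_close(
//       JNIEnv* jni, jobject j_dc) { ... }
// which is the symbol the JVM resolves for org.webrtc.DataChannel's native
// close() method.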

extern "C" jint JNIEXPORT JNICALL JNI_OnLoad(JavaVM *jvm, void *reserved) {
  CHECK(!g_jvm) << "JNI_OnLoad called more than once!";
  g_jvm = jvm;
  CHECK(g_jvm) << "JNI_OnLoad handed NULL?";

  CHECK(!pthread_once(&g_jni_ptr_once, &CreateJNIPtrKey)) << "pthread_once";

  CHECK(rtc::InitializeSSL()) << "Failed to InitializeSSL()";

  JNIEnv* jni;
  if (jvm->GetEnv(reinterpret_cast<void**>(&jni), JNI_VERSION_1_6) != JNI_OK)
    return -1;
  g_class_reference_holder = new ClassReferenceHolder(jni);

  return JNI_VERSION_1_6;
}
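// The Java side is expected to load this library (e.g. via System.loadLibrary()
// in a static initializer; the exact library name is build-specific and not
// defined here), which triggers JNI_OnLoad above.  That caches the JavaVM and
// builds the global ClassReferenceHolder before any org.webrtc native method
// can run.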

extern "C" void JNIEXPORT JNICALL JNI_OnUnload(JavaVM *jvm, void *reserved) {
  g_class_reference_holder->FreeReferences(AttachCurrentThreadIfNeeded());
  delete g_class_reference_holder;
  g_class_reference_holder = NULL;
  CHECK(rtc::CleanupSSL()) << "Failed to CleanupSSL()";
  g_jvm = NULL;
}

static DataChannelInterface* ExtractNativeDC(JNIEnv* jni, jobject j_dc) {
  jfieldID native_dc_id = GetFieldID(jni,
      GetObjectClass(jni, j_dc), "nativeDataChannel", "J");
  jlong j_d = GetLongField(jni, j_dc, native_dc_id);
  return reinterpret_cast<DataChannelInterface*>(j_d);
}
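// The Java org.webrtc.DataChannel object is expected to keep the native
// pointer in a long field named "nativeDataChannel" (populated through its
// (J) constructor; see PeerConnection_createDataChannel below).  This helper
// just reads that field back and reinterprets it as the DataChannelInterface*
// that was originally handed to Java as a jlong.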

JOW(jlong, DataChannel_registerObserverNative)(
    JNIEnv* jni, jobject j_dc, jobject j_observer) {
  scoped_ptr<DataChannelObserverWrapper> observer(
      new DataChannelObserverWrapper(jni, j_observer));
  ExtractNativeDC(jni, j_dc)->RegisterObserver(observer.get());
  return jlongFromPointer(observer.release());
}

JOW(void, DataChannel_unregisterObserverNative)(
    JNIEnv* jni, jobject j_dc, jlong native_observer) {
  ExtractNativeDC(jni, j_dc)->UnregisterObserver();
  delete reinterpret_cast<DataChannelObserverWrapper*>(native_observer);
}

JOW(jstring, DataChannel_label)(JNIEnv* jni, jobject j_dc) {
  return JavaStringFromStdString(jni, ExtractNativeDC(jni, j_dc)->label());
}

JOW(jobject, DataChannel_state)(JNIEnv* jni, jobject j_dc) {
  return JavaEnumFromIndex(
      jni, "DataChannel$State", ExtractNativeDC(jni, j_dc)->state());
}

JOW(jlong, DataChannel_bufferedAmount)(JNIEnv* jni, jobject j_dc) {
  uint64 buffered_amount = ExtractNativeDC(jni, j_dc)->buffered_amount();
  CHECK_LE(buffered_amount, std::numeric_limits<int64>::max())
      << "buffered_amount overflowed jlong!";
  return static_cast<jlong>(buffered_amount);
}

JOW(void, DataChannel_close)(JNIEnv* jni, jobject j_dc) {
  ExtractNativeDC(jni, j_dc)->Close();
}

JOW(jboolean, DataChannel_sendNative)(JNIEnv* jni, jobject j_dc,
                                      jbyteArray data, jboolean binary) {
  jbyte* bytes = jni->GetByteArrayElements(data, NULL);
  bool ret = ExtractNativeDC(jni, j_dc)->Send(DataBuffer(
      rtc::Buffer(bytes, jni->GetArrayLength(data)),
      binary));
  jni->ReleaseByteArrayElements(data, bytes, JNI_ABORT);
  return ret;
}

JOW(void, DataChannel_dispose)(JNIEnv* jni, jobject j_dc) {
  CHECK_RELEASE(ExtractNativeDC(jni, j_dc));
}

JOW(void, Logging_nativeEnableTracing)(
    JNIEnv* jni, jclass, jstring j_path, jint nativeLevels,
    jint nativeSeverity) {
  std::string path = JavaToStdString(jni, j_path);
  if (nativeLevels != webrtc::kTraceNone) {
    webrtc::Trace::set_level_filter(nativeLevels);
#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
    if (path != "logcat:") {
#endif
      CHECK_EQ(0, webrtc::Trace::SetTraceFile(path.c_str(), false))
          << "SetTraceFile failed";
#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
    } else {
      // Intentionally leak this to avoid needing to reason about its lifecycle.
      // It keeps no state and functions only as a dispatch point.
      static LogcatTraceContext* g_trace_callback = new LogcatTraceContext();
    }
#endif
  }
  rtc::LogMessage::LogToDebug(nativeSeverity);
}

JOW(void, PeerConnection_freePeerConnection)(JNIEnv*, jclass, jlong j_p) {
  CHECK_RELEASE(reinterpret_cast<PeerConnectionInterface*>(j_p));
}

JOW(void, PeerConnection_freeObserver)(JNIEnv*, jclass, jlong j_p) {
  PCOJava* p = reinterpret_cast<PCOJava*>(j_p);
  delete p;
}

JOW(void, MediaSource_free)(JNIEnv*, jclass, jlong j_p) {
  CHECK_RELEASE(reinterpret_cast<MediaSourceInterface*>(j_p));
}

JOW(void, VideoCapturer_free)(JNIEnv*, jclass, jlong j_p) {
  delete reinterpret_cast<cricket::VideoCapturer*>(j_p);
}

JOW(void, VideoRenderer_freeGuiVideoRenderer)(JNIEnv*, jclass, jlong j_p) {
  delete reinterpret_cast<VideoRendererWrapper*>(j_p);
}

JOW(void, VideoRenderer_freeWrappedVideoRenderer)(JNIEnv*, jclass, jlong j_p) {
  delete reinterpret_cast<JavaVideoRendererWrapper*>(j_p);
}

JOW(void, MediaStreamTrack_free)(JNIEnv*, jclass, jlong j_p) {
  CHECK_RELEASE(reinterpret_cast<MediaStreamTrackInterface*>(j_p));
}

JOW(jboolean, MediaStream_nativeAddAudioTrack)(
    JNIEnv* jni, jclass, jlong pointer, jlong j_audio_track_pointer) {
  return reinterpret_cast<MediaStreamInterface*>(pointer)->AddTrack(
      reinterpret_cast<AudioTrackInterface*>(j_audio_track_pointer));
}

JOW(jboolean, MediaStream_nativeAddVideoTrack)(
    JNIEnv* jni, jclass, jlong pointer, jlong j_video_track_pointer) {
  return reinterpret_cast<MediaStreamInterface*>(pointer)
      ->AddTrack(reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer));
}

JOW(jboolean, MediaStream_nativeRemoveAudioTrack)(
    JNIEnv* jni, jclass, jlong pointer, jlong j_audio_track_pointer) {
  return reinterpret_cast<MediaStreamInterface*>(pointer)->RemoveTrack(
      reinterpret_cast<AudioTrackInterface*>(j_audio_track_pointer));
}

JOW(jboolean, MediaStream_nativeRemoveVideoTrack)(
    JNIEnv* jni, jclass, jlong pointer, jlong j_video_track_pointer) {
  return reinterpret_cast<MediaStreamInterface*>(pointer)->RemoveTrack(
      reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer));
}

JOW(jstring, MediaStream_nativeLabel)(JNIEnv* jni, jclass, jlong j_p) {
  return JavaStringFromStdString(
      jni, reinterpret_cast<MediaStreamInterface*>(j_p)->label());
}

JOW(void, MediaStream_free)(JNIEnv*, jclass, jlong j_p) {
  CHECK_RELEASE(reinterpret_cast<MediaStreamInterface*>(j_p));
}

JOW(jlong, PeerConnectionFactory_nativeCreateObserver)(
    JNIEnv* jni, jclass, jobject j_observer) {
  return (jlong)new PCOJava(jni, j_observer);
}

#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
JOW(jboolean, PeerConnectionFactory_initializeAndroidGlobals)(
    JNIEnv* jni, jclass, jobject context,
    jboolean initialize_audio, jboolean initialize_video,
    jobject render_egl_context) {
  CHECK(g_jvm) << "JNI_OnLoad failed to run?";
  bool failure = false;
  if (!factory_static_initialized) {
    if (initialize_video) {
      failure |= webrtc::SetCaptureAndroidVM(g_jvm, context);
      failure |= webrtc::SetRenderAndroidVM(g_jvm);
    }
    if (initialize_audio)
      failure |= webrtc::VoiceEngine::SetAndroidObjects(g_jvm, jni, context);
    factory_static_initialized = true;
  }
  if (initialize_video)
    failure |= MediaCodecVideoDecoder::SetAndroidObjects(jni,
        render_egl_context);
  return !failure;
}
#endif  // defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)

// Helper struct for working around the fact that CreatePeerConnectionFactory()
// comes in two flavors: either entirely automagical (constructing its own
// threads and deleting them on teardown, but no external codec factory support)
// or entirely manual (requires caller to delete threads after factory
// teardown).  This struct takes ownership of its ctor's arguments to present a
// single thing for Java to hold and eventually free.
class OwnedFactoryAndThreads {
 public:
  OwnedFactoryAndThreads(Thread* worker_thread,
                         Thread* signaling_thread,
                         PeerConnectionFactoryInterface* factory)
      : worker_thread_(worker_thread),
        signaling_thread_(signaling_thread),
        factory_(factory) {}

  ~OwnedFactoryAndThreads() { CHECK_RELEASE(factory_); }

  PeerConnectionFactoryInterface* factory() { return factory_; }

 private:
  const scoped_ptr<Thread> worker_thread_;
  const scoped_ptr<Thread> signaling_thread_;
  PeerConnectionFactoryInterface* factory_;  // Const after ctor except dtor.
};
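// Java holds only the jlong returned by nativeCreatePeerConnectionFactory()
// below; handing it back to PeerConnectionFactory_freeFactory() deletes this
// object, which Release()s the factory and deletes both threads through the
// scoped_ptr members.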

JOW(jlong, PeerConnectionFactory_nativeCreatePeerConnectionFactory)(
    JNIEnv* jni, jclass) {
  // talk/ assumes pretty widely that the current Thread is ThreadManager'd, but
  // ThreadManager only WrapCurrentThread()s the thread where it is first
  // created.  Since the semantics around when auto-wrapping happens in
  // webrtc/base/ are convoluted, we simply wrap here to avoid having to think
  // about ramifications of auto-wrapping there.
  rtc::ThreadManager::Instance()->WrapCurrentThread();
  webrtc::Trace::CreateTrace();
  Thread* worker_thread = new Thread();
  worker_thread->SetName("worker_thread", NULL);
  Thread* signaling_thread = new Thread();
  signaling_thread->SetName("signaling_thread", NULL);
  CHECK(worker_thread->Start() && signaling_thread->Start())
      << "Failed to start threads";
  scoped_ptr<cricket::WebRtcVideoEncoderFactory> encoder_factory;
  scoped_ptr<cricket::WebRtcVideoDecoderFactory> decoder_factory;
#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
  encoder_factory.reset(new MediaCodecVideoEncoderFactory());
  decoder_factory.reset(new MediaCodecVideoDecoderFactory());
#endif
  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
      webrtc::CreatePeerConnectionFactory(worker_thread,
                                          signaling_thread,
                                          NULL,
                                          encoder_factory.release(),
                                          decoder_factory.release()));
  OwnedFactoryAndThreads* owned_factory = new OwnedFactoryAndThreads(
      worker_thread, signaling_thread, factory.release());
  return jlongFromPointer(owned_factory);
}

JOW(void, PeerConnectionFactory_freeFactory)(JNIEnv*, jclass, jlong j_p) {
  delete reinterpret_cast<OwnedFactoryAndThreads*>(j_p);
  webrtc::Trace::ReturnTrace();
}

static PeerConnectionFactoryInterface* factoryFromJava(jlong j_p) {
  return reinterpret_cast<OwnedFactoryAndThreads*>(j_p)->factory();
}

JOW(jlong, PeerConnectionFactory_nativeCreateLocalMediaStream)(
    JNIEnv* jni, jclass, jlong native_factory, jstring label) {
  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
      factoryFromJava(native_factory));
  rtc::scoped_refptr<MediaStreamInterface> stream(
      factory->CreateLocalMediaStream(JavaToStdString(jni, label)));
  return (jlong)stream.release();
}

JOW(jlong, PeerConnectionFactory_nativeCreateVideoSource)(
    JNIEnv* jni, jclass, jlong native_factory, jlong native_capturer,
    jobject j_constraints) {
  scoped_ptr<ConstraintsWrapper> constraints(
      new ConstraintsWrapper(jni, j_constraints));
  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
      factoryFromJava(native_factory));
  rtc::scoped_refptr<VideoSourceInterface> source(
      factory->CreateVideoSource(
          reinterpret_cast<cricket::VideoCapturer*>(native_capturer),
          constraints.get()));
  return (jlong)source.release();
}

JOW(jlong, PeerConnectionFactory_nativeCreateVideoTrack)(
    JNIEnv* jni, jclass, jlong native_factory, jstring id,
    jlong native_source) {
  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
      factoryFromJava(native_factory));
  rtc::scoped_refptr<VideoTrackInterface> track(
      factory->CreateVideoTrack(
          JavaToStdString(jni, id),
          reinterpret_cast<VideoSourceInterface*>(native_source)));
  return (jlong)track.release();
}

JOW(jlong, PeerConnectionFactory_nativeCreateAudioSource)(
    JNIEnv* jni, jclass, jlong native_factory, jobject j_constraints) {
  scoped_ptr<ConstraintsWrapper> constraints(
      new ConstraintsWrapper(jni, j_constraints));
  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
      factoryFromJava(native_factory));
  rtc::scoped_refptr<AudioSourceInterface> source(
      factory->CreateAudioSource(constraints.get()));
  return (jlong)source.release();
}

JOW(jlong, PeerConnectionFactory_nativeCreateAudioTrack)(
    JNIEnv* jni, jclass, jlong native_factory, jstring id,
    jlong native_source) {
  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
      factoryFromJava(native_factory));
  rtc::scoped_refptr<AudioTrackInterface> track(factory->CreateAudioTrack(
      JavaToStdString(jni, id),
      reinterpret_cast<AudioSourceInterface*>(native_source)));
  return (jlong)track.release();
}

static void JavaIceServersToJsepIceServers(
    JNIEnv* jni, jobject j_ice_servers,
    PeerConnectionInterface::IceServers* ice_servers) {
  jclass list_class = GetObjectClass(jni, j_ice_servers);
  jmethodID iterator_id = GetMethodID(
      jni, list_class, "iterator", "()Ljava/util/Iterator;");
  jobject iterator = jni->CallObjectMethod(j_ice_servers, iterator_id);
  CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
  jmethodID iterator_has_next = GetMethodID(
      jni, GetObjectClass(jni, iterator), "hasNext", "()Z");
  jmethodID iterator_next = GetMethodID(
      jni, GetObjectClass(jni, iterator), "next", "()Ljava/lang/Object;");
  while (jni->CallBooleanMethod(iterator, iterator_has_next)) {
    CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
    jobject j_ice_server = jni->CallObjectMethod(iterator, iterator_next);
    CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
    jclass j_ice_server_class = GetObjectClass(jni, j_ice_server);
    jfieldID j_ice_server_uri_id =
        GetFieldID(jni, j_ice_server_class, "uri", "Ljava/lang/String;");
    jfieldID j_ice_server_username_id =
        GetFieldID(jni, j_ice_server_class, "username", "Ljava/lang/String;");
    jfieldID j_ice_server_password_id =
        GetFieldID(jni, j_ice_server_class, "password", "Ljava/lang/String;");
    jstring uri = reinterpret_cast<jstring>(
        GetObjectField(jni, j_ice_server, j_ice_server_uri_id));
    jstring username = reinterpret_cast<jstring>(
        GetObjectField(jni, j_ice_server, j_ice_server_username_id));
    jstring password = reinterpret_cast<jstring>(
        GetObjectField(jni, j_ice_server, j_ice_server_password_id));
    PeerConnectionInterface::IceServer server;
    server.uri = JavaToStdString(jni, uri);
    server.username = JavaToStdString(jni, username);
    server.password = JavaToStdString(jni, password);
    ice_servers->push_back(server);
  }
  CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
}
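// The |j_ice_servers| argument is assumed to be a java.util.List whose
// elements expose String fields named "uri", "username" and "password"
// (exactly the lookups performed above); each element is converted into one
// PeerConnectionInterface::IceServer.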

JOW(jlong, PeerConnectionFactory_nativeCreatePeerConnection)(
    JNIEnv* jni, jclass, jlong factory, jobject j_ice_servers,
    jobject j_constraints, jlong observer_p) {
  rtc::scoped_refptr<PeerConnectionFactoryInterface> f(
      reinterpret_cast<PeerConnectionFactoryInterface*>(
          factoryFromJava(factory)));
  PeerConnectionInterface::IceServers servers;
  JavaIceServersToJsepIceServers(jni, j_ice_servers, &servers);
  PCOJava* observer = reinterpret_cast<PCOJava*>(observer_p);
  observer->SetConstraints(new ConstraintsWrapper(jni, j_constraints));
  rtc::scoped_refptr<PeerConnectionInterface> pc(f->CreatePeerConnection(
      servers, observer->constraints(), NULL, NULL, observer));
  return (jlong)pc.release();
}

static rtc::scoped_refptr<PeerConnectionInterface> ExtractNativePC(
    JNIEnv* jni, jobject j_pc) {
  jfieldID native_pc_id = GetFieldID(jni,
      GetObjectClass(jni, j_pc), "nativePeerConnection", "J");
  jlong j_p = GetLongField(jni, j_pc, native_pc_id);
  return rtc::scoped_refptr<PeerConnectionInterface>(
      reinterpret_cast<PeerConnectionInterface*>(j_p));
}

JOW(jobject, PeerConnection_getLocalDescription)(JNIEnv* jni, jobject j_pc) {
  const SessionDescriptionInterface* sdp =
      ExtractNativePC(jni, j_pc)->local_description();
  return sdp ? JavaSdpFromNativeSdp(jni, sdp) : NULL;
}

JOW(jobject, PeerConnection_getRemoteDescription)(JNIEnv* jni, jobject j_pc) {
  const SessionDescriptionInterface* sdp =
      ExtractNativePC(jni, j_pc)->remote_description();
  return sdp ? JavaSdpFromNativeSdp(jni, sdp) : NULL;
}

JOW(jobject, PeerConnection_createDataChannel)(
    JNIEnv* jni, jobject j_pc, jstring j_label, jobject j_init) {
  DataChannelInit init = JavaDataChannelInitToNative(jni, j_init);
  rtc::scoped_refptr<DataChannelInterface> channel(
      ExtractNativePC(jni, j_pc)->CreateDataChannel(
          JavaToStdString(jni, j_label), &init));
  // Mustn't pass channel.get() directly through NewObject to avoid reading its
  // vararg parameter as 64-bit and reading memory that doesn't belong to the
  // 32-bit parameter.
  jlong nativeChannelPtr = jlongFromPointer(channel.get());
  CHECK(nativeChannelPtr) << "Failed to create DataChannel";
  jclass j_data_channel_class = FindClass(jni, "org/webrtc/DataChannel");
  jmethodID j_data_channel_ctor = GetMethodID(
      jni, j_data_channel_class, "<init>", "(J)V");
  jobject j_channel = jni->NewObject(
      j_data_channel_class, j_data_channel_ctor, nativeChannelPtr);
  CHECK_EXCEPTION(jni) << "error during NewObject";
  // Channel is now owned by Java object, and will be freed from there.
  int bumped_count = channel->AddRef();
  CHECK(bumped_count == 2) << "Unexpected refcount";
  return j_channel;
}

JOW(void, PeerConnection_createOffer)(
    JNIEnv* jni, jobject j_pc, jobject j_observer, jobject j_constraints) {
  ConstraintsWrapper* constraints =
      new ConstraintsWrapper(jni, j_constraints);
  rtc::scoped_refptr<CreateSdpObserverWrapper> observer(
      new rtc::RefCountedObject<CreateSdpObserverWrapper>(
          jni, j_observer, constraints));
  ExtractNativePC(jni, j_pc)->CreateOffer(observer, constraints);
}

JOW(void, PeerConnection_createAnswer)(
    JNIEnv* jni, jobject j_pc, jobject j_observer, jobject j_constraints) {
  ConstraintsWrapper* constraints =
      new ConstraintsWrapper(jni, j_constraints);
  rtc::scoped_refptr<CreateSdpObserverWrapper> observer(
      new rtc::RefCountedObject<CreateSdpObserverWrapper>(
          jni, j_observer, constraints));
  ExtractNativePC(jni, j_pc)->CreateAnswer(observer, constraints);
}

// Helper to create a SessionDescriptionInterface from a SessionDescription.
static SessionDescriptionInterface* JavaSdpToNativeSdp(
    JNIEnv* jni, jobject j_sdp) {
  jfieldID j_type_id = GetFieldID(
      jni, GetObjectClass(jni, j_sdp), "type",
      "Lorg/webrtc/SessionDescription$Type;");
  jobject j_type = GetObjectField(jni, j_sdp, j_type_id);
  jmethodID j_canonical_form_id = GetMethodID(
      jni, GetObjectClass(jni, j_type), "canonicalForm",
      "()Ljava/lang/String;");
  jstring j_type_string = (jstring)jni->CallObjectMethod(
      j_type, j_canonical_form_id);
  CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
  std::string std_type = JavaToStdString(jni, j_type_string);

  jfieldID j_description_id = GetFieldID(
      jni, GetObjectClass(jni, j_sdp), "description", "Ljava/lang/String;");
  jstring j_description = (jstring)GetObjectField(jni, j_sdp, j_description_id);
  std::string std_description = JavaToStdString(jni, j_description);

  return webrtc::CreateSessionDescription(
      std_type, std_description, NULL);
}
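// canonicalForm() is expected to return the lower-case JSEP type string
// ("offer", "pranswer" or "answer"; an assumption about the Java enum, not
// spelled out in this file), which webrtc::CreateSessionDescription() parses
// along with the SDP text.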

JOW(void, PeerConnection_setLocalDescription)(
    JNIEnv* jni, jobject j_pc,
    jobject j_observer, jobject j_sdp) {
  rtc::scoped_refptr<SetSdpObserverWrapper> observer(
      new rtc::RefCountedObject<SetSdpObserverWrapper>(
          jni, j_observer, reinterpret_cast<ConstraintsWrapper*>(NULL)));
  ExtractNativePC(jni, j_pc)->SetLocalDescription(
      observer, JavaSdpToNativeSdp(jni, j_sdp));
}

JOW(void, PeerConnection_setRemoteDescription)(
    JNIEnv* jni, jobject j_pc,
    jobject j_observer, jobject j_sdp) {
  rtc::scoped_refptr<SetSdpObserverWrapper> observer(
      new rtc::RefCountedObject<SetSdpObserverWrapper>(
          jni, j_observer, reinterpret_cast<ConstraintsWrapper*>(NULL)));
  ExtractNativePC(jni, j_pc)->SetRemoteDescription(
      observer, JavaSdpToNativeSdp(jni, j_sdp));
}

JOW(jboolean, PeerConnection_updateIce)(
    JNIEnv* jni, jobject j_pc, jobject j_ice_servers, jobject j_constraints) {
  PeerConnectionInterface::IceServers ice_servers;
  JavaIceServersToJsepIceServers(jni, j_ice_servers, &ice_servers);
  scoped_ptr<ConstraintsWrapper> constraints(
      new ConstraintsWrapper(jni, j_constraints));
  return ExtractNativePC(jni, j_pc)->UpdateIce(ice_servers, constraints.get());
}

JOW(jboolean, PeerConnection_nativeAddIceCandidate)(
    JNIEnv* jni, jobject j_pc, jstring j_sdp_mid,
    jint j_sdp_mline_index, jstring j_candidate_sdp) {
  std::string sdp_mid = JavaToStdString(jni, j_sdp_mid);
  std::string sdp = JavaToStdString(jni, j_candidate_sdp);
  scoped_ptr<IceCandidateInterface> candidate(
      webrtc::CreateIceCandidate(sdp_mid, j_sdp_mline_index, sdp, NULL));
  return ExtractNativePC(jni, j_pc)->AddIceCandidate(candidate.get());
}

JOW(jboolean, PeerConnection_nativeAddLocalStream)(
    JNIEnv* jni, jobject j_pc, jlong native_stream, jobject j_constraints) {
  scoped_ptr<ConstraintsWrapper> constraints(
      new ConstraintsWrapper(jni, j_constraints));
  return ExtractNativePC(jni, j_pc)->AddStream(
      reinterpret_cast<MediaStreamInterface*>(native_stream),
      constraints.get());
}

JOW(void, PeerConnection_nativeRemoveLocalStream)(
    JNIEnv* jni, jobject j_pc, jlong native_stream) {
  ExtractNativePC(jni, j_pc)->RemoveStream(
      reinterpret_cast<MediaStreamInterface*>(native_stream));
}

JOW(jboolean, PeerConnection_nativeGetStats)(
    JNIEnv* jni, jobject j_pc, jobject j_observer, jlong native_track) {
  rtc::scoped_refptr<StatsObserverWrapper> observer(
      new rtc::RefCountedObject<StatsObserverWrapper>(jni, j_observer));
  return ExtractNativePC(jni, j_pc)->GetStats(
      observer,
      reinterpret_cast<MediaStreamTrackInterface*>(native_track),
      PeerConnectionInterface::kStatsOutputLevelStandard);
}

JOW(jobject, PeerConnection_signalingState)(JNIEnv* jni, jobject j_pc) {
  PeerConnectionInterface::SignalingState state =
      ExtractNativePC(jni, j_pc)->signaling_state();
  return JavaEnumFromIndex(jni, "PeerConnection$SignalingState", state);
}

JOW(jobject, PeerConnection_iceConnectionState)(JNIEnv* jni, jobject j_pc) {
  PeerConnectionInterface::IceConnectionState state =
      ExtractNativePC(jni, j_pc)->ice_connection_state();
  return JavaEnumFromIndex(jni, "PeerConnection$IceConnectionState", state);
}

JOW(jobject, PeerGathering_iceGatheringState)(JNIEnv* jni, jobject j_pc) {
  PeerConnectionInterface::IceGatheringState state =
      ExtractNativePC(jni, j_pc)->ice_gathering_state();
  return JavaEnumFromIndex(jni, "PeerGathering$IceGatheringState", state);
}

JOW(void, PeerConnection_close)(JNIEnv* jni, jobject j_pc) {
  ExtractNativePC(jni, j_pc)->Close();
  return;
}

JOW(jobject, MediaSource_nativeState)(JNIEnv* jni, jclass, jlong j_p) {
  rtc::scoped_refptr<MediaSourceInterface> p(
      reinterpret_cast<MediaSourceInterface*>(j_p));
  return JavaEnumFromIndex(jni, "MediaSource$State", p->state());
}

JOW(jlong, VideoCapturer_nativeCreateVideoCapturer)(
    JNIEnv* jni, jclass, jstring j_device_name) {
  std::string device_name = JavaToStdString(jni, j_device_name);
  scoped_ptr<cricket::DeviceManagerInterface> device_manager(
      cricket::DeviceManagerFactory::Create());
  CHECK(device_manager->Init()) << "DeviceManager::Init() failed";
  cricket::Device device;
  if (!device_manager->GetVideoCaptureDevice(device_name, &device)) {
    LOG(LS_ERROR) << "GetVideoCaptureDevice failed for " << device_name;
    return 0;
  }
  scoped_ptr<cricket::VideoCapturer> capturer(
      device_manager->CreateVideoCapturer(device));
  return (jlong)capturer.release();
}

JOW(jlong, VideoRenderer_nativeCreateGuiVideoRenderer)(
    JNIEnv* jni, jclass, int x, int y) {
  scoped_ptr<VideoRendererWrapper> renderer(VideoRendererWrapper::Create(
      cricket::VideoRendererFactory::CreateGuiVideoRenderer(x, y)));
  return (jlong)renderer.release();
}

JOW(jlong, VideoRenderer_nativeWrapVideoRenderer)(
    JNIEnv* jni, jclass, jobject j_callbacks) {
  scoped_ptr<JavaVideoRendererWrapper> renderer(
      new JavaVideoRendererWrapper(jni, j_callbacks));
  return (jlong)renderer.release();
}

JOW(jlong, VideoSource_stop)(JNIEnv* jni, jclass, jlong j_p) {
  cricket::VideoCapturer* capturer =
      reinterpret_cast<VideoSourceInterface*>(j_p)->GetVideoCapturer();
  scoped_ptr<cricket::VideoFormatPod> format(
      new cricket::VideoFormatPod(*capturer->GetCaptureFormat()));
  capturer->Stop();
  return jlongFromPointer(format.release());
}

JOW(void, VideoSource_restart)(
    JNIEnv* jni, jclass, jlong j_p_source, jlong j_p_format) {
  CHECK(j_p_source);
  CHECK(j_p_format);
  scoped_ptr<cricket::VideoFormatPod> format(
      reinterpret_cast<cricket::VideoFormatPod*>(j_p_format));
  reinterpret_cast<VideoSourceInterface*>(j_p_source)->GetVideoCapturer()->
      StartCapturing(cricket::VideoFormat(*format));
}

JOW(void, VideoSource_freeNativeVideoFormat)(
    JNIEnv* jni, jclass, jlong j_p) {
  delete reinterpret_cast<cricket::VideoFormatPod*>(j_p);
}

JOW(jstring, MediaStreamTrack_nativeId)(JNIEnv* jni, jclass, jlong j_p) {
  return JavaStringFromStdString(
      jni, reinterpret_cast<MediaStreamTrackInterface*>(j_p)->id());
}

JOW(jstring, MediaStreamTrack_nativeKind)(JNIEnv* jni, jclass, jlong j_p) {
  return JavaStringFromStdString(
      jni, reinterpret_cast<MediaStreamTrackInterface*>(j_p)->kind());
}

JOW(jboolean, MediaStreamTrack_nativeEnabled)(JNIEnv* jni, jclass, jlong j_p) {
  return reinterpret_cast<MediaStreamTrackInterface*>(j_p)->enabled();
}

JOW(jobject, MediaStreamTrack_nativeState)(JNIEnv* jni, jclass, jlong j_p) {
  return JavaEnumFromIndex(
      jni,
      "MediaStreamTrack$State",
      reinterpret_cast<MediaStreamTrackInterface*>(j_p)->state());
}

JOW(jboolean, MediaStreamTrack_nativeSetState)(
    JNIEnv* jni, jclass, jlong j_p, jint j_new_state) {
  MediaStreamTrackInterface::TrackState new_state =
      (MediaStreamTrackInterface::TrackState)j_new_state;
  return reinterpret_cast<MediaStreamTrackInterface*>(j_p)
      ->set_state(new_state);
}

JOW(jboolean, MediaStreamTrack_nativeSetEnabled)(
    JNIEnv* jni, jclass, jlong j_p, jboolean enabled) {
  return reinterpret_cast<MediaStreamTrackInterface*>(j_p)
      ->set_enabled(enabled);
}

JOW(void, VideoTrack_nativeAddRenderer)(
    JNIEnv* jni, jclass,
    jlong j_video_track_pointer, jlong j_renderer_pointer) {
  reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer)->AddRenderer(
      reinterpret_cast<VideoRendererInterface*>(j_renderer_pointer));
}

JOW(void, VideoTrack_nativeRemoveRenderer)(
    JNIEnv* jni, jclass,
    jlong j_video_track_pointer, jlong j_renderer_pointer) {
  reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer)->RemoveRenderer(
      reinterpret_cast<VideoRendererInterface*>(j_renderer_pointer));
}