Upstream version 11.40.277.0
[platform/framework/web/crosswalk.git] / src / third_party / libjingle / source / talk / app / webrtc / java / jni / peerconnection_jni.cc
1 /*
2  * libjingle
3  * Copyright 2013, Google Inc.
4  *
5  * Redistribution and use in source and binary forms, with or without
6  * modification, are permitted provided that the following conditions are met:
7  *
8  *  1. Redistributions of source code must retain the above copyright notice,
9  *     this list of conditions and the following disclaimer.
10  *  2. Redistributions in binary form must reproduce the above copyright notice,
11  *     this list of conditions and the following disclaimer in the documentation
12  *     and/or other materials provided with the distribution.
13  *  3. The name of the author may not be used to endorse or promote products
14  *     derived from this software without specific prior written permission.
15  *
16  * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
17  * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18  * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
19  * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
20  * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
21  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
22  * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
23  * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
24  * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
25  * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26  */
27
28 // Hints for future visitors:
29 // This entire file is an implementation detail of the org.webrtc Java package,
30 // the most interesting bits of which are org.webrtc.PeerConnection{,Factory}.
31 // The layout of this file is roughly:
32 // - various helper C++ functions & classes that wrap Java counterparts and
33 //   expose a C++ interface that can be passed to the C++ PeerConnection APIs
34 // - implementations of methods declared "static" in the Java package (named
35 //   things like Java_org_webrtc_OMG_Can_This_Name_Be_Any_Longer, prescribed by
36 //   the JNI spec).
37 //
38 // Lifecycle notes: objects are owned where they will be called; in other words
39 // FooObservers are owned by C++-land, and user-callable objects (e.g.
40 // PeerConnection and VideoTrack) are owned by Java-land.
41 // When this file allocates C++ RefCountInterfaces it AddRef()s an artificial
42 // ref simulating the jlong held in Java-land, and then Release()s the ref in
43 // the respective free call.  Sometimes this AddRef is implicit in the
44 // construction of a scoped_refptr<> which is then .release()d.
45 // Any persistent (non-local) references from C++ to Java must be global or weak
46 // (in which case they must be checked before use)!
47 //
48 // Exception notes: pretty much all JNI calls can throw Java exceptions, so each
49 // call through a JNIEnv* pointer needs to be followed by an ExceptionCheck()
50 // call.  In this file this is done in CHECK_EXCEPTION, making for much easier
51 // debugging in case of failure (the alternative is to wait for control to
52 // return to the Java frame that called code in this file, at which point it's
53 // impossible to tell which JNI call broke).
54
55 #include <jni.h>
56 #undef JNIEXPORT
57 #define JNIEXPORT __attribute__((visibility("default")))
58
59 #include <asm/unistd.h>
60 #include <sys/prctl.h>
61 #include <sys/syscall.h>
62 #include <unistd.h>
63 #include <limits>
64 #include <map>
65
66 #include "talk/app/webrtc/mediaconstraintsinterface.h"
67 #include "talk/app/webrtc/peerconnectioninterface.h"
68 #include "talk/app/webrtc/videosourceinterface.h"
69 #include "talk/media/base/videocapturer.h"
70 #include "talk/media/base/videorenderer.h"
71 #include "talk/media/devices/videorendererfactory.h"
72 #include "talk/media/webrtc/webrtcvideocapturer.h"
73 #include "talk/media/webrtc/webrtcvideodecoderfactory.h"
74 #include "talk/media/webrtc/webrtcvideoencoderfactory.h"
75 #include "third_party/icu/source/common/unicode/unistr.h"
76 #include "third_party/libyuv/include/libyuv/convert.h"
77 #include "third_party/libyuv/include/libyuv/convert_from.h"
78 #include "third_party/libyuv/include/libyuv/video_common.h"
79 #include "webrtc/base/bind.h"
80 #include "webrtc/base/checks.h"
81 #include "webrtc/base/logging.h"
82 #include "webrtc/base/messagequeue.h"
83 #include "webrtc/base/ssladapter.h"
84 #include "webrtc/common_video/interface/texture_video_frame.h"
85 #include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
86 #include "webrtc/system_wrappers/interface/compile_assert.h"
87 #include "webrtc/system_wrappers/interface/trace.h"
88 #include "webrtc/video_engine/include/vie_base.h"
89 #include "webrtc/voice_engine/include/voe_base.h"
90
91 #if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
92 #include <android/log.h>
93 #include "webrtc/modules/video_capture/video_capture_internal.h"
94 #include "webrtc/modules/video_render/video_render_internal.h"
95 #include "webrtc/system_wrappers/interface/logcat_trace_context.h"
96 #include "webrtc/system_wrappers/interface/tick_util.h"
97 using webrtc::CodecSpecificInfo;
98 using webrtc::DecodedImageCallback;
99 using webrtc::EncodedImage;
100 using webrtc::I420VideoFrame;
101 using webrtc::LogcatTraceContext;
102 using webrtc::RTPFragmentationHeader;
103 using webrtc::TextureVideoFrame;
104 using webrtc::TickTime;
105 using webrtc::VideoCodec;
106 #endif
107
108 using icu::UnicodeString;
109 using rtc::Bind;
110 using rtc::Thread;
111 using rtc::ThreadManager;
112 using rtc::scoped_ptr;
113 using webrtc::AudioSourceInterface;
114 using webrtc::AudioTrackInterface;
115 using webrtc::AudioTrackVector;
116 using webrtc::CreateSessionDescriptionObserver;
117 using webrtc::DataBuffer;
118 using webrtc::DataChannelInit;
119 using webrtc::DataChannelInterface;
120 using webrtc::DataChannelObserver;
121 using webrtc::IceCandidateInterface;
122 using webrtc::NativeHandle;
123 using webrtc::MediaConstraintsInterface;
124 using webrtc::MediaSourceInterface;
125 using webrtc::MediaStreamInterface;
126 using webrtc::MediaStreamTrackInterface;
127 using webrtc::PeerConnectionFactoryInterface;
128 using webrtc::PeerConnectionInterface;
129 using webrtc::PeerConnectionObserver;
130 using webrtc::SessionDescriptionInterface;
131 using webrtc::SetSessionDescriptionObserver;
132 using webrtc::StatsObserver;
133 using webrtc::StatsReport;
134 using webrtc::VideoRendererInterface;
135 using webrtc::VideoSourceInterface;
136 using webrtc::VideoTrackInterface;
137 using webrtc::VideoTrackVector;
138 using webrtc::kVideoCodecVP8;
139
// Abort the process if |jni| has a Java exception pending.
// This macro uses the comma operator to execute ExceptionDescribe
// and ExceptionClear ignoring their return values and sending ""
// to the error stream.
#define CHECK_EXCEPTION(jni)    \
  CHECK(!jni->ExceptionCheck()) \
      << (jni->ExceptionDescribe(), jni->ExceptionClear(), "")

// Helper that calls ptr->Release() and aborts the process with a useful
// message if that didn't actually delete *ptr because of extra refcounts.
#define CHECK_RELEASE(ptr) \
  CHECK_EQ(0, (ptr)->Release()) << "Unexpected refcount."
152
153 namespace {
154
// The process-wide Java VM, used to obtain a JNIEnv* for any thread.
static JavaVM* g_jvm = NULL;  // Set in JNI_OnLoad().

// Guards one-time creation of |g_jni_ptr| (see CreateJNIPtrKey()).
static pthread_once_t g_jni_ptr_once = PTHREAD_ONCE_INIT;
// Key for per-thread JNIEnv* data.  Non-NULL in threads attached to |g_jvm| by
// AttachCurrentThreadIfNeeded(), NULL in unattached threads and threads that
// were attached by the JVM because of a Java->native call.
static pthread_key_t g_jni_ptr;

#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
// True once the Android-specific one-time setup has run.
// Set in PeerConnectionFactory_initializeAndroidGlobals().
static bool factory_static_initialized = false;
#endif
167
168
169 // Return thread ID as a string.
170 static std::string GetThreadId() {
171   char buf[21];  // Big enough to hold a kuint64max plus terminating NULL.
172   CHECK_LT(snprintf(buf, sizeof(buf), "%llu", syscall(__NR_gettid)),
173            sizeof(buf))
174       << "Thread id is bigger than uint64??";
175   return std::string(buf);
176 }
177
178 // Return the current thread's name.
179 static std::string GetThreadName() {
180   char name[17];
181   CHECK_EQ(0, prctl(PR_GET_NAME, name)) << "prctl(PR_GET_NAME) failed";
182   name[16] = '\0';
183   return std::string(name);
184 }
185
186 // Return a |JNIEnv*| usable on this thread or NULL if this thread is detached.
187 static JNIEnv* GetEnv() {
188   void* env = NULL;
189   jint status = g_jvm->GetEnv(&env, JNI_VERSION_1_6);
190   CHECK(((env != NULL) && (status == JNI_OK)) ||
191         ((env == NULL) && (status == JNI_EDETACHED)))
192       << "Unexpected GetEnv return: " << status << ":" << env;
193   return reinterpret_cast<JNIEnv*>(env);
194 }
195
// pthread TLS destructor for |g_jni_ptr|: detaches the current thread from
// the JVM when a thread that this code attached is exiting.
static void ThreadDestructor(void* prev_jni_ptr) {
  // This function only runs on threads where |g_jni_ptr| is non-NULL, meaning
  // we were responsible for originally attaching the thread, so are responsible
  // for detaching it now.  However, because some JVM implementations (notably
  // Oracle's http://goo.gl/eHApYT) also use the pthread_key_create mechanism,
  // the JVMs accounting info for this thread may already be wiped out by the
  // time this is called. Thus it may appear we are already detached even though
  // it was our responsibility to detach!  Oh well.
  if (!GetEnv())
    return;

  // Sanity-check that the env stashed in TLS is the one for this thread.
  CHECK(GetEnv() == prev_jni_ptr)
      << "Detaching from another thread: " << prev_jni_ptr << ":" << GetEnv();
  jint status = g_jvm->DetachCurrentThread();
  CHECK(status == JNI_OK) << "Failed to detach thread: " << status;
  // After a successful detach, GetEnv() must report detached.
  CHECK(!GetEnv()) << "Detaching was a successful no-op???";
}
213
214 static void CreateJNIPtrKey() {
215   CHECK(!pthread_key_create(&g_jni_ptr, &ThreadDestructor))
216       << "pthread_key_create";
217 }
218
// Return a |JNIEnv*| usable on this thread.  Attaches to |g_jvm| if necessary.
static JNIEnv* AttachCurrentThreadIfNeeded() {
  JNIEnv* jni = GetEnv();
  if (jni)
    return jni;
  // Not attached, so the TLS slot must be empty too, or bookkeeping broke.
  CHECK(!pthread_getspecific(g_jni_ptr))
      << "TLS has a JNIEnv* but not attached?";

  // Name the attached thread after its native name and tid to ease debugging
  // from the Java side.  The buffer is freed right after the attach call —
  // this assumes the JVM copies |args.name| (per the JNI spec); confirm on
  // exotic VMs.
  char* name = strdup((GetThreadName() + " - " + GetThreadId()).c_str());
  JavaVMAttachArgs args;
  args.version = JNI_VERSION_1_6;
  args.name = name;
  args.group = NULL;
  // Deal with difference in signatures between Oracle's jni.h and Android's.
#ifdef _JAVASOFT_JNI_H_  // Oracle's jni.h violates the JNI spec!
  void* env = NULL;
#else
  JNIEnv* env = NULL;
#endif
  CHECK(!g_jvm->AttachCurrentThread(&env, &args)) << "Failed to attach thread";
  free(name);
  CHECK(env) << "AttachCurrentThread handed back NULL!";
  jni = reinterpret_cast<JNIEnv*>(env);
  // Remember the env in TLS so ThreadDestructor() detaches at thread exit.
  CHECK(!pthread_setspecific(g_jni_ptr, jni)) << "pthread_setspecific";
  return jni;
}
245
246 // Return a |jlong| that will correctly convert back to |ptr|.  This is needed
247 // because the alternative (of silently passing a 32-bit pointer to a vararg
248 // function expecting a 64-bit param) picks up garbage in the high 32 bits.
static jlong jlongFromPointer(void* ptr) {
  COMPILE_ASSERT(sizeof(intptr_t) <= sizeof(jlong),
                 Time_to_rethink_the_use_of_jlongs);
  // Going through intptr_t to be obvious about the definedness of the
  // conversion from pointer to integral type.  intptr_t to jlong is a standard
  // widening by the COMPILE_ASSERT above.
  jlong ret = reinterpret_cast<intptr_t>(ptr);
  // NOTE(review): assert() is compiled out in NDEBUG builds; the
  // COMPILE_ASSERT above already guarantees the round-trip is lossless, so
  // this is only a debug-build sanity check.
  assert(reinterpret_cast<void*>(ret) == ptr);
  return ret;
}
259
260 // Android's FindClass() is trickier than usual because the app-specific
261 // ClassLoader is not consulted when there is no app-specific frame on the
262 // stack.  Consequently, we only look up classes once in JNI_OnLoad.
263 // http://developer.android.com/training/articles/perf-jni.html#faq_FindClass
class ClassReferenceHolder {
 public:
  // Eagerly look up every Java class this file will ever need and pin each
  // one with a global reference (released later via FreeReferences()).
  explicit ClassReferenceHolder(JNIEnv* jni) {
    LoadClass(jni, "java/nio/ByteBuffer");
    LoadClass(jni, "org/webrtc/AudioTrack");
    LoadClass(jni, "org/webrtc/DataChannel");
    LoadClass(jni, "org/webrtc/DataChannel$Buffer");
    LoadClass(jni, "org/webrtc/DataChannel$Init");
    LoadClass(jni, "org/webrtc/DataChannel$State");
    LoadClass(jni, "org/webrtc/IceCandidate");
#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
    LoadClass(jni, "android/graphics/SurfaceTexture");
    LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder");
    LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
    LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder");
    LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo");
    // EGLContext is only looked up when the Java side reports EGL14 support,
    // so the FindClass below can't fail on pre-EGL14 devices.
    jclass j_decoder_class = GetClass("org/webrtc/MediaCodecVideoDecoder");
    jmethodID j_is_egl14_supported_method = jni->GetStaticMethodID(
        j_decoder_class, "isEGL14Supported", "()Z");
    bool is_egl14_supported = jni->CallStaticBooleanMethod(
        j_decoder_class, j_is_egl14_supported_method);
    CHECK_EXCEPTION(jni);
    if (is_egl14_supported) {
      LoadClass(jni, "android/opengl/EGLContext");
    }
#endif
    LoadClass(jni, "org/webrtc/MediaSource$State");
    LoadClass(jni, "org/webrtc/MediaStream");
    LoadClass(jni, "org/webrtc/MediaStreamTrack$State");
    LoadClass(jni, "org/webrtc/PeerConnection$IceConnectionState");
    LoadClass(jni, "org/webrtc/PeerConnection$IceGatheringState");
    LoadClass(jni, "org/webrtc/PeerConnection$SignalingState");
    LoadClass(jni, "org/webrtc/SessionDescription");
    LoadClass(jni, "org/webrtc/SessionDescription$Type");
    LoadClass(jni, "org/webrtc/StatsReport");
    LoadClass(jni, "org/webrtc/StatsReport$Value");
    LoadClass(jni, "org/webrtc/VideoRenderer$I420Frame");
    LoadClass(jni, "org/webrtc/VideoTrack");
  }

  ~ClassReferenceHolder() {
    CHECK(classes_.empty()) << "Must call FreeReferences() before dtor!";
  }

  // Drop every pinned class.  Must be called (with a valid JNIEnv*) before
  // this object is destroyed.
  void FreeReferences(JNIEnv* jni) {
    for (std::map<std::string, jclass>::const_iterator it = classes_.begin();
         it != classes_.end(); ++it) {
      jni->DeleteGlobalRef(it->second);
    }
    classes_.clear();
  }

  // Return the pinned class for |name|; aborts if |name| was never loaded.
  jclass GetClass(const std::string& name) {
    std::map<std::string, jclass>::iterator it = classes_.find(name);
    CHECK(it != classes_.end()) << "Unexpected GetClass() call for: " << name;
    return it->second;
  }

 private:
  // FindClass |name|, promote the result to a global ref, and cache it.
  // NOTE(review): the local ref returned by FindClass is never
  // DeleteLocalRef()d; presumably harmless at JNI_OnLoad scope — confirm.
  void LoadClass(JNIEnv* jni, const std::string& name) {
    jclass localRef = jni->FindClass(name.c_str());
    CHECK_EXCEPTION(jni) << "error during FindClass: " << name;
    CHECK(localRef) << name;
    jclass globalRef = reinterpret_cast<jclass>(jni->NewGlobalRef(localRef));
    CHECK_EXCEPTION(jni) << "error during NewGlobalRef: " << name;
    CHECK(globalRef) << name;
    bool inserted = classes_.insert(std::make_pair(name, globalRef)).second;
    CHECK(inserted) << "Duplicate class name: " << name;
  }

  // Class name (JNI slash form) -> pinned global reference.
  std::map<std::string, jclass> classes_;
};
336
// Process-wide cache of pinned Java classes.
// Allocated in JNI_OnLoad(), freed in JNI_OnUnLoad().
static ClassReferenceHolder* g_class_reference_holder = NULL;
339
340 // JNIEnv-helper methods that CHECK success: no Java exception thrown and found
341 // object/class/method/field is non-null.
342 jmethodID GetMethodID(
343     JNIEnv* jni, jclass c, const std::string& name, const char* signature) {
344   jmethodID m = jni->GetMethodID(c, name.c_str(), signature);
345   CHECK_EXCEPTION(jni) << "error during GetMethodID: " << name << ", "
346                        << signature;
347   CHECK(m) << name << ", " << signature;
348   return m;
349 }
350
351 jmethodID GetStaticMethodID(
352     JNIEnv* jni, jclass c, const char* name, const char* signature) {
353   jmethodID m = jni->GetStaticMethodID(c, name, signature);
354   CHECK_EXCEPTION(jni) << "error during GetStaticMethodID: " << name << ", "
355                        << signature;
356   CHECK(m) << name << ", " << signature;
357   return m;
358 }
359
360 jfieldID GetFieldID(
361     JNIEnv* jni, jclass c, const char* name, const char* signature) {
362   jfieldID f = jni->GetFieldID(c, name, signature);
363   CHECK_EXCEPTION(jni) << "error during GetFieldID";
364   CHECK(f) << name << ", " << signature;
365   return f;
366 }
367
368 // Returns a global reference guaranteed to be valid for the lifetime of the
369 // process.
jclass FindClass(JNIEnv* jni, const char* name) {
  // |jni| is intentionally unused: lookups are served from the global-ref
  // cache built in JNI_OnLoad (see ClassReferenceHolder for why Android
  // can't use JNIEnv::FindClass from arbitrary threads).
  return g_class_reference_holder->GetClass(name);
}
373
374 jclass GetObjectClass(JNIEnv* jni, jobject object) {
375   jclass c = jni->GetObjectClass(object);
376   CHECK_EXCEPTION(jni) << "error during GetObjectClass";
377   CHECK(c) << "GetObjectClass returned NULL";
378   return c;
379 }
380
381 jobject GetObjectField(JNIEnv* jni, jobject object, jfieldID id) {
382   jobject o = jni->GetObjectField(object, id);
383   CHECK_EXCEPTION(jni) << "error during GetObjectField";
384   CHECK(o) << "GetObjectField returned NULL";
385   return o;
386 }
387
388 jstring GetStringField(JNIEnv* jni, jobject object, jfieldID id) {
389   return static_cast<jstring>(GetObjectField(jni, object, id));
390 }
391
392 jlong GetLongField(JNIEnv* jni, jobject object, jfieldID id) {
393   jlong l = jni->GetLongField(object, id);
394   CHECK_EXCEPTION(jni) << "error during GetLongField";
395   return l;
396 }
397
398 jint GetIntField(JNIEnv* jni, jobject object, jfieldID id) {
399   jint i = jni->GetIntField(object, id);
400   CHECK_EXCEPTION(jni) << "error during GetIntField";
401   return i;
402 }
403
404 bool GetBooleanField(JNIEnv* jni, jobject object, jfieldID id) {
405   jboolean b = jni->GetBooleanField(object, id);
406   CHECK_EXCEPTION(jni) << "error during GetBooleanField";
407   return b;
408 }
409
410 jobject NewGlobalRef(JNIEnv* jni, jobject o) {
411   jobject ret = jni->NewGlobalRef(o);
412   CHECK_EXCEPTION(jni) << "error during NewGlobalRef";
413   CHECK(ret);
414   return ret;
415 }
416
// Release a global reference, aborting if the JVM reports an error.
void DeleteGlobalRef(JNIEnv* jni, jobject o) {
  jni->DeleteGlobalRef(o);
  CHECK_EXCEPTION(jni) << "error during DeleteGlobalRef";
}
421
422 // Given a jweak reference, allocate a (strong) local reference scoped to the
423 // lifetime of this object if the weak reference is still valid, or NULL
424 // otherwise.
425 class WeakRef {
426  public:
427   WeakRef(JNIEnv* jni, jweak ref)
428       : jni_(jni), obj_(jni_->NewLocalRef(ref)) {
429     CHECK_EXCEPTION(jni) << "error during NewLocalRef";
430   }
431   ~WeakRef() {
432     if (obj_) {
433       jni_->DeleteLocalRef(obj_);
434       CHECK_EXCEPTION(jni_) << "error during DeleteLocalRef";
435     }
436   }
437   jobject obj() { return obj_; }
438
439  private:
440   JNIEnv* const jni_;
441   jobject const obj_;
442 };
443
444 // Scope Java local references to the lifetime of this object.  Use in all C++
445 // callbacks (i.e. entry points that don't originate in a Java callstack
446 // through a "native" method call).
class ScopedLocalRefFrame {
 public:
  // Push a new local-reference frame; aborts if the JVM refuses.
  explicit ScopedLocalRefFrame(JNIEnv* jni) : jni_(jni) {
    CHECK(!jni_->PushLocalFrame(0)) << "Failed to PushLocalFrame";
  }
  // Pop the frame, freeing every local ref created while it was active.
  ~ScopedLocalRefFrame() {
    jni_->PopLocalFrame(NULL);
  }

 private:
  JNIEnv* jni_;
};
459
460 // Scoped holder for global Java refs.
template<class T>  // T is jclass, jobject, jintArray, etc.
class ScopedGlobalRef {
 public:
  // NOTE(review): unlike the NewGlobalRef() helper above, this constructor
  // does not CHECK for a pending exception or a NULL result — confirm that
  // all call sites can tolerate a silently-NULL ref.
  ScopedGlobalRef(JNIEnv* jni, T obj)
      : obj_(static_cast<T>(jni->NewGlobalRef(obj))) {}
  ~ScopedGlobalRef() {
    // The destructor may run on any thread, so (re)attach before deleting.
    DeleteGlobalRef(AttachCurrentThreadIfNeeded(), obj_);
  }
  T operator*() const {
    return obj_;
  }
 private:
  // NOTE(review): copying is not disabled; a copy would double-delete the
  // global ref in the second destructor — verify no call site copies this.
  T obj_;
};
475
476 // Java references to "null" can only be distinguished as such in C++ by
477 // creating a local reference, so this helper wraps that logic.
478 static bool IsNull(JNIEnv* jni, jobject obj) {
479   ScopedLocalRefFrame local_ref_frame(jni);
480   return jni->NewLocalRef(obj) == NULL;
481 }
482
483 // Return the (singleton) Java Enum object corresponding to |index|;
484 // |state_class_fragment| is something like "MediaSource$State".
485 jobject JavaEnumFromIndex(
486     JNIEnv* jni, const std::string& state_class_fragment, int index) {
487   std::string state_class_name = "org/webrtc/" + state_class_fragment;
488   jclass state_class = FindClass(jni, state_class_name.c_str());
489   jmethodID state_values_id = GetStaticMethodID(
490       jni, state_class, "values", ("()[L" + state_class_name  + ";").c_str());
491   jobjectArray state_values = static_cast<jobjectArray>(
492       jni->CallStaticObjectMethod(state_class, state_values_id));
493   CHECK_EXCEPTION(jni) << "error during CallStaticObjectMethod";
494   jobject ret = jni->GetObjectArrayElement(state_values, index);
495   CHECK_EXCEPTION(jni) << "error during GetObjectArrayElement";
496   return ret;
497 }
498
499 // Given a UTF-8 encoded |native| string return a new (UTF-16) jstring.
500 static jstring JavaStringFromStdString(JNIEnv* jni, const std::string& native) {
501   UnicodeString ustr(UnicodeString::fromUTF8(native));
502   jstring jstr = jni->NewString(ustr.getBuffer(), ustr.length());
503   CHECK_EXCEPTION(jni) << "error during NewString";
504   return jstr;
505 }
506
507 // Given a (UTF-16) jstring return a new UTF-8 native string.
508 static std::string JavaToStdString(JNIEnv* jni, const jstring& j_string) {
509   const jchar* jchars = jni->GetStringChars(j_string, NULL);
510   CHECK_EXCEPTION(jni) << "Error during GetStringChars";
511   UnicodeString ustr(jchars, jni->GetStringLength(j_string));
512   CHECK_EXCEPTION(jni) << "Error during GetStringLength";
513   jni->ReleaseStringChars(j_string, jchars);
514   CHECK_EXCEPTION(jni) << "Error during ReleaseStringChars";
515   std::string ret;
516   return ustr.toUTF8String(ret);
517 }
518
// Convert a Java DataChannel.Init object into its native DataChannelInit
// equivalent by reading each field reflectively.
static DataChannelInit JavaDataChannelInitToNative(
    JNIEnv* jni, jobject j_init) {
  DataChannelInit init;

  // Resolve the field IDs; the class itself comes from the global-ref cache.
  jclass j_init_class = FindClass(jni, "org/webrtc/DataChannel$Init");
  jfieldID ordered_id = GetFieldID(jni, j_init_class, "ordered", "Z");
  jfieldID max_retransmit_time_id =
      GetFieldID(jni, j_init_class, "maxRetransmitTimeMs", "I");
  jfieldID max_retransmits_id =
      GetFieldID(jni, j_init_class, "maxRetransmits", "I");
  jfieldID protocol_id =
      GetFieldID(jni, j_init_class, "protocol", "Ljava/lang/String;");
  jfieldID negotiated_id = GetFieldID(jni, j_init_class, "negotiated", "Z");
  jfieldID id_id = GetFieldID(jni, j_init_class, "id", "I");

  // Copy every field; the Get*Field helpers abort on any JNI failure.
  init.ordered = GetBooleanField(jni, j_init, ordered_id);
  init.maxRetransmitTime = GetIntField(jni, j_init, max_retransmit_time_id);
  init.maxRetransmits = GetIntField(jni, j_init, max_retransmits_id);
  init.protocol = JavaToStdString(
      jni, GetStringField(jni, j_init, protocol_id));
  init.negotiated = GetBooleanField(jni, j_init, negotiated_id);
  init.id = GetIntField(jni, j_init, id_id);

  return init;
}
544
545 class ConstraintsWrapper;
546
547 // Adapter between the C++ PeerConnectionObserver interface and the Java
548 // PeerConnection.Observer interface.  Wraps an instance of the Java interface
549 // and dispatches C++ callbacks to Java.
550 class PCOJava : public PeerConnectionObserver {
551  public:
552   PCOJava(JNIEnv* jni, jobject j_observer)
553       : j_observer_global_(jni, j_observer),
554         j_observer_class_(jni, GetObjectClass(jni, *j_observer_global_)),
555         j_media_stream_class_(jni, FindClass(jni, "org/webrtc/MediaStream")),
556         j_media_stream_ctor_(GetMethodID(
557             jni, *j_media_stream_class_, "<init>", "(J)V")),
558         j_audio_track_class_(jni, FindClass(jni, "org/webrtc/AudioTrack")),
559         j_audio_track_ctor_(GetMethodID(
560             jni, *j_audio_track_class_, "<init>", "(J)V")),
561         j_video_track_class_(jni, FindClass(jni, "org/webrtc/VideoTrack")),
562         j_video_track_ctor_(GetMethodID(
563             jni, *j_video_track_class_, "<init>", "(J)V")),
564         j_data_channel_class_(jni, FindClass(jni, "org/webrtc/DataChannel")),
565         j_data_channel_ctor_(GetMethodID(
566             jni, *j_data_channel_class_, "<init>", "(J)V")) {
567   }
568
569   virtual ~PCOJava() {}
570
571   virtual void OnIceCandidate(const IceCandidateInterface* candidate) OVERRIDE {
572     ScopedLocalRefFrame local_ref_frame(jni());
573     std::string sdp;
574     CHECK(candidate->ToString(&sdp)) << "got so far: " << sdp;
575     jclass candidate_class = FindClass(jni(), "org/webrtc/IceCandidate");
576     jmethodID ctor = GetMethodID(jni(), candidate_class,
577         "<init>", "(Ljava/lang/String;ILjava/lang/String;)V");
578     jstring j_mid = JavaStringFromStdString(jni(), candidate->sdp_mid());
579     jstring j_sdp = JavaStringFromStdString(jni(), sdp);
580     jobject j_candidate = jni()->NewObject(
581         candidate_class, ctor, j_mid, candidate->sdp_mline_index(), j_sdp);
582     CHECK_EXCEPTION(jni()) << "error during NewObject";
583     jmethodID m = GetMethodID(jni(), *j_observer_class_,
584                               "onIceCandidate", "(Lorg/webrtc/IceCandidate;)V");
585     jni()->CallVoidMethod(*j_observer_global_, m, j_candidate);
586     CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
587   }
588
589   virtual void OnSignalingChange(
590       PeerConnectionInterface::SignalingState new_state) OVERRIDE {
591     ScopedLocalRefFrame local_ref_frame(jni());
592     jmethodID m = GetMethodID(
593         jni(), *j_observer_class_, "onSignalingChange",
594         "(Lorg/webrtc/PeerConnection$SignalingState;)V");
595     jobject new_state_enum =
596         JavaEnumFromIndex(jni(), "PeerConnection$SignalingState", new_state);
597     jni()->CallVoidMethod(*j_observer_global_, m, new_state_enum);
598     CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
599   }
600
601   virtual void OnIceConnectionChange(
602       PeerConnectionInterface::IceConnectionState new_state) OVERRIDE {
603     ScopedLocalRefFrame local_ref_frame(jni());
604     jmethodID m = GetMethodID(
605         jni(), *j_observer_class_, "onIceConnectionChange",
606         "(Lorg/webrtc/PeerConnection$IceConnectionState;)V");
607     jobject new_state_enum = JavaEnumFromIndex(
608         jni(), "PeerConnection$IceConnectionState", new_state);
609     jni()->CallVoidMethod(*j_observer_global_, m, new_state_enum);
610     CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
611   }
612
613   virtual void OnIceGatheringChange(
614       PeerConnectionInterface::IceGatheringState new_state) OVERRIDE {
615     ScopedLocalRefFrame local_ref_frame(jni());
616     jmethodID m = GetMethodID(
617         jni(), *j_observer_class_, "onIceGatheringChange",
618         "(Lorg/webrtc/PeerConnection$IceGatheringState;)V");
619     jobject new_state_enum = JavaEnumFromIndex(
620         jni(), "PeerConnection$IceGatheringState", new_state);
621     jni()->CallVoidMethod(*j_observer_global_, m, new_state_enum);
622     CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
623   }
624
625   virtual void OnAddStream(MediaStreamInterface* stream) OVERRIDE {
626     ScopedLocalRefFrame local_ref_frame(jni());
627     jobject j_stream = jni()->NewObject(
628         *j_media_stream_class_, j_media_stream_ctor_, (jlong)stream);
629     CHECK_EXCEPTION(jni()) << "error during NewObject";
630
631     AudioTrackVector audio_tracks = stream->GetAudioTracks();
632     for (size_t i = 0; i < audio_tracks.size(); ++i) {
633       AudioTrackInterface* track = audio_tracks[i];
634       jstring id = JavaStringFromStdString(jni(), track->id());
635       jobject j_track = jni()->NewObject(
636           *j_audio_track_class_, j_audio_track_ctor_, (jlong)track, id);
637       CHECK_EXCEPTION(jni()) << "error during NewObject";
638       jfieldID audio_tracks_id = GetFieldID(jni(),
639                                             *j_media_stream_class_,
640                                             "audioTracks",
641                                             "Ljava/util/LinkedList;");
642       jobject audio_tracks = GetObjectField(jni(), j_stream, audio_tracks_id);
643       jmethodID add = GetMethodID(jni(),
644                                   GetObjectClass(jni(), audio_tracks),
645                                   "add",
646                                   "(Ljava/lang/Object;)Z");
647       jboolean added = jni()->CallBooleanMethod(audio_tracks, add, j_track);
648       CHECK_EXCEPTION(jni()) << "error during CallBooleanMethod";
649       CHECK(added);
650     }
651
652     VideoTrackVector video_tracks = stream->GetVideoTracks();
653     for (size_t i = 0; i < video_tracks.size(); ++i) {
654       VideoTrackInterface* track = video_tracks[i];
655       jstring id = JavaStringFromStdString(jni(), track->id());
656       jobject j_track = jni()->NewObject(
657           *j_video_track_class_, j_video_track_ctor_, (jlong)track, id);
658       CHECK_EXCEPTION(jni()) << "error during NewObject";
659       jfieldID video_tracks_id = GetFieldID(jni(),
660                                             *j_media_stream_class_,
661                                             "videoTracks",
662                                             "Ljava/util/LinkedList;");
663       jobject video_tracks = GetObjectField(jni(), j_stream, video_tracks_id);
664       jmethodID add = GetMethodID(jni(),
665                                   GetObjectClass(jni(), video_tracks),
666                                   "add",
667                                   "(Ljava/lang/Object;)Z");
668       jboolean added = jni()->CallBooleanMethod(video_tracks, add, j_track);
669       CHECK_EXCEPTION(jni()) << "error during CallBooleanMethod";
670       CHECK(added);
671     }
672     streams_[stream] = jni()->NewWeakGlobalRef(j_stream);
673     CHECK_EXCEPTION(jni()) << "error during NewWeakGlobalRef";
674
675     jmethodID m = GetMethodID(jni(), *j_observer_class_, "onAddStream",
676                               "(Lorg/webrtc/MediaStream;)V");
677     jni()->CallVoidMethod(*j_observer_global_, m, j_stream);
678     CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
679   }
680
681   virtual void OnRemoveStream(MediaStreamInterface* stream) OVERRIDE {
682     ScopedLocalRefFrame local_ref_frame(jni());
683     NativeToJavaStreamsMap::iterator it = streams_.find(stream);
684     CHECK(it != streams_.end()) << "unexpected stream: " << std::hex << stream;
685
686     WeakRef s(jni(), it->second);
687     streams_.erase(it);
688     if (!s.obj())
689       return;
690
691     jmethodID m = GetMethodID(jni(), *j_observer_class_, "onRemoveStream",
692                               "(Lorg/webrtc/MediaStream;)V");
693     jni()->CallVoidMethod(*j_observer_global_, m, s.obj());
694     CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
695   }
696
697   virtual void OnDataChannel(DataChannelInterface* channel) OVERRIDE {
698     ScopedLocalRefFrame local_ref_frame(jni());
699     jobject j_channel = jni()->NewObject(
700         *j_data_channel_class_, j_data_channel_ctor_, (jlong)channel);
701     CHECK_EXCEPTION(jni()) << "error during NewObject";
702
703     jmethodID m = GetMethodID(jni(), *j_observer_class_, "onDataChannel",
704                               "(Lorg/webrtc/DataChannel;)V");
705     jni()->CallVoidMethod(*j_observer_global_, m, j_channel);
706
707     // Channel is now owned by Java object, and will be freed from
708     // DataChannel.dispose().  Important that this be done _after_ the
709     // CallVoidMethod above as Java code might call back into native code and be
710     // surprised to see a refcount of 2.
711     int bumped_count = channel->AddRef();
712     CHECK(bumped_count == 2) << "Unexpected refcount OnDataChannel";
713
714     CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
715   }
716
717   virtual void OnRenegotiationNeeded() OVERRIDE {
718     ScopedLocalRefFrame local_ref_frame(jni());
719     jmethodID m =
720         GetMethodID(jni(), *j_observer_class_, "onRenegotiationNeeded", "()V");
721     jni()->CallVoidMethod(*j_observer_global_, m);
722     CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
723   }
724
  // Takes ownership of |constraints|.  May be called at most once.
  void SetConstraints(ConstraintsWrapper* constraints) {
    CHECK(!constraints_.get()) << "constraints already set!";
    constraints_.reset(constraints);
  }
729
  // Returns the constraints previously installed via SetConstraints(), or
  // NULL if none were set.
  const ConstraintsWrapper* constraints() { return constraints_.get(); }

 private:
  // Attaches the current thread to the JVM if necessary and returns its
  // JNIEnv (callbacks may arrive on arbitrary native threads).
  JNIEnv* jni() {
    return AttachCurrentThreadIfNeeded();
  }

  // Global refs to the Java observer and its class.
  const ScopedGlobalRef<jobject> j_observer_global_;
  const ScopedGlobalRef<jclass> j_observer_class_;
  // Cached classes/constructors used to wrap native objects for Java.
  const ScopedGlobalRef<jclass> j_media_stream_class_;
  const jmethodID j_media_stream_ctor_;
  const ScopedGlobalRef<jclass> j_audio_track_class_;
  const jmethodID j_audio_track_ctor_;
  const ScopedGlobalRef<jclass> j_video_track_class_;
  const jmethodID j_video_track_ctor_;
  const ScopedGlobalRef<jclass> j_data_channel_class_;
  const jmethodID j_data_channel_ctor_;
  // Weak refs: Java may collect a MediaStream independently of the native one.
  typedef std::map<void*, jweak> NativeToJavaStreamsMap;
  NativeToJavaStreamsMap streams_;  // C++ -> Java streams.
  scoped_ptr<ConstraintsWrapper> constraints_;
};
751
752 // Wrapper for a Java MediaConstraints object.  Copies all needed data so when
753 // the constructor returns the Java object is no longer needed.
754 class ConstraintsWrapper : public MediaConstraintsInterface {
755  public:
756   ConstraintsWrapper(JNIEnv* jni, jobject j_constraints) {
757     PopulateConstraintsFromJavaPairList(
758         jni, j_constraints, "mandatory", &mandatory_);
759     PopulateConstraintsFromJavaPairList(
760         jni, j_constraints, "optional", &optional_);
761   }
762
763   virtual ~ConstraintsWrapper() {}
764
765   // MediaConstraintsInterface.
766   virtual const Constraints& GetMandatory() const OVERRIDE {
767     return mandatory_;
768   }
769
770   virtual const Constraints& GetOptional() const OVERRIDE {
771     return optional_;
772   }
773
774  private:
775   // Helper for translating a List<Pair<String, String>> to a Constraints.
776   static void PopulateConstraintsFromJavaPairList(
777       JNIEnv* jni, jobject j_constraints,
778       const char* field_name, Constraints* field) {
779     jfieldID j_id = GetFieldID(jni,
780         GetObjectClass(jni, j_constraints), field_name, "Ljava/util/List;");
781     jobject j_list = GetObjectField(jni, j_constraints, j_id);
782     jmethodID j_iterator_id = GetMethodID(jni,
783         GetObjectClass(jni, j_list), "iterator", "()Ljava/util/Iterator;");
784     jobject j_iterator = jni->CallObjectMethod(j_list, j_iterator_id);
785     CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
786     jmethodID j_has_next = GetMethodID(jni,
787         GetObjectClass(jni, j_iterator), "hasNext", "()Z");
788     jmethodID j_next = GetMethodID(jni,
789         GetObjectClass(jni, j_iterator), "next", "()Ljava/lang/Object;");
790     while (jni->CallBooleanMethod(j_iterator, j_has_next)) {
791       CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
792       jobject entry = jni->CallObjectMethod(j_iterator, j_next);
793       CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
794       jmethodID get_key = GetMethodID(jni,
795           GetObjectClass(jni, entry), "getKey", "()Ljava/lang/String;");
796       jstring j_key = reinterpret_cast<jstring>(
797           jni->CallObjectMethod(entry, get_key));
798       CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
799       jmethodID get_value = GetMethodID(jni,
800           GetObjectClass(jni, entry), "getValue", "()Ljava/lang/String;");
801       jstring j_value = reinterpret_cast<jstring>(
802           jni->CallObjectMethod(entry, get_value));
803       CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
804       field->push_back(Constraint(JavaToStdString(jni, j_key),
805                                   JavaToStdString(jni, j_value)));
806     }
807     CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
808   }
809
810   Constraints mandatory_;
811   Constraints optional_;
812 };
813
814 static jobject JavaSdpFromNativeSdp(
815     JNIEnv* jni, const SessionDescriptionInterface* desc) {
816   std::string sdp;
817   CHECK(desc->ToString(&sdp)) << "got so far: " << sdp;
818   jstring j_description = JavaStringFromStdString(jni, sdp);
819
820   jclass j_type_class = FindClass(
821       jni, "org/webrtc/SessionDescription$Type");
822   jmethodID j_type_from_canonical = GetStaticMethodID(
823       jni, j_type_class, "fromCanonicalForm",
824       "(Ljava/lang/String;)Lorg/webrtc/SessionDescription$Type;");
825   jstring j_type_string = JavaStringFromStdString(jni, desc->type());
826   jobject j_type = jni->CallStaticObjectMethod(
827       j_type_class, j_type_from_canonical, j_type_string);
828   CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
829
830   jclass j_sdp_class = FindClass(jni, "org/webrtc/SessionDescription");
831   jmethodID j_sdp_ctor = GetMethodID(
832       jni, j_sdp_class, "<init>",
833       "(Lorg/webrtc/SessionDescription$Type;Ljava/lang/String;)V");
834   jobject j_sdp = jni->NewObject(
835       j_sdp_class, j_sdp_ctor, j_type, j_description);
836   CHECK_EXCEPTION(jni) << "error during NewObject";
837   return j_sdp;
838 }
839
840 template <class T>  // T is one of {Create,Set}SessionDescriptionObserver.
841 class SdpObserverWrapper : public T {
842  public:
843   SdpObserverWrapper(JNIEnv* jni, jobject j_observer,
844                      ConstraintsWrapper* constraints)
845       : constraints_(constraints),
846         j_observer_global_(jni, j_observer),
847         j_observer_class_(jni, GetObjectClass(jni, j_observer)) {
848   }
849
850   virtual ~SdpObserverWrapper() {}
851
852   // Can't mark OVERRIDE because of templating.
853   virtual void OnSuccess() {
854     ScopedLocalRefFrame local_ref_frame(jni());
855     jmethodID m = GetMethodID(jni(), *j_observer_class_, "onSetSuccess", "()V");
856     jni()->CallVoidMethod(*j_observer_global_, m);
857     CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
858   }
859
860   // Can't mark OVERRIDE because of templating.
861   virtual void OnSuccess(SessionDescriptionInterface* desc) {
862     ScopedLocalRefFrame local_ref_frame(jni());
863     jmethodID m = GetMethodID(
864         jni(), *j_observer_class_, "onCreateSuccess",
865         "(Lorg/webrtc/SessionDescription;)V");
866     jobject j_sdp = JavaSdpFromNativeSdp(jni(), desc);
867     jni()->CallVoidMethod(*j_observer_global_, m, j_sdp);
868     CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
869   }
870
871  protected:
872   // Common implementation for failure of Set & Create types, distinguished by
873   // |op| being "Set" or "Create".
874   void OnFailure(const std::string& op, const std::string& error) {
875     jmethodID m = GetMethodID(jni(), *j_observer_class_, "on" + op + "Failure",
876                               "(Ljava/lang/String;)V");
877     jstring j_error_string = JavaStringFromStdString(jni(), error);
878     jni()->CallVoidMethod(*j_observer_global_, m, j_error_string);
879     CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
880   }
881
882   JNIEnv* jni() {
883     return AttachCurrentThreadIfNeeded();
884   }
885
886  private:
887   scoped_ptr<ConstraintsWrapper> constraints_;
888   const ScopedGlobalRef<jobject> j_observer_global_;
889   const ScopedGlobalRef<jclass> j_observer_class_;
890 };
891
892 class CreateSdpObserverWrapper
893     : public SdpObserverWrapper<CreateSessionDescriptionObserver> {
894  public:
895   CreateSdpObserverWrapper(JNIEnv* jni, jobject j_observer,
896                            ConstraintsWrapper* constraints)
897       : SdpObserverWrapper(jni, j_observer, constraints) {}
898
899   virtual void OnFailure(const std::string& error) OVERRIDE {
900     ScopedLocalRefFrame local_ref_frame(jni());
901     SdpObserverWrapper::OnFailure(std::string("Create"), error);
902   }
903 };
904
905 class SetSdpObserverWrapper
906     : public SdpObserverWrapper<SetSessionDescriptionObserver> {
907  public:
908   SetSdpObserverWrapper(JNIEnv* jni, jobject j_observer,
909                         ConstraintsWrapper* constraints)
910       : SdpObserverWrapper(jni, j_observer, constraints) {}
911
912   virtual void OnFailure(const std::string& error) OVERRIDE {
913     ScopedLocalRefFrame local_ref_frame(jni());
914     SdpObserverWrapper::OnFailure(std::string("Set"), error);
915   }
916 };
917
918 // Adapter for a Java DataChannel$Observer presenting a C++ DataChannelObserver
919 // and dispatching the callback from C++ back to Java.
920 class DataChannelObserverWrapper : public DataChannelObserver {
921  public:
922   DataChannelObserverWrapper(JNIEnv* jni, jobject j_observer)
923       : j_observer_global_(jni, j_observer),
924         j_observer_class_(jni, GetObjectClass(jni, j_observer)),
925         j_buffer_class_(jni, FindClass(jni, "org/webrtc/DataChannel$Buffer")),
926         j_on_state_change_mid_(GetMethodID(jni, *j_observer_class_,
927                                            "onStateChange", "()V")),
928         j_on_message_mid_(GetMethodID(jni, *j_observer_class_, "onMessage",
929                                       "(Lorg/webrtc/DataChannel$Buffer;)V")),
930         j_buffer_ctor_(GetMethodID(jni, *j_buffer_class_,
931                                    "<init>", "(Ljava/nio/ByteBuffer;Z)V")) {
932   }
933
934   virtual ~DataChannelObserverWrapper() {}
935
936   virtual void OnStateChange() OVERRIDE {
937     ScopedLocalRefFrame local_ref_frame(jni());
938     jni()->CallVoidMethod(*j_observer_global_, j_on_state_change_mid_);
939     CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
940   }
941
942   virtual void OnMessage(const DataBuffer& buffer) OVERRIDE {
943     ScopedLocalRefFrame local_ref_frame(jni());
944     jobject byte_buffer =
945         jni()->NewDirectByteBuffer(const_cast<char*>(buffer.data.data()),
946                                    buffer.data.length());
947     jobject j_buffer = jni()->NewObject(*j_buffer_class_, j_buffer_ctor_,
948                                         byte_buffer, buffer.binary);
949     jni()->CallVoidMethod(*j_observer_global_, j_on_message_mid_, j_buffer);
950     CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
951   }
952
953  private:
954   JNIEnv* jni() {
955     return AttachCurrentThreadIfNeeded();
956   }
957
958   const ScopedGlobalRef<jobject> j_observer_global_;
959   const ScopedGlobalRef<jclass> j_observer_class_;
960   const ScopedGlobalRef<jclass> j_buffer_class_;
961   const jmethodID j_on_state_change_mid_;
962   const jmethodID j_on_message_mid_;
963   const jmethodID j_buffer_ctor_;
964 };
965
966 // Adapter for a Java StatsObserver presenting a C++ StatsObserver and
967 // dispatching the callback from C++ back to Java.
968 class StatsObserverWrapper : public StatsObserver {
969  public:
970   StatsObserverWrapper(JNIEnv* jni, jobject j_observer)
971       : j_observer_global_(jni, j_observer),
972         j_observer_class_(jni, GetObjectClass(jni, j_observer)),
973         j_stats_report_class_(jni, FindClass(jni, "org/webrtc/StatsReport")),
974         j_stats_report_ctor_(GetMethodID(
975             jni, *j_stats_report_class_, "<init>",
976             "(Ljava/lang/String;Ljava/lang/String;D"
977             "[Lorg/webrtc/StatsReport$Value;)V")),
978         j_value_class_(jni, FindClass(
979             jni, "org/webrtc/StatsReport$Value")),
980         j_value_ctor_(GetMethodID(
981             jni, *j_value_class_, "<init>",
982             "(Ljava/lang/String;Ljava/lang/String;)V")) {
983   }
984
985   virtual ~StatsObserverWrapper() {}
986
987   virtual void OnComplete(const std::vector<StatsReport>& reports) OVERRIDE {
988     ScopedLocalRefFrame local_ref_frame(jni());
989     jobjectArray j_reports = ReportsToJava(jni(), reports);
990     jmethodID m = GetMethodID(jni(), *j_observer_class_, "onComplete",
991                               "([Lorg/webrtc/StatsReport;)V");
992     jni()->CallVoidMethod(*j_observer_global_, m, j_reports);
993     CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
994   }
995
996  private:
997   jobjectArray ReportsToJava(
998       JNIEnv* jni, const std::vector<StatsReport>& reports) {
999     jobjectArray reports_array = jni->NewObjectArray(
1000         reports.size(), *j_stats_report_class_, NULL);
1001     for (int i = 0; i < reports.size(); ++i) {
1002       ScopedLocalRefFrame local_ref_frame(jni);
1003       const StatsReport& report = reports[i];
1004       jstring j_id = JavaStringFromStdString(jni, report.id);
1005       jstring j_type = JavaStringFromStdString(jni, report.type);
1006       jobjectArray j_values = ValuesToJava(jni, report.values);
1007       jobject j_report = jni->NewObject(*j_stats_report_class_,
1008                                         j_stats_report_ctor_,
1009                                         j_id,
1010                                         j_type,
1011                                         report.timestamp,
1012                                         j_values);
1013       jni->SetObjectArrayElement(reports_array, i, j_report);
1014     }
1015     return reports_array;
1016   }
1017
1018   jobjectArray ValuesToJava(JNIEnv* jni, const StatsReport::Values& values) {
1019     jobjectArray j_values = jni->NewObjectArray(
1020         values.size(), *j_value_class_, NULL);
1021     for (int i = 0; i < values.size(); ++i) {
1022       ScopedLocalRefFrame local_ref_frame(jni);
1023       const StatsReport::Value& value = values[i];
1024       jstring j_name = JavaStringFromStdString(jni, value.name);
1025       jstring j_value = JavaStringFromStdString(jni, value.value);
1026       jobject j_element_value =
1027           jni->NewObject(*j_value_class_, j_value_ctor_, j_name, j_value);
1028       jni->SetObjectArrayElement(j_values, i, j_element_value);
1029     }
1030     return j_values;
1031   }
1032
1033   JNIEnv* jni() {
1034     return AttachCurrentThreadIfNeeded();
1035   }
1036
1037   const ScopedGlobalRef<jobject> j_observer_global_;
1038   const ScopedGlobalRef<jclass> j_observer_class_;
1039   const ScopedGlobalRef<jclass> j_stats_report_class_;
1040   const jmethodID j_stats_report_ctor_;
1041   const ScopedGlobalRef<jclass> j_value_class_;
1042   const jmethodID j_value_ctor_;
1043 };
1044
1045 // Adapter presenting a cricket::VideoRenderer as a
1046 // webrtc::VideoRendererInterface.
1047 class VideoRendererWrapper : public VideoRendererInterface {
1048  public:
1049   static VideoRendererWrapper* Create(cricket::VideoRenderer* renderer) {
1050     if (renderer)
1051       return new VideoRendererWrapper(renderer);
1052     return NULL;
1053   }
1054
1055   virtual ~VideoRendererWrapper() {}
1056
1057   virtual void SetSize(int width, int height) OVERRIDE {
1058     ScopedLocalRefFrame local_ref_frame(AttachCurrentThreadIfNeeded());
1059     const bool kNotReserved = false;  // What does this param mean??
1060     renderer_->SetSize(width, height, kNotReserved);
1061   }
1062
1063   virtual void RenderFrame(const cricket::VideoFrame* frame) OVERRIDE {
1064     ScopedLocalRefFrame local_ref_frame(AttachCurrentThreadIfNeeded());
1065     renderer_->RenderFrame(frame);
1066   }
1067
1068  private:
1069   explicit VideoRendererWrapper(cricket::VideoRenderer* renderer)
1070       : renderer_(renderer) {}
1071
1072   scoped_ptr<cricket::VideoRenderer> renderer_;
1073 };
1074
1075 // Wrapper for texture object in TextureVideoFrame.
class NativeHandleImpl : public NativeHandle {
 public:
  NativeHandleImpl() :
    ref_count_(0), texture_object_(NULL), texture_id_(-1) {}
  virtual ~NativeHandleImpl() {}
  // NOTE(review): this ref-counting is not atomic; presumably AddRef/Release
  // are only ever called from a single thread -- confirm before sharing
  // instances across threads.
  virtual int32_t AddRef() {
    return ++ref_count_;
  }
  virtual int32_t Release() {
    return --ref_count_;
  }
  // Returns the stored texture object as an opaque pointer (actually a
  // jobject; see SetTextureObject below).
  virtual void* GetHandle() {
    return texture_object_;
  }
  int GetTextureId() {
    return texture_id_;
  }
  // Stores |texture_object| (reinterpreted as a jobject) and its texture id.
  void SetTextureObject(void *texture_object, int texture_id) {
    texture_object_ = reinterpret_cast<jobject>(texture_object);
    texture_id_ = texture_id;
  }
  // Current reference count.
  int32_t ref_count() {
    return ref_count_;
  }

 private:
  int32_t ref_count_;
  jobject texture_object_;
  int32_t texture_id_;
};
1106
1107 // Wrapper dispatching webrtc::VideoRendererInterface to a Java VideoRenderer
1108 // instance.
class JavaVideoRendererWrapper : public VideoRendererInterface {
 public:
  // Caches the setSize/renderFrame method IDs off |j_callbacks| and the
  // I420Frame constructors so per-frame dispatch does no class lookups.
  JavaVideoRendererWrapper(JNIEnv* jni, jobject j_callbacks)
      : j_callbacks_(jni, j_callbacks),
        j_set_size_id_(GetMethodID(
            jni, GetObjectClass(jni, j_callbacks), "setSize", "(II)V")),
        j_render_frame_id_(GetMethodID(
            jni, GetObjectClass(jni, j_callbacks), "renderFrame",
            "(Lorg/webrtc/VideoRenderer$I420Frame;)V")),
        j_frame_class_(jni,
                       FindClass(jni, "org/webrtc/VideoRenderer$I420Frame")),
        j_i420_frame_ctor_id_(GetMethodID(
            jni, *j_frame_class_, "<init>", "(II[I[Ljava/nio/ByteBuffer;)V")),
        j_texture_frame_ctor_id_(GetMethodID(
            jni, *j_frame_class_, "<init>",
            "(IILjava/lang/Object;I)V")),
        j_byte_buffer_class_(jni, FindClass(jni, "java/nio/ByteBuffer")) {
    CHECK_EXCEPTION(jni);
  }

  virtual ~JavaVideoRendererWrapper() {}

  virtual void SetSize(int width, int height) OVERRIDE {
    ScopedLocalRefFrame local_ref_frame(jni());
    jni()->CallVoidMethod(*j_callbacks_, j_set_size_id_, width, height);
    CHECK_EXCEPTION(jni());
  }

  // Dispatches |frame| to Java, choosing the texture-based or I420-based
  // Java frame representation depending on whether |frame| carries a native
  // handle.
  virtual void RenderFrame(const cricket::VideoFrame* frame) OVERRIDE {
    ScopedLocalRefFrame local_ref_frame(jni());
    if (frame->GetNativeHandle() != NULL) {
      jobject j_frame = CricketToJavaTextureFrame(frame);
      jni()->CallVoidMethod(*j_callbacks_, j_render_frame_id_, j_frame);
      CHECK_EXCEPTION(jni());
    } else {
      jobject j_frame = CricketToJavaI420Frame(frame);
      jni()->CallVoidMethod(*j_callbacks_, j_render_frame_id_, j_frame);
      CHECK_EXCEPTION(jni());
    }
  }

 private:
  // Return a VideoRenderer.I420Frame referring to the data in |frame|.
  // NOTE(review): the direct ByteBuffers alias |frame|'s plane memory with no
  // copy; presumably Java consumes them before the frame is recycled --
  // confirm the renderer contract.
  jobject CricketToJavaI420Frame(const cricket::VideoFrame* frame) {
    jintArray strides = jni()->NewIntArray(3);
    jint* strides_array = jni()->GetIntArrayElements(strides, NULL);
    strides_array[0] = frame->GetYPitch();
    strides_array[1] = frame->GetUPitch();
    strides_array[2] = frame->GetVPitch();
    jni()->ReleaseIntArrayElements(strides, strides_array, 0);
    jobjectArray planes = jni()->NewObjectArray(3, *j_byte_buffer_class_, NULL);
    jobject y_buffer = jni()->NewDirectByteBuffer(
        const_cast<uint8*>(frame->GetYPlane()),
        frame->GetYPitch() * frame->GetHeight());
    jobject u_buffer = jni()->NewDirectByteBuffer(
        const_cast<uint8*>(frame->GetUPlane()), frame->GetChromaSize());
    jobject v_buffer = jni()->NewDirectByteBuffer(
        const_cast<uint8*>(frame->GetVPlane()), frame->GetChromaSize());
    jni()->SetObjectArrayElement(planes, 0, y_buffer);
    jni()->SetObjectArrayElement(planes, 1, u_buffer);
    jni()->SetObjectArrayElement(planes, 2, v_buffer);
    return jni()->NewObject(
        *j_frame_class_, j_i420_frame_ctor_id_,
        frame->GetWidth(), frame->GetHeight(), strides, planes);
  }

  // Return a VideoRenderer.I420Frame referring texture object in |frame|.
  jobject CricketToJavaTextureFrame(const cricket::VideoFrame* frame) {
    NativeHandleImpl* handle =
        reinterpret_cast<NativeHandleImpl*>(frame->GetNativeHandle());
    jobject texture_object = reinterpret_cast<jobject>(handle->GetHandle());
    int texture_id = handle->GetTextureId();
    return jni()->NewObject(
        *j_frame_class_, j_texture_frame_ctor_id_,
        frame->GetWidth(), frame->GetHeight(), texture_object, texture_id);
  }

  // Attaches the current thread to the JVM if needed and returns its JNIEnv.
  JNIEnv* jni() {
    return AttachCurrentThreadIfNeeded();
  }

  ScopedGlobalRef<jobject> j_callbacks_;
  jmethodID j_set_size_id_;
  jmethodID j_render_frame_id_;
  ScopedGlobalRef<jclass> j_frame_class_;
  jmethodID j_i420_frame_ctor_id_;
  jmethodID j_texture_frame_ctor_id_;
  ScopedGlobalRef<jclass> j_byte_buffer_class_;
};
1198
1199 #if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
1200 // TODO(fischman): consider pulling MediaCodecVideoEncoder out of this file and
1201 // into its own .h/.cc pair, if/when the JNI helper stuff above is extracted
1202 // from this file.
1203
1204 //#define TRACK_BUFFER_TIMING
1205 #define TAG "MediaCodecVideo"
1206 #ifdef TRACK_BUFFER_TIMING
1207 #define ALOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, TAG, __VA_ARGS__)
1208 #else
1209 #define ALOGV(...)
1210 #endif
1211 #define ALOGD(...) __android_log_print(ANDROID_LOG_DEBUG, TAG, __VA_ARGS__)
1212 #define ALOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)
1213
// Color formats supported by encoder - should mirror supportedColorList
// from MediaCodecVideoEncoder.java
enum COLOR_FORMATTYPE {
  COLOR_FormatYUV420Planar = 0x13,
  COLOR_FormatYUV420SemiPlanar = 0x15,
  COLOR_QCOM_FormatYUV420SemiPlanar = 0x7FA30C00,
  // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
  // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
  // This format is presumably similar to COLOR_FormatYUV420SemiPlanar,
  // but requires some (16, 32?) byte alignment.
  COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04
};

// All intervals/timeouts below are in milliseconds (per the "Ms" suffix).
// Arbitrary interval to poll the codec for new outputs.
enum { kMediaCodecPollMs = 10 };
// Media codec maximum output buffer ready timeout.
enum { kMediaCodecTimeoutMs = 500 };
// Interval to print codec statistics (bitrate, fps, encoding/decoding time).
enum { kMediaCodecStatisticsIntervalMs = 3000 };
1233
1234 static int64_t GetCurrentTimeMs() {
1235   return TickTime::Now().Ticks() / 1000000LL;
1236 }
1237
// Allow Invoke() calls from the current thread.
1239 static void AllowBlockingCalls() {
1240   Thread* current_thread = Thread::Current();
1241   if (current_thread != NULL)
1242     current_thread->SetAllowBlockingCalls(true);
1243 }
1244
1245 // MediaCodecVideoEncoder is a webrtc::VideoEncoder implementation that uses
1246 // Android's MediaCodec SDK API behind the scenes to implement (hopefully)
1247 // HW-backed video encode.  This C++ class is implemented as a very thin shim,
1248 // delegating all of the interesting work to org.webrtc.MediaCodecVideoEncoder.
1249 // MediaCodecVideoEncoder is created, operated, and destroyed on a single
1250 // thread, currently the libjingle Worker thread.
class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
                               public rtc::MessageHandler {
 public:
  virtual ~MediaCodecVideoEncoder();
  explicit MediaCodecVideoEncoder(JNIEnv* jni);

  // webrtc::VideoEncoder implementation.  Everything trampolines to
  // |codec_thread_| for execution.
  virtual int32_t InitEncode(const webrtc::VideoCodec* codec_settings,
                             int32_t /* number_of_cores */,
                             uint32_t /* max_payload_size */) OVERRIDE;
  virtual int32_t Encode(
      const webrtc::I420VideoFrame& input_image,
      const webrtc::CodecSpecificInfo* /* codec_specific_info */,
      const std::vector<webrtc::VideoFrameType>* frame_types) OVERRIDE;
  virtual int32_t RegisterEncodeCompleteCallback(
      webrtc::EncodedImageCallback* callback) OVERRIDE;
  virtual int32_t Release() OVERRIDE;
  virtual int32_t SetChannelParameters(uint32_t /* packet_loss */,
                                       int /* rtt */) OVERRIDE;
  virtual int32_t SetRates(uint32_t new_bit_rate, uint32_t frame_rate) OVERRIDE;

  // rtc::MessageHandler implementation.
  virtual void OnMessage(rtc::Message* msg) OVERRIDE;

 private:
  // CHECK-fail if not running on |codec_thread_|.
  void CheckOnCodecThread();

  // Release() and InitEncode() in an attempt to restore the codec to an
  // operable state.  Necessary after all manner of OMX-layer errors.
  void ResetCodec();

  // Implementation of webrtc::VideoEncoder methods above, all running on the
  // codec thread exclusively.
  //
  // If width==0 then this is assumed to be a re-initialization and the
  // previously-current values are reused instead of the passed parameters
  // (makes it easier to reason about thread-safety).
  int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps);
  int32_t EncodeOnCodecThread(
      const webrtc::I420VideoFrame& input_image,
      const std::vector<webrtc::VideoFrameType>* frame_types);
  int32_t RegisterEncodeCompleteCallbackOnCodecThread(
      webrtc::EncodedImageCallback* callback);
  int32_t ReleaseOnCodecThread();
  int32_t SetRatesOnCodecThread(uint32_t new_bit_rate, uint32_t frame_rate);

  // Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members.
  int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info);
  jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info);
  bool GetOutputBufferInfoIsKeyFrame(JNIEnv* jni, jobject j_output_buffer_info);
  jlong GetOutputBufferInfoPresentationTimestampUs(
      JNIEnv* jni,
      jobject j_output_buffer_info);

  // Deliver any outputs pending in the MediaCodec to our |callback_| and return
  // true on success.
  bool DeliverPendingOutputs(JNIEnv* jni);

  // Valid all the time since RegisterEncodeCompleteCallback() Invoke()s to
  // |codec_thread_| synchronously.
  webrtc::EncodedImageCallback* callback_;

  // State that is constant for the lifetime of this object once the ctor
  // returns.
  scoped_ptr<Thread> codec_thread_;  // Thread on which to operate MediaCodec.
  ScopedGlobalRef<jclass> j_media_codec_video_encoder_class_;
  ScopedGlobalRef<jobject> j_media_codec_video_encoder_;
  // Cached method IDs on the Java MediaCodecVideoEncoder object.
  jmethodID j_init_encode_method_;
  jmethodID j_dequeue_input_buffer_method_;
  jmethodID j_encode_method_;
  jmethodID j_release_method_;
  jmethodID j_set_rates_method_;
  jmethodID j_dequeue_output_buffer_method_;
  jmethodID j_release_output_buffer_method_;
  // Cached field IDs on MediaCodecVideoEncoder / $OutputBufferInfo.
  jfieldID j_color_format_field_;
  jfieldID j_info_index_field_;
  jfieldID j_info_buffer_field_;
  jfieldID j_info_is_key_frame_field_;
  jfieldID j_info_presentation_timestamp_us_field_;

  // State that is valid only between InitEncode() and the next Release().
  // Touched only on codec_thread_ so no explicit synchronization necessary.
  int width_;   // Frame width in pixels.
  int height_;  // Frame height in pixels.
  bool inited_;
  // Rolling id attached to encoded frames (presumably the VP8 picture ID --
  // confirm in the Encode path, which is outside this view).
  uint16_t picture_id_;
  enum libyuv::FourCC encoder_fourcc_; // Encoder color space format.
  int last_set_bitrate_kbps_;  // Last-requested bitrate in kbps.
  int last_set_fps_;  // Last-requested frame rate.
  int64_t current_timestamp_us_;  // Current frame timestamps in us.
  int frames_received_;  // Number of frames received by encoder.
  int frames_dropped_;  // Number of frames dropped by encoder.
  int frames_resolution_update_;  // Number of frames with new codec resolution.
  int frames_in_queue_;  // Number of frames in encoder queue.
  int64_t start_time_ms_;  // Start time for statistics.
  int current_frames_;  // Number of frames in the current statistics interval.
  int current_bytes_;  // Encoded bytes in the current statistics interval.
  int current_encoding_time_ms_;  // Overall encoding time in the current
                                  // statistics interval.
  int64_t last_input_timestamp_ms_;  // Timestamp of last received yuv frame.
  int64_t last_output_timestamp_ms_;  // Timestamp of last encoded frame.
  std::vector<int32_t> timestamps_;  // Video frames timestamp queue.
  std::vector<int64_t> render_times_ms_;  // Video frames render time queue.
  std::vector<int64_t> frame_rtc_times_ms_;  // Time when video frame is sent to
                                             // encoder input.
  // Frame size in bytes fed to MediaCodec.
  int yuv_size_;
  // True only when between a callback_->Encoded() call return a positive value
  // and the next Encode() call being ignored.
  bool drop_next_input_frame_;
  // Global references; must be deleted in Release().
  std::vector<jobject> input_buffers_;
};
1365
MediaCodecVideoEncoder::~MediaCodecVideoEncoder() {
  // Call Release() to ensure no more callbacks to us after we are deleted.
  // (Release(), like the other VideoEncoder methods, runs synchronously on
  // |codec_thread_|.)
  Release();
}
1370
// Constructs the C++ wrapper together with its Java
// org.webrtc.MediaCodecVideoEncoder peer, caches every JNI method/field ID
// used later, and starts the dedicated codec thread.  CHECK-fails if any JNI
// lookup throws.
MediaCodecVideoEncoder::MediaCodecVideoEncoder(JNIEnv* jni)
  : callback_(NULL),
    inited_(false),
    picture_id_(0),
    codec_thread_(new Thread()),
    j_media_codec_video_encoder_class_(
        jni,
        FindClass(jni, "org/webrtc/MediaCodecVideoEncoder")),
    j_media_codec_video_encoder_(
        jni,
        jni->NewObject(*j_media_codec_video_encoder_class_,
                       GetMethodID(jni,
                                   *j_media_codec_video_encoder_class_,
                                   "<init>",
                                   "()V"))) {
  ScopedLocalRefFrame local_ref_frame(jni);
  // It would be nice to avoid spinning up a new thread per MediaCodec, and
  // instead re-use e.g. the PeerConnectionFactory's |worker_thread_|, but bug
  // 2732 means that deadlocks abound.  This class synchronously trampolines
  // to |codec_thread_|, so if anything else can be coming to _us_ from
  // |codec_thread_|, or from any thread holding the |_sendCritSect| described
  // in the bug, we have a problem.  For now work around that with a dedicated
  // thread.
  codec_thread_->SetName("MediaCodecVideoEncoder", NULL);
  CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoEncoder";

  // Cache method IDs for the Java encoder's entry points up front so the hot
  // encode path never pays for lookups.
  jclass j_output_buffer_info_class =
      FindClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
  j_init_encode_method_ = GetMethodID(jni,
                                      *j_media_codec_video_encoder_class_,
                                      "initEncode",
                                      "(IIII)[Ljava/nio/ByteBuffer;");
  j_dequeue_input_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_encoder_class_, "dequeueInputBuffer", "()I");
  j_encode_method_ = GetMethodID(
      jni, *j_media_codec_video_encoder_class_, "encode", "(ZIIJ)Z");
  j_release_method_ =
      GetMethodID(jni, *j_media_codec_video_encoder_class_, "release", "()V");
  j_set_rates_method_ = GetMethodID(
      jni, *j_media_codec_video_encoder_class_, "setRates", "(II)Z");
  j_dequeue_output_buffer_method_ =
      GetMethodID(jni,
                  *j_media_codec_video_encoder_class_,
                  "dequeueOutputBuffer",
                  "()Lorg/webrtc/MediaCodecVideoEncoder$OutputBufferInfo;");
  j_release_output_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_encoder_class_, "releaseOutputBuffer", "(I)Z");

  // Cache field IDs read back from the Java encoder and OutputBufferInfo.
  j_color_format_field_ =
      GetFieldID(jni, *j_media_codec_video_encoder_class_, "colorFormat", "I");
  j_info_index_field_ =
      GetFieldID(jni, j_output_buffer_info_class, "index", "I");
  j_info_buffer_field_ = GetFieldID(
      jni, j_output_buffer_info_class, "buffer", "Ljava/nio/ByteBuffer;");
  j_info_is_key_frame_field_ =
      GetFieldID(jni, j_output_buffer_info_class, "isKeyFrame", "Z");
  j_info_presentation_timestamp_us_field_ = GetFieldID(
      jni, j_output_buffer_info_class, "presentationTimestampUs", "J");
  CHECK_EXCEPTION(jni) << "MediaCodecVideoEncoder ctor failed";
  AllowBlockingCalls();
}
1432
1433 int32_t MediaCodecVideoEncoder::InitEncode(
1434     const webrtc::VideoCodec* codec_settings,
1435     int32_t /* number_of_cores */,
1436     uint32_t /* max_payload_size */) {
1437   // Factory should guard against other codecs being used with us.
1438   CHECK(codec_settings->codecType == kVideoCodecVP8) << "Unsupported codec";
1439
1440   return codec_thread_->Invoke<int32_t>(
1441       Bind(&MediaCodecVideoEncoder::InitEncodeOnCodecThread,
1442            this,
1443            codec_settings->width,
1444            codec_settings->height,
1445            codec_settings->startBitrate,
1446            codec_settings->maxFramerate));
1447 }
1448
1449 int32_t MediaCodecVideoEncoder::Encode(
1450     const webrtc::I420VideoFrame& frame,
1451     const webrtc::CodecSpecificInfo* /* codec_specific_info */,
1452     const std::vector<webrtc::VideoFrameType>* frame_types) {
1453   return codec_thread_->Invoke<int32_t>(Bind(
1454       &MediaCodecVideoEncoder::EncodeOnCodecThread, this, frame, frame_types));
1455 }
1456
1457 int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallback(
1458     webrtc::EncodedImageCallback* callback) {
1459   return codec_thread_->Invoke<int32_t>(
1460       Bind(&MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread,
1461            this,
1462            callback));
1463 }
1464
// Releases the Java MediaCodec and all associated resources on the codec
// thread.  Safe to call multiple times (ReleaseOnCodecThread is a no-op when
// not inited).
int32_t MediaCodecVideoEncoder::Release() {
  return codec_thread_->Invoke<int32_t>(
      Bind(&MediaCodecVideoEncoder::ReleaseOnCodecThread, this));
}
1469
// Packet-loss/RTT feedback is deliberately ignored by this encoder.
int32_t MediaCodecVideoEncoder::SetChannelParameters(uint32_t /* packet_loss */,
                                                     int /* rtt */) {
  return WEBRTC_VIDEO_CODEC_OK;
}
1474
1475 int32_t MediaCodecVideoEncoder::SetRates(uint32_t new_bit_rate,
1476                                          uint32_t frame_rate) {
1477   return codec_thread_->Invoke<int32_t>(
1478       Bind(&MediaCodecVideoEncoder::SetRatesOnCodecThread,
1479            this,
1480            new_bit_rate,
1481            frame_rate));
1482 }
1483
1484 void MediaCodecVideoEncoder::OnMessage(rtc::Message* msg) {
1485   JNIEnv* jni = AttachCurrentThreadIfNeeded();
1486   ScopedLocalRefFrame local_ref_frame(jni);
1487
1488   // We only ever send one message to |this| directly (not through a Bind()'d
1489   // functor), so expect no ID/data.
1490   CHECK(!msg->message_id) << "Unexpected message!";
1491   CHECK(!msg->pdata) << "Unexpected message!";
1492   CheckOnCodecThread();
1493   if (!inited_) {
1494     return;
1495   }
1496
1497   // It would be nice to recover from a failure here if one happened, but it's
1498   // unclear how to signal such a failure to the app, so instead we stay silent
1499   // about it and let the next app-called API method reveal the borkedness.
1500   DeliverPendingOutputs(jni);
1501   codec_thread_->PostDelayed(kMediaCodecPollMs, this);
1502 }
1503
// CHECK-fails unless the caller is running on |codec_thread_|.
void MediaCodecVideoEncoder::CheckOnCodecThread() {
  CHECK(codec_thread_ == ThreadManager::Instance()->CurrentThread())
      << "Running on wrong thread!";
}
1508
// Hard-resets the codec after a failure: full Release() followed by re-init
// with the last-used dimensions.  Passing 0 kbps/fps makes
// InitEncodeOnCodecThread reuse the last explicitly-set rates.  A failed
// re-init is deliberately ignored; see the TODO below.
void MediaCodecVideoEncoder::ResetCodec() {
  ALOGE("ResetCodec");
  if (Release() != WEBRTC_VIDEO_CODEC_OK ||
      codec_thread_->Invoke<int32_t>(Bind(
          &MediaCodecVideoEncoder::InitEncodeOnCodecThread, this,
          width_, height_, 0, 0)) != WEBRTC_VIDEO_CODEC_OK) {
    // TODO(fischman): wouldn't it be nice if there was a way to gracefully
    // degrade to a SW encoder at this point?  There isn't one AFAICT :(
    // https://code.google.com/p/webrtc/issues/detail?id=2920
  }
}
1520
// Runs on |codec_thread_|.  Configures and starts the Java MediaCodec
// encoder.  A |kbps| or |fps| of 0 means "reuse the last explicitly-set
// value" (used by ResetCodec()).  Returns WEBRTC_VIDEO_CODEC_OK on success,
// WEBRTC_VIDEO_CODEC_ERROR if the Java side failed to start or reported an
// unsupported color format.
int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
    int width, int height, int kbps, int fps) {
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);

  ALOGD("InitEncodeOnCodecThread %d x %d. Bitrate: %d kbps. Fps: %d",
      width, height, kbps, fps);
  // Zero means "keep the previous setting".
  if (kbps == 0) {
    kbps = last_set_bitrate_kbps_;
  }
  if (fps == 0) {
    fps = last_set_fps_;
  }

  width_ = width;
  height_ = height;
  last_set_bitrate_kbps_ = kbps;
  last_set_fps_ = fps;
  // I420 frame size: full-resolution Y plane plus two quarter-size chroma
  // planes.
  yuv_size_ = width_ * height_ * 3 / 2;
  // Reset all statistics and bookkeeping for a fresh encode session.
  frames_received_ = 0;
  frames_dropped_ = 0;
  frames_resolution_update_ = 0;
  frames_in_queue_ = 0;
  current_timestamp_us_ = 0;
  start_time_ms_ = GetCurrentTimeMs();
  current_frames_ = 0;
  current_bytes_ = 0;
  current_encoding_time_ms_ = 0;
  last_input_timestamp_ms_ = -1;
  last_output_timestamp_ms_ = -1;
  timestamps_.clear();
  render_times_ms_.clear();
  frame_rtc_times_ms_.clear();
  drop_next_input_frame_ = false;
  // Random 15-bit starting picture ID (masked to keep the top bit clear).
  picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF;
  // We enforce no extra stride/padding in the format creation step.
  jobjectArray input_buffers = reinterpret_cast<jobjectArray>(
      jni->CallObjectMethod(*j_media_codec_video_encoder_,
                            j_init_encode_method_,
                            width_,
                            height_,
                            kbps,
                            fps));
  CHECK_EXCEPTION(jni);
  if (IsNull(jni, input_buffers))
    return WEBRTC_VIDEO_CODEC_ERROR;

  inited_ = true;
  // Map the MediaCodec color format reported by the Java side to a libyuv
  // FOURCC so EncodeOnCodecThread can convert incoming I420 frames.
  switch (GetIntField(jni, *j_media_codec_video_encoder_,
      j_color_format_field_)) {
    case COLOR_FormatYUV420Planar:
      encoder_fourcc_ = libyuv::FOURCC_YU12;
      break;
    case COLOR_FormatYUV420SemiPlanar:
    case COLOR_QCOM_FormatYUV420SemiPlanar:
    case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m:
      encoder_fourcc_ = libyuv::FOURCC_NV12;
      break;
    default:
      LOG(LS_ERROR) << "Wrong color format.";
      return WEBRTC_VIDEO_CODEC_ERROR;
  }
  size_t num_input_buffers = jni->GetArrayLength(input_buffers);
  CHECK(input_buffers_.empty())
      << "Unexpected double InitEncode without Release";
  input_buffers_.resize(num_input_buffers);
  // Pin each direct input ByteBuffer with a global ref (released in
  // ReleaseOnCodecThread) and verify it can hold a full I420 frame.
  for (size_t i = 0; i < num_input_buffers; ++i) {
    input_buffers_[i] =
        jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
    int64 yuv_buffer_capacity =
        jni->GetDirectBufferCapacity(input_buffers_[i]);
    CHECK_EXCEPTION(jni);
    CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity";
  }
  CHECK_EXCEPTION(jni);

  // Arm the periodic output poll (serviced in OnMessage).
  codec_thread_->PostDelayed(kMediaCodecPollMs, this);
  return WEBRTC_VIDEO_CODEC_OK;
}
1601
// Runs on |codec_thread_|.  Converts |frame| to the codec's color format,
// queues it into the Java MediaCodec, and drains any completed output.
// May silently drop the frame (still returning WEBRTC_VIDEO_CODEC_OK) when
// the encoder is behind, out of input buffers, or mid resolution change.
int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
    const webrtc::I420VideoFrame& frame,
    const std::vector<webrtc::VideoFrameType>* frame_types) {
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);

  if (!inited_) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  frames_received_++;
  if (!DeliverPendingOutputs(jni)) {
    ResetCodec();
    // Continue as if everything's fine.
  }

  // Honor a drop request made by the previous Encoded() callback (see
  // DeliverPendingOutputs).
  if (drop_next_input_frame_) {
    ALOGV("Encoder drop frame - failed callback.");
    drop_next_input_frame_ = false;
    return WEBRTC_VIDEO_CODEC_OK;
  }

  CHECK(frame_types->size() == 1) << "Unexpected stream count";
  if (frame.width() != width_ || frame.height() != height_) {
    frames_resolution_update_++;
    ALOGD("Unexpected frame resolution change from %d x %d to %d x %d",
        width_, height_, frame.width(), frame.height());
    if (frames_resolution_update_ > 3) {
      // Reset codec if we received more than 3 frames with new resolution.
      width_ = frame.width();
      height_ = frame.height();
      frames_resolution_update_ = 0;
      ResetCodec();
    }
    return WEBRTC_VIDEO_CODEC_OK;
  }
  frames_resolution_update_ = 0;

  bool key_frame = frame_types->front() != webrtc::kDeltaFrame;

  // Check if we accumulated too many frames in encoder input buffers
  // or the encoder latency exceeds 70 ms and drop frame if so.
  if (frames_in_queue_ > 0 && last_input_timestamp_ms_ >= 0) {
    int encoder_latency_ms = last_input_timestamp_ms_ -
        last_output_timestamp_ms_;
    if (frames_in_queue_ > 2 || encoder_latency_ms > 70) {
      ALOGD("Drop frame - encoder is behind by %d ms. Q size: %d",
          encoder_latency_ms, frames_in_queue_);
      frames_dropped_++;
      return WEBRTC_VIDEO_CODEC_OK;
    }
  }

  int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_,
                                                j_dequeue_input_buffer_method_);
  CHECK_EXCEPTION(jni);
  if (j_input_buffer_index == -1) {
    // Video codec falls behind - no input buffer available.
    ALOGV("Encoder drop frame - no input buffers available");
    frames_dropped_++;
    return WEBRTC_VIDEO_CODEC_OK;  // TODO(fischman): see webrtc bug 2887.
  }
  if (j_input_buffer_index == -2) {
    // NOTE(review): -2 appears to signal a hard failure on the Java side
    // (as opposed to -1 meaning "try again later") — confirm against
    // MediaCodecVideoEncoder.java.
    ResetCodec();
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  ALOGV("Encode frame # %d. Buffer # %d. TS: %lld.",
      frames_received_, j_input_buffer_index, current_timestamp_us_ / 1000);

  jobject j_input_buffer = input_buffers_[j_input_buffer_index];
  uint8* yuv_buffer =
      reinterpret_cast<uint8*>(jni->GetDirectBufferAddress(j_input_buffer));
  CHECK_EXCEPTION(jni);
  CHECK(yuv_buffer) << "Indirect buffer??";
  // Convert the I420 input into the codec's format (YU12 or NV12 per
  // |encoder_fourcc_|), writing directly into the MediaCodec input buffer.
  CHECK(!libyuv::ConvertFromI420(
          frame.buffer(webrtc::kYPlane), frame.stride(webrtc::kYPlane),
          frame.buffer(webrtc::kUPlane), frame.stride(webrtc::kUPlane),
          frame.buffer(webrtc::kVPlane), frame.stride(webrtc::kVPlane),
          yuv_buffer, width_,
          width_, height_,
          encoder_fourcc_))
      << "ConvertFromI420 failed";
  last_input_timestamp_ms_ = current_timestamp_us_ / 1000;
  frames_in_queue_++;

  // Save input image timestamps for later output
  timestamps_.push_back(frame.timestamp());
  render_times_ms_.push_back(frame.render_time_ms());
  frame_rtc_times_ms_.push_back(GetCurrentTimeMs());

  bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
                                              j_encode_method_,
                                              key_frame,
                                              j_input_buffer_index,
                                              yuv_size_,
                                              current_timestamp_us_);
  CHECK_EXCEPTION(jni);
  // Timestamps are synthesized assuming a steady |last_set_fps_| cadence.
  current_timestamp_us_ += 1000000 / last_set_fps_;

  if (!encode_status || !DeliverPendingOutputs(jni)) {
    ResetCodec();
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  return WEBRTC_VIDEO_CODEC_OK;
}
1709
1710 int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread(
1711     webrtc::EncodedImageCallback* callback) {
1712   CheckOnCodecThread();
1713   JNIEnv* jni = AttachCurrentThreadIfNeeded();
1714   ScopedLocalRefFrame local_ref_frame(jni);
1715   callback_ = callback;
1716   return WEBRTC_VIDEO_CODEC_OK;
1717 }
1718
1719 int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() {
1720   if (!inited_) {
1721     return WEBRTC_VIDEO_CODEC_OK;
1722   }
1723   CheckOnCodecThread();
1724   JNIEnv* jni = AttachCurrentThreadIfNeeded();
1725   ALOGD("EncoderRelease: Frames received: %d. Frames dropped: %d.",
1726       frames_received_,frames_dropped_);
1727   ScopedLocalRefFrame local_ref_frame(jni);
1728   for (size_t i = 0; i < input_buffers_.size(); ++i)
1729     jni->DeleteGlobalRef(input_buffers_[i]);
1730   input_buffers_.clear();
1731   jni->CallVoidMethod(*j_media_codec_video_encoder_, j_release_method_);
1732   CHECK_EXCEPTION(jni);
1733   rtc::MessageQueueManager::Clear(this);
1734   inited_ = false;
1735   return WEBRTC_VIDEO_CODEC_OK;
1736 }
1737
// Runs on |codec_thread_|.  Applies a new target bitrate (kbps) and frame
// rate.  Zero values leave the corresponding previous setting untouched; if
// both values already match the current settings this is a no-op.  A failed
// Java setRates() call triggers a full codec reset.
// NOTE(review): |new_bit_rate|/|frame_rate| are unsigned while the
// last_set_* members they're compared against look signed — confirm the
// member types; mixed-sign comparison is suspicious but likely benign here.
int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate,
                                                      uint32_t frame_rate) {
  CheckOnCodecThread();
  if (last_set_bitrate_kbps_ == new_bit_rate &&
      last_set_fps_ == frame_rate) {
    return WEBRTC_VIDEO_CODEC_OK;
  }
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  if (new_bit_rate > 0) {
    last_set_bitrate_kbps_ = new_bit_rate;
  }
  if (frame_rate > 0) {
    last_set_fps_ = frame_rate;
  }
  bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
                                       j_set_rates_method_,
                                       last_set_bitrate_kbps_,
                                       last_set_fps_);
  CHECK_EXCEPTION(jni);
  if (!ret) {
    ResetCodec();
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  return WEBRTC_VIDEO_CODEC_OK;
}
1764
// Reads OutputBufferInfo.index from the Java object.
int MediaCodecVideoEncoder::GetOutputBufferInfoIndex(
    JNIEnv* jni,
    jobject j_output_buffer_info) {
  return GetIntField(jni, j_output_buffer_info, j_info_index_field_);
}
1770
// Reads OutputBufferInfo.buffer (a direct ByteBuffer) from the Java object.
jobject MediaCodecVideoEncoder::GetOutputBufferInfoBuffer(
    JNIEnv* jni,
    jobject j_output_buffer_info) {
  return GetObjectField(jni, j_output_buffer_info, j_info_buffer_field_);
}
1776
// Reads OutputBufferInfo.isKeyFrame from the Java object.
bool MediaCodecVideoEncoder::GetOutputBufferInfoIsKeyFrame(
    JNIEnv* jni,
    jobject j_output_buffer_info) {
  return GetBooleanField(jni, j_output_buffer_info, j_info_is_key_frame_field_);
}
1782
// Reads OutputBufferInfo.presentationTimestampUs from the Java object.
jlong MediaCodecVideoEncoder::GetOutputBufferInfoPresentationTimestampUs(
    JNIEnv* jni,
    jobject j_output_buffer_info) {
  return GetLongField(
      jni, j_output_buffer_info, j_info_presentation_timestamp_us_field_);
}
1789
1790 bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
1791   while (true) {
1792     jobject j_output_buffer_info = jni->CallObjectMethod(
1793         *j_media_codec_video_encoder_, j_dequeue_output_buffer_method_);
1794     CHECK_EXCEPTION(jni);
1795     if (IsNull(jni, j_output_buffer_info)) {
1796       break;
1797     }
1798
1799     int output_buffer_index =
1800         GetOutputBufferInfoIndex(jni, j_output_buffer_info);
1801     if (output_buffer_index == -1) {
1802       ResetCodec();
1803       return false;
1804     }
1805
1806     // Get frame timestamps from a queue.
1807     last_output_timestamp_ms_ =
1808         GetOutputBufferInfoPresentationTimestampUs(jni, j_output_buffer_info) /
1809         1000;
1810     int32_t timestamp = timestamps_.front();
1811     timestamps_.erase(timestamps_.begin());
1812     int64_t render_time_ms = render_times_ms_.front();
1813     render_times_ms_.erase(render_times_ms_.begin());
1814     int64_t frame_encoding_time_ms = GetCurrentTimeMs() -
1815         frame_rtc_times_ms_.front();
1816     frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
1817     frames_in_queue_--;
1818
1819     // Extract payload and key frame flag.
1820     int32_t callback_status = 0;
1821     jobject j_output_buffer =
1822         GetOutputBufferInfoBuffer(jni, j_output_buffer_info);
1823     bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info);
1824     size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer);
1825     uint8* payload = reinterpret_cast<uint8_t*>(
1826         jni->GetDirectBufferAddress(j_output_buffer));
1827     CHECK_EXCEPTION(jni);
1828
1829     ALOGV("Encoder got output buffer # %d. Size: %d. TS: %lld. Latency: %lld."
1830         " EncTime: %lld",
1831         output_buffer_index, payload_size, last_output_timestamp_ms_,
1832         last_input_timestamp_ms_ - last_output_timestamp_ms_,
1833         frame_encoding_time_ms);
1834
1835     // Calculate and print encoding statistics - every 3 seconds.
1836     current_frames_++;
1837     current_bytes_ += payload_size;
1838     current_encoding_time_ms_ += frame_encoding_time_ms;
1839     int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
1840     if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
1841         current_frames_ > 0) {
1842       ALOGD("Encoder bitrate: %d, target: %d kbps, fps: %d,"
1843           " encTime: %d for last %d ms",
1844           current_bytes_ * 8 / statistic_time_ms,
1845           last_set_bitrate_kbps_,
1846           (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms,
1847           current_encoding_time_ms_ / current_frames_, statistic_time_ms);
1848       start_time_ms_ = GetCurrentTimeMs();
1849       current_frames_ = 0;
1850       current_bytes_= 0;
1851       current_encoding_time_ms_ = 0;
1852     }
1853
1854     // Callback - return encoded frame.
1855     if (callback_) {
1856       scoped_ptr<webrtc::EncodedImage> image(
1857           new webrtc::EncodedImage(payload, payload_size, payload_size));
1858       image->_encodedWidth = width_;
1859       image->_encodedHeight = height_;
1860       image->_timeStamp = timestamp;
1861       image->capture_time_ms_ = render_time_ms;
1862       image->_frameType = (key_frame ? webrtc::kKeyFrame : webrtc::kDeltaFrame);
1863       image->_completeFrame = true;
1864
1865       webrtc::CodecSpecificInfo info;
1866       memset(&info, 0, sizeof(info));
1867       info.codecType = kVideoCodecVP8;
1868       info.codecSpecific.VP8.pictureId = picture_id_;
1869       info.codecSpecific.VP8.nonReference = false;
1870       info.codecSpecific.VP8.simulcastIdx = 0;
1871       info.codecSpecific.VP8.temporalIdx = webrtc::kNoTemporalIdx;
1872       info.codecSpecific.VP8.layerSync = false;
1873       info.codecSpecific.VP8.tl0PicIdx = webrtc::kNoTl0PicIdx;
1874       info.codecSpecific.VP8.keyIdx = webrtc::kNoKeyIdx;
1875       picture_id_ = (picture_id_ + 1) & 0x7FFF;
1876
1877       // Generate a header describing a single fragment.
1878       webrtc::RTPFragmentationHeader header;
1879       memset(&header, 0, sizeof(header));
1880       header.VerifyAndAllocateFragmentationHeader(1);
1881       header.fragmentationOffset[0] = 0;
1882       header.fragmentationLength[0] = image->_length;
1883       header.fragmentationPlType[0] = 0;
1884       header.fragmentationTimeDiff[0] = 0;
1885
1886       callback_status = callback_->Encoded(*image, &info, &header);
1887     }
1888
1889     // Return output buffer back to the encoder.
1890     bool success = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
1891                                           j_release_output_buffer_method_,
1892                                           output_buffer_index);
1893     CHECK_EXCEPTION(jni);
1894     if (!success) {
1895       ResetCodec();
1896       return false;
1897     }
1898
1899     if (callback_status > 0) {
1900       drop_next_input_frame_ = true;
1901     // Theoretically could handle callback_status<0 here, but unclear what that
1902     // would mean for us.
1903     }
1904   }
1905
1906   return true;
1907 }
1908
// Simplest-possible implementation of an encoder factory, churns out
// MediaCodecVideoEncoders on demand (or errors, if that's not possible).
class MediaCodecVideoEncoderFactory
    : public cricket::WebRtcVideoEncoderFactory {
 public:
  MediaCodecVideoEncoderFactory();
  virtual ~MediaCodecVideoEncoderFactory();

  // WebRtcVideoEncoderFactory implementation.
  // Returns a new HW encoder, or NULL for unsupported types / platforms.
  virtual webrtc::VideoEncoder* CreateVideoEncoder(webrtc::VideoCodecType type)
      OVERRIDE;
  virtual const std::vector<VideoCodec>& codecs() const OVERRIDE;
  virtual void DestroyVideoEncoder(webrtc::VideoEncoder* encoder) OVERRIDE;

 private:
  // Empty if platform support is lacking, const after ctor returns.
  std::vector<VideoCodec> supported_codecs_;
};
1927
1928 MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() {
1929   JNIEnv* jni = AttachCurrentThreadIfNeeded();
1930   ScopedLocalRefFrame local_ref_frame(jni);
1931   jclass j_encoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoEncoder");
1932   bool is_platform_supported = jni->CallStaticBooleanMethod(
1933       j_encoder_class,
1934       GetStaticMethodID(jni, j_encoder_class, "isPlatformSupported", "()Z"));
1935   CHECK_EXCEPTION(jni);
1936   if (!is_platform_supported)
1937     return;
1938
1939   // Wouldn't it be nice if MediaCodec exposed the maximum capabilities of the
1940   // encoder?  Sure would be.  Too bad it doesn't.  So we hard-code some
1941   // reasonable defaults.
1942   supported_codecs_.push_back(
1943       VideoCodec(kVideoCodecVP8, "VP8", 1280, 1280, 30));
1944 }
1945
// Nothing to tear down explicitly; |supported_codecs_| cleans itself up.
MediaCodecVideoEncoderFactory::~MediaCodecVideoEncoderFactory() {}
1947
1948 webrtc::VideoEncoder* MediaCodecVideoEncoderFactory::CreateVideoEncoder(
1949     webrtc::VideoCodecType type) {
1950   if (type != kVideoCodecVP8 || supported_codecs_.empty())
1951     return NULL;
1952   return new MediaCodecVideoEncoder(AttachCurrentThreadIfNeeded());
1953 }
1954
// Returns the (possibly empty) list of codecs this factory can build.
const std::vector<MediaCodecVideoEncoderFactory::VideoCodec>&
MediaCodecVideoEncoderFactory::codecs() const {
  return supported_codecs_;
}
1959
// Destroys an encoder previously handed out by CreateVideoEncoder().
void MediaCodecVideoEncoderFactory::DestroyVideoEncoder(
    webrtc::VideoEncoder* encoder) {
  delete encoder;
}
1964
// Android MediaCodec-backed video decoder.  Mirrors the encoder above: all
// MediaCodec interaction is trampolined to a dedicated |codec_thread_|.
class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
                               public rtc::MessageHandler {
 public:
  explicit MediaCodecVideoDecoder(JNIEnv* jni);
  virtual ~MediaCodecVideoDecoder();

  // Installs (or clears) the class-wide EGL context used for HW surface
  // decoding.  A NULL/invalid context disables surface decoding.
  static int SetAndroidObjects(JNIEnv* jni, jobject render_egl_context);

  virtual int32_t InitDecode(const VideoCodec* codecSettings,
      int32_t numberOfCores) OVERRIDE;

  virtual int32_t
  Decode(const EncodedImage& inputImage, bool missingFrames,
         const RTPFragmentationHeader* fragmentation,
         const CodecSpecificInfo* codecSpecificInfo = NULL,
         int64_t renderTimeMs = -1) OVERRIDE;

  virtual int32_t RegisterDecodeCompleteCallback(
      DecodedImageCallback* callback) OVERRIDE;

  virtual int32_t Release() OVERRIDE;

  virtual int32_t Reset() OVERRIDE;
  // rtc::MessageHandler implementation.
  virtual void OnMessage(rtc::Message* msg) OVERRIDE;

 private:
  // CHECK-fail if not running on |codec_thread_|.
  void CheckOnCodecThread();

  int32_t InitDecodeOnCodecThread();
  int32_t ReleaseOnCodecThread();
  int32_t DecodeOnCodecThread(const EncodedImage& inputImage);
  // Deliver any outputs pending in the MediaCodec to our |callback_| and return
  // true on success.
  bool DeliverPendingOutputs(JNIEnv* jni, int dequeue_timeout_us);


  bool key_frame_required_;  // True until a complete key frame is seen.
  bool inited_;
  bool use_surface_;  // HW surface output; requires |render_egl_context_|.
  int error_count_;  // Hard errors seen; >1 triggers SW-codec fallback.
  VideoCodec codec_;
  I420VideoFrame decoded_image_;
  NativeHandleImpl native_handle_;
  DecodedImageCallback* callback_;
  int frames_received_;  // Number of frames received by decoder.
  int frames_decoded_;  // Number of frames decoded by decoder
  int64_t start_time_ms_;  // Start time for statistics.
  int current_frames_;  // Number of frames in the current statistics interval.
  int current_bytes_;  // Encoded bytes in the current statistics interval.
  int current_decoding_time_ms_;  // Overall decoding time in the current second
  uint32_t max_pending_frames_;  // Maximum number of pending input frames
  std::vector<int32_t> timestamps_;
  std::vector<int64_t> ntp_times_ms_;
  std::vector<int64_t> frame_rtc_times_ms_;  // Time when video frame is sent to
                                             // decoder input.

  // State that is constant for the lifetime of this object once the ctor
  // returns.
  scoped_ptr<Thread> codec_thread_;  // Thread on which to operate MediaCodec.
  ScopedGlobalRef<jclass> j_media_codec_video_decoder_class_;
  ScopedGlobalRef<jobject> j_media_codec_video_decoder_;
  // Cached JNI method IDs for the Java decoder's entry points.
  jmethodID j_init_decode_method_;
  jmethodID j_release_method_;
  jmethodID j_dequeue_input_buffer_method_;
  jmethodID j_queue_input_buffer_method_;
  jmethodID j_dequeue_output_buffer_method_;
  jmethodID j_release_output_buffer_method_;
  // MediaCodecVideoDecoder fields.
  jfieldID j_input_buffers_field_;
  jfieldID j_output_buffers_field_;
  jfieldID j_color_format_field_;
  jfieldID j_width_field_;
  jfieldID j_height_field_;
  jfieldID j_stride_field_;
  jfieldID j_slice_height_field_;
  jfieldID j_surface_texture_field_;
  jfieldID j_textureID_field_;
  // MediaCodecVideoDecoder.DecoderOutputBufferInfo fields.
  jfieldID j_info_index_field_;
  jfieldID j_info_offset_field_;
  jfieldID j_info_size_field_;
  jfieldID j_info_presentation_timestamp_us_field_;

  // Global references; must be deleted in Release().
  std::vector<jobject> input_buffers_;
  jobject surface_texture_;
  jobject previous_surface_texture_;

  // Render EGL context.
  static jobject render_egl_context_;
};
2058
// Class-wide EGL context for surface decoding, set via SetAndroidObjects().
// NULL (the default) disables HW surface output in every decoder instance.
jobject MediaCodecVideoDecoder::render_egl_context_ = NULL;
2060
// Installs the EGL context used for HW surface decoding, replacing (and
// releasing the global ref of) any previously-installed one.  Passing null,
// or an object that is not an android.opengl.EGLContext, leaves the context
// NULL and thereby disables surface decoding.  Always returns 0.
int MediaCodecVideoDecoder::SetAndroidObjects(JNIEnv* jni,
    jobject render_egl_context) {
  // Release the previous global ref before installing the new context.
  if (render_egl_context_) {
    jni->DeleteGlobalRef(render_egl_context_);
  }
  if (IsNull(jni, render_egl_context)) {
    render_egl_context_ = NULL;
  } else {
    render_egl_context_ = jni->NewGlobalRef(render_egl_context);
    CHECK_EXCEPTION(jni) << "error calling NewGlobalRef for EGL Context.";
    jclass j_egl_context_class = FindClass(jni, "android/opengl/EGLContext");
    // Reject objects of the wrong Java type rather than crashing later.
    if (!jni->IsInstanceOf(render_egl_context_, j_egl_context_class)) {
      ALOGE("Wrong EGL Context.");
      jni->DeleteGlobalRef(render_egl_context_);
      render_egl_context_ = NULL;
    }
  }
  if (render_egl_context_ == NULL) {
    ALOGD("NULL VideoDecoder EGL context - HW surface decoding is disabled.");
  }
  return 0;
}
2083
// Constructs the C++ wrapper together with its Java
// org.webrtc.MediaCodecVideoDecoder peer, caches every JNI method/field ID
// used later, and starts the dedicated codec thread.  Surface decoding is
// enabled only when a render EGL context was installed beforehand.
MediaCodecVideoDecoder::MediaCodecVideoDecoder(JNIEnv* jni)
  : key_frame_required_(true),
    inited_(false),
    error_count_(0),
    surface_texture_(NULL),
    previous_surface_texture_(NULL),
    codec_thread_(new Thread()),
    j_media_codec_video_decoder_class_(
        jni,
        FindClass(jni, "org/webrtc/MediaCodecVideoDecoder")),
          j_media_codec_video_decoder_(
              jni,
              jni->NewObject(*j_media_codec_video_decoder_class_,
                   GetMethodID(jni,
                              *j_media_codec_video_decoder_class_,
                              "<init>",
                              "()V"))) {
  ScopedLocalRefFrame local_ref_frame(jni);
  codec_thread_->SetName("MediaCodecVideoDecoder", NULL);
  CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoDecoder";

  // Cache method IDs for the Java decoder's entry points.
  j_init_decode_method_ = GetMethodID(
      jni, *j_media_codec_video_decoder_class_, "initDecode",
      "(IIZZLandroid/opengl/EGLContext;)Z");
  j_release_method_ =
      GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V");
  j_dequeue_input_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I");
  j_queue_input_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJ)Z");
  j_dequeue_output_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer",
      "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo;");
  j_release_output_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_decoder_class_, "releaseOutputBuffer", "(IZ)Z");

  // Cache field IDs read back from the Java decoder.
  j_input_buffers_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_,
      "inputBuffers", "[Ljava/nio/ByteBuffer;");
  j_output_buffers_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_,
      "outputBuffers", "[Ljava/nio/ByteBuffer;");
  j_color_format_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_, "colorFormat", "I");
  j_width_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_, "width", "I");
  j_height_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_, "height", "I");
  j_stride_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_, "stride", "I");
  j_slice_height_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I");
  j_textureID_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_, "textureID", "I");
  j_surface_texture_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_, "surfaceTexture",
      "Landroid/graphics/SurfaceTexture;");

  // Cache DecoderOutputBufferInfo field IDs.
  jclass j_decoder_output_buffer_info_class = FindClass(jni,
      "org/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo");
  j_info_index_field_ = GetFieldID(
      jni, j_decoder_output_buffer_info_class, "index", "I");
  j_info_offset_field_ = GetFieldID(
      jni, j_decoder_output_buffer_info_class, "offset", "I");
  j_info_size_field_ = GetFieldID(
      jni, j_decoder_output_buffer_info_class, "size", "I");
  j_info_presentation_timestamp_us_field_ = GetFieldID(
      jni, j_decoder_output_buffer_info_class, "presentationTimestampUs", "J");

  CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed";
  // Surface decoding requires a previously-installed render EGL context.
  use_surface_ = true;
  if (render_egl_context_ == NULL)
    use_surface_ = false;
  memset(&codec_, 0, sizeof(codec_));
  AllowBlockingCalls();
}
2160
2161 MediaCodecVideoDecoder::~MediaCodecVideoDecoder() {
2162   // Call Release() to ensure no more callbacks to us after we are deleted.
2163   Release();
2164   // Delete global references.
2165   JNIEnv* jni = AttachCurrentThreadIfNeeded();
2166   if (previous_surface_texture_ != NULL)
2167     jni->DeleteGlobalRef(previous_surface_texture_);
2168   if (surface_texture_ != NULL)
2169     jni->DeleteGlobalRef(surface_texture_);
2170 }
2171
2172 int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst,
2173     int32_t numberOfCores) {
2174   if (inst == NULL) {
2175     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
2176   }
2177   int ret_val = Release();
2178   if (ret_val < 0) {
2179     return ret_val;
2180   }
2181   // Save VideoCodec instance for later.
2182   if (&codec_ != inst) {
2183     codec_ = *inst;
2184   }
2185   codec_.maxFramerate = (codec_.maxFramerate >= 1) ? codec_.maxFramerate : 1;
2186
2187   // Always start with a complete key frame.
2188   key_frame_required_ = true;
2189   frames_received_ = 0;
2190   frames_decoded_ = 0;
2191
2192   // Call Java init.
2193   return codec_thread_->Invoke<int32_t>(
2194       Bind(&MediaCodecVideoDecoder::InitDecodeOnCodecThread, this));
2195 }
2196
// Performs the actual MediaCodec configuration; must run on |codec_thread_|.
// Returns WEBRTC_VIDEO_CODEC_OK on success, WEBRTC_VIDEO_CODEC_ERROR if the
// Java-side initDecode() fails.
int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  ALOGD("InitDecodeOnCodecThread: %d x %d. Fps: %d. Errors: %d",
      codec_.width, codec_.height, codec_.maxFramerate, error_count_);
  bool use_sw_codec = false;
  if (error_count_ > 1) {
    // If more than one critical errors happen for HW codec, switch to SW codec.
    use_sw_codec = true;
  }

  // Ask the Java MediaCodecVideoDecoder to configure and start the codec.
  bool success = jni->CallBooleanMethod(*j_media_codec_video_decoder_,
                                       j_init_decode_method_,
                                       codec_.width,
                                       codec_.height,
                                       use_sw_codec,
                                       use_surface_,
                                       render_egl_context_);
  CHECK_EXCEPTION(jni);
  if (!success) {
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  inited_ = true;

  // In surface mode allow one in-flight output frame; otherwise wait for
  // every output before queueing further input.
  max_pending_frames_ = 0;
  if (use_surface_) {
    max_pending_frames_ = 1;
  }
  // Reset the per-interval statistics and per-frame timestamp queues.
  start_time_ms_ = GetCurrentTimeMs();
  current_frames_ = 0;
  current_bytes_ = 0;
  current_decoding_time_ms_ = 0;
  timestamps_.clear();
  ntp_times_ms_.clear();
  frame_rtc_times_ms_.clear();

  // Pin global references to the codec's input ByteBuffers so they outlive
  // this local reference frame; released in ReleaseOnCodecThread().
  jobjectArray input_buffers = (jobjectArray)GetObjectField(
      jni, *j_media_codec_video_decoder_, j_input_buffers_field_);
  size_t num_input_buffers = jni->GetArrayLength(input_buffers);
  input_buffers_.resize(num_input_buffers);
  for (size_t i = 0; i < num_input_buffers; ++i) {
    input_buffers_[i] =
        jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
    CHECK_EXCEPTION(jni);
  }

  if (use_surface_) {
    jobject surface_texture = GetObjectField(
        jni, *j_media_codec_video_decoder_, j_surface_texture_field_);
    // The previous SurfaceTexture is kept alive for one more generation
    // before its global ref is dropped — presumably because frames
    // referencing it may still be in flight; TODO(review): confirm.
    if (previous_surface_texture_ != NULL) {
      jni->DeleteGlobalRef(previous_surface_texture_);
    }
    previous_surface_texture_ = surface_texture_;
    surface_texture_ = jni->NewGlobalRef(surface_texture);
  }
  // Start the periodic output polling handled in OnMessage().
  codec_thread_->PostDelayed(kMediaCodecPollMs, this);

  return WEBRTC_VIDEO_CODEC_OK;
}
2257
// Trampolines to the codec thread, which owns all MediaCodec interaction.
int32_t MediaCodecVideoDecoder::Release() {
  return codec_thread_->Invoke<int32_t>(
        Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this));
}
2262
2263 int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() {
2264   if (!inited_) {
2265     return WEBRTC_VIDEO_CODEC_OK;
2266   }
2267   CheckOnCodecThread();
2268   JNIEnv* jni = AttachCurrentThreadIfNeeded();
2269   ALOGD("DecoderRelease: Frames received: %d.", frames_received_);
2270   ScopedLocalRefFrame local_ref_frame(jni);
2271   for (size_t i = 0; i < input_buffers_.size(); i++) {
2272     jni->DeleteGlobalRef(input_buffers_[i]);
2273   }
2274   input_buffers_.clear();
2275   jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_);
2276   CHECK_EXCEPTION(jni);
2277   rtc::MessageQueueManager::Clear(this);
2278   inited_ = false;
2279   return WEBRTC_VIDEO_CODEC_OK;
2280 }
2281
2282
// Fails hard if called from any thread other than |codec_thread_|.
void MediaCodecVideoDecoder::CheckOnCodecThread() {
  CHECK(codec_thread_ == ThreadManager::Instance()->CurrentThread())
      << "Running on wrong thread!";
}
2287
2288 int32_t MediaCodecVideoDecoder::Decode(
2289     const EncodedImage& inputImage,
2290     bool missingFrames,
2291     const RTPFragmentationHeader* fragmentation,
2292     const CodecSpecificInfo* codecSpecificInfo,
2293     int64_t renderTimeMs) {
2294   if (!inited_) {
2295     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
2296   }
2297   if (callback_ == NULL) {
2298     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
2299   }
2300   if (inputImage._buffer == NULL && inputImage._length > 0) {
2301     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
2302   }
2303   // Check if encoded frame dimension has changed.
2304   if ((inputImage._encodedWidth * inputImage._encodedHeight > 0) &&
2305       (inputImage._encodedWidth != codec_.width ||
2306       inputImage._encodedHeight != codec_.height)) {
2307     codec_.width = inputImage._encodedWidth;
2308     codec_.height = inputImage._encodedHeight;
2309     InitDecode(&codec_, 1);
2310   }
2311
2312   // Always start with a complete key frame.
2313   if (key_frame_required_) {
2314     if (inputImage._frameType != webrtc::kKeyFrame) {
2315       return WEBRTC_VIDEO_CODEC_ERROR;
2316     }
2317     if (!inputImage._completeFrame) {
2318       return WEBRTC_VIDEO_CODEC_ERROR;
2319     }
2320     key_frame_required_ = false;
2321   }
2322   if (inputImage._length == 0) {
2323     return WEBRTC_VIDEO_CODEC_ERROR;
2324   }
2325
2326   return codec_thread_->Invoke<int32_t>(Bind(
2327       &MediaCodecVideoDecoder::DecodeOnCodecThread, this, inputImage));
2328 }
2329
2330 int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
2331     const EncodedImage& inputImage) {
2332   static uint8_t yVal_ = 0x7f;
2333
2334   CheckOnCodecThread();
2335   JNIEnv* jni = AttachCurrentThreadIfNeeded();
2336   ScopedLocalRefFrame local_ref_frame(jni);
2337
2338   // Try to drain the decoder and wait until output is not too
2339   // much behind the input.
2340   if (frames_received_ > frames_decoded_ + max_pending_frames_) {
2341     ALOGV("Wait for output...");
2342     if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs * 1000)) {
2343       error_count_++;
2344       Reset();
2345       return WEBRTC_VIDEO_CODEC_ERROR;
2346     }
2347     if (frames_received_ > frames_decoded_ + max_pending_frames_) {
2348       ALOGE("Output buffer dequeue timeout");
2349       error_count_++;
2350       Reset();
2351       return WEBRTC_VIDEO_CODEC_ERROR;
2352     }
2353   }
2354
2355   // Get input buffer.
2356   int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_decoder_,
2357                                                 j_dequeue_input_buffer_method_);
2358   CHECK_EXCEPTION(jni);
2359   if (j_input_buffer_index < 0) {
2360     ALOGE("dequeueInputBuffer error");
2361     error_count_++;
2362     Reset();
2363     return WEBRTC_VIDEO_CODEC_ERROR;
2364   }
2365
2366   // Copy encoded data to Java ByteBuffer.
2367   jobject j_input_buffer = input_buffers_[j_input_buffer_index];
2368   uint8* buffer =
2369       reinterpret_cast<uint8*>(jni->GetDirectBufferAddress(j_input_buffer));
2370   CHECK(buffer) << "Indirect buffer??";
2371   int64 buffer_capacity = jni->GetDirectBufferCapacity(j_input_buffer);
2372   CHECK_EXCEPTION(jni);
2373   if (buffer_capacity < inputImage._length) {
2374     ALOGE("Input frame size %d is bigger than buffer size %d.",
2375         inputImage._length, buffer_capacity);
2376     error_count_++;
2377     Reset();
2378     return WEBRTC_VIDEO_CODEC_ERROR;
2379   }
2380   ALOGV("Decoder frame in # %d. Buffer # %d. Size: %d",
2381       frames_received_, j_input_buffer_index, inputImage._length);
2382   memcpy(buffer, inputImage._buffer, inputImage._length);
2383
2384   // Save input image timestamps for later output.
2385   frames_received_++;
2386   current_bytes_ += inputImage._length;
2387   timestamps_.push_back(inputImage._timeStamp);
2388   ntp_times_ms_.push_back(inputImage.ntp_time_ms_);
2389   frame_rtc_times_ms_.push_back(GetCurrentTimeMs());
2390
2391   // Feed input to decoder.
2392   jlong timestamp_us = (frames_received_ * 1000000) / codec_.maxFramerate;
2393   bool success = jni->CallBooleanMethod(*j_media_codec_video_decoder_,
2394                                         j_queue_input_buffer_method_,
2395                                         j_input_buffer_index,
2396                                         inputImage._length,
2397                                         timestamp_us);
2398   CHECK_EXCEPTION(jni);
2399   if (!success) {
2400     ALOGE("queueInputBuffer error");
2401     error_count_++;
2402     Reset();
2403     return WEBRTC_VIDEO_CODEC_ERROR;
2404   }
2405
2406   // Try to drain the decoder
2407   if (!DeliverPendingOutputs(jni, 0)) {
2408     ALOGE("DeliverPendingOutputs error");
2409     error_count_++;
2410     Reset();
2411     return WEBRTC_VIDEO_CODEC_ERROR;
2412   }
2413
2414   return WEBRTC_VIDEO_CODEC_OK;
2415 }
2416
// Dequeues at most one decoded output buffer (waiting up to
// |dequeue_timeout_us|), converts or wraps it into a frame, delivers it to
// |callback_|, and returns the buffer to the codec.  Returns false on a
// decoder error; returns true when there is nothing to deliver.
bool MediaCodecVideoDecoder::DeliverPendingOutputs(
    JNIEnv* jni, int dequeue_timeout_us) {
  if (frames_received_ <= frames_decoded_) {
    // No need to query for output buffers - decoder is drained.
    return true;
  }
  // Get decoder output.
  jobject j_decoder_output_buffer_info = jni->CallObjectMethod(
      *j_media_codec_video_decoder_,
      j_dequeue_output_buffer_method_,
      dequeue_timeout_us);

  CHECK_EXCEPTION(jni);
  if (IsNull(jni, j_decoder_output_buffer_info)) {
    // No output ready yet; not an error.
    return true;
  }

  // Extract output buffer info from Java DecoderOutputBufferInfo.
  int output_buffer_index =
      GetIntField(jni, j_decoder_output_buffer_info, j_info_index_field_);
  if (output_buffer_index < 0) {
    ALOGE("dequeueOutputBuffer error : %d", output_buffer_index);
    return false;
  }
  int output_buffer_offset =
      GetIntField(jni, j_decoder_output_buffer_info, j_info_offset_field_);
  int output_buffer_size =
      GetIntField(jni, j_decoder_output_buffer_info, j_info_size_field_);
  CHECK_EXCEPTION(jni);

  // Get decoded video frame properties.
  int color_format = GetIntField(jni, *j_media_codec_video_decoder_,
      j_color_format_field_);
  int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_);
  int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_);
  int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_);
  int slice_height = GetIntField(jni, *j_media_codec_video_decoder_,
      j_slice_height_field_);
  int texture_id = GetIntField(jni, *j_media_codec_video_decoder_,
      j_textureID_field_);

  // Extract data from Java ByteBuffer and create output yuv420 frame -
  // for non surface decoding only.
  if (!use_surface_) {
    if (output_buffer_size < width * height * 3 / 2) {
      ALOGE("Insufficient output buffer size: %d", output_buffer_size);
      return false;
    }
    jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField(
        jni, *j_media_codec_video_decoder_, j_output_buffers_field_));
    jobject output_buffer =
        jni->GetObjectArrayElement(output_buffers, output_buffer_index);
    uint8_t* payload = reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(
        output_buffer));
    CHECK_EXCEPTION(jni);
    payload += output_buffer_offset;

    // Create yuv420 frame.
    if (color_format == COLOR_FormatYUV420Planar) {
      // Planar output: point the frame at the Y/U/V planes in place.
      decoded_image_.CreateFrame(
          stride * slice_height, payload,
          (stride * slice_height) / 4, payload + (stride * slice_height),
          (stride * slice_height) / 4, payload + (5 * stride * slice_height / 4),
          width, height,
          stride, stride / 2, stride / 2);
    } else {
      // All other supported formats are nv12.
      decoded_image_.CreateEmptyFrame(width, height, width,
          width / 2, width / 2);
      libyuv::NV12ToI420(
          payload, stride,
          payload + stride * slice_height, stride,
          decoded_image_.buffer(webrtc::kYPlane),
          decoded_image_.stride(webrtc::kYPlane),
          decoded_image_.buffer(webrtc::kUPlane),
          decoded_image_.stride(webrtc::kUPlane),
          decoded_image_.buffer(webrtc::kVPlane),
          decoded_image_.stride(webrtc::kVPlane),
          width, height);
    }
  }

  // Get frame timestamps from a queue - entries were pushed in
  // DecodeOnCodecThread() in submission order.
  int32_t timestamp = timestamps_.front();
  timestamps_.erase(timestamps_.begin());
  int64_t ntp_time_ms = ntp_times_ms_.front();
  ntp_times_ms_.erase(ntp_times_ms_.begin());
  int64_t frame_decoding_time_ms = GetCurrentTimeMs() -
      frame_rtc_times_ms_.front();
  frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());

  ALOGV("Decoder frame out # %d. %d x %d. %d x %d. Color: 0x%x. Size: %d."
      " DecTime: %lld", frames_decoded_, width, height, stride, slice_height,
      color_format, output_buffer_size, frame_decoding_time_ms);

  // Return output buffer back to codec.
  bool success = jni->CallBooleanMethod(
      *j_media_codec_video_decoder_,
      j_release_output_buffer_method_,
      output_buffer_index,
      use_surface_);
  CHECK_EXCEPTION(jni);
  if (!success) {
    ALOGE("releaseOutputBuffer error");
    return false;
  }

  // Calculate and print decoding statistics - every 3 seconds.
  frames_decoded_++;
  current_frames_++;
  current_decoding_time_ms_ += frame_decoding_time_ms;
  int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
  if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
      current_frames_ > 0) {
    ALOGD("Decoder bitrate: %d kbps, fps: %d, decTime: %d for last %d ms",
        current_bytes_ * 8 / statistic_time_ms,
        (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms,
        current_decoding_time_ms_ / current_frames_, statistic_time_ms);
    start_time_ms_ = GetCurrentTimeMs();
    current_frames_ = 0;
    current_bytes_= 0;
    current_decoding_time_ms_ = 0;
  }

  // Callback - output decoded frame.
  int32_t callback_status = WEBRTC_VIDEO_CODEC_OK;
  if (use_surface_) {
    // Surface path: hand out a texture-backed frame instead of pixel data.
    native_handle_.SetTextureObject(surface_texture_, texture_id);
    TextureVideoFrame texture_image(
        &native_handle_, width, height, timestamp, 0);
    texture_image.set_ntp_time_ms(ntp_time_ms);
    callback_status = callback_->Decoded(texture_image);
  } else {
    decoded_image_.set_timestamp(timestamp);
    decoded_image_.set_ntp_time_ms(ntp_time_ms);
    callback_status = callback_->Decoded(decoded_image_);
  }
  if (callback_status > 0) {
    ALOGE("callback error");
  }

  return true;
}
2560
// Stores the sink that decoded frames are delivered to; not owned.
int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback(
    DecodedImageCallback* callback) {
  callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}
2566
// Restarts the codec with the current settings, e.g. after a decode error.
int32_t MediaCodecVideoDecoder::Reset() {
  ALOGD("DecoderReset");
  if (!inited_) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  return InitDecode(&codec_, 1);
}
2574
// Periodic poll posted by InitDecodeOnCodecThread() and by this method:
// drains any ready output buffers, then re-arms itself.
void MediaCodecVideoDecoder::OnMessage(rtc::Message* msg) {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  if (!inited_) {
    return;
  }
  // We only ever send one message to |this| directly (not through a Bind()'d
  // functor), so expect no ID/data.
  CHECK(!msg->message_id) << "Unexpected message!";
  CHECK(!msg->pdata) << "Unexpected message!";
  CheckOnCodecThread();

  if (!DeliverPendingOutputs(jni, 0)) {
    error_count_++;
    Reset();
  }
  codec_thread_->PostDelayed(kMediaCodecPollMs, this);
}
2593
// Factory producing MediaCodecVideoDecoder instances when the platform
// supports HW decode; otherwise CreateVideoDecoder() returns NULL so callers
// fall back to a SW decoder.
class MediaCodecVideoDecoderFactory
    : public cricket::WebRtcVideoDecoderFactory {
 public:
  MediaCodecVideoDecoderFactory();
  virtual ~MediaCodecVideoDecoderFactory();
  // WebRtcVideoDecoderFactory implementation.
  virtual webrtc::VideoDecoder* CreateVideoDecoder(
      webrtc::VideoCodecType type) OVERRIDE;

  virtual void DestroyVideoDecoder(webrtc::VideoDecoder* decoder) OVERRIDE;

 private:
  // Cached result of MediaCodecVideoDecoder.isPlatformSupported(), queried
  // once in the constructor.
  bool is_platform_supported_;
};
2608
2609 MediaCodecVideoDecoderFactory::MediaCodecVideoDecoderFactory() {
2610   JNIEnv* jni = AttachCurrentThreadIfNeeded();
2611   ScopedLocalRefFrame local_ref_frame(jni);
2612   jclass j_decoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoDecoder");
2613   is_platform_supported_ = jni->CallStaticBooleanMethod(
2614       j_decoder_class,
2615       GetStaticMethodID(jni, j_decoder_class, "isPlatformSupported", "()Z"));
2616   CHECK_EXCEPTION(jni);
2617 }
2618
2619 MediaCodecVideoDecoderFactory::~MediaCodecVideoDecoderFactory() {}
2620
2621 webrtc::VideoDecoder* MediaCodecVideoDecoderFactory::CreateVideoDecoder(
2622     webrtc::VideoCodecType type) {
2623   if (type != kVideoCodecVP8 || !is_platform_supported_) {
2624     return NULL;
2625   }
2626   return new MediaCodecVideoDecoder(AttachCurrentThreadIfNeeded());
2627 }
2628
2629
// Frees a decoder previously handed out by CreateVideoDecoder().
void MediaCodecVideoDecoderFactory::DestroyVideoDecoder(
    webrtc::VideoDecoder* decoder) {
  delete decoder;
}
2634
2635 #endif  // #if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
2636
2637 }  // anonymous namespace
2638
2639 // Convenience macro defining JNI-accessible methods in the org.webrtc package.
2640 // Eliminates unnecessary boilerplate and line-wraps, reducing visual clutter.
2641 #define JOW(rettype, name) extern "C" rettype JNIEXPORT JNICALL \
2642   Java_org_webrtc_##name
2643
// One-time process initialization invoked by the JVM when the library is
// loaded: caches the JavaVM, creates the TLS key used to attach native
// threads, initializes SSL, and pins global refs to the Java classes used by
// this file.  Returns the JNI version we require, or -1 if the JVM is too old.
extern "C" jint JNIEXPORT JNICALL JNI_OnLoad(JavaVM *jvm, void *reserved) {
  CHECK(!g_jvm) << "JNI_OnLoad called more than once!";
  g_jvm = jvm;
  CHECK(g_jvm) << "JNI_OnLoad handed NULL?";

  CHECK(!pthread_once(&g_jni_ptr_once, &CreateJNIPtrKey)) << "pthread_once";

  CHECK(rtc::InitializeSSL()) << "Failed to InitializeSSL()";

  JNIEnv* jni;
  if (jvm->GetEnv(reinterpret_cast<void**>(&jni), JNI_VERSION_1_6) != JNI_OK)
    return -1;
  g_class_reference_holder = new ClassReferenceHolder(jni);

  return JNI_VERSION_1_6;
}
2660
2661 extern "C" void JNIEXPORT JNICALL JNI_OnUnLoad(JavaVM *jvm, void *reserved) {
2662   g_class_reference_holder->FreeReferences(AttachCurrentThreadIfNeeded());
2663   delete g_class_reference_holder;
2664   g_class_reference_holder = NULL;
2665   CHECK(rtc::CleanupSSL()) << "Failed to CleanupSSL()";
2666   g_jvm = NULL;
2667 }
2668
2669 static DataChannelInterface* ExtractNativeDC(JNIEnv* jni, jobject j_dc) {
2670   jfieldID native_dc_id = GetFieldID(jni,
2671       GetObjectClass(jni, j_dc), "nativeDataChannel", "J");
2672   jlong j_d = GetLongField(jni, j_dc, native_dc_id);
2673   return reinterpret_cast<DataChannelInterface*>(j_d);
2674 }
2675
2676 JOW(jlong, DataChannel_registerObserverNative)(
2677     JNIEnv* jni, jobject j_dc, jobject j_observer) {
2678   scoped_ptr<DataChannelObserverWrapper> observer(
2679       new DataChannelObserverWrapper(jni, j_observer));
2680   ExtractNativeDC(jni, j_dc)->RegisterObserver(observer.get());
2681   return jlongFromPointer(observer.release());
2682 }
2683
2684 JOW(void, DataChannel_unregisterObserverNative)(
2685     JNIEnv* jni, jobject j_dc, jlong native_observer) {
2686   ExtractNativeDC(jni, j_dc)->UnregisterObserver();
2687   delete reinterpret_cast<DataChannelObserverWrapper*>(native_observer);
2688 }
2689
// Returns the channel's label as a Java String.
JOW(jstring, DataChannel_label)(JNIEnv* jni, jobject j_dc) {
  return JavaStringFromStdString(jni, ExtractNativeDC(jni, j_dc)->label());
}
2693
// Maps the native state enum onto the Java DataChannel.State enum value.
JOW(jobject, DataChannel_state)(JNIEnv* jni, jobject j_dc) {
  return JavaEnumFromIndex(
      jni, "DataChannel$State", ExtractNativeDC(jni, j_dc)->state());
}
2698
JOW(jlong, DataChannel_bufferedAmount)(JNIEnv* jni, jobject j_dc) {
  uint64 buffered_amount = ExtractNativeDC(jni, j_dc)->buffered_amount();
  // jlong is signed; refuse to silently truncate values above int64 max.
  CHECK_LE(buffered_amount, std::numeric_limits<int64>::max())
      << "buffered_amount overflowed jlong!";
  return static_cast<jlong>(buffered_amount);
}
2705
// Initiates channel shutdown; does not release the native object.
JOW(void, DataChannel_close)(JNIEnv* jni, jobject j_dc) {
  ExtractNativeDC(jni, j_dc)->Close();
}
2709
2710 JOW(jboolean, DataChannel_sendNative)(JNIEnv* jni, jobject j_dc,
2711                                       jbyteArray data, jboolean binary) {
2712   jbyte* bytes = jni->GetByteArrayElements(data, NULL);
2713   bool ret = ExtractNativeDC(jni, j_dc)->Send(DataBuffer(
2714       rtc::Buffer(bytes, jni->GetArrayLength(data)),
2715       binary));
2716   jni->ReleaseByteArrayElements(data, bytes, JNI_ABORT);
2717   return ret;
2718 }
2719
// Drops the refcount taken when the channel was handed to Java.
JOW(void, DataChannel_dispose)(JNIEnv* jni, jobject j_dc) {
  CHECK_RELEASE(ExtractNativeDC(jni, j_dc));
}
2723
// Configures webrtc tracing (level filter + output file) and rtc debug log
// severity.  On standalone Android builds the magic path "logcat:" routes
// trace output to logcat instead of a file.
JOW(void, Logging_nativeEnableTracing)(
    JNIEnv* jni, jclass, jstring j_path, jint nativeLevels,
    jint nativeSeverity) {
  std::string path = JavaToStdString(jni, j_path);
  if (nativeLevels != webrtc::kTraceNone) {
    webrtc::Trace::set_level_filter(nativeLevels);
#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
    if (path != "logcat:") {
#endif
      CHECK_EQ(0, webrtc::Trace::SetTraceFile(path.c_str(), false))
          << "SetTraceFile failed";
#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
    } else {
      // Intentionally leak this to avoid needing to reason about its lifecycle.
      // It keeps no state and functions only as a dispatch point.
      static LogcatTraceContext* g_trace_callback = new LogcatTraceContext();
    }
#endif
  }
  rtc::LogMessage::LogToDebug(nativeSeverity);
}
2745
// Destructor natives for objects whose ownership was passed to Java.
// CHECK_RELEASE drops a refcount on a refcounted interface; plain delete
// frees heap-allocated wrappers.

JOW(void, PeerConnection_freePeerConnection)(JNIEnv*, jclass, jlong j_p) {
  CHECK_RELEASE(reinterpret_cast<PeerConnectionInterface*>(j_p));
}

// Frees the observer created by PeerConnectionFactory_nativeCreateObserver.
JOW(void, PeerConnection_freeObserver)(JNIEnv*, jclass, jlong j_p) {
  PCOJava* p = reinterpret_cast<PCOJava*>(j_p);
  delete p;
}

JOW(void, MediaSource_free)(JNIEnv*, jclass, jlong j_p) {
  CHECK_RELEASE(reinterpret_cast<MediaSourceInterface*>(j_p));
}

JOW(void, VideoCapturer_free)(JNIEnv*, jclass, jlong j_p) {
  delete reinterpret_cast<cricket::VideoCapturer*>(j_p);
}

JOW(void, VideoRenderer_freeGuiVideoRenderer)(JNIEnv*, jclass, jlong j_p) {
  delete reinterpret_cast<VideoRendererWrapper*>(j_p);
}

JOW(void, VideoRenderer_freeWrappedVideoRenderer)(JNIEnv*, jclass, jlong j_p) {
  delete reinterpret_cast<JavaVideoRendererWrapper*>(j_p);
}

JOW(void, MediaStreamTrack_free)(JNIEnv*, jclass, jlong j_p) {
  CHECK_RELEASE(reinterpret_cast<MediaStreamTrackInterface*>(j_p));
}
2774
// MediaStream natives: thin delegations from Java MediaStream methods to the
// native MediaStreamInterface.  Add/Remove return whether the operation
// succeeded.

JOW(jboolean, MediaStream_nativeAddAudioTrack)(
    JNIEnv* jni, jclass, jlong pointer, jlong j_audio_track_pointer) {
  return reinterpret_cast<MediaStreamInterface*>(pointer)->AddTrack(
      reinterpret_cast<AudioTrackInterface*>(j_audio_track_pointer));
}

JOW(jboolean, MediaStream_nativeAddVideoTrack)(
    JNIEnv* jni, jclass, jlong pointer, jlong j_video_track_pointer) {
  return reinterpret_cast<MediaStreamInterface*>(pointer)
      ->AddTrack(reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer));
}

JOW(jboolean, MediaStream_nativeRemoveAudioTrack)(
    JNIEnv* jni, jclass, jlong pointer, jlong j_audio_track_pointer) {
  return reinterpret_cast<MediaStreamInterface*>(pointer)->RemoveTrack(
      reinterpret_cast<AudioTrackInterface*>(j_audio_track_pointer));
}

JOW(jboolean, MediaStream_nativeRemoveVideoTrack)(
    JNIEnv* jni, jclass, jlong pointer, jlong j_video_track_pointer) {
  return reinterpret_cast<MediaStreamInterface*>(pointer)->RemoveTrack(
      reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer));
}

// Returns the stream's label as a Java String.
JOW(jstring, MediaStream_nativeLabel)(JNIEnv* jni, jclass, jlong j_p) {
  return JavaStringFromStdString(
      jni, reinterpret_cast<MediaStreamInterface*>(j_p)->label());
}

// Drops the refcount taken when the stream was handed to Java.
JOW(void, MediaStream_free)(JNIEnv*, jclass, jlong j_p) {
  CHECK_RELEASE(reinterpret_cast<MediaStreamInterface*>(j_p));
}
2807
2808 JOW(jlong, PeerConnectionFactory_nativeCreateObserver)(
2809     JNIEnv * jni, jclass, jobject j_observer) {
2810   return (jlong)new PCOJava(jni, j_observer);
2811 }
2812
#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
// Hands the Android Context / EGL context to the capture, render, voice and
// HW-decoder subsystems.  The static portion runs once per process; the
// decoder EGL context is (re)set on every call.  Returns true on success.
JOW(jboolean, PeerConnectionFactory_initializeAndroidGlobals)(
    JNIEnv* jni, jclass, jobject context,
    jboolean initialize_audio, jboolean initialize_video,
    jobject render_egl_context) {
  CHECK(g_jvm) << "JNI_OnLoad failed to run?";
  bool failure = false;
  if (!factory_static_initialized) {
    if (initialize_video) {
      // NOTE(review): these setters appear to return non-zero on failure
      // (accumulated into |failure|) — confirm against their declarations.
      failure |= webrtc::SetCaptureAndroidVM(g_jvm, context);
      failure |= webrtc::SetRenderAndroidVM(g_jvm);
    }
    if (initialize_audio)
      failure |= webrtc::VoiceEngine::SetAndroidObjects(g_jvm, jni, context);
    factory_static_initialized = true;
  }
  if (initialize_video)
    failure |= MediaCodecVideoDecoder::SetAndroidObjects(jni,
        render_egl_context);
  return !failure;
}
#endif  // defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
2835
2836 // Helper struct for working around the fact that CreatePeerConnectionFactory()
2837 // comes in two flavors: either entirely automagical (constructing its own
2838 // threads and deleting them on teardown, but no external codec factory support)
2839 // or entirely manual (requires caller to delete threads after factory
2840 // teardown).  This struct takes ownership of its ctor's arguments to present a
2841 // single thing for Java to hold and eventually free.
class OwnedFactoryAndThreads {
 public:
  // Takes ownership of both threads and of |factory| (released in the dtor).
  OwnedFactoryAndThreads(Thread* worker_thread,
                         Thread* signaling_thread,
                         PeerConnectionFactoryInterface* factory)
      : worker_thread_(worker_thread),
        signaling_thread_(signaling_thread),
        factory_(factory) {}

  ~OwnedFactoryAndThreads() { CHECK_RELEASE(factory_); }

  PeerConnectionFactoryInterface* factory() { return factory_; }

 private:
  // scoped_ptr members delete the threads after the factory is released.
  const scoped_ptr<Thread> worker_thread_;
  const scoped_ptr<Thread> signaling_thread_;
  PeerConnectionFactoryInterface* factory_;  // Const after ctor except dtor.
};
2860
// Creates the PeerConnectionFactory plus its worker/signaling threads and
// returns them to Java as an opaque OwnedFactoryAndThreads handle, freed via
// PeerConnectionFactory_freeFactory.
JOW(jlong, PeerConnectionFactory_nativeCreatePeerConnectionFactory)(
    JNIEnv* jni, jclass) {
  // talk/ assumes pretty widely that the current Thread is ThreadManager'd, but
  // ThreadManager only WrapCurrentThread()s the thread where it is first
  // created.  Since the semantics around when auto-wrapping happens in
  // webrtc/base/ are convoluted, we simply wrap here to avoid having to think
  // about ramifications of auto-wrapping there.
  rtc::ThreadManager::Instance()->WrapCurrentThread();
  webrtc::Trace::CreateTrace();
  Thread* worker_thread = new Thread();
  worker_thread->SetName("worker_thread", NULL);
  Thread* signaling_thread = new Thread();
  signaling_thread->SetName("signaling_thread", NULL);
  CHECK(worker_thread->Start() && signaling_thread->Start())
      << "Failed to start threads";
  // On standalone Android builds, plug in the MediaCodec HW codec factories;
  // elsewhere the factory uses its built-in SW codecs.
  scoped_ptr<cricket::WebRtcVideoEncoderFactory> encoder_factory;
  scoped_ptr<cricket::WebRtcVideoDecoderFactory> decoder_factory;
#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
  encoder_factory.reset(new MediaCodecVideoEncoderFactory());
  decoder_factory.reset(new MediaCodecVideoDecoderFactory());
#endif
  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
      webrtc::CreatePeerConnectionFactory(worker_thread,
                                          signaling_thread,
                                          NULL,
                                          encoder_factory.release(),
                                          decoder_factory.release()));
  OwnedFactoryAndThreads* owned_factory = new OwnedFactoryAndThreads(
      worker_thread, signaling_thread, factory.release());
  return jlongFromPointer(owned_factory);
}
2892
// Destroys the factory plus its owned threads, then balances the
// CreateTrace() done in nativeCreatePeerConnectionFactory().
JOW(void, PeerConnectionFactory_freeFactory)(JNIEnv*, jclass, jlong j_p) {
  delete reinterpret_cast<OwnedFactoryAndThreads*>(j_p);
  webrtc::Trace::ReturnTrace();
}
2897
// Unwraps the PeerConnectionFactoryInterface from the jlong handle held by
// Java (an OwnedFactoryAndThreads pointer).
static PeerConnectionFactoryInterface* factoryFromJava(jlong j_p) {
  return reinterpret_cast<OwnedFactoryAndThreads*>(j_p)->factory();
}
2901
// Factory natives: each creates a refcounted native object and transfers one
// reference to Java as a raw jlong, released later by the matching *_free
// native.

JOW(jlong, PeerConnectionFactory_nativeCreateLocalMediaStream)(
    JNIEnv* jni, jclass, jlong native_factory, jstring label) {
  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
      factoryFromJava(native_factory));
  rtc::scoped_refptr<MediaStreamInterface> stream(
      factory->CreateLocalMediaStream(JavaToStdString(jni, label)));
  return (jlong)stream.release();
}

JOW(jlong, PeerConnectionFactory_nativeCreateVideoSource)(
    JNIEnv* jni, jclass, jlong native_factory, jlong native_capturer,
    jobject j_constraints) {
  scoped_ptr<ConstraintsWrapper> constraints(
      new ConstraintsWrapper(jni, j_constraints));
  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
      factoryFromJava(native_factory));
  rtc::scoped_refptr<VideoSourceInterface> source(
      factory->CreateVideoSource(
          reinterpret_cast<cricket::VideoCapturer*>(native_capturer),
          constraints.get()));
  return (jlong)source.release();
}

JOW(jlong, PeerConnectionFactory_nativeCreateVideoTrack)(
    JNIEnv* jni, jclass, jlong native_factory, jstring id,
    jlong native_source) {
  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
      factoryFromJava(native_factory));
  rtc::scoped_refptr<VideoTrackInterface> track(
      factory->CreateVideoTrack(
          JavaToStdString(jni, id),
          reinterpret_cast<VideoSourceInterface*>(native_source)));
  return (jlong)track.release();
}

JOW(jlong, PeerConnectionFactory_nativeCreateAudioSource)(
    JNIEnv* jni, jclass, jlong native_factory, jobject j_constraints) {
  scoped_ptr<ConstraintsWrapper> constraints(
      new ConstraintsWrapper(jni, j_constraints));
  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
      factoryFromJava(native_factory));
  rtc::scoped_refptr<AudioSourceInterface> source(
      factory->CreateAudioSource(constraints.get()));
  return (jlong)source.release();
}

JOW(jlong, PeerConnectionFactory_nativeCreateAudioTrack)(
    JNIEnv* jni, jclass, jlong native_factory, jstring id,
    jlong native_source) {
  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
      factoryFromJava(native_factory));
  rtc::scoped_refptr<AudioTrackInterface> track(factory->CreateAudioTrack(
      JavaToStdString(jni, id),
      reinterpret_cast<AudioSourceInterface*>(native_source)));
  return (jlong)track.release();
}
2958
// Converts a Java List of ICE-server objects into native
// PeerConnectionInterface::IceServer entries, appended to |ice_servers|.
// Each Java element is expected to expose String fields named
// "uri", "username" and "password" (presumably org.webrtc
// PeerConnection.IceServer — confirm against the Java class).
static void JavaIceServersToJsepIceServers(
    JNIEnv* jni, jobject j_ice_servers,
    PeerConnectionInterface::IceServers* ice_servers) {
  // JNI has no direct List indexing, so walk the list via its Iterator.
  jclass list_class = GetObjectClass(jni, j_ice_servers);
  jmethodID iterator_id = GetMethodID(
      jni, list_class, "iterator", "()Ljava/util/Iterator;");
  jobject iterator = jni->CallObjectMethod(j_ice_servers, iterator_id);
  CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
  jmethodID iterator_has_next = GetMethodID(
      jni, GetObjectClass(jni, iterator), "hasNext", "()Z");
  jmethodID iterator_next = GetMethodID(
      jni, GetObjectClass(jni, iterator), "next", "()Ljava/lang/Object;");
  while (jni->CallBooleanMethod(iterator, iterator_has_next)) {
    // Checks the exception state of the hasNext() call that entered the loop;
    // the final (false) hasNext() is checked after the loop below.
    CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
    jobject j_ice_server = jni->CallObjectMethod(iterator, iterator_next);
    CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
    // Field IDs are looked up per element from the element's own class.
    jclass j_ice_server_class = GetObjectClass(jni, j_ice_server);
    jfieldID j_ice_server_uri_id =
        GetFieldID(jni, j_ice_server_class, "uri", "Ljava/lang/String;");
    jfieldID j_ice_server_username_id =
        GetFieldID(jni, j_ice_server_class, "username", "Ljava/lang/String;");
    jfieldID j_ice_server_password_id =
        GetFieldID(jni, j_ice_server_class, "password", "Ljava/lang/String;");
    jstring uri = reinterpret_cast<jstring>(
        GetObjectField(jni, j_ice_server, j_ice_server_uri_id));
    jstring username = reinterpret_cast<jstring>(
        GetObjectField(jni, j_ice_server, j_ice_server_username_id));
    jstring password = reinterpret_cast<jstring>(
        GetObjectField(jni, j_ice_server, j_ice_server_password_id));
    PeerConnectionInterface::IceServer server;
    server.uri = JavaToStdString(jni, uri);
    server.username = JavaToStdString(jni, username);
    server.password = JavaToStdString(jni, password);
    ice_servers->push_back(server);
  }
  CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
}
2996
2997 JOW(jlong, PeerConnectionFactory_nativeCreatePeerConnection)(
2998     JNIEnv *jni, jclass, jlong factory, jobject j_ice_servers,
2999     jobject j_constraints, jlong observer_p) {
3000   rtc::scoped_refptr<PeerConnectionFactoryInterface> f(
3001       reinterpret_cast<PeerConnectionFactoryInterface*>(
3002           factoryFromJava(factory)));
3003   PeerConnectionInterface::IceServers servers;
3004   JavaIceServersToJsepIceServers(jni, j_ice_servers, &servers);
3005   PCOJava* observer = reinterpret_cast<PCOJava*>(observer_p);
3006   observer->SetConstraints(new ConstraintsWrapper(jni, j_constraints));
3007   rtc::scoped_refptr<PeerConnectionInterface> pc(f->CreatePeerConnection(
3008       servers, observer->constraints(), NULL, NULL, observer));
3009   return (jlong)pc.release();
3010 }
3011
3012 static rtc::scoped_refptr<PeerConnectionInterface> ExtractNativePC(
3013     JNIEnv* jni, jobject j_pc) {
3014   jfieldID native_pc_id = GetFieldID(jni,
3015       GetObjectClass(jni, j_pc), "nativePeerConnection", "J");
3016   jlong j_p = GetLongField(jni, j_pc, native_pc_id);
3017   return rtc::scoped_refptr<PeerConnectionInterface>(
3018       reinterpret_cast<PeerConnectionInterface*>(j_p));
3019 }
3020
3021 JOW(jobject, PeerConnection_getLocalDescription)(JNIEnv* jni, jobject j_pc) {
3022   const SessionDescriptionInterface* sdp =
3023       ExtractNativePC(jni, j_pc)->local_description();
3024   return sdp ? JavaSdpFromNativeSdp(jni, sdp) : NULL;
3025 }
3026
3027 JOW(jobject, PeerConnection_getRemoteDescription)(JNIEnv* jni, jobject j_pc) {
3028   const SessionDescriptionInterface* sdp =
3029       ExtractNativePC(jni, j_pc)->remote_description();
3030   return sdp ? JavaSdpFromNativeSdp(jni, sdp) : NULL;
3031 }
3032
// Creates a native DataChannel and wraps it in a new org.webrtc.DataChannel
// whose constructor receives the native pointer.  The native channel ends up
// owned by the Java object (refcount bumped below) and is freed from there.
JOW(jobject, PeerConnection_createDataChannel)(
    JNIEnv* jni, jobject j_pc, jstring j_label, jobject j_init) {
  DataChannelInit init = JavaDataChannelInitToNative(jni, j_init);
  rtc::scoped_refptr<DataChannelInterface> channel(
      ExtractNativePC(jni, j_pc)->CreateDataChannel(
          JavaToStdString(jni, j_label), &init));
  // Mustn't pass channel.get() directly through NewObject to avoid reading its
  // vararg parameter as 64-bit and reading memory that doesn't belong to the
  // 32-bit parameter.
  jlong nativeChannelPtr = jlongFromPointer(channel.get());
  CHECK(nativeChannelPtr) << "Failed to create DataChannel";
  jclass j_data_channel_class = FindClass(jni, "org/webrtc/DataChannel");
  jmethodID j_data_channel_ctor = GetMethodID(
      jni, j_data_channel_class, "<init>", "(J)V");
  jobject j_channel = jni->NewObject(
      j_data_channel_class, j_data_channel_ctor, nativeChannelPtr);
  CHECK_EXCEPTION(jni) << "error during NewObject";
  // Channel is now owned by Java object, and will be freed from there.
  // The extra ref added here compensates for the one |channel| drops when it
  // goes out of scope; count 2 = this scoped_refptr + the Java-owned ref.
  int bumped_count = channel->AddRef();
  CHECK(bumped_count == 2) << "Unexpected refcount";
  return j_channel;
}
3055
3056 JOW(void, PeerConnection_createOffer)(
3057     JNIEnv* jni, jobject j_pc, jobject j_observer, jobject j_constraints) {
3058   ConstraintsWrapper* constraints =
3059       new ConstraintsWrapper(jni, j_constraints);
3060   rtc::scoped_refptr<CreateSdpObserverWrapper> observer(
3061       new rtc::RefCountedObject<CreateSdpObserverWrapper>(
3062           jni, j_observer, constraints));
3063   ExtractNativePC(jni, j_pc)->CreateOffer(observer, constraints);
3064 }
3065
3066 JOW(void, PeerConnection_createAnswer)(
3067     JNIEnv* jni, jobject j_pc, jobject j_observer, jobject j_constraints) {
3068   ConstraintsWrapper* constraints =
3069       new ConstraintsWrapper(jni, j_constraints);
3070   rtc::scoped_refptr<CreateSdpObserverWrapper> observer(
3071       new rtc::RefCountedObject<CreateSdpObserverWrapper>(
3072           jni, j_observer, constraints));
3073   ExtractNativePC(jni, j_pc)->CreateAnswer(observer, constraints);
3074 }
3075
// Helper to create a SessionDescriptionInterface from a SessionDescription.
// Reads the Java object's |type| enum (via its canonicalForm() string) and
// |description| String field, then builds the native SDP from those.
// Returns a heap-allocated object the caller takes ownership of; may return
// NULL if native parsing fails (per CreateSessionDescription's contract).
static SessionDescriptionInterface* JavaSdpToNativeSdp(
    JNIEnv* jni, jobject j_sdp) {
  jfieldID j_type_id = GetFieldID(
      jni, GetObjectClass(jni, j_sdp), "type",
      "Lorg/webrtc/SessionDescription$Type;");
  jobject j_type = GetObjectField(jni, j_sdp, j_type_id);
  // canonicalForm() yields the lowercase wire form expected natively.
  jmethodID j_canonical_form_id = GetMethodID(
      jni, GetObjectClass(jni, j_type), "canonicalForm",
      "()Ljava/lang/String;");
  jstring j_type_string = (jstring)jni->CallObjectMethod(
      j_type, j_canonical_form_id);
  CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
  std::string std_type = JavaToStdString(jni, j_type_string);

  jfieldID j_description_id = GetFieldID(
      jni, GetObjectClass(jni, j_sdp), "description", "Ljava/lang/String;");
  jstring j_description = (jstring)GetObjectField(jni, j_sdp, j_description_id);
  std::string std_description = JavaToStdString(jni, j_description);

  // NULL error-output parameter: parse errors are not surfaced here.
  return webrtc::CreateSessionDescription(
      std_type, std_description, NULL);
}
3099
3100 JOW(void, PeerConnection_setLocalDescription)(
3101     JNIEnv* jni, jobject j_pc,
3102     jobject j_observer, jobject j_sdp) {
3103   rtc::scoped_refptr<SetSdpObserverWrapper> observer(
3104       new rtc::RefCountedObject<SetSdpObserverWrapper>(
3105           jni, j_observer, reinterpret_cast<ConstraintsWrapper*>(NULL)));
3106   ExtractNativePC(jni, j_pc)->SetLocalDescription(
3107       observer, JavaSdpToNativeSdp(jni, j_sdp));
3108 }
3109
3110 JOW(void, PeerConnection_setRemoteDescription)(
3111     JNIEnv* jni, jobject j_pc,
3112     jobject j_observer, jobject j_sdp) {
3113   rtc::scoped_refptr<SetSdpObserverWrapper> observer(
3114       new rtc::RefCountedObject<SetSdpObserverWrapper>(
3115           jni, j_observer, reinterpret_cast<ConstraintsWrapper*>(NULL)));
3116   ExtractNativePC(jni, j_pc)->SetRemoteDescription(
3117       observer, JavaSdpToNativeSdp(jni, j_sdp));
3118 }
3119
3120 JOW(jboolean, PeerConnection_updateIce)(
3121     JNIEnv* jni, jobject j_pc, jobject j_ice_servers, jobject j_constraints) {
3122   PeerConnectionInterface::IceServers ice_servers;
3123   JavaIceServersToJsepIceServers(jni, j_ice_servers, &ice_servers);
3124   scoped_ptr<ConstraintsWrapper> constraints(
3125       new ConstraintsWrapper(jni, j_constraints));
3126   return ExtractNativePC(jni, j_pc)->UpdateIce(ice_servers, constraints.get());
3127 }
3128
3129 JOW(jboolean, PeerConnection_nativeAddIceCandidate)(
3130     JNIEnv* jni, jobject j_pc, jstring j_sdp_mid,
3131     jint j_sdp_mline_index, jstring j_candidate_sdp) {
3132   std::string sdp_mid = JavaToStdString(jni, j_sdp_mid);
3133   std::string sdp = JavaToStdString(jni, j_candidate_sdp);
3134   scoped_ptr<IceCandidateInterface> candidate(
3135       webrtc::CreateIceCandidate(sdp_mid, j_sdp_mline_index, sdp, NULL));
3136   return ExtractNativePC(jni, j_pc)->AddIceCandidate(candidate.get());
3137 }
3138
3139 JOW(jboolean, PeerConnection_nativeAddLocalStream)(
3140     JNIEnv* jni, jobject j_pc, jlong native_stream) {
3141   return ExtractNativePC(jni, j_pc)->AddStream(
3142       reinterpret_cast<MediaStreamInterface*>(native_stream));
3143 }
3144
3145 JOW(void, PeerConnection_nativeRemoveLocalStream)(
3146     JNIEnv* jni, jobject j_pc, jlong native_stream) {
3147   ExtractNativePC(jni, j_pc)->RemoveStream(
3148       reinterpret_cast<MediaStreamInterface*>(native_stream));
3149 }
3150
3151 JOW(bool, PeerConnection_nativeGetStats)(
3152     JNIEnv* jni, jobject j_pc, jobject j_observer, jlong native_track) {
3153   rtc::scoped_refptr<StatsObserverWrapper> observer(
3154       new rtc::RefCountedObject<StatsObserverWrapper>(jni, j_observer));
3155   return ExtractNativePC(jni, j_pc)->GetStats(
3156       observer,
3157       reinterpret_cast<MediaStreamTrackInterface*>(native_track),
3158       PeerConnectionInterface::kStatsOutputLevelStandard);
3159 }
3160
3161 JOW(jobject, PeerConnection_signalingState)(JNIEnv* jni, jobject j_pc) {
3162   PeerConnectionInterface::SignalingState state =
3163       ExtractNativePC(jni, j_pc)->signaling_state();
3164   return JavaEnumFromIndex(jni, "PeerConnection$SignalingState", state);
3165 }
3166
3167 JOW(jobject, PeerConnection_iceConnectionState)(JNIEnv* jni, jobject j_pc) {
3168   PeerConnectionInterface::IceConnectionState state =
3169       ExtractNativePC(jni, j_pc)->ice_connection_state();
3170   return JavaEnumFromIndex(jni, "PeerConnection$IceConnectionState", state);
3171 }
3172
// Maps the native ICE gathering state onto a Java enum.
// NOTE(review): unlike the two functions above, this exported symbol is named
// "PeerGathering_..." and looks up "PeerGathering$IceGatheringState" rather
// than "PeerConnection$IceGatheringState".  The JNI symbol name must match
// the Java class declaring the native method, so this cannot be renamed here
// unilaterally — verify against the Java side that both names are intended.
JOW(jobject, PeerGathering_iceGatheringState)(JNIEnv* jni, jobject j_pc) {
  PeerConnectionInterface::IceGatheringState state =
      ExtractNativePC(jni, j_pc)->ice_gathering_state();
  return JavaEnumFromIndex(jni, "PeerGathering$IceGatheringState", state);
}
3178
3179 JOW(void, PeerConnection_close)(JNIEnv* jni, jobject j_pc) {
3180   ExtractNativePC(jni, j_pc)->Close();
3181   return;
3182 }
3183
3184 JOW(jobject, MediaSource_nativeState)(JNIEnv* jni, jclass, jlong j_p) {
3185   rtc::scoped_refptr<MediaSourceInterface> p(
3186       reinterpret_cast<MediaSourceInterface*>(j_p));
3187   return JavaEnumFromIndex(jni, "MediaSource$State", p->state());
3188 }
3189
3190 JOW(jlong, VideoCapturer_nativeCreateVideoCapturer)(
3191     JNIEnv* jni, jclass, jstring j_device_name) {
3192   std::string device_name = JavaToStdString(jni, j_device_name);
3193   scoped_ptr<cricket::DeviceManagerInterface> device_manager(
3194       cricket::DeviceManagerFactory::Create());
3195   CHECK(device_manager->Init()) << "DeviceManager::Init() failed";
3196   cricket::Device device;
3197   if (!device_manager->GetVideoCaptureDevice(device_name, &device)) {
3198     LOG(LS_ERROR) << "GetVideoCaptureDevice failed for " << device_name;
3199     return 0;
3200   }
3201   scoped_ptr<cricket::VideoCapturer> capturer(
3202       device_manager->CreateVideoCapturer(device));
3203   return (jlong)capturer.release();
3204 }
3205
3206 JOW(jlong, VideoRenderer_nativeCreateGuiVideoRenderer)(
3207     JNIEnv* jni, jclass, int x, int y) {
3208   scoped_ptr<VideoRendererWrapper> renderer(VideoRendererWrapper::Create(
3209       cricket::VideoRendererFactory::CreateGuiVideoRenderer(x, y)));
3210   return (jlong)renderer.release();
3211 }
3212
3213 JOW(jlong, VideoRenderer_nativeWrapVideoRenderer)(
3214     JNIEnv* jni, jclass, jobject j_callbacks) {
3215   scoped_ptr<JavaVideoRendererWrapper> renderer(
3216       new JavaVideoRendererWrapper(jni, j_callbacks));
3217   return (jlong)renderer.release();
3218 }
3219
// Stops the capturer feeding this VideoSource.  The current capture format
// is copied to the heap *before* stopping and returned as an opaque jlong so
// VideoSource_restart can later resume with the same format.  The caller
// owns the returned VideoFormatPod and must release it via
// VideoSource_freeNativeVideoFormat (or pass it back to restart).
// NOTE(review): dereferences capturer->GetCaptureFormat() unconditionally,
// so this assumes the capturer is currently capturing — confirm callers
// guarantee that.
JOW(jlong, VideoSource_stop)(JNIEnv* jni, jclass, jlong j_p) {
  cricket::VideoCapturer* capturer =
      reinterpret_cast<VideoSourceInterface*>(j_p)->GetVideoCapturer();
  scoped_ptr<cricket::VideoFormatPod> format(
      new cricket::VideoFormatPod(*capturer->GetCaptureFormat()));
  capturer->Stop();
  return jlongFromPointer(format.release());
}
3228
3229 JOW(void, VideoSource_restart)(
3230     JNIEnv* jni, jclass, jlong j_p_source, jlong j_p_format) {
3231   CHECK(j_p_source);
3232   CHECK(j_p_format);
3233   scoped_ptr<cricket::VideoFormatPod> format(
3234       reinterpret_cast<cricket::VideoFormatPod*>(j_p_format));
3235   reinterpret_cast<VideoSourceInterface*>(j_p_source)->GetVideoCapturer()->
3236       StartCapturing(cricket::VideoFormat(*format));
3237 }
3238
3239 JOW(void, VideoSource_freeNativeVideoFormat)(
3240     JNIEnv* jni, jclass, jlong j_p) {
3241   delete reinterpret_cast<cricket::VideoFormatPod*>(j_p);
3242 }
3243
3244 JOW(jstring, MediaStreamTrack_nativeId)(JNIEnv* jni, jclass, jlong j_p) {
3245   return JavaStringFromStdString(
3246       jni, reinterpret_cast<MediaStreamTrackInterface*>(j_p)->id());
3247 }
3248
3249 JOW(jstring, MediaStreamTrack_nativeKind)(JNIEnv* jni, jclass, jlong j_p) {
3250   return JavaStringFromStdString(
3251       jni, reinterpret_cast<MediaStreamTrackInterface*>(j_p)->kind());
3252 }
3253
3254 JOW(jboolean, MediaStreamTrack_nativeEnabled)(JNIEnv* jni, jclass, jlong j_p) {
3255   return reinterpret_cast<MediaStreamTrackInterface*>(j_p)->enabled();
3256 }
3257
3258 JOW(jobject, MediaStreamTrack_nativeState)(JNIEnv* jni, jclass, jlong j_p) {
3259   return JavaEnumFromIndex(
3260       jni,
3261       "MediaStreamTrack$State",
3262       reinterpret_cast<MediaStreamTrackInterface*>(j_p)->state());
3263 }
3264
3265 JOW(jboolean, MediaStreamTrack_nativeSetState)(
3266     JNIEnv* jni, jclass, jlong j_p, jint j_new_state) {
3267   MediaStreamTrackInterface::TrackState new_state =
3268       (MediaStreamTrackInterface::TrackState)j_new_state;
3269   return reinterpret_cast<MediaStreamTrackInterface*>(j_p)
3270       ->set_state(new_state);
3271 }
3272
3273 JOW(jboolean, MediaStreamTrack_nativeSetEnabled)(
3274     JNIEnv* jni, jclass, jlong j_p, jboolean enabled) {
3275   return reinterpret_cast<MediaStreamTrackInterface*>(j_p)
3276       ->set_enabled(enabled);
3277 }
3278
3279 JOW(void, VideoTrack_nativeAddRenderer)(
3280     JNIEnv* jni, jclass,
3281     jlong j_video_track_pointer, jlong j_renderer_pointer) {
3282   reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer)->AddRenderer(
3283       reinterpret_cast<VideoRendererInterface*>(j_renderer_pointer));
3284 }
3285
3286 JOW(void, VideoTrack_nativeRemoveRenderer)(
3287     JNIEnv* jni, jclass,
3288     jlong j_video_track_pointer, jlong j_renderer_pointer) {
3289   reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer)->RemoveRenderer(
3290       reinterpret_cast<VideoRendererInterface*>(j_renderer_pointer));
3291 }