3 * Copyright 2013, Google Inc.
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions are met:
8 * 1. Redistributions of source code must retain the above copyright notice,
9 * this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright notice,
11 * this list of conditions and the following disclaimer in the documentation
12 * and/or other materials provided with the distribution.
13 * 3. The name of the author may not be used to endorse or promote products
14 * derived from this software without specific prior written permission.
16 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
19 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
20 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
21 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
22 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
23 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
24 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
25 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 // Hints for future visitors:
29 // This entire file is an implementation detail of the org.webrtc Java package,
30 // the most interesting bits of which are org.webrtc.PeerConnection{,Factory}.
31 // The layout of this file is roughly:
32 // - various helper C++ functions & classes that wrap Java counterparts and
33 // expose a C++ interface that can be passed to the C++ PeerConnection APIs
34 // - implementations of methods declared "static" in the Java package (named
35 // things like Java_org_webrtc_OMG_Can_This_Name_Be_Any_Longer, prescribed by
38 // Lifecycle notes: objects are owned where they will be called; in other words
39 // FooObservers are owned by C++-land, and user-callable objects (e.g.
40 // PeerConnection and VideoTrack) are owned by Java-land.
41 // When this file allocates C++ RefCountInterfaces it AddRef()s an artificial
42 // ref simulating the jlong held in Java-land, and then Release()s the ref in
43 // the respective free call. Sometimes this AddRef is implicit in the
44 // construction of a scoped_refptr<> which is then .release()d.
45 // Any persistent (non-local) references from C++ to Java must be global or weak
46 // (in which case they must be checked before use)!
48 // Exception notes: pretty much all JNI calls can throw Java exceptions, so each
49 // call through a JNIEnv* pointer needs to be followed by an ExceptionCheck()
50 // call. In this file this is done in CHECK_EXCEPTION, making for much easier
51 // debugging in case of failure (the alternative is to wait for control to
52 // return to the Java frame that called code in this file, at which point it's
53 // impossible to tell which JNI call broke).
57 #define JNIEXPORT __attribute__((visibility("default")))
59 #include <asm/unistd.h>
60 #include <sys/prctl.h>
61 #include <sys/syscall.h>
66 #include "talk/app/webrtc/mediaconstraintsinterface.h"
67 #include "talk/app/webrtc/peerconnectioninterface.h"
68 #include "talk/app/webrtc/videosourceinterface.h"
69 #include "talk/media/base/videocapturer.h"
70 #include "talk/media/base/videorenderer.h"
71 #include "talk/media/devices/videorendererfactory.h"
72 #include "talk/media/webrtc/webrtcvideocapturer.h"
73 #include "talk/media/webrtc/webrtcvideodecoderfactory.h"
74 #include "talk/media/webrtc/webrtcvideoencoderfactory.h"
75 #include "third_party/icu/source/common/unicode/unistr.h"
76 #include "third_party/libyuv/include/libyuv/convert.h"
77 #include "third_party/libyuv/include/libyuv/convert_from.h"
78 #include "third_party/libyuv/include/libyuv/video_common.h"
79 #include "webrtc/base/bind.h"
80 #include "webrtc/base/checks.h"
81 #include "webrtc/base/logging.h"
82 #include "webrtc/base/messagequeue.h"
83 #include "webrtc/base/ssladapter.h"
84 #include "webrtc/common_video/interface/texture_video_frame.h"
85 #include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
86 #include "webrtc/system_wrappers/interface/compile_assert.h"
87 #include "webrtc/system_wrappers/interface/trace.h"
88 #include "webrtc/video_engine/include/vie_base.h"
89 #include "webrtc/voice_engine/include/voe_base.h"
91 #if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
92 #include <android/log.h>
93 #include "webrtc/modules/video_capture/video_capture_internal.h"
94 #include "webrtc/modules/video_render/video_render_internal.h"
95 #include "webrtc/system_wrappers/interface/logcat_trace_context.h"
96 #include "webrtc/system_wrappers/interface/tick_util.h"
97 using webrtc::CodecSpecificInfo;
98 using webrtc::DecodedImageCallback;
99 using webrtc::EncodedImage;
100 using webrtc::I420VideoFrame;
101 using webrtc::LogcatTraceContext;
102 using webrtc::RTPFragmentationHeader;
103 using webrtc::TextureVideoFrame;
104 using webrtc::TickTime;
105 using webrtc::VideoCodec;
108 using icu::UnicodeString;
111 using rtc::ThreadManager;
112 using rtc::scoped_ptr;
113 using webrtc::AudioSourceInterface;
114 using webrtc::AudioTrackInterface;
115 using webrtc::AudioTrackVector;
116 using webrtc::CreateSessionDescriptionObserver;
117 using webrtc::DataBuffer;
118 using webrtc::DataChannelInit;
119 using webrtc::DataChannelInterface;
120 using webrtc::DataChannelObserver;
121 using webrtc::IceCandidateInterface;
122 using webrtc::NativeHandle;
123 using webrtc::MediaConstraintsInterface;
124 using webrtc::MediaSourceInterface;
125 using webrtc::MediaStreamInterface;
126 using webrtc::MediaStreamTrackInterface;
127 using webrtc::PeerConnectionFactoryInterface;
128 using webrtc::PeerConnectionInterface;
129 using webrtc::PeerConnectionObserver;
130 using webrtc::SessionDescriptionInterface;
131 using webrtc::SetSessionDescriptionObserver;
132 using webrtc::StatsObserver;
133 using webrtc::StatsReport;
134 using webrtc::VideoRendererInterface;
135 using webrtc::VideoSourceInterface;
136 using webrtc::VideoTrackInterface;
137 using webrtc::VideoTrackVector;
138 using webrtc::kVideoCodecVP8;
140 // Abort the process if |jni| has a Java exception pending.
141 // This macro uses the comma operator to execute ExceptionDescribe
142 // and ExceptionClear ignoring their return values and sending ""
143 // to the error stream.
144 #define CHECK_EXCEPTION(jni) \
145 CHECK(!jni->ExceptionCheck()) \
146 << (jni->ExceptionDescribe(), jni->ExceptionClear(), "")
148 // Helper that calls ptr->Release() and aborts the process with a useful
149 // message if that didn't actually delete *ptr because of extra refcounts.
150 #define CHECK_RELEASE(ptr) \
151 CHECK_EQ(0, (ptr)->Release()) << "Unexpected refcount."
155 static JavaVM* g_jvm = NULL; // Set in JNI_OnLoad().
// Guards one-time creation of the |g_jni_ptr| TLS key (see CreateJNIPtrKey).
157 static pthread_once_t g_jni_ptr_once = PTHREAD_ONCE_INIT;
158 // Key for per-thread JNIEnv* data. Non-NULL in threads attached to |g_jvm| by
159 // AttachCurrentThreadIfNeeded(), NULL in unattached threads and threads that
160 // were attached by the JVM because of a Java->native call.
161 static pthread_key_t g_jni_ptr;
163 #if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
164 // Set in PeerConnectionFactory_initializeAndroidGlobals().
165 static bool factory_static_initialized = false;
169 // Return thread ID as a string.
170 static std::string GetThreadId() {
171 char buf[21]; // Big enough to hold a kuint64max plus terminating NULL.
// gettid() has no glibc wrapper on older toolchains, hence the raw syscall.
172 CHECK_LT(snprintf(buf, sizeof(buf), "%llu", syscall(__NR_gettid)),
174 << "Thread id is bigger than uint64??";
175 return std::string(buf);
178 // Return the current thread's name.
179 static std::string GetThreadName() {
// prctl(PR_GET_NAME) copies the thread name (up to 16 bytes incl. NUL)
// into the caller-supplied buffer.
181 CHECK_EQ(0, prctl(PR_GET_NAME, name)) << "prctl(PR_GET_NAME) failed";
183 return std::string(name);
186 // Return a |JNIEnv*| usable on this thread or NULL if this thread is detached.
187 static JNIEnv* GetEnv() {
189 jint status = g_jvm->GetEnv(&env, JNI_VERSION_1_6);
// Only two outcomes are legal per the JNI spec: attached (env set, JNI_OK) or
// detached (env NULL, JNI_EDETACHED). Anything else is a JVM bug - abort.
190 CHECK(((env != NULL) && (status == JNI_OK)) ||
191 ((env == NULL) && (status == JNI_EDETACHED)))
192 << "Unexpected GetEnv return: " << status << ":" << env;
193 return reinterpret_cast<JNIEnv*>(env);
// pthread_key destructor for |g_jni_ptr|: detaches this thread from the JVM.
196 static void ThreadDestructor(void* prev_jni_ptr) {
197 // This function only runs on threads where |g_jni_ptr| is non-NULL, meaning
198 // we were responsible for originally attaching the thread, so are responsible
199 // for detaching it now. However, because some JVM implementations (notably
200 // Oracle's http://goo.gl/eHApYT) also use the pthread_key_create mechanism,
201 // the JVM's accounting info for this thread may already be wiped out by the
202 // time this is called. Thus it may appear we are already detached even though
203 // it was our responsibility to detach! Oh well.
207 CHECK(GetEnv() == prev_jni_ptr)
208 << "Detaching from another thread: " << prev_jni_ptr << ":" << GetEnv();
209 jint status = g_jvm->DetachCurrentThread();
210 CHECK(status == JNI_OK) << "Failed to detach thread: " << status;
211 CHECK(!GetEnv()) << "Detaching was a successful no-op???";
// One-time (via |g_jni_ptr_once|) creation of the TLS key used above.
214 static void CreateJNIPtrKey() {
215 CHECK(!pthread_key_create(&g_jni_ptr, &ThreadDestructor))
216 << "pthread_key_create";
219 // Return a |JNIEnv*| usable on this thread. Attaches to |g_jvm| if necessary.
220 static JNIEnv* AttachCurrentThreadIfNeeded() {
221 JNIEnv* jni = GetEnv();
224 CHECK(!pthread_getspecific(g_jni_ptr))
225 << "TLS has a JNIEnv* but not attached?";
// Thread name handed to the JVM for debugging.
// NOTE(review): |name| is strdup()ed and apparently never freed - confirm the
// JVM's attach args require the string to outlive the call.
227 char* name = strdup((GetThreadName() + " - " + GetThreadId()).c_str());
228 JavaVMAttachArgs args;
229 args.version = JNI_VERSION_1_6;
232 // Deal with difference in signatures between Oracle's jni.h and Android's.
233 #ifdef _JAVASOFT_JNI_H_ // Oracle's jni.h violates the JNI spec!
238 CHECK(!g_jvm->AttachCurrentThread(&env, &args)) << "Failed to attach thread";
240 CHECK(env) << "AttachCurrentThread handed back NULL!";
241 jni = reinterpret_cast<JNIEnv*>(env);
// Record the env in TLS so ThreadDestructor() knows to detach later.
242 CHECK(!pthread_setspecific(g_jni_ptr, jni)) << "pthread_setspecific";
246 // Return a |jlong| that will correctly convert back to |ptr|. This is needed
247 // because the alternative (of silently passing a 32-bit pointer to a vararg
248 // function expecting a 64-bit param) picks up garbage in the high 32 bits.
249 static jlong jlongFromPointer(void* ptr) {
250 COMPILE_ASSERT(sizeof(intptr_t) <= sizeof(jlong),
251 Time_to_rethink_the_use_of_jlongs);
252 // Going through intptr_t to be obvious about the definedness of the
253 // conversion from pointer to integral type. intptr_t to jlong is a standard
254 // widening by the COMPILE_ASSERT above.
255 jlong ret = reinterpret_cast<intptr_t>(ptr);
// Debug-only round-trip check: the jlong must convert back to the same ptr.
256 assert(reinterpret_cast<void*>(ret) == ptr);
260 // Android's FindClass() is trickier than usual because the app-specific
261 // ClassLoader is not consulted when there is no app-specific frame on the
262 // stack. Consequently, we only look up classes once in JNI_OnLoad.
263 // http://developer.android.com/training/articles/perf-jni.html#faq_FindClass
264 class ClassReferenceHolder {
// Eagerly look up (and pin with global refs) every class this file needs,
// while a Java frame (JNI_OnLoad) is still on the stack.
266 explicit ClassReferenceHolder(JNIEnv* jni) {
267 LoadClass(jni, "java/nio/ByteBuffer");
268 LoadClass(jni, "org/webrtc/AudioTrack");
269 LoadClass(jni, "org/webrtc/DataChannel");
270 LoadClass(jni, "org/webrtc/DataChannel$Buffer");
271 LoadClass(jni, "org/webrtc/DataChannel$Init");
272 LoadClass(jni, "org/webrtc/DataChannel$State");
273 LoadClass(jni, "org/webrtc/IceCandidate");
274 #if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
275 LoadClass(jni, "android/graphics/SurfaceTexture");
276 LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder");
277 LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
278 LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder");
279 LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo");
// EGLContext is only needed (and only loadable) when EGL14 is available,
// so ask the Java side before loading it.
280 jclass j_decoder_class = GetClass("org/webrtc/MediaCodecVideoDecoder");
281 jmethodID j_is_egl14_supported_method = jni->GetStaticMethodID(
282 j_decoder_class, "isEGL14Supported", "()Z");
283 bool is_egl14_supported = jni->CallStaticBooleanMethod(
284 j_decoder_class, j_is_egl14_supported_method);
285 CHECK_EXCEPTION(jni);
286 if (is_egl14_supported) {
287 LoadClass(jni, "android/opengl/EGLContext");
290 LoadClass(jni, "org/webrtc/MediaSource$State");
291 LoadClass(jni, "org/webrtc/MediaStream");
292 LoadClass(jni, "org/webrtc/MediaStreamTrack$State");
293 LoadClass(jni, "org/webrtc/PeerConnection$IceConnectionState");
294 LoadClass(jni, "org/webrtc/PeerConnection$IceGatheringState");
295 LoadClass(jni, "org/webrtc/PeerConnection$SignalingState");
296 LoadClass(jni, "org/webrtc/SessionDescription");
297 LoadClass(jni, "org/webrtc/SessionDescription$Type");
298 LoadClass(jni, "org/webrtc/StatsReport");
299 LoadClass(jni, "org/webrtc/StatsReport$Value");
300 LoadClass(jni, "org/webrtc/VideoRenderer$I420Frame");
301 LoadClass(jni, "org/webrtc/VideoTrack");
304 ~ClassReferenceHolder() {
305 CHECK(classes_.empty()) << "Must call FreeReferences() before dtor!";
// Drop all global refs; must be called with a valid JNIEnv before the dtor.
308 void FreeReferences(JNIEnv* jni) {
309 for (std::map<std::string, jclass>::const_iterator it = classes_.begin();
310 it != classes_.end(); ++it) {
311 jni->DeleteGlobalRef(it->second);
// Look up a previously-loaded class; aborts on an unknown name, which would
// indicate a missing LoadClass() call in the constructor.
316 jclass GetClass(const std::string& name) {
317 std::map<std::string, jclass>::iterator it = classes_.find(name);
318 CHECK(it != classes_.end()) << "Unexpected GetClass() call for: " << name;
// FindClass + promote the local ref to a global ref, stored under |name|.
323 void LoadClass(JNIEnv* jni, const std::string& name) {
324 jclass localRef = jni->FindClass(name.c_str());
325 CHECK_EXCEPTION(jni) << "error during FindClass: " << name;
326 CHECK(localRef) << name;
327 jclass globalRef = reinterpret_cast<jclass>(jni->NewGlobalRef(localRef));
328 CHECK_EXCEPTION(jni) << "error during NewGlobalRef: " << name;
329 CHECK(globalRef) << name;
330 bool inserted = classes_.insert(std::make_pair(name, globalRef)).second;
331 CHECK(inserted) << "Duplicate class name: " << name;
// Class-name -> pinned global jclass.
334 std::map<std::string, jclass> classes_;
337 // Allocated in JNI_OnLoad(), freed in JNI_OnUnLoad().
338 static ClassReferenceHolder* g_class_reference_holder = NULL;
340 // JNIEnv-helper methods that CHECK success: no Java exception thrown and found
341 // object/class/method/field is non-null.
342 jmethodID GetMethodID(
343 JNIEnv* jni, jclass c, const std::string& name, const char* signature) {
344 jmethodID m = jni->GetMethodID(c, name.c_str(), signature);
345 CHECK_EXCEPTION(jni) << "error during GetMethodID: " << name << ", "
347 CHECK(m) << name << ", " << signature;
// Like GetMethodID but for static methods.
351 jmethodID GetStaticMethodID(
352 JNIEnv* jni, jclass c, const char* name, const char* signature) {
353 jmethodID m = jni->GetStaticMethodID(c, name, signature);
354 CHECK_EXCEPTION(jni) << "error during GetStaticMethodID: " << name << ", "
356 CHECK(m) << name << ", " << signature;
// Field-ID lookup with the same CHECKed-success contract.
361 JNIEnv* jni, jclass c, const char* name, const char* signature) {
362 jfieldID f = jni->GetFieldID(c, name, signature);
363 CHECK_EXCEPTION(jni) << "error during GetFieldID";
364 CHECK(f) << name << ", " << signature;
368 // Returns a global reference guaranteed to be valid for the lifetime of the
// process, served from the cache built in JNI_OnLoad (|jni| is unused here).
370 jclass FindClass(JNIEnv* jni, const char* name) {
371 return g_class_reference_holder->GetClass(name);
// CHECKed wrapper for JNIEnv::GetObjectClass.
374 jclass GetObjectClass(JNIEnv* jni, jobject object) {
375 jclass c = jni->GetObjectClass(object);
376 CHECK_EXCEPTION(jni) << "error during GetObjectClass";
377 CHECK(c) << "GetObjectClass returned NULL";
// CHECKed field getters; each aborts on a pending exception. Note the object
// variant also aborts if the field's value is null.
381 jobject GetObjectField(JNIEnv* jni, jobject object, jfieldID id) {
382 jobject o = jni->GetObjectField(object, id);
383 CHECK_EXCEPTION(jni) << "error during GetObjectField";
384 CHECK(o) << "GetObjectField returned NULL";
388 jstring GetStringField(JNIEnv* jni, jobject object, jfieldID id) {
389 return static_cast<jstring>(GetObjectField(jni, object, id));
392 jlong GetLongField(JNIEnv* jni, jobject object, jfieldID id) {
393 jlong l = jni->GetLongField(object, id);
394 CHECK_EXCEPTION(jni) << "error during GetLongField";
398 jint GetIntField(JNIEnv* jni, jobject object, jfieldID id) {
399 jint i = jni->GetIntField(object, id);
400 CHECK_EXCEPTION(jni) << "error during GetIntField";
404 bool GetBooleanField(JNIEnv* jni, jobject object, jfieldID id) {
405 jboolean b = jni->GetBooleanField(object, id);
406 CHECK_EXCEPTION(jni) << "error during GetBooleanField";
// CHECKed global-ref management.
410 jobject NewGlobalRef(JNIEnv* jni, jobject o) {
411 jobject ret = jni->NewGlobalRef(o);
412 CHECK_EXCEPTION(jni) << "error during NewGlobalRef";
417 void DeleteGlobalRef(JNIEnv* jni, jobject o) {
418 jni->DeleteGlobalRef(o);
419 CHECK_EXCEPTION(jni) << "error during DeleteGlobalRef";
422 // Given a jweak reference, allocate a (strong) local reference scoped to the
423 // lifetime of this object if the weak reference is still valid, or NULL
// otherwise (NewLocalRef on a collected weak ref yields NULL).
427 WeakRef(JNIEnv* jni, jweak ref)
428 : jni_(jni), obj_(jni_->NewLocalRef(ref)) {
429 CHECK_EXCEPTION(jni) << "error during NewLocalRef";
// Destructor releases the local ref (DeleteLocalRef tolerates NULL).
433 jni_->DeleteLocalRef(obj_);
434 CHECK_EXCEPTION(jni_) << "error during DeleteLocalRef";
// NULL if the weak referent was already collected.
437 jobject obj() { return obj_; }
444 // Scope Java local references to the lifetime of this object. Use in all C++
445 // callbacks (i.e. entry points that don't originate in a Java callstack
446 // through a "native" method call).
447 class ScopedLocalRefFrame {
449 explicit ScopedLocalRefFrame(JNIEnv* jni) : jni_(jni) {
450 CHECK(!jni_->PushLocalFrame(0)) << "Failed to PushLocalFrame";
// Pop the frame, freeing every local ref created while it was active.
452 ~ScopedLocalRefFrame() {
453 jni_->PopLocalFrame(NULL);
460 // Scoped holder for global Java refs.
461 template<class T> // T is jclass, jobject, jintArray, etc.
462 class ScopedGlobalRef {
464 ScopedGlobalRef(JNIEnv* jni, T obj)
465 : obj_(static_cast<T>(jni->NewGlobalRef(obj))) {}
// The dtor may run on any thread, so (re)attach before deleting the ref.
467 DeleteGlobalRef(AttachCurrentThreadIfNeeded(), obj_);
469 T operator*() const {
476 // Java references to "null" can only be distinguished as such in C++ by
477 // creating a local reference, so this helper wraps that logic.
478 static bool IsNull(JNIEnv* jni, jobject obj) {
// The frame scopes (and frees) the temporary local ref on return.
479 ScopedLocalRefFrame local_ref_frame(jni);
480 return jni->NewLocalRef(obj) == NULL;
483 // Return the (singleton) Java Enum object corresponding to |index|;
484 // |state_class_fragment| is something like "MediaSource$State".
485 jobject JavaEnumFromIndex(
486 JNIEnv* jni, const std::string& state_class_fragment, int index) {
487 std::string state_class_name = "org/webrtc/" + state_class_fragment;
488 jclass state_class = FindClass(jni, state_class_name.c_str());
// Call the enum's static values() and index into the resulting array; relies
// on the Java enum's ordinal order matching the C++ enum's values.
489 jmethodID state_values_id = GetStaticMethodID(
490 jni, state_class, "values", ("()[L" + state_class_name + ";").c_str());
491 jobjectArray state_values = static_cast<jobjectArray>(
492 jni->CallStaticObjectMethod(state_class, state_values_id));
493 CHECK_EXCEPTION(jni) << "error during CallStaticObjectMethod";
494 jobject ret = jni->GetObjectArrayElement(state_values, index);
495 CHECK_EXCEPTION(jni) << "error during GetObjectArrayElement";
499 // Given a UTF-8 encoded |native| string return a new (UTF-16) jstring.
// Conversion goes through ICU rather than NewStringUTF because the latter
// expects modified UTF-8, not standard UTF-8.
500 static jstring JavaStringFromStdString(JNIEnv* jni, const std::string& native) {
501 UnicodeString ustr(UnicodeString::fromUTF8(native));
502 jstring jstr = jni->NewString(ustr.getBuffer(), ustr.length());
503 CHECK_EXCEPTION(jni) << "error during NewString";
507 // Given a (UTF-16) jstring return a new UTF-8 native string.
508 static std::string JavaToStdString(JNIEnv* jni, const jstring& j_string) {
509 const jchar* jchars = jni->GetStringChars(j_string, NULL);
510 CHECK_EXCEPTION(jni) << "Error during GetStringChars";
// Copy into an ICU UnicodeString before releasing the JVM-owned chars.
511 UnicodeString ustr(jchars, jni->GetStringLength(j_string));
512 CHECK_EXCEPTION(jni) << "Error during GetStringLength";
513 jni->ReleaseStringChars(j_string, jchars);
514 CHECK_EXCEPTION(jni) << "Error during ReleaseStringChars";
516 return ustr.toUTF8String(ret);
// Copy a Java DataChannel.Init object field-by-field into a C++
// webrtc::DataChannelInit. Pure read of |j_init|; no Java state is mutated.
519 static DataChannelInit JavaDataChannelInitToNative(
520 JNIEnv* jni, jobject j_init) {
521 DataChannelInit init;
// Resolve every field ID up front, then read the values.
523 jclass j_init_class = FindClass(jni, "org/webrtc/DataChannel$Init");
524 jfieldID ordered_id = GetFieldID(jni, j_init_class, "ordered", "Z");
525 jfieldID max_retransmit_time_id =
526 GetFieldID(jni, j_init_class, "maxRetransmitTimeMs", "I");
527 jfieldID max_retransmits_id =
528 GetFieldID(jni, j_init_class, "maxRetransmits", "I");
529 jfieldID protocol_id =
530 GetFieldID(jni, j_init_class, "protocol", "Ljava/lang/String;");
531 jfieldID negotiated_id = GetFieldID(jni, j_init_class, "negotiated", "Z");
532 jfieldID id_id = GetFieldID(jni, j_init_class, "id", "I");
534 init.ordered = GetBooleanField(jni, j_init, ordered_id);
535 init.maxRetransmitTime = GetIntField(jni, j_init, max_retransmit_time_id);
536 init.maxRetransmits = GetIntField(jni, j_init, max_retransmits_id);
537 init.protocol = JavaToStdString(
538 jni, GetStringField(jni, j_init, protocol_id));
539 init.negotiated = GetBooleanField(jni, j_init, negotiated_id);
540 init.id = GetIntField(jni, j_init, id_id);
545 class ConstraintsWrapper;
547 // Adapter between the C++ PeerConnectionObserver interface and the Java
548 // PeerConnection.Observer interface. Wraps an instance of the Java interface
549 // and dispatches C++ callbacks to Java.
550 class PCOJava : public PeerConnectionObserver {
// Cache global refs to the Java observer and the wrapper classes, plus the
// (J)V constructors used to wrap native pointers as Java objects.
552 PCOJava(JNIEnv* jni, jobject j_observer)
553 : j_observer_global_(jni, j_observer),
554 j_observer_class_(jni, GetObjectClass(jni, *j_observer_global_)),
555 j_media_stream_class_(jni, FindClass(jni, "org/webrtc/MediaStream")),
556 j_media_stream_ctor_(GetMethodID(
557 jni, *j_media_stream_class_, "<init>", "(J)V")),
558 j_audio_track_class_(jni, FindClass(jni, "org/webrtc/AudioTrack")),
559 j_audio_track_ctor_(GetMethodID(
560 jni, *j_audio_track_class_, "<init>", "(J)V")),
561 j_video_track_class_(jni, FindClass(jni, "org/webrtc/VideoTrack")),
562 j_video_track_ctor_(GetMethodID(
563 jni, *j_video_track_class_, "<init>", "(J)V")),
564 j_data_channel_class_(jni, FindClass(jni, "org/webrtc/DataChannel")),
565 j_data_channel_ctor_(GetMethodID(
566 jni, *j_data_channel_class_, "<init>", "(J)V")) {
569 virtual ~PCOJava() {}
// Wrap |candidate| as an org.webrtc.IceCandidate and forward to the Java
// observer's onIceCandidate().
571 virtual void OnIceCandidate(const IceCandidateInterface* candidate) OVERRIDE {
572 ScopedLocalRefFrame local_ref_frame(jni());
574 CHECK(candidate->ToString(&sdp)) << "got so far: " << sdp;
575 jclass candidate_class = FindClass(jni(), "org/webrtc/IceCandidate");
576 jmethodID ctor = GetMethodID(jni(), candidate_class,
577 "<init>", "(Ljava/lang/String;ILjava/lang/String;)V");
578 jstring j_mid = JavaStringFromStdString(jni(), candidate->sdp_mid());
579 jstring j_sdp = JavaStringFromStdString(jni(), sdp);
580 jobject j_candidate = jni()->NewObject(
581 candidate_class, ctor, j_mid, candidate->sdp_mline_index(), j_sdp);
582 CHECK_EXCEPTION(jni()) << "error during NewObject";
583 jmethodID m = GetMethodID(jni(), *j_observer_class_,
584 "onIceCandidate", "(Lorg/webrtc/IceCandidate;)V");
585 jni()->CallVoidMethod(*j_observer_global_, m, j_candidate);
586 CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
// The three state-change callbacks below each translate the C++ enum value
// into the corresponding Java enum singleton and invoke the observer.
589 virtual void OnSignalingChange(
590 PeerConnectionInterface::SignalingState new_state) OVERRIDE {
591 ScopedLocalRefFrame local_ref_frame(jni());
592 jmethodID m = GetMethodID(
593 jni(), *j_observer_class_, "onSignalingChange",
594 "(Lorg/webrtc/PeerConnection$SignalingState;)V");
595 jobject new_state_enum =
596 JavaEnumFromIndex(jni(), "PeerConnection$SignalingState", new_state);
597 jni()->CallVoidMethod(*j_observer_global_, m, new_state_enum);
598 CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
601 virtual void OnIceConnectionChange(
602 PeerConnectionInterface::IceConnectionState new_state) OVERRIDE {
603 ScopedLocalRefFrame local_ref_frame(jni());
604 jmethodID m = GetMethodID(
605 jni(), *j_observer_class_, "onIceConnectionChange",
606 "(Lorg/webrtc/PeerConnection$IceConnectionState;)V");
607 jobject new_state_enum = JavaEnumFromIndex(
608 jni(), "PeerConnection$IceConnectionState", new_state);
609 jni()->CallVoidMethod(*j_observer_global_, m, new_state_enum);
610 CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
613 virtual void OnIceGatheringChange(
614 PeerConnectionInterface::IceGatheringState new_state) OVERRIDE {
615 ScopedLocalRefFrame local_ref_frame(jni());
616 jmethodID m = GetMethodID(
617 jni(), *j_observer_class_, "onIceGatheringChange",
618 "(Lorg/webrtc/PeerConnection$IceGatheringState;)V");
619 jobject new_state_enum = JavaEnumFromIndex(
620 jni(), "PeerConnection$IceGatheringState", new_state);
621 jni()->CallVoidMethod(*j_observer_global_, m, new_state_enum);
622 CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
// Build a Java MediaStream wrapping |stream|, populate its audio/video track
// lists, remember a weak ref for OnRemoveStream, then notify the observer.
625 virtual void OnAddStream(MediaStreamInterface* stream) OVERRIDE {
626 ScopedLocalRefFrame local_ref_frame(jni());
627 jobject j_stream = jni()->NewObject(
628 *j_media_stream_class_, j_media_stream_ctor_, (jlong)stream);
629 CHECK_EXCEPTION(jni()) << "error during NewObject";
631 AudioTrackVector audio_tracks = stream->GetAudioTracks();
632 for (size_t i = 0; i < audio_tracks.size(); ++i) {
633 AudioTrackInterface* track = audio_tracks[i];
634 jstring id = JavaStringFromStdString(jni(), track->id());
635 jobject j_track = jni()->NewObject(
636 *j_audio_track_class_, j_audio_track_ctor_, (jlong)track, id);
637 CHECK_EXCEPTION(jni()) << "error during NewObject";
// NOTE(review): the field/method IDs below are re-looked-up on every loop
// iteration; hoisting them out would avoid redundant JNI round-trips.
638 jfieldID audio_tracks_id = GetFieldID(jni(),
639 *j_media_stream_class_,
641 "Ljava/util/LinkedList;");
642 jobject audio_tracks = GetObjectField(jni(), j_stream, audio_tracks_id);
643 jmethodID add = GetMethodID(jni(),
644 GetObjectClass(jni(), audio_tracks),
646 "(Ljava/lang/Object;)Z");
647 jboolean added = jni()->CallBooleanMethod(audio_tracks, add, j_track);
648 CHECK_EXCEPTION(jni()) << "error during CallBooleanMethod";
// Same pattern for the video tracks.
652 VideoTrackVector video_tracks = stream->GetVideoTracks();
653 for (size_t i = 0; i < video_tracks.size(); ++i) {
654 VideoTrackInterface* track = video_tracks[i];
655 jstring id = JavaStringFromStdString(jni(), track->id());
656 jobject j_track = jni()->NewObject(
657 *j_video_track_class_, j_video_track_ctor_, (jlong)track, id);
658 CHECK_EXCEPTION(jni()) << "error during NewObject";
659 jfieldID video_tracks_id = GetFieldID(jni(),
660 *j_media_stream_class_,
662 "Ljava/util/LinkedList;");
663 jobject video_tracks = GetObjectField(jni(), j_stream, video_tracks_id);
664 jmethodID add = GetMethodID(jni(),
665 GetObjectClass(jni(), video_tracks),
667 "(Ljava/lang/Object;)Z");
668 jboolean added = jni()->CallBooleanMethod(video_tracks, add, j_track);
669 CHECK_EXCEPTION(jni()) << "error during CallBooleanMethod";
// Weak ref: Java-land owns the stream wrapper's lifetime, not us.
672 streams_[stream] = jni()->NewWeakGlobalRef(j_stream);
673 CHECK_EXCEPTION(jni()) << "error during NewWeakGlobalRef";
675 jmethodID m = GetMethodID(jni(), *j_observer_class_, "onAddStream",
676 "(Lorg/webrtc/MediaStream;)V");
677 jni()->CallVoidMethod(*j_observer_global_, m, j_stream);
678 CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
// Look up the Java wrapper recorded in OnAddStream and notify the observer.
681 virtual void OnRemoveStream(MediaStreamInterface* stream) OVERRIDE {
682 ScopedLocalRefFrame local_ref_frame(jni());
683 NativeToJavaStreamsMap::iterator it = streams_.find(stream);
684 CHECK(it != streams_.end()) << "unexpected stream: " << std::hex << stream;
// Promote the weak ref to a strong local ref for the duration of the call.
686 WeakRef s(jni(), it->second);
691 jmethodID m = GetMethodID(jni(), *j_observer_class_, "onRemoveStream",
692 "(Lorg/webrtc/MediaStream;)V");
693 jni()->CallVoidMethod(*j_observer_global_, m, s.obj());
694 CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
697 virtual void OnDataChannel(DataChannelInterface* channel) OVERRIDE {
698 ScopedLocalRefFrame local_ref_frame(jni());
699 jobject j_channel = jni()->NewObject(
700 *j_data_channel_class_, j_data_channel_ctor_, (jlong)channel);
701 CHECK_EXCEPTION(jni()) << "error during NewObject";
703 jmethodID m = GetMethodID(jni(), *j_observer_class_, "onDataChannel",
704 "(Lorg/webrtc/DataChannel;)V");
705 jni()->CallVoidMethod(*j_observer_global_, m, j_channel);
707 // Channel is now owned by Java object, and will be freed from
708 // DataChannel.dispose(). Important that this be done _after_ the
709 // CallVoidMethod above as Java code might call back into native code and be
710 // surprised to see a refcount of 2.
711 int bumped_count = channel->AddRef();
712 CHECK(bumped_count == 2) << "Unexpected refcount OnDataChannel";
714 CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
717 virtual void OnRenegotiationNeeded() OVERRIDE {
718 ScopedLocalRefFrame local_ref_frame(jni());
720 GetMethodID(jni(), *j_observer_class_, "onRenegotiationNeeded", "()V");
721 jni()->CallVoidMethod(*j_observer_global_, m);
722 CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
// Takes ownership of |constraints|; may only be called once per instance.
725 void SetConstraints(ConstraintsWrapper* constraints) {
726 CHECK(!constraints_.get()) << "constraints already set!";
727 constraints_.reset(constraints);
730 const ConstraintsWrapper* constraints() { return constraints_.get(); }
// Callbacks may arrive on any thread, so (re)attach before touching JNI.
734 return AttachCurrentThreadIfNeeded();
737 const ScopedGlobalRef<jobject> j_observer_global_;
738 const ScopedGlobalRef<jclass> j_observer_class_;
739 const ScopedGlobalRef<jclass> j_media_stream_class_;
740 const jmethodID j_media_stream_ctor_;
741 const ScopedGlobalRef<jclass> j_audio_track_class_;
742 const jmethodID j_audio_track_ctor_;
743 const ScopedGlobalRef<jclass> j_video_track_class_;
744 const jmethodID j_video_track_ctor_;
745 const ScopedGlobalRef<jclass> j_data_channel_class_;
746 const jmethodID j_data_channel_ctor_;
747 typedef std::map<void*, jweak> NativeToJavaStreamsMap;
748 NativeToJavaStreamsMap streams_; // C++ -> Java streams.
749 scoped_ptr<ConstraintsWrapper> constraints_;
752 // Wrapper for a Java MediaConstraints object. Copies all needed data so when
753 // the constructor returns the Java object is no longer needed.
754 class ConstraintsWrapper : public MediaConstraintsInterface {
756 ConstraintsWrapper(JNIEnv* jni, jobject j_constraints) {
757 PopulateConstraintsFromJavaPairList(
758 jni, j_constraints, "mandatory", &mandatory_);
759 PopulateConstraintsFromJavaPairList(
760 jni, j_constraints, "optional", &optional_);
763 virtual ~ConstraintsWrapper() {}
765 // MediaConstraintsInterface.
766 virtual const Constraints& GetMandatory() const OVERRIDE {
770 virtual const Constraints& GetOptional() const OVERRIDE {
775 // Helper for translating a List<Pair<String, String>> to a Constraints.
// Iterates the Java List via its Iterator, pulling getKey()/getValue() off
// each Pair and appending the UTF-8 conversions to |field|.
776 static void PopulateConstraintsFromJavaPairList(
777 JNIEnv* jni, jobject j_constraints,
778 const char* field_name, Constraints* field) {
779 jfieldID j_id = GetFieldID(jni,
780 GetObjectClass(jni, j_constraints), field_name, "Ljava/util/List;");
781 jobject j_list = GetObjectField(jni, j_constraints, j_id);
782 jmethodID j_iterator_id = GetMethodID(jni,
783 GetObjectClass(jni, j_list), "iterator", "()Ljava/util/Iterator;");
784 jobject j_iterator = jni->CallObjectMethod(j_list, j_iterator_id);
785 CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
786 jmethodID j_has_next = GetMethodID(jni,
787 GetObjectClass(jni, j_iterator), "hasNext", "()Z");
788 jmethodID j_next = GetMethodID(jni,
789 GetObjectClass(jni, j_iterator), "next", "()Ljava/lang/Object;");
790 while (jni->CallBooleanMethod(j_iterator, j_has_next)) {
791 CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
792 jobject entry = jni->CallObjectMethod(j_iterator, j_next);
793 CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
794 jmethodID get_key = GetMethodID(jni,
795 GetObjectClass(jni, entry), "getKey", "()Ljava/lang/String;");
796 jstring j_key = reinterpret_cast<jstring>(
797 jni->CallObjectMethod(entry, get_key));
798 CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
799 jmethodID get_value = GetMethodID(jni,
800 GetObjectClass(jni, entry), "getValue", "()Ljava/lang/String;");
801 jstring j_value = reinterpret_cast<jstring>(
802 jni->CallObjectMethod(entry, get_value));
803 CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
804 field->push_back(Constraint(JavaToStdString(jni, j_key),
805 JavaToStdString(jni, j_value)));
// Re-check after the final hasNext() that terminated the loop.
807 CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
// Deep copies of the Java-side constraint lists; valid after ctor returns.
810 Constraints mandatory_;
811 Constraints optional_;
// Converts a native SessionDescriptionInterface into a Java
// org.webrtc.SessionDescription: serializes the SDP to a string, maps the
// canonical type string through SessionDescription$Type.fromCanonicalForm(),
// then invokes the (Type, String) constructor. CHECK-fails if the native
// description cannot be serialized.
814 static jobject JavaSdpFromNativeSdp(
815 JNIEnv* jni, const SessionDescriptionInterface* desc) {
817 CHECK(desc->ToString(&sdp)) << "got so far: " << sdp;
818 jstring j_description = JavaStringFromStdString(jni, sdp);
820 jclass j_type_class = FindClass(
821 jni, "org/webrtc/SessionDescription$Type");
822 jmethodID j_type_from_canonical = GetStaticMethodID(
823 jni, j_type_class, "fromCanonicalForm",
824 "(Ljava/lang/String;)Lorg/webrtc/SessionDescription$Type;");
825 jstring j_type_string = JavaStringFromStdString(jni, desc->type());
826 jobject j_type = jni->CallStaticObjectMethod(
827 j_type_class, j_type_from_canonical, j_type_string);
// NOTE(review): message says CallObjectMethod but the call above is
// CallStaticObjectMethod; harmless, but misleading in crash logs.
828 CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
830 jclass j_sdp_class = FindClass(jni, "org/webrtc/SessionDescription");
831 jmethodID j_sdp_ctor = GetMethodID(
832 jni, j_sdp_class, "<init>",
833 "(Lorg/webrtc/SessionDescription$Type;Ljava/lang/String;)V");
834 jobject j_sdp = jni->NewObject(
835 j_sdp_class, j_sdp_ctor, j_type, j_description);
836 CHECK_EXCEPTION(jni) << "error during NewObject";
// Adapter forwarding C++ SDP-observer callbacks to a Java observer object.
// Templated so the same code serves both CreateSessionDescriptionObserver
// (OnSuccess(desc)) and SetSessionDescriptionObserver (OnSuccess()).
840 template <class T> // T is one of {Create,Set}SessionDescriptionObserver.
841 class SdpObserverWrapper : public T {
// |constraints| ownership is taken (stored in a scoped_ptr below) so the
// constraints outlive the async operation this observer reports on.
843 SdpObserverWrapper(JNIEnv* jni, jobject j_observer,
844 ConstraintsWrapper* constraints)
845 : constraints_(constraints),
846 j_observer_global_(jni, j_observer),
847 j_observer_class_(jni, GetObjectClass(jni, j_observer)) {
850 virtual ~SdpObserverWrapper() {}
// Set-variant success: dispatch to Java onSetSuccess().
852 // Can't mark OVERRIDE because of templating.
853 virtual void OnSuccess() {
854 ScopedLocalRefFrame local_ref_frame(jni());
855 jmethodID m = GetMethodID(jni(), *j_observer_class_, "onSetSuccess", "()V");
856 jni()->CallVoidMethod(*j_observer_global_, m);
857 CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
// Create-variant success: convert |desc| to a Java SessionDescription and
// dispatch to onCreateSuccess().
860 // Can't mark OVERRIDE because of templating.
861 virtual void OnSuccess(SessionDescriptionInterface* desc) {
862 ScopedLocalRefFrame local_ref_frame(jni());
863 jmethodID m = GetMethodID(
864 jni(), *j_observer_class_, "onCreateSuccess",
865 "(Lorg/webrtc/SessionDescription;)V");
866 jobject j_sdp = JavaSdpFromNativeSdp(jni(), desc);
867 jni()->CallVoidMethod(*j_observer_global_, m, j_sdp);
868 CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
// Dispatches to on{Set,Create}Failure(String). NOTE(review): no
// ScopedLocalRefFrame here — the subclasses below establish one before
// delegating, so callers must go through them.
872 // Common implementation for failure of Set & Create types, distinguished by
873 // |op| being "Set" or "Create".
874 void OnFailure(const std::string& op, const std::string& error) {
875 jmethodID m = GetMethodID(jni(), *j_observer_class_, "on" + op + "Failure",
876 "(Ljava/lang/String;)V");
877 jstring j_error_string = JavaStringFromStdString(jni(), error);
878 jni()->CallVoidMethod(*j_observer_global_, m, j_error_string);
879 CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
// Callbacks may arrive on any thread; attach it to the JVM on demand.
883 return AttachCurrentThreadIfNeeded();
887 scoped_ptr<ConstraintsWrapper> constraints_;
888 const ScopedGlobalRef<jobject> j_observer_global_;
889 const ScopedGlobalRef<jclass> j_observer_class_;
// Concrete Create-variant: establishes a local-ref frame and routes
// OnFailure(error) into the shared OnFailure("Create", error) above.
892 class CreateSdpObserverWrapper
893 : public SdpObserverWrapper<CreateSessionDescriptionObserver> {
895 CreateSdpObserverWrapper(JNIEnv* jni, jobject j_observer,
896 ConstraintsWrapper* constraints)
897 : SdpObserverWrapper(jni, j_observer, constraints) {}
899 virtual void OnFailure(const std::string& error) OVERRIDE {
900 ScopedLocalRefFrame local_ref_frame(jni());
901 SdpObserverWrapper::OnFailure(std::string("Create"), error);
// Concrete Set-variant: mirrors CreateSdpObserverWrapper but routes into
// OnFailure("Set", error), i.e. Java onSetFailure(String).
905 class SetSdpObserverWrapper
906 : public SdpObserverWrapper<SetSessionDescriptionObserver> {
908 SetSdpObserverWrapper(JNIEnv* jni, jobject j_observer,
909 ConstraintsWrapper* constraints)
910 : SdpObserverWrapper(jni, j_observer, constraints) {}
912 virtual void OnFailure(const std::string& error) OVERRIDE {
913 ScopedLocalRefFrame local_ref_frame(jni());
914 SdpObserverWrapper::OnFailure(std::string("Set"), error);
918 // Adapter for a Java DataChannel$Observer presenting a C++ DataChannelObserver
919 // and dispatching the callback from C++ back to Java.
920 class DataChannelObserverWrapper : public DataChannelObserver {
// All method/ctor IDs are resolved once in the ctor and cached; the observer
// object and the classes are held as global refs so they survive across
// callbacks on arbitrary threads.
922 DataChannelObserverWrapper(JNIEnv* jni, jobject j_observer)
923 : j_observer_global_(jni, j_observer),
924 j_observer_class_(jni, GetObjectClass(jni, j_observer)),
925 j_buffer_class_(jni, FindClass(jni, "org/webrtc/DataChannel$Buffer")),
926 j_on_state_change_mid_(GetMethodID(jni, *j_observer_class_,
927 "onStateChange", "()V")),
928 j_on_message_mid_(GetMethodID(jni, *j_observer_class_, "onMessage",
929 "(Lorg/webrtc/DataChannel$Buffer;)V")),
930 j_buffer_ctor_(GetMethodID(jni, *j_buffer_class_,
931 "<init>", "(Ljava/nio/ByteBuffer;Z)V")) {
934 virtual ~DataChannelObserverWrapper() {}
936 virtual void OnStateChange() OVERRIDE {
937 ScopedLocalRefFrame local_ref_frame(jni());
938 jni()->CallVoidMethod(*j_observer_global_, j_on_state_change_mid_);
939 CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
// Wraps |buffer|'s bytes in a direct ByteBuffer (zero copy) plus the binary
// flag, then invokes Java onMessage(). NOTE(review): the direct buffer
// aliases |buffer.data|, so it is only valid for the duration of this
// callback — the Java side must not retain it. Confirm against the Java
// Observer contract.
942 virtual void OnMessage(const DataBuffer& buffer) OVERRIDE {
943 ScopedLocalRefFrame local_ref_frame(jni());
944 jobject byte_buffer =
945 jni()->NewDirectByteBuffer(const_cast<char*>(buffer.data.data()),
946 buffer.data.length());
947 jobject j_buffer = jni()->NewObject(*j_buffer_class_, j_buffer_ctor_,
948 byte_buffer, buffer.binary);
949 jni()->CallVoidMethod(*j_observer_global_, j_on_message_mid_, j_buffer);
950 CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
955 return AttachCurrentThreadIfNeeded();
958 const ScopedGlobalRef<jobject> j_observer_global_;
959 const ScopedGlobalRef<jclass> j_observer_class_;
960 const ScopedGlobalRef<jclass> j_buffer_class_;
961 const jmethodID j_on_state_change_mid_;
962 const jmethodID j_on_message_mid_;
963 const jmethodID j_buffer_ctor_;
966 // Adapter for a Java StatsObserver presenting a C++ StatsObserver and
967 // dispatching the callback from C++ back to Java.
968 class StatsObserverWrapper : public StatsObserver {
// Caches class refs and constructor IDs for StatsReport and
// StatsReport$Value so per-report conversion below does no class lookups.
970 StatsObserverWrapper(JNIEnv* jni, jobject j_observer)
971 : j_observer_global_(jni, j_observer),
972 j_observer_class_(jni, GetObjectClass(jni, j_observer)),
973 j_stats_report_class_(jni, FindClass(jni, "org/webrtc/StatsReport")),
974 j_stats_report_ctor_(GetMethodID(
975 jni, *j_stats_report_class_, "<init>",
976 "(Ljava/lang/String;Ljava/lang/String;D"
977 "[Lorg/webrtc/StatsReport$Value;)V")),
978 j_value_class_(jni, FindClass(
979 jni, "org/webrtc/StatsReport$Value")),
980 j_value_ctor_(GetMethodID(
981 jni, *j_value_class_, "<init>",
982 "(Ljava/lang/String;Ljava/lang/String;)V")) {
985 virtual ~StatsObserverWrapper() {}
// Converts the whole native report vector to StatsReport[] and hands it to
// the Java observer's onComplete().
987 virtual void OnComplete(const std::vector<StatsReport>& reports) OVERRIDE {
988 ScopedLocalRefFrame local_ref_frame(jni());
989 jobjectArray j_reports = ReportsToJava(jni(), reports);
990 jmethodID m = GetMethodID(jni(), *j_observer_class_, "onComplete",
991 "([Lorg/webrtc/StatsReport;)V");
992 jni()->CallVoidMethod(*j_observer_global_, m, j_reports);
993 CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
// Builds a Java StatsReport[] mirroring |reports|; a per-iteration local-ref
// frame keeps the local-reference table bounded for large report sets.
997 jobjectArray ReportsToJava(
998 JNIEnv* jni, const std::vector<StatsReport>& reports) {
999 jobjectArray reports_array = jni->NewObjectArray(
1000 reports.size(), *j_stats_report_class_, NULL);
// NOTE(review): int vs reports.size() (size_t) — signed/unsigned compare;
// fine for realistic sizes but worth cleaning up (same in ValuesToJava).
1001 for (int i = 0; i < reports.size(); ++i) {
1002 ScopedLocalRefFrame local_ref_frame(jni);
1003 const StatsReport& report = reports[i];
1004 jstring j_id = JavaStringFromStdString(jni, report.id);
1005 jstring j_type = JavaStringFromStdString(jni, report.type);
1006 jobjectArray j_values = ValuesToJava(jni, report.values);
1007 jobject j_report = jni->NewObject(*j_stats_report_class_,
1008 j_stats_report_ctor_,
1013 jni->SetObjectArrayElement(reports_array, i, j_report);
1015 return reports_array;
// Builds a StatsReport$Value[] from a native Values list (name/value string
// pairs).
1018 jobjectArray ValuesToJava(JNIEnv* jni, const StatsReport::Values& values) {
1019 jobjectArray j_values = jni->NewObjectArray(
1020 values.size(), *j_value_class_, NULL);
1021 for (int i = 0; i < values.size(); ++i) {
1022 ScopedLocalRefFrame local_ref_frame(jni);
1023 const StatsReport::Value& value = values[i];
1024 jstring j_name = JavaStringFromStdString(jni, value.name);
1025 jstring j_value = JavaStringFromStdString(jni, value.value);
1026 jobject j_element_value =
1027 jni->NewObject(*j_value_class_, j_value_ctor_, j_name, j_value);
1028 jni->SetObjectArrayElement(j_values, i, j_element_value);
1034 return AttachCurrentThreadIfNeeded();
1037 const ScopedGlobalRef<jobject> j_observer_global_;
1038 const ScopedGlobalRef<jclass> j_observer_class_;
1039 const ScopedGlobalRef<jclass> j_stats_report_class_;
1040 const jmethodID j_stats_report_ctor_;
1041 const ScopedGlobalRef<jclass> j_value_class_;
1042 const jmethodID j_value_ctor_;
1045 // Adapter presenting a cricket::VideoRenderer as a
1046 // webrtc::VideoRendererInterface.
1047 class VideoRendererWrapper : public VideoRendererInterface {
// Factory takes ownership of |renderer| (stored in scoped_ptr below).
// NOTE(review): listing elided — presumably returns NULL on NULL input;
// confirm against the full file.
1049 static VideoRendererWrapper* Create(cricket::VideoRenderer* renderer) {
1051 return new VideoRendererWrapper(renderer);
1055 virtual ~VideoRendererWrapper() {}
1057 virtual void SetSize(int width, int height) OVERRIDE {
1058 ScopedLocalRefFrame local_ref_frame(AttachCurrentThreadIfNeeded());
1059 const bool kNotReserved = false; // What does this param mean??
1060 renderer_->SetSize(width, height, kNotReserved);
// Forwards each decoded frame straight through to the wrapped renderer.
1063 virtual void RenderFrame(const cricket::VideoFrame* frame) OVERRIDE {
1064 ScopedLocalRefFrame local_ref_frame(AttachCurrentThreadIfNeeded());
1065 renderer_->RenderFrame(frame);
1069 explicit VideoRendererWrapper(cricket::VideoRenderer* renderer)
1070 : renderer_(renderer) {}
1072 scoped_ptr<cricket::VideoRenderer> renderer_;
1075 // Wrapper for texture object in TextureVideoFrame.
1076 class NativeHandleImpl : public NativeHandle {
1078 NativeHandleImpl() :
1079 ref_count_(0), texture_object_(NULL), texture_id_(-1) {}
1080 virtual ~NativeHandleImpl() {}
// NOTE(review): ref count is a plain int32_t with non-atomic ++/-- — not
// thread-safe if AddRef/Release can race across threads; confirm the
// single-threaded usage assumption or switch to an atomic.
1081 virtual int32_t AddRef() {
1082 return ++ref_count_;
1084 virtual int32_t Release() {
1085 return --ref_count_;
1087 virtual void* GetHandle() {
1088 return texture_object_;
1090 int GetTextureId() {
// Stores the (opaque) Java texture object and its GL texture id; no
// ownership is taken — caller manages the texture's lifetime.
1093 void SetTextureObject(void *texture_object, int texture_id) {
1094 texture_object_ = reinterpret_cast<jobject>(texture_object);
1095 texture_id_ = texture_id;
1097 int32_t ref_count() {
1103 jobject texture_object_;
1104 int32_t texture_id_;
1107 // Wrapper dispatching webrtc::VideoRendererInterface to a Java VideoRenderer
1109 class JavaVideoRendererWrapper : public VideoRendererInterface {
// Caches both I420Frame constructors up front: one for CPU (planar byte
// buffers) frames and one for texture-backed frames.
1111 JavaVideoRendererWrapper(JNIEnv* jni, jobject j_callbacks)
1112 : j_callbacks_(jni, j_callbacks),
1113 j_set_size_id_(GetMethodID(
1114 jni, GetObjectClass(jni, j_callbacks), "setSize", "(II)V")),
1115 j_render_frame_id_(GetMethodID(
1116 jni, GetObjectClass(jni, j_callbacks), "renderFrame",
1117 "(Lorg/webrtc/VideoRenderer$I420Frame;)V")),
1119 FindClass(jni, "org/webrtc/VideoRenderer$I420Frame")),
1120 j_i420_frame_ctor_id_(GetMethodID(
1121 jni, *j_frame_class_, "<init>", "(II[I[Ljava/nio/ByteBuffer;)V")),
1122 j_texture_frame_ctor_id_(GetMethodID(
1123 jni, *j_frame_class_, "<init>",
1124 "(IILjava/lang/Object;I)V")),
1125 j_byte_buffer_class_(jni, FindClass(jni, "java/nio/ByteBuffer")) {
1126 CHECK_EXCEPTION(jni);
1129 virtual ~JavaVideoRendererWrapper() {}
1131 virtual void SetSize(int width, int height) OVERRIDE {
1132 ScopedLocalRefFrame local_ref_frame(jni());
1133 jni()->CallVoidMethod(*j_callbacks_, j_set_size_id_, width, height);
1134 CHECK_EXCEPTION(jni());
// Chooses the texture path when the frame carries a native handle, else
// converts the I420 planes; both paths invoke the same Java renderFrame().
1137 virtual void RenderFrame(const cricket::VideoFrame* frame) OVERRIDE {
1138 ScopedLocalRefFrame local_ref_frame(jni());
1139 if (frame->GetNativeHandle() != NULL) {
1140 jobject j_frame = CricketToJavaTextureFrame(frame);
1141 jni()->CallVoidMethod(*j_callbacks_, j_render_frame_id_, j_frame);
1142 CHECK_EXCEPTION(jni());
1144 jobject j_frame = CricketToJavaI420Frame(frame);
1145 jni()->CallVoidMethod(*j_callbacks_, j_render_frame_id_, j_frame);
1146 CHECK_EXCEPTION(jni());
1151 // Return a VideoRenderer.I420Frame referring to the data in |frame|.
// The three direct ByteBuffers alias |frame|'s Y/U/V planes (no copy), so
// the Java frame is only valid while |frame| is; strides go in an int[3].
1152 jobject CricketToJavaI420Frame(const cricket::VideoFrame* frame) {
1153 jintArray strides = jni()->NewIntArray(3);
1154 jint* strides_array = jni()->GetIntArrayElements(strides, NULL);
1155 strides_array[0] = frame->GetYPitch();
1156 strides_array[1] = frame->GetUPitch();
1157 strides_array[2] = frame->GetVPitch();
1158 jni()->ReleaseIntArrayElements(strides, strides_array, 0);
1159 jobjectArray planes = jni()->NewObjectArray(3, *j_byte_buffer_class_, NULL);
1160 jobject y_buffer = jni()->NewDirectByteBuffer(
1161 const_cast<uint8*>(frame->GetYPlane()),
1162 frame->GetYPitch() * frame->GetHeight());
1163 jobject u_buffer = jni()->NewDirectByteBuffer(
1164 const_cast<uint8*>(frame->GetUPlane()), frame->GetChromaSize());
1165 jobject v_buffer = jni()->NewDirectByteBuffer(
1166 const_cast<uint8*>(frame->GetVPlane()), frame->GetChromaSize());
1167 jni()->SetObjectArrayElement(planes, 0, y_buffer);
1168 jni()->SetObjectArrayElement(planes, 1, u_buffer);
1169 jni()->SetObjectArrayElement(planes, 2, v_buffer);
1170 return jni()->NewObject(
1171 *j_frame_class_, j_i420_frame_ctor_id_,
1172 frame->GetWidth(), frame->GetHeight(), strides, planes);
1175 // Return a VideoRenderer.I420Frame referring texture object in |frame|.
// Unpacks the NativeHandleImpl installed by the decoder to recover the Java
// texture object and GL texture id.
1176 jobject CricketToJavaTextureFrame(const cricket::VideoFrame* frame) {
1177 NativeHandleImpl* handle =
1178 reinterpret_cast<NativeHandleImpl*>(frame->GetNativeHandle());
1179 jobject texture_object = reinterpret_cast<jobject>(handle->GetHandle());
1180 int texture_id = handle->GetTextureId();
1181 return jni()->NewObject(
1182 *j_frame_class_, j_texture_frame_ctor_id_,
1183 frame->GetWidth(), frame->GetHeight(), texture_object, texture_id);
1187 return AttachCurrentThreadIfNeeded();
1190 ScopedGlobalRef<jobject> j_callbacks_;
1191 jmethodID j_set_size_id_;
1192 jmethodID j_render_frame_id_;
1193 ScopedGlobalRef<jclass> j_frame_class_;
1194 jmethodID j_i420_frame_ctor_id_;
1195 jmethodID j_texture_frame_ctor_id_;
1196 ScopedGlobalRef<jclass> j_byte_buffer_class_;
// Everything below is Android-only HW codec support, compiled out of
// Chromium builds.
1199 #if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
1200 // TODO(fischman): consider pulling MediaCodecVideoEncoder out of this file and
1201 // into its own .h/.cc pair, if/when the JNI helper stuff above is extracted
// ALOGV is verbose per-buffer logging, enabled only when TRACK_BUFFER_TIMING
// is defined (the elided #else presumably defines it away — confirm).
1204 //#define TRACK_BUFFER_TIMING
1205 #define TAG "MediaCodecVideo"
1206 #ifdef TRACK_BUFFER_TIMING
1207 #define ALOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, TAG, __VA_ARGS__)
1211 #define ALOGD(...) __android_log_print(ANDROID_LOG_DEBUG, TAG, __VA_ARGS__)
1212 #define ALOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)
1214 // Color formats supported by encoder - should mirror supportedColorList
1215 // from MediaCodecVideoEncoder.java
1216 enum COLOR_FORMATTYPE {
1217 COLOR_FormatYUV420Planar = 0x13,
1218 COLOR_FormatYUV420SemiPlanar = 0x15,
1219 COLOR_QCOM_FormatYUV420SemiPlanar = 0x7FA30C00,
1220 // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
1221 // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
1222 // This format is presumably similar to COLOR_FormatYUV420SemiPlanar,
1223 // but requires some (16, 32?) byte alignment.
1224 COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04
// Enum-hack constants (pre-constexpr style used throughout this file).
1227 // Arbitrary interval to poll the codec for new outputs.
1228 enum { kMediaCodecPollMs = 10 };
1229 // Media codec maximum output buffer ready timeout.
1230 enum { kMediaCodecTimeoutMs = 500 };
1231 // Interval to print codec statistics (bitrate, fps, encoding/decoding time).
1232 enum { kMediaCodecStatisticsIntervalMs = 3000 };
// Wall-clock-ish timestamp in milliseconds. Divides Ticks() by 1e6, which
// assumes TickTime ticks are nanoseconds — confirm against TickTime docs.
1234 static int64_t GetCurrentTimeMs() {
1235 return TickTime::Now().Ticks() / 1000000LL;
1238 // Allow Invoke() calls from from current thread.
// Null-checked because the calling thread may not be wrapped in an
// rtc::Thread (e.g. a raw JVM thread).
1239 static void AllowBlockingCalls() {
1240 Thread* current_thread = Thread::Current();
1241 if (current_thread != NULL)
1242 current_thread->SetAllowBlockingCalls(true);
1245 // MediaCodecVideoEncoder is a webrtc::VideoEncoder implementation that uses
1246 // Android's MediaCodec SDK API behind the scenes to implement (hopefully)
1247 // HW-backed video encode. This C++ class is implemented as a very thin shim,
1248 // delegating all of the interesting work to org.webrtc.MediaCodecVideoEncoder.
1249 // MediaCodecVideoEncoder is created, operated, and destroyed on a single
1250 // thread, currently the libjingle Worker thread.
1251 class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
1252 public rtc::MessageHandler {
1254 virtual ~MediaCodecVideoEncoder();
1255 explicit MediaCodecVideoEncoder(JNIEnv* jni);
1257 // webrtc::VideoEncoder implementation. Everything trampolines to
1258 // |codec_thread_| for execution.
1259 virtual int32_t InitEncode(const webrtc::VideoCodec* codec_settings,
1260 int32_t /* number_of_cores */,
1261 uint32_t /* max_payload_size */) OVERRIDE;
1262 virtual int32_t Encode(
1263 const webrtc::I420VideoFrame& input_image,
1264 const webrtc::CodecSpecificInfo* /* codec_specific_info */,
1265 const std::vector<webrtc::VideoFrameType>* frame_types) OVERRIDE;
1266 virtual int32_t RegisterEncodeCompleteCallback(
1267 webrtc::EncodedImageCallback* callback) OVERRIDE;
1268 virtual int32_t Release() OVERRIDE;
1269 virtual int32_t SetChannelParameters(uint32_t /* packet_loss */,
1270 int /* rtt */) OVERRIDE;
1271 virtual int32_t SetRates(uint32_t new_bit_rate, uint32_t frame_rate) OVERRIDE;
// Handles the self-posted periodic poll message (see OnMessage below).
1273 // rtc::MessageHandler implementation.
1274 virtual void OnMessage(rtc::Message* msg) OVERRIDE;
1277 // CHECK-fail if not running on |codec_thread_|.
1278 void CheckOnCodecThread();
// (ResetCodec — declaration line elided in this listing.)
1280 // Release() and InitEncode() in an attempt to restore the codec to an
1281 // operable state. Necessary after all manner of OMX-layer errors.
1284 // Implementation of webrtc::VideoEncoder methods above, all running on the
1285 // codec thread exclusively.
1287 // If width==0 then this is assumed to be a re-initialization and the
1288 // previously-current values are reused instead of the passed parameters
1289 // (makes it easier to reason about thread-safety).
1290 int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps);
1291 int32_t EncodeOnCodecThread(
1292 const webrtc::I420VideoFrame& input_image,
1293 const std::vector<webrtc::VideoFrameType>* frame_types);
1294 int32_t RegisterEncodeCompleteCallbackOnCodecThread(
1295 webrtc::EncodedImageCallback* callback);
1296 int32_t ReleaseOnCodecThread();
1297 int32_t SetRatesOnCodecThread(uint32_t new_bit_rate, uint32_t frame_rate);
1299 // Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members.
1300 int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info);
1301 jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info);
1302 bool GetOutputBufferInfoIsKeyFrame(JNIEnv* jni, jobject j_output_buffer_info);
1303 jlong GetOutputBufferInfoPresentationTimestampUs(
1305 jobject j_output_buffer_info);
1307 // Deliver any outputs pending in the MediaCodec to our |callback_| and return
1309 bool DeliverPendingOutputs(JNIEnv* jni);
1311 // Valid all the time since RegisterEncodeCompleteCallback() Invoke()s to
1312 // |codec_thread_| synchronously.
1313 webrtc::EncodedImageCallback* callback_;
1315 // State that is constant for the lifetime of this object once the ctor
1317 scoped_ptr<Thread> codec_thread_; // Thread on which to operate MediaCodec.
1318 ScopedGlobalRef<jclass> j_media_codec_video_encoder_class_;
1319 ScopedGlobalRef<jobject> j_media_codec_video_encoder_;
// Cached jmethodID/jfieldID handles into the Java encoder class, resolved
// once in the ctor.
1320 jmethodID j_init_encode_method_;
1321 jmethodID j_dequeue_input_buffer_method_;
1322 jmethodID j_encode_method_;
1323 jmethodID j_release_method_;
1324 jmethodID j_set_rates_method_;
1325 jmethodID j_dequeue_output_buffer_method_;
1326 jmethodID j_release_output_buffer_method_;
1327 jfieldID j_color_format_field_;
1328 jfieldID j_info_index_field_;
1329 jfieldID j_info_buffer_field_;
1330 jfieldID j_info_is_key_frame_field_;
1331 jfieldID j_info_presentation_timestamp_us_field_;
1333 // State that is valid only between InitEncode() and the next Release().
1334 // Touched only on codec_thread_ so no explicit synchronization necessary.
1335 int width_; // Frame width in pixels.
1336 int height_; // Frame height in pixels.
1338 uint16_t picture_id_;
1339 enum libyuv::FourCC encoder_fourcc_; // Encoder color space format.
1340 int last_set_bitrate_kbps_; // Last-requested bitrate in kbps.
1341 int last_set_fps_; // Last-requested frame rate.
1342 int64_t current_timestamp_us_; // Current frame timestamps in us.
1343 int frames_received_; // Number of frames received by encoder.
1344 int frames_dropped_; // Number of frames dropped by encoder.
1345 int frames_resolution_update_; // Number of frames with new codec resolution.
1346 int frames_in_queue_; // Number of frames in encoder queue.
1347 int64_t start_time_ms_; // Start time for statistics.
1348 int current_frames_; // Number of frames in the current statistics interval.
1349 int current_bytes_; // Encoded bytes in the current statistics interval.
1350 int current_encoding_time_ms_; // Overall encoding time in the current second
1351 int64_t last_input_timestamp_ms_; // Timestamp of last received yuv frame.
1352 int64_t last_output_timestamp_ms_; // Timestamp of last encoded frame.
// The three parallel queues below are pushed in Encode and popped when the
// corresponding encoded output is delivered.
1353 std::vector<int32_t> timestamps_; // Video frames timestamp queue.
1354 std::vector<int64_t> render_times_ms_; // Video frames render time queue.
1355 std::vector<int64_t> frame_rtc_times_ms_; // Time when video frame is sent to
1357 // Frame size in bytes fed to MediaCodec.
1359 // True only when between a callback_->Encoded() call return a positive value
1360 // and the next Encode() call being ignored.
1361 bool drop_next_input_frame_;
1362 // Global references; must be deleted in Release().
1363 std::vector<jobject> input_buffers_;
1366 MediaCodecVideoEncoder::~MediaCodecVideoEncoder() {
1367 // Call Release() to ensure no more callbacks to us after we are deleted.
// Ctor: instantiates the Java org.webrtc.MediaCodecVideoEncoder peer, spins
// up the dedicated codec thread, and caches every method/field ID used by
// the per-frame paths so they never pay lookup cost.
1371 MediaCodecVideoEncoder::MediaCodecVideoEncoder(JNIEnv* jni)
1375 codec_thread_(new Thread()),
1376 j_media_codec_video_encoder_class_(
1378 FindClass(jni, "org/webrtc/MediaCodecVideoEncoder")),
1379 j_media_codec_video_encoder_(
1381 jni->NewObject(*j_media_codec_video_encoder_class_,
1383 *j_media_codec_video_encoder_class_,
1386 ScopedLocalRefFrame local_ref_frame(jni);
1387 // It would be nice to avoid spinning up a new thread per MediaCodec, and
1388 // instead re-use e.g. the PeerConnectionFactory's |worker_thread_|, but bug
1389 // 2732 means that deadlocks abound. This class synchronously trampolines
1390 // to |codec_thread_|, so if anything else can be coming to _us_ from
1391 // |codec_thread_|, or from any thread holding the |_sendCritSect| described
1392 // in the bug, we have a problem. For now work around that with a dedicated
1394 codec_thread_->SetName("MediaCodecVideoEncoder", NULL);
1395 CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoEncoder";
1397 jclass j_output_buffer_info_class =
1398 FindClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
// initEncode(width, height, kbps, fps) returns the codec's input
// ByteBuffer[] (see InitEncodeOnCodecThread).
1399 j_init_encode_method_ = GetMethodID(jni,
1400 *j_media_codec_video_encoder_class_,
1402 "(IIII)[Ljava/nio/ByteBuffer;");
1403 j_dequeue_input_buffer_method_ = GetMethodID(
1404 jni, *j_media_codec_video_encoder_class_, "dequeueInputBuffer", "()I");
1405 j_encode_method_ = GetMethodID(
1406 jni, *j_media_codec_video_encoder_class_, "encode", "(ZIIJ)Z");
1408 GetMethodID(jni, *j_media_codec_video_encoder_class_, "release", "()V");
1409 j_set_rates_method_ = GetMethodID(
1410 jni, *j_media_codec_video_encoder_class_, "setRates", "(II)Z");
1411 j_dequeue_output_buffer_method_ =
1413 *j_media_codec_video_encoder_class_,
1414 "dequeueOutputBuffer",
1415 "()Lorg/webrtc/MediaCodecVideoEncoder$OutputBufferInfo;");
1416 j_release_output_buffer_method_ = GetMethodID(
1417 jni, *j_media_codec_video_encoder_class_, "releaseOutputBuffer", "(I)Z");
1419 j_color_format_field_ =
1420 GetFieldID(jni, *j_media_codec_video_encoder_class_, "colorFormat", "I");
1421 j_info_index_field_ =
1422 GetFieldID(jni, j_output_buffer_info_class, "index", "I");
1423 j_info_buffer_field_ = GetFieldID(
1424 jni, j_output_buffer_info_class, "buffer", "Ljava/nio/ByteBuffer;");
1425 j_info_is_key_frame_field_ =
1426 GetFieldID(jni, j_output_buffer_info_class, "isKeyFrame", "Z");
1427 j_info_presentation_timestamp_us_field_ = GetFieldID(
1428 jni, j_output_buffer_info_class, "presentationTimestampUs", "J");
1429 CHECK_EXCEPTION(jni) << "MediaCodecVideoEncoder ctor failed";
// Permit synchronous Invoke() from the constructing thread to codec_thread_.
1430 AllowBlockingCalls();
// Public InitEncode: validates codec type (VP8-only here), then blocks while
// InitEncodeOnCodecThread runs on |codec_thread_| with the requested
// resolution, start bitrate and max framerate.
1433 int32_t MediaCodecVideoEncoder::InitEncode(
1434 const webrtc::VideoCodec* codec_settings,
1435 int32_t /* number_of_cores */,
1436 uint32_t /* max_payload_size */) {
1437 // Factory should guard against other codecs being used with us.
1438 CHECK(codec_settings->codecType == kVideoCodecVP8) << "Unsupported codec";
1440 return codec_thread_->Invoke<int32_t>(
1441 Bind(&MediaCodecVideoEncoder::InitEncodeOnCodecThread,
1443 codec_settings->width,
1444 codec_settings->height,
1445 codec_settings->startBitrate,
1446 codec_settings->maxFramerate));
// Public Encode: synchronous trampoline to EncodeOnCodecThread (frame is
// passed by value through the Bind, per the thread-safety note on the class).
1449 int32_t MediaCodecVideoEncoder::Encode(
1450 const webrtc::I420VideoFrame& frame,
1451 const webrtc::CodecSpecificInfo* /* codec_specific_info */,
1452 const std::vector<webrtc::VideoFrameType>* frame_types) {
1453 return codec_thread_->Invoke<int32_t>(Bind(
1454 &MediaCodecVideoEncoder::EncodeOnCodecThread, this, frame, frame_types));
// Trampoline: installs |callback| on the codec thread so callback_ is only
// ever touched there.
1457 int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallback(
1458 webrtc::EncodedImageCallback* callback) {
1459 return codec_thread_->Invoke<int32_t>(
1460 Bind(&MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread,
// Trampoline to ReleaseOnCodecThread (frees input-buffer global refs and
// releases the Java codec).
1465 int32_t MediaCodecVideoEncoder::Release() {
1466 return codec_thread_->Invoke<int32_t>(
1467 Bind(&MediaCodecVideoEncoder::ReleaseOnCodecThread, this));
// Packet loss / RTT hints are intentionally ignored by this encoder.
1470 int32_t MediaCodecVideoEncoder::SetChannelParameters(uint32_t /* packet_loss */,
1472 return WEBRTC_VIDEO_CODEC_OK;
// Trampoline: forwards new target bitrate/framerate to SetRatesOnCodecThread.
1475 int32_t MediaCodecVideoEncoder::SetRates(uint32_t new_bit_rate,
1476 uint32_t frame_rate) {
1477 return codec_thread_->Invoke<int32_t>(
1478 Bind(&MediaCodecVideoEncoder::SetRatesOnCodecThread,
// Periodic poll tick, self-posted on |codec_thread_|: drain any ready
// MediaCodec outputs, then re-arm the kMediaCodecPollMs timer.
1484 void MediaCodecVideoEncoder::OnMessage(rtc::Message* msg) {
1485 JNIEnv* jni = AttachCurrentThreadIfNeeded();
1486 ScopedLocalRefFrame local_ref_frame(jni);
1488 // We only ever send one message to |this| directly (not through a Bind()'d
1489 // functor), so expect no ID/data.
1490 CHECK(!msg->message_id) << "Unexpected message!";
1491 CHECK(!msg->pdata) << "Unexpected message!";
1492 CheckOnCodecThread();
1497 // It would be nice to recover from a failure here if one happened, but it's
1498 // unclear how to signal such a failure to the app, so instead we stay silent
1499 // about it and let the next app-called API method reveal the borkedness.
1500 DeliverPendingOutputs(jni);
1501 codec_thread_->PostDelayed(kMediaCodecPollMs, this);
// Debug guard: CHECK-fails if the caller is not on |codec_thread_|.
1504 void MediaCodecVideoEncoder::CheckOnCodecThread() {
1505 CHECK(codec_thread_ == ThreadManager::Instance()->CurrentThread())
1506 << "Running on wrong thread!";
// Best-effort recovery after an OMX-layer error: Release then re-InitEncode
// with width/height 0,0 (sentinel meaning "reuse previous settings", see
// InitEncodeOnCodecThread). Failures here are logged-and-swallowed.
1509 void MediaCodecVideoEncoder::ResetCodec() {
1510 ALOGE("ResetCodec");
1511 if (Release() != WEBRTC_VIDEO_CODEC_OK ||
1512 codec_thread_->Invoke<int32_t>(Bind(
1513 &MediaCodecVideoEncoder::InitEncodeOnCodecThread, this,
1514 width_, height_, 0, 0)) != WEBRTC_VIDEO_CODEC_OK) {
1515 // TODO(fischman): wouldn't it be nice if there was a way to gracefully
1516 // degrade to a SW encoder at this point? There isn't one AFAICT :(
1517 // https://code.google.com/p/webrtc/issues/detail?id=2920
// Codec-thread InitEncode body: resets all statistics/queues, calls the Java
// initEncode() to obtain the codec's input ByteBuffer[], records the codec's
// chosen color format, pins global refs to each input buffer, and starts the
// output-polling timer. kbps/fps of 0 mean "reuse last-set values" (the
// re-init path used by ResetCodec).
1521 int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
1522 int width, int height, int kbps, int fps) {
1523 CheckOnCodecThread();
1524 JNIEnv* jni = AttachCurrentThreadIfNeeded();
1525 ScopedLocalRefFrame local_ref_frame(jni);
1527 ALOGD("InitEncodeOnCodecThread %d x %d. Bitrate: %d kbps. Fps: %d",
1528 width, height, kbps, fps);
1530 kbps = last_set_bitrate_kbps_;
1533 fps = last_set_fps_;
1538 last_set_bitrate_kbps_ = kbps;
1539 last_set_fps_ = fps;
// I420 frame size: Y plane plus two quarter-size chroma planes.
1540 yuv_size_ = width_ * height_ * 3 / 2;
1541 frames_received_ = 0;
1542 frames_dropped_ = 0;
1543 frames_resolution_update_ = 0;
1544 frames_in_queue_ = 0;
1545 current_timestamp_us_ = 0;
1546 start_time_ms_ = GetCurrentTimeMs();
1547 current_frames_ = 0;
1549 current_encoding_time_ms_ = 0;
1550 last_input_timestamp_ms_ = -1;
1551 last_output_timestamp_ms_ = -1;
1552 timestamps_.clear();
1553 render_times_ms_.clear();
1554 frame_rtc_times_ms_.clear();
1555 drop_next_input_frame_ = false;
// Random 15-bit starting picture id, per VP8 payload descriptor rules.
1556 picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF;
1557 // We enforce no extra stride/padding in the format creation step.
1558 jobjectArray input_buffers = reinterpret_cast<jobjectArray>(
1559 jni->CallObjectMethod(*j_media_codec_video_encoder_,
1560 j_init_encode_method_,
1565 CHECK_EXCEPTION(jni);
1566 if (IsNull(jni, input_buffers))
1567 return WEBRTC_VIDEO_CODEC_ERROR;
// Map the Java-side colorFormat to the libyuv FOURCC used when converting
// input I420 frames into the codec's input buffers.
1570 switch (GetIntField(jni, *j_media_codec_video_encoder_,
1571 j_color_format_field_)) {
1572 case COLOR_FormatYUV420Planar:
1573 encoder_fourcc_ = libyuv::FOURCC_YU12;
1575 case COLOR_FormatYUV420SemiPlanar:
1576 case COLOR_QCOM_FormatYUV420SemiPlanar:
1577 case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m:
1578 encoder_fourcc_ = libyuv::FOURCC_NV12;
1581 LOG(LS_ERROR) << "Wrong color format.";
1582 return WEBRTC_VIDEO_CODEC_ERROR;
1584 size_t num_input_buffers = jni->GetArrayLength(input_buffers);
1585 CHECK(input_buffers_.empty())
1586 << "Unexpected double InitEncode without Release";
1587 input_buffers_.resize(num_input_buffers);
// Global refs keep each direct input buffer alive until Release(); verify
// each has capacity for a full frame.
1588 for (size_t i = 0; i < num_input_buffers; ++i) {
1590 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
1591 int64 yuv_buffer_capacity =
1592 jni->GetDirectBufferCapacity(input_buffers_[i]);
1593 CHECK_EXCEPTION(jni);
1594 CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity";
1596 CHECK_EXCEPTION(jni);
// Kick off the periodic output poll (see OnMessage).
1598 codec_thread_->PostDelayed(kMediaCodecPollMs, this);
1599 return WEBRTC_VIDEO_CODEC_OK;
// Codec-thread Encode body. Flow: drain pending outputs; apply drop policy
// (failed callback, resolution churn, queue depth/latency); dequeue a codec
// input buffer; convert the I420 frame into it via libyuv; queue the buffer
// to the Java encoder; drain outputs again.
1602 int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
1603 const webrtc::I420VideoFrame& frame,
1604 const std::vector<webrtc::VideoFrameType>* frame_types) {
1605 CheckOnCodecThread();
1606 JNIEnv* jni = AttachCurrentThreadIfNeeded();
1607 ScopedLocalRefFrame local_ref_frame(jni);
1610 return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
1613 if (!DeliverPendingOutputs(jni)) {
1615 // Continue as if everything's fine.
// Skip one input frame after callback_->Encoded() reported failure.
1618 if (drop_next_input_frame_) {
1619 ALOGV("Encoder drop frame - failed callback.");
1620 drop_next_input_frame_ = false;
1621 return WEBRTC_VIDEO_CODEC_OK;
1624 CHECK(frame_types->size() == 1) << "Unexpected stream count";
// Tolerate up to 3 frames at a new resolution before resetting the codec
// with the new dimensions; frames in between are dropped.
1625 if (frame.width() != width_ || frame.height() != height_) {
1626 frames_resolution_update_++;
1627 ALOGD("Unexpected frame resolution change from %d x %d to %d x %d",
1628 width_, height_, frame.width(), frame.height());
1629 if (frames_resolution_update_ > 3) {
1630 // Reset codec if we received more than 3 frames with new resolution.
1631 width_ = frame.width();
1632 height_ = frame.height();
1633 frames_resolution_update_ = 0;
1636 return WEBRTC_VIDEO_CODEC_OK;
1638 frames_resolution_update_ = 0;
1640 bool key_frame = frame_types->front() != webrtc::kDeltaFrame;
1642 // Check if we accumulated too many frames in encoder input buffers
1643 // or the encoder latency exceeds 70 ms and drop frame if so.
1644 if (frames_in_queue_ > 0 && last_input_timestamp_ms_ >= 0) {
1645 int encoder_latency_ms = last_input_timestamp_ms_ -
1646 last_output_timestamp_ms_;
1647 if (frames_in_queue_ > 2 || encoder_latency_ms > 70) {
1648 ALOGD("Drop frame - encoder is behind by %d ms. Q size: %d",
1649 encoder_latency_ms, frames_in_queue_);
1651 return WEBRTC_VIDEO_CODEC_OK;
// dequeueInputBuffer: -1 = none available (drop frame), -2 = codec error.
1655 int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_,
1656 j_dequeue_input_buffer_method_);
1657 CHECK_EXCEPTION(jni);
1658 if (j_input_buffer_index == -1) {
1659 // Video codec falls behind - no input buffer available.
1660 ALOGV("Encoder drop frame - no input buffers available");
1662 return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887.
1664 if (j_input_buffer_index == -2) {
1666 return WEBRTC_VIDEO_CODEC_ERROR;
1669 ALOGV("Encode frame # %d. Buffer # %d. TS: %lld.",
1670 frames_received_, j_input_buffer_index, current_timestamp_us_ / 1000);
// Convert the I420 frame straight into the codec's direct input buffer in
// the color format chosen at init (encoder_fourcc_).
1672 jobject j_input_buffer = input_buffers_[j_input_buffer_index];
1674 reinterpret_cast<uint8*>(jni->GetDirectBufferAddress(j_input_buffer));
1675 CHECK_EXCEPTION(jni);
1676 CHECK(yuv_buffer) << "Indirect buffer??";
1677 CHECK(!libyuv::ConvertFromI420(
1678 frame.buffer(webrtc::kYPlane), frame.stride(webrtc::kYPlane),
1679 frame.buffer(webrtc::kUPlane), frame.stride(webrtc::kUPlane),
1680 frame.buffer(webrtc::kVPlane), frame.stride(webrtc::kVPlane),
1684 << "ConvertFromI420 failed";
1685 last_input_timestamp_ms_ = current_timestamp_us_ / 1000;
1688 // Save input image timestamps for later output
1689 timestamps_.push_back(frame.timestamp());
1690 render_times_ms_.push_back(frame.render_time_ms());
1691 frame_rtc_times_ms_.push_back(GetCurrentTimeMs());
1693 bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
1696 j_input_buffer_index,
1698 current_timestamp_us_);
1699 CHECK_EXCEPTION(jni);
// Synthesized timestamps advance at the configured frame rate rather than
// echoing capture time.
1700 current_timestamp_us_ += 1000000 / last_set_fps_;
1702 if (!encode_status || !DeliverPendingOutputs(jni)) {
1704 return WEBRTC_VIDEO_CODEC_ERROR;
1707 return WEBRTC_VIDEO_CODEC_OK;
// Stores |callback| so DeliverPendingOutputs() can hand encoded frames back
// to WebRTC. Must run on |codec_thread_| (enforced by CheckOnCodecThread()).
// The callback is borrowed, not owned; the caller keeps it alive.
1710 int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread(
1711 webrtc::EncodedImageCallback* callback) {
1712 CheckOnCodecThread();
1713 JNIEnv* jni = AttachCurrentThreadIfNeeded();
1714 ScopedLocalRefFrame local_ref_frame(jni);
1715 callback_ = callback;
1716 return WEBRTC_VIDEO_CODEC_OK;
// Releases the Java MediaCodec encoder and all JNI global refs to its input
// buffers, then clears any pending messages addressed to this object.
// NOTE(review): the early return below is presumably guarded by a "not
// initialized" check on a line elided from this listing — confirm against
// the full source.
1719 int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() {
1721 return WEBRTC_VIDEO_CODEC_OK;
1723 CheckOnCodecThread();
1724 JNIEnv* jni = AttachCurrentThreadIfNeeded();
1725 ALOGD("EncoderRelease: Frames received: %d. Frames dropped: %d.",
1726 frames_received_,frames_dropped_);
1727 ScopedLocalRefFrame local_ref_frame(jni);
// Drop the global refs taken when the input buffers were cached; without
// this the Java ByteBuffers would leak across codec restarts.
1728 for (size_t i = 0; i < input_buffers_.size(); ++i)
1729 jni->DeleteGlobalRef(input_buffers_[i]);
1730 input_buffers_.clear();
1731 jni->CallVoidMethod(*j_media_codec_video_encoder_, j_release_method_);
1732 CHECK_EXCEPTION(jni);
// Cancel the delayed poll messages posted to this MessageHandler.
1733 rtc::MessageQueueManager::Clear(this);
1735 return WEBRTC_VIDEO_CODEC_OK;
// Pushes a new target bitrate (kbps) and framerate down to the Java encoder
// via setRates(). Skips the JNI call when neither value changed; zero values
// are treated as "keep the previous setting".
1738 int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate,
1739 uint32_t frame_rate) {
1740 CheckOnCodecThread();
1741 if (last_set_bitrate_kbps_ == new_bit_rate &&
1742 last_set_fps_ == frame_rate) {
1743 return WEBRTC_VIDEO_CODEC_OK;
1745 JNIEnv* jni = AttachCurrentThreadIfNeeded();
1746 ScopedLocalRefFrame local_ref_frame(jni);
1747 if (new_bit_rate > 0) {
1748 last_set_bitrate_kbps_ = new_bit_rate;
1750 if (frame_rate > 0) {
1751 last_set_fps_ = frame_rate;
1753 bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
1754 j_set_rates_method_,
1755 last_set_bitrate_kbps_,
// NOTE(review): the fps argument and the !ret error branch sit on lines
// elided from this listing — confirm against the full source.
1757 CHECK_EXCEPTION(jni);
1760 return WEBRTC_VIDEO_CODEC_ERROR;
1762 return WEBRTC_VIDEO_CODEC_OK;
// Thin JNI field accessors for the Java OutputBufferInfo object returned by
// dequeueOutputBuffer(): buffer index, the ByteBuffer itself, the key-frame
// flag, and the presentation timestamp in microseconds.
1765 int MediaCodecVideoEncoder::GetOutputBufferInfoIndex(
1767 jobject j_output_buffer_info) {
1768 return GetIntField(jni, j_output_buffer_info, j_info_index_field_);
1771 jobject MediaCodecVideoEncoder::GetOutputBufferInfoBuffer(
1773 jobject j_output_buffer_info) {
1774 return GetObjectField(jni, j_output_buffer_info, j_info_buffer_field_);
1777 bool MediaCodecVideoEncoder::GetOutputBufferInfoIsKeyFrame(
1779 jobject j_output_buffer_info) {
1780 return GetBooleanField(jni, j_output_buffer_info, j_info_is_key_frame_field_);
1783 jlong MediaCodecVideoEncoder::GetOutputBufferInfoPresentationTimestampUs(
1785 jobject j_output_buffer_info) {
1786 return GetLongField(
1787 jni, j_output_buffer_info, j_info_presentation_timestamp_us_field_);
// Drains one encoded frame (if any) from the Java MediaCodec: dequeues an
// output buffer, pairs it with the timestamps saved at input time, wraps the
// payload in an EncodedImage with VP8 codec-specific info and a single-
// fragment RTP header, invokes |callback_|->Encoded(), and returns the
// buffer to the codec. Also accumulates periodic bitrate/fps/encode-time
// statistics. Returns false on error (elided branches), true otherwise.
1790 bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
1792 jobject j_output_buffer_info = jni->CallObjectMethod(
1793 *j_media_codec_video_encoder_, j_dequeue_output_buffer_method_);
1794 CHECK_EXCEPTION(jni);
// Null info object means no output is ready yet (early-out body elided).
1795 if (IsNull(jni, j_output_buffer_info)) {
1799 int output_buffer_index =
1800 GetOutputBufferInfoIndex(jni, j_output_buffer_info);
1801 if (output_buffer_index == -1) {
1806 // Get frame timestamps from a queue.
1807 last_output_timestamp_ms_ =
1808 GetOutputBufferInfoPresentationTimestampUs(jni, j_output_buffer_info) /
// Timestamp queues are FIFO and parallel to the encoder input order, so
// front() corresponds to the frame now leaving the codec.
1810 int32_t timestamp = timestamps_.front();
1811 timestamps_.erase(timestamps_.begin());
1812 int64_t render_time_ms = render_times_ms_.front();
1813 render_times_ms_.erase(render_times_ms_.begin());
1814 int64_t frame_encoding_time_ms = GetCurrentTimeMs() -
1815 frame_rtc_times_ms_.front();
1816 frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
1819 // Extract payload and key frame flag.
1820 int32_t callback_status = 0;
1821 jobject j_output_buffer =
1822 GetOutputBufferInfoBuffer(jni, j_output_buffer_info);
1823 bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info);
1824 size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer);
1825 uint8* payload = reinterpret_cast<uint8_t*>(
1826 jni->GetDirectBufferAddress(j_output_buffer));
1827 CHECK_EXCEPTION(jni);
1829 ALOGV("Encoder got output buffer # %d. Size: %d. TS: %lld. Latency: %lld."
1831 output_buffer_index, payload_size, last_output_timestamp_ms_,
1832 last_input_timestamp_ms_ - last_output_timestamp_ms_,
1833 frame_encoding_time_ms);
1835 // Calculate and print encoding statistics - every 3 seconds.
1837 current_bytes_ += payload_size;
1838 current_encoding_time_ms_ += frame_encoding_time_ms;
1839 int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
1840 if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
1841 current_frames_ > 0) {
1842 ALOGD("Encoder bitrate: %d, target: %d kbps, fps: %d,"
1843 " encTime: %d for last %d ms",
1844 current_bytes_ * 8 / statistic_time_ms,
1845 last_set_bitrate_kbps_,
// Rounded fps: add half the interval before dividing.
1846 (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms,
1847 current_encoding_time_ms_ / current_frames_, statistic_time_ms);
1848 start_time_ms_ = GetCurrentTimeMs();
1849 current_frames_ = 0;
1851 current_encoding_time_ms_ = 0;
1854 // Callback - return encoded frame.
// EncodedImage wraps |payload| without copying; it must not outlive the
// releaseOutputBuffer() call below.
1856 scoped_ptr<webrtc::EncodedImage> image(
1857 new webrtc::EncodedImage(payload, payload_size, payload_size));
1858 image->_encodedWidth = width_;
1859 image->_encodedHeight = height_;
1860 image->_timeStamp = timestamp;
1861 image->capture_time_ms_ = render_time_ms;
1862 image->_frameType = (key_frame ? webrtc::kKeyFrame : webrtc::kDeltaFrame);
1863 image->_completeFrame = true;
1865 webrtc::CodecSpecificInfo info;
1866 memset(&info, 0, sizeof(info));
1867 info.codecType = kVideoCodecVP8;
// No temporal layers / simulcast from MediaCodec: fill neutral VP8 info.
1868 info.codecSpecific.VP8.pictureId = picture_id_;
1869 info.codecSpecific.VP8.nonReference = false;
1870 info.codecSpecific.VP8.simulcastIdx = 0;
1871 info.codecSpecific.VP8.temporalIdx = webrtc::kNoTemporalIdx;
1872 info.codecSpecific.VP8.layerSync = false;
1873 info.codecSpecific.VP8.tl0PicIdx = webrtc::kNoTl0PicIdx;
1874 info.codecSpecific.VP8.keyIdx = webrtc::kNoKeyIdx;
// Picture id is 15-bit per VP8 payload spec; wrap explicitly.
1875 picture_id_ = (picture_id_ + 1) & 0x7FFF;
1877 // Generate a header describing a single fragment.
1878 webrtc::RTPFragmentationHeader header;
1879 memset(&header, 0, sizeof(header));
1880 header.VerifyAndAllocateFragmentationHeader(1);
1881 header.fragmentationOffset[0] = 0;
1882 header.fragmentationLength[0] = image->_length;
1883 header.fragmentationPlType[0] = 0;
1884 header.fragmentationTimeDiff[0] = 0;
1886 callback_status = callback_->Encoded(*image, &info, &header);
1889 // Return output buffer back to the encoder.
1890 bool success = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
1891 j_release_output_buffer_method_,
1892 output_buffer_index);
1893 CHECK_EXCEPTION(jni);
// A positive callback status asks us to drop the next input frame
// (see EncodeOnCodecThread's drop_next_input_frame_ handling).
1899 if (callback_status > 0) {
1900 drop_next_input_frame_ = true;
1901 // Theoretically could handle callback_status<0 here, but unclear what that
1902 // would mean for us.
1909 // Simplest-possible implementation of an encoder factory, churns out
1910 // MediaCodecVideoEncoders on demand (or errors, if that's not possible).
1911 class MediaCodecVideoEncoderFactory
1912 : public cricket::WebRtcVideoEncoderFactory {
1914 MediaCodecVideoEncoderFactory();
1915 virtual ~MediaCodecVideoEncoderFactory();
1917 // WebRtcVideoEncoderFactory implementation.
1918 virtual webrtc::VideoEncoder* CreateVideoEncoder(webrtc::VideoCodecType type)
1920 virtual const std::vector<VideoCodec>& codecs() const OVERRIDE;
1921 virtual void DestroyVideoEncoder(webrtc::VideoEncoder* encoder) OVERRIDE;
1924 // Empty if platform support is lacking, const after ctor returns.
1925 std::vector<VideoCodec> supported_codecs_;
// Probes the Java side for hardware-encoder support; if present, advertises
// a single hard-coded VP8 capability (up to 1280x1280 @ 30fps). Leaves
// |supported_codecs_| empty (and CreateVideoEncoder failing) otherwise.
1928 MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() {
1929 JNIEnv* jni = AttachCurrentThreadIfNeeded();
1930 ScopedLocalRefFrame local_ref_frame(jni);
1931 jclass j_encoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoEncoder");
1932 bool is_platform_supported = jni->CallStaticBooleanMethod(
1934 GetStaticMethodID(jni, j_encoder_class, "isPlatformSupported", "()Z"));
1935 CHECK_EXCEPTION(jni);
1936 if (!is_platform_supported)
1939 // Wouldn't it be nice if MediaCodec exposed the maximum capabilities of the
1940 // encoder? Sure would be. Too bad it doesn't. So we hard-code some
1941 // reasonable defaults.
1942 supported_codecs_.push_back(
1943 VideoCodec(kVideoCodecVP8, "VP8", 1280, 1280, 30));
// Nothing to tear down: supported_codecs_ cleans itself up.
1946 MediaCodecVideoEncoderFactory::~MediaCodecVideoEncoderFactory() {}
// Returns a new MediaCodecVideoEncoder for VP8 when the platform supports
// it; the guard's failure return sits on an elided line. Caller takes
// ownership and must release via DestroyVideoEncoder().
1948 webrtc::VideoEncoder* MediaCodecVideoEncoderFactory::CreateVideoEncoder(
1949 webrtc::VideoCodecType type) {
1950 if (type != kVideoCodecVP8 || supported_codecs_.empty())
1952 return new MediaCodecVideoEncoder(AttachCurrentThreadIfNeeded());
// Read-only view of the capabilities discovered in the constructor; empty
// when the platform lacks hardware encoding.
1955 const std::vector<MediaCodecVideoEncoderFactory::VideoCodec>&
1956 MediaCodecVideoEncoderFactory::codecs() const {
1957 return supported_codecs_;
// Disposes an encoder created by CreateVideoEncoder(); body (presumably a
// delete of |encoder|) is elided from this listing.
1960 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder(
1961 webrtc::VideoEncoder* encoder) {
// Hardware video decoder backed by the Java org.webrtc.MediaCodecVideoDecoder
// class. All MediaCodec interaction happens on a dedicated |codec_thread_|;
// rtc::MessageHandler is implemented so the codec can be polled via delayed
// messages (see OnMessage). Supports two output paths: copy-out to an
// I420VideoFrame, or zero-copy rendering to a SurfaceTexture when a render
// EGL context was supplied via SetAndroidObjects().
1965 class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
1966 public rtc::MessageHandler {
1968 explicit MediaCodecVideoDecoder(JNIEnv* jni);
1969 virtual ~MediaCodecVideoDecoder();
// Installs (or clears) the process-wide EGL context used for surface
// decoding; a global, shared by all decoder instances.
1971 static int SetAndroidObjects(JNIEnv* jni, jobject render_egl_context);
1973 virtual int32_t InitDecode(const VideoCodec* codecSettings,
1974 int32_t numberOfCores) OVERRIDE;
1977 Decode(const EncodedImage& inputImage, bool missingFrames,
1978 const RTPFragmentationHeader* fragmentation,
1979 const CodecSpecificInfo* codecSpecificInfo = NULL,
1980 int64_t renderTimeMs = -1) OVERRIDE;
1982 virtual int32_t RegisterDecodeCompleteCallback(
1983 DecodedImageCallback* callback) OVERRIDE;
1985 virtual int32_t Release() OVERRIDE;
1987 virtual int32_t Reset() OVERRIDE;
1988 // rtc::MessageHandler implementation.
1989 virtual void OnMessage(rtc::Message* msg) OVERRIDE;
1992 // CHECK-fail if not running on |codec_thread_|.
1993 void CheckOnCodecThread();
// Codec-thread counterparts of the public entry points above.
1995 int32_t InitDecodeOnCodecThread();
1996 int32_t ReleaseOnCodecThread();
1997 int32_t DecodeOnCodecThread(const EncodedImage& inputImage);
1998 // Deliver any outputs pending in the MediaCodec to our |callback_| and return
2000 bool DeliverPendingOutputs(JNIEnv* jni, int dequeue_timeout_us);
2003 bool key_frame_required_;
2008 I420VideoFrame decoded_image_;
2009 NativeHandleImpl native_handle_;
2010 DecodedImageCallback* callback_;
2011 int frames_received_; // Number of frames received by decoder.
2012 int frames_decoded_; // Number of frames decoded by decoder
2013 int64_t start_time_ms_; // Start time for statistics.
2014 int current_frames_; // Number of frames in the current statistics interval.
2015 int current_bytes_; // Encoded bytes in the current statistics interval.
2016 int current_decoding_time_ms_; // Overall decoding time in the current second
2017 uint32_t max_pending_frames_; // Maximum number of pending input frames
// Per-frame bookkeeping queues, pushed at input and popped at output; kept
// parallel so front() always refers to the oldest in-flight frame.
2018 std::vector<int32_t> timestamps_;
2019 std::vector<int64_t> ntp_times_ms_;
2020 std::vector<int64_t> frame_rtc_times_ms_; // Time when video frame is sent to
2023 // State that is constant for the lifetime of this object once the ctor
2025 scoped_ptr<Thread> codec_thread_; // Thread on which to operate MediaCodec.
2026 ScopedGlobalRef<jclass> j_media_codec_video_decoder_class_;
2027 ScopedGlobalRef<jobject> j_media_codec_video_decoder_;
// Cached jmethodIDs/jfieldIDs resolved once in the ctor to avoid repeated
// reflection lookups per frame.
2028 jmethodID j_init_decode_method_;
2029 jmethodID j_release_method_;
2030 jmethodID j_dequeue_input_buffer_method_;
2031 jmethodID j_queue_input_buffer_method_;
2032 jmethodID j_dequeue_output_buffer_method_;
2033 jmethodID j_release_output_buffer_method_;
2034 // MediaCodecVideoDecoder fields.
2035 jfieldID j_input_buffers_field_;
2036 jfieldID j_output_buffers_field_;
2037 jfieldID j_color_format_field_;
2038 jfieldID j_width_field_;
2039 jfieldID j_height_field_;
2040 jfieldID j_stride_field_;
2041 jfieldID j_slice_height_field_;
2042 jfieldID j_surface_texture_field_;
2043 jfieldID j_textureID_field_;
2044 // MediaCodecVideoDecoder.DecoderOutputBufferInfo fields.
2045 jfieldID j_info_index_field_;
2046 jfieldID j_info_offset_field_;
2047 jfieldID j_info_size_field_;
2048 jfieldID j_info_presentation_timestamp_us_field_;
2050 // Global references; must be deleted in Release().
2051 std::vector<jobject> input_buffers_;
2052 jobject surface_texture_;
2053 jobject previous_surface_texture_;
2055 // Render EGL context.
2056 static jobject render_egl_context_;
// Definition of the class-wide EGL context; NULL until SetAndroidObjects()
// installs one (surface decoding disabled while NULL).
2059 jobject MediaCodecVideoDecoder::render_egl_context_ = NULL;
// Replaces the process-wide render EGL context. Drops the previous global
// ref (if any); a NULL/invalid context disables HW surface decoding. The
// new object is validated to actually be an android.opengl.EGLContext.
2061 int MediaCodecVideoDecoder::SetAndroidObjects(JNIEnv* jni,
2062 jobject render_egl_context) {
2063 if (render_egl_context_) {
2064 jni->DeleteGlobalRef(render_egl_context_);
2066 if (IsNull(jni, render_egl_context)) {
2067 render_egl_context_ = NULL;
// Pin the context with a global ref so it survives across JNI calls.
2069 render_egl_context_ = jni->NewGlobalRef(render_egl_context);
2070 CHECK_EXCEPTION(jni) << "error calling NewGlobalRef for EGL Context.";
2071 jclass j_egl_context_class = FindClass(jni, "android/opengl/EGLContext");
2072 if (!jni->IsInstanceOf(render_egl_context_, j_egl_context_class)) {
2073 ALOGE("Wrong EGL Context.");
2074 jni->DeleteGlobalRef(render_egl_context_);
2075 render_egl_context_ = NULL;
2078 if (render_egl_context_ == NULL) {
2079 ALOGD("NULL VideoDecoder EGL context - HW surface decoding is disabled.");
// Constructor: spins up the dedicated codec thread, instantiates the Java
// MediaCodecVideoDecoder peer, and caches every jmethodID/jfieldID used on
// the per-frame path so no reflection lookups happen during decoding.
// Surface (zero-copy) output is enabled only when a render EGL context was
// previously installed via SetAndroidObjects().
2084 MediaCodecVideoDecoder::MediaCodecVideoDecoder(JNIEnv* jni)
2085 : key_frame_required_(true),
2088 surface_texture_(NULL),
2089 previous_surface_texture_(NULL),
2090 codec_thread_(new Thread()),
2091 j_media_codec_video_decoder_class_(
2093 FindClass(jni, "org/webrtc/MediaCodecVideoDecoder")),
2094 j_media_codec_video_decoder_(
2096 jni->NewObject(*j_media_codec_video_decoder_class_,
2098 *j_media_codec_video_decoder_class_,
2101 ScopedLocalRefFrame local_ref_frame(jni);
2102 codec_thread_->SetName("MediaCodecVideoDecoder", NULL);
2103 CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoDecoder";
2105 j_init_decode_method_ = GetMethodID(
2106 jni, *j_media_codec_video_decoder_class_, "initDecode",
2107 "(IIZZLandroid/opengl/EGLContext;)Z");
2109 GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V");
2110 j_dequeue_input_buffer_method_ = GetMethodID(
2111 jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I");
2112 j_queue_input_buffer_method_ = GetMethodID(
2113 jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJ)Z");
2114 j_dequeue_output_buffer_method_ = GetMethodID(
2115 jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer",
2116 "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo;");
2117 j_release_output_buffer_method_ = GetMethodID(
2118 jni, *j_media_codec_video_decoder_class_, "releaseOutputBuffer", "(IZ)Z");
2120 j_input_buffers_field_ = GetFieldID(
2121 jni, *j_media_codec_video_decoder_class_,
2122 "inputBuffers", "[Ljava/nio/ByteBuffer;");
2123 j_output_buffers_field_ = GetFieldID(
2124 jni, *j_media_codec_video_decoder_class_,
2125 "outputBuffers", "[Ljava/nio/ByteBuffer;");
2126 j_color_format_field_ = GetFieldID(
2127 jni, *j_media_codec_video_decoder_class_, "colorFormat", "I");
2128 j_width_field_ = GetFieldID(
2129 jni, *j_media_codec_video_decoder_class_, "width", "I");
2130 j_height_field_ = GetFieldID(
2131 jni, *j_media_codec_video_decoder_class_, "height", "I");
2132 j_stride_field_ = GetFieldID(
2133 jni, *j_media_codec_video_decoder_class_, "stride", "I");
2134 j_slice_height_field_ = GetFieldID(
2135 jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I");
2136 j_textureID_field_ = GetFieldID(
2137 jni, *j_media_codec_video_decoder_class_, "textureID", "I");
2138 j_surface_texture_field_ = GetFieldID(
2139 jni, *j_media_codec_video_decoder_class_, "surfaceTexture",
2140 "Landroid/graphics/SurfaceTexture;");
2142 jclass j_decoder_output_buffer_info_class = FindClass(jni,
2143 "org/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo");
2144 j_info_index_field_ = GetFieldID(
2145 jni, j_decoder_output_buffer_info_class, "index", "I");
2146 j_info_offset_field_ = GetFieldID(
2147 jni, j_decoder_output_buffer_info_class, "offset", "I");
2148 j_info_size_field_ = GetFieldID(
2149 jni, j_decoder_output_buffer_info_class, "size", "I");
2150 j_info_presentation_timestamp_us_field_ = GetFieldID(
2151 jni, j_decoder_output_buffer_info_class, "presentationTimestampUs", "J");
// A single CHECK at the end covers all of the lookups above.
2153 CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed";
2154 use_surface_ = true;
2155 if (render_egl_context_ == NULL)
2156 use_surface_ = false;
2157 memset(&codec_, 0, sizeof(codec_));
2158 AllowBlockingCalls();
// Destructor: releases the codec (call elided in this listing) and drops the
// SurfaceTexture global refs that InitDecodeOnCodecThread() may have taken.
2161 MediaCodecVideoDecoder::~MediaCodecVideoDecoder() {
2162 // Call Release() to ensure no more callbacks to us after we are deleted.
2164 // Delete global references.
2165 JNIEnv* jni = AttachCurrentThreadIfNeeded();
2166 if (previous_surface_texture_ != NULL)
2167 jni->DeleteGlobalRef(previous_surface_texture_);
2168 if (surface_texture_ != NULL)
2169 jni->DeleteGlobalRef(surface_texture_);
// Public InitDecode entry point (caller thread): validates settings,
// releases any previous codec instance, saves |inst| into |codec_| (copy
// elided in this listing), resets counters, and forwards the real work to
// InitDecodeOnCodecThread() on |codec_thread_|.
2172 int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst,
2173 int32_t numberOfCores) {
2175 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
2177 int ret_val = Release();
2181 // Save VideoCodec instance for later.
// Self-assignment guard: Reset() re-enters with &codec_ itself.
2182 if (&codec_ != inst) {
2185 codec_.maxFramerate = (codec_.maxFramerate >= 1) ? codec_.maxFramerate : 1;
2187 // Always start with a complete key frame.
2188 key_frame_required_ = true;
2189 frames_received_ = 0;
2190 frames_decoded_ = 0;
2193 return codec_thread_->Invoke<int32_t>(
2194 Bind(&MediaCodecVideoDecoder::InitDecodeOnCodecThread, this));
// Codec-thread body of InitDecode: calls Java initDecode() (falling back to
// a SW codec after repeated HW errors), resets the statistics and timestamp
// queues, pins the input ByteBuffers with global refs, captures the
// SurfaceTexture for surface output, and schedules the first poll message.
2197 int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
2198 CheckOnCodecThread();
2199 JNIEnv* jni = AttachCurrentThreadIfNeeded();
2200 ScopedLocalRefFrame local_ref_frame(jni);
2201 ALOGD("InitDecodeOnCodecThread: %d x %d. Fps: %d. Errors: %d",
2202 codec_.width, codec_.height, codec_.maxFramerate, error_count_);
2203 bool use_sw_codec = false;
2204 if (error_count_ > 1) {
2205 // If more than one critical errors happen for HW codec, switch to SW codec.
2206 use_sw_codec = true;
2209 bool success = jni->CallBooleanMethod(*j_media_codec_video_decoder_,
2210 j_init_decode_method_,
2215 render_egl_context_);
2216 CHECK_EXCEPTION(jni);
2218 return WEBRTC_VIDEO_CODEC_ERROR;
// NOTE(review): the branch selecting between these two values is elided —
// per the surrounding code it plausibly depends on use_surface_; confirm.
2222 max_pending_frames_ = 0;
2224 max_pending_frames_ = 1;
2226 start_time_ms_ = GetCurrentTimeMs();
2227 current_frames_ = 0;
2229 current_decoding_time_ms_ = 0;
2230 timestamps_.clear();
2231 ntp_times_ms_.clear();
2232 frame_rtc_times_ms_.clear();
2234 jobjectArray input_buffers = (jobjectArray)GetObjectField(
2235 jni, *j_media_codec_video_decoder_, j_input_buffers_field_);
2236 size_t num_input_buffers = jni->GetArrayLength(input_buffers);
2237 input_buffers_.resize(num_input_buffers);
// Global refs keep the Java ByteBuffers alive between JNI calls; matching
// DeleteGlobalRef happens in ReleaseOnCodecThread().
2238 for (size_t i = 0; i < num_input_buffers; ++i) {
2240 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
2241 CHECK_EXCEPTION(jni);
2245 jobject surface_texture = GetObjectField(
2246 jni, *j_media_codec_video_decoder_, j_surface_texture_field_);
2247 if (previous_surface_texture_ != NULL) {
2248 jni->DeleteGlobalRef(previous_surface_texture_);
// Keep the outgoing texture one generation: a renderer may still hold it.
2250 previous_surface_texture_ = surface_texture_;
2251 surface_texture_ = jni->NewGlobalRef(surface_texture);
// Kick off periodic polling of decoder output (see OnMessage).
2253 codec_thread_->PostDelayed(kMediaCodecPollMs, this);
2255 return WEBRTC_VIDEO_CODEC_OK;
// Public Release entry point: synchronously runs ReleaseOnCodecThread() on
// |codec_thread_| and returns its status.
2258 int32_t MediaCodecVideoDecoder::Release() {
2259 return codec_thread_->Invoke<int32_t>(
2260 Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this));
// Releases the Java MediaCodec decoder and the input-buffer global refs,
// then cancels pending poll messages. Mirrors the encoder's release path.
// NOTE(review): the early return below is presumably guarded by a "not
// inited" check on an elided line — confirm against the full source.
2263 int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() {
2265 return WEBRTC_VIDEO_CODEC_OK;
2267 CheckOnCodecThread();
2268 JNIEnv* jni = AttachCurrentThreadIfNeeded();
2269 ALOGD("DecoderRelease: Frames received: %d.", frames_received_);
2270 ScopedLocalRefFrame local_ref_frame(jni);
2271 for (size_t i = 0; i < input_buffers_.size(); i++) {
2272 jni->DeleteGlobalRef(input_buffers_[i]);
2274 input_buffers_.clear();
2275 jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_);
2276 CHECK_EXCEPTION(jni);
2277 rtc::MessageQueueManager::Clear(this);
2279 return WEBRTC_VIDEO_CODEC_OK;
// Debug guard: CHECK-fails unless the current thread is |codec_thread_|.
2283 void MediaCodecVideoDecoder::CheckOnCodecThread() {
2284 CHECK(codec_thread_ == ThreadManager::Instance()->CurrentThread())
2285 << "Running on wrong thread!";
// Public Decode entry point (caller thread): validates state and input,
// re-initializes the codec on a resolution change, enforces the
// "key frame first" rule, then hands the frame to DecodeOnCodecThread().
2288 int32_t MediaCodecVideoDecoder::Decode(
2289 const EncodedImage& inputImage,
2291 const RTPFragmentationHeader* fragmentation,
2292 const CodecSpecificInfo* codecSpecificInfo,
2293 int64_t renderTimeMs) {
2295 return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
2297 if (callback_ == NULL) {
2298 return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
2300 if (inputImage._buffer == NULL && inputImage._length > 0) {
2301 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
2303 // Check if encoded frame dimension has changed.
2304 if ((inputImage._encodedWidth * inputImage._encodedHeight > 0) &&
2305 (inputImage._encodedWidth != codec_.width ||
2306 inputImage._encodedHeight != codec_.height)) {
2307 codec_.width = inputImage._encodedWidth;
2308 codec_.height = inputImage._encodedHeight;
// Re-init in place; InitDecode guards against &codec_ self-copy.
2309 InitDecode(&codec_, 1);
2312 // Always start with a complete key frame.
2313 if (key_frame_required_) {
2314 if (inputImage._frameType != webrtc::kKeyFrame) {
2315 return WEBRTC_VIDEO_CODEC_ERROR;
2317 if (!inputImage._completeFrame) {
2318 return WEBRTC_VIDEO_CODEC_ERROR;
2320 key_frame_required_ = false;
2322 if (inputImage._length == 0) {
2323 return WEBRTC_VIDEO_CODEC_ERROR;
2326 return codec_thread_->Invoke<int32_t>(Bind(
2327 &MediaCodecVideoDecoder::DecodeOnCodecThread, this, inputImage));
// Codec-thread body of Decode: drains outputs if the codec is too far
// behind, dequeues a Java input buffer, copies the encoded payload into it,
// records the frame's timestamps for pairing at output time, queues the
// buffer into the codec, and drains again without waiting.
2330 int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
2331 const EncodedImage& inputImage) {
// NOTE(review): function-local static — apparently unused in the visible
// lines; possibly referenced on an elided line. Confirm before removing.
2332 static uint8_t yVal_ = 0x7f;
2334 CheckOnCodecThread();
2335 JNIEnv* jni = AttachCurrentThreadIfNeeded();
2336 ScopedLocalRefFrame local_ref_frame(jni);
2338 // Try to drain the decoder and wait until output is not too
2339 // much behind the input.
2340 if (frames_received_ > frames_decoded_ + max_pending_frames_) {
2341 ALOGV("Wait for output...");
// Blocking drain with a timeout (kMediaCodecTimeoutMs, in us here).
2342 if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs * 1000)) {
2345 return WEBRTC_VIDEO_CODEC_ERROR;
2347 if (frames_received_ > frames_decoded_ + max_pending_frames_) {
2348 ALOGE("Output buffer dequeue timeout");
2351 return WEBRTC_VIDEO_CODEC_ERROR;
2355 // Get input buffer.
2356 int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_decoder_,
2357 j_dequeue_input_buffer_method_);
2358 CHECK_EXCEPTION(jni);
2359 if (j_input_buffer_index < 0) {
2360 ALOGE("dequeueInputBuffer error");
2363 return WEBRTC_VIDEO_CODEC_ERROR;
2366 // Copy encoded data to Java ByteBuffer.
2367 jobject j_input_buffer = input_buffers_[j_input_buffer_index];
2369 reinterpret_cast<uint8*>(jni->GetDirectBufferAddress(j_input_buffer));
2370 CHECK(buffer) << "Indirect buffer??";
2371 int64 buffer_capacity = jni->GetDirectBufferCapacity(j_input_buffer);
2372 CHECK_EXCEPTION(jni);
2373 if (buffer_capacity < inputImage._length) {
2374 ALOGE("Input frame size %d is bigger than buffer size %d.",
2375 inputImage._length, buffer_capacity);
2378 return WEBRTC_VIDEO_CODEC_ERROR;
2380 ALOGV("Decoder frame in # %d. Buffer # %d. Size: %d",
2381 frames_received_, j_input_buffer_index, inputImage._length);
2382 memcpy(buffer, inputImage._buffer, inputImage._length);
2384 // Save input image timestamps for later output.
2386 current_bytes_ += inputImage._length;
2387 timestamps_.push_back(inputImage._timeStamp);
2388 ntp_times_ms_.push_back(inputImage.ntp_time_ms_);
2389 frame_rtc_times_ms_.push_back(GetCurrentTimeMs());
2391 // Feed input to decoder.
// Synthetic monotonically-increasing PTS derived from the frame counter.
2392 jlong timestamp_us = (frames_received_ * 1000000) / codec_.maxFramerate;
2393 bool success = jni->CallBooleanMethod(*j_media_codec_video_decoder_,
2394 j_queue_input_buffer_method_,
2395 j_input_buffer_index,
2398 CHECK_EXCEPTION(jni);
2400 ALOGE("queueInputBuffer error");
2403 return WEBRTC_VIDEO_CODEC_ERROR;
2406 // Try to drain the decoder
2407 if (!DeliverPendingOutputs(jni, 0)) {
2408 ALOGE("DeliverPendingOutputs error");
2411 return WEBRTC_VIDEO_CODEC_ERROR;
2414 return WEBRTC_VIDEO_CODEC_OK;
// Drains one decoded frame (if any): dequeues an output buffer with the
// given timeout, converts/copies YUV data into |decoded_image_| (byte-buffer
// path) or wraps the SurfaceTexture in a TextureVideoFrame (surface path),
// pairs it with the queued input timestamps, returns the buffer to the
// codec, updates periodic statistics, and invokes |callback_|->Decoded().
// Returns true when there was nothing to drain or delivery succeeded.
2417 bool MediaCodecVideoDecoder::DeliverPendingOutputs(
2418 JNIEnv* jni, int dequeue_timeout_us) {
2419 if (frames_received_ <= frames_decoded_) {
2420 // No need to query for output buffers - decoder is drained.
2423 // Get decoder output.
2424 jobject j_decoder_output_buffer_info = jni->CallObjectMethod(
2425 *j_media_codec_video_decoder_,
2426 j_dequeue_output_buffer_method_,
2427 dequeue_timeout_us);
2429 CHECK_EXCEPTION(jni);
// Null info means no output within the timeout (early-out body elided).
2430 if (IsNull(jni, j_decoder_output_buffer_info)) {
2434 // Extract output buffer info from Java DecoderOutputBufferInfo.
2435 int output_buffer_index =
2436 GetIntField(jni, j_decoder_output_buffer_info, j_info_index_field_);
2437 if (output_buffer_index < 0) {
2438 ALOGE("dequeueOutputBuffer error : %d", output_buffer_index);
2441 int output_buffer_offset =
2442 GetIntField(jni, j_decoder_output_buffer_info, j_info_offset_field_);
2443 int output_buffer_size =
2444 GetIntField(jni, j_decoder_output_buffer_info, j_info_size_field_);
2445 CHECK_EXCEPTION(jni);
2447 // Get decoded video frame properties.
2448 int color_format = GetIntField(jni, *j_media_codec_video_decoder_,
2449 j_color_format_field_);
2450 int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_);
2451 int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_);
2452 int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_);
2453 int slice_height = GetIntField(jni, *j_media_codec_video_decoder_,
2454 j_slice_height_field_);
2455 int texture_id = GetIntField(jni, *j_media_codec_video_decoder_,
2456 j_textureID_field_);
2458 // Extract data from Java ByteBuffer and create output yuv420 frame -
2459 // for non surface decoding only.
2460 if (!use_surface_) {
// Minimum I420 payload is width*height luma + two quarter-size chroma.
2461 if (output_buffer_size < width * height * 3 / 2) {
2462 ALOGE("Insufficient output buffer size: %d", output_buffer_size);
2465 jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField(
2466 jni, *j_media_codec_video_decoder_, j_output_buffers_field_));
2467 jobject output_buffer =
2468 jni->GetObjectArrayElement(output_buffers, output_buffer_index);
2469 uint8_t* payload = reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(
2471 CHECK_EXCEPTION(jni);
2472 payload += output_buffer_offset;
2474 // Create yuv420 frame.
2475 if (color_format == COLOR_FormatYUV420Planar) {
// Planar output: U/V planes follow the (stride x slice_height) Y plane.
2476 decoded_image_.CreateFrame(
2477 stride * slice_height, payload,
2478 (stride * slice_height) / 4, payload + (stride * slice_height),
2479 (stride * slice_height) / 4, payload + (5 * stride * slice_height / 4),
2481 stride, stride / 2, stride / 2);
2483 // All other supported formats are nv12.
2484 decoded_image_.CreateEmptyFrame(width, height, width,
2485 width / 2, width / 2);
// NV12 -> I420 conversion (call name elided); source chroma plane starts
// after the Y plane at stride * slice_height.
2488 payload + stride * slice_height, stride,
2489 decoded_image_.buffer(webrtc::kYPlane),
2490 decoded_image_.stride(webrtc::kYPlane),
2491 decoded_image_.buffer(webrtc::kUPlane),
2492 decoded_image_.stride(webrtc::kUPlane),
2493 decoded_image_.buffer(webrtc::kVPlane),
2494 decoded_image_.stride(webrtc::kVPlane),
2499 // Get frame timestamps from a queue.
// Queues are FIFO and parallel to input order; front() is the oldest
// in-flight frame, matching the output just dequeued.
2500 int32_t timestamp = timestamps_.front();
2501 timestamps_.erase(timestamps_.begin());
2502 int64_t ntp_time_ms = ntp_times_ms_.front();
2503 ntp_times_ms_.erase(ntp_times_ms_.begin());
2504 int64_t frame_decoding_time_ms = GetCurrentTimeMs() -
2505 frame_rtc_times_ms_.front();
2506 frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
2508 ALOGV("Decoder frame out # %d. %d x %d. %d x %d. Color: 0x%x. Size: %d."
2509 " DecTime: %lld", frames_decoded_, width, height, stride, slice_height,
2510 color_format, output_buffer_size, frame_decoding_time_ms);
2512 // Return output buffer back to codec.
2513 bool success = jni->CallBooleanMethod(
2514 *j_media_codec_video_decoder_,
2515 j_release_output_buffer_method_,
2516 output_buffer_index,
2518 CHECK_EXCEPTION(jni);
2520 ALOGE("releaseOutputBuffer error");
2524 // Calculate and print decoding statistics - every 3 seconds.
2527 current_decoding_time_ms_ += frame_decoding_time_ms;
2528 int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
2529 if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
2530 current_frames_ > 0) {
2531 ALOGD("Decoder bitrate: %d kbps, fps: %d, decTime: %d for last %d ms",
2532 current_bytes_ * 8 / statistic_time_ms,
2533 (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms,
2534 current_decoding_time_ms_ / current_frames_, statistic_time_ms);
2535 start_time_ms_ = GetCurrentTimeMs();
2536 current_frames_ = 0;
2538 current_decoding_time_ms_ = 0;
2541 // Callback - output decoded frame.
2542 int32_t callback_status = WEBRTC_VIDEO_CODEC_OK;
// Surface path: hand the texture out by native handle (zero copy). The
// TextureVideoFrame borrows |native_handle_|, valid for the callback only.
2544 native_handle_.SetTextureObject(surface_texture_, texture_id);
2545 TextureVideoFrame texture_image(
2546 &native_handle_, width, height, timestamp, 0);
2547 texture_image.set_ntp_time_ms(ntp_time_ms);
2548 callback_status = callback_->Decoded(texture_image);
// Byte-buffer path: deliver the copied-out I420 frame.
2550 decoded_image_.set_timestamp(timestamp);
2551 decoded_image_.set_ntp_time_ms(ntp_time_ms);
2552 callback_status = callback_->Decoded(decoded_image_);
2554 if (callback_status > 0) {
2555 ALOGE("callback error");
// Stores the borrowed |callback| used by DeliverPendingOutputs() to return
// decoded frames; caller keeps it alive.
2561 int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback(
2562 DecodedImageCallback* callback) {
2563 callback_ = callback;
2564 return WEBRTC_VIDEO_CODEC_OK;
// Re-initializes the decoder with the previously saved |codec_| settings.
// The guard before the elided early return presumably checks inited state.
2567 int32_t MediaCodecVideoDecoder::Reset() {
2568 ALOGD("DecoderReset");
2570 return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
2572 return InitDecode(&codec_, 1);
// Periodic poll (posted by InitDecodeOnCodecThread and re-posted here):
// drains any pending decoder output, then reschedules itself.
2575 void MediaCodecVideoDecoder::OnMessage(rtc::Message* msg) {
2576 JNIEnv* jni = AttachCurrentThreadIfNeeded();
2577 ScopedLocalRefFrame local_ref_frame(jni);
2581 // We only ever send one message to |this| directly (not through a Bind()'d
2582 // functor), so expect no ID/data.
2583 CHECK(!msg->message_id) << "Unexpected message!";
2584 CHECK(!msg->pdata) << "Unexpected message!";
2585 CheckOnCodecThread();
// Non-blocking drain; the error branch body is elided in this listing.
2587 if (!DeliverPendingOutputs(jni, 0)) {
2591 codec_thread_->PostDelayed(kMediaCodecPollMs, this);
// Factory producing MediaCodecVideoDecoders when the platform supports
// hardware decoding (probed once in the constructor).
2594 class MediaCodecVideoDecoderFactory
2595 : public cricket::WebRtcVideoDecoderFactory {
2597 MediaCodecVideoDecoderFactory();
2598 virtual ~MediaCodecVideoDecoderFactory();
2599 // WebRtcVideoDecoderFactory implementation.
2600 virtual webrtc::VideoDecoder* CreateVideoDecoder(
2601 webrtc::VideoCodecType type) OVERRIDE;
2603 virtual void DestroyVideoDecoder(webrtc::VideoDecoder* decoder) OVERRIDE;
// Set once in the ctor from Java isPlatformSupported(); read-only after.
2606 bool is_platform_supported_;
// Probes the Java side once for hardware-decoder support and caches the
// answer in |is_platform_supported_|.
2609 MediaCodecVideoDecoderFactory::MediaCodecVideoDecoderFactory() {
2610 JNIEnv* jni = AttachCurrentThreadIfNeeded();
2611 ScopedLocalRefFrame local_ref_frame(jni);
2612 jclass j_decoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoDecoder");
2613 is_platform_supported_ = jni->CallStaticBooleanMethod(
2615 GetStaticMethodID(jni, j_decoder_class, "isPlatformSupported", "()Z"));
2616 CHECK_EXCEPTION(jni);
// Nothing to tear down.
2619 MediaCodecVideoDecoderFactory::~MediaCodecVideoDecoderFactory() {}
// Returns a new MediaCodecVideoDecoder for VP8 when supported; the failure
// return inside the guard is elided. Caller owns the result and must
// release it via DestroyVideoDecoder().
2621 webrtc::VideoDecoder* MediaCodecVideoDecoderFactory::CreateVideoDecoder(
2622 webrtc::VideoCodecType type) {
2623 if (type != kVideoCodecVP8 || !is_platform_supported_) {
2626 return new MediaCodecVideoDecoder(AttachCurrentThreadIfNeeded());
// Disposes a decoder created by CreateVideoDecoder(); body (presumably a
// delete of |decoder|) is elided from this listing.
2630 void MediaCodecVideoDecoderFactory::DestroyVideoDecoder(
2631 webrtc::VideoDecoder* decoder) {
2635 #endif // #if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
2637 } // anonymous namespace
// Convenience macro defining JNI-accessible methods in the org.webrtc package.
// Eliminates unnecessary boilerplate and line-wraps, reducing visual clutter.
// E.g. JOW(jint, Foo_bar) declares the JNI-mangled symbol Java_org_webrtc_Foo_bar.
#define JOW(rettype, name) extern "C" rettype JNIEXPORT JNICALL \
  Java_org_webrtc_##name
2644 extern "C" jint JNIEXPORT JNICALL JNI_OnLoad(JavaVM *jvm, void *reserved) {
2645 CHECK(!g_jvm) << "JNI_OnLoad called more than once!";
2647 CHECK(g_jvm) << "JNI_OnLoad handed NULL?";
2649 CHECK(!pthread_once(&g_jni_ptr_once, &CreateJNIPtrKey)) << "pthread_once";
2651 CHECK(rtc::InitializeSSL()) << "Failed to InitializeSSL()";
2654 if (jvm->GetEnv(reinterpret_cast<void**>(&jni), JNI_VERSION_1_6) != JNI_OK)
2656 g_class_reference_holder = new ClassReferenceHolder(jni);
2658 return JNI_VERSION_1_6;
2661 extern "C" void JNIEXPORT JNICALL JNI_OnUnLoad(JavaVM *jvm, void *reserved) {
2662 g_class_reference_holder->FreeReferences(AttachCurrentThreadIfNeeded());
2663 delete g_class_reference_holder;
2664 g_class_reference_holder = NULL;
2665 CHECK(rtc::CleanupSSL()) << "Failed to CleanupSSL()";
2669 static DataChannelInterface* ExtractNativeDC(JNIEnv* jni, jobject j_dc) {
2670 jfieldID native_dc_id = GetFieldID(jni,
2671 GetObjectClass(jni, j_dc), "nativeDataChannel", "J");
2672 jlong j_d = GetLongField(jni, j_dc, native_dc_id);
2673 return reinterpret_cast<DataChannelInterface*>(j_d);
2676 JOW(jlong, DataChannel_registerObserverNative)(
2677 JNIEnv* jni, jobject j_dc, jobject j_observer) {
2678 scoped_ptr<DataChannelObserverWrapper> observer(
2679 new DataChannelObserverWrapper(jni, j_observer));
2680 ExtractNativeDC(jni, j_dc)->RegisterObserver(observer.get());
2681 return jlongFromPointer(observer.release());
2684 JOW(void, DataChannel_unregisterObserverNative)(
2685 JNIEnv* jni, jobject j_dc, jlong native_observer) {
2686 ExtractNativeDC(jni, j_dc)->UnregisterObserver();
2687 delete reinterpret_cast<DataChannelObserverWrapper*>(native_observer);
2690 JOW(jstring, DataChannel_label)(JNIEnv* jni, jobject j_dc) {
2691 return JavaStringFromStdString(jni, ExtractNativeDC(jni, j_dc)->label());
2694 JOW(jobject, DataChannel_state)(JNIEnv* jni, jobject j_dc) {
2695 return JavaEnumFromIndex(
2696 jni, "DataChannel$State", ExtractNativeDC(jni, j_dc)->state());
2699 JOW(jlong, DataChannel_bufferedAmount)(JNIEnv* jni, jobject j_dc) {
2700 uint64 buffered_amount = ExtractNativeDC(jni, j_dc)->buffered_amount();
2701 CHECK_LE(buffered_amount, std::numeric_limits<int64>::max())
2702 << "buffered_amount overflowed jlong!";
2703 return static_cast<jlong>(buffered_amount);
2706 JOW(void, DataChannel_close)(JNIEnv* jni, jobject j_dc) {
2707 ExtractNativeDC(jni, j_dc)->Close();
2710 JOW(jboolean, DataChannel_sendNative)(JNIEnv* jni, jobject j_dc,
2711 jbyteArray data, jboolean binary) {
2712 jbyte* bytes = jni->GetByteArrayElements(data, NULL);
2713 bool ret = ExtractNativeDC(jni, j_dc)->Send(DataBuffer(
2714 rtc::Buffer(bytes, jni->GetArrayLength(data)),
2716 jni->ReleaseByteArrayElements(data, bytes, JNI_ABORT);
2720 JOW(void, DataChannel_dispose)(JNIEnv* jni, jobject j_dc) {
2721 CHECK_RELEASE(ExtractNativeDC(jni, j_dc));
// Enable native tracing: apply the requested webrtc trace level filter and
// direct trace output either to a file at |j_path| or (Android-only) to
// logcat when the magic path "logcat:" is given.
2724 JOW(void, Logging_nativeEnableTracing)(
2725 JNIEnv* jni, jclass, jstring j_path, jint nativeLevels,
2726 jint nativeSeverity) {
2727 std::string path = JavaToStdString(jni, j_path);
2728 if (nativeLevels != webrtc::kTraceNone) {
2729 webrtc::Trace::set_level_filter(nativeLevels);
2730 #if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
// NOTE(review): the closing brace/#endif structure and the else-branch
// plumbing for the "logcat:" case are elided from this view; confirm the
// exact file-vs-logcat branch layout against upstream before editing.
2731 if (path != "logcat:") {
2733 CHECK_EQ(0, webrtc::Trace::SetTraceFile(path.c_str(), false))
2734 << "SetTraceFile failed";
2735 #if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
2737 // Intentionally leak this to avoid needing to reason about its lifecycle.
2738 // It keeps no state and functions only as a dispatch point.
2739 static LogcatTraceContext* g_trace_callback = new LogcatTraceContext();
// The rtc::LogMessage severity is applied unconditionally.
2743 rtc::LogMessage::LogToDebug(nativeSeverity);
2746 JOW(void, PeerConnection_freePeerConnection)(JNIEnv*, jclass, jlong j_p) {
2747 CHECK_RELEASE(reinterpret_cast<PeerConnectionInterface*>(j_p));
2750 JOW(void, PeerConnection_freeObserver)(JNIEnv*, jclass, jlong j_p) {
2751 PCOJava* p = reinterpret_cast<PCOJava*>(j_p);
2755 JOW(void, MediaSource_free)(JNIEnv*, jclass, jlong j_p) {
2756 CHECK_RELEASE(reinterpret_cast<MediaSourceInterface*>(j_p));
2759 JOW(void, VideoCapturer_free)(JNIEnv*, jclass, jlong j_p) {
2760 delete reinterpret_cast<cricket::VideoCapturer*>(j_p);
2763 JOW(void, VideoRenderer_freeGuiVideoRenderer)(JNIEnv*, jclass, jlong j_p) {
2764 delete reinterpret_cast<VideoRendererWrapper*>(j_p);
2767 JOW(void, VideoRenderer_freeWrappedVideoRenderer)(JNIEnv*, jclass, jlong j_p) {
2768 delete reinterpret_cast<JavaVideoRendererWrapper*>(j_p);
2771 JOW(void, MediaStreamTrack_free)(JNIEnv*, jclass, jlong j_p) {
2772 CHECK_RELEASE(reinterpret_cast<MediaStreamTrackInterface*>(j_p));
2775 JOW(jboolean, MediaStream_nativeAddAudioTrack)(
2776 JNIEnv* jni, jclass, jlong pointer, jlong j_audio_track_pointer) {
2777 return reinterpret_cast<MediaStreamInterface*>(pointer)->AddTrack(
2778 reinterpret_cast<AudioTrackInterface*>(j_audio_track_pointer));
2781 JOW(jboolean, MediaStream_nativeAddVideoTrack)(
2782 JNIEnv* jni, jclass, jlong pointer, jlong j_video_track_pointer) {
2783 return reinterpret_cast<MediaStreamInterface*>(pointer)
2784 ->AddTrack(reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer));
2787 JOW(jboolean, MediaStream_nativeRemoveAudioTrack)(
2788 JNIEnv* jni, jclass, jlong pointer, jlong j_audio_track_pointer) {
2789 return reinterpret_cast<MediaStreamInterface*>(pointer)->RemoveTrack(
2790 reinterpret_cast<AudioTrackInterface*>(j_audio_track_pointer));
2793 JOW(jboolean, MediaStream_nativeRemoveVideoTrack)(
2794 JNIEnv* jni, jclass, jlong pointer, jlong j_video_track_pointer) {
2795 return reinterpret_cast<MediaStreamInterface*>(pointer)->RemoveTrack(
2796 reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer));
2799 JOW(jstring, MediaStream_nativeLabel)(JNIEnv* jni, jclass, jlong j_p) {
2800 return JavaStringFromStdString(
2801 jni, reinterpret_cast<MediaStreamInterface*>(j_p)->label());
2804 JOW(void, MediaStream_free)(JNIEnv*, jclass, jlong j_p) {
2805 CHECK_RELEASE(reinterpret_cast<MediaStreamInterface*>(j_p));
2808 JOW(jlong, PeerConnectionFactory_nativeCreateObserver)(
2809 JNIEnv * jni, jclass, jobject j_observer) {
2810 return (jlong)new PCOJava(jni, j_observer);
2813 #if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
2814 JOW(jboolean, PeerConnectionFactory_initializeAndroidGlobals)(
2815 JNIEnv* jni, jclass, jobject context,
2816 jboolean initialize_audio, jboolean initialize_video,
2817 jobject render_egl_context) {
2818 CHECK(g_jvm) << "JNI_OnLoad failed to run?";
2819 bool failure = false;
2820 if (!factory_static_initialized) {
2821 if (initialize_video) {
2822 failure |= webrtc::SetCaptureAndroidVM(g_jvm, context);
2823 failure |= webrtc::SetRenderAndroidVM(g_jvm);
2825 if (initialize_audio)
2826 failure |= webrtc::VoiceEngine::SetAndroidObjects(g_jvm, jni, context);
2827 factory_static_initialized = true;
2829 if (initialize_video)
2830 failure |= MediaCodecVideoDecoder::SetAndroidObjects(jni,
2831 render_egl_context);
2834 #endif // defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
2836 // Helper struct for working around the fact that CreatePeerConnectionFactory()
2837 // comes in two flavors: either entirely automagical (constructing its own
2838 // threads and deleting them on teardown, but no external codec factory support)
2839 // or entirely manual (requires caller to delete threads after factory
2840 // teardown). This struct takes ownership of its ctor's arguments to present a
2841 // single thing for Java to hold and eventually free.
2842 class OwnedFactoryAndThreads {
2844 OwnedFactoryAndThreads(Thread* worker_thread,
2845 Thread* signaling_thread,
2846 PeerConnectionFactoryInterface* factory)
2847 : worker_thread_(worker_thread),
2848 signaling_thread_(signaling_thread),
2849 factory_(factory) {}
2851 ~OwnedFactoryAndThreads() { CHECK_RELEASE(factory_); }
2853 PeerConnectionFactoryInterface* factory() { return factory_; }
2856 const scoped_ptr<Thread> worker_thread_;
2857 const scoped_ptr<Thread> signaling_thread_;
2858 PeerConnectionFactoryInterface* factory_; // Const after ctor except dtor.
// Construct the PeerConnectionFactory together with its dedicated worker and
// signaling threads; everything is bundled into an OwnedFactoryAndThreads
// whose jlong handle Java later passes to freeFactory().
2861 JOW(jlong, PeerConnectionFactory_nativeCreatePeerConnectionFactory)(
2862 JNIEnv* jni, jclass) {
2863 // talk/ assumes pretty widely that the current Thread is ThreadManager'd, but
2864 // ThreadManager only WrapCurrentThread()s the thread where it is first
2865 // created. Since the semantics around when auto-wrapping happens in
2866 // webrtc/base/ are convoluted, we simply wrap here to avoid having to think
2867 // about ramifications of auto-wrapping there.
2868 rtc::ThreadManager::Instance()->WrapCurrentThread();
2869 webrtc::Trace::CreateTrace();
2870 Thread* worker_thread = new Thread();
2871 worker_thread->SetName("worker_thread", NULL);
2872 Thread* signaling_thread = new Thread();
2873 signaling_thread->SetName("signaling_thread", NULL);
2874 CHECK(worker_thread->Start() && signaling_thread->Start())
2875 << "Failed to start threads";
// MediaCodec-backed codec factories are only compiled in for non-Chromium
// Android builds; elsewhere the scoped_ptrs stay NULL and the factory uses
// software codecs.
2876 scoped_ptr<cricket::WebRtcVideoEncoderFactory> encoder_factory;
2877 scoped_ptr<cricket::WebRtcVideoDecoderFactory> decoder_factory;
2878 #if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
2879 encoder_factory.reset(new MediaCodecVideoEncoderFactory());
2880 decoder_factory.reset(new MediaCodecVideoDecoderFactory());
// NOTE(review): intermediate argument lines of this call (upstream passes
// signaling_thread and a NULL default ADM between worker_thread and the
// codec factories) are elided from this view — confirm before editing.
2882 rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
2883 webrtc::CreatePeerConnectionFactory(worker_thread,
2886 encoder_factory.release(),
2887 decoder_factory.release()));
2888 OwnedFactoryAndThreads* owned_factory = new OwnedFactoryAndThreads(
2889 worker_thread, signaling_thread, factory.release());
2890 return jlongFromPointer(owned_factory);
2893 JOW(void, PeerConnectionFactory_freeFactory)(JNIEnv*, jclass, jlong j_p) {
2894 delete reinterpret_cast<OwnedFactoryAndThreads*>(j_p);
2895 webrtc::Trace::ReturnTrace();
2898 static PeerConnectionFactoryInterface* factoryFromJava(jlong j_p) {
2899 return reinterpret_cast<OwnedFactoryAndThreads*>(j_p)->factory();
2902 JOW(jlong, PeerConnectionFactory_nativeCreateLocalMediaStream)(
2903 JNIEnv* jni, jclass, jlong native_factory, jstring label) {
2904 rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
2905 factoryFromJava(native_factory));
2906 rtc::scoped_refptr<MediaStreamInterface> stream(
2907 factory->CreateLocalMediaStream(JavaToStdString(jni, label)));
2908 return (jlong)stream.release();
2911 JOW(jlong, PeerConnectionFactory_nativeCreateVideoSource)(
2912 JNIEnv* jni, jclass, jlong native_factory, jlong native_capturer,
2913 jobject j_constraints) {
2914 scoped_ptr<ConstraintsWrapper> constraints(
2915 new ConstraintsWrapper(jni, j_constraints));
2916 rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
2917 factoryFromJava(native_factory));
2918 rtc::scoped_refptr<VideoSourceInterface> source(
2919 factory->CreateVideoSource(
2920 reinterpret_cast<cricket::VideoCapturer*>(native_capturer),
2921 constraints.get()));
2922 return (jlong)source.release();
2925 JOW(jlong, PeerConnectionFactory_nativeCreateVideoTrack)(
2926 JNIEnv* jni, jclass, jlong native_factory, jstring id,
2927 jlong native_source) {
2928 rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
2929 factoryFromJava(native_factory));
2930 rtc::scoped_refptr<VideoTrackInterface> track(
2931 factory->CreateVideoTrack(
2932 JavaToStdString(jni, id),
2933 reinterpret_cast<VideoSourceInterface*>(native_source)));
2934 return (jlong)track.release();
2937 JOW(jlong, PeerConnectionFactory_nativeCreateAudioSource)(
2938 JNIEnv* jni, jclass, jlong native_factory, jobject j_constraints) {
2939 scoped_ptr<ConstraintsWrapper> constraints(
2940 new ConstraintsWrapper(jni, j_constraints));
2941 rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
2942 factoryFromJava(native_factory));
2943 rtc::scoped_refptr<AudioSourceInterface> source(
2944 factory->CreateAudioSource(constraints.get()));
2945 return (jlong)source.release();
2948 JOW(jlong, PeerConnectionFactory_nativeCreateAudioTrack)(
2949 JNIEnv* jni, jclass, jlong native_factory, jstring id,
2950 jlong native_source) {
2951 rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
2952 factoryFromJava(native_factory));
2953 rtc::scoped_refptr<AudioTrackInterface> track(factory->CreateAudioTrack(
2954 JavaToStdString(jni, id),
2955 reinterpret_cast<AudioSourceInterface*>(native_source)));
2956 return (jlong)track.release();
2959 static void JavaIceServersToJsepIceServers(
2960 JNIEnv* jni, jobject j_ice_servers,
2961 PeerConnectionInterface::IceServers* ice_servers) {
2962 jclass list_class = GetObjectClass(jni, j_ice_servers);
2963 jmethodID iterator_id = GetMethodID(
2964 jni, list_class, "iterator", "()Ljava/util/Iterator;");
2965 jobject iterator = jni->CallObjectMethod(j_ice_servers, iterator_id);
2966 CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
2967 jmethodID iterator_has_next = GetMethodID(
2968 jni, GetObjectClass(jni, iterator), "hasNext", "()Z");
2969 jmethodID iterator_next = GetMethodID(
2970 jni, GetObjectClass(jni, iterator), "next", "()Ljava/lang/Object;");
2971 while (jni->CallBooleanMethod(iterator, iterator_has_next)) {
2972 CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
2973 jobject j_ice_server = jni->CallObjectMethod(iterator, iterator_next);
2974 CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
2975 jclass j_ice_server_class = GetObjectClass(jni, j_ice_server);
2976 jfieldID j_ice_server_uri_id =
2977 GetFieldID(jni, j_ice_server_class, "uri", "Ljava/lang/String;");
2978 jfieldID j_ice_server_username_id =
2979 GetFieldID(jni, j_ice_server_class, "username", "Ljava/lang/String;");
2980 jfieldID j_ice_server_password_id =
2981 GetFieldID(jni, j_ice_server_class, "password", "Ljava/lang/String;");
2982 jstring uri = reinterpret_cast<jstring>(
2983 GetObjectField(jni, j_ice_server, j_ice_server_uri_id));
2984 jstring username = reinterpret_cast<jstring>(
2985 GetObjectField(jni, j_ice_server, j_ice_server_username_id));
2986 jstring password = reinterpret_cast<jstring>(
2987 GetObjectField(jni, j_ice_server, j_ice_server_password_id));
2988 PeerConnectionInterface::IceServer server;
2989 server.uri = JavaToStdString(jni, uri);
2990 server.username = JavaToStdString(jni, username);
2991 server.password = JavaToStdString(jni, password);
2992 ice_servers->push_back(server);
2994 CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
2997 JOW(jlong, PeerConnectionFactory_nativeCreatePeerConnection)(
2998 JNIEnv *jni, jclass, jlong factory, jobject j_ice_servers,
2999 jobject j_constraints, jlong observer_p) {
3000 rtc::scoped_refptr<PeerConnectionFactoryInterface> f(
3001 reinterpret_cast<PeerConnectionFactoryInterface*>(
3002 factoryFromJava(factory)));
3003 PeerConnectionInterface::IceServers servers;
3004 JavaIceServersToJsepIceServers(jni, j_ice_servers, &servers);
3005 PCOJava* observer = reinterpret_cast<PCOJava*>(observer_p);
3006 observer->SetConstraints(new ConstraintsWrapper(jni, j_constraints));
3007 rtc::scoped_refptr<PeerConnectionInterface> pc(f->CreatePeerConnection(
3008 servers, observer->constraints(), NULL, NULL, observer));
3009 return (jlong)pc.release();
3012 static rtc::scoped_refptr<PeerConnectionInterface> ExtractNativePC(
3013 JNIEnv* jni, jobject j_pc) {
3014 jfieldID native_pc_id = GetFieldID(jni,
3015 GetObjectClass(jni, j_pc), "nativePeerConnection", "J");
3016 jlong j_p = GetLongField(jni, j_pc, native_pc_id);
3017 return rtc::scoped_refptr<PeerConnectionInterface>(
3018 reinterpret_cast<PeerConnectionInterface*>(j_p));
3021 JOW(jobject, PeerConnection_getLocalDescription)(JNIEnv* jni, jobject j_pc) {
3022 const SessionDescriptionInterface* sdp =
3023 ExtractNativePC(jni, j_pc)->local_description();
3024 return sdp ? JavaSdpFromNativeSdp(jni, sdp) : NULL;
3027 JOW(jobject, PeerConnection_getRemoteDescription)(JNIEnv* jni, jobject j_pc) {
3028 const SessionDescriptionInterface* sdp =
3029 ExtractNativePC(jni, j_pc)->remote_description();
3030 return sdp ? JavaSdpFromNativeSdp(jni, sdp) : NULL;
3033 JOW(jobject, PeerConnection_createDataChannel)(
3034 JNIEnv* jni, jobject j_pc, jstring j_label, jobject j_init) {
3035 DataChannelInit init = JavaDataChannelInitToNative(jni, j_init);
3036 rtc::scoped_refptr<DataChannelInterface> channel(
3037 ExtractNativePC(jni, j_pc)->CreateDataChannel(
3038 JavaToStdString(jni, j_label), &init));
3039 // Mustn't pass channel.get() directly through NewObject to avoid reading its
3040 // vararg parameter as 64-bit and reading memory that doesn't belong to the
3041 // 32-bit parameter.
3042 jlong nativeChannelPtr = jlongFromPointer(channel.get());
3043 CHECK(nativeChannelPtr) << "Failed to create DataChannel";
3044 jclass j_data_channel_class = FindClass(jni, "org/webrtc/DataChannel");
3045 jmethodID j_data_channel_ctor = GetMethodID(
3046 jni, j_data_channel_class, "<init>", "(J)V");
3047 jobject j_channel = jni->NewObject(
3048 j_data_channel_class, j_data_channel_ctor, nativeChannelPtr);
3049 CHECK_EXCEPTION(jni) << "error during NewObject";
3050 // Channel is now owned by Java object, and will be freed from there.
3051 int bumped_count = channel->AddRef();
3052 CHECK(bumped_count == 2) << "Unexpected refcount";
3056 JOW(void, PeerConnection_createOffer)(
3057 JNIEnv* jni, jobject j_pc, jobject j_observer, jobject j_constraints) {
3058 ConstraintsWrapper* constraints =
3059 new ConstraintsWrapper(jni, j_constraints);
3060 rtc::scoped_refptr<CreateSdpObserverWrapper> observer(
3061 new rtc::RefCountedObject<CreateSdpObserverWrapper>(
3062 jni, j_observer, constraints));
3063 ExtractNativePC(jni, j_pc)->CreateOffer(observer, constraints);
3066 JOW(void, PeerConnection_createAnswer)(
3067 JNIEnv* jni, jobject j_pc, jobject j_observer, jobject j_constraints) {
3068 ConstraintsWrapper* constraints =
3069 new ConstraintsWrapper(jni, j_constraints);
3070 rtc::scoped_refptr<CreateSdpObserverWrapper> observer(
3071 new rtc::RefCountedObject<CreateSdpObserverWrapper>(
3072 jni, j_observer, constraints));
3073 ExtractNativePC(jni, j_pc)->CreateAnswer(observer, constraints);
3076 // Helper to create a SessionDescriptionInterface from a SessionDescription.
3077 static SessionDescriptionInterface* JavaSdpToNativeSdp(
3078 JNIEnv* jni, jobject j_sdp) {
3079 jfieldID j_type_id = GetFieldID(
3080 jni, GetObjectClass(jni, j_sdp), "type",
3081 "Lorg/webrtc/SessionDescription$Type;");
3082 jobject j_type = GetObjectField(jni, j_sdp, j_type_id);
3083 jmethodID j_canonical_form_id = GetMethodID(
3084 jni, GetObjectClass(jni, j_type), "canonicalForm",
3085 "()Ljava/lang/String;");
3086 jstring j_type_string = (jstring)jni->CallObjectMethod(
3087 j_type, j_canonical_form_id);
3088 CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
3089 std::string std_type = JavaToStdString(jni, j_type_string);
3091 jfieldID j_description_id = GetFieldID(
3092 jni, GetObjectClass(jni, j_sdp), "description", "Ljava/lang/String;");
3093 jstring j_description = (jstring)GetObjectField(jni, j_sdp, j_description_id);
3094 std::string std_description = JavaToStdString(jni, j_description);
3096 return webrtc::CreateSessionDescription(
3097 std_type, std_description, NULL);
3100 JOW(void, PeerConnection_setLocalDescription)(
3101 JNIEnv* jni, jobject j_pc,
3102 jobject j_observer, jobject j_sdp) {
3103 rtc::scoped_refptr<SetSdpObserverWrapper> observer(
3104 new rtc::RefCountedObject<SetSdpObserverWrapper>(
3105 jni, j_observer, reinterpret_cast<ConstraintsWrapper*>(NULL)));
3106 ExtractNativePC(jni, j_pc)->SetLocalDescription(
3107 observer, JavaSdpToNativeSdp(jni, j_sdp));
3110 JOW(void, PeerConnection_setRemoteDescription)(
3111 JNIEnv* jni, jobject j_pc,
3112 jobject j_observer, jobject j_sdp) {
3113 rtc::scoped_refptr<SetSdpObserverWrapper> observer(
3114 new rtc::RefCountedObject<SetSdpObserverWrapper>(
3115 jni, j_observer, reinterpret_cast<ConstraintsWrapper*>(NULL)));
3116 ExtractNativePC(jni, j_pc)->SetRemoteDescription(
3117 observer, JavaSdpToNativeSdp(jni, j_sdp));
3120 JOW(jboolean, PeerConnection_updateIce)(
3121 JNIEnv* jni, jobject j_pc, jobject j_ice_servers, jobject j_constraints) {
3122 PeerConnectionInterface::IceServers ice_servers;
3123 JavaIceServersToJsepIceServers(jni, j_ice_servers, &ice_servers);
3124 scoped_ptr<ConstraintsWrapper> constraints(
3125 new ConstraintsWrapper(jni, j_constraints));
3126 return ExtractNativePC(jni, j_pc)->UpdateIce(ice_servers, constraints.get());
3129 JOW(jboolean, PeerConnection_nativeAddIceCandidate)(
3130 JNIEnv* jni, jobject j_pc, jstring j_sdp_mid,
3131 jint j_sdp_mline_index, jstring j_candidate_sdp) {
3132 std::string sdp_mid = JavaToStdString(jni, j_sdp_mid);
3133 std::string sdp = JavaToStdString(jni, j_candidate_sdp);
3134 scoped_ptr<IceCandidateInterface> candidate(
3135 webrtc::CreateIceCandidate(sdp_mid, j_sdp_mline_index, sdp, NULL));
3136 return ExtractNativePC(jni, j_pc)->AddIceCandidate(candidate.get());
3139 JOW(jboolean, PeerConnection_nativeAddLocalStream)(
3140 JNIEnv* jni, jobject j_pc, jlong native_stream) {
3141 return ExtractNativePC(jni, j_pc)->AddStream(
3142 reinterpret_cast<MediaStreamInterface*>(native_stream));
3145 JOW(void, PeerConnection_nativeRemoveLocalStream)(
3146 JNIEnv* jni, jobject j_pc, jlong native_stream) {
3147 ExtractNativePC(jni, j_pc)->RemoveStream(
3148 reinterpret_cast<MediaStreamInterface*>(native_stream));
3151 JOW(bool, PeerConnection_nativeGetStats)(
3152 JNIEnv* jni, jobject j_pc, jobject j_observer, jlong native_track) {
3153 rtc::scoped_refptr<StatsObserverWrapper> observer(
3154 new rtc::RefCountedObject<StatsObserverWrapper>(jni, j_observer));
3155 return ExtractNativePC(jni, j_pc)->GetStats(
3157 reinterpret_cast<MediaStreamTrackInterface*>(native_track),
3158 PeerConnectionInterface::kStatsOutputLevelStandard);
3161 JOW(jobject, PeerConnection_signalingState)(JNIEnv* jni, jobject j_pc) {
3162 PeerConnectionInterface::SignalingState state =
3163 ExtractNativePC(jni, j_pc)->signaling_state();
3164 return JavaEnumFromIndex(jni, "PeerConnection$SignalingState", state);
3167 JOW(jobject, PeerConnection_iceConnectionState)(JNIEnv* jni, jobject j_pc) {
3168 PeerConnectionInterface::IceConnectionState state =
3169 ExtractNativePC(jni, j_pc)->ice_connection_state();
3170 return JavaEnumFromIndex(jni, "PeerConnection$IceConnectionState", state);
3173 JOW(jobject, PeerGathering_iceGatheringState)(JNIEnv* jni, jobject j_pc) {
3174 PeerConnectionInterface::IceGatheringState state =
3175 ExtractNativePC(jni, j_pc)->ice_gathering_state();
3176 return JavaEnumFromIndex(jni, "PeerGathering$IceGatheringState", state);
3179 JOW(void, PeerConnection_close)(JNIEnv* jni, jobject j_pc) {
3180 ExtractNativePC(jni, j_pc)->Close();
3184 JOW(jobject, MediaSource_nativeState)(JNIEnv* jni, jclass, jlong j_p) {
3185 rtc::scoped_refptr<MediaSourceInterface> p(
3186 reinterpret_cast<MediaSourceInterface*>(j_p));
3187 return JavaEnumFromIndex(jni, "MediaSource$State", p->state());
3190 JOW(jlong, VideoCapturer_nativeCreateVideoCapturer)(
3191 JNIEnv* jni, jclass, jstring j_device_name) {
3192 std::string device_name = JavaToStdString(jni, j_device_name);
3193 scoped_ptr<cricket::DeviceManagerInterface> device_manager(
3194 cricket::DeviceManagerFactory::Create());
3195 CHECK(device_manager->Init()) << "DeviceManager::Init() failed";
3196 cricket::Device device;
3197 if (!device_manager->GetVideoCaptureDevice(device_name, &device)) {
3198 LOG(LS_ERROR) << "GetVideoCaptureDevice failed for " << device_name;
3201 scoped_ptr<cricket::VideoCapturer> capturer(
3202 device_manager->CreateVideoCapturer(device));
3203 return (jlong)capturer.release();
3206 JOW(jlong, VideoRenderer_nativeCreateGuiVideoRenderer)(
3207 JNIEnv* jni, jclass, int x, int y) {
3208 scoped_ptr<VideoRendererWrapper> renderer(VideoRendererWrapper::Create(
3209 cricket::VideoRendererFactory::CreateGuiVideoRenderer(x, y)));
3210 return (jlong)renderer.release();
3213 JOW(jlong, VideoRenderer_nativeWrapVideoRenderer)(
3214 JNIEnv* jni, jclass, jobject j_callbacks) {
3215 scoped_ptr<JavaVideoRendererWrapper> renderer(
3216 new JavaVideoRendererWrapper(jni, j_callbacks));
3217 return (jlong)renderer.release();
3220 JOW(jlong, VideoSource_stop)(JNIEnv* jni, jclass, jlong j_p) {
3221 cricket::VideoCapturer* capturer =
3222 reinterpret_cast<VideoSourceInterface*>(j_p)->GetVideoCapturer();
3223 scoped_ptr<cricket::VideoFormatPod> format(
3224 new cricket::VideoFormatPod(*capturer->GetCaptureFormat()));
3226 return jlongFromPointer(format.release());
// Restart the source's capturer with a format previously obtained from
// VideoSource_stop().
3229 JOW(void, VideoSource_restart)(
3230 JNIEnv* jni, jclass, jlong j_p_source, jlong j_p_format) {
// NOTE(review): lines are elided before and after this view (upstream has
// CHECK()s on both handles here, and the function's tail is not visible).
// The scoped_ptr below takes ownership of |j_p_format|; unless the elided
// tail calls format.release(), the Java-held format pointer is freed here
// and a later freeNativeVideoFormat() would double-free — confirm against
// upstream before editing.
3233 scoped_ptr<cricket::VideoFormatPod> format(
3234 reinterpret_cast<cricket::VideoFormatPod*>(j_p_format));
3235 reinterpret_cast<VideoSourceInterface*>(j_p_source)->GetVideoCapturer()->
3236 StartCapturing(cricket::VideoFormat(*format));
3239 JOW(void, VideoSource_freeNativeVideoFormat)(
3240 JNIEnv* jni, jclass, jlong j_p) {
3241 delete reinterpret_cast<cricket::VideoFormatPod*>(j_p);
3244 JOW(jstring, MediaStreamTrack_nativeId)(JNIEnv* jni, jclass, jlong j_p) {
3245 return JavaStringFromStdString(
3246 jni, reinterpret_cast<MediaStreamTrackInterface*>(j_p)->id());
3249 JOW(jstring, MediaStreamTrack_nativeKind)(JNIEnv* jni, jclass, jlong j_p) {
3250 return JavaStringFromStdString(
3251 jni, reinterpret_cast<MediaStreamTrackInterface*>(j_p)->kind());
3254 JOW(jboolean, MediaStreamTrack_nativeEnabled)(JNIEnv* jni, jclass, jlong j_p) {
3255 return reinterpret_cast<MediaStreamTrackInterface*>(j_p)->enabled();
3258 JOW(jobject, MediaStreamTrack_nativeState)(JNIEnv* jni, jclass, jlong j_p) {
3259 return JavaEnumFromIndex(
3261 "MediaStreamTrack$State",
3262 reinterpret_cast<MediaStreamTrackInterface*>(j_p)->state());
3265 JOW(jboolean, MediaStreamTrack_nativeSetState)(
3266 JNIEnv* jni, jclass, jlong j_p, jint j_new_state) {
3267 MediaStreamTrackInterface::TrackState new_state =
3268 (MediaStreamTrackInterface::TrackState)j_new_state;
3269 return reinterpret_cast<MediaStreamTrackInterface*>(j_p)
3270 ->set_state(new_state);
3273 JOW(jboolean, MediaStreamTrack_nativeSetEnabled)(
3274 JNIEnv* jni, jclass, jlong j_p, jboolean enabled) {
3275 return reinterpret_cast<MediaStreamTrackInterface*>(j_p)
3276 ->set_enabled(enabled);
3279 JOW(void, VideoTrack_nativeAddRenderer)(
3280 JNIEnv* jni, jclass,
3281 jlong j_video_track_pointer, jlong j_renderer_pointer) {
3282 reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer)->AddRenderer(
3283 reinterpret_cast<VideoRendererInterface*>(j_renderer_pointer));
3286 JOW(void, VideoTrack_nativeRemoveRenderer)(
3287 JNIEnv* jni, jclass,
3288 jlong j_video_track_pointer, jlong j_renderer_pointer) {
3289 reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer)->RemoveRenderer(
3290 reinterpret_cast<VideoRendererInterface*>(j_renderer_pointer));