#include <map>
#include <sys/prctl.h>
#include <sys/syscall.h>
+#include <unistd.h>
#include "talk/app/webrtc/mediaconstraintsinterface.h"
#include "talk/app/webrtc/peerconnectioninterface.h"
#include "talk/app/webrtc/videosourceinterface.h"
+#include "talk/base/bind.h"
#include "talk/base/logging.h"
+#include "talk/base/messagequeue.h"
#include "talk/base/ssladapter.h"
#include "talk/media/base/videocapturer.h"
#include "talk/media/base/videorenderer.h"
#include "talk/media/devices/videorendererfactory.h"
#include "talk/media/webrtc/webrtcvideocapturer.h"
+#include "talk/media/webrtc/webrtcvideoencoderfactory.h"
#include "third_party/icu/source/common/unicode/unistr.h"
+#include "third_party/libyuv/include/libyuv/convert.h"
+#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
#include "webrtc/system_wrappers/interface/compile_assert.h"
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/video_engine/include/vie_base.h"
#endif
using icu::UnicodeString;
+using talk_base::Bind;
+using talk_base::Thread;
+using talk_base::ThreadManager;
+using talk_base::scoped_ptr;
using webrtc::AudioSourceInterface;
using webrtc::AudioTrackInterface;
using webrtc::AudioTrackVector;
using webrtc::VideoSourceInterface;
using webrtc::VideoTrackInterface;
using webrtc::VideoTrackVector;
+using webrtc::kVideoCodecVP8;
// Abort the process if |x| is false, emitting |msg|.
#define CHECK(x, msg) \
return jni;
}
-// Return a |jlong| that will automatically convert back to |ptr| when assigned
-// to a |uint64|
+// Return a |jlong| that will correctly convert back to |ptr|. This is needed
+// because the alternative (of silently passing a 32-bit pointer to a vararg
+// function expecting a 64-bit param) picks up garbage in the high 32 bits.
static jlong jlongFromPointer(void* ptr) {
- COMPILE_ASSERT(sizeof(intptr_t) <= sizeof(uint64),
+ COMPILE_ASSERT(sizeof(intptr_t) <= sizeof(jlong),
Time_to_rethink_the_use_of_jlongs);
- // Guaranteed to fit by the COMPILE_ASSERT above.
- uint64 u64 = reinterpret_cast<intptr_t>(ptr);
- // If the unsigned value fits in the signed type, return it directly.
- if (u64 <= std::numeric_limits<int64>::max())
- return u64;
- // Otherwise, we need to get move u64 into the range of [int64min, -1] subject
- // to the constraints of remaining equal to |u64| modulo |2^64|.
- u64 = std::numeric_limits<uint64>::max() - u64; // In [0,int64max].
- int64 i64 = -u64; // In [-int64max, 0].
- i64 -= 1; // In [int64min, -1], and i64+2^64==u64.
- return i64;
+ // Going through intptr_t to be obvious about the definedness of the
+ // conversion from pointer to integral type. intptr_t to jlong is a standard
+ // widening by the COMPILE_ASSERT above.
+ jlong ret = reinterpret_cast<intptr_t>(ptr);
+ // Round-trip sanity check; compiled out in NDEBUG builds.
+ // NOTE(review): assert() requires <cassert>/<assert.h> — confirm it is
+ // included elsewhere in this file.
+ assert(reinterpret_cast<void*>(ret) == ptr);
+ return ret;
}
// Android's FindClass() is trickier than usual because the app-specific
LoadClass(jni, "org/webrtc/DataChannel$Init");
LoadClass(jni, "org/webrtc/DataChannel$State");
LoadClass(jni, "org/webrtc/IceCandidate");
+#ifdef ANDROID
+ LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder");
+ LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
+#endif
LoadClass(jni, "org/webrtc/MediaSource$State");
LoadClass(jni, "org/webrtc/MediaStream");
LoadClass(jni, "org/webrtc/MediaStreamTrack$State");
- LoadClass(jni, "org/webrtc/PeerConnection$SignalingState");
LoadClass(jni, "org/webrtc/PeerConnection$IceConnectionState");
LoadClass(jni, "org/webrtc/PeerConnection$IceGatheringState");
+ LoadClass(jni, "org/webrtc/PeerConnection$SignalingState");
LoadClass(jni, "org/webrtc/SessionDescription");
LoadClass(jni, "org/webrtc/SessionDescription$Type");
LoadClass(jni, "org/webrtc/StatsReport");
return f;
}
+// Returns a global reference guaranteed to be valid for the lifetime of the
+// process.
jclass FindClass(JNIEnv* jni, const char* name) {
return g_class_reference_holder->GetClass(name);
}
jobject const obj_;
};
-// Given a local ref, take ownership of it and delete the ref when this goes out
-// of scope.
-template<class T> // T is jclass, jobject, jintArray, etc.
-class ScopedLocalRef {
+// Scope Java local references to the lifetime of this object. Use in all C++
+// callbacks (i.e. entry points that don't originate in a Java callstack
+// through a "native" method call).
+class ScopedLocalRefFrame {
public:
- ScopedLocalRef(JNIEnv* jni, T obj)
- : jni_(jni), obj_(obj) {}
- ~ScopedLocalRef() {
- jni_->DeleteLocalRef(obj_);
+ explicit ScopedLocalRefFrame(JNIEnv* jni) : jni_(jni) {
+ // PushLocalFrame's capacity argument is a hint; 0 relies on the VM's
+ // default growth. It returns 0 on success, hence the negation.
+ CHECK(!jni_->PushLocalFrame(0), "Failed to PushLocalFrame");
}
- T operator*() const {
- return obj_;
+ ~ScopedLocalRefFrame() {
+ // Frees every local ref created since the constructor ran.
+ jni_->PopLocalFrame(NULL);
}
+
private:
JNIEnv* jni_;
- T obj_;
};
// Scoped holder for global Java refs.
template<class T> // T is jclass, jobject, jintArray, etc.
class ScopedGlobalRef {
public:
- explicit ScopedGlobalRef(JNIEnv* jni, T obj)
+ ScopedGlobalRef(JNIEnv* jni, T obj)
: obj_(static_cast<T>(jni->NewGlobalRef(obj))) {}
~ScopedGlobalRef() {
DeleteGlobalRef(AttachCurrentThreadIfNeeded(), obj_);
T obj_;
};
+// Java references to "null" can only be distinguished as such in C++ by
+// creating a local reference, so this helper wraps that logic.
+static bool IsNull(JNIEnv* jni, jobject obj) {
+ ScopedLocalRefFrame local_ref_frame(jni);
+ // NewLocalRef() returns NULL iff |obj| refers to null (or a collected weak
+ // ref); the temporary local ref is reclaimed by the frame's destructor.
+ return jni->NewLocalRef(obj) == NULL;
+}
+
// Return the (singleton) Java Enum object corresponding to |index|;
// |state_class_fragment| is something like "MediaSource$State".
jobject JavaEnumFromIndex(
jclass state_class = FindClass(jni, state_class_name.c_str());
jmethodID state_values_id = GetStaticMethodID(
jni, state_class, "values", ("()[L" + state_class_name + ";").c_str());
- ScopedLocalRef<jobjectArray> state_values(
- jni,
- (jobjectArray)jni->CallStaticObjectMethod(state_class, state_values_id));
+ jobjectArray state_values = static_cast<jobjectArray>(
+ jni->CallStaticObjectMethod(state_class, state_values_id));
CHECK_EXCEPTION(jni, "error during CallStaticObjectMethod");
- jobject ret = jni->GetObjectArrayElement(*state_values, index);
+ jobject ret = jni->GetObjectArrayElement(state_values, index);
CHECK_EXCEPTION(jni, "error during GetObjectArrayElement");
return ret;
}
virtual ~PCOJava() {}
virtual void OnIceCandidate(const IceCandidateInterface* candidate) OVERRIDE {
+ // All local refs created below are freed when this frame pops.
+ ScopedLocalRefFrame local_ref_frame(jni());
std::string sdp;
CHECK(candidate->ToString(&sdp), "got so far: " << sdp);
jclass candidate_class = FindClass(jni(), "org/webrtc/IceCandidate");
jmethodID ctor = GetMethodID(jni(), candidate_class,
"<init>", "(Ljava/lang/String;ILjava/lang/String;)V");
- ScopedLocalRef<jstring> j_mid(
- jni(), JavaStringFromStdString(jni(), candidate->sdp_mid()));
- ScopedLocalRef<jstring> j_sdp(jni(), JavaStringFromStdString(jni(), sdp));
- ScopedLocalRef<jobject> j_candidate(jni(), jni()->NewObject(
- candidate_class, ctor, *j_mid, candidate->sdp_mline_index(), *j_sdp));
+ jstring j_mid = JavaStringFromStdString(jni(), candidate->sdp_mid());
+ jstring j_sdp = JavaStringFromStdString(jni(), sdp);
+ jobject j_candidate = jni()->NewObject(
+ candidate_class, ctor, j_mid, candidate->sdp_mline_index(), j_sdp);
CHECK_EXCEPTION(jni(), "error during NewObject");
jmethodID m = GetMethodID(jni(), *j_observer_class_,
"onIceCandidate", "(Lorg/webrtc/IceCandidate;)V");
- jni()->CallVoidMethod(*j_observer_global_, m, *j_candidate);
+ jni()->CallVoidMethod(*j_observer_global_, m, j_candidate);
CHECK_EXCEPTION(jni(), "error during CallVoidMethod");
}
virtual void OnError() OVERRIDE {
- jmethodID m = GetMethodID(jni(), *j_observer_class_, "onError", "(V)V");
+ ScopedLocalRefFrame local_ref_frame(jni());
+ // Also fixes the JNI signature: a no-arg void method is "()V", not "(V)V".
+ jmethodID m = GetMethodID(jni(), *j_observer_class_, "onError", "()V");
jni()->CallVoidMethod(*j_observer_global_, m);
CHECK_EXCEPTION(jni(), "error during CallVoidMethod");
}
virtual void OnSignalingChange(
PeerConnectionInterface::SignalingState new_state) OVERRIDE {
+ // Frees the enum local ref (and any others) when the callback returns.
+ ScopedLocalRefFrame local_ref_frame(jni());
jmethodID m = GetMethodID(
jni(), *j_observer_class_, "onSignalingChange",
"(Lorg/webrtc/PeerConnection$SignalingState;)V");
- ScopedLocalRef<jobject> new_state_enum(jni(), JavaEnumFromIndex(
- jni(), "PeerConnection$SignalingState", new_state));
- jni()->CallVoidMethod(*j_observer_global_, m, *new_state_enum);
+ jobject new_state_enum =
+ JavaEnumFromIndex(jni(), "PeerConnection$SignalingState", new_state);
+ jni()->CallVoidMethod(*j_observer_global_, m, new_state_enum);
CHECK_EXCEPTION(jni(), "error during CallVoidMethod");
}
virtual void OnIceConnectionChange(
PeerConnectionInterface::IceConnectionState new_state) OVERRIDE {
+ // Frees the enum local ref (and any others) when the callback returns.
+ ScopedLocalRefFrame local_ref_frame(jni());
jmethodID m = GetMethodID(
jni(), *j_observer_class_, "onIceConnectionChange",
"(Lorg/webrtc/PeerConnection$IceConnectionState;)V");
- ScopedLocalRef<jobject> new_state_enum(jni(), JavaEnumFromIndex(
- jni(), "PeerConnection$IceConnectionState", new_state));
- jni()->CallVoidMethod(*j_observer_global_, m, *new_state_enum);
+ jobject new_state_enum = JavaEnumFromIndex(
+ jni(), "PeerConnection$IceConnectionState", new_state);
+ jni()->CallVoidMethod(*j_observer_global_, m, new_state_enum);
CHECK_EXCEPTION(jni(), "error during CallVoidMethod");
}
virtual void OnIceGatheringChange(
PeerConnectionInterface::IceGatheringState new_state) OVERRIDE {
+ // Frees the enum local ref (and any others) when the callback returns.
+ ScopedLocalRefFrame local_ref_frame(jni());
jmethodID m = GetMethodID(
jni(), *j_observer_class_, "onIceGatheringChange",
"(Lorg/webrtc/PeerConnection$IceGatheringState;)V");
- ScopedLocalRef<jobject> new_state_enum(jni(), JavaEnumFromIndex(
- jni(), "PeerConnection$IceGatheringState", new_state));
- jni()->CallVoidMethod(*j_observer_global_, m, *new_state_enum);
+ jobject new_state_enum = JavaEnumFromIndex(
+ jni(), "PeerConnection$IceGatheringState", new_state);
+ jni()->CallVoidMethod(*j_observer_global_, m, new_state_enum);
CHECK_EXCEPTION(jni(), "error during CallVoidMethod");
}
virtual void OnAddStream(MediaStreamInterface* stream) OVERRIDE {
- ScopedLocalRef<jobject> j_stream(jni(), jni()->NewObject(
- *j_media_stream_class_, j_media_stream_ctor_, (jlong)stream));
+ // One frame covers every local ref created while building the Java
+ // MediaStream and its track lists.
+ ScopedLocalRefFrame local_ref_frame(jni());
+ jobject j_stream = jni()->NewObject(
+ *j_media_stream_class_, j_media_stream_ctor_, (jlong)stream);
CHECK_EXCEPTION(jni(), "error during NewObject");
AudioTrackVector audio_tracks = stream->GetAudioTracks();
for (size_t i = 0; i < audio_tracks.size(); ++i) {
AudioTrackInterface* track = audio_tracks[i];
- ScopedLocalRef<jstring> id(
- jni(), JavaStringFromStdString(jni(), track->id()));
- ScopedLocalRef<jobject> j_track(jni(), jni()->NewObject(
- *j_audio_track_class_, j_audio_track_ctor_, (jlong)track, *id));
+ jstring id = JavaStringFromStdString(jni(), track->id());
+ jobject j_track = jni()->NewObject(
+ *j_audio_track_class_, j_audio_track_ctor_, (jlong)track, id);
CHECK_EXCEPTION(jni(), "error during NewObject");
jfieldID audio_tracks_id = GetFieldID(jni(),
*j_media_stream_class_,
"audioTracks",
"Ljava/util/LinkedList;");
- ScopedLocalRef<jobject> audio_tracks(jni(), GetObjectField(
- jni(), *j_stream, audio_tracks_id));
+ jobject audio_tracks = GetObjectField(jni(), j_stream, audio_tracks_id);
jmethodID add = GetMethodID(jni(),
- GetObjectClass(jni(), *audio_tracks), "add", "(Ljava/lang/Object;)Z");
- jboolean added = jni()->CallBooleanMethod(*audio_tracks, add, *j_track);
+ GetObjectClass(jni(), audio_tracks),
+ "add",
+ "(Ljava/lang/Object;)Z");
+ jboolean added = jni()->CallBooleanMethod(audio_tracks, add, j_track);
CHECK_EXCEPTION(jni(), "error during CallBooleanMethod");
CHECK(added, "");
}
VideoTrackVector video_tracks = stream->GetVideoTracks();
for (size_t i = 0; i < video_tracks.size(); ++i) {
VideoTrackInterface* track = video_tracks[i];
- ScopedLocalRef<jstring> id(
- jni(), JavaStringFromStdString(jni(), track->id()));
- ScopedLocalRef<jobject> j_track(jni(), jni()->NewObject(
- *j_video_track_class_, j_video_track_ctor_, (jlong)track, *id));
+ jstring id = JavaStringFromStdString(jni(), track->id());
+ jobject j_track = jni()->NewObject(
+ *j_video_track_class_, j_video_track_ctor_, (jlong)track, id);
CHECK_EXCEPTION(jni(), "error during NewObject");
jfieldID video_tracks_id = GetFieldID(jni(),
*j_media_stream_class_,
"videoTracks",
"Ljava/util/LinkedList;");
- ScopedLocalRef<jobject> video_tracks(jni(), GetObjectField(
- jni(), *j_stream, video_tracks_id));
+ jobject video_tracks = GetObjectField(jni(), j_stream, video_tracks_id);
jmethodID add = GetMethodID(jni(),
- GetObjectClass(jni(), *video_tracks), "add", "(Ljava/lang/Object;)Z");
- jboolean added = jni()->CallBooleanMethod(*video_tracks, add, *j_track);
+ GetObjectClass(jni(), video_tracks),
+ "add",
+ "(Ljava/lang/Object;)Z");
+ jboolean added = jni()->CallBooleanMethod(video_tracks, add, j_track);
CHECK_EXCEPTION(jni(), "error during CallBooleanMethod");
CHECK(added, "");
}
- streams_[stream] = jni()->NewWeakGlobalRef(*j_stream);
+ // Unlike the locals above, the weak global ref survives this frame; it
+ // maps the native stream back to its Java wrapper (see streams_).
+ streams_[stream] = jni()->NewWeakGlobalRef(j_stream);
CHECK_EXCEPTION(jni(), "error during NewWeakGlobalRef");
jmethodID m = GetMethodID(jni(), *j_observer_class_, "onAddStream",
"(Lorg/webrtc/MediaStream;)V");
- jni()->CallVoidMethod(*j_observer_global_, m, *j_stream);
+ jni()->CallVoidMethod(*j_observer_global_, m, j_stream);
CHECK_EXCEPTION(jni(), "error during CallVoidMethod");
}
virtual void OnRemoveStream(MediaStreamInterface* stream) OVERRIDE {
+ ScopedLocalRefFrame local_ref_frame(jni());
NativeToJavaStreamsMap::iterator it = streams_.find(stream);
CHECK(it != streams_.end(), "unexpected stream: " << std::hex << stream);
}
virtual void OnDataChannel(DataChannelInterface* channel) OVERRIDE {
- ScopedLocalRef<jobject> j_channel(jni(), jni()->NewObject(
- *j_data_channel_class_, j_data_channel_ctor_, (jlong)channel));
+ ScopedLocalRefFrame local_ref_frame(jni());
+ jobject j_channel = jni()->NewObject(
+ *j_data_channel_class_, j_data_channel_ctor_, (jlong)channel);
CHECK_EXCEPTION(jni(), "error during NewObject");
jmethodID m = GetMethodID(jni(), *j_observer_class_, "onDataChannel",
"(Lorg/webrtc/DataChannel;)V");
- jni()->CallVoidMethod(*j_observer_global_, m, *j_channel);
+ jni()->CallVoidMethod(*j_observer_global_, m, j_channel);
// Channel is now owned by Java object, and will be freed from
// DataChannel.dispose(). Important that this be done _after_ the
CHECK_EXCEPTION(jni(), "error during CallVoidMethod");
}
+ // Trampoline the (no-payload) renegotiation-needed signal to the Java
+ // observer's onRenegotiationNeeded().
+ virtual void OnRenegotiationNeeded() OVERRIDE {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ jmethodID m =
+ GetMethodID(jni(), *j_observer_class_, "onRenegotiationNeeded", "()V");
+ jni()->CallVoidMethod(*j_observer_global_, m);
+ CHECK_EXCEPTION(jni(), "error during CallVoidMethod");
+ }
+
void SetConstraints(ConstraintsWrapper* constraints) {
CHECK(!constraints_.get(), "constraints already set!");
constraints_.reset(constraints);
const jmethodID j_data_channel_ctor_;
typedef std::map<void*, jweak> NativeToJavaStreamsMap;
NativeToJavaStreamsMap streams_; // C++ -> Java streams.
- talk_base::scoped_ptr<ConstraintsWrapper> constraints_;
+ // Unqualified via the file-level "using talk_base::scoped_ptr".
+ scoped_ptr<ConstraintsWrapper> constraints_;
};
// Wrapper for a Java MediaConstraints object. Copies all needed data so when
JNIEnv* jni, const SessionDescriptionInterface* desc) {
std::string sdp;
CHECK(desc->ToString(&sdp), "got so far: " << sdp);
- ScopedLocalRef<jstring> j_description(jni, JavaStringFromStdString(jni, sdp));
+ jstring j_description = JavaStringFromStdString(jni, sdp);
jclass j_type_class = FindClass(
jni, "org/webrtc/SessionDescription$Type");
jmethodID j_type_from_canonical = GetStaticMethodID(
jni, j_type_class, "fromCanonicalForm",
"(Ljava/lang/String;)Lorg/webrtc/SessionDescription$Type;");
- ScopedLocalRef<jstring> j_type_string(
- jni, JavaStringFromStdString(jni, desc->type()));
+ jstring j_type_string = JavaStringFromStdString(jni, desc->type());
jobject j_type = jni->CallStaticObjectMethod(
- j_type_class, j_type_from_canonical, *j_type_string);
+ j_type_class, j_type_from_canonical, j_type_string);
CHECK_EXCEPTION(jni, "error during CallObjectMethod");
jclass j_sdp_class = FindClass(jni, "org/webrtc/SessionDescription");
jni, j_sdp_class, "<init>",
"(Lorg/webrtc/SessionDescription$Type;Ljava/lang/String;)V");
jobject j_sdp = jni->NewObject(
- j_sdp_class, j_sdp_ctor, j_type, *j_description);
+ j_sdp_class, j_sdp_ctor, j_type, j_description);
CHECK_EXCEPTION(jni, "error during NewObject");
return j_sdp;
}
// Can't mark OVERRIDE because of templating.
virtual void OnSuccess() {
+ ScopedLocalRefFrame local_ref_frame(jni());
jmethodID m = GetMethodID(jni(), *j_observer_class_, "onSetSuccess", "()V");
jni()->CallVoidMethod(*j_observer_global_, m);
CHECK_EXCEPTION(jni(), "error during CallVoidMethod");
// Can't mark OVERRIDE because of templating.
virtual void OnSuccess(SessionDescriptionInterface* desc) {
+ // Frees the Java SessionDescription local ref when the callback returns.
+ ScopedLocalRefFrame local_ref_frame(jni());
jmethodID m = GetMethodID(
jni(), *j_observer_class_, "onCreateSuccess",
"(Lorg/webrtc/SessionDescription;)V");
- ScopedLocalRef<jobject> j_sdp(jni(), JavaSdpFromNativeSdp(jni(), desc));
- jni()->CallVoidMethod(*j_observer_global_, m, *j_sdp);
+ jobject j_sdp = JavaSdpFromNativeSdp(jni(), desc);
+ jni()->CallVoidMethod(*j_observer_global_, m, j_sdp);
CHECK_EXCEPTION(jni(), "error during CallVoidMethod");
}
void OnFailure(const std::string& op, const std::string& error) {
jmethodID m = GetMethodID(jni(), *j_observer_class_, "on" + op + "Failure",
"(Ljava/lang/String;)V");
- ScopedLocalRef<jstring> j_error_string(
- jni(), JavaStringFromStdString(jni(), error));
+ // NOTE(review): no ScopedLocalRefFrame here — presumably the overriding
+ // subclasses (Create/SetSdpObserver) push one before delegating; confirm.
+ jstring j_error_string = JavaStringFromStdString(jni(), error);
+ jni()->CallVoidMethod(*j_observer_global_, m, j_error_string);
CHECK_EXCEPTION(jni(), "error during CallVoidMethod");
}
- private:
JNIEnv* jni() {
return AttachCurrentThreadIfNeeded();
}
+ // jni() is intentionally moved above the private: section so subclasses can
+ // use it (e.g. to push their own local-ref frames).
+ private:
+ scoped_ptr<ConstraintsWrapper> constraints_;
const ScopedGlobalRef<jobject> j_observer_global_;
const ScopedGlobalRef<jclass> j_observer_class_;
};
: SdpObserverWrapper(jni, j_observer, constraints) {}
virtual void OnFailure(const std::string& error) OVERRIDE {
+ // Free local refs created by the shared OnFailure implementation.
+ ScopedLocalRefFrame local_ref_frame(jni());
SdpObserverWrapper::OnFailure(std::string("Create"), error);
}
};
: SdpObserverWrapper(jni, j_observer, constraints) {}
virtual void OnFailure(const std::string& error) OVERRIDE {
+ // Free local refs created by the shared OnFailure implementation.
+ ScopedLocalRefFrame local_ref_frame(jni());
SdpObserverWrapper::OnFailure(std::string("Set"), error);
}
};
virtual ~DataChannelObserverWrapper() {}
virtual void OnStateChange() OVERRIDE {
+ // Free any local refs created while calling into Java.
+ ScopedLocalRefFrame local_ref_frame(jni());
jni()->CallVoidMethod(*j_observer_global_, j_on_state_change_mid_);
CHECK_EXCEPTION(jni(), "error during CallVoidMethod");
}
virtual void OnMessage(const DataBuffer& buffer) OVERRIDE {
+ ScopedLocalRefFrame local_ref_frame(jni());
jobject byte_buffer =
jni()->NewDirectByteBuffer(const_cast<char*>(buffer.data.data()),
buffer.data.length());
virtual ~StatsObserverWrapper() {}
virtual void OnComplete(const std::vector<StatsReport>& reports) OVERRIDE {
- ScopedLocalRef<jobjectArray> j_reports(jni(),
- ReportsToJava(jni(), reports));
+ // Frees the reports array (and everything ReportsToJava creates).
+ ScopedLocalRefFrame local_ref_frame(jni());
+ jobjectArray j_reports = ReportsToJava(jni(), reports);
jmethodID m = GetMethodID(jni(), *j_observer_class_, "onComplete",
"([Lorg/webrtc/StatsReport;)V");
- jni()->CallVoidMethod(*j_observer_global_, m, *j_reports);
+ jni()->CallVoidMethod(*j_observer_global_, m, j_reports);
CHECK_EXCEPTION(jni(), "error during CallVoidMethod");
}
jobjectArray reports_array = jni->NewObjectArray(
reports.size(), *j_stats_report_class_, NULL);
for (int i = 0; i < reports.size(); ++i) {
+ ScopedLocalRefFrame local_ref_frame(jni);
const StatsReport& report = reports[i];
- ScopedLocalRef<jstring> j_id(
- jni, JavaStringFromStdString(jni, report.id));
- ScopedLocalRef<jstring> j_type(
- jni, JavaStringFromStdString(jni, report.type));
- ScopedLocalRef<jobjectArray> j_values(
- jni, ValuesToJava(jni, report.values));
- ScopedLocalRef<jobject> j_report(jni, jni->NewObject(
- *j_stats_report_class_, j_stats_report_ctor_, *j_id, *j_type,
- report.timestamp, *j_values));
- jni->SetObjectArrayElement(reports_array, i, *j_report);
+ jstring j_id = JavaStringFromStdString(jni, report.id);
+ jstring j_type = JavaStringFromStdString(jni, report.type);
+ jobjectArray j_values = ValuesToJava(jni, report.values);
+ jobject j_report = jni->NewObject(*j_stats_report_class_,
+ j_stats_report_ctor_,
+ j_id,
+ j_type,
+ report.timestamp,
+ j_values);
+ jni->SetObjectArrayElement(reports_array, i, j_report);
}
return reports_array;
}
jobjectArray j_values = jni->NewObjectArray(
values.size(), *j_value_class_, NULL);
for (int i = 0; i < values.size(); ++i) {
+ ScopedLocalRefFrame local_ref_frame(jni);
const StatsReport::Value& value = values[i];
- ScopedLocalRef<jstring> j_name(
- jni, JavaStringFromStdString(jni, value.name));
- ScopedLocalRef<jstring> j_value(
- jni, JavaStringFromStdString(jni, value.value));
- ScopedLocalRef<jobject> j_element_value(jni, jni->NewObject(
- *j_value_class_, j_value_ctor_, *j_name, *j_value));
- jni->SetObjectArrayElement(j_values, i, *j_element_value);
+ jstring j_name = JavaStringFromStdString(jni, value.name);
+ jstring j_value = JavaStringFromStdString(jni, value.value);
+ jobject j_element_value =
+ jni->NewObject(*j_value_class_, j_value_ctor_, j_name, j_value);
+ jni->SetObjectArrayElement(j_values, i, j_element_value);
}
return j_values;
}
virtual ~VideoRendererWrapper() {}
virtual void SetSize(int width, int height) OVERRIDE {
+ // NOTE(review): the wrapped renderer is native; the local-ref frame here
+ // is presumably defensive in case it makes JNI calls — confirm.
+ ScopedLocalRefFrame local_ref_frame(AttachCurrentThreadIfNeeded());
const bool kNotReserved = false; // What does this param mean??
renderer_->SetSize(width, height, kNotReserved);
}
virtual void RenderFrame(const cricket::VideoFrame* frame) OVERRIDE {
+ ScopedLocalRefFrame local_ref_frame(AttachCurrentThreadIfNeeded());
renderer_->RenderFrame(frame);
}
explicit VideoRendererWrapper(cricket::VideoRenderer* renderer)
: renderer_(renderer) {}
- talk_base::scoped_ptr<cricket::VideoRenderer> renderer_;
+ // Owned: the wrapper deletes the renderer on destruction.
+ scoped_ptr<cricket::VideoRenderer> renderer_;
};
// Wrapper dispatching webrtc::VideoRendererInterface to a Java VideoRenderer
virtual ~JavaVideoRendererWrapper() {}
virtual void SetSize(int width, int height) OVERRIDE {
+ // Free any local refs created while calling into Java.
+ ScopedLocalRefFrame local_ref_frame(jni());
jni()->CallVoidMethod(*j_callbacks_, j_set_size_id_, width, height);
CHECK_EXCEPTION(jni(), "");
}
virtual void RenderFrame(const cricket::VideoFrame* frame) OVERRIDE {
- ScopedLocalRef<jobject> j_frame(jni(), CricketToJavaFrame(frame));
- jni()->CallVoidMethod(*j_callbacks_, j_render_frame_id_, *j_frame);
+ // The frame object and all refs created by CricketToJavaFrame() are
+ // reclaimed when this frame pops.
+ ScopedLocalRefFrame local_ref_frame(jni());
+ jobject j_frame = CricketToJavaFrame(frame);
+ jni()->CallVoidMethod(*j_callbacks_, j_render_frame_id_, j_frame);
CHECK_EXCEPTION(jni(), "");
}
private:
// Return a VideoRenderer.I420Frame referring to the data in |frame|.
jobject CricketToJavaFrame(const cricket::VideoFrame* frame) {
- ScopedLocalRef<jintArray> strides(jni(), jni()->NewIntArray(3));
- jint* strides_array = jni()->GetIntArrayElements(*strides, NULL);
+ jintArray strides = jni()->NewIntArray(3);
+ jint* strides_array = jni()->GetIntArrayElements(strides, NULL);
strides_array[0] = frame->GetYPitch();
strides_array[1] = frame->GetUPitch();
strides_array[2] = frame->GetVPitch();
- jni()->ReleaseIntArrayElements(*strides, strides_array, 0);
- ScopedLocalRef<jobjectArray> planes(
- jni(), jni()->NewObjectArray(3, *j_byte_buffer_class_, NULL));
- ScopedLocalRef<jobject> y_buffer(jni(), jni()->NewDirectByteBuffer(
+ jni()->ReleaseIntArrayElements(strides, strides_array, 0);
+ jobjectArray planes = jni()->NewObjectArray(3, *j_byte_buffer_class_, NULL);
+ // Zero-copy: the direct ByteBuffers alias |frame|'s plane memory, so the
+ // Java side must not retain them past the render callback.
+ jobject y_buffer = jni()->NewDirectByteBuffer(
const_cast<uint8*>(frame->GetYPlane()),
- frame->GetYPitch() * frame->GetHeight()));
- ScopedLocalRef<jobject> u_buffer(jni(), jni()->NewDirectByteBuffer(
- const_cast<uint8*>(frame->GetUPlane()), frame->GetChromaSize()));
- ScopedLocalRef<jobject> v_buffer(jni(), jni()->NewDirectByteBuffer(
- const_cast<uint8*>(frame->GetVPlane()), frame->GetChromaSize()));
- jni()->SetObjectArrayElement(*planes, 0, *y_buffer);
- jni()->SetObjectArrayElement(*planes, 1, *u_buffer);
- jni()->SetObjectArrayElement(*planes, 2, *v_buffer);
+ frame->GetYPitch() * frame->GetHeight());
+ jobject u_buffer = jni()->NewDirectByteBuffer(
+ const_cast<uint8*>(frame->GetUPlane()), frame->GetChromaSize());
+ jobject v_buffer = jni()->NewDirectByteBuffer(
+ const_cast<uint8*>(frame->GetVPlane()), frame->GetChromaSize());
+ jni()->SetObjectArrayElement(planes, 0, y_buffer);
+ jni()->SetObjectArrayElement(planes, 1, u_buffer);
+ jni()->SetObjectArrayElement(planes, 2, v_buffer);
return jni()->NewObject(
*j_frame_class_, j_frame_ctor_id_,
- frame->GetWidth(), frame->GetHeight(), *strides, *planes);
+ frame->GetWidth(), frame->GetHeight(), strides, planes);
}
JNIEnv* jni() {
ScopedGlobalRef<jclass> j_byte_buffer_class_;
};
-} // anonymous namespace
+#ifdef ANDROID
+// TODO(fischman): consider pulling MediaCodecVideoEncoder out of this file and
+// into its own .h/.cc pair, if/when the JNI helper stuff above is extracted
+// from this file.
+
+// Arbitrary interval to poll the codec for new outputs.
+enum { kMediaCodecPollMs = 10 };
+
+// MediaCodecVideoEncoder is a webrtc::VideoEncoder implementation that uses
+// Android's MediaCodec SDK API behind the scenes to implement (hopefully)
+// HW-backed video encode. This C++ class is implemented as a very thin shim,
+// delegating all of the interesting work to org.webrtc.MediaCodecVideoEncoder.
+// MediaCodecVideoEncoder is created, operated, and destroyed on a single
+// thread, currently the libjingle Worker thread.
+class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
+ public talk_base::MessageHandler {
+ public:
+ virtual ~MediaCodecVideoEncoder();
+ explicit MediaCodecVideoEncoder(JNIEnv* jni);
+
+ // webrtc::VideoEncoder implementation. Everything trampolines to
+ // |codec_thread_| for execution.
+ virtual int32_t InitEncode(const webrtc::VideoCodec* codec_settings,
+ int32_t /* number_of_cores */,
+ uint32_t /* max_payload_size */) OVERRIDE;
+ virtual int32_t Encode(
+ const webrtc::I420VideoFrame& input_image,
+ const webrtc::CodecSpecificInfo* /* codec_specific_info */,
+ const std::vector<webrtc::VideoFrameType>* frame_types) OVERRIDE;
+ virtual int32_t RegisterEncodeCompleteCallback(
+ webrtc::EncodedImageCallback* callback) OVERRIDE;
+ virtual int32_t Release() OVERRIDE;
+ virtual int32_t SetChannelParameters(uint32_t /* packet_loss */,
+ int /* rtt */) OVERRIDE;
+ virtual int32_t SetRates(uint32_t new_bit_rate, uint32_t frame_rate) OVERRIDE;
+
+ // talk_base::MessageHandler implementation.
+ virtual void OnMessage(talk_base::Message* msg) OVERRIDE;
+
+ private:
+ // CHECK-fail if not running on |codec_thread_|.
+ void CheckOnCodecThread();
+
+ // Release() and InitEncode() in an attempt to restore the codec to an
+ // operable state. Necessary after all manner of OMX-layer errors.
+ void ResetCodec();
+
+ // Implementation of webrtc::VideoEncoder methods above, all running on the
+ // codec thread exclusively.
+ //
+ // If width==0 then this is assumed to be a re-initialization and the
+ // previously-current values are reused instead of the passed parameters
+ // (makes it easier to reason about thread-safety).
+ int32_t InitEncodeOnCodecThread(int width, int height, int kbps);
+ int32_t EncodeOnCodecThread(
+ const webrtc::I420VideoFrame& input_image,
+ const std::vector<webrtc::VideoFrameType>* frame_types);
+ int32_t RegisterEncodeCompleteCallbackOnCodecThread(
+ webrtc::EncodedImageCallback* callback);
+ int32_t ReleaseOnCodecThread();
+ int32_t SetRatesOnCodecThread(uint32_t new_bit_rate, uint32_t frame_rate);
+
+ // Reset parameters valid between InitEncode() & Release() (see below).
+ void ResetParameters(JNIEnv* jni);
+
+ // Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members.
+ int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info);
+ jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info);
+ bool GetOutputBufferInfoIsKeyFrame(JNIEnv* jni, jobject j_output_buffer_info);
+ jlong GetOutputBufferInfoPresentationTimestampUs(
+ JNIEnv* jni,
+ jobject j_output_buffer_info);
+
+ // Deliver any outputs pending in the MediaCodec to our |callback_| and return
+ // true on success.
+ bool DeliverPendingOutputs(JNIEnv* jni);
+
+ // Valid all the time since RegisterEncodeCompleteCallback() Invoke()s to
+ // |codec_thread_| synchronously. Not owned.
+ webrtc::EncodedImageCallback* callback_;
+
+ // State that is constant for the lifetime of this object once the ctor
+ // returns.
+ scoped_ptr<Thread> codec_thread_; // Thread on which to operate MediaCodec.
+ ScopedGlobalRef<jclass> j_media_codec_video_encoder_class_;
+ ScopedGlobalRef<jobject> j_media_codec_video_encoder_;
+ jmethodID j_init_encode_method_;
+ jmethodID j_dequeue_input_buffer_method_;
+ jmethodID j_encode_method_;
+ jmethodID j_release_method_;
+ jmethodID j_set_rates_method_;
+ jmethodID j_dequeue_output_buffer_method_;
+ jmethodID j_release_output_buffer_method_;
+ jfieldID j_info_index_field_;
+ jfieldID j_info_buffer_field_;
+ jfieldID j_info_is_key_frame_field_;
+ jfieldID j_info_presentation_timestamp_us_field_;
+
+ // State that is valid only between InitEncode() and the next Release().
+ // Touched only on codec_thread_ so no explicit synchronization necessary.
+ int width_; // Frame width in pixels.
+ int height_; // Frame height in pixels.
+ int last_set_bitrate_kbps_; // Last-requested bitrate in kbps.
+ // Frame size in bytes fed to MediaCodec (stride==width, sliceHeight==height).
+ int nv12_size_;
+ // True only while between a callback_->Encoded() call returning a positive
+ // value and the next Encode() call (which is then ignored).
+ bool drop_next_input_frame_;
+ // Global references; must be deleted in Release().
+ // NOTE(review): presumably the input ByteBuffers returned by Java's
+ // initEncode() — confirm Release() deletes each before clearing.
+ std::vector<jobject> input_buffers_;
+};
+
+// Message IDs dispatched to OnMessage() on |codec_thread_|.
+enum { MSG_SET_RATES, MSG_POLL_FOR_READY_OUTPUTS, };
+
+MediaCodecVideoEncoder::~MediaCodecVideoEncoder() {
+ // We depend on ResetParameters() to ensure no more callbacks to us after we
+ // are deleted, so assert it here.
+ // (width_ == 0 is the "released" sentinel; see the width_ declaration.)
+ CHECK(width_ == 0, "Release() should have been called");
+}
+
+MediaCodecVideoEncoder::MediaCodecVideoEncoder(JNIEnv* jni)
+ : callback_(NULL),
+ codec_thread_(new Thread()),
+ j_media_codec_video_encoder_class_(
+ jni,
+ FindClass(jni, "org/webrtc/MediaCodecVideoEncoder")),
+ j_media_codec_video_encoder_(
+ jni,
+ jni->NewObject(*j_media_codec_video_encoder_class_,
+ GetMethodID(jni,
+ *j_media_codec_video_encoder_class_,
+ "<init>",
+ "()V"))) {
+ ScopedLocalRefFrame local_ref_frame(jni);
+ // It would be nice to avoid spinning up a new thread per MediaCodec, and
+ // instead re-use e.g. the PeerConnectionFactory's |worker_thread_|, but bug
+ // 2732 means that deadlocks abound. This class synchronously trampolines
+ // to |codec_thread_|, so if anything else can be coming to _us_ from
+ // |codec_thread_|, or from any thread holding the |_sendCritSect| described
+ // in the bug, we have a problem. For now work around that with a dedicated
+ // thread.
+ codec_thread_->SetName("MediaCodecVideoEncoder", NULL);
+ CHECK(codec_thread_->Start(), "Failed to start MediaCodecVideoEncoder");
+
+ ResetParameters(jni);
+
+ // Cache all method/field IDs up front; the single CHECK_EXCEPTION at the
+ // end of the ctor catches any failed lookup.
+ jclass j_output_buffer_info_class =
+ FindClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
+ j_init_encode_method_ = GetMethodID(jni,
+ *j_media_codec_video_encoder_class_,
+ "initEncode",
+ "(III)[Ljava/nio/ByteBuffer;");
+ j_dequeue_input_buffer_method_ = GetMethodID(
+ jni, *j_media_codec_video_encoder_class_, "dequeueInputBuffer", "()I");
+ j_encode_method_ = GetMethodID(
+ jni, *j_media_codec_video_encoder_class_, "encode", "(ZIIJ)Z");
+ j_release_method_ =
+ GetMethodID(jni, *j_media_codec_video_encoder_class_, "release", "()V");
+ j_set_rates_method_ = GetMethodID(
+ jni, *j_media_codec_video_encoder_class_, "setRates", "(II)Z");
+ j_dequeue_output_buffer_method_ =
+ GetMethodID(jni,
+ *j_media_codec_video_encoder_class_,
+ "dequeueOutputBuffer",
+ "()Lorg/webrtc/MediaCodecVideoEncoder$OutputBufferInfo;");
+ j_release_output_buffer_method_ = GetMethodID(
+ jni, *j_media_codec_video_encoder_class_, "releaseOutputBuffer", "(I)Z");
+
+ j_info_index_field_ =
+ GetFieldID(jni, j_output_buffer_info_class, "index", "I");
+ j_info_buffer_field_ = GetFieldID(
+ jni, j_output_buffer_info_class, "buffer", "Ljava/nio/ByteBuffer;");
+ j_info_is_key_frame_field_ =
+ GetFieldID(jni, j_output_buffer_info_class, "isKeyFrame", "Z");
+ j_info_presentation_timestamp_us_field_ = GetFieldID(
+ jni, j_output_buffer_info_class, "presentationTimestampUs", "J");
+ CHECK_EXCEPTION(jni, "MediaCodecVideoEncoder ctor failed");
+}
+
+// webrtc::VideoEncoder::InitEncode implementation.  Synchronously
+// trampolines to InitEncodeOnCodecThread() on |codec_thread_|.  Only VP8 is
+// supported; the factory is expected to enforce this, so anything else is a
+// fatal error.
+int32_t MediaCodecVideoEncoder::InitEncode(
+ const webrtc::VideoCodec* codec_settings,
+ int32_t /* number_of_cores */,
+ uint32_t /* max_payload_size */) {
+ // Factory should guard against other codecs being used with us.
+ CHECK(codec_settings->codecType == kVideoCodecVP8, "Unsupported codec");
+
+ return codec_thread_->Invoke<int32_t>(
+ Bind(&MediaCodecVideoEncoder::InitEncodeOnCodecThread,
+ this,
+ codec_settings->width,
+ codec_settings->height,
+ codec_settings->startBitrate));
+}
+
+// webrtc::VideoEncoder::Encode implementation.  Synchronously trampolines
+// the work to |codec_thread_|.  NOTE(review): Bind() appears to capture
+// |frame| by value for the cross-thread call -- confirm the cost/semantics
+// of copying an I420VideoFrame here.
+int32_t MediaCodecVideoEncoder::Encode(
+ const webrtc::I420VideoFrame& frame,
+ const webrtc::CodecSpecificInfo* /* codec_specific_info */,
+ const std::vector<webrtc::VideoFrameType>* frame_types) {
+ return codec_thread_->Invoke<int32_t>(Bind(
+ &MediaCodecVideoEncoder::EncodeOnCodecThread, this, frame, frame_types));
+}
+
+// webrtc::VideoEncoder::RegisterEncodeCompleteCallback implementation.
+// Trampolines to the codec thread so |callback_| is only touched there.
+int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallback(
+ webrtc::EncodedImageCallback* callback) {
+ return codec_thread_->Invoke<int32_t>(
+ Bind(&MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread,
+ this,
+ callback));
+}
+
+// webrtc::VideoEncoder::Release implementation; trampolines to
+// ReleaseOnCodecThread() which frees input buffers and resets state.
+int32_t MediaCodecVideoEncoder::Release() {
+ return codec_thread_->Invoke<int32_t>(
+ Bind(&MediaCodecVideoEncoder::ReleaseOnCodecThread, this));
+}
+
+// webrtc::VideoEncoder::SetChannelParameters implementation.  Intentional
+// no-op: packet loss / RTT are not forwarded to MediaCodec.
+int32_t MediaCodecVideoEncoder::SetChannelParameters(uint32_t /* packet_loss */,
+ int /* rtt */) {
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// webrtc::VideoEncoder::SetRates implementation; trampolines the new
+// bitrate/framerate to the codec thread.
+int32_t MediaCodecVideoEncoder::SetRates(uint32_t new_bit_rate,
+ uint32_t frame_rate) {
+ return codec_thread_->Invoke<int32_t>(
+ Bind(&MediaCodecVideoEncoder::SetRatesOnCodecThread,
+ this,
+ new_bit_rate,
+ frame_rate));
+}
+
+// talk_base::MessageHandler implementation: the periodic output-poll timer.
+// Drains any ready MediaCodec outputs and re-posts itself, so the poll keeps
+// running until ResetParameters() clears pending messages.
+void MediaCodecVideoEncoder::OnMessage(talk_base::Message* msg) {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(jni);
+
+ // We only ever send one message to |this| directly (not through a Bind()'d
+ // functor), so expect no ID/data.
+ CHECK(!msg->message_id, "Unexpected message!");
+ CHECK(!msg->pdata, "Unexpected message!");
+ CheckOnCodecThread();
+
+ // It would be nice to recover from a failure here if one happened, but it's
+ // unclear how to signal such a failure to the app, so instead we stay silent
+ // about it and let the next app-called API method reveal the borkedness.
+ DeliverPendingOutputs(jni);
+ codec_thread_->PostDelayed(kMediaCodecPollMs, this);
+}
+
+// Fatal assertion that the caller is running on |codec_thread_|.
+void MediaCodecVideoEncoder::CheckOnCodecThread() {
+ CHECK(codec_thread_ == ThreadManager::Instance()->CurrentThread(),
+ "Running on wrong thread!");
+}
+
+// Tears down and re-creates the MediaCodec session after an error.  Passing
+// width==0 makes InitEncodeOnCodecThread() re-use the previously stored
+// width/height/bitrate.  Failures are deliberately swallowed (see TODO).
+void MediaCodecVideoEncoder::ResetCodec() {
+ if (Release() != WEBRTC_VIDEO_CODEC_OK ||
+ codec_thread_->Invoke<int32_t>(Bind(
+ &MediaCodecVideoEncoder::InitEncodeOnCodecThread, this, 0, 0, 0)) !=
+ WEBRTC_VIDEO_CODEC_OK) {
+ // TODO(fischman): wouldn't it be nice if there was a way to gracefully
+ // degrade to a SW encoder at this point? There isn't one AFAICT :(
+ // https://code.google.com/p/webrtc/issues/detail?id=2920
+ }
+}
+
+// Codec-thread body of InitEncode().  |width|==0 is a sentinel (used by
+// ResetCodec()) meaning "re-initialize with the previously stored
+// parameters".  Pins the Java-side direct input ByteBuffers with global refs
+// (released in ReleaseOnCodecThread()) and starts the output poll timer.
+int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
+ int width, int height, int kbps) {
+ CheckOnCodecThread();
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(jni);
+
+ if (width == 0) {
+ width = width_;
+ height = height_;
+ kbps = last_set_bitrate_kbps_;
+ }
+
+ width_ = width;
+ height_ = height;
+ last_set_bitrate_kbps_ = kbps;
+ // Tightly-packed NV12: full-res Y plane plus half-res interleaved UV.
+ nv12_size_ = width_ * height_ * 3 / 2;
+ // We enforce no extra stride/padding in the format creation step.
+ jobjectArray input_buffers = reinterpret_cast<jobjectArray>(
+ jni->CallObjectMethod(*j_media_codec_video_encoder_,
+ j_init_encode_method_,
+ width_,
+ height_,
+ kbps));
+ CHECK_EXCEPTION(jni, "");
+ if (IsNull(jni, input_buffers))
+ return WEBRTC_VIDEO_CODEC_ERROR;
+
+ size_t num_input_buffers = jni->GetArrayLength(input_buffers);
+ CHECK(input_buffers_.empty(), "Unexpected double InitEncode without Release");
+ input_buffers_.resize(num_input_buffers);
+ for (size_t i = 0; i < num_input_buffers; ++i) {
+ // Global ref keeps each direct buffer alive across JNI calls; released in
+ // ReleaseOnCodecThread().
+ input_buffers_[i] =
+ jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
+ int64 nv12_buffer_capacity =
+ jni->GetDirectBufferCapacity(input_buffers_[i]);
+ CHECK_EXCEPTION(jni, "");
+ CHECK(nv12_buffer_capacity >= nv12_size_, "Insufficient capacity");
+ }
+ CHECK_EXCEPTION(jni, "");
+
+ // Kick off the periodic OnMessage() poll for encoded outputs.
+ codec_thread_->PostDelayed(kMediaCodecPollMs, this);
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Codec-thread body of Encode(): drains pending outputs, converts the I420
+// input frame to NV12 in a MediaCodec input buffer, and queues it to the
+// Java encoder.  Errors trigger ResetCodec().
+int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
+ const webrtc::I420VideoFrame& frame,
+ const std::vector<webrtc::VideoFrameType>* frame_types) {
+ CheckOnCodecThread();
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(jni);
+
+ if (!DeliverPendingOutputs(jni)) {
+ ResetCodec();
+ // Continue as if everything's fine.
+ }
+
+ // Set when a previous Encoded() callback returned >0 (see
+ // DeliverPendingOutputs); drop exactly one frame then resume.
+ if (drop_next_input_frame_) {
+ drop_next_input_frame_ = false;
+ return WEBRTC_VIDEO_CODEC_OK;
+ }
+
+ CHECK(frame_types->size() == 1, "Unexpected stream count");
+ // Any non-delta request is treated as a key-frame request.
+ bool key_frame = frame_types->front() != webrtc::kDeltaFrame;
+
+ CHECK(frame.width() == width_, "Unexpected resolution change");
+ CHECK(frame.height() == height_, "Unexpected resolution change");
+
+ // -1 from Java means "no input buffer available right now".
+ int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_,
+ j_dequeue_input_buffer_method_);
+ CHECK_EXCEPTION(jni, "");
+ if (j_input_buffer_index == -1)
+ return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887.
+ if (j_input_buffer_index == -2) {
+ // -2 from Java means an encoder error; reset and fail this frame.
+ ResetCodec();
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+
+ jobject j_input_buffer = input_buffers_[j_input_buffer_index];
+ uint8* nv12_buffer =
+ reinterpret_cast<uint8*>(jni->GetDirectBufferAddress(j_input_buffer));
+ CHECK_EXCEPTION(jni, "");
+ CHECK(nv12_buffer, "Indirect buffer??");
+ // NOTE(review): the destination UV-plane offset below is computed with the
+ // *source* Y stride (frame.stride(kYPlane)) while the destination Y stride
+ // passed to I420ToNV12 is frame.width().  These disagree whenever the
+ // source stride != width -- confirm strides are always exactly width here,
+ // or compute the offset as nv12_buffer + frame.width() * frame.height().
+ CHECK(!libyuv::I420ToNV12(
+ frame.buffer(webrtc::kYPlane),
+ frame.stride(webrtc::kYPlane),
+ frame.buffer(webrtc::kUPlane),
+ frame.stride(webrtc::kUPlane),
+ frame.buffer(webrtc::kVPlane),
+ frame.stride(webrtc::kVPlane),
+ nv12_buffer,
+ frame.width(),
+ nv12_buffer + frame.stride(webrtc::kYPlane) * frame.height(),
+ frame.width(),
+ frame.width(),
+ frame.height()),
+ "I420ToNV12 failed");
+ jlong timestamp_us = frame.render_time_ms() * 1000;
+ // NOTE(review): |start| is never read below -- presumably leftover timing
+ // instrumentation; confirm and remove.
+ int64_t start = talk_base::Time();
+ bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
+ j_encode_method_,
+ key_frame,
+ j_input_buffer_index,
+ nv12_size_,
+ timestamp_us);
+ CHECK_EXCEPTION(jni, "");
+ if (!encode_status || !DeliverPendingOutputs(jni)) {
+ ResetCodec();
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Codec-thread body of RegisterEncodeCompleteCallback(): stores the callback
+// used by DeliverPendingOutputs() to report encoded frames.
+int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread(
+ webrtc::EncodedImageCallback* callback) {
+ CheckOnCodecThread();
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(jni);
+ callback_ = callback;
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Codec-thread body of Release(): drops the global refs pinning the input
+// buffers, releases the Java-side codec, and resets all parameters (which
+// also cancels the pending poll messages).
+int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() {
+ CheckOnCodecThread();
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(jni);
+ for (size_t i = 0; i < input_buffers_.size(); ++i)
+ jni->DeleteGlobalRef(input_buffers_[i]);
+ input_buffers_.clear();
+ jni->CallVoidMethod(*j_media_codec_video_encoder_, j_release_method_);
+ ResetParameters(jni);
+ CHECK_EXCEPTION(jni, "");
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Codec-thread body of SetRates(): remembers the bitrate (so ResetCodec()
+// can re-apply it) and forwards both values to the Java encoder.  A Java
+// failure resets the codec.
+int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate,
+ uint32_t frame_rate) {
+ CheckOnCodecThread();
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(jni);
+ last_set_bitrate_kbps_ = new_bit_rate;
+ bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
+ j_set_rates_method_,
+ new_bit_rate,
+ frame_rate);
+ CHECK_EXCEPTION(jni, "");
+ if (!ret) {
+ ResetCodec();
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Returns per-session state to its pristine values.  Clearing the message
+// queue cancels the output-poll timer, which is what guarantees no further
+// OnMessage() callbacks (relied upon by the destructor's CHECK).
+void MediaCodecVideoEncoder::ResetParameters(JNIEnv* jni) {
+ talk_base::MessageQueueManager::Clear(this);
+ width_ = 0;
+ height_ = 0;
+ nv12_size_ = 0;
+ drop_next_input_frame_ = false;
+ CHECK(input_buffers_.empty(),
+ "ResetParameters called while holding input_buffers_!");
+}
+
+// Reads OutputBufferInfo.index (the MediaCodec output buffer index).
+int MediaCodecVideoEncoder::GetOutputBufferInfoIndex(
+ JNIEnv* jni,
+ jobject j_output_buffer_info) {
+ return GetIntField(jni, j_output_buffer_info, j_info_index_field_);
+}
+
+// Reads OutputBufferInfo.buffer (the direct ByteBuffer of encoded bytes).
+jobject MediaCodecVideoEncoder::GetOutputBufferInfoBuffer(
+ JNIEnv* jni,
+ jobject j_output_buffer_info) {
+ return GetObjectField(jni, j_output_buffer_info, j_info_buffer_field_);
+}
+// Reads OutputBufferInfo.isKeyFrame.
+bool MediaCodecVideoEncoder::GetOutputBufferInfoIsKeyFrame(
+ JNIEnv* jni,
+ jobject j_output_buffer_info) {
+ return GetBooleanField(jni, j_output_buffer_info, j_info_is_key_frame_field_);
+}
+
+// Reads OutputBufferInfo.presentationTimestampUs.
+jlong MediaCodecVideoEncoder::GetOutputBufferInfoPresentationTimestampUs(
+ JNIEnv* jni,
+ jobject j_output_buffer_info) {
+ return GetLongField(
+ jni, j_output_buffer_info, j_info_presentation_timestamp_us_field_);
+}
+
+// Dequeues all ready encoded frames from the Java encoder, wraps each as a
+// webrtc::EncodedImage, and delivers it to |callback_| (if registered).
+// Returns false (after ResetCodec()) on any error; true when the Java side
+// reports no more pending outputs.
+bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
+ while (true) {
+ jobject j_output_buffer_info = jni->CallObjectMethod(
+ *j_media_codec_video_encoder_, j_dequeue_output_buffer_method_);
+ CHECK_EXCEPTION(jni, "");
+ // Null info means nothing is ready yet.
+ if (IsNull(jni, j_output_buffer_info))
+ break;
+
+ // Index -1 signals an encoder error.
+ int output_buffer_index =
+ GetOutputBufferInfoIndex(jni, j_output_buffer_info);
+ if (output_buffer_index == -1) {
+ ResetCodec();
+ return false;
+ }
+
+ jlong capture_time_ms =
+ GetOutputBufferInfoPresentationTimestampUs(jni, j_output_buffer_info) /
+ 1000;
+
+ int32_t callback_status = 0;
+ if (callback_) {
+ jobject j_output_buffer =
+ GetOutputBufferInfoBuffer(jni, j_output_buffer_info);
+ bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info);
+ // NOTE(review): payload size is taken from the direct buffer's
+ // *capacity* -- this assumes the Java side returns a ByteBuffer sized
+ // exactly to the encoded frame; confirm against the Java
+ // dequeueOutputBuffer implementation.
+ size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer);
+ uint8* payload = reinterpret_cast<uint8_t*>(
+ jni->GetDirectBufferAddress(j_output_buffer));
+ CHECK_EXCEPTION(jni, "");
+ scoped_ptr<webrtc::EncodedImage> image(
+ new webrtc::EncodedImage(payload, payload_size, payload_size));
+ image->_encodedWidth = width_;
+ image->_encodedHeight = height_;
+ // Convert capture time to 90 kHz RTP timestamp.
+ image->_timeStamp = static_cast<uint32_t>(90 * capture_time_ms);
+ image->capture_time_ms_ = capture_time_ms;
+ image->_frameType = (key_frame ? webrtc::kKeyFrame : webrtc::kDeltaFrame);
+ image->_completeFrame = true;
+
+ // No VP8 picture-id/temporal-layer info is available from MediaCodec;
+ // fill in the "unknown" sentinels.
+ webrtc::CodecSpecificInfo info;
+ memset(&info, 0, sizeof(info));
+ info.codecType = kVideoCodecVP8;
+ info.codecSpecific.VP8.pictureId = webrtc::kNoPictureId;
+ info.codecSpecific.VP8.tl0PicIdx = webrtc::kNoTl0PicIdx;
+ info.codecSpecific.VP8.keyIdx = webrtc::kNoKeyIdx;
+
+ // Generate a header describing a single fragment.
+ webrtc::RTPFragmentationHeader header;
+ memset(&header, 0, sizeof(header));
+ header.VerifyAndAllocateFragmentationHeader(1);
+ header.fragmentationOffset[0] = 0;
+ header.fragmentationLength[0] = image->_length;
+ header.fragmentationPlType[0] = 0;
+ header.fragmentationTimeDiff[0] = 0;
+
+ callback_status = callback_->Encoded(*image, &info, &header);
+ }
+
+ // Hand the output buffer back to MediaCodec regardless of delivery.
+ bool success = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
+ j_release_output_buffer_method_,
+ output_buffer_index);
+ CHECK_EXCEPTION(jni, "");
+ if (!success) {
+ ResetCodec();
+ return false;
+ }
+
+ // A positive callback status asks us to drop the next input frame.
+ if (callback_status > 0)
+ drop_next_input_frame_ = true;
+ // Theoretically could handle callback_status<0 here, but unclear what that
+ // would mean for us.
+ }
+
+ return true;
+}
+
+// Simplest-possible implementation of an encoder factory, churns out
+// MediaCodecVideoEncoders on demand (or errors, if that's not possible).
+class MediaCodecVideoEncoderFactory
+ : public cricket::WebRtcVideoEncoderFactory {
+ public:
+ MediaCodecVideoEncoderFactory();
+ virtual ~MediaCodecVideoEncoderFactory();
+
+ // WebRtcVideoEncoderFactory implementation.
+ virtual webrtc::VideoEncoder* CreateVideoEncoder(webrtc::VideoCodecType type)
+ OVERRIDE;
+ virtual void AddObserver(Observer* observer) OVERRIDE;
+ virtual void RemoveObserver(Observer* observer) OVERRIDE;
+ virtual const std::vector<VideoCodec>& codecs() const OVERRIDE;
+ virtual void DestroyVideoEncoder(webrtc::VideoEncoder* encoder) OVERRIDE;
+
+ private:
+ // Empty if platform support is lacking, const after ctor returns.
+ // Also gates CreateVideoEncoder(): an empty list means "return NULL".
+ std::vector<VideoCodec> supported_codecs_;
+};
+
+// Ctor: probes the Java side for platform MediaCodec support and, when
+// available, populates |supported_codecs_|.  Currently hard-disabled by the
+// |if (true)| early-return below (see the linked bug), so the list stays
+// empty and no HW encoders are handed out.
+MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(jni);
+ jclass j_encoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoEncoder");
+ bool is_platform_supported = jni->CallStaticBooleanMethod(
+ j_encoder_class,
+ GetStaticMethodID(jni, j_encoder_class, "isPlatformSupported", "()Z"));
+ CHECK_EXCEPTION(jni, "");
+ if (!is_platform_supported)
+ return;
+
+ if (true) {
+ // TODO(fischman): re-enable once
+ // https://code.google.com/p/webrtc/issues/detail?id=2899 is fixed. Until
+ // then the Android MediaCodec experience is too abysmal to turn on.
+ return;
+ }
+
+ // Wouldn't it be nice if MediaCodec exposed the maximum capabilities of the
+ // encoder? Sure would be. Too bad it doesn't. So we hard-code some
+ // reasonable defaults.
+ supported_codecs_.push_back(
+ VideoCodec(kVideoCodecVP8, "VP8", 1920, 1088, 30));
+}
+
+MediaCodecVideoEncoderFactory::~MediaCodecVideoEncoderFactory() {}
+
+// Returns a new MediaCodecVideoEncoder for VP8, or NULL for any other codec
+// or when no codecs are supported (always the case while the ctor's
+// early-return disable is in place).  Caller owns the result; see
+// DestroyVideoEncoder().
+webrtc::VideoEncoder* MediaCodecVideoEncoderFactory::CreateVideoEncoder(
+ webrtc::VideoCodecType type) {
+ if (type != kVideoCodecVP8 || supported_codecs_.empty())
+ return NULL;
+ return new MediaCodecVideoEncoder(AttachCurrentThreadIfNeeded());
+}
+
+// Since the available codec list is never going to change, we ignore the
+// Observer-related interface here.
+void MediaCodecVideoEncoderFactory::AddObserver(Observer* observer) {}  // Intentional no-op.
+void MediaCodecVideoEncoderFactory::RemoveObserver(Observer* observer) {}  // Intentional no-op.
+
+// Returns the (possibly empty) list of supported codecs built in the ctor.
+const std::vector<MediaCodecVideoEncoderFactory::VideoCodec>&
+MediaCodecVideoEncoderFactory::codecs() const {
+ return supported_codecs_;
+}
+
+// Destroys an encoder previously returned by CreateVideoEncoder().
+void MediaCodecVideoEncoderFactory::DestroyVideoEncoder(
+ webrtc::VideoEncoder* encoder) {
+ delete encoder;
+}
+
+#endif // ANDROID
+
+} // anonymous namespace
// Convenience macro defining JNI-accessible methods in the org.webrtc package.
// Eliminates unnecessary boilerplate and line-wraps, reducing visual clutter.
}
extern "C" void JNIEXPORT JNICALL JNI_OnUnLoad(JavaVM *jvm, void *reserved) {
+ g_class_reference_holder->FreeReferences(AttachCurrentThreadIfNeeded());
delete g_class_reference_holder;
g_class_reference_holder = NULL;
CHECK(talk_base::CleanupSSL(), "Failed to CleanupSSL()");
+// Registers a Java observer on the native DataChannel and returns ownership
+// of the wrapper to Java as a jlong.  jlongFromPointer() (rather than a bare
+// reinterpret_cast) avoids garbage in the high 32 bits on 32-bit platforms.
JOW(jlong, DataChannel_registerObserverNative)(
    JNIEnv* jni, jobject j_dc, jobject j_observer) {
-  talk_base::scoped_ptr<DataChannelObserverWrapper> observer(
+  scoped_ptr<DataChannelObserverWrapper> observer(
      new DataChannelObserverWrapper(jni, j_observer));
  ExtractNativeDC(jni, j_dc)->RegisterObserver(observer.get());
-  return reinterpret_cast<jlong>(observer.release());
+  return jlongFromPointer(observer.release());
}
JOW(void, DataChannel_unregisterObserverNative)(
}
#endif // ANDROID
+// Helper struct for working around the fact that CreatePeerConnectionFactory()
+// comes in two flavors: either entirely automagical (constructing its own
+// threads and deleting them on teardown, but no external codec factory support)
+// or entirely manual (requires caller to delete threads after factory
+// teardown). This struct takes ownership of its ctor's arguments to present a
+// single thing for Java to hold and eventually free.
+class OwnedFactoryAndThreads {
+ public:
+ OwnedFactoryAndThreads(Thread* worker_thread,
+ Thread* signaling_thread,
+ PeerConnectionFactoryInterface* factory)
+ : worker_thread_(worker_thread),
+ signaling_thread_(signaling_thread),
+ factory_(factory) {}
+
+ // Releases the factory first (dtor body runs before member destruction),
+ // then the scoped_ptr members delete the threads -- the order the manual
+ // flavor of CreatePeerConnectionFactory() requires.
+ ~OwnedFactoryAndThreads() { CHECK_RELEASE(factory_); }
+
+ PeerConnectionFactoryInterface* factory() { return factory_; }
+
+ private:
+ const scoped_ptr<Thread> worker_thread_;
+ const scoped_ptr<Thread> signaling_thread_;
+ PeerConnectionFactoryInterface* factory_; // Const after ctor except dtor.
+};
+
+// Creates the PeerConnectionFactory together with its worker/signaling
+// threads and (on Android) the MediaCodec HW encoder factory, bundling
+// everything into an OwnedFactoryAndThreads whose pointer Java holds and
+// later frees via PeerConnectionFactory_freeFactory().
JOW(jlong, PeerConnectionFactory_nativeCreatePeerConnectionFactory)(
    JNIEnv* jni, jclass) {
  webrtc::Trace::CreateTrace();
+ Thread* worker_thread = new Thread();
+ worker_thread->SetName("worker_thread", NULL);
+ Thread* signaling_thread = new Thread();
+ signaling_thread->SetName("signaling_thread", NULL);
+ CHECK(worker_thread->Start() && signaling_thread->Start(),
+ "Failed to start threads");
+ scoped_ptr<cricket::WebRtcVideoEncoderFactory> encoder_factory;
+#ifdef ANDROID
+ encoder_factory.reset(new MediaCodecVideoEncoderFactory());
+#endif
  talk_base::scoped_refptr<PeerConnectionFactoryInterface> factory(
-      webrtc::CreatePeerConnectionFactory());
-  return (jlong)factory.release();
+ webrtc::CreatePeerConnectionFactory(worker_thread,
+ signaling_thread,
+ NULL,
+ encoder_factory.release(),
+ NULL));
+ // OwnedFactoryAndThreads takes over the threads and the factory ref.
+ OwnedFactoryAndThreads* owned_factory = new OwnedFactoryAndThreads(
+ worker_thread, signaling_thread, factory.release());
+ return jlongFromPointer(owned_factory);
}
+// Frees the OwnedFactoryAndThreads created above (releasing the factory,
+// then deleting its threads) and returns the trace resources.
JOW(void, PeerConnectionFactory_freeFactory)(JNIEnv*, jclass, jlong j_p) {
-  CHECK_RELEASE(reinterpret_cast<PeerConnectionFactoryInterface*>(j_p));
+ delete reinterpret_cast<OwnedFactoryAndThreads*>(j_p);
  webrtc::Trace::ReturnTrace();
}
+// Unwraps the PeerConnectionFactory from the OwnedFactoryAndThreads pointer
+// that Java holds as a jlong.  No ownership is transferred.
+static PeerConnectionFactoryInterface* factoryFromJava(jlong j_p) {
+ return reinterpret_cast<OwnedFactoryAndThreads*>(j_p)->factory();
+}
+
JOW(jlong, PeerConnectionFactory_nativeCreateLocalMediaStream)(
JNIEnv* jni, jclass, jlong native_factory, jstring label) {
talk_base::scoped_refptr<PeerConnectionFactoryInterface> factory(
- reinterpret_cast<PeerConnectionFactoryInterface*>(native_factory));
+ factoryFromJava(native_factory));
talk_base::scoped_refptr<MediaStreamInterface> stream(
factory->CreateLocalMediaStream(JavaToStdString(jni, label)));
return (jlong)stream.release();
JOW(jlong, PeerConnectionFactory_nativeCreateVideoSource)(
JNIEnv* jni, jclass, jlong native_factory, jlong native_capturer,
jobject j_constraints) {
- talk_base::scoped_ptr<ConstraintsWrapper> constraints(
+ scoped_ptr<ConstraintsWrapper> constraints(
new ConstraintsWrapper(jni, j_constraints));
talk_base::scoped_refptr<PeerConnectionFactoryInterface> factory(
- reinterpret_cast<PeerConnectionFactoryInterface*>(native_factory));
+ factoryFromJava(native_factory));
talk_base::scoped_refptr<VideoSourceInterface> source(
factory->CreateVideoSource(
reinterpret_cast<cricket::VideoCapturer*>(native_capturer),
JNIEnv* jni, jclass, jlong native_factory, jstring id,
jlong native_source) {
talk_base::scoped_refptr<PeerConnectionFactoryInterface> factory(
- reinterpret_cast<PeerConnectionFactoryInterface*>(native_factory));
+ factoryFromJava(native_factory));
talk_base::scoped_refptr<VideoTrackInterface> track(
factory->CreateVideoTrack(
JavaToStdString(jni, id),
return (jlong)track.release();
}
+// Creates an AudioSource honoring the given Java MediaConstraints and
+// returns ownership of its ref to Java as a jlong.
+JOW(jlong, PeerConnectionFactory_nativeCreateAudioSource)(
+ JNIEnv* jni, jclass, jlong native_factory, jobject j_constraints) {
+ scoped_ptr<ConstraintsWrapper> constraints(
+ new ConstraintsWrapper(jni, j_constraints));
+ talk_base::scoped_refptr<PeerConnectionFactoryInterface> factory(
+ factoryFromJava(native_factory));
+ talk_base::scoped_refptr<AudioSourceInterface> source(
+ factory->CreateAudioSource(constraints.get()));
+ return (jlong)source.release();
+}
+
+
+// Creates an AudioTrack bound to the given native AudioSource (added
+// |native_source| parameter replaces the old NULL-source behavior) and
+// returns ownership of its ref to Java as a jlong.
JOW(jlong, PeerConnectionFactory_nativeCreateAudioTrack)(
-    JNIEnv* jni, jclass, jlong native_factory, jstring id) {
+ JNIEnv* jni, jclass, jlong native_factory, jstring id,
+ jlong native_source) {
  talk_base::scoped_refptr<PeerConnectionFactoryInterface> factory(
-      reinterpret_cast<PeerConnectionFactoryInterface*>(native_factory));
-  talk_base::scoped_refptr<AudioTrackInterface> track(
-      factory->CreateAudioTrack(JavaToStdString(jni, id), NULL));
+ factoryFromJava(native_factory));
+ talk_base::scoped_refptr<AudioTrackInterface> track(factory->CreateAudioTrack(
+ JavaToStdString(jni, id),
+ reinterpret_cast<AudioSourceInterface*>(native_source)));
  return (jlong)track.release();
}
JNIEnv *jni, jclass, jlong factory, jobject j_ice_servers,
jobject j_constraints, jlong observer_p) {
talk_base::scoped_refptr<PeerConnectionFactoryInterface> f(
- reinterpret_cast<PeerConnectionFactoryInterface*>(factory));
+ reinterpret_cast<PeerConnectionFactoryInterface*>(
+ factoryFromJava(factory)));
PeerConnectionInterface::IceServers servers;
JavaIceServersToJsepIceServers(jni, j_ice_servers, &servers);
PCOJava* observer = reinterpret_cast<PCOJava*>(observer_p);
JNIEnv* jni, jobject j_pc, jobject j_ice_servers, jobject j_constraints) {
PeerConnectionInterface::IceServers ice_servers;
JavaIceServersToJsepIceServers(jni, j_ice_servers, &ice_servers);
- talk_base::scoped_ptr<ConstraintsWrapper> constraints(
+ scoped_ptr<ConstraintsWrapper> constraints(
new ConstraintsWrapper(jni, j_constraints));
return ExtractNativePC(jni, j_pc)->UpdateIce(ice_servers, constraints.get());
}
jint j_sdp_mline_index, jstring j_candidate_sdp) {
std::string sdp_mid = JavaToStdString(jni, j_sdp_mid);
std::string sdp = JavaToStdString(jni, j_candidate_sdp);
- talk_base::scoped_ptr<IceCandidateInterface> candidate(
+ scoped_ptr<IceCandidateInterface> candidate(
webrtc::CreateIceCandidate(sdp_mid, j_sdp_mline_index, sdp, NULL));
return ExtractNativePC(jni, j_pc)->AddIceCandidate(candidate.get());
}
JOW(jboolean, PeerConnection_nativeAddLocalStream)(
JNIEnv* jni, jobject j_pc, jlong native_stream, jobject j_constraints) {
- talk_base::scoped_ptr<ConstraintsWrapper> constraints(
+ scoped_ptr<ConstraintsWrapper> constraints(
new ConstraintsWrapper(jni, j_constraints));
return ExtractNativePC(jni, j_pc)->AddStream(
reinterpret_cast<MediaStreamInterface*>(native_stream),
JOW(jlong, VideoCapturer_nativeCreateVideoCapturer)(
JNIEnv* jni, jclass, jstring j_device_name) {
std::string device_name = JavaToStdString(jni, j_device_name);
- talk_base::scoped_ptr<cricket::DeviceManagerInterface> device_manager(
+ scoped_ptr<cricket::DeviceManagerInterface> device_manager(
cricket::DeviceManagerFactory::Create());
CHECK(device_manager->Init(), "DeviceManager::Init() failed");
cricket::Device device;
LOG(LS_ERROR) << "GetVideoCaptureDevice failed for " << device_name;
return 0;
}
- talk_base::scoped_ptr<cricket::VideoCapturer> capturer(
+ scoped_ptr<cricket::VideoCapturer> capturer(
device_manager->CreateVideoCapturer(device));
return (jlong)capturer.release();
}
+// Creates a platform GUI video renderer at (x, y) and returns ownership of
+// the wrapper to Java as a jlong.
JOW(jlong, VideoRenderer_nativeCreateGuiVideoRenderer)(
    JNIEnv* jni, jclass, int x, int y) {
-  talk_base::scoped_ptr<VideoRendererWrapper> renderer(
-      VideoRendererWrapper::Create(
-          cricket::VideoRendererFactory::CreateGuiVideoRenderer(x, y)));
+ scoped_ptr<VideoRendererWrapper> renderer(VideoRendererWrapper::Create(
+ cricket::VideoRendererFactory::CreateGuiVideoRenderer(x, y)));
  return (jlong)renderer.release();
}
+// Wraps a Java VideoRenderer.Callbacks object in a native renderer and
+// returns ownership of the wrapper to Java as a jlong.
JOW(jlong, VideoRenderer_nativeWrapVideoRenderer)(
    JNIEnv* jni, jclass, jobject j_callbacks) {
-  talk_base::scoped_ptr<JavaVideoRendererWrapper> renderer(
+ scoped_ptr<JavaVideoRendererWrapper> renderer(
      new JavaVideoRendererWrapper(jni, j_callbacks));
  return (jlong)renderer.release();
}
JOW(jlong, VideoSource_stop)(JNIEnv* jni, jclass, jlong j_p) {
cricket::VideoCapturer* capturer =
reinterpret_cast<VideoSourceInterface*>(j_p)->GetVideoCapturer();
- talk_base::scoped_ptr<cricket::VideoFormatPod> format(
+ scoped_ptr<cricket::VideoFormatPod> format(
new cricket::VideoFormatPod(*capturer->GetCaptureFormat()));
capturer->Stop();
return jlongFromPointer(format.release());
JOW(void, VideoSource_restart)(
JNIEnv* jni, jclass, jlong j_p_source, jlong j_p_format) {
- talk_base::scoped_ptr<cricket::VideoFormatPod> format(
+ CHECK(j_p_source, "");
+ CHECK(j_p_format, "");
+ scoped_ptr<cricket::VideoFormatPod> format(
reinterpret_cast<cricket::VideoFormatPod*>(j_p_format));
reinterpret_cast<VideoSourceInterface*>(j_p_source)->GetVideoCapturer()->
StartCapturing(cricket::VideoFormat(*format));