| author | Andras Becsi <andras.becsi@digia.com> | 2013-12-11 21:33:03 +0100 |
|---|---|---|
| committer | Andras Becsi <andras.becsi@digia.com> | 2013-12-13 12:34:07 +0100 |
| commit | f2a33ff9cbc6d19943f1c7fbddd1f23d23975577 | |
| tree | 0586a32aa390ade8557dfd6b4897f43a07449578 /chromium/third_party/libjingle/source | |
| parent | 5362912cdb5eea702b68ebe23702468d17c3017a | |
| download | qtwebengine-chromium-f2a33ff9cbc6d19943f1c7fbddd1f23d23975577.tar.gz | |
Update Chromium to branch 1650 (31.0.1650.63)
Change-Id: I57d8c832eaec1eb2364e0a8e7352a6dd354db99f
Reviewed-by: Jocelyn Turcotte <jocelyn.turcotte@digia.com>
Diffstat (limited to 'chromium/third_party/libjingle/source')
195 files changed, 3918 insertions(+), 6790 deletions(-)
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/datachannel.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/datachannel.cc
index 2b45845756f..9409fd7a210 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/datachannel.cc
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/datachannel.cc
@@ -98,6 +98,7 @@ bool DataChannel::HasNegotiationCompleted() {
 DataChannel::~DataChannel() {
   ClearQueuedReceivedData();
   ClearQueuedSendData();
+  ClearQueuedControlData();
 }
 
 void DataChannel::RegisterObserver(DataChannelObserver* observer) {
@@ -193,7 +194,8 @@ bool DataChannel::SendControl(const talk_base::Buffer* buffer) {
 void DataChannel::SetReceiveSsrc(uint32 receive_ssrc) {
   if (receive_ssrc_set_) {
     ASSERT(session_->data_channel_type() == cricket::DCT_RTP ||
-           receive_ssrc_ == send_ssrc_);
+           !send_ssrc_set_ ||
+           receive_ssrc_ == send_ssrc_);
     return;
   }
   receive_ssrc_ = receive_ssrc;
@@ -209,7 +211,8 @@ void DataChannel::RemotePeerRequestClose() {
 void DataChannel::SetSendSsrc(uint32 send_ssrc) {
   if (send_ssrc_set_) {
     ASSERT(session_->data_channel_type() == cricket::DCT_RTP ||
-           receive_ssrc_ == send_ssrc_);
+           !receive_ssrc_set_ ||
+           receive_ssrc_ == send_ssrc_);
     return;
   }
   send_ssrc_ = send_ssrc;
@@ -250,14 +253,16 @@ void DataChannel::OnChannelReady(bool writable) {
   if (!writable) {
     return;
   }
-  // Update the readyState if the channel is writable for the first time;
-  // otherwise it means the channel was blocked for sending and now unblocked,
-  // so send the queued data now.
+  // Update the readyState and send the queued control message if the channel
+  // is writable for the first time; otherwise it means the channel was blocked
+  // for sending and now unblocked, so send the queued data now.
   if (!was_ever_writable_) {
     was_ever_writable_ = true;
     UpdateState();
+    DeliverQueuedControlData();
+    ASSERT(queued_send_data_.empty());
   } else if (state_ == kOpen) {
-    SendQueuedSendData();
+    DeliverQueuedSendData();
   }
 }
 
@@ -321,15 +326,13 @@ void DataChannel::ConnectToDataSession() {
   data_session_->SignalDataReceived.connect(this,
                                             &DataChannel::OnDataReceived);
   cricket::StreamParams params = cricket::StreamParams::CreateLegacy(id());
-  data_session_->media_channel()->AddSendStream(params);
-  data_session_->media_channel()->AddRecvStream(params);
+  data_session_->AddRecvStream(params);
+  data_session_->AddSendStream(params);
 }
 
 void DataChannel::DisconnectFromDataSession() {
-  if (data_session_->media_channel() != NULL) {
-    data_session_->media_channel()->RemoveSendStream(id());
-    data_session_->media_channel()->RemoveRecvStream(id());
-  }
+  data_session_->RemoveSendStream(id());
+  data_session_->RemoveRecvStream(id());
   data_session_->SignalReadyToSendData.disconnect(this);
   data_session_->SignalDataReceived.disconnect(this);
   data_session_ = NULL;
@@ -356,7 +359,7 @@ void DataChannel::ClearQueuedReceivedData() {
   }
 }
 
-void DataChannel::SendQueuedSendData() {
+void DataChannel::DeliverQueuedSendData() {
   DeliverQueuedControlData();
   if (!was_ever_writable_) {
     return;
@@ -366,7 +369,7 @@
     DataBuffer* buffer = queued_send_data_.front();
     cricket::SendDataResult send_result;
     if (!InternalSendWithoutQueueing(*buffer, &send_result)) {
-      LOG(LS_WARNING) << "SendQueuedSendData aborted due to send_result "
+      LOG(LS_WARNING) << "DeliverQueuedSendData aborted due to send_result "
                       << send_result;
       break;
     }
@@ -375,6 +378,14 @@
   }
 }
 
+void DataChannel::ClearQueuedControlData() {
+  while (!queued_control_data_.empty()) {
+    const talk_base::Buffer *buf = queued_control_data_.front();
+    queued_control_data_.pop();
+    delete buf;
+  }
+}
+
 void DataChannel::DeliverQueuedControlData() {
   if (was_ever_writable_) {
     while (!queued_control_data_.empty()) {
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/datachannel.h b/chromium/third_party/libjingle/source/talk/app/webrtc/datachannel.h
index 28fe3446d5d..3ce3c1b5b4e 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/datachannel.h
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/datachannel.h
@@ -67,8 +67,17 @@ class DataChannel : public DataChannelInterface,
   virtual void RegisterObserver(DataChannelObserver* observer);
   virtual void UnregisterObserver();
 
-  virtual std::string label() const { return label_; }
+  virtual std::string label() const { return label_; }
   virtual bool reliable() const;
+  virtual bool ordered() const { return config_.ordered; }
+  virtual uint16 maxRetransmitTime() const {
+    return config_.maxRetransmitTime;
+  }
+  virtual uint16 maxRetransmits() const {
+    return config_.maxRetransmits;
+  }
+  virtual std::string protocol() const { return config_.protocol; }
+  virtual bool negotiated() const { return config_.negotiated; }
   virtual int id() const { return config_.id; }
   virtual uint64 buffered_amount() const;
   virtual void Close();
@@ -116,9 +125,10 @@ class DataChannel : public DataChannelInterface,
   bool IsConnectedToDataSession() { return data_session_ != NULL; }
   void DeliverQueuedControlData();
   void QueueControl(const talk_base::Buffer* buffer);
+  void ClearQueuedControlData();
   void DeliverQueuedReceivedData();
   void ClearQueuedReceivedData();
-  void SendQueuedSendData();
+  void DeliverQueuedSendData();
   void ClearQueuedSendData();
   bool InternalSendWithoutQueueing(const DataBuffer& buffer,
                                    cricket::SendDataResult* send_result);
@@ -158,6 +168,11 @@ BEGIN_PROXY_MAP(DataChannel)
   PROXY_METHOD0(void, UnregisterObserver)
   PROXY_CONSTMETHOD0(std::string, label)
   PROXY_CONSTMETHOD0(bool, reliable)
+  PROXY_CONSTMETHOD0(bool, ordered)
+  PROXY_CONSTMETHOD0(uint16, maxRetransmitTime)
+  PROXY_CONSTMETHOD0(uint16, maxRetransmits)
+  PROXY_CONSTMETHOD0(std::string, protocol)
+  PROXY_CONSTMETHOD0(bool, negotiated)
   PROXY_CONSTMETHOD0(int, id)
   PROXY_CONSTMETHOD0(DataState, state)
   PROXY_CONSTMETHOD0(uint64, buffered_amount)
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/datachannel_unittest.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/datachannel_unittest.cc
index 69060b751d8..2b0a9fe5a65 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/datachannel_unittest.cc
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/datachannel_unittest.cc
@@ -29,6 +29,7 @@
 #include "talk/app/webrtc/jsep.h"
 #include "talk/app/webrtc/mediastreamsignaling.h"
 #include "talk/app/webrtc/test/fakeconstraints.h"
+#include "talk/app/webrtc/test/fakedtlsidentityservice.h"
 #include "talk/app/webrtc/webrtcsession.h"
 #include "talk/base/gunit.h"
 #include "talk/media/base/fakemediaengine.h"
@@ -75,7 +76,7 @@ class SctpDataChannelTest : public testing::Test {
                      talk_base::Thread::Current())),
         media_stream_signaling_(
             new webrtc::MediaStreamSignaling(talk_base::Thread::Current(),
-                                             NULL)),
+                                             NULL, channel_manager_.get())),
         session_(channel_manager_.get(),
                  talk_base::Thread::Current(),
                  talk_base::Thread::Current(),
@@ -92,20 +93,21 @@
     constraints.AddMandatory(MediaConstraintsInterface::kEnableDtlsSrtp,
                              true);
     constraints.AddMandatory(MediaConstraintsInterface::kEnableSctpDataChannels,
                              true);
-    ASSERT_TRUE(session_.Initialize(&constraints, NULL));
+    ASSERT_TRUE(session_.Initialize(&constraints,
+                                    new FakeIdentityService()));
+    webrtc_data_channel_ = webrtc::DataChannel::Create(&session_, "test", NULL);
+    ASSERT_TRUE(media_stream_signaling_->AddDataChannel(webrtc_data_channel_));
+
     talk_base::scoped_refptr<CreateSessionDescriptionObserverForTest>
         observer = new CreateSessionDescriptionObserverForTest();
     session_.CreateOffer(observer.get(), NULL);
-    EXPECT_TRUE_WAIT(observer->description() != NULL, 1000);
+    EXPECT_TRUE_WAIT(observer->description() != NULL, 2000);
     ASSERT_TRUE(observer->description() != NULL);
     ASSERT_TRUE(session_.SetLocalDescription(observer->ReleaseDescription(),
                                              NULL));
-
-    webrtc_data_channel_ = webrtc::DataChannel::Create(&session_, "test", NULL);
     // Connect to the media channel.
     webrtc_data_channel_->SetSendSsrc(kFakeSsrc);
     webrtc_data_channel_->SetReceiveSsrc(kFakeSsrc);
-
     session_.data_channel()->SignalReadyToSendData(true);
   }
@@ -116,7 +118,6 @@ class SctpDataChannelTest : public testing::Test {
       session_.data_channel()->SignalReadyToSendData(true);
     }
   }
-
   cricket::FakeMediaEngine* media_engine_;
   cricket::FakeDataEngine* data_engine_;
   talk_base::scoped_ptr<cricket::ChannelManager> channel_manager_;
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/datachannelinterface.h b/chromium/third_party/libjingle/source/talk/app/webrtc/datachannelinterface.h
index 82d375c1962..7be8a50f5d4 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/datachannelinterface.h
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/datachannelinterface.h
@@ -110,6 +110,16 @@ class DataChannelInterface : public talk_base::RefCountInterface {
   // DataChannel object from other DataChannel objects.
   virtual std::string label() const = 0;
   virtual bool reliable() const = 0;
+
+  // TODO(tommyw): Remove these dummy implementations when all classes have
+  // implemented these APIs. They should all just return the values the
+  // DataChannel was created with.
+  virtual bool ordered() const { return false; }
+  virtual uint16 maxRetransmitTime() const { return 0; }
+  virtual uint16 maxRetransmits() const { return 0; }
+  virtual std::string protocol() const { return std::string(); }
+  virtual bool negotiated() const { return false; }
+
   virtual int id() const = 0;
   virtual DataState state() const = 0;
   // The buffered_amount returns the number of bytes of application data
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/dtmfsender_unittest.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/dtmfsender_unittest.cc
index e1c3be9b12b..a4835054ade 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/dtmfsender_unittest.cc
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/dtmfsender_unittest.cc
@@ -206,9 +206,9 @@ class DtmfSenderTest : public testing::Test {
     while (it_ref != dtmf_queue_ref.end() && it != dtmf_queue.end()) {
       EXPECT_EQ(it_ref->code, it->code);
       EXPECT_EQ(it_ref->duration, it->duration);
-      // Allow ~20ms error.
-      EXPECT_GE(it_ref->gap, it->gap - 20);
-      EXPECT_LE(it_ref->gap, it->gap + 20);
+      // Allow ~100ms error.
+ EXPECT_GE(it_ref->gap, it->gap - 100); + EXPECT_LE(it_ref->gap, it->gap + 100); ++it_ref; ++it; } diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/java/jni/peerconnection_jni.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/java/jni/peerconnection_jni.cc deleted file mode 100644 index fa3ad1b451a..00000000000 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/java/jni/peerconnection_jni.cc +++ /dev/null @@ -1,1551 +0,0 @@ -/* - * libjingle - * Copyright 2013, Google Inc. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * 3. The name of the author may not be used to endorse or promote products - * derived from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED - * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO - * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, - * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; - * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, - * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR - * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF - * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - -// Hints for future visitors: -// This entire file is an implementation detail of the org.webrtc Java package, -// the most interesting bits of which are org.webrtc.PeerConnection{,Factory}. -// The layout of this file is roughly: -// - various helper C++ functions & classes that wrap Java counterparts and -// expose a C++ interface that can be passed to the C++ PeerConnection APIs -// - implementations of methods declared "static" in the Java package (named -// things like Java_org_webrtc_OMG_Can_This_Name_Be_Any_Longer, prescribed by -// the JNI spec). -// -// Lifecycle notes: objects are owned where they will be called; in other words -// FooObservers are owned by C++-land, and user-callable objects (e.g. -// PeerConnection and VideoTrack) are owned by Java-land. -// When this file allocates C++ RefCountInterfaces it AddRef()s an artificial -// ref simulating the jlong held in Java-land, and then Release()s the ref in -// the respective free call. Sometimes this AddRef is implicit in the -// construction of a scoped_refptr<> which is then .release()d. -// Any persistent (non-local) references from C++ to Java must be global or weak -// (in which case they must be checked before use)! -// -// Exception notes: pretty much all JNI calls can throw Java exceptions, so each -// call through a JNIEnv* pointer needs to be followed by an ExceptionCheck() -// call. 
In this file this is done in CHECK_EXCEPTION, making for much easier -// debugging in case of failure (the alternative is to wait for control to -// return to the Java frame that called code in this file, at which point it's -// impossible to tell which JNI call broke). - -#include <jni.h> -#undef JNIEXPORT -#define JNIEXPORT __attribute__((visibility("default"))) - -#include <limits> -#include <map> - -#include "talk/app/webrtc/mediaconstraintsinterface.h" -#include "talk/app/webrtc/peerconnectioninterface.h" -#include "talk/app/webrtc/videosourceinterface.h" -#include "talk/base/logging.h" -#include "talk/base/ssladapter.h" -#include "talk/media/base/videocapturer.h" -#include "talk/media/base/videorenderer.h" -#include "talk/media/devices/videorendererfactory.h" -#include "talk/media/webrtc/webrtcvideocapturer.h" -#include "third_party/icu/source/common/unicode/unistr.h" -#include "webrtc/system_wrappers/interface/trace.h" -#include "webrtc/video_engine/include/vie_base.h" -#include "webrtc/voice_engine/include/voe_base.h" - -using icu::UnicodeString; -using webrtc::AudioSourceInterface; -using webrtc::AudioTrackInterface; -using webrtc::AudioTrackVector; -using webrtc::CreateSessionDescriptionObserver; -using webrtc::DataBuffer; -using webrtc::DataChannelInit; -using webrtc::DataChannelInterface; -using webrtc::DataChannelObserver; -using webrtc::IceCandidateInterface; -using webrtc::MediaConstraintsInterface; -using webrtc::MediaSourceInterface; -using webrtc::MediaStreamInterface; -using webrtc::MediaStreamTrackInterface; -using webrtc::PeerConnectionFactoryInterface; -using webrtc::PeerConnectionInterface; -using webrtc::PeerConnectionObserver; -using webrtc::SessionDescriptionInterface; -using webrtc::SetSessionDescriptionObserver; -using webrtc::StatsObserver; -using webrtc::StatsReport; -using webrtc::VideoRendererInterface; -using webrtc::VideoSourceInterface; -using webrtc::VideoTrackInterface; -using webrtc::VideoTrackVector; -using webrtc::VideoRendererInterface; - -// Abort the process if |x| is false, emitting |msg|. -#define CHECK(x, msg) \ - if (x) {} else { \ - LOG(LS_ERROR) << __FILE__ << ":" << __LINE__ << ": " << msg; \ - abort(); \ - } -// Abort the process if |jni| has a Java exception pending, emitting |msg|. -#define CHECK_EXCEPTION(jni, msg) \ - if (0) {} else { \ - if (jni->ExceptionCheck()) { \ - jni->ExceptionDescribe(); \ - jni->ExceptionClear(); \ - CHECK(0, msg); \ - } \ - } - -namespace { - -static JavaVM* g_jvm = NULL; // Set in JNI_OnLoad(). - -static pthread_once_t g_jni_ptr_once = PTHREAD_ONCE_INIT; -static pthread_key_t g_jni_ptr; // Key for per-thread JNIEnv* data. - -static void ThreadDestructor(void* unused) { - jint status = g_jvm->DetachCurrentThread(); - CHECK(status == JNI_OK, "Failed to detach thread: " << status); -} - -static void CreateJNIPtrKey() { - CHECK(!pthread_key_create(&g_jni_ptr, &ThreadDestructor), - "pthread_key_create"); -} - -// Deal with difference in signatures between Oracle's jni.h and Android's. -static JNIEnv* AttachCurrentThreadIfNeeded() { - CHECK(!pthread_once(&g_jni_ptr_once, &CreateJNIPtrKey), - "pthread_once"); - JNIEnv* jni = reinterpret_cast<JNIEnv*>(pthread_getspecific(g_jni_ptr)); - if (jni == NULL) { -#ifdef _JAVASOFT_JNI_H_ // Oracle's jni.h violates the JNI spec! 
- void* env; -#else - JNIEnv* env; -#endif - CHECK(!g_jvm->AttachCurrentThread(&env, NULL), "Failed to attach thread"); - CHECK(env, "AttachCurrentThread handed back NULL!"); - jni = reinterpret_cast<JNIEnv*>(env); - CHECK(!pthread_setspecific(g_jni_ptr, jni), "pthread_setspecific"); - } - return jni; -} - -// Android's FindClass() is trickier than usual because the app-specific -// ClassLoader is not consulted when there is no app-specific frame on the -// stack. Consequently, we only look up classes once in JNI_OnLoad. -// http://developer.android.com/training/articles/perf-jni.html#faq_FindClass -class ClassReferenceHolder { - public: - explicit ClassReferenceHolder(JNIEnv* jni) { - LoadClass(jni, "java/nio/ByteBuffer"); - LoadClass(jni, "org/webrtc/AudioTrack"); - LoadClass(jni, "org/webrtc/DataChannel"); - LoadClass(jni, "org/webrtc/DataChannel$Buffer"); - LoadClass(jni, "org/webrtc/DataChannel$Init"); - LoadClass(jni, "org/webrtc/DataChannel$State"); - LoadClass(jni, "org/webrtc/IceCandidate"); - LoadClass(jni, "org/webrtc/MediaSource$State"); - LoadClass(jni, "org/webrtc/MediaStream"); - LoadClass(jni, "org/webrtc/MediaStreamTrack$State"); - LoadClass(jni, "org/webrtc/PeerConnection$SignalingState"); - LoadClass(jni, "org/webrtc/PeerConnection$IceConnectionState"); - LoadClass(jni, "org/webrtc/PeerConnection$IceGatheringState"); - LoadClass(jni, "org/webrtc/SessionDescription"); - LoadClass(jni, "org/webrtc/SessionDescription$Type"); - LoadClass(jni, "org/webrtc/StatsReport"); - LoadClass(jni, "org/webrtc/StatsReport$Value"); - LoadClass(jni, "org/webrtc/VideoRenderer$I420Frame"); - LoadClass(jni, "org/webrtc/VideoTrack"); - } - - ~ClassReferenceHolder() { - CHECK(classes_.empty(), "Must call FreeReferences() before dtor!"); - } - - void FreeReferences(JNIEnv* jni) { - for (std::map<std::string, jclass>::const_iterator it = classes_.begin(); - it != classes_.end(); ++it) { - jni->DeleteGlobalRef(it->second); - } - classes_.clear(); - } - - jclass GetClass(const std::string& name) { - std::map<std::string, jclass>::iterator it = classes_.find(name); - CHECK(it != classes_.end(), "Unexpected GetClass() call for: " << name); - return it->second; - } - - private: - void LoadClass(JNIEnv* jni, const std::string& name) { - jclass localRef = jni->FindClass(name.c_str()); - CHECK_EXCEPTION(jni, "error during FindClass: " << name); - CHECK(localRef, name); - jclass globalRef = reinterpret_cast<jclass>(jni->NewGlobalRef(localRef)); - CHECK_EXCEPTION(jni, "error during NewGlobalRef: " << name); - CHECK(globalRef, name); - bool inserted = classes_.insert(std::make_pair(name, globalRef)).second; - CHECK(inserted, "Duplicate class name: " << name); - } - - std::map<std::string, jclass> classes_; -}; - -// Allocated in JNI_OnLoad(), freed in JNI_OnUnLoad(). -static ClassReferenceHolder* g_class_reference_holder = NULL; - -// JNIEnv-helper methods that CHECK success: no Java exception thrown and found -// object/class/method/field is non-null. 
-jmethodID GetMethodID( - JNIEnv* jni, jclass c, const std::string& name, const char* signature) { - jmethodID m = jni->GetMethodID(c, name.c_str(), signature); - CHECK_EXCEPTION(jni, - "error during GetMethodID: " << name << ", " << signature); - CHECK(m, name << ", " << signature); - return m; -} - -jmethodID GetStaticMethodID( - JNIEnv* jni, jclass c, const char* name, const char* signature) { - jmethodID m = jni->GetStaticMethodID(c, name, signature); - CHECK_EXCEPTION(jni, - "error during GetStaticMethodID: " - << name << ", " << signature); - CHECK(m, name << ", " << signature); - return m; -} - -jfieldID GetFieldID( - JNIEnv* jni, jclass c, const char* name, const char* signature) { - jfieldID f = jni->GetFieldID(c, name, signature); - CHECK_EXCEPTION(jni, "error during GetFieldID"); - CHECK(f, name << ", " << signature); - return f; -} - -jclass FindClass(JNIEnv* jni, const char* name) { - return g_class_reference_holder->GetClass(name); -} - -jclass GetObjectClass(JNIEnv* jni, jobject object) { - jclass c = jni->GetObjectClass(object); - CHECK_EXCEPTION(jni, "error during GetObjectClass"); - CHECK(c, ""); - return c; -} - -jobject GetObjectField(JNIEnv* jni, jobject object, jfieldID id) { - jobject o = jni->GetObjectField(object, id); - CHECK_EXCEPTION(jni, "error during GetObjectField"); - CHECK(o, ""); - return o; -} - -jstring GetStringField(JNIEnv* jni, jobject object, jfieldID id) { - return static_cast<jstring>(GetObjectField(jni, object, id)); -} - -jlong GetLongField(JNIEnv* jni, jobject object, jfieldID id) { - jlong l = jni->GetLongField(object, id); - CHECK_EXCEPTION(jni, "error during GetLongField"); - return l; -} - -jint GetIntField(JNIEnv* jni, jobject object, jfieldID id) { - jint i = jni->GetIntField(object, id); - CHECK_EXCEPTION(jni, "error during GetIntField"); - return i; -} - -bool GetBooleanField(JNIEnv* jni, jobject object, jfieldID id) { - jboolean b = jni->GetBooleanField(object, id); - CHECK_EXCEPTION(jni, "error during GetBooleanField"); - return b; -} - -jobject NewGlobalRef(JNIEnv* jni, jobject o) { - jobject ret = jni->NewGlobalRef(o); - CHECK_EXCEPTION(jni, "error during NewGlobalRef"); - CHECK(ret, ""); - return ret; -} - -void DeleteGlobalRef(JNIEnv* jni, jobject o) { - jni->DeleteGlobalRef(o); - CHECK_EXCEPTION(jni, "error during DeleteGlobalRef"); -} - -// Given a jweak reference, allocate a (strong) local reference scoped to the -// lifetime of this object if the weak reference is still valid, or NULL -// otherwise. -class WeakRef { - public: - WeakRef(JNIEnv* jni, jweak ref) - : jni_(jni), obj_(jni_->NewLocalRef(ref)) { - CHECK_EXCEPTION(jni, "error during NewLocalRef"); - } - ~WeakRef() { - if (obj_) { - jni_->DeleteLocalRef(obj_); - CHECK_EXCEPTION(jni_, "error during DeleteLocalRef"); - } - } - jobject obj() { return obj_; } - - private: - JNIEnv* const jni_; - jobject const obj_; -}; - -// Given a local ref, take ownership of it and delete the ref when this goes out -// of scope. -template<class T> // T is jclass, jobject, jintArray, etc. -class ScopedLocalRef { - public: - ScopedLocalRef(JNIEnv* jni, T obj) - : jni_(jni), obj_(obj) {} - ~ScopedLocalRef() { - jni_->DeleteLocalRef(obj_); - } - T operator*() const { - return obj_; - } - private: - JNIEnv* jni_; - T obj_; -}; - -// Scoped holder for global Java refs. -template<class T> // T is jclass, jobject, jintArray, etc. 
-class ScopedGlobalRef { - public: - explicit ScopedGlobalRef(JNIEnv* jni, T obj) - : obj_(static_cast<T>(jni->NewGlobalRef(obj))) {} - ~ScopedGlobalRef() { - DeleteGlobalRef(AttachCurrentThreadIfNeeded(), obj_); - } - T operator*() const { - return obj_; - } - private: - T obj_; -}; - -// Return the (singleton) Java Enum object corresponding to |index|; -// |state_class_fragment| is something like "MediaSource$State". -jobject JavaEnumFromIndex( - JNIEnv* jni, const std::string& state_class_fragment, int index) { - std::string state_class_name = "org/webrtc/" + state_class_fragment; - jclass state_class = FindClass(jni, state_class_name.c_str()); - jmethodID state_values_id = GetStaticMethodID( - jni, state_class, "values", ("()[L" + state_class_name + ";").c_str()); - ScopedLocalRef<jobjectArray> state_values( - jni, - (jobjectArray)jni->CallStaticObjectMethod(state_class, state_values_id)); - CHECK_EXCEPTION(jni, "error during CallStaticObjectMethod"); - jobject ret = jni->GetObjectArrayElement(*state_values, index); - CHECK_EXCEPTION(jni, "error during GetObjectArrayElement"); - return ret; -} - -// Given a UTF-8 encoded |native| string return a new (UTF-16) jstring. -static jstring JavaStringFromStdString(JNIEnv* jni, const std::string& native) { - UnicodeString ustr(UnicodeString::fromUTF8(native)); - jstring jstr = jni->NewString(ustr.getBuffer(), ustr.length()); - CHECK_EXCEPTION(jni, "error during NewString"); - return jstr; -} - -// Given a (UTF-16) jstring return a new UTF-8 native string. -static std::string JavaToStdString(JNIEnv* jni, const jstring& j_string) { - const jchar* jchars = jni->GetStringChars(j_string, NULL); - CHECK_EXCEPTION(jni, "Error during GetStringChars"); - UnicodeString ustr(jchars, jni->GetStringLength(j_string)); - CHECK_EXCEPTION(jni, "Error during GetStringLength"); - jni->ReleaseStringChars(j_string, jchars); - CHECK_EXCEPTION(jni, "Error during ReleaseStringChars"); - std::string ret; - return ustr.toUTF8String(ret); -} - -static DataChannelInit JavaDataChannelInitToNative( - JNIEnv* jni, jobject j_init) { - DataChannelInit init; - - jclass j_init_class = FindClass(jni, "org/webrtc/DataChannel$Init"); - jfieldID ordered_id = GetFieldID(jni, j_init_class, "ordered", "Z"); - jfieldID max_retransmit_time_id = - GetFieldID(jni, j_init_class, "maxRetransmitTimeMs", "I"); - jfieldID max_retransmits_id = - GetFieldID(jni, j_init_class, "maxRetransmits", "I"); - jfieldID protocol_id = - GetFieldID(jni, j_init_class, "protocol", "Ljava/lang/String;"); - jfieldID negotiated_id = GetFieldID(jni, j_init_class, "negotiated", "Z"); - jfieldID id_id = GetFieldID(jni, j_init_class, "id", "I"); - - init.ordered = GetBooleanField(jni, j_init, ordered_id); - init.maxRetransmitTime = GetIntField(jni, j_init, max_retransmit_time_id); - init.maxRetransmits = GetIntField(jni, j_init, max_retransmits_id); - init.protocol = JavaToStdString( - jni, GetStringField(jni, j_init, protocol_id)); - init.negotiated = GetBooleanField(jni, j_init, negotiated_id); - init.id = GetIntField(jni, j_init, id_id); - - return init; -} - -class ConstraintsWrapper; - -// Adapter between the C++ PeerConnectionObserver interface and the Java -// PeerConnection.Observer interface. Wraps an instance of the Java interface -// and dispatches C++ callbacks to Java. 
-class PCOJava : public PeerConnectionObserver { - public: - PCOJava(JNIEnv* jni, jobject j_observer) - : j_observer_global_(jni, j_observer), - j_observer_class_(jni, GetObjectClass(jni, *j_observer_global_)), - j_media_stream_class_(jni, FindClass(jni, "org/webrtc/MediaStream")), - j_media_stream_ctor_(GetMethodID( - jni, *j_media_stream_class_, "<init>", "(J)V")), - j_audio_track_class_(jni, FindClass(jni, "org/webrtc/AudioTrack")), - j_audio_track_ctor_(GetMethodID( - jni, *j_audio_track_class_, "<init>", "(J)V")), - j_video_track_class_(jni, FindClass(jni, "org/webrtc/VideoTrack")), - j_video_track_ctor_(GetMethodID( - jni, *j_video_track_class_, "<init>", "(J)V")), - j_data_channel_class_(jni, FindClass(jni, "org/webrtc/DataChannel")), - j_data_channel_ctor_(GetMethodID( - jni, *j_data_channel_class_, "<init>", "(J)V")) { - } - - virtual ~PCOJava() {} - - virtual void OnIceCandidate(const IceCandidateInterface* candidate) OVERRIDE { - std::string sdp; - CHECK(candidate->ToString(&sdp), "got so far: " << sdp); - jclass candidate_class = FindClass(jni(), "org/webrtc/IceCandidate"); - jmethodID ctor = GetMethodID(jni(), candidate_class, - "<init>", "(Ljava/lang/String;ILjava/lang/String;)V"); - ScopedLocalRef<jstring> j_mid( - jni(), JavaStringFromStdString(jni(), candidate->sdp_mid())); - ScopedLocalRef<jstring> j_sdp(jni(), JavaStringFromStdString(jni(), sdp)); - ScopedLocalRef<jobject> j_candidate(jni(), jni()->NewObject( - candidate_class, ctor, *j_mid, candidate->sdp_mline_index(), *j_sdp)); - CHECK_EXCEPTION(jni(), "error during NewObject"); - jmethodID m = GetMethodID(jni(), *j_observer_class_, - "onIceCandidate", "(Lorg/webrtc/IceCandidate;)V"); - jni()->CallVoidMethod(*j_observer_global_, m, *j_candidate); - CHECK_EXCEPTION(jni(), "error during CallVoidMethod"); - } - - virtual void OnError() OVERRIDE { - jmethodID m = GetMethodID(jni(), *j_observer_class_, "onError", "(V)V"); - jni()->CallVoidMethod(*j_observer_global_, m); - CHECK_EXCEPTION(jni(), "error during CallVoidMethod"); - } - - virtual void OnSignalingChange( - PeerConnectionInterface::SignalingState new_state) OVERRIDE { - jmethodID m = GetMethodID( - jni(), *j_observer_class_, "onSignalingChange", - "(Lorg/webrtc/PeerConnection$SignalingState;)V"); - ScopedLocalRef<jobject> new_state_enum(jni(), JavaEnumFromIndex( - jni(), "PeerConnection$SignalingState", new_state)); - jni()->CallVoidMethod(*j_observer_global_, m, *new_state_enum); - CHECK_EXCEPTION(jni(), "error during CallVoidMethod"); - } - - virtual void OnIceConnectionChange( - PeerConnectionInterface::IceConnectionState new_state) OVERRIDE { - jmethodID m = GetMethodID( - jni(), *j_observer_class_, "onIceConnectionChange", - "(Lorg/webrtc/PeerConnection$IceConnectionState;)V"); - ScopedLocalRef<jobject> new_state_enum(jni(), JavaEnumFromIndex( - jni(), "PeerConnection$IceConnectionState", new_state)); - jni()->CallVoidMethod(*j_observer_global_, m, *new_state_enum); - CHECK_EXCEPTION(jni(), "error during CallVoidMethod"); - } - - virtual void OnIceGatheringChange( - PeerConnectionInterface::IceGatheringState new_state) OVERRIDE { - jmethodID m = GetMethodID( - jni(), *j_observer_class_, "onIceGatheringChange", - "(Lorg/webrtc/PeerConnection$IceGatheringState;)V"); - ScopedLocalRef<jobject> new_state_enum(jni(), JavaEnumFromIndex( - jni(), "PeerConnection$IceGatheringState", new_state)); - jni()->CallVoidMethod(*j_observer_global_, m, *new_state_enum); - CHECK_EXCEPTION(jni(), "error during CallVoidMethod"); - } - - virtual void 
OnAddStream(MediaStreamInterface* stream) OVERRIDE { - ScopedLocalRef<jobject> j_stream(jni(), jni()->NewObject( - *j_media_stream_class_, j_media_stream_ctor_, (jlong)stream)); - CHECK_EXCEPTION(jni(), "error during NewObject"); - - AudioTrackVector audio_tracks = stream->GetAudioTracks(); - for (size_t i = 0; i < audio_tracks.size(); ++i) { - AudioTrackInterface* track = audio_tracks[i]; - ScopedLocalRef<jstring> id( - jni(), JavaStringFromStdString(jni(), track->id())); - ScopedLocalRef<jobject> j_track(jni(), jni()->NewObject( - *j_audio_track_class_, j_audio_track_ctor_, (jlong)track, *id)); - CHECK_EXCEPTION(jni(), "error during NewObject"); - jfieldID audio_tracks_id = GetFieldID( - jni(), *j_media_stream_class_, "audioTracks", "Ljava/util/List;"); - ScopedLocalRef<jobject> audio_tracks(jni(), GetObjectField( - jni(), *j_stream, audio_tracks_id)); - jmethodID add = GetMethodID(jni(), - GetObjectClass(jni(), *audio_tracks), "add", "(Ljava/lang/Object;)Z"); - jboolean added = jni()->CallBooleanMethod(*audio_tracks, add, *j_track); - CHECK_EXCEPTION(jni(), "error during CallBooleanMethod"); - CHECK(added, ""); - } - - VideoTrackVector video_tracks = stream->GetVideoTracks(); - for (size_t i = 0; i < video_tracks.size(); ++i) { - VideoTrackInterface* track = video_tracks[i]; - ScopedLocalRef<jstring> id( - jni(), JavaStringFromStdString(jni(), track->id())); - ScopedLocalRef<jobject> j_track(jni(), jni()->NewObject( - *j_video_track_class_, j_video_track_ctor_, (jlong)track, *id)); - CHECK_EXCEPTION(jni(), "error during NewObject"); - jfieldID video_tracks_id = GetFieldID( - jni(), *j_media_stream_class_, "videoTracks", "Ljava/util/List;"); - ScopedLocalRef<jobject> video_tracks(jni(), GetObjectField( - jni(), *j_stream, video_tracks_id)); - jmethodID add = GetMethodID(jni(), - GetObjectClass(jni(), *video_tracks), "add", "(Ljava/lang/Object;)Z"); - jboolean added = jni()->CallBooleanMethod(*video_tracks, add, *j_track); - CHECK_EXCEPTION(jni(), "error during CallBooleanMethod"); - CHECK(added, ""); - } - streams_[stream] = jni()->NewWeakGlobalRef(*j_stream); - CHECK_EXCEPTION(jni(), "error during NewWeakGlobalRef"); - - jmethodID m = GetMethodID(jni(), *j_observer_class_, "onAddStream", - "(Lorg/webrtc/MediaStream;)V"); - jni()->CallVoidMethod(*j_observer_global_, m, *j_stream); - CHECK_EXCEPTION(jni(), "error during CallVoidMethod"); - } - - virtual void OnRemoveStream(MediaStreamInterface* stream) OVERRIDE { - NativeToJavaStreamsMap::iterator it = streams_.find(stream); - CHECK(it != streams_.end(), "unexpected stream: " << std::hex << stream); - - WeakRef s(jni(), it->second); - streams_.erase(it); - if (!s.obj()) - return; - - jmethodID m = GetMethodID(jni(), *j_observer_class_, "onRemoveStream", - "(Lorg/webrtc/MediaStream;)V"); - jni()->CallVoidMethod(*j_observer_global_, m, s.obj()); - CHECK_EXCEPTION(jni(), "error during CallVoidMethod"); - } - - virtual void OnDataChannel(DataChannelInterface* channel) OVERRIDE { - ScopedLocalRef<jobject> j_channel(jni(), jni()->NewObject( - *j_data_channel_class_, j_data_channel_ctor_, (jlong)channel)); - CHECK_EXCEPTION(jni(), "error during NewObject"); - // Channel is now owned by Java object, and will be freed from - // DataChannel.dispose(). 
- channel->AddRef(); - - jmethodID m = GetMethodID(jni(), *j_observer_class_, "onDataChannel", - "(Lorg/webrtc/DataChannel;)V"); - jni()->CallVoidMethod(*j_observer_global_, m, *j_channel); - CHECK_EXCEPTION(jni(), "error during CallVoidMethod"); - } - - void SetConstraints(ConstraintsWrapper* constraints) { - CHECK(!constraints_.get(), "constraints already set!"); - constraints_.reset(constraints); - } - - const ConstraintsWrapper* constraints() { return constraints_.get(); } - - private: - JNIEnv* jni() { - return AttachCurrentThreadIfNeeded(); - } - - const ScopedGlobalRef<jobject> j_observer_global_; - const ScopedGlobalRef<jclass> j_observer_class_; - const ScopedGlobalRef<jclass> j_media_stream_class_; - const jmethodID j_media_stream_ctor_; - const ScopedGlobalRef<jclass> j_audio_track_class_; - const jmethodID j_audio_track_ctor_; - const ScopedGlobalRef<jclass> j_video_track_class_; - const jmethodID j_video_track_ctor_; - const ScopedGlobalRef<jclass> j_data_channel_class_; - const jmethodID j_data_channel_ctor_; - typedef std::map<void*, jweak> NativeToJavaStreamsMap; - NativeToJavaStreamsMap streams_; // C++ -> Java streams. - talk_base::scoped_ptr<ConstraintsWrapper> constraints_; -}; - -// Wrapper for a Java MediaConstraints object. Copies all needed data so when -// the constructor returns the Java object is no longer needed. -class ConstraintsWrapper : public MediaConstraintsInterface { - public: - ConstraintsWrapper(JNIEnv* jni, jobject j_constraints) { - PopulateConstraintsFromJavaPairList( - jni, j_constraints, "mandatory", &mandatory_); - PopulateConstraintsFromJavaPairList( - jni, j_constraints, "optional", &optional_); - } - - virtual ~ConstraintsWrapper() {} - - // MediaConstraintsInterface. - virtual const Constraints& GetMandatory() const OVERRIDE { - return mandatory_; - } - - virtual const Constraints& GetOptional() const OVERRIDE { - return optional_; - } - - private: - // Helper for translating a List<Pair<String, String>> to a Constraints. 
- static void PopulateConstraintsFromJavaPairList( - JNIEnv* jni, jobject j_constraints, - const char* field_name, Constraints* field) { - jfieldID j_id = GetFieldID(jni, - GetObjectClass(jni, j_constraints), field_name, "Ljava/util/List;"); - jobject j_list = GetObjectField(jni, j_constraints, j_id); - jmethodID j_iterator_id = GetMethodID(jni, - GetObjectClass(jni, j_list), "iterator", "()Ljava/util/Iterator;"); - jobject j_iterator = jni->CallObjectMethod(j_list, j_iterator_id); - CHECK_EXCEPTION(jni, "error during CallObjectMethod"); - jmethodID j_has_next = GetMethodID(jni, - GetObjectClass(jni, j_iterator), "hasNext", "()Z"); - jmethodID j_next = GetMethodID(jni, - GetObjectClass(jni, j_iterator), "next", "()Ljava/lang/Object;"); - while (jni->CallBooleanMethod(j_iterator, j_has_next)) { - CHECK_EXCEPTION(jni, "error during CallBooleanMethod"); - jobject entry = jni->CallObjectMethod(j_iterator, j_next); - CHECK_EXCEPTION(jni, "error during CallObjectMethod"); - jmethodID get_key = GetMethodID(jni, - GetObjectClass(jni, entry), "getKey", "()Ljava/lang/String;"); - jstring j_key = reinterpret_cast<jstring>( - jni->CallObjectMethod(entry, get_key)); - CHECK_EXCEPTION(jni, "error during CallObjectMethod"); - jmethodID get_value = GetMethodID(jni, - GetObjectClass(jni, entry), "getValue", "()Ljava/lang/String;"); - jstring j_value = reinterpret_cast<jstring>( - jni->CallObjectMethod(entry, get_value)); - CHECK_EXCEPTION(jni, "error during CallObjectMethod"); - field->push_back(Constraint(JavaToStdString(jni, j_key), - JavaToStdString(jni, j_value))); - } - CHECK_EXCEPTION(jni, "error during CallBooleanMethod"); - } - - Constraints mandatory_; - Constraints optional_; -}; - -static jobject JavaSdpFromNativeSdp( - JNIEnv* jni, const SessionDescriptionInterface* desc) { - std::string sdp; - CHECK(desc->ToString(&sdp), "got so far: " << sdp); - ScopedLocalRef<jstring> j_description(jni, JavaStringFromStdString(jni, sdp)); - - jclass j_type_class = FindClass( - jni, "org/webrtc/SessionDescription$Type"); - jmethodID j_type_from_canonical = GetStaticMethodID( - jni, j_type_class, "fromCanonicalForm", - "(Ljava/lang/String;)Lorg/webrtc/SessionDescription$Type;"); - ScopedLocalRef<jstring> j_type_string( - jni, JavaStringFromStdString(jni, desc->type())); - jobject j_type = jni->CallStaticObjectMethod( - j_type_class, j_type_from_canonical, *j_type_string); - CHECK_EXCEPTION(jni, "error during CallObjectMethod"); - - jclass j_sdp_class = FindClass(jni, "org/webrtc/SessionDescription"); - jmethodID j_sdp_ctor = GetMethodID( - jni, j_sdp_class, "<init>", - "(Lorg/webrtc/SessionDescription$Type;Ljava/lang/String;)V"); - jobject j_sdp = jni->NewObject( - j_sdp_class, j_sdp_ctor, j_type, *j_description); - CHECK_EXCEPTION(jni, "error during NewObject"); - return j_sdp; -} - -template <class T> // T is one of {Create,Set}SessionDescriptionObserver. -class SdpObserverWrapper : public T { - public: - SdpObserverWrapper(JNIEnv* jni, jobject j_observer, - ConstraintsWrapper* constraints) - : constraints_(constraints), - j_observer_global_(jni, j_observer), - j_observer_class_(jni, GetObjectClass(jni, j_observer)) { - } - - virtual ~SdpObserverWrapper() {} - - // Can't mark OVERRIDE because of templating. - virtual void OnSuccess() { - jmethodID m = GetMethodID(jni(), *j_observer_class_, "onSetSuccess", "()V"); - jni()->CallVoidMethod(*j_observer_global_, m); - CHECK_EXCEPTION(jni(), "error during CallVoidMethod"); - } - - // Can't mark OVERRIDE because of templating. 
- virtual void OnSuccess(SessionDescriptionInterface* desc) { - jmethodID m = GetMethodID( - jni(), *j_observer_class_, "onCreateSuccess", - "(Lorg/webrtc/SessionDescription;)V"); - ScopedLocalRef<jobject> j_sdp(jni(), JavaSdpFromNativeSdp(jni(), desc)); - jni()->CallVoidMethod(*j_observer_global_, m, *j_sdp); - CHECK_EXCEPTION(jni(), "error during CallVoidMethod"); - } - - protected: - // Common implementation for failure of Set & Create types, distinguished by - // |op| being "Set" or "Create". - void OnFailure(const std::string& op, const std::string& error) { - jmethodID m = GetMethodID(jni(), *j_observer_class_, "on" + op + "Failure", - "(Ljava/lang/String;)V"); - ScopedLocalRef<jstring> j_error_string( - jni(), JavaStringFromStdString(jni(), error)); - jni()->CallVoidMethod(*j_observer_global_, m, *j_error_string); - CHECK_EXCEPTION(jni(), "error during CallVoidMethod"); - } - - private: - JNIEnv* jni() { - return AttachCurrentThreadIfNeeded(); - } - - talk_base::scoped_ptr<ConstraintsWrapper> constraints_; - const ScopedGlobalRef<jobject> j_observer_global_; - const ScopedGlobalRef<jclass> j_observer_class_; -}; - -class CreateSdpObserverWrapper - : public SdpObserverWrapper<CreateSessionDescriptionObserver> { - public: - CreateSdpObserverWrapper(JNIEnv* jni, jobject j_observer, - ConstraintsWrapper* constraints) - : SdpObserverWrapper(jni, j_observer, constraints) {} - - virtual void OnFailure(const std::string& error) OVERRIDE { - SdpObserverWrapper::OnFailure(std::string("Create"), error); - } -}; - -class SetSdpObserverWrapper - : public SdpObserverWrapper<SetSessionDescriptionObserver> { - public: - SetSdpObserverWrapper(JNIEnv* jni, jobject j_observer, - ConstraintsWrapper* constraints) - : SdpObserverWrapper(jni, j_observer, constraints) {} - - virtual void OnFailure(const std::string& error) OVERRIDE { - SdpObserverWrapper::OnFailure(std::string("Set"), error); - } -}; - -// Adapter for a Java DataChannel$Observer presenting a C++ DataChannelObserver -// and dispatching the callback from C++ back to Java. 
-class DataChannelObserverWrapper : public DataChannelObserver { - public: - DataChannelObserverWrapper(JNIEnv* jni, jobject j_observer) - : j_observer_global_(jni, j_observer), - j_observer_class_(jni, GetObjectClass(jni, j_observer)), - j_on_state_change_mid_(GetMethodID(jni, *j_observer_class_, - "onStateChange", "()V")), - j_on_message_mid_(GetMethodID(jni, *j_observer_class_, "onMessage", - "(Lorg/webrtc/DataChannel$Buffer;)V")), - j_buffer_class_(jni, FindClass(jni, "org/webrtc/DataChannel$Buffer")), - j_buffer_ctor_(GetMethodID(jni, *j_buffer_class_, - "<init>", "(Ljava/nio/ByteBuffer;Z)V")) { - } - - virtual ~DataChannelObserverWrapper() {} - - virtual void OnStateChange() OVERRIDE { - jni()->CallVoidMethod(*j_observer_global_, j_on_state_change_mid_); - CHECK_EXCEPTION(jni(), "error during CallVoidMethod"); - } - - virtual void OnMessage(const DataBuffer& buffer) OVERRIDE { - jobject byte_buffer = - jni()->NewDirectByteBuffer(const_cast<char*>(buffer.data.data()), - buffer.data.length()); - jobject j_buffer = jni()->NewObject(*j_buffer_class_, j_buffer_ctor_, - byte_buffer, buffer.binary); - jni()->CallVoidMethod(*j_observer_global_, j_on_message_mid_, j_buffer); - CHECK_EXCEPTION(jni(), "error during CallVoidMethod"); - } - - private: - JNIEnv* jni() { - return AttachCurrentThreadIfNeeded(); - } - - const ScopedGlobalRef<jobject> j_observer_global_; - const ScopedGlobalRef<jclass> j_observer_class_; - const ScopedGlobalRef<jclass> j_buffer_class_; - const jmethodID j_on_state_change_mid_; - const jmethodID j_on_message_mid_; - const jmethodID j_buffer_ctor_; -}; - -// Adapter for a Java StatsObserver presenting a C++ StatsObserver and -// dispatching the callback from C++ back to Java. -class StatsObserverWrapper : public StatsObserver { - public: - StatsObserverWrapper(JNIEnv* jni, jobject j_observer) - : j_observer_global_(jni, j_observer), - j_observer_class_(jni, GetObjectClass(jni, j_observer)), - j_stats_report_class_(jni, FindClass(jni, "org/webrtc/StatsReport")), - j_stats_report_ctor_(GetMethodID( - jni, *j_stats_report_class_, "<init>", - "(Ljava/lang/String;Ljava/lang/String;D" - "[Lorg/webrtc/StatsReport$Value;)V")), - j_value_class_(jni, FindClass( - jni, "org/webrtc/StatsReport$Value")), - j_value_ctor_(GetMethodID( - jni, *j_value_class_, "<init>", - "(Ljava/lang/String;Ljava/lang/String;)V")) { - } - - virtual ~StatsObserverWrapper() {} - - virtual void OnComplete(const std::vector<StatsReport>& reports) OVERRIDE { - ScopedLocalRef<jobjectArray> j_reports(jni(), - ReportsToJava(jni(), reports)); - jmethodID m = GetMethodID(jni(), *j_observer_class_, "onComplete", - "([Lorg/webrtc/StatsReport;)V"); - jni()->CallVoidMethod(*j_observer_global_, m, *j_reports); - CHECK_EXCEPTION(jni(), "error during CallVoidMethod"); - } - - private: - jobjectArray ReportsToJava( - JNIEnv* jni, const std::vector<StatsReport>& reports) { - jobjectArray reports_array = jni->NewObjectArray( - reports.size(), *j_stats_report_class_, NULL); - for (int i = 0; i < reports.size(); ++i) { - const StatsReport& report = reports[i]; - ScopedLocalRef<jstring> j_id( - jni, JavaStringFromStdString(jni, report.id)); - ScopedLocalRef<jstring> j_type( - jni, JavaStringFromStdString(jni, report.type)); - ScopedLocalRef<jobjectArray> j_values( - jni, ValuesToJava(jni, report.values)); - ScopedLocalRef<jobject> j_report(jni, jni->NewObject( - *j_stats_report_class_, j_stats_report_ctor_, *j_id, *j_type, - report.timestamp, *j_values)); - jni->SetObjectArrayElement(reports_array, i, *j_report); - } - 
return reports_array; - } - - jobjectArray ValuesToJava(JNIEnv* jni, const StatsReport::Values& values) { - jobjectArray j_values = jni->NewObjectArray( - values.size(), *j_value_class_, NULL); - for (int i = 0; i < values.size(); ++i) { - const StatsReport::Value& value = values[i]; - ScopedLocalRef<jstring> j_name( - jni, JavaStringFromStdString(jni, value.name)); - ScopedLocalRef<jstring> j_value( - jni, JavaStringFromStdString(jni, value.value)); - ScopedLocalRef<jobject> j_element_value(jni, jni->NewObject( - *j_value_class_, j_value_ctor_, *j_name, *j_value)); - jni->SetObjectArrayElement(j_values, i, *j_element_value); - } - return j_values; - } - - JNIEnv* jni() { - return AttachCurrentThreadIfNeeded(); - } - - const ScopedGlobalRef<jobject> j_observer_global_; - const ScopedGlobalRef<jclass> j_observer_class_; - const ScopedGlobalRef<jclass> j_stats_report_class_; - const jmethodID j_stats_report_ctor_; - const ScopedGlobalRef<jclass> j_value_class_; - const jmethodID j_value_ctor_; -}; - -// Adapter presenting a cricket::VideoRenderer as a -// webrtc::VideoRendererInterface. -class VideoRendererWrapper : public VideoRendererInterface { - public: - static VideoRendererWrapper* Create(cricket::VideoRenderer* renderer) { - if (renderer) - return new VideoRendererWrapper(renderer); - return NULL; - } - - virtual ~VideoRendererWrapper() {} - - virtual void SetSize(int width, int height) OVERRIDE { - const bool kNotReserved = false; // What does this param mean?? - renderer_->SetSize(width, height, kNotReserved); - } - - virtual void RenderFrame(const cricket::VideoFrame* frame) OVERRIDE { - renderer_->RenderFrame(frame); - } - - private: - explicit VideoRendererWrapper(cricket::VideoRenderer* renderer) - : renderer_(renderer) {} - - talk_base::scoped_ptr<cricket::VideoRenderer> renderer_; -}; - -// Wrapper dispatching webrtc::VideoRendererInterface to a Java VideoRenderer -// instance. -class JavaVideoRendererWrapper : public VideoRendererInterface { - public: - JavaVideoRendererWrapper(JNIEnv* jni, jobject j_callbacks) - : j_callbacks_(jni, j_callbacks), - j_set_size_id_(GetMethodID( - jni, GetObjectClass(jni, j_callbacks), "setSize", "(II)V")), - j_render_frame_id_(GetMethodID( - jni, GetObjectClass(jni, j_callbacks), "renderFrame", - "(Lorg/webrtc/VideoRenderer$I420Frame;)V")), - j_frame_class_(jni, - FindClass(jni, "org/webrtc/VideoRenderer$I420Frame")), - j_frame_ctor_id_(GetMethodID( - jni, *j_frame_class_, "<init>", "(II[I[Ljava/nio/ByteBuffer;)V")), - j_byte_buffer_class_(jni, FindClass(jni, "java/nio/ByteBuffer")) { - CHECK_EXCEPTION(jni, ""); - } - - virtual ~JavaVideoRendererWrapper() {} - - virtual void SetSize(int width, int height) OVERRIDE { - jni()->CallVoidMethod(*j_callbacks_, j_set_size_id_, width, height); - CHECK_EXCEPTION(jni(), ""); - } - - virtual void RenderFrame(const cricket::VideoFrame* frame) OVERRIDE { - ScopedLocalRef<jobject> j_frame(jni(), CricketToJavaFrame(frame)); - jni()->CallVoidMethod(*j_callbacks_, j_render_frame_id_, *j_frame); - CHECK_EXCEPTION(jni(), ""); - } - - private: - // Return a VideoRenderer.I420Frame referring to the data in |frame|. 
- jobject CricketToJavaFrame(const cricket::VideoFrame* frame) { - ScopedLocalRef<jintArray> strides(jni(), jni()->NewIntArray(3)); - jint* strides_array = jni()->GetIntArrayElements(*strides, NULL); - strides_array[0] = frame->GetYPitch(); - strides_array[1] = frame->GetUPitch(); - strides_array[2] = frame->GetVPitch(); - jni()->ReleaseIntArrayElements(*strides, strides_array, 0); - ScopedLocalRef<jobjectArray> planes( - jni(), jni()->NewObjectArray(3, *j_byte_buffer_class_, NULL)); - ScopedLocalRef<jobject> y_buffer(jni(), jni()->NewDirectByteBuffer( - const_cast<uint8*>(frame->GetYPlane()), - frame->GetYPitch() * frame->GetHeight())); - ScopedLocalRef<jobject> u_buffer(jni(), jni()->NewDirectByteBuffer( - const_cast<uint8*>(frame->GetUPlane()), frame->GetChromaSize())); - ScopedLocalRef<jobject> v_buffer(jni(), jni()->NewDirectByteBuffer( - const_cast<uint8*>(frame->GetVPlane()), frame->GetChromaSize())); - jni()->SetObjectArrayElement(*planes, 0, *y_buffer); - jni()->SetObjectArrayElement(*planes, 1, *u_buffer); - jni()->SetObjectArrayElement(*planes, 2, *v_buffer); - return jni()->NewObject( - *j_frame_class_, j_frame_ctor_id_, - frame->GetWidth(), frame->GetHeight(), *strides, *planes); - } - - JNIEnv* jni() { - return AttachCurrentThreadIfNeeded(); - } - - ScopedGlobalRef<jobject> j_callbacks_; - jmethodID j_set_size_id_; - jmethodID j_render_frame_id_; - ScopedGlobalRef<jclass> j_frame_class_; - jmethodID j_frame_ctor_id_; - ScopedGlobalRef<jclass> j_byte_buffer_class_; -}; - -} // anonymous namespace - - -// Convenience macro defining JNI-accessible methods in the org.webrtc package. -// Eliminates unnecessary boilerplate and line-wraps, reducing visual clutter. -#define JOW(rettype, name) extern "C" rettype JNIEXPORT JNICALL \ - Java_org_webrtc_##name - -extern "C" jint JNIEXPORT JNICALL JNI_OnLoad(JavaVM *jvm, void *reserved) { - CHECK(!g_jvm, "JNI_OnLoad called more than once!"); - g_jvm = jvm; - CHECK(g_jvm, "JNI_OnLoad handed NULL?"); - - CHECK(talk_base::InitializeSSL(), "Failed to InitializeSSL()"); - - JNIEnv* jni; - if (jvm->GetEnv(reinterpret_cast<void**>(&jni), JNI_VERSION_1_6) != JNI_OK) - return -1; - g_class_reference_holder = new ClassReferenceHolder(jni); - - webrtc::Trace::CreateTrace(); - - return JNI_VERSION_1_6; -} - -extern "C" void JNIEXPORT JNICALL JNI_OnUnLoad(JavaVM *jvm, void *reserved) { - webrtc::Trace::ReturnTrace(); - delete g_class_reference_holder; - g_class_reference_holder = NULL; - CHECK(talk_base::CleanupSSL(), "Failed to CleanupSSL()"); -} - -static talk_base::scoped_refptr<DataChannelInterface> ExtractNativeDC( - JNIEnv* jni, jobject j_dc) { - jfieldID native_dc_id = GetFieldID(jni, - GetObjectClass(jni, j_dc), "nativeDataChannel", "J"); - jlong j_d = GetLongField(jni, j_dc, native_dc_id); - return talk_base::scoped_refptr<DataChannelInterface>( - reinterpret_cast<DataChannelInterface*>(j_d)); -} - -JOW(jlong, DataChannel_registerObserverNative)( - JNIEnv* jni, jobject j_dc, jobject j_observer) { - talk_base::scoped_ptr<DataChannelObserverWrapper> observer( - new DataChannelObserverWrapper(jni, j_observer)); - ExtractNativeDC(jni, j_dc)->RegisterObserver(observer.get()); - return reinterpret_cast<jlong>(observer.release()); -} - -JOW(void, DataChannel_unregisterObserverNative)( - JNIEnv* jni, jobject j_dc, jlong native_observer) { - ExtractNativeDC(jni, j_dc)->UnregisterObserver(); - delete reinterpret_cast<DataChannelObserverWrapper*>(native_observer); -} - -JOW(jstring, DataChannel_label)(JNIEnv* jni, jobject j_dc) { - return 
JavaStringFromStdString(jni, ExtractNativeDC(jni, j_dc)->label()); -} - -JOW(jobject, DataChannel_state)(JNIEnv* jni, jobject j_dc) { - return JavaEnumFromIndex( - jni, "DataChannel$State", ExtractNativeDC(jni, j_dc)->state()); -} - -JOW(jlong, DataChannel_bufferedAmount)(JNIEnv* jni, jobject j_dc) { - uint64 buffered_amount = ExtractNativeDC(jni, j_dc)->buffered_amount(); - CHECK(buffered_amount <= std::numeric_limits<int64>::max(), - "buffered_amount overflowed jlong!"); - return static_cast<jlong>(buffered_amount); -} - -JOW(void, DataChannel_close)(JNIEnv* jni, jobject j_dc) { - ExtractNativeDC(jni, j_dc)->Close(); -} - -JOW(jboolean, DataChannel_sendNative)(JNIEnv* jni, jobject j_dc, - jbyteArray data, jboolean binary) { - jbyte* bytes = jni->GetByteArrayElements(data, NULL); - bool ret = ExtractNativeDC(jni, j_dc)->Send(DataBuffer( - talk_base::Buffer(bytes, jni->GetArrayLength(data)), - binary)); - jni->ReleaseByteArrayElements(data, bytes, JNI_ABORT); - return ret; -} - -JOW(void, DataChannel_dispose)(JNIEnv* jni, jobject j_dc) { - ExtractNativeDC(jni, j_dc)->Release(); -} - -JOW(void, Logging_nativeEnableTracing)( - JNIEnv* jni, jclass, jstring j_path, jint nativeLevels, - jint nativeSeverity) { - std::string path = JavaToStdString(jni, j_path); - if (nativeLevels != webrtc::kTraceNone) { - CHECK(!webrtc::Trace::SetTraceFile(path.c_str(), false), - "SetTraceFile failed"); - CHECK(!webrtc::Trace::SetLevelFilter(nativeLevels), - "SetLevelFilter failed"); - } - talk_base::LogMessage::LogToDebug(nativeSeverity); -} - -JOW(void, PeerConnection_freePeerConnection)(JNIEnv*, jclass, jlong j_p) { - reinterpret_cast<PeerConnectionInterface*>(j_p)->Release(); -} - -JOW(void, PeerConnection_freeObserver)(JNIEnv*, jclass, jlong j_p) { - PCOJava* p = reinterpret_cast<PCOJava*>(j_p); - delete p; -} - -JOW(void, MediaSource_free)(JNIEnv*, jclass, jlong j_p) { - reinterpret_cast<MediaSourceInterface*>(j_p)->Release(); -} - -JOW(void, VideoCapturer_free)(JNIEnv*, jclass, jlong j_p) { - delete reinterpret_cast<cricket::VideoCapturer*>(j_p); -} - -JOW(void, VideoRenderer_free)(JNIEnv*, jclass, jlong j_p) { - delete reinterpret_cast<VideoRendererWrapper*>(j_p); -} - -JOW(void, MediaStreamTrack_free)(JNIEnv*, jclass, jlong j_p) { - reinterpret_cast<MediaStreamTrackInterface*>(j_p)->Release(); -} - -JOW(jboolean, MediaStream_nativeAddAudioTrack)( - JNIEnv* jni, jclass, jlong pointer, jlong j_audio_track_pointer) { - talk_base::scoped_refptr<MediaStreamInterface> stream( - reinterpret_cast<MediaStreamInterface*>(pointer)); - talk_base::scoped_refptr<AudioTrackInterface> track( - reinterpret_cast<AudioTrackInterface*>(j_audio_track_pointer)); - return stream->AddTrack(track); -} - -JOW(jboolean, MediaStream_nativeAddVideoTrack)( - JNIEnv* jni, jclass, jlong pointer, jlong j_video_track_pointer) { - talk_base::scoped_refptr<MediaStreamInterface> stream( - reinterpret_cast<MediaStreamInterface*>(pointer)); - talk_base::scoped_refptr<VideoTrackInterface> track( - reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer)); - return stream->AddTrack(track); -} - -JOW(jboolean, MediaStream_nativeRemoveAudioTrack)( - JNIEnv* jni, jclass, jlong pointer, jlong j_audio_track_pointer) { - talk_base::scoped_refptr<MediaStreamInterface> stream( - reinterpret_cast<MediaStreamInterface*>(pointer)); - talk_base::scoped_refptr<AudioTrackInterface> track( - reinterpret_cast<AudioTrackInterface*>(j_audio_track_pointer)); - return stream->RemoveTrack(track); -} - -JOW(jboolean, MediaStream_nativeRemoveVideoTrack)( - 
JNIEnv* jni, jclass, jlong pointer, jlong j_video_track_pointer) { - talk_base::scoped_refptr<MediaStreamInterface> stream( - reinterpret_cast<MediaStreamInterface*>(pointer)); - talk_base::scoped_refptr<VideoTrackInterface> track( - reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer)); - return stream->RemoveTrack(track); -} - -JOW(jstring, MediaStream_nativeLabel)(JNIEnv* jni, jclass, jlong j_p) { - return JavaStringFromStdString( - jni, reinterpret_cast<MediaStreamInterface*>(j_p)->label()); -} - -JOW(void, MediaStream_free)(JNIEnv*, jclass, jlong j_p) { - reinterpret_cast<MediaStreamInterface*>(j_p)->Release(); -} - -JOW(jlong, PeerConnectionFactory_nativeCreateObserver)( - JNIEnv * jni, jclass, jobject j_observer) { - return (jlong)new PCOJava(jni, j_observer); -} - -#ifdef ANDROID -JOW(jboolean, PeerConnectionFactory_initializeAndroidGlobals)( - JNIEnv* jni, jclass, jobject context) { - CHECK(g_jvm, "JNI_OnLoad failed to run?"); - bool failure = false; - failure |= webrtc::VideoEngine::SetAndroidObjects(g_jvm, context); - failure |= webrtc::VoiceEngine::SetAndroidObjects(g_jvm, jni, context); - return !failure; -} -#endif // ANDROID - -JOW(jlong, PeerConnectionFactory_nativeCreatePeerConnectionFactory)( - JNIEnv* jni, jclass) { - talk_base::scoped_refptr<PeerConnectionFactoryInterface> factory( - webrtc::CreatePeerConnectionFactory()); - return (jlong)factory.release(); -} - -JOW(void, PeerConnectionFactory_freeFactory)(JNIEnv*, jclass, jlong j_p) { - reinterpret_cast<PeerConnectionFactoryInterface*>(j_p)->Release(); -} - -JOW(jlong, PeerConnectionFactory_nativeCreateLocalMediaStream)( - JNIEnv* jni, jclass, jlong native_factory, jstring label) { - talk_base::scoped_refptr<PeerConnectionFactoryInterface> factory( - reinterpret_cast<PeerConnectionFactoryInterface*>(native_factory)); - talk_base::scoped_refptr<MediaStreamInterface> stream( - factory->CreateLocalMediaStream(JavaToStdString(jni, label))); - return (jlong)stream.release(); -} - -JOW(jlong, PeerConnectionFactory_nativeCreateVideoSource)( - JNIEnv* jni, jclass, jlong native_factory, jlong native_capturer, - jobject j_constraints) { - talk_base::scoped_ptr<ConstraintsWrapper> constraints( - new ConstraintsWrapper(jni, j_constraints)); - talk_base::scoped_refptr<PeerConnectionFactoryInterface> factory( - reinterpret_cast<PeerConnectionFactoryInterface*>(native_factory)); - talk_base::scoped_refptr<VideoSourceInterface> source( - factory->CreateVideoSource( - reinterpret_cast<cricket::VideoCapturer*>(native_capturer), - constraints.get())); - return (jlong)source.release(); -} - -JOW(jlong, PeerConnectionFactory_nativeCreateVideoTrack)( - JNIEnv* jni, jclass, jlong native_factory, jstring id, - jlong native_source) { - talk_base::scoped_refptr<PeerConnectionFactoryInterface> factory( - reinterpret_cast<PeerConnectionFactoryInterface*>(native_factory)); - talk_base::scoped_refptr<VideoTrackInterface> track( - factory->CreateVideoTrack( - JavaToStdString(jni, id), - reinterpret_cast<VideoSourceInterface*>(native_source))); - return (jlong)track.release(); -} - -JOW(jlong, PeerConnectionFactory_nativeCreateAudioTrack)( - JNIEnv* jni, jclass, jlong native_factory, jstring id) { - talk_base::scoped_refptr<PeerConnectionFactoryInterface> factory( - reinterpret_cast<PeerConnectionFactoryInterface*>(native_factory)); - talk_base::scoped_refptr<AudioTrackInterface> track( - factory->CreateAudioTrack(JavaToStdString(jni, id), NULL)); - return (jlong)track.release(); -} - -static void JavaIceServersToJsepIceServers( - JNIEnv* 
jni, jobject j_ice_servers, - PeerConnectionInterface::IceServers* ice_servers) { - jclass list_class = GetObjectClass(jni, j_ice_servers); - jmethodID iterator_id = GetMethodID( - jni, list_class, "iterator", "()Ljava/util/Iterator;"); - jobject iterator = jni->CallObjectMethod(j_ice_servers, iterator_id); - CHECK_EXCEPTION(jni, "error during CallObjectMethod"); - jmethodID iterator_has_next = GetMethodID( - jni, GetObjectClass(jni, iterator), "hasNext", "()Z"); - jmethodID iterator_next = GetMethodID( - jni, GetObjectClass(jni, iterator), "next", "()Ljava/lang/Object;"); - while (jni->CallBooleanMethod(iterator, iterator_has_next)) { - CHECK_EXCEPTION(jni, "error during CallBooleanMethod"); - jobject j_ice_server = jni->CallObjectMethod(iterator, iterator_next); - CHECK_EXCEPTION(jni, "error during CallObjectMethod"); - jclass j_ice_server_class = GetObjectClass(jni, j_ice_server); - jfieldID j_ice_server_uri_id = - GetFieldID(jni, j_ice_server_class, "uri", "Ljava/lang/String;"); - jfieldID j_ice_server_username_id = - GetFieldID(jni, j_ice_server_class, "username", "Ljava/lang/String;"); - jfieldID j_ice_server_password_id = - GetFieldID(jni, j_ice_server_class, "password", "Ljava/lang/String;"); - jstring uri = reinterpret_cast<jstring>( - GetObjectField(jni, j_ice_server, j_ice_server_uri_id)); - jstring username = reinterpret_cast<jstring>( - GetObjectField(jni, j_ice_server, j_ice_server_username_id)); - jstring password = reinterpret_cast<jstring>( - GetObjectField(jni, j_ice_server, j_ice_server_password_id)); - PeerConnectionInterface::IceServer server; - server.uri = JavaToStdString(jni, uri); - server.username = JavaToStdString(jni, username); - server.password = JavaToStdString(jni, password); - ice_servers->push_back(server); - } - CHECK_EXCEPTION(jni, "error during CallBooleanMethod"); -} - -JOW(jlong, PeerConnectionFactory_nativeCreatePeerConnection)( - JNIEnv *jni, jclass, jlong factory, jobject j_ice_servers, - jobject j_constraints, jlong observer_p) { - talk_base::scoped_refptr<PeerConnectionFactoryInterface> f( - reinterpret_cast<PeerConnectionFactoryInterface*>(factory)); - PeerConnectionInterface::IceServers servers; - JavaIceServersToJsepIceServers(jni, j_ice_servers, &servers); - PCOJava* observer = reinterpret_cast<PCOJava*>(observer_p); - observer->SetConstraints(new ConstraintsWrapper(jni, j_constraints)); - talk_base::scoped_refptr<PeerConnectionInterface> pc(f->CreatePeerConnection( - servers, observer->constraints(), NULL, observer)); - return (jlong)pc.release(); -} - -static talk_base::scoped_refptr<PeerConnectionInterface> ExtractNativePC( - JNIEnv* jni, jobject j_pc) { - jfieldID native_pc_id = GetFieldID(jni, - GetObjectClass(jni, j_pc), "nativePeerConnection", "J"); - jlong j_p = GetLongField(jni, j_pc, native_pc_id); - return talk_base::scoped_refptr<PeerConnectionInterface>( - reinterpret_cast<PeerConnectionInterface*>(j_p)); -} - -JOW(jobject, PeerConnection_getLocalDescription)(JNIEnv* jni, jobject j_pc) { - const SessionDescriptionInterface* sdp = - ExtractNativePC(jni, j_pc)->local_description(); - return sdp ? JavaSdpFromNativeSdp(jni, sdp) : NULL; -} - -JOW(jobject, PeerConnection_getRemoteDescription)(JNIEnv* jni, jobject j_pc) { - const SessionDescriptionInterface* sdp = - ExtractNativePC(jni, j_pc)->remote_description(); - return sdp ? 
JavaSdpFromNativeSdp(jni, sdp) : NULL; -} - -JOW(jobject, PeerConnection_createDataChannel)( - JNIEnv* jni, jobject j_pc, jstring j_label, jobject j_init) { - DataChannelInit init = JavaDataChannelInitToNative(jni, j_init); - talk_base::scoped_refptr<DataChannelInterface> channel( - ExtractNativePC(jni, j_pc)->CreateDataChannel( - JavaToStdString(jni, j_label), &init)); - jclass j_data_channel_class = FindClass(jni, "org/webrtc/DataChannel"); - jmethodID j_data_channel_ctor = GetMethodID( - jni, j_data_channel_class, "<init>", "(J)V"); - jobject j_channel = jni->NewObject( - j_data_channel_class, j_data_channel_ctor, channel.get()); - CHECK_EXCEPTION(jni, "error during NewObject"); - // Channel is now owned by Java object, and will be freed from there. - channel->AddRef(); - return j_channel; -} - -JOW(void, PeerConnection_createOffer)( - JNIEnv* jni, jobject j_pc, jobject j_observer, jobject j_constraints) { - ConstraintsWrapper* constraints = - new ConstraintsWrapper(jni, j_constraints); - talk_base::scoped_refptr<CreateSdpObserverWrapper> observer( - new talk_base::RefCountedObject<CreateSdpObserverWrapper>( - jni, j_observer, constraints)); - ExtractNativePC(jni, j_pc)->CreateOffer(observer, constraints); -} - -JOW(void, PeerConnection_createAnswer)( - JNIEnv* jni, jobject j_pc, jobject j_observer, jobject j_constraints) { - ConstraintsWrapper* constraints = - new ConstraintsWrapper(jni, j_constraints); - talk_base::scoped_refptr<CreateSdpObserverWrapper> observer( - new talk_base::RefCountedObject<CreateSdpObserverWrapper>( - jni, j_observer, constraints)); - ExtractNativePC(jni, j_pc)->CreateAnswer(observer, constraints); -} - -// Helper to create a SessionDescriptionInterface from a SessionDescription. -static SessionDescriptionInterface* JavaSdpToNativeSdp( - JNIEnv* jni, jobject j_sdp) { - jfieldID j_type_id = GetFieldID( - jni, GetObjectClass(jni, j_sdp), "type", - "Lorg/webrtc/SessionDescription$Type;"); - jobject j_type = GetObjectField(jni, j_sdp, j_type_id); - jmethodID j_canonical_form_id = GetMethodID( - jni, GetObjectClass(jni, j_type), "canonicalForm", - "()Ljava/lang/String;"); - jstring j_type_string = (jstring)jni->CallObjectMethod( - j_type, j_canonical_form_id); - CHECK_EXCEPTION(jni, "error during CallObjectMethod"); - std::string std_type = JavaToStdString(jni, j_type_string); - - jfieldID j_description_id = GetFieldID( - jni, GetObjectClass(jni, j_sdp), "description", "Ljava/lang/String;"); - jstring j_description = (jstring)GetObjectField(jni, j_sdp, j_description_id); - std::string std_description = JavaToStdString(jni, j_description); - - return webrtc::CreateSessionDescription( - std_type, std_description, NULL); -} - -JOW(void, PeerConnection_setLocalDescription)( - JNIEnv* jni, jobject j_pc, - jobject j_observer, jobject j_sdp) { - talk_base::scoped_refptr<SetSdpObserverWrapper> observer( - new talk_base::RefCountedObject<SetSdpObserverWrapper>( - jni, j_observer, reinterpret_cast<ConstraintsWrapper*>(NULL))); - ExtractNativePC(jni, j_pc)->SetLocalDescription( - observer, JavaSdpToNativeSdp(jni, j_sdp)); -} - -JOW(void, PeerConnection_setRemoteDescription)( - JNIEnv* jni, jobject j_pc, - jobject j_observer, jobject j_sdp) { - talk_base::scoped_refptr<SetSdpObserverWrapper> observer( - new talk_base::RefCountedObject<SetSdpObserverWrapper>( - jni, j_observer, reinterpret_cast<ConstraintsWrapper*>(NULL))); - ExtractNativePC(jni, j_pc)->SetRemoteDescription( - observer, JavaSdpToNativeSdp(jni, j_sdp)); -} - -JOW(jboolean, PeerConnection_updateIce)( - JNIEnv* 
jni, jobject j_pc, jobject j_ice_servers, jobject j_constraints) { - PeerConnectionInterface::IceServers ice_servers; - JavaIceServersToJsepIceServers(jni, j_ice_servers, &ice_servers); - talk_base::scoped_ptr<ConstraintsWrapper> constraints( - new ConstraintsWrapper(jni, j_constraints)); - return ExtractNativePC(jni, j_pc)->UpdateIce(ice_servers, constraints.get()); -} - -JOW(jboolean, PeerConnection_nativeAddIceCandidate)( - JNIEnv* jni, jobject j_pc, jstring j_sdp_mid, - jint j_sdp_mline_index, jstring j_candidate_sdp) { - std::string sdp_mid = JavaToStdString(jni, j_sdp_mid); - std::string sdp = JavaToStdString(jni, j_candidate_sdp); - talk_base::scoped_ptr<IceCandidateInterface> candidate( - webrtc::CreateIceCandidate(sdp_mid, j_sdp_mline_index, sdp, NULL)); - return ExtractNativePC(jni, j_pc)->AddIceCandidate(candidate.get()); -} - -JOW(jboolean, PeerConnection_nativeAddLocalStream)( - JNIEnv* jni, jobject j_pc, jlong native_stream, jobject j_constraints) { - talk_base::scoped_ptr<ConstraintsWrapper> constraints( - new ConstraintsWrapper(jni, j_constraints)); - return ExtractNativePC(jni, j_pc)->AddStream( - reinterpret_cast<MediaStreamInterface*>(native_stream), - constraints.get()); -} - -JOW(void, PeerConnection_nativeRemoveLocalStream)( - JNIEnv* jni, jobject j_pc, jlong native_stream) { - ExtractNativePC(jni, j_pc)->RemoveStream( - reinterpret_cast<MediaStreamInterface*>(native_stream)); -} - -JOW(bool, PeerConnection_nativeGetStats)( - JNIEnv* jni, jobject j_pc, jobject j_observer, jlong native_track) { - talk_base::scoped_refptr<StatsObserverWrapper> observer( - new talk_base::RefCountedObject<StatsObserverWrapper>(jni, j_observer)); - return ExtractNativePC(jni, j_pc)->GetStats( - observer, reinterpret_cast<MediaStreamTrackInterface*>(native_track)); -} - -JOW(jobject, PeerConnection_signalingState)(JNIEnv* jni, jobject j_pc) { - PeerConnectionInterface::SignalingState state = - ExtractNativePC(jni, j_pc)->signaling_state(); - return JavaEnumFromIndex(jni, "PeerConnection$SignalingState", state); -} - -JOW(jobject, PeerConnection_iceConnectionState)(JNIEnv* jni, jobject j_pc) { - PeerConnectionInterface::IceConnectionState state = - ExtractNativePC(jni, j_pc)->ice_connection_state(); - return JavaEnumFromIndex(jni, "PeerConnection$IceConnectionState", state); -} - -JOW(jobject, PeerGathering_iceGatheringState)(JNIEnv* jni, jobject j_pc) { - PeerConnectionInterface::IceGatheringState state = - ExtractNativePC(jni, j_pc)->ice_gathering_state(); - return JavaEnumFromIndex(jni, "PeerGathering$IceGatheringState", state); -} - -JOW(void, PeerConnection_close)(JNIEnv* jni, jobject j_pc) { - ExtractNativePC(jni, j_pc)->Close(); - return; -} - -JOW(jobject, MediaSource_nativeState)(JNIEnv* jni, jclass, jlong j_p) { - talk_base::scoped_refptr<MediaSourceInterface> p( - reinterpret_cast<MediaSourceInterface*>(j_p)); - return JavaEnumFromIndex(jni, "MediaSource$State", p->state()); -} - -JOW(jlong, VideoCapturer_nativeCreateVideoCapturer)( - JNIEnv* jni, jclass, jstring j_device_name) { - std::string device_name = JavaToStdString(jni, j_device_name); - talk_base::scoped_ptr<cricket::DeviceManagerInterface> device_manager( - cricket::DeviceManagerFactory::Create()); - CHECK(device_manager->Init(), "DeviceManager::Init() failed"); - cricket::Device device; - if (!device_manager->GetVideoCaptureDevice(device_name, &device)) { - LOG(LS_ERROR) << "GetVideoCaptureDevice failed"; - return 0; - } - talk_base::scoped_ptr<cricket::VideoCapturer> capturer( - 
device_manager->CreateVideoCapturer(device)); - return (jlong)capturer.release(); -} - -JOW(jlong, VideoRenderer_nativeCreateGuiVideoRenderer)( - JNIEnv* jni, jclass, int x, int y) { - talk_base::scoped_ptr<VideoRendererWrapper> renderer( - VideoRendererWrapper::Create( - cricket::VideoRendererFactory::CreateGuiVideoRenderer(x, y))); - return (jlong)renderer.release(); -} - -JOW(jlong, VideoRenderer_nativeWrapVideoRenderer)( - JNIEnv* jni, jclass, jobject j_callbacks) { - talk_base::scoped_ptr<JavaVideoRendererWrapper> renderer( - new JavaVideoRendererWrapper(jni, j_callbacks)); - return (jlong)renderer.release(); -} - -JOW(jstring, MediaStreamTrack_nativeId)(JNIEnv* jni, jclass, jlong j_p) { - talk_base::scoped_refptr<MediaStreamTrackInterface> p( - reinterpret_cast<MediaStreamTrackInterface*>(j_p)); - return JavaStringFromStdString(jni, p->id()); -} - -JOW(jstring, MediaStreamTrack_nativeKind)(JNIEnv* jni, jclass, jlong j_p) { - talk_base::scoped_refptr<MediaStreamTrackInterface> p( - reinterpret_cast<MediaStreamTrackInterface*>(j_p)); - return JavaStringFromStdString(jni, p->kind()); -} - -JOW(jboolean, MediaStreamTrack_nativeEnabled)(JNIEnv* jni, jclass, jlong j_p) { - talk_base::scoped_refptr<MediaStreamTrackInterface> p( - reinterpret_cast<MediaStreamTrackInterface*>(j_p)); - return p->enabled(); -} - -JOW(jobject, MediaStreamTrack_nativeState)(JNIEnv* jni, jclass, jlong j_p) { - talk_base::scoped_refptr<MediaStreamTrackInterface> p( - reinterpret_cast<MediaStreamTrackInterface*>(j_p)); - return JavaEnumFromIndex(jni, "MediaStreamTrack$State", p->state()); -} - -JOW(jboolean, MediaStreamTrack_nativeSetState)( - JNIEnv* jni, jclass, jlong j_p, jint j_new_state) { - talk_base::scoped_refptr<MediaStreamTrackInterface> p( - reinterpret_cast<MediaStreamTrackInterface*>(j_p)); - MediaStreamTrackInterface::TrackState new_state = - (MediaStreamTrackInterface::TrackState)j_new_state; - return p->set_state(new_state); -} - -JOW(jboolean, MediaStreamTrack_nativeSetEnabled)( - JNIEnv* jni, jclass, jlong j_p, jboolean enabled) { - talk_base::scoped_refptr<MediaStreamTrackInterface> p( - reinterpret_cast<MediaStreamTrackInterface*>(j_p)); - return p->set_enabled(enabled); -} - -JOW(void, VideoTrack_nativeAddRenderer)( - JNIEnv* jni, jclass, - jlong j_video_track_pointer, jlong j_renderer_pointer) { - talk_base::scoped_refptr<VideoTrackInterface> track( - reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer)); - track->AddRenderer( - reinterpret_cast<VideoRendererInterface*>(j_renderer_pointer)); -} - -JOW(void, VideoTrack_nativeRemoveRenderer)( - JNIEnv* jni, jclass, - jlong j_video_track_pointer, jlong j_renderer_pointer) { - talk_base::scoped_refptr<VideoTrackInterface> track( - reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer)); - track->RemoveRenderer( - reinterpret_cast<VideoRendererInterface*>(j_renderer_pointer)); -} diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/AudioSource.java b/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/AudioSource.java deleted file mode 100644 index 8b7a8f7dc40..00000000000 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/AudioSource.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * libjingle - * Copyright 2013, Google Inc. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. 
Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * 3. The name of the author may not be used to endorse or promote products - * derived from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED - * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO - * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, - * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; - * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, - * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR - * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF - * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - -package org.webrtc; - -/** - * Java wrapper for a C++ AudioSourceInterface. Used as the source for one or - * more {@code AudioTrack} objects. - */ -public class AudioSource extends MediaSource { - public AudioSource(long nativeSource) { - super(nativeSource); - } -} diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/AudioTrack.java b/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/AudioTrack.java deleted file mode 100644 index 35d7c41f2bc..00000000000 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/AudioTrack.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * libjingle - * Copyright 2013, Google Inc. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * 3. The name of the author may not be used to endorse or promote products - * derived from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED - * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO - * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, - * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; - * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, - * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR - * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF - * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- */ - -package org.webrtc; - -/** Java wrapper for a C++ AudioTrackInterface */ -public class AudioTrack extends MediaStreamTrack { - public AudioTrack(long nativeTrack) { - super(nativeTrack); - } -} diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/DataChannel.java b/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/DataChannel.java deleted file mode 100644 index d20e42d9834..00000000000 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/DataChannel.java +++ /dev/null @@ -1,141 +0,0 @@ -/* - * libjingle - * Copyright 2013, Google Inc. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * 3. The name of the author may not be used to endorse or promote products - * derived from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED - * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO - * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, - * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; - * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, - * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR - * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF - * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - -package org.webrtc; - -import java.nio.ByteBuffer; - -/** Java wrapper for a C++ DataChannelInterface. */ -public class DataChannel { - /** Java wrapper for WebIDL RTCDataChannel. */ - public static class Init { - public boolean ordered = true; - // Optional unsigned short in WebIDL, -1 means unspecified. - public int maxRetransmitTimeMs = -1; - // Optional unsigned short in WebIDL, -1 means unspecified. - public int maxRetransmits = -1; - public String protocol = ""; - public boolean negotiated = true; - // Optional unsigned short in WebIDL, -1 means unspecified. - public int id = -1; - - public Init() {} - - // Called only by native code. - private Init( - boolean ordered, int maxRetransmitTimeMs, int maxRetransmits, - String protocol, boolean negotiated, int id) { - this.ordered = ordered; - this.maxRetransmitTimeMs = maxRetransmitTimeMs; - this.maxRetransmits = maxRetransmits; - this.protocol = protocol; - this.negotiated = negotiated; - this.id = id; - } - } - - /** Java version of C++ DataBuffer. The atom of data in a DataChannel. */ - public static class Buffer { - /** The underlying data. */ - public final ByteBuffer data; - - /** - * Indicates whether |data| contains UTF-8 text or "binary data" - * (i.e. anything else). - */ - public final boolean binary; - - public Buffer(ByteBuffer data, boolean binary) { - this.data = data; - this.binary = binary; - } - } - - /** Java version of C++ DataChannelObserver. 
*/ - public interface Observer { - /** The data channel state has changed. */ - public void onStateChange(); - /** - * A data buffer was successfully received. NOTE: |buffer.data| will be - * freed once this function returns so callers who want to use the data - * asynchronously must make sure to copy it first. - */ - public void onMessage(Buffer buffer); - } - - /** Keep in sync with DataChannelInterface::DataState. */ - public enum State { CONNECTING, OPEN, CLOSING, CLOSED }; - - private final long nativeDataChannel; - private long nativeObserver; - - public DataChannel(long nativeDataChannel) { - this.nativeDataChannel = nativeDataChannel; - } - - /** Register |observer|, replacing any previously-registered observer. */ - public void registerObserver(Observer observer) { - if (nativeObserver != 0) { - unregisterObserverNative(nativeObserver); - } - nativeObserver = registerObserverNative(observer); - } - private native long registerObserverNative(Observer observer); - - /** Unregister the (only) observer. */ - public void unregisterObserver() { - unregisterObserverNative(nativeObserver); - } - private native void unregisterObserverNative(long nativeObserver); - - public native String label(); - - public native State state(); - - /** - * Return the number of bytes of application data (UTF-8 text and binary data) - * that have been queued using SendBuffer but have not yet been transmitted - * to the network. - */ - public native long bufferedAmount(); - - /** Close the channel. */ - public native void close(); - - /** Send |data| to the remote peer; return success. */ - public boolean send(Buffer buffer) { - // TODO(fischman): this could be cleverer about avoiding copies if the - // ByteBuffer is direct and/or is backed by an array. - byte[] data = new byte[buffer.data.remaining()]; - buffer.data.get(data); - return sendNative(data, buffer.binary); - } - private native boolean sendNative(byte[] data, boolean binary); - - /** Dispose of native resources attached to this channel. */ - public native void dispose(); -}; diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/Logging.java b/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/Logging.java deleted file mode 100644 index f6918b8bf82..00000000000 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/Logging.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * libjingle - * Copyright 2013, Google Inc. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * 3. The name of the author may not be used to endorse or promote products - * derived from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED - * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
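The deleted DataChannel.java above defines the whole channel surface: an Init holder mirroring the WebIDL RTCDataChannel options, an Observer with onStateChange() and onMessage(Buffer), and send()/state()/bufferedAmount()/close()/dispose() on the channel itself. A minimal usage sketch assembled only from those signatures; the channel would normally come from PeerConnection.createDataChannel(), and the class name, payload and observer bodies here are illustrative:

    import java.nio.ByteBuffer;
    import org.webrtc.DataChannel;

    // Hypothetical helper: attaches an observer and sends a greeting once the channel opens.
    class DataChannelExample {
      static void attach(final DataChannel channel) {
        channel.registerObserver(new DataChannel.Observer() {
          public void onStateChange() {
            // state() is one of CONNECTING, OPEN, CLOSING, CLOSED.
            if (channel.state() == DataChannel.State.OPEN) {
              byte[] utf8 = "hello".getBytes();  // illustrative payload
              channel.send(new DataChannel.Buffer(ByteBuffer.wrap(utf8), false));
            }
          }
          public void onMessage(DataChannel.Buffer buffer) {
            // |buffer.data| is freed once this callback returns, so copy it before
            // using it asynchronously, as the interface comment above warns.
            ByteBuffer copy = ByteBuffer.allocate(buffer.data.remaining());
            copy.put(buffer.data);
          }
        });
      }
    }

Note that send() copies the ByteBuffer contents into a byte[] before crossing the JNI boundary, so the caller keeps ownership of its buffer.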
IN NO - * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, - * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; - * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, - * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR - * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF - * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - -package org.webrtc; - -import java.util.EnumSet; - -/** Java wrapper for WebRTC & libjingle logging. */ -public class Logging { - static { - System.loadLibrary("jingle_peerconnection_so"); - } - - // Keep in sync with webrtc/common_types.h:TraceLevel. - public enum TraceLevel { - TRACE_NONE(0x0000), - TRACE_STATEINFO(0x0001), - TRACE_WARNING(0x0002), - TRACE_ERROR(0x0004), - TRACE_CRITICAL(0x0008), - TRACE_APICALL(0x0010), - TRACE_DEFAULT(0x00ff), - TRACE_MODULECALL(0x0020), - TRACE_MEMORY(0x0100), - TRACE_TIMER(0x0200), - TRACE_STREAM(0x0400), - TRACE_DEBUG(0x0800), - TRACE_INFO(0x1000), - TRACE_TERSEINFO(0x2000), - TRACE_ALL(0xffff); - - public final int level; - TraceLevel(int level) { - this.level = level; - } - }; - - // Keep in sync with talk/base/logging.h:LoggingSeverity. - public enum Severity { - LS_SENSITIVE, LS_VERBOSE, LS_INFO, LS_WARNING, LS_ERROR, - }; - - - // Enable tracing to |path| at |levels| and |severity|. - public static void enableTracing( - String path, EnumSet<TraceLevel> levels, Severity severity) { - int nativeLevel = 0; - for (TraceLevel level : levels) { - nativeLevel |= level.level; - } - nativeEnableTracing(path, nativeLevel, severity.ordinal()); - } - - private static native void nativeEnableTracing( - String path, int nativeLevels, int nativeSeverity); -} diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/MediaConstraints.java b/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/MediaConstraints.java deleted file mode 100644 index ef303019c1b..00000000000 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/MediaConstraints.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * libjingle - * Copyright 2013, Google Inc. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * 3. The name of the author may not be used to endorse or promote products - * derived from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED - * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
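Logging.java above pairs with the Logging_nativeEnableTracing JNI shown earlier in this diff: the Java side ORs the chosen TraceLevel bits into a single mask and passes the Severity ordinal down. A small sketch using only the members listed above; the trace-file path is an illustrative placeholder:

    import java.util.EnumSet;
    import org.webrtc.Logging;

    class TracingExample {
      static void enable() {
        // The levels are combined client-side; native code receives the OR'd bitmask
        // and the ordinal of the chosen Severity.
        Logging.enableTracing(
            "/sdcard/webrtc-trace.txt",                    // illustrative path
            EnumSet.of(Logging.TraceLevel.TRACE_DEFAULT),
            Logging.Severity.LS_INFO);
      }
    }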
IN NO - * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, - * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; - * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, - * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR - * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF - * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - -package org.webrtc; - -import java.util.LinkedList; -import java.util.List; - -/** - * Description of media constraints for {@code MediaStream} and - * {@code PeerConnection}. - */ -public class MediaConstraints { - /** Simple String key/value pair. */ - public static class KeyValuePair { - private final String key; - private final String value; - - public KeyValuePair(String key, String value) { - this.key = key; - this.value = value; - } - - public String getKey() { - return key; - } - - public String getValue() { - return value; - } - - public String toString() { - return key + ": " + value; - } - } - - - public final List<KeyValuePair> mandatory; - public final List<KeyValuePair> optional; - - public MediaConstraints() { - mandatory = new LinkedList<KeyValuePair>(); - optional = new LinkedList<KeyValuePair>(); - } - - private static String stringifyKeyValuePairList(List<KeyValuePair> list) { - StringBuilder builder = new StringBuilder("["); - for (KeyValuePair pair : list) { - if (builder.length() > 1) { - builder.append(", "); - } - builder.append(pair.toString()); - } - return builder.append("]").toString(); - } - - public String toString() { - return "mandatory: " + stringifyKeyValuePairList(mandatory) + - ", optional: " + stringifyKeyValuePairList(optional); - } -} diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/MediaStream.java b/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/MediaStream.java deleted file mode 100644 index 431c5615827..00000000000 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/MediaStream.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * libjingle - * Copyright 2013, Google Inc. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * 3. The name of the author may not be used to endorse or promote products - * derived from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED - * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
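MediaConstraints above is deliberately thin: two ordered LinkedLists of string key/value pairs that are handed to the native ConstraintsWrapper untouched. A sketch of building one; the key names are illustrative and not taken from this diff:

    import org.webrtc.MediaConstraints;

    class ConstraintsExample {
      static MediaConstraints build() {
        MediaConstraints constraints = new MediaConstraints();
        // mandatory and optional are plain lists; no validation happens at this layer.
        constraints.mandatory.add(
            new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));   // illustrative key
        constraints.optional.add(
            new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));  // illustrative key
        return constraints;
      }
    }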
IN NO - * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, - * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; - * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, - * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR - * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF - * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - -package org.webrtc; - -import java.util.LinkedList; -import java.util.List; - -/** Java wrapper for a C++ MediaStreamInterface. */ -public class MediaStream { - public final List<AudioTrack> audioTracks; - public final List<VideoTrack> videoTracks; - // Package-protected for LocalMediaStream and PeerConnection. - final long nativeStream; - - public MediaStream(long nativeStream) { - audioTracks = new LinkedList<AudioTrack>(); - videoTracks = new LinkedList<VideoTrack>(); - this.nativeStream = nativeStream; - } - - public boolean addTrack(AudioTrack track) { - if (nativeAddAudioTrack(nativeStream, track.nativeTrack)) { - audioTracks.add(track); - return true; - } - return false; - } - - public boolean addTrack(VideoTrack track) { - if (nativeAddVideoTrack(nativeStream, track.nativeTrack)) { - videoTracks.add(track); - return true; - } - return false; - } - - public boolean removeTrack(AudioTrack track) { - if (nativeRemoveAudioTrack(nativeStream, track.nativeTrack)) { - audioTracks.remove(track); - return true; - } - return false; - } - - public boolean removeTrack(VideoTrack track) { - if (nativeRemoveVideoTrack(nativeStream, track.nativeTrack)) { - videoTracks.remove(track); - return true; - } - return false; - } - - public void dispose() { - for (AudioTrack track : audioTracks) { - track.dispose(); - } - audioTracks.clear(); - for (VideoTrack track : videoTracks) { - track.dispose(); - } - videoTracks.clear(); - free(nativeStream); - } - - public String label() { - return nativeLabel(nativeStream); - } - - public String toString() { - return "[" + label() + ":A=" + audioTracks.size() + - ":V=" + videoTracks.size() + "]"; - } - - private static native boolean nativeAddAudioTrack( - long nativeStream, long nativeAudioTrack); - - private static native boolean nativeAddVideoTrack( - long nativeStream, long nativeVideoTrack); - - private static native boolean nativeRemoveAudioTrack( - long nativeStream, long nativeAudioTrack); - - private static native boolean nativeRemoveVideoTrack( - long nativeStream, long nativeVideoTrack); - - private static native String nativeLabel(long nativeStream); - - private static native void free(long nativeStream); -} diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/PeerConnection.java b/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/PeerConnection.java deleted file mode 100644 index 0a0a7f636e8..00000000000 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/PeerConnection.java +++ /dev/null @@ -1,200 +0,0 @@ -/* - * libjingle - * Copyright 2013, Google Inc. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * 2. 
Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * 3. The name of the author may not be used to endorse or promote products - * derived from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED - * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO - * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, - * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; - * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, - * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR - * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF - * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - - -package org.webrtc; - -import java.util.LinkedList; -import java.util.List; - -/** - * Java-land version of the PeerConnection APIs; wraps the C++ API - * http://www.webrtc.org/reference/native-apis, which in turn is inspired by the - * JS APIs: http://dev.w3.org/2011/webrtc/editor/webrtc.html and - * http://www.w3.org/TR/mediacapture-streams/ - */ -public class PeerConnection { - static { - System.loadLibrary("jingle_peerconnection_so"); - } - - /** Tracks PeerConnectionInterface::IceGatheringState */ - public enum IceGatheringState { NEW, GATHERING, COMPLETE }; - - - /** Tracks PeerConnectionInterface::IceConnectionState */ - public enum IceConnectionState { - NEW, CHECKING, CONNECTED, COMPLETED, FAILED, DISCONNECTED, CLOSED - }; - - /** Tracks PeerConnectionInterface::SignalingState */ - public enum SignalingState { - STABLE, HAVE_LOCAL_OFFER, HAVE_LOCAL_PRANSWER, HAVE_REMOTE_OFFER, - HAVE_REMOTE_PRANSWER, CLOSED - }; - - /** Java version of PeerConnectionObserver. */ - public static interface Observer { - /** Triggered when the SignalingState changes. */ - public void onSignalingChange(SignalingState newState); - - /** Triggered when the IceConnectionState changes. */ - public void onIceConnectionChange(IceConnectionState newState); - - /** Triggered when the IceGatheringState changes. */ - public void onIceGatheringChange(IceGatheringState newState); - - /** Triggered when a new ICE candidate has been found. */ - public void onIceCandidate(IceCandidate candidate); - - /** Triggered on any error. */ - public void onError(); - - /** Triggered when media is received on a new stream from remote peer. */ - public void onAddStream(MediaStream stream); - - /** Triggered when a remote peer close a stream. */ - public void onRemoveStream(MediaStream stream); - - /** Triggered when a remote peer opens a DataChannel. */ - public void onDataChannel(DataChannel dataChannel); - } - - /** Java version of PeerConnectionInterface.IceServer. */ - public static class IceServer { - public final String uri; - public final String username; - public final String password; - - /** Convenience constructor for STUN servers. 
*/ - public IceServer(String uri) { - this(uri, "", ""); - } - - public IceServer(String uri, String username, String password) { - this.uri = uri; - this.username = username; - this.password = password; - } - - public String toString() { - return uri + "[" + username + ":" + password + "]"; - } - } - - private final List<MediaStream> localStreams; - private final long nativePeerConnection; - private final long nativeObserver; - - PeerConnection(long nativePeerConnection, long nativeObserver) { - this.nativePeerConnection = nativePeerConnection; - this.nativeObserver = nativeObserver; - localStreams = new LinkedList<MediaStream>(); - } - - // JsepInterface. - public native SessionDescription getLocalDescription(); - - public native SessionDescription getRemoteDescription(); - - public native DataChannel createDataChannel( - String label, DataChannel.Init init); - - public native void createOffer( - SdpObserver observer, MediaConstraints constraints); - - public native void createAnswer( - SdpObserver observer, MediaConstraints constraints); - - public native void setLocalDescription( - SdpObserver observer, SessionDescription sdp); - - public native void setRemoteDescription( - SdpObserver observer, SessionDescription sdp); - - public native boolean updateIce( - List<IceServer> iceServers, MediaConstraints constraints); - - public boolean addIceCandidate(IceCandidate candidate) { - return nativeAddIceCandidate( - candidate.sdpMid, candidate.sdpMLineIndex, candidate.sdp); - } - - public boolean addStream( - MediaStream stream, MediaConstraints constraints) { - boolean ret = nativeAddLocalStream(stream.nativeStream, constraints); - if (!ret) { - return false; - } - localStreams.add(stream); - return true; - } - - public void removeStream(MediaStream stream) { - nativeRemoveLocalStream(stream.nativeStream); - localStreams.remove(stream); - } - - public boolean getStats(StatsObserver observer, MediaStreamTrack track) { - return nativeGetStats(observer, (track == null) ? 0 : track.nativeTrack); - } - - // TODO(fischman): add support for DTMF-related methods once that API - // stabilizes. - public native SignalingState signalingState(); - - public native IceConnectionState iceConnectionState(); - - public native IceGatheringState iceGatheringState(); - - public native void close(); - - public void dispose() { - close(); - for (MediaStream stream : localStreams) { - stream.dispose(); - } - localStreams.clear(); - freePeerConnection(nativePeerConnection); - freeObserver(nativeObserver); - } - - private static native void freePeerConnection(long nativePeerConnection); - - private static native void freeObserver(long nativeObserver); - - private native boolean nativeAddIceCandidate( - String sdpMid, int sdpMLineIndex, String iceCandidateSdp); - - private native boolean nativeAddLocalStream( - long nativeStream, MediaConstraints constraints); - - private native void nativeRemoveLocalStream(long nativeStream); - - private native boolean nativeGetStats( - StatsObserver observer, long nativeTrack); -} diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/PeerConnectionFactory.java b/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/PeerConnectionFactory.java deleted file mode 100644 index 03ed03f8e80..00000000000 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/PeerConnectionFactory.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * libjingle - * Copyright 2013, Google Inc. 
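PeerConnection.Observer, deleted above, is the Java mirror of the native PeerConnectionObserver: every event arrives as an already-wrapped Java object on a plain callback. A skeleton implementation covering exactly the callbacks listed in that hunk; the comments describe typical use and are not taken from the diff:

    import org.webrtc.DataChannel;
    import org.webrtc.IceCandidate;
    import org.webrtc.MediaStream;
    import org.webrtc.PeerConnection;

    class ObserverSkeleton implements PeerConnection.Observer {
      public void onSignalingChange(PeerConnection.SignalingState newState) {}
      public void onIceConnectionChange(PeerConnection.IceConnectionState newState) {}
      public void onIceGatheringChange(PeerConnection.IceGatheringState newState) {}
      public void onIceCandidate(IceCandidate candidate) {
        // Usually forwarded to the remote peer over the application's signaling channel.
      }
      public void onError() {}
      public void onAddStream(MediaStream stream) {
        // Remote audio/video tracks arrive wrapped in a MediaStream.
      }
      public void onRemoveStream(MediaStream stream) {}
      public void onDataChannel(DataChannel dataChannel) {
        // A remotely-opened channel still needs registerObserver() before use.
      }
    }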
- * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * 3. The name of the author may not be used to endorse or promote products - * derived from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED - * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO - * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, - * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; - * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, - * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR - * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF - * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - - -package org.webrtc; - -import java.util.List; - -/** - * Java wrapper for a C++ PeerConnectionFactoryInterface. Main entry point to - * the PeerConnection API for clients. - */ -public class PeerConnectionFactory { - static { - System.loadLibrary("jingle_peerconnection_so"); - } - - private final long nativeFactory; - - // |context| is an android.content.Context object, but we keep it untyped here - // to allow building on non-Android platforms. 
- public static native boolean initializeAndroidGlobals(Object context); - - public PeerConnectionFactory() { - nativeFactory = nativeCreatePeerConnectionFactory(); - if (nativeFactory == 0) { - throw new RuntimeException("Failed to initialize PeerConnectionFactory!"); - } - } - - - public PeerConnection createPeerConnection( - List<PeerConnection.IceServer> iceServers, - MediaConstraints constraints, - PeerConnection.Observer observer) { - long nativeObserver = nativeCreateObserver(observer); - if (nativeObserver == 0) { - return null; - } - long nativePeerConnection = nativeCreatePeerConnection( - nativeFactory, iceServers, constraints, nativeObserver); - if (nativePeerConnection == 0) { - return null; - } - return new PeerConnection(nativePeerConnection, nativeObserver); - } - - public MediaStream createLocalMediaStream(String label) { - return new MediaStream( - nativeCreateLocalMediaStream(nativeFactory, label)); - } - - public VideoSource createVideoSource( - VideoCapturer capturer, MediaConstraints constraints) { - return new VideoSource(nativeCreateVideoSource( - nativeFactory, capturer.nativeVideoCapturer, constraints)); - } - - public VideoTrack createVideoTrack(String id, VideoSource source) { - return new VideoTrack(nativeCreateVideoTrack( - nativeFactory, id, source.nativeSource)); - } - - public AudioTrack createAudioTrack(String id) { - return new AudioTrack(nativeCreateAudioTrack(nativeFactory, id)); - } - - public void dispose() { - freeFactory(nativeFactory); - } - - private static native long nativeCreatePeerConnectionFactory(); - - private static native long nativeCreateObserver( - PeerConnection.Observer observer); - - private static native long nativeCreatePeerConnection( - long nativeFactory, List<PeerConnection.IceServer> iceServers, - MediaConstraints constraints, long nativeObserver); - - private static native long nativeCreateLocalMediaStream( - long nativeFactory, String label); - - private static native long nativeCreateVideoSource( - long nativeFactory, long nativeVideoCapturer, - MediaConstraints constraints); - - private static native long nativeCreateVideoTrack( - long nativeFactory, String id, long nativeVideoSource); - - private static native long nativeCreateAudioTrack( - long nativeFactory, String id); - - private static native void freeFactory(long nativeFactory); -} diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/SdpObserver.java b/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/SdpObserver.java deleted file mode 100644 index c9eb14a02bb..00000000000 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/SdpObserver.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * libjingle - * Copyright 2013, Google Inc. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * 3. The name of the author may not be used to endorse or promote products - * derived from this software without specific prior written permission. 
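PeerConnectionFactory.java above is the entry point that ties the rest of these deleted wrappers together: it loads jingle_peerconnection_so, creates the native factory in its constructor, and hands out streams, sources, tracks and peer connections. A condensed end-to-end sketch using only methods visible in this diff; the device name, labels, track ids, STUN URI and Android context are illustrative placeholders:

    import java.util.LinkedList;
    import java.util.List;
    import org.webrtc.*;

    class CallSetupExample {
      static PeerConnection start(Object androidContext, PeerConnection.Observer observer,
                                  SdpObserver sdpObserver) {
        // On Android this must succeed before the factory is constructed.
        PeerConnectionFactory.initializeAndroidGlobals(androidContext);
        PeerConnectionFactory factory = new PeerConnectionFactory();

        MediaConstraints constraints = new MediaConstraints();

        // Local media pipeline: capturer -> source -> track -> stream.
        VideoCapturer capturer = VideoCapturer.create("front-camera");  // may return null if unknown
        VideoSource videoSource = factory.createVideoSource(capturer, constraints);
        MediaStream stream = factory.createLocalMediaStream("localStream");
        stream.addTrack(factory.createVideoTrack("video0", videoSource));
        stream.addTrack(factory.createAudioTrack("audio0"));

        List<PeerConnection.IceServer> iceServers = new LinkedList<PeerConnection.IceServer>();
        iceServers.add(new PeerConnection.IceServer("stun:stun.example.org"));  // illustrative URI

        PeerConnection pc = factory.createPeerConnection(iceServers, constraints, observer);
        pc.addStream(stream, constraints);
        pc.createOffer(sdpObserver, constraints);
        return pc;
      }
    }

Teardown runs in the opposite order: PeerConnection.dispose() closes the connection and disposes its local streams, after which PeerConnectionFactory.dispose() frees the native factory.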
- * - * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED - * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO - * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, - * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; - * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, - * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR - * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF - * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - -package org.webrtc; - -/** Interface for observing SDP-related events. */ -public interface SdpObserver { - /** Called on success of Create{Offer,Answer}(). */ - public void onCreateSuccess(SessionDescription sdp); - - /** Called on success of Set{Local,Remote}Description(). */ - public void onSetSuccess(); - - /** Called on error of Create{Offer,Answer}(). */ - public void onCreateFailure(String error); - - /** Called on error of Set{Local,Remote}Description(). */ - public void onSetFailure(String error); -} diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/SessionDescription.java b/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/SessionDescription.java deleted file mode 100644 index 982db8fc2f3..00000000000 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/SessionDescription.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * libjingle - * Copyright 2013, Google Inc. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * 3. The name of the author may not be used to endorse or promote products - * derived from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED - * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO - * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, - * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; - * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, - * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR - * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF - * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - - -package org.webrtc; - -/** - * Description of an RFC 4566 Session. - * SDPs are passed as serialized Strings in Java-land and are materialized - * to SessionDescriptionInterface as appropriate in the JNI layer. - */ -public class SessionDescription { - /** Java-land enum version of SessionDescriptionInterface's type() string. 
*/ - public static enum Type { - OFFER, PRANSWER, ANSWER; - - public String canonicalForm() { - return name().toLowerCase(); - } - - public static Type fromCanonicalForm(String canonical) { - return Type.valueOf(Type.class, canonical.toUpperCase()); - } - } - - public final Type type; - public final String description; - - public SessionDescription(Type type, String description) { - this.type = type; - this.description = description; - } -} diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/StatsObserver.java b/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/StatsObserver.java deleted file mode 100644 index e61d8f74d13..00000000000 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/StatsObserver.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * libjingle - * Copyright 2013, Google Inc. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * 3. The name of the author may not be used to endorse or promote products - * derived from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED - * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO - * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, - * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; - * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, - * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR - * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF - * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - -package org.webrtc; - -/** Interface for observing Stats reports (see webrtc::StatsObservers). */ -public interface StatsObserver { - /** Called when the reports are ready.*/ - public void onComplete(StatsReport[] reports); -} diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/StatsReport.java b/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/StatsReport.java deleted file mode 100644 index 8285ba23250..00000000000 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/StatsReport.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * libjingle - * Copyright 2013, Google Inc. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * 3. 
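SdpObserver and SessionDescription above complete the offer/answer plumbing: SDP travels through Java as a (Type, String) pair and is only materialized into a SessionDescriptionInterface inside the JNI layer (JavaSdpToNativeSdp earlier in this diff). A sketch of the observer half, built only from the interfaces shown here; the wiring back into PeerConnection is illustrative:

    import org.webrtc.PeerConnection;
    import org.webrtc.SdpObserver;
    import org.webrtc.SessionDescription;

    class OfferObserver implements SdpObserver {
      private final PeerConnection pc;

      OfferObserver(PeerConnection pc) {
        this.pc = pc;
      }

      public void onCreateSuccess(SessionDescription sdp) {
        // sdp.type.canonicalForm() is "offer", "pranswer" or "answer"; apply the
        // description locally, then send sdp.description to the remote peer.
        pc.setLocalDescription(this, sdp);
      }
      public void onSetSuccess() {}
      public void onCreateFailure(String error) {}
      public void onSetFailure(String error) {}
    }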
The name of the author may not be used to endorse or promote products - * derived from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED - * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO - * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, - * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; - * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, - * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR - * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF - * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - -package org.webrtc; - -/** Java version of webrtc::StatsReport. */ -public class StatsReport { - - /** Java version of webrtc::StatsReport::Value. */ - public static class Value { - public final String name; - public final String value; - - public Value(String name, String value) { - this.name = name; - this.value = value; - } - - public String toString() { - StringBuilder builder = new StringBuilder(); - builder.append("[").append(name).append(": ").append(value).append("]"); - return builder.toString(); - } - } - - public final String id; - public final String type; - // Time since 1970-01-01T00:00:00Z in milliseconds. - public final double timestamp; - public final Value[] values; - - public StatsReport(String id, String type, double timestamp, Value[] values) { - this.id = id; - this.type = type; - this.timestamp = timestamp; - this.values = values; - } - - public String toString() { - StringBuilder builder = new StringBuilder(); - builder.append("id: ").append(id).append(", type: ").append(type) - .append(", timestamp: ").append(timestamp).append(", values: "); - for (int i = 0; i < values.length; ++i) { - builder.append(values[i].toString()).append(", "); - } - return builder.toString(); - } -} diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/VideoCapturer.java b/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/VideoCapturer.java deleted file mode 100644 index eab5797bfda..00000000000 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/VideoCapturer.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * libjingle - * Copyright 2013, Google Inc. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * 3. The name of the author may not be used to endorse or promote products - * derived from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED - * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
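StatsObserver and StatsReport above carry the results of PeerConnection.getStats(); the JNI hunk earlier in this diff maps a null track to 0, i.e. statistics not scoped to a particular track. A small sketch combining the two; the printing is illustrative:

    import org.webrtc.PeerConnection;
    import org.webrtc.StatsObserver;
    import org.webrtc.StatsReport;

    class StatsExample {
      static void dump(PeerConnection pc) {
        pc.getStats(new StatsObserver() {
          public void onComplete(StatsReport[] reports) {
            for (StatsReport report : reports) {
              // Each report carries an id, a type, a timestamp in milliseconds
              // since 1970-01-01T00:00:00Z, and an array of name/value pairs.
              System.out.println(report);
              for (StatsReport.Value value : report.values) {
                System.out.println("  " + value);  // prints "[name: value]"
              }
            }
          }
        }, null);  // null track is passed down as 0 by nativeGetStats
      }
    }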
IN NO - * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, - * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; - * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, - * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR - * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF - * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - -package org.webrtc; - -/** Java version of VideoCapturerInterface. */ -public class VideoCapturer { - final long nativeVideoCapturer; - - private VideoCapturer(long nativeVideoCapturer) { - this.nativeVideoCapturer = nativeVideoCapturer; - } - - public static VideoCapturer create(String deviceName) { - long nativeVideoCapturer = nativeCreateVideoCapturer(deviceName); - if (nativeVideoCapturer == 0) { - return null; - } - return new VideoCapturer(nativeVideoCapturer); - } - - public void dispose() { - free(nativeVideoCapturer); - } - - private static native long nativeCreateVideoCapturer(String deviceName); - - private static native void free(long nativeVideoCapturer); -} diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/VideoRenderer.java b/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/VideoRenderer.java deleted file mode 100644 index 4cc341a48e1..00000000000 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/VideoRenderer.java +++ /dev/null @@ -1,136 +0,0 @@ -/* - * libjingle - * Copyright 2013, Google Inc. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * 3. The name of the author may not be used to endorse or promote products - * derived from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED - * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO - * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, - * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; - * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, - * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR - * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF - * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - -package org.webrtc; - -import java.nio.ByteBuffer; -import java.util.Arrays; - -/** - * Java version of VideoRendererInterface. In addition to allowing clients to - * define their own rendering behavior (by passing in a Callbacks object), this - * class also provides a createGui() method for creating a GUI-rendering window - * on various platforms. - */ -public class VideoRenderer { - - /** Java version of cricket::VideoFrame. 
*/ - public static class I420Frame { - public final int width; - public final int height; - public final int[] yuvStrides; - public final ByteBuffer[] yuvPlanes; - - /** - * Construct a frame of the given dimensions with the specified planar - * data. If |yuvPlanes| is null, new planes of the appropriate sizes are - * allocated. - */ - public I420Frame( - int width, int height, int[] yuvStrides, ByteBuffer[] yuvPlanes) { - this.width = width; - this.height = height; - this.yuvStrides = yuvStrides; - if (yuvPlanes == null) { - yuvPlanes = new ByteBuffer[3]; - yuvPlanes[0] = ByteBuffer.allocateDirect(yuvStrides[0] * height); - yuvPlanes[1] = ByteBuffer.allocateDirect(yuvStrides[1] * height); - yuvPlanes[2] = ByteBuffer.allocateDirect(yuvStrides[2] * height); - } - this.yuvPlanes = yuvPlanes; - } - - /** - * Copy the planes out of |source| into |this| and return |this|. Calling - * this with mismatched frame dimensions is a programming error and will - * likely crash. - */ - public I420Frame copyFrom(I420Frame source) { - if (!Arrays.equals(yuvStrides, source.yuvStrides) || - width != source.width || height != source.height) { - throw new RuntimeException("Mismatched dimensions! Source: " + - source.toString() + ", destination: " + toString()); - } - copyPlane(source.yuvPlanes[0], yuvPlanes[0]); - copyPlane(source.yuvPlanes[1], yuvPlanes[1]); - copyPlane(source.yuvPlanes[2], yuvPlanes[2]); - return this; - } - - @Override - public String toString() { - return width + "x" + height + ":" + yuvStrides[0] + ":" + yuvStrides[1] + - ":" + yuvStrides[2]; - } - - // Copy the bytes out of |src| and into |dst|, ignoring and overwriting - // positon & limit in both buffers. - private void copyPlane(ByteBuffer src, ByteBuffer dst) { - src.position(0).limit(src.capacity()); - dst.put(src); - dst.position(0).limit(dst.capacity()); - } -} - - /** The real meat of VideoRendererInterface. */ - public static interface Callbacks { - public void setSize(int width, int height); - public void renderFrame(I420Frame frame); - } - - // |this| either wraps a native (GUI) renderer or a client-supplied Callbacks - // (Java) implementation; so exactly one of these will be non-0/null. - final long nativeVideoRenderer; - private final Callbacks callbacks; - - public static VideoRenderer createGui(int x, int y) { - long nativeVideoRenderer = nativeCreateGuiVideoRenderer(x, y); - if (nativeVideoRenderer == 0) { - return null; - } - return new VideoRenderer(nativeVideoRenderer); - } - - public VideoRenderer(Callbacks callbacks) { - nativeVideoRenderer = nativeWrapVideoRenderer(callbacks); - this.callbacks = callbacks; - } - - private VideoRenderer(long nativeVideoRenderer) { - this.nativeVideoRenderer = nativeVideoRenderer; - callbacks = null; - } - - public void dispose() { - free(nativeVideoRenderer); - } - - private static native long nativeCreateGuiVideoRenderer(int x, int y); - private static native long nativeWrapVideoRenderer(Callbacks callbacks); - - private static native void free(long nativeVideoRenderer); -} diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/VideoSource.java b/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/VideoSource.java deleted file mode 100644 index f29f312c6b7..00000000000 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/VideoSource.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * libjingle - * Copyright 2013, Google Inc. 
- * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * 3. The name of the author may not be used to endorse or promote products - * derived from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED - * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO - * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, - * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; - * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, - * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR - * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF - * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - - -package org.webrtc; - -/** Java version of VideoSourceInterface. */ -public class VideoSource extends MediaSource { - public VideoSource(long nativeSource) { - super(nativeSource); - } -} diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/VideoTrack.java b/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/VideoTrack.java deleted file mode 100644 index 90e5c956588..00000000000 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/VideoTrack.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * libjingle - * Copyright 2013, Google Inc. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * 3. The name of the author may not be used to endorse or promote products - * derived from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED - * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO - * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, - * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; - * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, - * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR - * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF - * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - -package org.webrtc; - -import java.util.LinkedList; - -/** Java version of VideoTrackInterface. 
*/ -public class VideoTrack extends MediaStreamTrack { - private final LinkedList<VideoRenderer> renderers; - - public VideoTrack(long nativeTrack) { - super(nativeTrack); - renderers = new LinkedList<VideoRenderer>(); - } - - public void addRenderer(VideoRenderer renderer) { - renderers.add(renderer); - nativeAddRenderer(nativeTrack, renderer.nativeVideoRenderer); - } - - public void removeRenderer(VideoRenderer renderer) { - if (!renderers.remove(renderer)) { - return; - } - nativeRemoveRenderer(nativeTrack, renderer.nativeVideoRenderer); - renderer.dispose(); - } - - public void dispose() { - while (!renderers.isEmpty()) { - removeRenderer(renderers.getFirst()); - } - } - - private static native void nativeAddRenderer( - long nativeTrack, long nativeRenderer); - - private static native void nativeRemoveRenderer( - long nativeTrack, long nativeRenderer); -} diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/javatests/src/org/webrtc/PeerConnectionTest.java b/chromium/third_party/libjingle/source/talk/app/webrtc/javatests/src/org/webrtc/PeerConnectionTest.java deleted file mode 100644 index dd4ef487fb0..00000000000 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/javatests/src/org/webrtc/PeerConnectionTest.java +++ /dev/null @@ -1,723 +0,0 @@ -/* - * libjingle - * Copyright 2013, Google Inc. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * 3. The name of the author may not be used to endorse or promote products - * derived from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED - * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO - * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, - * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; - * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, - * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR - * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF - * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - -package org.webrtc; - -import junit.framework.TestCase; - -import org.junit.Test; -import org.webrtc.PeerConnection.IceConnectionState; -import org.webrtc.PeerConnection.IceGatheringState; -import org.webrtc.PeerConnection.SignalingState; - -import java.lang.ref.WeakReference; -import java.nio.ByteBuffer; -import java.nio.charset.Charset; -import java.util.Arrays; -import java.util.EnumSet; -import java.util.IdentityHashMap; -import java.util.LinkedList; -import java.util.Map; -import java.util.TreeSet; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; - -/** End-to-end tests for PeerConnection.java. */ -public class PeerConnectionTest extends TestCase { - // Set to true to render video. 
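Taken together, the deleted Java wrappers above (VideoCapturer, VideoRenderer, VideoSource, VideoTrack) composed into a simple capture-to-render pipeline. The following is a hedged sketch, not part of the patch; it assumes an existing PeerConnectionFactory named factory and mirrors how the deleted PeerConnectionTest below wires up its tracks:

    // Sketch only: capture device -> source -> track -> renderer, using the
    // removed Java classes. `factory` is an assumed PeerConnectionFactory.
    VideoCapturer capturer = VideoCapturer.create("");  // the tests pass "" to pick a default device
    VideoSource source = factory.createVideoSource(capturer, new MediaConstraints());
    VideoTrack track = factory.createVideoTrack("videoTrack0", source);
    track.addRenderer(new VideoRenderer(new VideoRenderer.Callbacks() {
      public void setSize(int width, int height) { /* resize the target view */ }
      public void renderFrame(VideoRenderer.I420Frame frame) { /* draw the frame */ }
    }));
    MediaStream stream = factory.createLocalMediaStream("localStream");
    stream.addTrack(track);
    // Teardown: VideoTrack.dispose() removes and disposes any renderers still
    // attached; the capturer is freed separately.
    track.dispose();
    capturer.dispose();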
- private static final boolean RENDER_TO_GUI = false; - - private static class ObserverExpectations implements PeerConnection.Observer, - VideoRenderer.Callbacks, - DataChannel.Observer, - StatsObserver { - private final String name; - private int expectedIceCandidates = 0; - private int expectedErrors = 0; - private int expectedSetSize = 0; - private int previouslySeenWidth = 0; - private int previouslySeenHeight = 0; - private int expectedFramesDelivered = 0; - private LinkedList<SignalingState> expectedSignalingChanges = - new LinkedList<SignalingState>(); - private LinkedList<IceConnectionState> expectedIceConnectionChanges = - new LinkedList<IceConnectionState>(); - private LinkedList<IceGatheringState> expectedIceGatheringChanges = - new LinkedList<IceGatheringState>(); - private LinkedList<String> expectedAddStreamLabels = - new LinkedList<String>(); - private LinkedList<String> expectedRemoveStreamLabels = - new LinkedList<String>(); - public LinkedList<IceCandidate> gotIceCandidates = - new LinkedList<IceCandidate>(); - private Map<MediaStream, WeakReference<VideoRenderer>> renderers = - new IdentityHashMap<MediaStream, WeakReference<VideoRenderer>>(); - private DataChannel dataChannel; - private LinkedList<DataChannel.Buffer> expectedBuffers = - new LinkedList<DataChannel.Buffer>(); - private LinkedList<DataChannel.State> expectedStateChanges = - new LinkedList<DataChannel.State>(); - private LinkedList<String> expectedRemoteDataChannelLabels = - new LinkedList<String>(); - private int expectedStatsCallbacks = 0; - private LinkedList<StatsReport[]> gotStatsReports = - new LinkedList<StatsReport[]>(); - - public ObserverExpectations(String name) { - this.name = name; - } - - public synchronized void setDataChannel(DataChannel dataChannel) { - assertNull(this.dataChannel); - this.dataChannel = dataChannel; - this.dataChannel.registerObserver(this); - assertNotNull(this.dataChannel); - } - - public synchronized void expectIceCandidates(int count) { - expectedIceCandidates += count; - } - - public synchronized void onIceCandidate(IceCandidate candidate) { - --expectedIceCandidates; - // We don't assert expectedIceCandidates >= 0 because it's hard to know - // how many to expect, in general. We only use expectIceCandidates to - // assert a minimal count. - gotIceCandidates.add(candidate); - } - - public synchronized void expectError() { - ++expectedErrors; - } - - public synchronized void onError() { - assertTrue(--expectedErrors >= 0); - } - - public synchronized void expectSetSize() { - if (RENDER_TO_GUI) { - // When new frames are delivered to the GUI renderer we don't get - // notified of frame size info. 
- return; - } - ++expectedSetSize; - } - - @Override - public synchronized void setSize(int width, int height) { - assertFalse(RENDER_TO_GUI); - assertTrue(--expectedSetSize >= 0); - // Because different camera devices (fake & physical) produce different - // resolutions, we only sanity-check the set sizes, - assertTrue(width > 0); - assertTrue(height > 0); - if (previouslySeenWidth > 0) { - assertEquals(previouslySeenWidth, width); - assertEquals(previouslySeenHeight, height); - } else { - previouslySeenWidth = width; - previouslySeenHeight = height; - } - } - - public synchronized void expectFramesDelivered(int count) { - assertFalse(RENDER_TO_GUI); - expectedFramesDelivered += count; - } - - @Override - public synchronized void renderFrame(VideoRenderer.I420Frame frame) { - --expectedFramesDelivered; - } - - public synchronized void expectSignalingChange(SignalingState newState) { - expectedSignalingChanges.add(newState); - } - - @Override - public synchronized void onSignalingChange(SignalingState newState) { - assertEquals(expectedSignalingChanges.removeFirst(), newState); - } - - public synchronized void expectIceConnectionChange( - IceConnectionState newState) { - expectedIceConnectionChanges.add(newState); - } - - @Override - public synchronized void onIceConnectionChange( - IceConnectionState newState) { - assertEquals(expectedIceConnectionChanges.removeFirst(), newState); - } - - public synchronized void expectIceGatheringChange( - IceGatheringState newState) { - expectedIceGatheringChanges.add(newState); - } - - @Override - public synchronized void onIceGatheringChange(IceGatheringState newState) { - // It's fine to get a variable number of GATHERING messages before - // COMPLETE fires (depending on how long the test runs) so we don't assert - // any particular count. 
- if (newState == IceGatheringState.GATHERING) { - return; - } - assertEquals(expectedIceGatheringChanges.removeFirst(), newState); - } - - public synchronized void expectAddStream(String label) { - expectedAddStreamLabels.add(label); - } - - public synchronized void onAddStream(MediaStream stream) { - assertEquals(expectedAddStreamLabels.removeFirst(), stream.label()); - assertEquals(1, stream.videoTracks.size()); - assertEquals(1, stream.audioTracks.size()); - assertTrue(stream.videoTracks.get(0).id().endsWith("LMSv0")); - assertTrue(stream.audioTracks.get(0).id().endsWith("LMSa0")); - assertEquals("video", stream.videoTracks.get(0).kind()); - assertEquals("audio", stream.audioTracks.get(0).kind()); - VideoRenderer renderer = createVideoRenderer(this); - stream.videoTracks.get(0).addRenderer(renderer); - assertNull(renderers.put( - stream, new WeakReference<VideoRenderer>(renderer))); - } - - public synchronized void expectRemoveStream(String label) { - expectedRemoveStreamLabels.add(label); - } - - public synchronized void onRemoveStream(MediaStream stream) { - assertEquals(expectedRemoveStreamLabels.removeFirst(), stream.label()); - WeakReference<VideoRenderer> renderer = renderers.remove(stream); - assertNotNull(renderer); - assertNotNull(renderer.get()); - assertEquals(1, stream.videoTracks.size()); - stream.videoTracks.get(0).removeRenderer(renderer.get()); - } - - public synchronized void expectDataChannel(String label) { - expectedRemoteDataChannelLabels.add(label); - } - - @Override - public synchronized void onDataChannel(DataChannel remoteDataChannel) { - assertEquals(expectedRemoteDataChannelLabels.removeFirst(), - remoteDataChannel.label()); - setDataChannel(remoteDataChannel); - assertEquals(DataChannel.State.CONNECTING, dataChannel.state()); - } - - public synchronized void expectMessage(ByteBuffer expectedBuffer, - boolean expectedBinary) { - expectedBuffers.add( - new DataChannel.Buffer(expectedBuffer, expectedBinary)); - } - - @Override - public synchronized void onMessage(DataChannel.Buffer buffer) { - DataChannel.Buffer expected = expectedBuffers.removeFirst(); - assertEquals(expected.binary, buffer.binary); - assertTrue(expected.data.equals(buffer.data)); - } - - @Override - public synchronized void onStateChange() { - assertEquals(expectedStateChanges.removeFirst(), dataChannel.state()); - } - - public synchronized void expectStateChange(DataChannel.State state) { - expectedStateChanges.add(state); - } - - @Override - public synchronized void onComplete(StatsReport[] reports) { - if (--expectedStatsCallbacks < 0) { - throw new RuntimeException("Unexpected stats report: " + reports); - } - gotStatsReports.add(reports); - } - - public synchronized void expectStatsCallback() { - ++expectedStatsCallbacks; - } - - public synchronized LinkedList<StatsReport[]> takeStatsReports() { - LinkedList<StatsReport[]> got = gotStatsReports; - gotStatsReports = new LinkedList<StatsReport[]>(); - return got; - } - - // Return a set of expectations that haven't been satisfied yet, possibly - // empty if no such expectations exist. - public synchronized TreeSet<String> unsatisfiedExpectations() { - TreeSet<String> stillWaitingForExpectations = new TreeSet<String>(); - if (expectedIceCandidates > 0) { // See comment in onIceCandidate. 
- stillWaitingForExpectations.add("expectedIceCandidates"); - } - if (expectedErrors != 0) { - stillWaitingForExpectations.add("expectedErrors: " + expectedErrors); - } - if (expectedSignalingChanges.size() != 0) { - stillWaitingForExpectations.add( - "expectedSignalingChanges: " + expectedSignalingChanges.size()); - } - if (expectedIceConnectionChanges.size() != 0) { - stillWaitingForExpectations.add("expectedIceConnectionChanges: " + - expectedIceConnectionChanges.size()); - } - if (expectedIceGatheringChanges.size() != 0) { - stillWaitingForExpectations.add("expectedIceGatheringChanges: " + - expectedIceGatheringChanges.size()); - } - if (expectedAddStreamLabels.size() != 0) { - stillWaitingForExpectations.add( - "expectedAddStreamLabels: " + expectedAddStreamLabels.size()); - } - if (expectedRemoveStreamLabels.size() != 0) { - stillWaitingForExpectations.add( - "expectedRemoveStreamLabels: " + expectedRemoveStreamLabels.size()); - } - if (expectedSetSize != 0) { - stillWaitingForExpectations.add("expectedSetSize"); - } - if (expectedFramesDelivered > 0) { - stillWaitingForExpectations.add( - "expectedFramesDelivered: " + expectedFramesDelivered); - } - if (!expectedBuffers.isEmpty()) { - stillWaitingForExpectations.add( - "expectedBuffers: " + expectedBuffers.size()); - } - if (!expectedStateChanges.isEmpty()) { - stillWaitingForExpectations.add( - "expectedStateChanges: " + expectedStateChanges.size()); - } - if (!expectedRemoteDataChannelLabels.isEmpty()) { - stillWaitingForExpectations.add("expectedRemoteDataChannelLabels: " + - expectedRemoteDataChannelLabels.size()); - } - if (expectedStatsCallbacks != 0) { - stillWaitingForExpectations.add( - "expectedStatsCallbacks: " + expectedStatsCallbacks); - } - return stillWaitingForExpectations; - } - - public void waitForAllExpectationsToBeSatisfied() { - // TODO(fischman): problems with this approach: - // - come up with something better than a poll loop - // - avoid serializing expectations explicitly; the test is not as robust - // as it could be because it must place expectations between wait - // statements very precisely (e.g. frame must not arrive before its - // expectation, and expectation must not be registered so early as to - // stall a wait). Use callbacks to fire off dependent steps instead of - // explicitly waiting, so there can be just a single wait at the end of - // the test. 
- TreeSet<String> prev = null; - TreeSet<String> stillWaitingForExpectations = unsatisfiedExpectations(); - while (!stillWaitingForExpectations.isEmpty()) { - if (!stillWaitingForExpectations.equals(prev)) { - System.out.println( - name + " still waiting at\n " + - (new Throwable()).getStackTrace()[1] + - "\n for: " + - Arrays.toString(stillWaitingForExpectations.toArray())); - } - try { - Thread.sleep(10); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - prev = stillWaitingForExpectations; - stillWaitingForExpectations = unsatisfiedExpectations(); - } - if (prev == null) { - System.out.println(name + " didn't need to wait at\n " + - (new Throwable()).getStackTrace()[1]); - } - } - } - - private static class SdpObserverLatch implements SdpObserver { - private boolean success = false; - private SessionDescription sdp = null; - private String error = null; - private CountDownLatch latch = new CountDownLatch(1); - - public SdpObserverLatch() {} - - public void onCreateSuccess(SessionDescription sdp) { - this.sdp = sdp; - onSetSuccess(); - } - - public void onSetSuccess() { - success = true; - latch.countDown(); - } - - public void onCreateFailure(String error) { - onSetFailure(error); - } - - public void onSetFailure(String error) { - this.error = error; - latch.countDown(); - } - - public boolean await() { - try { - assertTrue(latch.await(1000, TimeUnit.MILLISECONDS)); - return getSuccess(); - } catch (Exception e) { - throw new RuntimeException(e); - } - } - - public boolean getSuccess() { - return success; - } - - public SessionDescription getSdp() { - return sdp; - } - - public String getError() { - return error; - } - } - - static int videoWindowsMapped = -1; - - private static class TestRenderer implements VideoRenderer.Callbacks { - public int width = -1; - public int height = -1; - public int numFramesDelivered = 0; - - public void setSize(int width, int height) { - assertEquals(this.width, -1); - assertEquals(this.height, -1); - this.width = width; - this.height = height; - } - - public void renderFrame(VideoRenderer.I420Frame frame) { - ++numFramesDelivered; - } - } - - private static VideoRenderer createVideoRenderer( - VideoRenderer.Callbacks videoCallbacks) { - if (!RENDER_TO_GUI) { - return new VideoRenderer(videoCallbacks); - } - ++videoWindowsMapped; - assertTrue(videoWindowsMapped < 4); - int x = videoWindowsMapped % 2 != 0 ? 700 : 0; - int y = videoWindowsMapped >= 2 ? 0 : 500; - return VideoRenderer.createGui(x, y); - } - - // Return a weak reference to test that ownership is correctly held by - // PeerConnection, not by test code. - private static WeakReference<MediaStream> addTracksToPC( - PeerConnectionFactory factory, PeerConnection pc, - VideoSource videoSource, - String streamLabel, String videoTrackId, String audioTrackId, - VideoRenderer.Callbacks videoCallbacks) { - MediaStream lMS = factory.createLocalMediaStream(streamLabel); - VideoTrack videoTrack = - factory.createVideoTrack(videoTrackId, videoSource); - assertNotNull(videoTrack); - VideoRenderer videoRenderer = createVideoRenderer(videoCallbacks); - assertNotNull(videoRenderer); - videoTrack.addRenderer(videoRenderer); - lMS.addTrack(videoTrack); - // Just for fun, let's remove and re-add the track. 
- lMS.removeTrack(videoTrack); - lMS.addTrack(videoTrack); - lMS.addTrack(factory.createAudioTrack(audioTrackId)); - pc.addStream(lMS, new MediaConstraints()); - return new WeakReference<MediaStream>(lMS); - } - - private static void assertEquals( - SessionDescription lhs, SessionDescription rhs) { - assertEquals(lhs.type, rhs.type); - assertEquals(lhs.description, rhs.description); - } - - @Test - public void testCompleteSession() throws Exception { - // Uncomment to get ALL WebRTC tracing and SENSITIVE libjingle logging. - // Logging.enableTracing( - // "/tmp/AMI-nope.txt", - // EnumSet.of(Logging.TraceLevel.TRACE_ALL), - // Logging.Severity.LS_SENSITIVE); - - CountDownLatch testDone = new CountDownLatch(1); - - PeerConnectionFactory factory = new PeerConnectionFactory(); - MediaConstraints pcConstraints = new MediaConstraints(); - pcConstraints.mandatory.add( - new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true")); - pcConstraints.optional.add( - new MediaConstraints.KeyValuePair("RtpDataChannels", "true")); - // TODO(fischman): replace above with below to test SCTP channels when - // supported (https://code.google.com/p/webrtc/issues/detail?id=1408). - // pcConstraints.optional.add(new MediaConstraints.KeyValuePair( - // "internalSctpDataChannels", "true")); - - LinkedList<PeerConnection.IceServer> iceServers = - new LinkedList<PeerConnection.IceServer>(); - iceServers.add(new PeerConnection.IceServer( - "stun:stun.l.google.com:19302")); - iceServers.add(new PeerConnection.IceServer( - "turn:fake.example.com", "fakeUsername", "fakePassword")); - ObserverExpectations offeringExpectations = - new ObserverExpectations("PCTest:offerer"); - PeerConnection offeringPC = factory.createPeerConnection( - iceServers, pcConstraints, offeringExpectations); - assertNotNull(offeringPC); - - ObserverExpectations answeringExpectations = - new ObserverExpectations("PCTest:answerer"); - PeerConnection answeringPC = factory.createPeerConnection( - iceServers, pcConstraints, answeringExpectations); - assertNotNull(answeringPC); - - // We want to use the same camera for offerer & answerer, so create it here - // instead of in addTracksToPC. - VideoSource videoSource = factory.createVideoSource( - VideoCapturer.create(""), new MediaConstraints()); - - // TODO(fischman): the track ids here and in the addTracksToPC() call - // below hard-code the <mediaStreamLabel>[av]<index> scheme used in the - // serialized SDP, because the C++ API doesn't auto-translate. - // Drop |label| params from {Audio,Video}Track-related APIs once - // https://code.google.com/p/webrtc/issues/detail?id=1253 is fixed. 
- offeringExpectations.expectSetSize(); - WeakReference<MediaStream> oLMS = addTracksToPC( - factory, offeringPC, videoSource, "oLMS", "oLMSv0", "oLMSa0", - offeringExpectations); - - offeringExpectations.setDataChannel(offeringPC.createDataChannel( - "offeringDC", new DataChannel.Init())); - - SdpObserverLatch sdpLatch = new SdpObserverLatch(); - offeringPC.createOffer(sdpLatch, new MediaConstraints()); - assertTrue(sdpLatch.await()); - SessionDescription offerSdp = sdpLatch.getSdp(); - assertEquals(offerSdp.type, SessionDescription.Type.OFFER); - assertFalse(offerSdp.description.isEmpty()); - - sdpLatch = new SdpObserverLatch(); - answeringExpectations.expectSignalingChange( - SignalingState.HAVE_REMOTE_OFFER); - answeringExpectations.expectAddStream("oLMS"); - answeringExpectations.expectDataChannel("offeringDC"); - answeringPC.setRemoteDescription(sdpLatch, offerSdp); - assertEquals( - PeerConnection.SignalingState.STABLE, offeringPC.signalingState()); - assertTrue(sdpLatch.await()); - assertNull(sdpLatch.getSdp()); - - answeringExpectations.expectSetSize(); - WeakReference<MediaStream> aLMS = addTracksToPC( - factory, answeringPC, videoSource, "aLMS", "aLMSv0", "aLMSa0", - answeringExpectations); - - sdpLatch = new SdpObserverLatch(); - answeringPC.createAnswer(sdpLatch, new MediaConstraints()); - assertTrue(sdpLatch.await()); - SessionDescription answerSdp = sdpLatch.getSdp(); - assertEquals(answerSdp.type, SessionDescription.Type.ANSWER); - assertFalse(answerSdp.description.isEmpty()); - - offeringExpectations.expectIceCandidates(2); - answeringExpectations.expectIceCandidates(2); - - offeringExpectations.expectIceGatheringChange(IceGatheringState.COMPLETE); - answeringExpectations.expectIceGatheringChange(IceGatheringState.COMPLETE); - - sdpLatch = new SdpObserverLatch(); - answeringExpectations.expectSignalingChange(SignalingState.STABLE); - answeringPC.setLocalDescription(sdpLatch, answerSdp); - assertTrue(sdpLatch.await()); - assertNull(sdpLatch.getSdp()); - - sdpLatch = new SdpObserverLatch(); - offeringExpectations.expectSignalingChange(SignalingState.HAVE_LOCAL_OFFER); - offeringPC.setLocalDescription(sdpLatch, offerSdp); - assertTrue(sdpLatch.await()); - assertNull(sdpLatch.getSdp()); - sdpLatch = new SdpObserverLatch(); - offeringExpectations.expectSignalingChange(SignalingState.STABLE); - offeringExpectations.expectAddStream("aLMS"); - offeringPC.setRemoteDescription(sdpLatch, answerSdp); - assertTrue(sdpLatch.await()); - assertNull(sdpLatch.getSdp()); - - offeringExpectations.waitForAllExpectationsToBeSatisfied(); - answeringExpectations.waitForAllExpectationsToBeSatisfied(); - - assertEquals(offeringPC.getLocalDescription().type, offerSdp.type); - assertEquals(offeringPC.getRemoteDescription().type, answerSdp.type); - assertEquals(answeringPC.getLocalDescription().type, answerSdp.type); - assertEquals(answeringPC.getRemoteDescription().type, offerSdp.type); - - if (!RENDER_TO_GUI) { - // Wait for at least some frames to be delivered at each end (number - // chosen arbitrarily). 
- offeringExpectations.expectFramesDelivered(10); - answeringExpectations.expectFramesDelivered(10); - offeringExpectations.expectSetSize(); - answeringExpectations.expectSetSize(); - } - - offeringExpectations.expectIceConnectionChange( - IceConnectionState.CHECKING); - offeringExpectations.expectIceConnectionChange( - IceConnectionState.CONNECTED); - answeringExpectations.expectIceConnectionChange( - IceConnectionState.CHECKING); - answeringExpectations.expectIceConnectionChange( - IceConnectionState.CONNECTED); - - offeringExpectations.expectStateChange(DataChannel.State.OPEN); - answeringExpectations.expectStateChange(DataChannel.State.OPEN); - - for (IceCandidate candidate : offeringExpectations.gotIceCandidates) { - answeringPC.addIceCandidate(candidate); - } - offeringExpectations.gotIceCandidates.clear(); - for (IceCandidate candidate : answeringExpectations.gotIceCandidates) { - offeringPC.addIceCandidate(candidate); - } - answeringExpectations.gotIceCandidates.clear(); - - offeringExpectations.waitForAllExpectationsToBeSatisfied(); - answeringExpectations.waitForAllExpectationsToBeSatisfied(); - - assertEquals( - PeerConnection.SignalingState.STABLE, offeringPC.signalingState()); - assertEquals( - PeerConnection.SignalingState.STABLE, answeringPC.signalingState()); - - // Test send & receive UTF-8 text. - answeringExpectations.expectMessage( - ByteBuffer.wrap("hello!".getBytes(Charset.forName("UTF-8"))), false); - DataChannel.Buffer buffer = new DataChannel.Buffer( - ByteBuffer.wrap("hello!".getBytes(Charset.forName("UTF-8"))), false); - assertTrue(offeringExpectations.dataChannel.send(buffer)); - answeringExpectations.waitForAllExpectationsToBeSatisfied(); - - // TODO(fischman): add testing of binary messages when SCTP channels are - // supported (https://code.google.com/p/webrtc/issues/detail?id=1408). - // // Construct this binary message two different ways to ensure no - // // shortcuts are taken. - // ByteBuffer expectedBinaryMessage = ByteBuffer.allocateDirect(5); - // for (byte i = 1; i < 6; ++i) { - // expectedBinaryMessage.put(i); - // } - // expectedBinaryMessage.flip(); - // offeringExpectations.expectMessage(expectedBinaryMessage, true); - // assertTrue(answeringExpectations.dataChannel.send( - // new DataChannel.Buffer( - // ByteBuffer.wrap(new byte[] { 1, 2, 3, 4, 5 } ), true))); - // offeringExpectations.waitForAllExpectationsToBeSatisfied(); - - offeringExpectations.expectStateChange(DataChannel.State.CLOSING); - answeringExpectations.expectStateChange(DataChannel.State.CLOSING); - answeringExpectations.dataChannel.close(); - offeringExpectations.dataChannel.close(); - // TODO(fischman): implement a new offer/answer exchange to finalize the - // closing of the channel in order to see the CLOSED state reached. - // offeringExpectations.expectStateChange(DataChannel.State.CLOSED); - // answeringExpectations.expectStateChange(DataChannel.State.CLOSED); - - if (RENDER_TO_GUI) { - try { - Thread.sleep(3000); - } catch (Throwable t) { - throw new RuntimeException(t); - } - } - - // TODO(fischman) MOAR test ideas: - // - Test that PC.removeStream() works; requires a second - // createOffer/createAnswer dance. - // - audit each place that uses |constraints| for specifying non-trivial - // constraints (and ensure they're honored). - // - test error cases - // - ensure reasonable coverage of _jni.cc is achieved. Coverage is - // extra-important because of all the free-text (class/method names, etc) - // in JNI-style programming; make sure no typos! 
- // - Test that shutdown mid-interaction is crash-free. - - // Free the Java-land objects, collect them, and sleep a bit to make sure we - // don't get late-arrival crashes after the Java-land objects have been - // freed. - shutdownPC(offeringPC, offeringExpectations); - offeringPC = null; - shutdownPC(answeringPC, answeringExpectations); - answeringPC = null; - System.gc(); - Thread.sleep(100); - } - - private static void shutdownPC( - PeerConnection pc, ObserverExpectations expectations) { - expectations.dataChannel.unregisterObserver(); - expectations.dataChannel.dispose(); - expectations.expectStatsCallback(); - assertTrue(pc.getStats(expectations, null)); - expectations.waitForAllExpectationsToBeSatisfied(); - expectations.expectIceConnectionChange(IceConnectionState.CLOSED); - expectations.expectSignalingChange(SignalingState.CLOSED); - pc.close(); - expectations.waitForAllExpectationsToBeSatisfied(); - expectations.expectStatsCallback(); - assertTrue(pc.getStats(expectations, null)); - expectations.waitForAllExpectationsToBeSatisfied(); - - System.out.println("FYI stats: "); - int reportIndex = -1; - for (StatsReport[] reports : expectations.takeStatsReports()) { - System.out.println(" Report #" + (++reportIndex)); - for (int i = 0; i < reports.length; ++i) { - System.out.println(" " + reports[i].toString()); - } - } - assertEquals(1, reportIndex); - System.out.println("End stats."); - - pc.dispose(); - } -} diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/jsepsessiondescription_unittest.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/jsepsessiondescription_unittest.cc index 83f67cb3add..e2b59fba205 100644 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/jsepsessiondescription_unittest.cc +++ b/chromium/third_party/libjingle/source/talk/app/webrtc/jsepsessiondescription_unittest.cc @@ -79,16 +79,18 @@ static cricket::SessionDescription* CreateCricketSessionDescription() { cricket::NS_GINGLE_P2P, std::vector<std::string>(), kCandidateUfragVoice, kCandidatePwdVoice, - cricket::ICEMODE_FULL, NULL, - cricket::Candidates())))); + cricket::ICEMODE_FULL, + cricket::CONNECTIONROLE_NONE, + NULL, cricket::Candidates())))); EXPECT_TRUE(desc->AddTransportInfo( cricket::TransportInfo(cricket::CN_VIDEO, cricket::TransportDescription( cricket::NS_GINGLE_P2P, std::vector<std::string>(), kCandidateUfragVideo, kCandidatePwdVideo, - cricket::ICEMODE_FULL, NULL, - cricket::Candidates())))); + cricket::ICEMODE_FULL, + cricket::CONNECTIONROLE_NONE, + NULL, cricket::Candidates())))); return desc; } diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/localaudiosource.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/localaudiosource.cc index 9706c076796..3663aace528 100644 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/localaudiosource.cc +++ b/chromium/third_party/libjingle/source/talk/app/webrtc/localaudiosource.cc @@ -51,6 +51,8 @@ const char MediaConstraintsInterface::kNoiseSuppression[] = "googNoiseSuppression"; const char MediaConstraintsInterface::kHighpassFilter[] = "googHighpassFilter"; +const char MediaConstraintsInterface::kTypingNoiseDetection[] = + "googTypingNoiseDetection"; const char MediaConstraintsInterface::kInternalAecDump[] = "internalAecDump"; namespace { @@ -90,6 +92,8 @@ bool FromConstraints(const MediaConstraintsInterface::Constraints& constraints, options->highpass_filter.Set(value); else if (iter->key == MediaConstraintsInterface::kInternalAecDump) options->aec_dump.Set(value); + else if (iter->key == 
MediaConstraintsInterface::kTypingNoiseDetection) + options->typing_detection.Set(value); else success = false; } diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/mediaconstraintsinterface.h b/chromium/third_party/libjingle/source/talk/app/webrtc/mediaconstraintsinterface.h index a6b23c63a15..bc3872ce472 100644 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/mediaconstraintsinterface.h +++ b/chromium/third_party/libjingle/source/talk/app/webrtc/mediaconstraintsinterface.h @@ -80,12 +80,14 @@ class MediaConstraintsInterface { static const char kExperimentalAutoGainControl[]; // googAutoGainControl2 static const char kNoiseSuppression[]; // googNoiseSuppression static const char kHighpassFilter[]; // googHighpassFilter + static const char kTypingNoiseDetection[]; // googTypingNoiseDetection // Google-specific constraint keys for a local video source static const char kNoiseReduction[]; // googNoiseReduction static const char kLeakyBucket[]; // googLeakyBucket // googTemporalLayeredScreencast static const char kTemporalLayeredScreencast[]; + static const char kCpuOveruseDetection[]; // Constraint keys for CreateOffer / CreateAnswer // Specified by the W3C PeerConnection spec diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/mediastreamhandler.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/mediastreamhandler.cc index d43c6d5a1d8..b09af7892fb 100644 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/mediastreamhandler.cc +++ b/chromium/third_party/libjingle/source/talk/app/webrtc/mediastreamhandler.cc @@ -28,7 +28,7 @@ #include "talk/app/webrtc/mediastreamhandler.h" #include "talk/app/webrtc/localaudiosource.h" -#include "talk/app/webrtc/localvideosource.h" +#include "talk/app/webrtc/videosource.h" #include "talk/app/webrtc/videosourceinterface.h" namespace webrtc { @@ -154,6 +154,8 @@ RemoteVideoTrackHandler::RemoteVideoTrackHandler( remote_video_track_(track), provider_(provider) { OnEnabledChanged(); + provider_->SetVideoPlayout(ssrc, true, + remote_video_track_->GetSource()->FrameInput()); } RemoteVideoTrackHandler::~RemoteVideoTrackHandler() { @@ -169,9 +171,6 @@ void RemoteVideoTrackHandler::OnStateChanged() { } void RemoteVideoTrackHandler::OnEnabledChanged() { - provider_->SetVideoPlayout(ssrc(), - remote_video_track_->enabled(), - remote_video_track_->FrameInput()); } MediaStreamHandler::MediaStreamHandler(MediaStreamInterface* stream, diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/mediastreamhandler_unittest.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/mediastreamhandler_unittest.cc index a874bde0b41..475258e9b98 100644 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/mediastreamhandler_unittest.cc +++ b/chromium/third_party/libjingle/source/talk/app/webrtc/mediastreamhandler_unittest.cc @@ -30,9 +30,9 @@ #include <string> #include "talk/app/webrtc/audiotrack.h" -#include "talk/app/webrtc/localvideosource.h" #include "talk/app/webrtc/mediastream.h" #include "talk/app/webrtc/streamcollection.h" +#include "talk/app/webrtc/videosource.h" #include "talk/app/webrtc/videotrack.h" #include "talk/base/gunit.h" #include "talk/media/base/fakevideocapturer.h" @@ -86,6 +86,7 @@ class FakeVideoSource : public Notifier<VideoSourceInterface> { virtual void RemoveSink(cricket::VideoRenderer* output) {} virtual SourceState state() const { return state_; } virtual const cricket::VideoOptions* options() const { return &options_; } + virtual cricket::VideoRenderer* FrameInput() { return NULL; 
} protected: FakeVideoSource() : state_(kLive) {} @@ -149,8 +150,8 @@ class MediaStreamHandlerTest : public testing::Test { } void AddRemoteVideoTrack() { - EXPECT_CALL(video_provider_, SetVideoPlayout(kVideoSsrc, true, - video_track_->FrameInput())); + EXPECT_CALL(video_provider_, SetVideoPlayout( + kVideoSsrc, true, video_track_->GetSource()->FrameInput())); handlers_.AddRemoteVideoTrack(stream_, stream_->GetVideoTracks()[0], kVideoSsrc); } @@ -283,11 +284,8 @@ TEST_F(MediaStreamHandlerTest, LocalVideoTrackDisable) { TEST_F(MediaStreamHandlerTest, RemoteVideoTrackDisable) { AddRemoteVideoTrack(); - EXPECT_CALL(video_provider_, SetVideoPlayout(kVideoSsrc, false, _)); video_track_->set_enabled(false); - EXPECT_CALL(video_provider_, SetVideoPlayout(kVideoSsrc, true, - video_track_->FrameInput())); video_track_->set_enabled(true); RemoveRemoteVideoTrack(); diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/mediastreaminterface.h b/chromium/third_party/libjingle/source/talk/app/webrtc/mediastreaminterface.h index 0d3e39d892c..b2c4468fb99 100644 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/mediastreaminterface.h +++ b/chromium/third_party/libjingle/source/talk/app/webrtc/mediastreaminterface.h @@ -107,6 +107,9 @@ class MediaStreamTrackInterface : public talk_base::RefCountInterface, virtual bool set_enabled(bool enable) = 0; // These methods should be called by implementation only. virtual bool set_state(TrackState new_state) = 0; + + protected: + virtual ~MediaStreamTrackInterface() {} }; // Interface for rendering VideoFrames from a VideoTrack @@ -131,12 +134,6 @@ class VideoTrackInterface : public MediaStreamTrackInterface { // Deregister a renderer. virtual void RemoveRenderer(VideoRendererInterface* renderer) = 0; - // Gets a pointer to the frame input of this VideoTrack. - // The pointer is valid for the lifetime of this VideoTrack. - // VideoFrames rendered to the cricket::VideoRenderer will be rendered on all - // registered renderers. - virtual cricket::VideoRenderer* FrameInput() = 0; - virtual VideoSourceInterface* GetSource() const = 0; protected: diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/mediastreamsignaling.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/mediastreamsignaling.cc index c199f1e9c75..a23799f7369 100644 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/mediastreamsignaling.cc +++ b/chromium/third_party/libjingle/source/talk/app/webrtc/mediastreamsignaling.cc @@ -33,8 +33,11 @@ #include "talk/app/webrtc/mediastreamproxy.h" #include "talk/app/webrtc/mediaconstraintsinterface.h" #include "talk/app/webrtc/mediastreamtrackproxy.h" +#include "talk/app/webrtc/remotevideocapturer.h" +#include "talk/app/webrtc/videosource.h" #include "talk/app/webrtc/videotrack.h" #include "talk/base/bytebuffer.h" +#include "talk/media/sctp/sctpdataengine.h" static const char kDefaultStreamLabel[] = "default"; static const char kDefaultAudioTrackLabel[] = "defaulta0"; @@ -132,8 +135,10 @@ static bool EvaluateNeedForBundle(const cricket::MediaSessionOptions& options) { // Factory class for creating remote MediaStreams and MediaStreamTracks. 
class RemoteMediaStreamFactory { public: - explicit RemoteMediaStreamFactory(talk_base::Thread* signaling_thread) - : signaling_thread_(signaling_thread) { + explicit RemoteMediaStreamFactory(talk_base::Thread* signaling_thread, + cricket::ChannelManager* channel_manager) + : signaling_thread_(signaling_thread), + channel_manager_(channel_manager) { } talk_base::scoped_refptr<MediaStreamInterface> CreateMediaStream( @@ -144,21 +149,24 @@ class RemoteMediaStreamFactory { AudioTrackInterface* AddAudioTrack(webrtc::MediaStreamInterface* stream, const std::string& track_id) { - return AddTrack<AudioTrackInterface, AudioTrack, AudioTrackProxy>(stream, - track_id); + return AddTrack<AudioTrackInterface, AudioTrack, AudioTrackProxy>( + stream, track_id, static_cast<AudioSourceInterface*>(NULL)); } VideoTrackInterface* AddVideoTrack(webrtc::MediaStreamInterface* stream, const std::string& track_id) { - return AddTrack<VideoTrackInterface, VideoTrack, VideoTrackProxy>(stream, - track_id); + return AddTrack<VideoTrackInterface, VideoTrack, VideoTrackProxy>( + stream, track_id, VideoSource::Create(channel_manager_, + new RemoteVideoCapturer(), + NULL).get()); } private: - template <typename TI, typename T, typename TP> - TI* AddTrack(MediaStreamInterface* stream, const std::string& track_id) { + template <typename TI, typename T, typename TP, typename S> + TI* AddTrack(MediaStreamInterface* stream, const std::string& track_id, + S* source) { talk_base::scoped_refptr<TI> track( - TP::Create(signaling_thread_, T::Create(track_id, NULL))); + TP::Create(signaling_thread_, T::Create(track_id, source))); track->set_state(webrtc::MediaStreamTrackInterface::kLive); if (stream->AddTrack(track)) { return track; @@ -167,17 +175,20 @@ class RemoteMediaStreamFactory { } talk_base::Thread* signaling_thread_; + cricket::ChannelManager* channel_manager_; }; MediaStreamSignaling::MediaStreamSignaling( talk_base::Thread* signaling_thread, - MediaStreamSignalingObserver* stream_observer) + MediaStreamSignalingObserver* stream_observer, + cricket::ChannelManager* channel_manager) : signaling_thread_(signaling_thread), data_channel_factory_(NULL), stream_observer_(stream_observer), local_streams_(StreamCollection::Create()), remote_streams_(StreamCollection::Create()), - remote_stream_factory_(new RemoteMediaStreamFactory(signaling_thread)), + remote_stream_factory_(new RemoteMediaStreamFactory(signaling_thread, + channel_manager)), last_allocated_sctp_id_(0) { options_.has_video = false; options_.has_audio = false; @@ -225,6 +236,10 @@ bool MediaStreamSignaling::AllocateSctpId(int* id) { return true; } +bool MediaStreamSignaling::HasDataChannels() const { + return !data_channels_.empty(); +} + bool MediaStreamSignaling::AddDataChannel(DataChannel* data_channel) { ASSERT(data_channel != NULL); if (data_channels_.find(data_channel->label()) != data_channels_.end()) { @@ -599,15 +614,19 @@ void MediaStreamSignaling::OnRemoteTrackRemoved( if (media_type == cricket::MEDIA_TYPE_AUDIO) { talk_base::scoped_refptr<AudioTrackInterface> audio_track = stream->FindAudioTrack(track_id); - audio_track->set_state(webrtc::MediaStreamTrackInterface::kEnded); - stream->RemoveTrack(audio_track); - stream_observer_->OnRemoveRemoteAudioTrack(stream, audio_track); + if (audio_track) { + audio_track->set_state(webrtc::MediaStreamTrackInterface::kEnded); + stream->RemoveTrack(audio_track); + stream_observer_->OnRemoveRemoteAudioTrack(stream, audio_track); + } } else if (media_type == cricket::MEDIA_TYPE_VIDEO) { 
talk_base::scoped_refptr<VideoTrackInterface> video_track = stream->FindVideoTrack(track_id); - video_track->set_state(webrtc::MediaStreamTrackInterface::kEnded); - stream->RemoveTrack(video_track); - stream_observer_->OnRemoveRemoteVideoTrack(stream, video_track); + if (video_track) { + video_track->set_state(webrtc::MediaStreamTrackInterface::kEnded); + stream->RemoveTrack(video_track); + stream_observer_->OnRemoveRemoteVideoTrack(stream, video_track); + } } else { ASSERT(false && "Invalid media type"); } @@ -621,11 +640,19 @@ void MediaStreamSignaling::RejectRemoteTracks(cricket::MediaType media_type) { MediaStreamInterface* stream = remote_streams_->find(info.stream_label); if (media_type == cricket::MEDIA_TYPE_AUDIO) { AudioTrackInterface* track = stream->FindAudioTrack(info.track_id); - track->set_state(webrtc::MediaStreamTrackInterface::kEnded); + // There's no guarantee the track is still available, e.g. the track may + // have been removed from the stream by javascript. + if (track) { + track->set_state(webrtc::MediaStreamTrackInterface::kEnded); + } } if (media_type == cricket::MEDIA_TYPE_VIDEO) { VideoTrackInterface* track = stream->FindVideoTrack(info.track_id); - track->set_state(webrtc::MediaStreamTrackInterface::kEnded); + // There's no guarantee the track is still available, e.g. the track may + // have been removed from the stream by javascript. + if (track) { + track->set_state(webrtc::MediaStreamTrackInterface::kEnded); + } } } } diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/mediastreamsignaling.h b/chromium/third_party/libjingle/source/talk/app/webrtc/mediastreamsignaling.h index f2437d8fcfb..f64bf978ffc 100644 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/mediastreamsignaling.h +++ b/chromium/third_party/libjingle/source/talk/app/webrtc/mediastreamsignaling.h @@ -159,7 +159,8 @@ class MediaStreamSignalingObserver { class MediaStreamSignaling { public: MediaStreamSignaling(talk_base::Thread* signaling_thread, - MediaStreamSignalingObserver* stream_observer); + MediaStreamSignalingObserver* stream_observer, + cricket::ChannelManager* channel_manager); virtual ~MediaStreamSignaling(); // Notify all referenced objects that MediaStreamSignaling will be teared @@ -187,6 +188,8 @@ class MediaStreamSignaling { // be offered in a SessionDescription. void RemoveLocalStream(MediaStreamInterface* local_stream); + // Checks if any data channel has been added. + bool HasDataChannels() const; // Adds |data_channel| to the collection of DataChannels that will be // be offered in a SessionDescription. 
bool AddDataChannel(DataChannel* data_channel); diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/mediastreamsignaling_unittest.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/mediastreamsignaling_unittest.cc index 8b3dacb52f1..ea1336450da 100644 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/mediastreamsignaling_unittest.cc +++ b/chromium/third_party/libjingle/source/talk/app/webrtc/mediastreamsignaling_unittest.cc @@ -37,8 +37,11 @@ #include "talk/base/scoped_ptr.h" #include "talk/base/stringutils.h" #include "talk/base/thread.h" +#include "talk/media/base/fakemediaengine.h" +#include "talk/media/devices/fakedevicemanager.h" #include "talk/p2p/base/constants.h" #include "talk/p2p/base/sessiondescription.h" +#include "talk/session/media/channelmanager.h" static const char kStreams[][8] = {"stream1", "stream2"}; static const char kAudioTracks[][32] = {"audiotrack0", "audiotrack1"}; @@ -285,13 +288,13 @@ class MockSignalingObserver : public webrtc::MediaStreamSignalingObserver { } virtual void OnAddRemoteVideoTrack(MediaStreamInterface* stream, - VideoTrackInterface* video_track, - uint32 ssrc) { + VideoTrackInterface* video_track, + uint32 ssrc) { AddTrack(&remote_video_tracks_, stream, video_track, ssrc); } virtual void OnRemoveRemoteAudioTrack(MediaStreamInterface* stream, - AudioTrackInterface* audio_track) { + AudioTrackInterface* audio_track) { RemoveTrack(&remote_audio_tracks_, stream, audio_track); } @@ -392,8 +395,10 @@ class MockSignalingObserver : public webrtc::MediaStreamSignalingObserver { class MediaStreamSignalingForTest : public webrtc::MediaStreamSignaling { public: - explicit MediaStreamSignalingForTest(MockSignalingObserver* observer) - : webrtc::MediaStreamSignaling(talk_base::Thread::Current(), observer) { + MediaStreamSignalingForTest(MockSignalingObserver* observer, + cricket::ChannelManager* channel_manager) + : webrtc::MediaStreamSignaling(talk_base::Thread::Current(), observer, + channel_manager) { }; using webrtc::MediaStreamSignaling::GetOptionsForOffer; @@ -406,7 +411,12 @@ class MediaStreamSignalingTest: public testing::Test { protected: virtual void SetUp() { observer_.reset(new MockSignalingObserver()); - signaling_.reset(new MediaStreamSignalingForTest(observer_.get())); + channel_manager_.reset( + new cricket::ChannelManager(new cricket::FakeMediaEngine(), + new cricket::FakeDeviceManager(), + talk_base::Thread::Current())); + signaling_.reset(new MediaStreamSignalingForTest(observer_.get(), + channel_manager_.get())); } // Create a collection of streams. @@ -497,6 +507,9 @@ class MediaStreamSignalingTest: public testing::Test { ASSERT_TRUE(stream->AddTrack(video_track)); } + // ChannelManager is used by VideoSource, so it should be released after all // the video tracks. Declaring it as the first private member should ensure that. 
+ talk_base::scoped_ptr<cricket::ChannelManager> channel_manager_; talk_base::scoped_refptr<StreamCollection> reference_collection_; talk_base::scoped_ptr<MockSignalingObserver> observer_; talk_base::scoped_ptr<MediaStreamSignalingForTest> signaling_; @@ -688,6 +701,9 @@ TEST_F(MediaStreamSignalingTest, UpdateRemoteStreams) { observer_->VerifyRemoteAudioTrack(kStreams[0], kAudioTracks[0], 1); EXPECT_EQ(1u, observer_->NumberOfRemoteVideoTracks()); observer_->VerifyRemoteVideoTrack(kStreams[0], kVideoTracks[0], 2); + ASSERT_EQ(1u, observer_->remote_streams()->count()); + MediaStreamInterface* remote_stream = observer_->remote_streams()->at(0); + EXPECT_TRUE(remote_stream->GetVideoTracks()[0]->GetSource() != NULL); // Create a session description based on another SDP with another // MediaStream. @@ -778,6 +794,33 @@ TEST_F(MediaStreamSignalingTest, RejectMediaContent) { EXPECT_EQ(webrtc::MediaStreamTrackInterface::kEnded, remote_audio->state()); } +// This tests that it won't crash if the remote track has been removed outside +// of MediaStreamSignaling and then MediaStreamSignaling tries to reject +// this track. +TEST_F(MediaStreamSignalingTest, RemoveTrackThenRejectMediaContent) { + talk_base::scoped_ptr<SessionDescriptionInterface> desc( + webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, + kSdpStringWithStream1, NULL)); + EXPECT_TRUE(desc != NULL); + signaling_->OnRemoteDescriptionChanged(desc.get()); + + MediaStreamInterface* remote_stream = observer_->remote_streams()->at(0); + remote_stream->RemoveTrack(remote_stream->GetVideoTracks()[0]); + remote_stream->RemoveTrack(remote_stream->GetAudioTracks()[0]); + + cricket::ContentInfo* video_info = + desc->description()->GetContentByName("video"); + video_info->rejected = true; + signaling_->OnLocalDescriptionChanged(desc.get()); + + cricket::ContentInfo* audio_info = + desc->description()->GetContentByName("audio"); + audio_info->rejected = true; + signaling_->OnLocalDescriptionChanged(desc.get()); + + // No crash is a pass. +} + // This tests that a default MediaStream is created if a remote session // description doesn't contain any streams and no MSID support. // It also tests that the default stream is updated if a video m-line is added @@ -812,6 +855,28 @@ TEST_F(MediaStreamSignalingTest, SdpWithoutMsidCreatesDefaultStream) { observer_->VerifyRemoteVideoTrack("default", "defaultv0", 0); } +// This tests that it won't crash when MediaStreamSignaling tries to remove +// a remote track that has already been removed from the mediastream. +TEST_F(MediaStreamSignalingTest, RemoveAlreadyGoneRemoteStream) { + talk_base::scoped_ptr<SessionDescriptionInterface> desc_audio_only( + webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, + kSdpStringWithoutStreams, + NULL)); + ASSERT_TRUE(desc_audio_only != NULL); + signaling_->OnRemoteDescriptionChanged(desc_audio_only.get()); + MediaStreamInterface* remote_stream = observer_->remote_streams()->at(0); + remote_stream->RemoveTrack(remote_stream->GetAudioTracks()[0]); + remote_stream->RemoveTrack(remote_stream->GetVideoTracks()[0]); + + talk_base::scoped_ptr<SessionDescriptionInterface> desc( + webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, + kSdpStringWithoutStreams, NULL)); + ASSERT_TRUE(desc != NULL); + signaling_->OnRemoteDescriptionChanged(desc.get()); + + // No crash is a pass. 
+} + // This tests that a default MediaStream is created if the remote session // description doesn't contain any streams and doesn't contain an indication if // MSID is supported. diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/mediastreamtrackproxy.h b/chromium/third_party/libjingle/source/talk/app/webrtc/mediastreamtrackproxy.h index 1efc8c616ae..7c622e73332 100644 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/mediastreamtrackproxy.h +++ b/chromium/third_party/libjingle/source/talk/app/webrtc/mediastreamtrackproxy.h @@ -61,7 +61,6 @@ BEGIN_PROXY_MAP(VideoTrack) PROXY_METHOD1(void, AddRenderer, VideoRendererInterface*) PROXY_METHOD1(void, RemoveRenderer, VideoRendererInterface*) - PROXY_METHOD0(cricket::VideoRenderer*, FrameInput) PROXY_CONSTMETHOD0(VideoSourceInterface*, GetSource) PROXY_METHOD1(void, RegisterObserver, ObserverInterface*) diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCICEServer.mm b/chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCICEServer.mm index cb32673aa90..cc5a84a41fb 100644 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCICEServer.mm +++ b/chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCICEServer.mm @@ -33,22 +33,25 @@ @implementation RTCICEServer -- (id)initWithURI:(NSURL *)URI password:(NSString *)password { - if (!URI || !password) { +- (id)initWithURI:(NSURL *)URI + username:(NSString *)username + password:(NSString *)password { + if (!URI || !username || !password) { NSAssert(NO, @"nil arguments not allowed"); self = nil; return nil; } if ((self = [super init])) { _URI = URI; + _username = [username copy]; _password = [password copy]; } return self; } - (NSString *)description { - return [NSString stringWithFormat:@"Server: [%@]\nPassword: [%@]", - [self.URI absoluteString], self.password]; + return [NSString stringWithFormat:@"RTCICEServer: [%@:%@:%@]", + [self.URI absoluteString], self.username, self.password]; } @end @@ -58,6 +61,7 @@ - (webrtc::PeerConnectionInterface::IceServer)iceServer { webrtc::PeerConnectionInterface::IceServer iceServer; iceServer.uri = [[self.URI absoluteString] UTF8String]; + iceServer.username = [self.username UTF8String]; iceServer.password = [self.password UTF8String]; return iceServer; } diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/objc/public/RTCICEServer.h b/chromium/third_party/libjingle/source/talk/app/webrtc/objc/public/RTCICEServer.h index 717d042f782..63c14efaaf4 100644 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/objc/public/RTCICEServer.h +++ b/chromium/third_party/libjingle/source/talk/app/webrtc/objc/public/RTCICEServer.h @@ -30,14 +30,15 @@ // RTCICEServer allows for the creation of ICEServer structs. @interface RTCICEServer : NSObject -// The server URI. +// The server URI, username, and password. @property(nonatomic, strong, readonly) NSURL* URI; - -// The server password. +@property(nonatomic, copy, readonly) NSString* username; @property(nonatomic, copy, readonly) NSString* password; // Initializer for RTCICEServer taking uri, username, and password. 
-- (id)initWithURI:(NSString*)URI password:(NSString*)password; +- (id)initWithURI:(NSString*)URI + username:(NSString*)username + password:(NSString*)password; #ifndef DOXYGEN_SHOULD_SKIP_THIS // Disallow init and don't add to documentation diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/objctests/RTCPeerConnectionTest.mm b/chromium/third_party/libjingle/source/talk/app/webrtc/objctests/RTCPeerConnectionTest.mm index 826409f561c..0ce8822fd1e 100644 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/objctests/RTCPeerConnectionTest.mm +++ b/chromium/third_party/libjingle/source/talk/app/webrtc/objctests/RTCPeerConnectionTest.mm @@ -99,6 +99,7 @@ NSString *stunURL = @"stun:stun.l.google.com:19302"; RTCICEServer *stunServer = [[RTCICEServer alloc] initWithURI:[NSURL URLWithString:stunURL] + username:@"" password:@""]; NSArray *iceServers = @[stunServer]; diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnection.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnection.cc index 2be76f439a2..9eaf915c395 100644 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnection.cc +++ b/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnection.cc @@ -303,7 +303,7 @@ bool PeerConnection::DoInitialize( port_allocator_->set_step_delay(cricket::kMinimumStepDelay); mediastream_signaling_.reset(new MediaStreamSignaling( - factory_->signaling_thread(), this)); + factory_->signaling_thread(), this, factory_->channel_manager())); session_.reset(new WebRtcSession(factory_->channel_manager(), factory_->signaling_thread(), @@ -354,10 +354,10 @@ bool PeerConnection::AddStream(MediaStreamInterface* local_stream, } void PeerConnection::RemoveStream(MediaStreamInterface* local_stream) { + mediastream_signaling_->RemoveLocalStream(local_stream); if (IsClosed()) { return; } - mediastream_signaling_->RemoveLocalStream(local_stream); observer_->OnRenegotiationNeeded(); } diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnection_unittest.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnection_unittest.cc index 250e60f4185..522d528020c 100644 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnection_unittest.cc +++ b/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnection_unittest.cc @@ -990,7 +990,9 @@ class P2PTestConductor : public testing::Test { typedef P2PTestConductor<JsepTestClient> JsepPeerConnectionP2PTestClient; // This test sets up a Jsep call between two parties and test Dtmf. -TEST_F(JsepPeerConnectionP2PTestClient, LocalP2PTestDtmf) { +// TODO(holmer): Disabled due to sometimes crashing on buildbots. +// See issue webrtc/2378. +TEST_F(JsepPeerConnectionP2PTestClient, DISABLED_LocalP2PTestDtmf) { ASSERT_TRUE(CreateTestClients()); LocalP2PTest(); VerifyDtmf(); @@ -1087,7 +1089,8 @@ TEST_F(JsepPeerConnectionP2PTestClient, LocalP2PTestOfferDtlsButNotSdes) { // This test sets up a Jsep call between two parties, and the callee only // accept to receive video. -TEST_F(JsepPeerConnectionP2PTestClient, LocalP2PTestAnswerVideo) { +// BUG=https://code.google.com/p/webrtc/issues/detail?id=2288 +TEST_F(JsepPeerConnectionP2PTestClient, DISABLED_LocalP2PTestAnswerVideo) { ASSERT_TRUE(CreateTestClients()); receiving_client()->SetReceiveAudioVideo(false, true); LocalP2PTest(); @@ -1095,7 +1098,7 @@ TEST_F(JsepPeerConnectionP2PTestClient, LocalP2PTestAnswerVideo) { // This test sets up a Jsep call between two parties, and the callee only // accept to receive audio. 
-TEST_F(JsepPeerConnectionP2PTestClient, LocalP2PTestAnswerAudio) { +TEST_F(JsepPeerConnectionP2PTestClient, DISABLED_LocalP2PTestAnswerAudio) { ASSERT_TRUE(CreateTestClients()); receiving_client()->SetReceiveAudioVideo(true, false); LocalP2PTest(); @@ -1369,7 +1372,10 @@ TEST_F(JsepPeerConnectionP2PTestClient, IceRestart) { // This test sets up a Jsep call between two parties with external // VideoDecoderFactory. -TEST_F(JsepPeerConnectionP2PTestClient, LocalP2PTestWithVideoDecoderFactory) { +// TODO(holmer): Disabled due to sometimes crashing on buildbots. +// See issue webrtc/2378. +TEST_F(JsepPeerConnectionP2PTestClient, + DISABLED_LocalP2PTestWithVideoDecoderFactory) { ASSERT_TRUE(CreateTestClients()); EnableVideoDecoderFactory(); LocalP2PTest(); diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnectionfactory.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnectionfactory.cc index 9c5c126b1a6..7d30fab8ec1 100644 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnectionfactory.cc +++ b/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnectionfactory.cc @@ -29,12 +29,12 @@ #include "talk/app/webrtc/audiotrack.h" #include "talk/app/webrtc/localaudiosource.h" -#include "talk/app/webrtc/localvideosource.h" #include "talk/app/webrtc/mediastreamproxy.h" #include "talk/app/webrtc/mediastreamtrackproxy.h" #include "talk/app/webrtc/peerconnection.h" #include "talk/app/webrtc/peerconnectionproxy.h" #include "talk/app/webrtc/portallocatorfactory.h" +#include "talk/app/webrtc/videosource.h" #include "talk/app/webrtc/videosourceproxy.h" #include "talk/app/webrtc/videotrack.h" #include "talk/media/devices/dummydevicemanager.h" @@ -269,9 +269,8 @@ talk_base::scoped_refptr<VideoSourceInterface> PeerConnectionFactory::CreateVideoSource_s( cricket::VideoCapturer* capturer, const MediaConstraintsInterface* constraints) { - talk_base::scoped_refptr<LocalVideoSource> source( - LocalVideoSource::Create(channel_manager_.get(), capturer, - constraints)); + talk_base::scoped_refptr<VideoSource> source( + VideoSource::Create(channel_manager_.get(), capturer, constraints)); return VideoSourceProxy::Create(signaling_thread_, source); } diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnectioninterface_unittest.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnectioninterface_unittest.cc index d743684874d..7aa06ef4248 100644 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnectioninterface_unittest.cc +++ b/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnectioninterface_unittest.cc @@ -29,18 +29,19 @@ #include "talk/app/webrtc/fakeportallocatorfactory.h" #include "talk/app/webrtc/jsepsessiondescription.h" -#include "talk/app/webrtc/localvideosource.h" #include "talk/app/webrtc/mediastreaminterface.h" #include "talk/app/webrtc/peerconnectioninterface.h" #include "talk/app/webrtc/test/fakeconstraints.h" #include "talk/app/webrtc/test/mockpeerconnectionobservers.h" #include "talk/app/webrtc/test/testsdpstrings.h" +#include "talk/app/webrtc/videosource.h" #include "talk/base/gunit.h" #include "talk/base/scoped_ptr.h" #include "talk/base/sslstreamadapter.h" #include "talk/base/stringutils.h" #include "talk/base/thread.h" #include "talk/media/base/fakevideocapturer.h" +#include "talk/media/sctp/sctpdataengine.h" #include "talk/session/media/mediasession.h" static const char kStreamLabel1[] = "local_stream_1"; @@ -1193,7 +1194,7 @@ TEST_F(PeerConnectionInterfaceTest, 
CloseAndTestMethods) { ASSERT_FALSE(local_stream->GetAudioTracks().empty()); talk_base::scoped_refptr<webrtc::DtmfSenderInterface> dtmf_sender( pc_->CreateDtmfSender(local_stream->GetAudioTracks()[0])); - EXPECT_FALSE(dtmf_sender->CanInsertDtmf()); + EXPECT_TRUE(NULL == dtmf_sender); // local stream has been removed. EXPECT_TRUE(pc_->CreateDataChannel("test", NULL) == NULL); diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/remotevideocapturer.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/remotevideocapturer.cc new file mode 100644 index 00000000000..072c8d81cc5 --- /dev/null +++ b/chromium/third_party/libjingle/source/talk/app/webrtc/remotevideocapturer.cc @@ -0,0 +1,95 @@ +/* + * libjingle + * Copyright 2013, Google Inc. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * 3. The name of the author may not be used to endorse or promote products + * derived from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO + * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; + * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, + * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR + * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF + * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ + +#include "talk/app/webrtc/remotevideocapturer.h" + +#include "talk/base/logging.h" +#include "talk/media/base/videoframe.h" + +namespace webrtc { + +RemoteVideoCapturer::RemoteVideoCapturer() {} + +RemoteVideoCapturer::~RemoteVideoCapturer() {} + +cricket::CaptureState RemoteVideoCapturer::Start( + const cricket::VideoFormat& capture_format) { + if (capture_state() == cricket::CS_RUNNING) { + LOG(LS_WARNING) + << "RemoteVideoCapturer::Start called when it's already started."; + return capture_state(); + } + + LOG(LS_INFO) << "RemoteVideoCapturer::Start"; + SetCaptureFormat(&capture_format); + return cricket::CS_RUNNING; +} + +void RemoteVideoCapturer::Stop() { + if (capture_state() == cricket::CS_STOPPED) { + LOG(LS_WARNING) + << "RemoteVideoCapturer::Stop called when it's already stopped."; + return; + } + + LOG(LS_INFO) << "RemoteVideoCapturer::Stop"; + SetCaptureFormat(NULL); + SetCaptureState(cricket::CS_STOPPED); +} + +bool RemoteVideoCapturer::IsRunning() { + return capture_state() == cricket::CS_RUNNING; +} + +bool RemoteVideoCapturer::GetPreferredFourccs(std::vector<uint32>* fourccs) { + if (!fourccs) + return false; + fourccs->push_back(cricket::FOURCC_I420); + return true; +} + +bool RemoteVideoCapturer::GetBestCaptureFormat( + const cricket::VideoFormat& desired, cricket::VideoFormat* best_format) { + if (!best_format) { + return false; + } + + // RemoteVideoCapturer does not support capability enumeration. + // Use the desired format as the best format. + best_format->width = desired.width; + best_format->height = desired.height; + best_format->fourcc = cricket::FOURCC_I420; + best_format->interval = desired.interval; + return true; +} + +bool RemoteVideoCapturer::IsScreencast() const { + // TODO(ronghuawu): what about remote screencast stream. + return false; +} + +} // namespace webrtc diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/MediaStreamTrack.java b/chromium/third_party/libjingle/source/talk/app/webrtc/remotevideocapturer.h index 5cd2f4c11d5..1ea0692006d 100644 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/MediaStreamTrack.java +++ b/chromium/third_party/libjingle/source/talk/app/webrtc/remotevideocapturer.h @@ -25,62 +25,41 @@ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ -package org.webrtc; - -/** Java wrapper for a C++ MediaStreamTrackInterface. 
*/ -public class MediaStreamTrack { - /** Tracks MediaStreamTrackInterface.TrackState */ - public enum State { - INITIALIZING, LIVE, ENDED, FAILED - } - - final long nativeTrack; - - public MediaStreamTrack(long nativeTrack) { - this.nativeTrack = nativeTrack; - } - - public String id() { - return nativeId(nativeTrack); - } - - public String kind() { - return nativeKind(nativeTrack); - } - - public boolean enabled() { - return nativeEnabled(nativeTrack); - } - - public boolean setEnabled(boolean enable) { - return nativeSetEnabled(nativeTrack, enable); - } - - public State state() { - return nativeState(nativeTrack); - } - - public boolean setState(State newState) { - return nativeSetState(nativeTrack, newState.ordinal()); - } - - public void dispose() { - free(nativeTrack); - } - - private static native String nativeId(long nativeTrack); - - private static native String nativeKind(long nativeTrack); - - private static native boolean nativeEnabled(long nativeTrack); - - private static native boolean nativeSetEnabled( - long nativeTrack, boolean enabled); - - private static native State nativeState(long nativeTrack); - - private static native boolean nativeSetState( - long nativeTrack, int newState); - - private static native void free(long nativeTrack); -} +#ifndef TALK_APP_WEBRTC_REMOTEVIDEOCAPTURER_H_ +#define TALK_APP_WEBRTC_REMOTEVIDEOCAPTURER_H_ + +#include <vector> + +#include "talk/app/webrtc/mediastreaminterface.h" +#include "talk/media/base/videocapturer.h" +#include "talk/media/base/videorenderer.h" + +namespace webrtc { + +// RemoteVideoCapturer implements a simple cricket::VideoCapturer which +// gets decoded remote video frames from media channel. +// It's used as the remote video source's VideoCapturer so that the remote video +// can be used as a cricket::VideoCapturer and in that way a remote video stream +// can implement the MediaStreamSourceInterface. +class RemoteVideoCapturer : public cricket::VideoCapturer { + public: + RemoteVideoCapturer(); + virtual ~RemoteVideoCapturer(); + + // cricket::VideoCapturer implementation. + virtual cricket::CaptureState Start( + const cricket::VideoFormat& capture_format) OVERRIDE; + virtual void Stop() OVERRIDE; + virtual bool IsRunning() OVERRIDE; + virtual bool GetPreferredFourccs(std::vector<uint32>* fourccs) OVERRIDE; + virtual bool GetBestCaptureFormat(const cricket::VideoFormat& desired, + cricket::VideoFormat* best_format) OVERRIDE; + virtual bool IsScreencast() const OVERRIDE; + + private: + DISALLOW_COPY_AND_ASSIGN(RemoteVideoCapturer); +}; + +} // namespace webrtc + +#endif // TALK_APP_WEBRTC_REMOTEVIDEOCAPTURER_H_ diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/remotevideocapturer_unittest.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/remotevideocapturer_unittest.cc new file mode 100644 index 00000000000..68135507cac --- /dev/null +++ b/chromium/third_party/libjingle/source/talk/app/webrtc/remotevideocapturer_unittest.cc @@ -0,0 +1,132 @@ +/* + * libjingle + * Copyright 2013, Google Inc. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. 
+ * 3. The name of the author may not be used to endorse or promote products + * derived from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO + * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; + * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, + * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR + * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF + * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +#include <string> + +#include "talk/app/webrtc/remotevideocapturer.h" +#include "talk/base/gunit.h" +#include "talk/media/webrtc/webrtcvideoframe.h" + +using cricket::CaptureState; +using cricket::VideoCapturer; +using cricket::VideoFormat; +using cricket::VideoFormatPod; +using cricket::VideoFrame; + +static const int kMaxWaitMs = 1000; +static const VideoFormatPod kTestFormat = + {640, 480, FPS_TO_INTERVAL(30), cricket::FOURCC_ANY}; + +class RemoteVideoCapturerTest : public testing::Test, + public sigslot::has_slots<> { + protected: + RemoteVideoCapturerTest() + : captured_frame_num_(0), + capture_state_(cricket::CS_STOPPED) {} + + virtual void SetUp() { + capturer_.SignalStateChange.connect( + this, &RemoteVideoCapturerTest::OnStateChange); + capturer_.SignalVideoFrame.connect( + this, &RemoteVideoCapturerTest::OnVideoFrame); + } + + ~RemoteVideoCapturerTest() { + capturer_.SignalStateChange.disconnect(this); + capturer_.SignalVideoFrame.disconnect(this); + } + + int captured_frame_num() const { + return captured_frame_num_; + } + + CaptureState capture_state() const { + return capture_state_; + } + + webrtc::RemoteVideoCapturer capturer_; + + private: + void OnStateChange(VideoCapturer* capturer, + CaptureState capture_state) { + EXPECT_EQ(&capturer_, capturer); + capture_state_ = capture_state; + } + + void OnVideoFrame(VideoCapturer* capturer, const VideoFrame* frame) { + EXPECT_EQ(&capturer_, capturer); + ++captured_frame_num_; + } + + int captured_frame_num_; + CaptureState capture_state_; +}; + +TEST_F(RemoteVideoCapturerTest, StartStop) { + // Start + EXPECT_TRUE( + capturer_.StartCapturing(VideoFormat(kTestFormat))); + EXPECT_TRUE_WAIT((cricket::CS_RUNNING == capture_state()), kMaxWaitMs); + EXPECT_EQ(VideoFormat(kTestFormat), + *capturer_.GetCaptureFormat()); + EXPECT_TRUE(capturer_.IsRunning()); + + // Stop + capturer_.Stop(); + EXPECT_TRUE_WAIT((cricket::CS_STOPPED == capture_state()), kMaxWaitMs); + EXPECT_TRUE(NULL == capturer_.GetCaptureFormat()); +} + +TEST_F(RemoteVideoCapturerTest, GetPreferredFourccs) { + EXPECT_FALSE(capturer_.GetPreferredFourccs(NULL)); + + std::vector<uint32> fourccs; + EXPECT_TRUE(capturer_.GetPreferredFourccs(&fourccs)); + EXPECT_EQ(1u, fourccs.size()); + EXPECT_EQ(cricket::FOURCC_I420, fourccs.at(0)); +} + +TEST_F(RemoteVideoCapturerTest, GetBestCaptureFormat) { + VideoFormat desired = VideoFormat(kTestFormat); + EXPECT_FALSE(capturer_.GetBestCaptureFormat(desired, NULL)); + + VideoFormat expected_format = VideoFormat(kTestFormat); + expected_format.fourcc = cricket::FOURCC_I420; + VideoFormat best_format; + 
EXPECT_TRUE(capturer_.GetBestCaptureFormat(desired, &best_format)); + EXPECT_EQ(expected_format, best_format); +} + +TEST_F(RemoteVideoCapturerTest, InputFrame) { + EXPECT_EQ(0, captured_frame_num()); + + cricket::WebRtcVideoFrame test_frame; + capturer_.SignalVideoFrame(&capturer_, &test_frame); + EXPECT_EQ(1, captured_frame_num()); + capturer_.SignalVideoFrame(&capturer_, &test_frame); + EXPECT_EQ(2, captured_frame_num()); +} diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/statscollector.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/statscollector.cc index b994f2faad7..06c4b44b082 100644 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/statscollector.cc +++ b/chromium/third_party/libjingle/source/talk/app/webrtc/statscollector.cc @@ -94,6 +94,7 @@ const char StatsReport::kStatsValueNameRemoteAddress[] = "googRemoteAddress"; const char StatsReport::kStatsValueNameRetransmitBitrate[] = "googRetransmitBitrate"; const char StatsReport::kStatsValueNameRtt[] = "googRtt"; +const char StatsReport::kStatsValueNameSsrc[] = "ssrc"; const char StatsReport::kStatsValueNameTargetEncBitrate[] = "googTargetEncBitrate"; const char StatsReport::kStatsValueNameTransmitBitrate[] = @@ -101,7 +102,8 @@ const char StatsReport::kStatsValueNameTransmitBitrate[] = const char StatsReport::kStatsValueNameTransportId[] = "transportId"; const char StatsReport::kStatsValueNameTransportType[] = "googTransportType"; const char StatsReport::kStatsValueNameTrackId[] = "googTrackId"; -const char StatsReport::kStatsValueNameSsrc[] = "ssrc"; +const char StatsReport::kStatsValueNameTypingNoiseState[] = + "googTypingNoiseState"; const char StatsReport::kStatsValueNameWritable[] = "googWritable"; const char StatsReport::kStatsReportTypeSession[] = "googLibjingleSession"; @@ -115,6 +117,7 @@ const char StatsReport::kStatsReportTypeCandidatePair[] = "googCandidatePair"; const char StatsReport::kStatsReportVideoBweId[] = "bweforvideo"; + // Implementations of functions in statstypes.h void StatsReport::AddValue(const std::string& name, const std::string& value) { Value temp; @@ -200,6 +203,8 @@ void ExtractStats(const cricket::VoiceSenderInfo& info, StatsReport* report) { report->AddValue(StatsReport::kStatsValueNameEchoReturnLossEnhancement, info.echo_return_loss_enhancement); report->AddValue(StatsReport::kStatsValueNameCodecName, info.codec_name); + report->AddBoolean(StatsReport::kStatsValueNameTypingNoiseState, + info.typing_noise_detected); } void ExtractStats(const cricket::VideoReceiverInfo& info, StatsReport* report) { @@ -398,8 +403,8 @@ StatsReport* StatsCollector::PrepareReport(uint32 ssrc, std::string track_id; if (it == reports_.end()) { if (!session()->GetTrackIdBySsrc(ssrc, &track_id)) { - LOG(LS_ERROR) << "The SSRC " << ssrc - << " is not associated with a track"; + LOG(LS_WARNING) << "The SSRC " << ssrc + << " is not associated with a track"; return NULL; } } else { diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/statstypes.h b/chromium/third_party/libjingle/source/talk/app/webrtc/statstypes.h index 62f8781612a..30a8b84165c 100644 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/statstypes.h +++ b/chromium/third_party/libjingle/source/talk/app/webrtc/statstypes.h @@ -151,6 +151,7 @@ class StatsReport { static const char kStatsValueNameChannelId[]; static const char kStatsValueNameTrackId[]; static const char kStatsValueNameSsrc[]; + static const char kStatsValueNameTypingNoiseState[]; }; typedef std::vector<StatsReport> StatsReports; diff 
--git a/chromium/third_party/libjingle/source/talk/app/webrtc/localvideosource.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/videosource.cc index 2d43885fccf..f0182f4fb18 100644 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/localvideosource.cc +++ b/chromium/third_party/libjingle/source/talk/app/webrtc/videosource.cc @@ -25,7 +25,7 @@ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ -#include "talk/app/webrtc/localvideosource.h" +#include "talk/app/webrtc/videosource.h" #include <vector> @@ -54,6 +54,9 @@ const char MediaConstraintsInterface::kNoiseReduction[] = "googNoiseReduction"; const char MediaConstraintsInterface::kLeakyBucket[] = "googLeakyBucket"; const char MediaConstraintsInterface::kTemporalLayeredScreencast[] = "googTemporalLayeredScreencast"; +// TODO(ronghuawu): Remove once cpu overuse detection is stable. +const char MediaConstraintsInterface::kCpuOveruseDetection[] = + "googCpuOveruseDetection"; } // namespace webrtc @@ -69,7 +72,7 @@ enum { // Default resolution. If no constraint is specified, this is the resolution we // will use. -static const cricket::VideoFormatPod kDefaultResolution = +static const cricket::VideoFormatPod kDefaultFormat = {640, 480, FPS_TO_INTERVAL(30), cricket::FOURCC_ANY}; // List of formats used if the camera doesn't support capability enumeration. @@ -202,7 +205,9 @@ bool NewFormatWithConstraints( } else if (constraint.key == MediaConstraintsInterface::kNoiseReduction || constraint.key == MediaConstraintsInterface::kLeakyBucket || constraint.key == - MediaConstraintsInterface::kTemporalLayeredScreencast) { + MediaConstraintsInterface::kTemporalLayeredScreencast || + constraint.key == + MediaConstraintsInterface::kCpuOveruseDetection) { // These are actually options, not constraints, so they can be satisfied // regardless of the format. return true; @@ -271,7 +276,7 @@ const cricket::VideoFormat& GetBestCaptureFormat( const std::vector<cricket::VideoFormat>& formats) { ASSERT(formats.size() > 0); - int default_area = kDefaultResolution.width * kDefaultResolution.height; + int default_area = kDefaultFormat.width * kDefaultFormat.height; std::vector<cricket::VideoFormat>::const_iterator it = formats.begin(); std::vector<cricket::VideoFormat>::const_iterator best_it = formats.begin(); @@ -316,42 +321,79 @@ bool ExtractVideoOptions(const MediaConstraintsInterface* all_constraints, all_valid &= ExtractOption(all_constraints, MediaConstraintsInterface::kTemporalLayeredScreencast, &(options->video_temporal_layer_screencast)); + all_valid &= ExtractOption(all_constraints, + MediaConstraintsInterface::kCpuOveruseDetection, + &(options->cpu_overuse_detection)); return all_valid; } +class FrameInputWrapper : public cricket::VideoRenderer { + public: + explicit FrameInputWrapper(cricket::VideoCapturer* capturer) + : capturer_(capturer) { + ASSERT(capturer_ != NULL); + } + + virtual ~FrameInputWrapper() {} + + // VideoRenderer implementation. + virtual bool SetSize(int width, int height, int reserved) OVERRIDE { + return true; + } + + virtual bool RenderFrame(const cricket::VideoFrame* frame) OVERRIDE { + if (!capturer_->IsRunning()) { + return true; + } + + // This signal will be made on media engine render thread. The clients + // of this signal should have no assumptions on what thread this signal + // come from. 
+ capturer_->SignalVideoFrame(capturer_, frame); + return true; + } + + private: + cricket::VideoCapturer* capturer_; + int width_; + int height_; + + DISALLOW_COPY_AND_ASSIGN(FrameInputWrapper); +}; + } // anonymous namespace namespace webrtc { -talk_base::scoped_refptr<LocalVideoSource> LocalVideoSource::Create( +talk_base::scoped_refptr<VideoSource> VideoSource::Create( cricket::ChannelManager* channel_manager, cricket::VideoCapturer* capturer, const webrtc::MediaConstraintsInterface* constraints) { ASSERT(channel_manager != NULL); ASSERT(capturer != NULL); - talk_base::scoped_refptr<LocalVideoSource> source( - new talk_base::RefCountedObject<LocalVideoSource>(channel_manager, - capturer)); + talk_base::scoped_refptr<VideoSource> source( + new talk_base::RefCountedObject<VideoSource>(channel_manager, + capturer)); source->Initialize(constraints); return source; } -LocalVideoSource::LocalVideoSource(cricket::ChannelManager* channel_manager, - cricket::VideoCapturer* capturer) +VideoSource::VideoSource(cricket::ChannelManager* channel_manager, + cricket::VideoCapturer* capturer) : channel_manager_(channel_manager), video_capturer_(capturer), state_(kInitializing) { channel_manager_->SignalVideoCaptureStateChange.connect( - this, &LocalVideoSource::OnStateChange); + this, &VideoSource::OnStateChange); } -LocalVideoSource::~LocalVideoSource() { +VideoSource::~VideoSource() { channel_manager_->StopVideoCapture(video_capturer_.get(), format_); channel_manager_->SignalVideoCaptureStateChange.disconnect(this); } -void LocalVideoSource::Initialize( +void VideoSource::Initialize( const webrtc::MediaConstraintsInterface* constraints) { std::vector<cricket::VideoFormat> formats; @@ -363,7 +405,7 @@ void LocalVideoSource::Initialize( // format from the constraints if any. // Note that this only affects tab capturing, not desktop capturing, // since desktop capturer does not respect the VideoFormat passed in. - formats.push_back(cricket::VideoFormat(kDefaultResolution)); + formats.push_back(cricket::VideoFormat(kDefaultFormat)); } else { // The VideoCapturer implementation doesn't support capability enumeration. // We need to guess what the camera support. @@ -414,25 +456,34 @@ void LocalVideoSource::Initialize( // Initialize hasn't succeeded until a successful state change has occurred. } -void LocalVideoSource::AddSink(cricket::VideoRenderer* output) { +cricket::VideoRenderer* VideoSource::FrameInput() { + // Defer creation of frame_input_ until it's needed, e.g. the local video + // sources will never need it. + if (!frame_input_) { + frame_input_.reset(new FrameInputWrapper(video_capturer_.get())); + } + return frame_input_.get(); +} + +void VideoSource::AddSink(cricket::VideoRenderer* output) { channel_manager_->AddVideoRenderer(video_capturer_.get(), output); } -void LocalVideoSource::RemoveSink(cricket::VideoRenderer* output) { +void VideoSource::RemoveSink(cricket::VideoRenderer* output) { channel_manager_->RemoveVideoRenderer(video_capturer_.get(), output); } // OnStateChange listens to the ChannelManager::SignalVideoCaptureStateChange. // This signal is triggered for all video capturers. Not only the one we are // interested in. 
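Taken together, FrameInput() and FrameInputWrapper above let a remote stream be driven like a capturer: frames rendered into the source's FrameInput() are re-emitted through the capturer's SignalVideoFrame and reach every sink added to the source. A minimal sketch of that wiring, assuming a cricket::ChannelManager* |channel_manager| and a decoded const cricket::VideoFrame* |decoded_frame| are provided by the caller (the StartStopRemote test later in this patch exercises the same path):

    #include "talk/app/webrtc/remotevideocapturer.h"
    #include "talk/app/webrtc/videosource.h"
    #include "talk/media/base/fakevideorenderer.h"

    // Sketch only: |channel_manager| and |decoded_frame| are assumed to exist;
    // frames are forwarded only once the RemoteVideoCapturer has been started.
    void FeedRemoteFrame(cricket::ChannelManager* channel_manager,
                         const cricket::VideoFrame* decoded_frame) {
      talk_base::scoped_refptr<webrtc::VideoSource> source =
          webrtc::VideoSource::Create(channel_manager,
                                      new webrtc::RemoteVideoCapturer(),
                                      NULL);  // no constraints for a remote source
      cricket::FakeVideoRenderer renderer;
      source->AddSink(&renderer);              // receives the frame pushed below
      cricket::VideoRenderer* input = source->FrameInput();
      input->SetSize(640, 360, 0);             // FrameInputWrapper::SetSize is a no-op
      input->RenderFrame(decoded_frame);       // forwarded via SignalVideoFrame to the sink
    }
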
-void LocalVideoSource::OnStateChange(cricket::VideoCapturer* capturer, +void VideoSource::OnStateChange(cricket::VideoCapturer* capturer, cricket::CaptureState capture_state) { if (capturer == video_capturer_.get()) { SetState(GetReadyState(capture_state)); } } -void LocalVideoSource::SetState(SourceState new_state) { +void VideoSource::SetState(SourceState new_state) { if (VERIFY(state_ != new_state)) { state_ = new_state; FireOnChanged(); diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/localvideosource.h b/chromium/third_party/libjingle/source/talk/app/webrtc/videosource.h index 0a3bac091c7..f58b479c284 100644 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/localvideosource.h +++ b/chromium/third_party/libjingle/source/talk/app/webrtc/videosource.h @@ -25,18 +25,19 @@ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ -#ifndef TALK_APP_WEBRTC_LOCALVIDEOSOURCE_H_ -#define TALK_APP_WEBRTC_LOCALVIDEOSOURCE_H_ +#ifndef TALK_APP_WEBRTC_VIDEOSOURCE_H_ +#define TALK_APP_WEBRTC_VIDEOSOURCE_H_ #include "talk/app/webrtc/mediastreaminterface.h" #include "talk/app/webrtc/notifier.h" #include "talk/app/webrtc/videosourceinterface.h" +#include "talk/app/webrtc/videotrackrenderers.h" #include "talk/base/scoped_ptr.h" #include "talk/base/sigslot.h" #include "talk/media/base/videocapturer.h" #include "talk/media/base/videocommon.h" -// LocalVideoSource implements VideoSourceInterface. It owns a +// VideoSource implements VideoSourceInterface. It owns a // cricket::VideoCapturer and make sure the camera is started at a resolution // that honors the constraints. // The state is set depending on the result of starting the capturer. @@ -53,20 +54,21 @@ namespace webrtc { class MediaConstraintsInterface; -class LocalVideoSource : public Notifier<VideoSourceInterface>, - public sigslot::has_slots<> { +class VideoSource : public Notifier<VideoSourceInterface>, + public sigslot::has_slots<> { public: - // Creates an instance of LocalVideoSource. - // LocalVideoSource take ownership of |capturer|. + // Creates an instance of VideoSource. + // VideoSource take ownership of |capturer|. // |constraints| can be NULL and in that case the camera is opened using a // default resolution. 
- static talk_base::scoped_refptr<LocalVideoSource> Create( + static talk_base::scoped_refptr<VideoSource> Create( cricket::ChannelManager* channel_manager, cricket::VideoCapturer* capturer, const webrtc::MediaConstraintsInterface* constraints); virtual SourceState state() const { return state_; } virtual const cricket::VideoOptions* options() const { return &options_; } + virtual cricket::VideoRenderer* FrameInput(); virtual cricket::VideoCapturer* GetVideoCapturer() { return video_capturer_.get(); @@ -77,18 +79,19 @@ class LocalVideoSource : public Notifier<VideoSourceInterface>, virtual void RemoveSink(cricket::VideoRenderer* output); protected: - LocalVideoSource(cricket::ChannelManager* channel_manager, - cricket::VideoCapturer* capturer); - ~LocalVideoSource(); + VideoSource(cricket::ChannelManager* channel_manager, + cricket::VideoCapturer* capturer); + virtual ~VideoSource(); + void Initialize(const webrtc::MediaConstraintsInterface* constraints); private: - void Initialize(const webrtc::MediaConstraintsInterface* constraints); void OnStateChange(cricket::VideoCapturer* capturer, cricket::CaptureState capture_state); void SetState(SourceState new_state); cricket::ChannelManager* channel_manager_; talk_base::scoped_ptr<cricket::VideoCapturer> video_capturer_; + talk_base::scoped_ptr<cricket::VideoRenderer> frame_input_; cricket::VideoFormat format_; cricket::VideoOptions options_; @@ -97,4 +100,4 @@ class LocalVideoSource : public Notifier<VideoSourceInterface>, } // namespace webrtc -#endif // TALK_APP_WEBRTC_LOCALVIDEOSOURCE_H_ +#endif // TALK_APP_WEBRTC_VIDEOSOURCE_H_ diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/localvideosource_unittest.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/videosource_unittest.cc index 24a858886a6..69e9b3f0f74 100644 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/localvideosource_unittest.cc +++ b/chromium/third_party/libjingle/source/talk/app/webrtc/videosource_unittest.cc @@ -25,20 +25,21 @@ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ -#include "talk/app/webrtc/localvideosource.h" - #include <string> #include <vector> #include "talk/app/webrtc/test/fakeconstraints.h" +#include "talk/app/webrtc/remotevideocapturer.h" +#include "talk/app/webrtc/videosource.h" #include "talk/base/gunit.h" #include "talk/media/base/fakemediaengine.h" #include "talk/media/base/fakevideorenderer.h" #include "talk/media/devices/fakedevicemanager.h" +#include "talk/media/webrtc/webrtcvideoframe.h" #include "talk/session/media/channelmanager.h" using webrtc::FakeConstraints; -using webrtc::LocalVideoSource; +using webrtc::VideoSource; using webrtc::MediaConstraintsInterface; using webrtc::MediaSourceInterface; using webrtc::ObserverInterface; @@ -123,52 +124,54 @@ class StateObserver : public ObserverInterface { talk_base::scoped_refptr<VideoSourceInterface> source_; }; -class LocalVideoSourceTest : public testing::Test { +class VideoSourceTest : public testing::Test { protected: - LocalVideoSourceTest() - : channel_manager_(new cricket::ChannelManager( + VideoSourceTest() + : capturer_cleanup_(new TestVideoCapturer()), + capturer_(capturer_cleanup_.get()), + channel_manager_(new cricket::ChannelManager( new cricket::FakeMediaEngine(), new cricket::FakeDeviceManager(), talk_base::Thread::Current())) { } void SetUp() { ASSERT_TRUE(channel_manager_->Init()); - capturer_ = new TestVideoCapturer(); } - void CreateLocalVideoSource() { - CreateLocalVideoSource(NULL); + void CreateVideoSource() { + CreateVideoSource(NULL); } - void CreateLocalVideoSource( + void CreateVideoSource( const webrtc::MediaConstraintsInterface* constraints) { // VideoSource take ownership of |capturer_| - local_source_ = LocalVideoSource::Create(channel_manager_.get(), - capturer_, - constraints); + source_ = VideoSource::Create(channel_manager_.get(), + capturer_cleanup_.release(), + constraints); - ASSERT_TRUE(local_source_.get() != NULL); - EXPECT_EQ(capturer_, local_source_->GetVideoCapturer()); + ASSERT_TRUE(source_.get() != NULL); + EXPECT_EQ(capturer_, source_->GetVideoCapturer()); - state_observer_.reset(new StateObserver(local_source_)); - local_source_->RegisterObserver(state_observer_.get()); - local_source_->AddSink(&renderer_); + state_observer_.reset(new StateObserver(source_)); + source_->RegisterObserver(state_observer_.get()); + source_->AddSink(&renderer_); } - TestVideoCapturer* capturer_; // Raw pointer. Owned by local_source_. + talk_base::scoped_ptr<TestVideoCapturer> capturer_cleanup_; + TestVideoCapturer* capturer_; cricket::FakeVideoRenderer renderer_; talk_base::scoped_ptr<cricket::ChannelManager> channel_manager_; talk_base::scoped_ptr<StateObserver> state_observer_; - talk_base::scoped_refptr<LocalVideoSource> local_source_; + talk_base::scoped_refptr<VideoSource> source_; }; -// Test that a LocalVideoSource transition to kLive state when the capture +// Test that a VideoSource transition to kLive state when the capture // device have started and kEnded if it is stopped. // It also test that an output can receive video frames. -TEST_F(LocalVideoSourceTest, StartStop) { +TEST_F(VideoSourceTest, StartStop) { // Initialize without constraints. 
- CreateLocalVideoSource(); + CreateVideoSource(); EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(), kMaxWaitMs); @@ -180,10 +183,38 @@ TEST_F(LocalVideoSourceTest, StartStop) { kMaxWaitMs); } -// Test that a LocalVideoSource transition to kEnded if the capture device +// Test start stop with a remote VideoSource - the video source that has a +// RemoteVideoCapturer and takes video frames from FrameInput. +TEST_F(VideoSourceTest, StartStopRemote) { + source_ = VideoSource::Create(channel_manager_.get(), + new webrtc::RemoteVideoCapturer(), + NULL); + + ASSERT_TRUE(source_.get() != NULL); + EXPECT_TRUE(NULL != source_->GetVideoCapturer()); + + state_observer_.reset(new StateObserver(source_)); + source_->RegisterObserver(state_observer_.get()); + source_->AddSink(&renderer_); + + EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(), + kMaxWaitMs); + + cricket::VideoRenderer* frameinput = source_->FrameInput(); + cricket::WebRtcVideoFrame test_frame; + frameinput->SetSize(1280, 720, 0); + frameinput->RenderFrame(&test_frame); + EXPECT_EQ(1, renderer_.num_rendered_frames()); + + source_->GetVideoCapturer()->Stop(); + EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(), + kMaxWaitMs); +} + +// Test that a VideoSource transition to kEnded if the capture device // fails. -TEST_F(LocalVideoSourceTest, CameraFailed) { - CreateLocalVideoSource(); +TEST_F(VideoSourceTest, CameraFailed) { + CreateVideoSource(); EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(), kMaxWaitMs); @@ -194,13 +225,13 @@ TEST_F(LocalVideoSourceTest, CameraFailed) { // Test that the capture output is CIF if we set max constraints to CIF. // and the capture device support CIF. -TEST_F(LocalVideoSourceTest, MandatoryConstraintCif5Fps) { +TEST_F(VideoSourceTest, MandatoryConstraintCif5Fps) { FakeConstraints constraints; constraints.AddMandatory(MediaConstraintsInterface::kMaxWidth, 352); constraints.AddMandatory(MediaConstraintsInterface::kMaxHeight, 288); constraints.AddMandatory(MediaConstraintsInterface::kMaxFrameRate, 5); - CreateLocalVideoSource(&constraints); + CreateVideoSource(&constraints); EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(), kMaxWaitMs); const cricket::VideoFormat* format = capturer_->GetCaptureFormat(); @@ -212,7 +243,7 @@ TEST_F(LocalVideoSourceTest, MandatoryConstraintCif5Fps) { // Test that the capture output is 720P if the camera support it and the // optional constraint is set to 720P. -TEST_F(LocalVideoSourceTest, MandatoryMinVgaOptional720P) { +TEST_F(VideoSourceTest, MandatoryMinVgaOptional720P) { FakeConstraints constraints; constraints.AddMandatory(MediaConstraintsInterface::kMinWidth, 640); constraints.AddMandatory(MediaConstraintsInterface::kMinHeight, 480); @@ -220,7 +251,7 @@ TEST_F(LocalVideoSourceTest, MandatoryMinVgaOptional720P) { constraints.AddOptional(MediaConstraintsInterface::kMinAspectRatio, 1280.0 / 720); - CreateLocalVideoSource(&constraints); + CreateVideoSource(&constraints); EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(), kMaxWaitMs); const cricket::VideoFormat* format = capturer_->GetCaptureFormat(); @@ -233,7 +264,7 @@ TEST_F(LocalVideoSourceTest, MandatoryMinVgaOptional720P) { // Test that the capture output have aspect ratio 4:3 if a mandatory constraint // require it even if an optional constraint request a higher resolution // that don't have this aspect ratio. 
-TEST_F(LocalVideoSourceTest, MandatoryAspectRatio4To3) { +TEST_F(VideoSourceTest, MandatoryAspectRatio4To3) { FakeConstraints constraints; constraints.AddMandatory(MediaConstraintsInterface::kMinWidth, 640); constraints.AddMandatory(MediaConstraintsInterface::kMinHeight, 480); @@ -241,7 +272,7 @@ TEST_F(LocalVideoSourceTest, MandatoryAspectRatio4To3) { 640.0 / 480); constraints.AddOptional(MediaConstraintsInterface::kMinWidth, 1280); - CreateLocalVideoSource(&constraints); + CreateVideoSource(&constraints); EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(), kMaxWaitMs); const cricket::VideoFormat* format = capturer_->GetCaptureFormat(); @@ -254,20 +285,20 @@ TEST_F(LocalVideoSourceTest, MandatoryAspectRatio4To3) { // Test that the source state transition to kEnded if the mandatory aspect ratio // is set higher than supported. -TEST_F(LocalVideoSourceTest, MandatoryAspectRatioTooHigh) { +TEST_F(VideoSourceTest, MandatoryAspectRatioTooHigh) { FakeConstraints constraints; constraints.AddMandatory(MediaConstraintsInterface::kMinAspectRatio, 2); - CreateLocalVideoSource(&constraints); + CreateVideoSource(&constraints); EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(), kMaxWaitMs); } // Test that the source ignores an optional aspect ratio that is higher than // supported. -TEST_F(LocalVideoSourceTest, OptionalAspectRatioTooHigh) { +TEST_F(VideoSourceTest, OptionalAspectRatioTooHigh) { FakeConstraints constraints; constraints.AddOptional(MediaConstraintsInterface::kMinAspectRatio, 2); - CreateLocalVideoSource(&constraints); + CreateVideoSource(&constraints); EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(), kMaxWaitMs); const cricket::VideoFormat* format = capturer_->GetCaptureFormat(); @@ -278,10 +309,10 @@ TEST_F(LocalVideoSourceTest, OptionalAspectRatioTooHigh) { // Test that the source starts video with the default resolution if the // camera doesn't support capability enumeration and there are no constraints. -TEST_F(LocalVideoSourceTest, NoCameraCapability) { +TEST_F(VideoSourceTest, NoCameraCapability) { capturer_->TestWithoutCameraFormats(); - CreateLocalVideoSource(); + CreateVideoSource(); EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(), kMaxWaitMs); const cricket::VideoFormat* format = capturer_->GetCaptureFormat(); @@ -294,7 +325,7 @@ TEST_F(LocalVideoSourceTest, NoCameraCapability) { // Test that the source can start the video and get the requested aspect ratio // if the camera doesn't support capability enumeration and the aspect ratio is // set. -TEST_F(LocalVideoSourceTest, NoCameraCapability16To9Ratio) { +TEST_F(VideoSourceTest, NoCameraCapability16To9Ratio) { capturer_->TestWithoutCameraFormats(); FakeConstraints constraints; @@ -303,7 +334,7 @@ TEST_F(LocalVideoSourceTest, NoCameraCapability16To9Ratio) { constraints.AddMandatory(MediaConstraintsInterface::kMinAspectRatio, requested_aspect_ratio); - CreateLocalVideoSource(&constraints); + CreateVideoSource(&constraints); EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(), kMaxWaitMs); const cricket::VideoFormat* format = capturer_->GetCaptureFormat(); @@ -313,114 +344,119 @@ TEST_F(LocalVideoSourceTest, NoCameraCapability16To9Ratio) { // Test that the source state transitions to kEnded if an unknown mandatory // constraint is found. 
-TEST_F(LocalVideoSourceTest, InvalidMandatoryConstraint) { +TEST_F(VideoSourceTest, InvalidMandatoryConstraint) { FakeConstraints constraints; constraints.AddMandatory("weird key", 640); - CreateLocalVideoSource(&constraints); + CreateVideoSource(&constraints); EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(), kMaxWaitMs); } // Test that the source ignores an unknown optional constraint. -TEST_F(LocalVideoSourceTest, InvalidOptionalConstraint) { +TEST_F(VideoSourceTest, InvalidOptionalConstraint) { FakeConstraints constraints; constraints.AddOptional("weird key", 640); - CreateLocalVideoSource(&constraints); + CreateVideoSource(&constraints); EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(), kMaxWaitMs); } -TEST_F(LocalVideoSourceTest, SetValidOptionValues) { +TEST_F(VideoSourceTest, SetValidOptionValues) { FakeConstraints constraints; constraints.AddMandatory(MediaConstraintsInterface::kNoiseReduction, "false"); constraints.AddMandatory( MediaConstraintsInterface::kTemporalLayeredScreencast, "false"); constraints.AddOptional( MediaConstraintsInterface::kLeakyBucket, "true"); + constraints.AddOptional( + MediaConstraintsInterface::kCpuOveruseDetection, "true"); - CreateLocalVideoSource(&constraints); + CreateVideoSource(&constraints); bool value = true; - EXPECT_TRUE(local_source_->options()->video_noise_reduction.Get(&value)); + EXPECT_TRUE(source_->options()->video_noise_reduction.Get(&value)); EXPECT_FALSE(value); - EXPECT_TRUE(local_source_->options()-> + EXPECT_TRUE(source_->options()-> video_temporal_layer_screencast.Get(&value)); EXPECT_FALSE(value); - EXPECT_TRUE(local_source_->options()->video_leaky_bucket.Get(&value)); + EXPECT_TRUE(source_->options()->video_leaky_bucket.Get(&value)); EXPECT_TRUE(value); + EXPECT_TRUE(source_->options()-> + cpu_overuse_detection.GetWithDefaultIfUnset(false)); } -TEST_F(LocalVideoSourceTest, OptionNotSet) { +TEST_F(VideoSourceTest, OptionNotSet) { FakeConstraints constraints; - CreateLocalVideoSource(&constraints); + CreateVideoSource(&constraints); bool value; - EXPECT_FALSE(local_source_->options()->video_noise_reduction.Get(&value)); + EXPECT_FALSE(source_->options()->video_noise_reduction.Get(&value)); + EXPECT_FALSE(source_->options()->cpu_overuse_detection.Get(&value)); } -TEST_F(LocalVideoSourceTest, MandatoryOptionOverridesOptional) { +TEST_F(VideoSourceTest, MandatoryOptionOverridesOptional) { FakeConstraints constraints; constraints.AddMandatory( MediaConstraintsInterface::kNoiseReduction, true); constraints.AddOptional( MediaConstraintsInterface::kNoiseReduction, false); - CreateLocalVideoSource(&constraints); + CreateVideoSource(&constraints); bool value = false; - EXPECT_TRUE(local_source_->options()->video_noise_reduction.Get(&value)); + EXPECT_TRUE(source_->options()->video_noise_reduction.Get(&value)); EXPECT_TRUE(value); - EXPECT_FALSE(local_source_->options()->video_leaky_bucket.Get(&value)); + EXPECT_FALSE(source_->options()->video_leaky_bucket.Get(&value)); } -TEST_F(LocalVideoSourceTest, InvalidOptionKeyOptional) { +TEST_F(VideoSourceTest, InvalidOptionKeyOptional) { FakeConstraints constraints; constraints.AddOptional( MediaConstraintsInterface::kNoiseReduction, false); constraints.AddOptional("invalidKey", false); - CreateLocalVideoSource(&constraints); + CreateVideoSource(&constraints); EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(), kMaxWaitMs); bool value = true; - EXPECT_TRUE(local_source_->options()->video_noise_reduction.Get(&value)); + 
EXPECT_TRUE(source_->options()->video_noise_reduction.Get(&value)); EXPECT_FALSE(value); } -TEST_F(LocalVideoSourceTest, InvalidOptionKeyMandatory) { +TEST_F(VideoSourceTest, InvalidOptionKeyMandatory) { FakeConstraints constraints; constraints.AddMandatory( MediaConstraintsInterface::kNoiseReduction, false); constraints.AddMandatory("invalidKey", false); - CreateLocalVideoSource(&constraints); + CreateVideoSource(&constraints); EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(), kMaxWaitMs); bool value; - EXPECT_FALSE(local_source_->options()->video_noise_reduction.Get(&value)); + EXPECT_FALSE(source_->options()->video_noise_reduction.Get(&value)); } -TEST_F(LocalVideoSourceTest, InvalidOptionValueOptional) { +TEST_F(VideoSourceTest, InvalidOptionValueOptional) { FakeConstraints constraints; constraints.AddOptional( MediaConstraintsInterface::kNoiseReduction, "true"); constraints.AddOptional( MediaConstraintsInterface::kLeakyBucket, "not boolean"); - CreateLocalVideoSource(&constraints); + CreateVideoSource(&constraints); EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(), kMaxWaitMs); bool value = false; - EXPECT_TRUE(local_source_->options()->video_noise_reduction.Get(&value)); + EXPECT_TRUE(source_->options()->video_noise_reduction.Get(&value)); EXPECT_TRUE(value); - EXPECT_FALSE(local_source_->options()->video_leaky_bucket.Get(&value)); + EXPECT_FALSE(source_->options()->video_leaky_bucket.Get(&value)); } -TEST_F(LocalVideoSourceTest, InvalidOptionValueMandatory) { +TEST_F(VideoSourceTest, InvalidOptionValueMandatory) { FakeConstraints constraints; // Optional constraints should be ignored if the mandatory constraints fail. constraints.AddOptional( @@ -429,15 +465,15 @@ TEST_F(LocalVideoSourceTest, InvalidOptionValueMandatory) { constraints.AddMandatory( MediaConstraintsInterface::kLeakyBucket, "True"); - CreateLocalVideoSource(&constraints); + CreateVideoSource(&constraints); EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(), kMaxWaitMs); bool value; - EXPECT_FALSE(local_source_->options()->video_noise_reduction.Get(&value)); + EXPECT_FALSE(source_->options()->video_noise_reduction.Get(&value)); } -TEST_F(LocalVideoSourceTest, MixedOptionsAndConstraints) { +TEST_F(VideoSourceTest, MixedOptionsAndConstraints) { FakeConstraints constraints; constraints.AddMandatory(MediaConstraintsInterface::kMaxWidth, 352); constraints.AddMandatory(MediaConstraintsInterface::kMaxHeight, 288); @@ -448,7 +484,7 @@ TEST_F(LocalVideoSourceTest, MixedOptionsAndConstraints) { constraints.AddOptional( MediaConstraintsInterface::kNoiseReduction, true); - CreateLocalVideoSource(&constraints); + CreateVideoSource(&constraints); EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(), kMaxWaitMs); const cricket::VideoFormat* format = capturer_->GetCaptureFormat(); @@ -458,18 +494,18 @@ TEST_F(LocalVideoSourceTest, MixedOptionsAndConstraints) { EXPECT_EQ(5, format->framerate()); bool value = true; - EXPECT_TRUE(local_source_->options()->video_noise_reduction.Get(&value)); + EXPECT_TRUE(source_->options()->video_noise_reduction.Get(&value)); EXPECT_FALSE(value); - EXPECT_FALSE(local_source_->options()->video_leaky_bucket.Get(&value)); + EXPECT_FALSE(source_->options()->video_leaky_bucket.Get(&value)); } // Tests that the source starts video with the default resolution for // screencast if no constraint is set. 
-TEST_F(LocalVideoSourceTest, ScreencastResolutionNoConstraint) { +TEST_F(VideoSourceTest, ScreencastResolutionNoConstraint) { capturer_->TestWithoutCameraFormats(); capturer_->SetScreencast(true); - CreateLocalVideoSource(); + CreateVideoSource(); EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(), kMaxWaitMs); const cricket::VideoFormat* format = capturer_->GetCaptureFormat(); @@ -481,7 +517,7 @@ TEST_F(LocalVideoSourceTest, ScreencastResolutionNoConstraint) { // Tests that the source starts video with the max width and height set by // constraints for screencast. -TEST_F(LocalVideoSourceTest, ScreencastResolutionWithConstraint) { +TEST_F(VideoSourceTest, ScreencastResolutionWithConstraint) { FakeConstraints constraints; constraints.AddMandatory(MediaConstraintsInterface::kMaxWidth, 480); constraints.AddMandatory(MediaConstraintsInterface::kMaxHeight, 270); @@ -489,7 +525,7 @@ TEST_F(LocalVideoSourceTest, ScreencastResolutionWithConstraint) { capturer_->TestWithoutCameraFormats(); capturer_->SetScreencast(true); - CreateLocalVideoSource(&constraints); + CreateVideoSource(&constraints); EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(), kMaxWaitMs); const cricket::VideoFormat* format = capturer_->GetCaptureFormat(); @@ -499,21 +535,21 @@ TEST_F(LocalVideoSourceTest, ScreencastResolutionWithConstraint) { EXPECT_EQ(30, format->framerate()); } -TEST_F(LocalVideoSourceTest, MandatorySubOneFpsConstraints) { +TEST_F(VideoSourceTest, MandatorySubOneFpsConstraints) { FakeConstraints constraints; constraints.AddMandatory(MediaConstraintsInterface::kMaxFrameRate, 0.5); - CreateLocalVideoSource(&constraints); + CreateVideoSource(&constraints); EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(), kMaxWaitMs); ASSERT_TRUE(capturer_->GetCaptureFormat() == NULL); } -TEST_F(LocalVideoSourceTest, OptionalSubOneFpsConstraints) { +TEST_F(VideoSourceTest, OptionalSubOneFpsConstraints) { FakeConstraints constraints; constraints.AddOptional(MediaConstraintsInterface::kMaxFrameRate, 0.5); - CreateLocalVideoSource(&constraints); + CreateVideoSource(&constraints); EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(), kMaxWaitMs); const cricket::VideoFormat* format = capturer_->GetCaptureFormat(); diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/videosourceinterface.h b/chromium/third_party/libjingle/source/talk/app/webrtc/videosourceinterface.h index ae968f728fe..89c90a1ec53 100644 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/videosourceinterface.h +++ b/chromium/third_party/libjingle/source/talk/app/webrtc/videosourceinterface.h @@ -47,6 +47,7 @@ class VideoSourceInterface : public MediaSourceInterface { virtual void AddSink(cricket::VideoRenderer* output) = 0; virtual void RemoveSink(cricket::VideoRenderer* output) = 0; virtual const cricket::VideoOptions* options() const = 0; + virtual cricket::VideoRenderer* FrameInput() = 0; protected: virtual ~VideoSourceInterface() {} diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/videosourceproxy.h b/chromium/third_party/libjingle/source/talk/app/webrtc/videosourceproxy.h index be800777c30..991dd554c63 100644 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/videosourceproxy.h +++ b/chromium/third_party/libjingle/source/talk/app/webrtc/videosourceproxy.h @@ -42,6 +42,7 @@ BEGIN_PROXY_MAP(VideoSource) PROXY_METHOD1(void, AddSink, cricket::VideoRenderer*) PROXY_METHOD1(void, RemoveSink, cricket::VideoRenderer*) PROXY_CONSTMETHOD0(const 
cricket::VideoOptions*, options) + PROXY_METHOD0(cricket::VideoRenderer*, FrameInput) PROXY_METHOD1(void, RegisterObserver, ObserverInterface*) PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*) END_PROXY() diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/videotrack.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/videotrack.cc index ec17ec7e718..7ab7815ba2a 100644 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/videotrack.cc +++ b/chromium/third_party/libjingle/source/talk/app/webrtc/videotrack.cc @@ -39,12 +39,12 @@ VideoTrack::VideoTrack(const std::string& label, : MediaStreamTrack<VideoTrackInterface>(label), video_source_(video_source) { if (video_source_) - video_source_->AddSink(FrameInput()); + video_source_->AddSink(&renderers_); } VideoTrack::~VideoTrack() { if (video_source_) - video_source_->RemoveSink(FrameInput()); + video_source_->RemoveSink(&renderers_); } std::string VideoTrack::kind() const { @@ -59,10 +59,6 @@ void VideoTrack::RemoveRenderer(VideoRendererInterface* renderer) { renderers_.RemoveRenderer(renderer); } -cricket::VideoRenderer* VideoTrack::FrameInput() { - return &renderers_; -} - bool VideoTrack::set_enabled(bool enable) { renderers_.SetEnabled(enable); return MediaStreamTrack<VideoTrackInterface>::set_enabled(enable); diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/videotrack.h b/chromium/third_party/libjingle/source/talk/app/webrtc/videotrack.h index aefeb502cbb..acd1b755c58 100644 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/videotrack.h +++ b/chromium/third_party/libjingle/source/talk/app/webrtc/videotrack.h @@ -44,7 +44,6 @@ class VideoTrack : public MediaStreamTrack<VideoTrackInterface> { virtual void AddRenderer(VideoRendererInterface* renderer); virtual void RemoveRenderer(VideoRendererInterface* renderer); - virtual cricket::VideoRenderer* FrameInput(); virtual VideoSourceInterface* GetSource() const { return video_source_.get(); } diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/videotrack_unittest.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/videotrack_unittest.cc index 671e360f21a..4a30293c7ee 100644 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/videotrack_unittest.cc +++ b/chromium/third_party/libjingle/source/talk/app/webrtc/videotrack_unittest.cc @@ -28,38 +28,53 @@ #include <string> #include "talk/app/webrtc/test/fakevideotrackrenderer.h" +#include "talk/app/webrtc/remotevideocapturer.h" +#include "talk/app/webrtc/videosource.h" #include "talk/app/webrtc/videotrack.h" #include "talk/base/gunit.h" #include "talk/base/scoped_ptr.h" +#include "talk/media/base/fakemediaengine.h" +#include "talk/media/devices/fakedevicemanager.h" #include "talk/media/webrtc/webrtcvideoframe.h" +#include "talk/session/media/channelmanager.h" using webrtc::FakeVideoTrackRenderer; +using webrtc::VideoSource; using webrtc::VideoTrack; using webrtc::VideoTrackInterface; // Test adding renderers to a video track and render to them by providing -// VideoFrames to the track frame input. +// frames to the source. 
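For call sites, the net effect of the VideoTrack and VideoSource changes above is that FrameInput() is no longer reachable through the track itself; it is obtained from the track's source, and the track's renderer fan-out is attached to that source as a sink. A minimal sketch of the resulting call-site change, assuming |video_track| is a webrtc::VideoTrackInterface backed by a VideoSource (the RenderVideo test below exercises the new path):

    // Before this patch (for comparison):
    //   cricket::VideoRenderer* input = video_track->FrameInput();
    // After this patch:
    cricket::VideoRenderer* input = video_track->GetSource()->FrameInput();
    cricket::WebRtcVideoFrame frame;
    frame.InitToBlack(123, 123, 1, 1, 0, 0);
    input->RenderFrame(&frame);  // fans out to every renderer added to the track
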
TEST(VideoTrack, RenderVideo) { static const char kVideoTrackId[] = "track_id"; + + talk_base::scoped_ptr<cricket::ChannelManager> channel_manager_; + channel_manager_.reset( + new cricket::ChannelManager(new cricket::FakeMediaEngine(), + new cricket::FakeDeviceManager(), + talk_base::Thread::Current())); + ASSERT_TRUE(channel_manager_->Init()); talk_base::scoped_refptr<VideoTrackInterface> video_track( - VideoTrack::Create(kVideoTrackId, NULL)); + VideoTrack::Create(kVideoTrackId, + VideoSource::Create(channel_manager_.get(), + new webrtc::RemoteVideoCapturer(), + NULL))); // FakeVideoTrackRenderer register itself to |video_track| talk_base::scoped_ptr<FakeVideoTrackRenderer> renderer_1( new FakeVideoTrackRenderer(video_track.get())); - cricket::VideoRenderer* render_input = video_track->FrameInput(); + cricket::VideoRenderer* render_input = video_track->GetSource()->FrameInput(); ASSERT_FALSE(render_input == NULL); - render_input->SetSize(123, 123, 0); - EXPECT_EQ(1, renderer_1->num_set_sizes()); - EXPECT_EQ(123, renderer_1->width()); - EXPECT_EQ(123, renderer_1->height()); - cricket::WebRtcVideoFrame frame; frame.InitToBlack(123, 123, 1, 1, 0, 0); render_input->RenderFrame(&frame); EXPECT_EQ(1, renderer_1->num_rendered_frames()); + EXPECT_EQ(1, renderer_1->num_set_sizes()); + EXPECT_EQ(123, renderer_1->width()); + EXPECT_EQ(123, renderer_1->height()); + // FakeVideoTrackRenderer register itself to |video_track| talk_base::scoped_ptr<FakeVideoTrackRenderer> renderer_2( new FakeVideoTrackRenderer(video_track.get())); diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsdp.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsdp.cc index 9910a5101be..60c427d16f1 100644 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsdp.cc +++ b/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsdp.cc @@ -42,6 +42,7 @@ #include "talk/media/base/codec.h" #include "talk/media/base/constants.h" #include "talk/media/base/cryptoparams.h" +#include "talk/media/sctp/sctpdataengine.h" #include "talk/p2p/base/candidate.h" #include "talk/p2p/base/constants.h" #include "talk/p2p/base/port.h" @@ -141,6 +142,7 @@ static const char kAttributeCandidateUsername[] = "username"; static const char kAttributeCandidatePassword[] = "password"; static const char kAttributeCandidateGeneration[] = "generation"; static const char kAttributeFingerprint[] = "fingerprint"; +static const char kAttributeSetup[] = "setup"; static const char kAttributeFmtp[] = "fmtp"; static const char kAttributeRtpmap[] = "rtpmap"; static const char kAttributeRtcp[] = "rtcp"; @@ -318,6 +320,9 @@ static bool ParseExtmap(const std::string& line, static bool ParseFingerprintAttribute(const std::string& line, talk_base::SSLFingerprint** fingerprint, SdpParseError* error); +static bool ParseDtlsSetup(const std::string& line, + cricket::ConnectionRole* role, + SdpParseError* error); // Helper functions @@ -902,7 +907,8 @@ bool SdpDeserialize(const std::string& message, SdpParseError* error) { std::string session_id; std::string session_version; - TransportDescription session_td(NS_JINGLE_ICE_UDP, Candidates()); + TransportDescription session_td(NS_JINGLE_ICE_UDP, + std::string(), std::string()); RtpHeaderExtensions session_extmaps; cricket::SessionDescription* desc = new cricket::SessionDescription(); std::vector<JsepIceCandidate*> candidates; @@ -1226,8 +1232,23 @@ void BuildMediaDescription(const ContentInfo* content_info, os << kSdpDelimiterColon << fp->algorithm << kSdpDelimiterSpace << 
fp->GetRfc4572Fingerprint(); - AddLine(os.str(), message); + + // Inserting setup attribute. + if (transport_info->description.connection_role != + cricket::CONNECTIONROLE_NONE) { + // Making sure we are not using "passive" mode. + cricket::ConnectionRole role = + transport_info->description.connection_role; + ASSERT(role == cricket::CONNECTIONROLE_ACTIVE || + role == cricket::CONNECTIONROLE_ACTPASS); + InitAttrLine(kAttributeSetup, &os); + std::string dtls_role_str = role == cricket::CONNECTIONROLE_ACTPASS ? + cricket::CONNECTIONROLE_ACTPASS_STR : + cricket::CONNECTIONROLE_ACTIVE_STR; + os << kSdpDelimiterColon << dtls_role_str; + AddLine(os.str(), message); + } } } @@ -1296,7 +1317,16 @@ void BuildRtpContentAttributes( // RFC 4566 // b=AS:<bandwidth> - if (media_desc->bandwidth() >= 1000) { + // We should always use the default bandwidth for RTP-based data + // channels. Don't allow SDP to set the bandwidth, because that + // would give JS the opportunity to "break the Internet". + // TODO(pthatcher): But we need to temporarily allow the SDP to control + // this for backwards-compatibility. Once we don't need that any + // more, remove this. + bool support_dc_sdp_bandwidth_temporarily = true; + if (media_desc->bandwidth() >= 1000 && + (media_type != cricket::MEDIA_TYPE_DATA || + support_dc_sdp_bandwidth_temporarily)) { InitLine(kLineTypeSessionBandwidth, kApplicationSpecificMaximum, &os); os << kSdpDelimiterColon << (media_desc->bandwidth() / 1000); AddLine(os.str(), message); @@ -1796,6 +1826,10 @@ bool ParseSessionDescription(const std::string& message, size_t* pos, return false; } session_td->identity_fingerprint.reset(fingerprint); + } else if (HasAttribute(line, kAttributeSetup)) { + if (!ParseDtlsSetup(line, &(session_td->connection_role), error)) { + return false; + } } else if (HasAttribute(line, kAttributeMsidSemantics)) { std::string semantics; if (!GetValue(line, kAttributeMsidSemantics, &semantics, error)) { @@ -1876,6 +1910,24 @@ static bool ParseFingerprintAttribute(const std::string& line, return true; } +static bool ParseDtlsSetup(const std::string& line, + cricket::ConnectionRole* role, + SdpParseError* error) { + // setup-attr = "a=setup:" role + // role = "active" / "passive" / "actpass" / "holdconn" + std::vector<std::string> fields; + talk_base::split(line.substr(kLinePrefixLength), kSdpDelimiterColon, &fields); + const size_t expected_fields = 2; + if (fields.size() != expected_fields) { + return ParseFailedExpectFieldNum(line, expected_fields, error); + } + std::string role_str = fields[1]; + if (!cricket::StringToConnectionRole(role_str, role)) { + return ParseFailed(line, "Invalid attribute value.", error); + } + return true; +} + // RFC 3551 // PT encoding media type clock rate channels // name (Hz) @@ -2039,6 +2091,7 @@ bool ParseMediaDescription(const std::string& message, session_td.ice_ufrag, session_td.ice_pwd, session_td.ice_mode, + session_td.connection_role, session_td.identity_fingerprint.get(), Candidates()); @@ -2062,6 +2115,16 @@ bool ParseMediaDescription(const std::string& message, message, cricket::MEDIA_TYPE_DATA, mline_index, protocol, codec_preference, pos, &content_name, &transport, candidates, error)); + // We should always use the default bandwidth for RTP-based data + // channels. Don't allow SDP to set the bandwidth, because that + // would give JS the opportunity to "break the Internet". + // TODO(pthatcher): But we need to temporarily allow the SDP to control + // this for backwards-compatibility. 
Once we don't need that any + // more, remove this. + bool support_dc_sdp_bandwidth_temporarily = true; + if (content.get() && !support_dc_sdp_bandwidth_temporarily) { + content->set_bandwidth(cricket::kAutoBandwidth); + } } else { LOG(LS_WARNING) << "Unsupported media type: " << line; continue; @@ -2378,6 +2441,10 @@ bool ParseContent(const std::string& message, return false; } transport->identity_fingerprint.reset(fingerprint); + } else if (HasAttribute(line, kAttributeSetup)) { + if (!ParseDtlsSetup(line, &(transport->connection_role), error)) { + return false; + } } else if (is_rtp) { // // RTP specific attrubtes diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsdp_unittest.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsdp_unittest.cc index 9e4c66072e5..b1505aae7df 100644 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsdp_unittest.cc +++ b/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsdp_unittest.cc @@ -281,7 +281,7 @@ static const char kSdpSctpDataChannelString[] = "a=ice-ufrag:ufrag_data\r\n" "a=ice-pwd:pwd_data\r\n" "a=mid:data_content_name\r\n" - "a=fmtp:5000 protocol=webrtc-datachannel; streams=10\r\n"; + "a=fmtp:5000 protocol=webrtc-datachannel; streams=65536\r\n"; static const char kSdpSctpDataChannelWithCandidatesString[] = "m=application 2345 DTLS/SCTP 5000\r\n" @@ -296,7 +296,7 @@ static const char kSdpSctpDataChannelWithCandidatesString[] = "a=ice-ufrag:ufrag_data\r\n" "a=ice-pwd:pwd_data\r\n" "a=mid:data_content_name\r\n" - "a=fmtp:5000 protocol=webrtc-datachannel; streams=10\r\n"; + "a=fmtp:5000 protocol=webrtc-datachannel; streams=65536\r\n"; // One candidate reference string as per W3c spec. @@ -485,19 +485,13 @@ class WebRtcSdpTest : public testing::Test { EXPECT_TRUE(desc_.AddTransportInfo( TransportInfo(kAudioContentName, TransportDescription(NS_JINGLE_ICE_UDP, - std::vector<std::string>(), kCandidateUfragVoice, - kCandidatePwdVoice, - cricket::ICEMODE_FULL, - NULL, Candidates())))); + kCandidatePwdVoice)))); EXPECT_TRUE(desc_.AddTransportInfo( TransportInfo(kVideoContentName, TransportDescription(NS_JINGLE_ICE_UDP, - std::vector<std::string>(), kCandidateUfragVideo, - kCandidatePwdVideo, - cricket::ICEMODE_FULL, - NULL, Candidates())))); + kCandidatePwdVideo)))); // v4 host int port = 1234; @@ -860,9 +854,7 @@ class WebRtcSdpTest : public testing::Test { } TransportInfo transport_info( content_name, TransportDescription(NS_JINGLE_ICE_UDP, - std::vector<std::string>(), - ufrag, pwd, cricket::ICEMODE_FULL, - NULL, Candidates())); + ufrag, pwd)); SessionDescription* desc = const_cast<SessionDescription*>(jdesc->description()); desc->RemoveTransportInfoByName(content_name); @@ -903,16 +895,18 @@ class WebRtcSdpTest : public testing::Test { std::vector<std::string>(), kCandidateUfragVoice, kCandidatePwdVoice, - cricket::ICEMODE_FULL, &fingerprint, - Candidates())))); + cricket::ICEMODE_FULL, + cricket::CONNECTIONROLE_NONE, + &fingerprint, Candidates())))); EXPECT_TRUE(desc_.AddTransportInfo( TransportInfo(kVideoContentName, TransportDescription(NS_JINGLE_ICE_UDP, std::vector<std::string>(), kCandidateUfragVideo, kCandidatePwdVideo, - cricket::ICEMODE_FULL, &fingerprint, - Candidates())))); + cricket::ICEMODE_FULL, + cricket::CONNECTIONROLE_NONE, + &fingerprint, Candidates())))); } void AddExtmap() { @@ -984,11 +978,8 @@ class WebRtcSdpTest : public testing::Test { EXPECT_TRUE(desc_.AddTransportInfo( TransportInfo(kDataContentName, TransportDescription(NS_JINGLE_ICE_UDP, - 
std::vector<std::string>(), kCandidateUfragData, - kCandidatePwdData, - cricket::ICEMODE_FULL, - NULL, Candidates())))); + kCandidatePwdData)))); } void AddRtpDataChannel() { @@ -1011,11 +1002,8 @@ class WebRtcSdpTest : public testing::Test { EXPECT_TRUE(desc_.AddTransportInfo( TransportInfo(kDataContentName, TransportDescription(NS_JINGLE_ICE_UDP, - std::vector<std::string>(), kCandidateUfragData, - kCandidatePwdData, - cricket::ICEMODE_FULL, - NULL, Candidates())))); + kCandidatePwdData)))); } bool TestDeserializeDirection(cricket::MediaContentDirection direction) { @@ -1439,6 +1427,28 @@ TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithSctpDataChannel) { EXPECT_EQ(message, expected_sdp); } +TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithDataChannelAndBandwidth) { + AddRtpDataChannel(); + data_desc_->set_bandwidth(100*1000); + JsepSessionDescription jsep_desc(kDummyString); + + ASSERT_TRUE(jsep_desc.Initialize(desc_.Copy(), kSessionId, kSessionVersion)); + std::string message = webrtc::SdpSerialize(jsep_desc); + + std::string expected_sdp = kSdpString; + expected_sdp.append(kSdpRtpDataChannelString); + // We want to test that serializing data content ignores bandwidth + // settings (it should always be the default). Thus, we don't do + // the following: + // TODO(pthatcher): We need to temporarily allow the SDP to control + // this for backwards-compatibility. Once we don't need that any + // more, remove this. + InjectAfter("a=mid:data_content_name\r\na=sendrecv\r\n", + "b=AS:100\r\n", + &expected_sdp); + EXPECT_EQ(expected_sdp, message); +} + TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithExtmap) { AddExtmap(); JsepSessionDescription desc_with_extmap("dummy"); @@ -1751,6 +1761,32 @@ TEST_F(WebRtcSdpTest, DeserializeSdpWithSctpDataChannels) { EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output)); } +TEST_F(WebRtcSdpTest, DeserializeSdpWithRtpDataChannelsAndBandwidth) { + AddRtpDataChannel(); + JsepSessionDescription jdesc(kDummyString); + // We want to test that deserializing data content ignores bandwidth + // settings (it should always be the default). Thus, we don't do + // the following: + // TODO(pthatcher): We need to temporarily allow the SDP to control + // this for backwards-compatibility. Once we don't need that any + // more, remove this. 
+ DataContentDescription* dcd = static_cast<DataContentDescription*>( + GetFirstDataContent(&desc_)->description); + dcd->set_bandwidth(100 * 1000); + ASSERT_TRUE(jdesc.Initialize(desc_.Copy(), kSessionId, kSessionVersion)); + + std::string sdp_with_bandwidth = kSdpString; + sdp_with_bandwidth.append(kSdpRtpDataChannelString); + InjectAfter("a=mid:data_content_name\r\n", + "b=AS:100\r\n", + &sdp_with_bandwidth); + JsepSessionDescription jdesc_with_bandwidth(kDummyString); + + EXPECT_TRUE( + SdpDeserialize(sdp_with_bandwidth, &jdesc_with_bandwidth)); + EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_with_bandwidth)); +} + TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithSessionLevelExtmap) { TestDeserializeExtmap(true, false); } @@ -1966,3 +2002,60 @@ TEST_F(WebRtcSdpTest, RoundTripSdpWithSctpDataChannelsWithCandidates) { EXPECT_TRUE(SdpDeserialize(sdp_with_data, &jdesc_output)); EXPECT_EQ(sdp_with_data, webrtc::SdpSerialize(jdesc_output)); } + +TEST_F(WebRtcSdpTest, SerializeDtlsSetupAttribute) { + AddFingerprint(); + TransportInfo audio_transport_info = + *(desc_.GetTransportInfoByName(kAudioContentName)); + EXPECT_EQ(cricket::CONNECTIONROLE_NONE, + audio_transport_info.description.connection_role); + audio_transport_info.description.connection_role = + cricket::CONNECTIONROLE_ACTIVE; + + TransportInfo video_transport_info = + *(desc_.GetTransportInfoByName(kVideoContentName)); + EXPECT_EQ(cricket::CONNECTIONROLE_NONE, + video_transport_info.description.connection_role); + video_transport_info.description.connection_role = + cricket::CONNECTIONROLE_ACTIVE; + + desc_.RemoveTransportInfoByName(kAudioContentName); + desc_.RemoveTransportInfoByName(kVideoContentName); + + desc_.AddTransportInfo(audio_transport_info); + desc_.AddTransportInfo(video_transport_info); + + ASSERT_TRUE(jdesc_.Initialize(desc_.Copy(), + jdesc_.session_id(), + jdesc_.session_version())); + std::string message = webrtc::SdpSerialize(jdesc_); + std::string sdp_with_dtlssetup = kSdpFullString; + + // Fingerprint attribute is necessary to add DTLS setup attribute. + InjectAfter(kAttributeIcePwdVoice, + kFingerprint, &sdp_with_dtlssetup); + InjectAfter(kAttributeIcePwdVideo, + kFingerprint, &sdp_with_dtlssetup); + // Now adding |setup| attribute. 
+ InjectAfter(kFingerprint, + "a=setup:active\r\n", &sdp_with_dtlssetup); + EXPECT_EQ(sdp_with_dtlssetup, message); +} + +TEST_F(WebRtcSdpTest, DeserializeDtlsSetupAttribute) { + JsepSessionDescription jdesc_with_dtlssetup(kDummyString); + std::string sdp_with_dtlssetup = kSdpFullString; + InjectAfter(kSessionTime, + "a=setup:actpass\r\n", + &sdp_with_dtlssetup); + EXPECT_TRUE(SdpDeserialize(sdp_with_dtlssetup, &jdesc_with_dtlssetup)); + cricket::SessionDescription* desc = jdesc_with_dtlssetup.description(); + const cricket::TransportInfo* atinfo = + desc->GetTransportInfoByName("audio_content_name"); + EXPECT_EQ(cricket::CONNECTIONROLE_ACTPASS, + atinfo->description.connection_role); + const cricket::TransportInfo* vtinfo = + desc->GetTransportInfoByName("video_content_name"); + EXPECT_EQ(cricket::CONNECTIONROLE_ACTPASS, + vtinfo->description.connection_role); +} diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsession.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsession.cc index b056757df43..f1fb40d51a0 100644 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsession.cc +++ b/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsession.cc @@ -422,6 +422,17 @@ bool WebRtcSession::Initialize( // mandatory constraints can be fulfilled. Note that |constraints| // can be null. bool value; + + // Enable DTLS by default if |dtls_identity_service| is valid. + bool dtls_enabled = (dtls_identity_service != NULL); + // |constraints| can override the default |dtls_enabled| value. + if (FindConstraint( + constraints, + MediaConstraintsInterface::kEnableDtlsSrtp, + &value, NULL)) { + dtls_enabled = value; + } + // Enable creation of RTP data channels if the kEnableRtpDataChannels is set. // It takes precendence over the kEnableSctpDataChannels constraint. if (FindConstraint( @@ -434,11 +445,6 @@ bool WebRtcSession::Initialize( constraints, MediaConstraintsInterface::kEnableSctpDataChannels, &value, NULL) && value; - bool dtls_enabled = FindConstraint( - constraints, - MediaConstraintsInterface::kEnableDtlsSrtp, - &value, NULL) && value; - // DTLS has to be enabled to use SCTP. if (sctp_enabled && dtls_enabled) { LOG(LS_INFO) << "Allowing SCTP data engine."; @@ -467,7 +473,7 @@ bool WebRtcSession::Initialize( this, id(), data_channel_type_, - constraints)); + dtls_enabled)); webrtc_session_desc_factory_->SignalIdentityReady.connect( this, &WebRtcSession::OnIdentityReady); @@ -505,6 +511,26 @@ cricket::SecureMediaPolicy WebRtcSession::secure_policy() const { return webrtc_session_desc_factory_->secure(); } +bool WebRtcSession::GetSslRole(talk_base::SSLRole* role) { + if (local_description() == NULL || remote_description() == NULL) { + LOG(LS_INFO) << "Local and Remote descriptions must be applied to get " + << "SSL Role of the session."; + return false; + } + + // TODO(mallinath) - Return role of each transport, as role may differ from + // one another. + // In current implementaion we just return the role of first transport in the + // transport map. 
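The setup attribute handled above follows the RFC 4145/5763 DTLS role negotiation: an offer typically carries a=setup:actpass and the answerer picks active or passive. A minimal sketch (hypothetical helper, not from this patch) of how the SSL role reported by GetSslRole() below feeds back into that choice when an answer is regenerated; "passive" corresponds to the DTLS/TLS server, "active" to the client, and the factory later in this patch sets prefer_passive_role from this value so a renegotiated answer keeps the role the transport already settled on.

#include "talk/app/webrtc/webrtcsession.h"
#include "talk/base/sslstreamadapter.h"  // talk_base::SSLRole

// Sketch of the role mapping; assumes an established session whose local and
// remote descriptions have already been applied.
bool PreferPassiveRole(webrtc::WebRtcSession* session) {
  talk_base::SSLRole ssl_role;
  return session->GetSslRole(&ssl_role) &&
         ssl_role == talk_base::SSL_SERVER;  // SSL server == a=setup:passive
}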
+ for (cricket::TransportMap::const_iterator iter = transport_proxies().begin(); + iter != transport_proxies().end(); ++iter) { + if (iter->second->impl()) { + return iter->second->impl()->GetSslRole(role); + } + } + return false; +} + void WebRtcSession::CreateOffer(CreateSessionDescriptionObserver* observer, const MediaConstraintsInterface* constraints) { webrtc_session_desc_factory_->CreateOffer(observer, constraints); @@ -517,42 +543,22 @@ void WebRtcSession::CreateAnswer(CreateSessionDescriptionObserver* observer, bool WebRtcSession::SetLocalDescription(SessionDescriptionInterface* desc, std::string* err_desc) { - cricket::SecureMediaPolicy secure_policy = - webrtc_session_desc_factory_->secure(); // Takes the ownership of |desc| regardless of the result. talk_base::scoped_ptr<SessionDescriptionInterface> desc_temp(desc); - if (error() != cricket::BaseSession::ERROR_NONE) { - return BadLocalSdp(SessionErrorMsg(error()), err_desc); - } - - if (!desc || !desc->description()) { - return BadLocalSdp(kInvalidSdp, err_desc); - } - - if (!VerifyBundleSettings(desc->description())) { - return BadLocalSdp(kBundleWithoutRtcpMux, err_desc); - } - - Action action = GetAction(desc->type()); - if (!ExpectSetLocalDescription(action)) { - std::string type = desc->type(); - return BadLocalSdp(BadStateErrMsg(type, state()), err_desc); - } - if (secure_policy == cricket::SEC_REQUIRED && - !VerifyCrypto(desc->description())) { - return BadLocalSdp(kSdpWithoutCrypto, err_desc); - } - if (action == kAnswer && !VerifyMediaDescriptions( - desc->description(), remote_description()->description())) { - return BadLocalSdp(kMlineMismatch, err_desc); + // Validate SDP. + if (!ValidateSessionDescription(desc, cricket::CS_LOCAL, err_desc)) { + return false; } // Update the initiator flag if this session is the initiator. + Action action = GetAction(desc->type()); if (state() == STATE_INIT && action == kOffer) { set_initiator(true); } + cricket::SecureMediaPolicy secure_policy = + webrtc_session_desc_factory_->secure(); // Update the MediaContentDescription crypto settings as per the policy set. UpdateSessionDescriptionSecurePolicy(secure_policy, desc->description()); @@ -589,40 +595,16 @@ bool WebRtcSession::SetLocalDescription(SessionDescriptionInterface* desc, bool WebRtcSession::SetRemoteDescription(SessionDescriptionInterface* desc, std::string* err_desc) { - cricket::SecureMediaPolicy secure_policy = - webrtc_session_desc_factory_->secure(); // Takes the ownership of |desc| regardless of the result. talk_base::scoped_ptr<SessionDescriptionInterface> desc_temp(desc); - if (error() != cricket::BaseSession::ERROR_NONE) { - return BadRemoteSdp(SessionErrorMsg(error()), err_desc); - } - - if (!desc || !desc->description()) { - return BadRemoteSdp(kInvalidSdp, err_desc); - } - - if (!VerifyBundleSettings(desc->description())) { - return BadRemoteSdp(kBundleWithoutRtcpMux, err_desc); - } - - Action action = GetAction(desc->type()); - if (!ExpectSetRemoteDescription(action)) { - std::string type = desc->type(); - return BadRemoteSdp(BadStateErrMsg(type, state()), err_desc); - } - - if (action == kAnswer && !VerifyMediaDescriptions( - desc->description(), local_description()->description())) { - return BadRemoteSdp(kMlineMismatch, err_desc); - } - - if (secure_policy == cricket::SEC_REQUIRED && - !VerifyCrypto(desc->description())) { - return BadRemoteSdp(kSdpWithoutCrypto, err_desc); + // Validate SDP. 
+ if (!ValidateSessionDescription(desc, cricket::CS_REMOTE, err_desc)) { + return false; } // Transport and Media channels will be created only when offer is set. + Action action = GetAction(desc->type()); if (action == kOffer && !CreateChannels(desc->description())) { // TODO(mallinath) - Handle CreateChannel failure, as new local description // is applied. Restore back to old description. @@ -962,7 +944,6 @@ talk_base::scoped_refptr<DataChannel> WebRtcSession::CreateDataChannel( if (data_channel_.get()) { channel->SetReceiveSsrc(new_config.id); channel->SetSendSsrc(new_config.id); - channel->ConnectToDataSession(); } if (!config->negotiated) { talk_base::Buffer *payload = new talk_base::Buffer; @@ -1094,36 +1075,6 @@ void WebRtcSession::OnTransportProxyCandidatesReady( ProcessNewLocalCandidate(proxy->content_name(), candidates); } -bool WebRtcSession::ExpectSetLocalDescription(Action action) { - return ((action == kOffer && state() == STATE_INIT) || - // update local offer - (action == kOffer && state() == STATE_SENTINITIATE) || - // update the current ongoing session. - (action == kOffer && state() == STATE_RECEIVEDACCEPT) || - (action == kOffer && state() == STATE_SENTACCEPT) || - (action == kOffer && state() == STATE_INPROGRESS) || - // accept remote offer - (action == kAnswer && state() == STATE_RECEIVEDINITIATE) || - (action == kAnswer && state() == STATE_SENTPRACCEPT) || - (action == kPrAnswer && state() == STATE_RECEIVEDINITIATE) || - (action == kPrAnswer && state() == STATE_SENTPRACCEPT)); -} - -bool WebRtcSession::ExpectSetRemoteDescription(Action action) { - return ((action == kOffer && state() == STATE_INIT) || - // update remote offer - (action == kOffer && state() == STATE_RECEIVEDINITIATE) || - // update the current ongoing session - (action == kOffer && state() == STATE_RECEIVEDACCEPT) || - (action == kOffer && state() == STATE_SENTACCEPT) || - (action == kOffer && state() == STATE_INPROGRESS) || - // accept local offer - (action == kAnswer && state() == STATE_SENTINITIATE) || - (action == kAnswer && state() == STATE_RECEIVEDPRACCEPT) || - (action == kPrAnswer && state() == STATE_SENTINITIATE) || - (action == kPrAnswer && state() == STATE_RECEIVEDPRACCEPT)); -} - void WebRtcSession::OnCandidatesAllocationDone() { ASSERT(signaling_thread()->IsCurrent()); if (ice_observer_) { @@ -1378,7 +1329,7 @@ void WebRtcSession::OnDataReceived( } // Returns false if bundle is enabled and rtcp_mux is disabled. 
-bool WebRtcSession::VerifyBundleSettings(const SessionDescription* desc) { +bool WebRtcSession::ValidateBundleSettings(const SessionDescription* desc) { bool bundle_enabled = desc->HasGroup(cricket::GROUP_TYPE_BUNDLE); if (!bundle_enabled) return true; @@ -1409,4 +1360,79 @@ bool WebRtcSession::HasRtcpMuxEnabled( return description->rtcp_mux(); } +bool WebRtcSession::ValidateSessionDescription( + const SessionDescriptionInterface* sdesc, + cricket::ContentSource source, std::string* error_desc) { + + if (error() != cricket::BaseSession::ERROR_NONE) { + return BadSdp(source, SessionErrorMsg(error()), error_desc); + } + + if (!sdesc || !sdesc->description()) { + return BadSdp(source, kInvalidSdp, error_desc); + } + + std::string type = sdesc->type(); + Action action = GetAction(sdesc->type()); + if (source == cricket::CS_LOCAL) { + if (!ExpectSetLocalDescription(action)) + return BadSdp(source, BadStateErrMsg(type, state()), error_desc); + } else { + if (!ExpectSetRemoteDescription(action)) + return BadSdp(source, BadStateErrMsg(type, state()), error_desc); + } + + // Verify crypto settings. + if (webrtc_session_desc_factory_->secure() == cricket::SEC_REQUIRED && + !VerifyCrypto(sdesc->description())) { + return BadSdp(source, kSdpWithoutCrypto, error_desc); + } + + if (!ValidateBundleSettings(sdesc->description())) { + return BadSdp(source, kBundleWithoutRtcpMux, error_desc); + } + + // Verify m-lines in Answer when compared against Offer. + if (action == kAnswer) { + const cricket::SessionDescription* offer_desc = + (source == cricket::CS_LOCAL) ? remote_description()->description() : + local_description()->description(); + if (!VerifyMediaDescriptions(sdesc->description(), offer_desc)) { + return BadSdp(source, kMlineMismatch, error_desc); + } + } + + return true; +} + +bool WebRtcSession::ExpectSetLocalDescription(Action action) { + return ((action == kOffer && state() == STATE_INIT) || + // update local offer + (action == kOffer && state() == STATE_SENTINITIATE) || + // update the current ongoing session. 
+ (action == kOffer && state() == STATE_RECEIVEDACCEPT) || + (action == kOffer && state() == STATE_SENTACCEPT) || + (action == kOffer && state() == STATE_INPROGRESS) || + // accept remote offer + (action == kAnswer && state() == STATE_RECEIVEDINITIATE) || + (action == kAnswer && state() == STATE_SENTPRACCEPT) || + (action == kPrAnswer && state() == STATE_RECEIVEDINITIATE) || + (action == kPrAnswer && state() == STATE_SENTPRACCEPT)); +} + +bool WebRtcSession::ExpectSetRemoteDescription(Action action) { + return ((action == kOffer && state() == STATE_INIT) || + // update remote offer + (action == kOffer && state() == STATE_RECEIVEDINITIATE) || + // update the current ongoing session + (action == kOffer && state() == STATE_RECEIVEDACCEPT) || + (action == kOffer && state() == STATE_SENTACCEPT) || + (action == kOffer && state() == STATE_INPROGRESS) || + // accept local offer + (action == kAnswer && state() == STATE_SENTINITIATE) || + (action == kAnswer && state() == STATE_RECEIVEDPRACCEPT) || + (action == kPrAnswer && state() == STATE_SENTINITIATE) || + (action == kPrAnswer && state() == STATE_RECEIVEDPRACCEPT)); +} + } // namespace webrtc diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsession.h b/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsession.h index 202ca66e129..0cb049f4983 100644 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsession.h +++ b/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsession.h @@ -130,6 +130,9 @@ class WebRtcSession : public cricket::BaseSession, void set_secure_policy(cricket::SecureMediaPolicy secure_policy); cricket::SecureMediaPolicy secure_policy() const; + // Get current ssl role from transport. + bool GetSslRole(talk_base::SSLRole* role); + // Generic error message callback from WebRtcSession. // TODO - It may be necessary to supply error code as well. sigslot::signal0<> SignalError; @@ -152,9 +155,6 @@ class WebRtcSession : public cricket::BaseSession, return remote_desc_.get(); } - void set_secure(cricket::SecureMediaPolicy secure_policy); - cricket::SecureMediaPolicy secure(); - // Get the id used as a media stream track's "id" field from ssrc. virtual bool GetTrackIdBySsrc(uint32 ssrc, std::string* id); @@ -223,10 +223,6 @@ class WebRtcSession : public cricket::BaseSession, const cricket::Candidates& candidates); virtual void OnCandidatesAllocationDone(); - // Check if a call to SetLocalDescription is acceptable with |action|. - bool ExpectSetLocalDescription(Action action); - // Check if a call to SetRemoteDescription is acceptable with |action|. - bool ExpectSetRemoteDescription(Action action); // Creates local session description with audio and video contents. bool CreateDefaultLocalDescription(); // Enables media channels to allow sending of media. @@ -275,8 +271,20 @@ class WebRtcSession : public cricket::BaseSession, std::string BadStateErrMsg(const std::string& type, State state); void SetIceConnectionState(PeerConnectionInterface::IceConnectionState state); - bool VerifyBundleSettings(const cricket::SessionDescription* desc); + bool ValidateBundleSettings(const cricket::SessionDescription* desc); bool HasRtcpMuxEnabled(const cricket::ContentInfo* content); + // Below methods are helper methods which verifies SDP. + bool ValidateSessionDescription(const SessionDescriptionInterface* sdesc, + cricket::ContentSource source, + std::string* error_desc); + + // Check if a call to SetLocalDescription is acceptable with |action|. 
+ bool ExpectSetLocalDescription(Action action); + // Check if a call to SetRemoteDescription is acceptable with |action|. + bool ExpectSetRemoteDescription(Action action); + // Verifies a=setup attribute as per RFC 5763. + bool ValidateDtlsSetupAttribute(const cricket::SessionDescription* desc, + Action action); talk_base::scoped_ptr<cricket::VoiceChannel> voice_channel_; talk_base::scoped_ptr<cricket::VideoChannel> video_channel_; diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsession_unittest.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsession_unittest.cc index bcb209a0abe..1531e0efc1a 100644 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsession_unittest.cc +++ b/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsession_unittest.cc @@ -275,7 +275,8 @@ class WebRtcSessionTest : public testing::Test { ss_scope_(fss_.get()), stun_server_(talk_base::Thread::Current(), kStunAddr), allocator_(&network_manager_, kStunAddr, - SocketAddress(), SocketAddress(), SocketAddress()) { + SocketAddress(), SocketAddress(), SocketAddress()), + mediastream_signaling_(channel_manager_.get()) { tdesc_factory_->set_protocol(cricket::ICEPROTO_HYBRID); allocator_.set_flags(cricket::PORTALLOCATOR_DISABLE_TCP | cricket::PORTALLOCATOR_DISABLE_RELAY | @@ -313,18 +314,7 @@ class WebRtcSessionTest : public testing::Test { Init(NULL); } - void InitWithDtls() { - constraints_.reset(new FakeConstraints()); - constraints_->AddOptional( - webrtc::MediaConstraintsInterface::kEnableDtlsSrtp, true); - - Init(NULL); - } - - void InitWithAsyncDtls(bool identity_request_should_fail) { - constraints_.reset(new FakeConstraints()); - constraints_->AddOptional( - webrtc::MediaConstraintsInterface::kEnableDtlsSrtp, true); + void InitWithDtls(bool identity_request_should_fail = false) { FakeIdentityService* identity_service = new FakeIdentityService(); identity_service->set_should_fail(identity_request_should_fail); Init(identity_service); @@ -348,7 +338,7 @@ class WebRtcSessionTest : public testing::Test { session_->CreateOffer(observer, constraints); EXPECT_TRUE_WAIT( observer->state() != WebRtcSessionCreateSDPObserverForTest::kInit, - 1000); + 2000); return observer->ReleaseDescription(); } @@ -359,7 +349,7 @@ class WebRtcSessionTest : public testing::Test { session_->CreateAnswer(observer, constraints); EXPECT_TRUE_WAIT( observer->state() != WebRtcSessionCreateSDPObserverForTest::kInit, - 1000); + 2000); return observer->ReleaseDescription(); } @@ -819,7 +809,7 @@ class WebRtcSessionTest : public testing::Test { void VerifyMultipleAsyncCreateDescription( bool success, CreateSessionDescriptionRequest::Type type) { - InitWithAsyncDtls(!success); + InitWithDtls(!success); if (type == CreateSessionDescriptionRequest::kAnswer) { cricket::MediaSessionOptions options; @@ -2490,19 +2480,54 @@ TEST_F(WebRtcSessionTest, TestRtpDataChannelConstraintTakesPrecedence) { webrtc::MediaConstraintsInterface::kEnableRtpDataChannels, true); constraints_->AddOptional( webrtc::MediaConstraintsInterface::kEnableSctpDataChannels, true); - constraints_->AddOptional( - webrtc::MediaConstraintsInterface::kEnableDtlsSrtp, true); - Init(NULL); + InitWithDtls(false); SetLocalDescriptionWithDataChannel(); EXPECT_EQ(cricket::DCT_RTP, data_engine_->last_channel_type()); } +TEST_F(WebRtcSessionTest, TestCreateOfferWithSctpEnabledWithoutStreams) { + MAYBE_SKIP_TEST(talk_base::SSLStreamAdapter::HaveDtlsSrtp); + + constraints_.reset(new FakeConstraints()); + constraints_->AddOptional( + 
webrtc::MediaConstraintsInterface::kEnableSctpDataChannels, true); + InitWithDtls(false); + + talk_base::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer(NULL)); + EXPECT_TRUE(offer->description()->GetContentByName("data") == NULL); + EXPECT_TRUE(offer->description()->GetTransportInfoByName("data") == NULL); +} + +TEST_F(WebRtcSessionTest, TestCreateAnswerWithSctpInOfferAndNoStreams) { + MAYBE_SKIP_TEST(talk_base::SSLStreamAdapter::HaveDtlsSrtp); + SetFactoryDtlsSrtp(); + constraints_.reset(new FakeConstraints()); + constraints_->AddOptional( + webrtc::MediaConstraintsInterface::kEnableSctpDataChannels, true); + InitWithDtls(false); + + // Create remote offer with SCTP. + cricket::MediaSessionOptions options; + options.data_channel_type = cricket::DCT_SCTP; + JsepSessionDescription* offer = + CreateRemoteOffer(options, cricket::SEC_ENABLED); + SetRemoteDescriptionWithoutError(offer); + + // Verifies the answer contains SCTP. + talk_base::scoped_ptr<SessionDescriptionInterface> answer(CreateAnswer(NULL)); + EXPECT_TRUE(answer != NULL); + EXPECT_TRUE(answer->description()->GetContentByName("data") != NULL); + EXPECT_TRUE(answer->description()->GetTransportInfoByName("data") != NULL); +} + TEST_F(WebRtcSessionTest, TestSctpDataChannelWithoutDtls) { constraints_.reset(new FakeConstraints()); constraints_->AddOptional( webrtc::MediaConstraintsInterface::kEnableSctpDataChannels, true); - Init(NULL); + constraints_->AddOptional( + webrtc::MediaConstraintsInterface::kEnableDtlsSrtp, false); + InitWithDtls(false); SetLocalDescriptionWithDataChannel(); EXPECT_EQ(cricket::DCT_NONE, data_engine_->last_channel_type()); @@ -2514,9 +2539,7 @@ TEST_F(WebRtcSessionTest, TestSctpDataChannelWithDtls) { constraints_.reset(new FakeConstraints()); constraints_->AddOptional( webrtc::MediaConstraintsInterface::kEnableSctpDataChannels, true); - constraints_->AddOptional( - webrtc::MediaConstraintsInterface::kEnableDtlsSrtp, true); - Init(NULL); + InitWithDtls(false); SetLocalDescriptionWithDataChannel(); EXPECT_EQ(cricket::DCT_SCTP, data_engine_->last_channel_type()); @@ -2526,7 +2549,7 @@ TEST_F(WebRtcSessionTest, TestSctpDataChannelWithDtls) { // identity generation is finished. TEST_F(WebRtcSessionTest, TestCreateOfferBeforeIdentityRequestReturnSuccess) { MAYBE_SKIP_TEST(talk_base::SSLStreamAdapter::HaveDtlsSrtp); - InitWithAsyncDtls(false); + InitWithDtls(false); EXPECT_TRUE(session_->waiting_for_identity()); talk_base::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer(NULL)); @@ -2537,7 +2560,7 @@ TEST_F(WebRtcSessionTest, TestCreateOfferBeforeIdentityRequestReturnSuccess) { // identity generation is finished. TEST_F(WebRtcSessionTest, TestCreateAnswerBeforeIdentityRequestReturnSuccess) { MAYBE_SKIP_TEST(talk_base::SSLStreamAdapter::HaveDtlsSrtp); - InitWithAsyncDtls(false); + InitWithDtls(false); cricket::MediaSessionOptions options; scoped_ptr<JsepSessionDescription> offer( @@ -2553,7 +2576,7 @@ TEST_F(WebRtcSessionTest, TestCreateAnswerBeforeIdentityRequestReturnSuccess) { // identity generation is finished. TEST_F(WebRtcSessionTest, TestCreateOfferAfterIdentityRequestReturnSuccess) { MAYBE_SKIP_TEST(talk_base::SSLStreamAdapter::HaveDtlsSrtp); - InitWithAsyncDtls(false); + InitWithDtls(false); EXPECT_TRUE_WAIT(!session_->waiting_for_identity(), 1000); talk_base::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer(NULL)); @@ -2564,7 +2587,7 @@ TEST_F(WebRtcSessionTest, TestCreateOfferAfterIdentityRequestReturnSuccess) { // identity generation fails. 
TEST_F(WebRtcSessionTest, TestCreateOfferAfterIdentityRequestReturnFailure) { MAYBE_SKIP_TEST(talk_base::SSLStreamAdapter::HaveDtlsSrtp); - InitWithAsyncDtls(true); + InitWithDtls(true); EXPECT_TRUE_WAIT(!session_->waiting_for_identity(), 1000); talk_base::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer(NULL)); diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsessiondescriptionfactory.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsessiondescriptionfactory.cc index 2021085aa27..30c49a718ad 100644 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsessiondescriptionfactory.cc +++ b/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsessiondescriptionfactory.cc @@ -113,7 +113,7 @@ WebRtcSessionDescriptionFactory::WebRtcSessionDescriptionFactory( WebRtcSession* session, const std::string& session_id, cricket::DataChannelType dct, - const MediaConstraintsInterface* constraints) + bool dtls_enabled) : signaling_thread_(signaling_thread), mediastream_signaling_(mediastream_signaling), session_desc_factory_(channel_manager, &transport_desc_factory_), @@ -132,38 +132,31 @@ WebRtcSessionDescriptionFactory::WebRtcSessionDescriptionFactory( // By default SRTP-SDES is enabled in WebRtc. set_secure(cricket::SEC_REQUIRED); - // Enable DTLS-SRTP if the constraint is set. - bool dtls_enabled = false; - if (!FindConstraint( - constraints, MediaConstraintsInterface::kEnableDtlsSrtp, - &dtls_enabled, NULL) || - !dtls_enabled) { - return; - } - // DTLS is enabled. - if (identity_service_.get()) { - identity_request_observer_ = - new talk_base::RefCountedObject<WebRtcIdentityRequestObserver>(); - - identity_request_observer_->SignalRequestFailed.connect( - this, &WebRtcSessionDescriptionFactory::OnIdentityRequestFailed); - identity_request_observer_->SignalIdentityReady.connect( - this, &WebRtcSessionDescriptionFactory::OnIdentityReady); - - if (identity_service_->RequestIdentity(kWebRTCIdentityName, - kWebRTCIdentityName, - identity_request_observer_)) { - LOG(LS_VERBOSE) << "DTLS-SRTP enabled; sent DTLS identity request."; - identity_request_state_ = IDENTITY_WAITING; + if (dtls_enabled) { + if (identity_service_.get()) { + identity_request_observer_ = + new talk_base::RefCountedObject<WebRtcIdentityRequestObserver>(); + + identity_request_observer_->SignalRequestFailed.connect( + this, &WebRtcSessionDescriptionFactory::OnIdentityRequestFailed); + identity_request_observer_->SignalIdentityReady.connect( + this, &WebRtcSessionDescriptionFactory::OnIdentityReady); + + if (identity_service_->RequestIdentity(kWebRTCIdentityName, + kWebRTCIdentityName, + identity_request_observer_)) { + LOG(LS_VERBOSE) << "DTLS-SRTP enabled; sent DTLS identity request."; + identity_request_state_ = IDENTITY_WAITING; + } else { + LOG(LS_ERROR) << "Failed to send DTLS identity request."; + identity_request_state_ = IDENTITY_FAILED; + } } else { - LOG(LS_ERROR) << "Failed to send DTLS identity request."; - identity_request_state_ = IDENTITY_FAILED; + identity_request_state_ = IDENTITY_WAITING; + // Do not generate the identity in the constructor since the caller has + // not got a chance to connect to SignalIdentityReady. + signaling_thread_->Post(this, MSG_GENERATE_IDENTITY, NULL); } - } else { - identity_request_state_ = IDENTITY_WAITING; - // Do not generate the identity in the constructor since the caller has - // not got a chance to connect to SignalIdentityReady. 
- signaling_thread_->Post(this, MSG_GENERATE_IDENTITY, NULL); } } @@ -197,7 +190,8 @@ void WebRtcSessionDescriptionFactory::CreateOffer( return; } - if (data_channel_type_ == cricket::DCT_SCTP) { + if (data_channel_type_ == cricket::DCT_SCTP && + mediastream_signaling_->HasDataChannels()) { options.data_channel_type = cricket::DCT_SCTP; } @@ -249,6 +243,9 @@ void WebRtcSessionDescriptionFactory::CreateAnswer( PostCreateSessionDescriptionFailed(observer, error); return; } + // RTP data channel is handled in MediaSessionOptions::AddStream. SCTP streams + // are not signaled in the SDP so does not go through that path and must be + // handled here. if (data_channel_type_ == cricket::DCT_SCTP) { options.data_channel_type = cricket::DCT_SCTP; } @@ -343,6 +340,13 @@ void WebRtcSessionDescriptionFactory::InternalCreateAnswer( // an answer should also contain new ice ufrag and password if an offer has // been received with new ufrag and password. request.options.transport_options.ice_restart = session_->IceRestartPending(); + // We should pass current ssl role to the transport description factory, if + // there is already an existing ongoing session. + talk_base::SSLRole ssl_role; + if (session_->GetSslRole(&ssl_role)) { + request.options.transport_options.prefer_passive_role = + (talk_base::SSL_SERVER == ssl_role); + } cricket::SessionDescription* desc(session_desc_factory_.CreateAnswer( static_cast<cricket::BaseSession*>(session_)->remote_description(), diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsessiondescriptionfactory.h b/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsessiondescriptionfactory.h index 0c96c69b637..ba34e913749 100644 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsessiondescriptionfactory.h +++ b/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsessiondescriptionfactory.h @@ -102,7 +102,7 @@ class WebRtcSessionDescriptionFactory : public talk_base::MessageHandler, WebRtcSession* session, const std::string& session_id, cricket::DataChannelType dct, - const MediaConstraintsInterface* constraints); + bool dtls_enabled); virtual ~WebRtcSessionDescriptionFactory(); static void CopyCandidatesFromSessionDescription( diff --git a/chromium/third_party/libjingle/source/talk/base/asyncpacketsocket.h b/chromium/third_party/libjingle/source/talk/base/asyncpacketsocket.h index a88f770ca9f..3b4748f510f 100644 --- a/chromium/third_party/libjingle/source/talk/base/asyncpacketsocket.h +++ b/chromium/third_party/libjingle/source/talk/base/asyncpacketsocket.h @@ -28,6 +28,7 @@ #ifndef TALK_BASE_ASYNCPACKETSOCKET_H_ #define TALK_BASE_ASYNCPACKETSOCKET_H_ +#include "talk/base/dscp.h" #include "talk/base/sigslot.h" #include "talk/base/socket.h" @@ -56,8 +57,9 @@ class AsyncPacketSocket : public sigslot::has_slots<> { virtual SocketAddress GetRemoteAddress() const = 0; // Send a packet. - virtual int Send(const void *pv, size_t cb) = 0; - virtual int SendTo(const void *pv, size_t cb, const SocketAddress& addr) = 0; + virtual int Send(const void *pv, size_t cb, DiffServCodePoint dscp) = 0; + virtual int SendTo(const void *pv, size_t cb, const SocketAddress& addr, + DiffServCodePoint) = 0; // Close the socket. 
virtual int Close() = 0; diff --git a/chromium/third_party/libjingle/source/talk/base/asynctcpsocket.cc b/chromium/third_party/libjingle/source/talk/base/asynctcpsocket.cc index 095413d3eb8..517e799c422 100644 --- a/chromium/third_party/libjingle/source/talk/base/asynctcpsocket.cc +++ b/chromium/third_party/libjingle/source/talk/base/asynctcpsocket.cc @@ -141,10 +141,12 @@ void AsyncTCPSocketBase::SetError(int error) { return socket_->SetError(error); } +// TODO(mallinath) - Add support of setting DSCP code on AsyncSocket. int AsyncTCPSocketBase::SendTo(const void *pv, size_t cb, - const SocketAddress& addr) { + const SocketAddress& addr, + DiffServCodePoint dscp) { if (addr == GetRemoteAddress()) - return Send(pv, cb); + return Send(pv, cb, dscp); ASSERT(false); socket_->SetError(ENOTCONN); @@ -261,7 +263,8 @@ AsyncTCPSocket::AsyncTCPSocket(AsyncSocket* socket, bool listen) : AsyncTCPSocketBase(socket, listen, kBufSize) { } -int AsyncTCPSocket::Send(const void *pv, size_t cb) { +// TODO(mallinath) - Add support of setting DSCP code on AsyncSocket. +int AsyncTCPSocket::Send(const void *pv, size_t cb, DiffServCodePoint dscp) { if (cb > kBufSize) { SetError(EMSGSIZE); return -1; diff --git a/chromium/third_party/libjingle/source/talk/base/asynctcpsocket.h b/chromium/third_party/libjingle/source/talk/base/asynctcpsocket.h index b34ce188e9d..a0e7a7e2f41 100644 --- a/chromium/third_party/libjingle/source/talk/base/asynctcpsocket.h +++ b/chromium/third_party/libjingle/source/talk/base/asynctcpsocket.h @@ -43,14 +43,15 @@ class AsyncTCPSocketBase : public AsyncPacketSocket { virtual ~AsyncTCPSocketBase(); // Pure virtual methods to send and recv data. - virtual int Send(const void *pv, size_t cb) = 0; + virtual int Send(const void *pv, size_t cb, DiffServCodePoint dscp) = 0; virtual void ProcessInput(char* data, size_t* len) = 0; // Signals incoming connection. virtual void HandleIncomingConnection(AsyncSocket* socket) = 0; virtual SocketAddress GetLocalAddress() const; virtual SocketAddress GetRemoteAddress() const; - virtual int SendTo(const void *pv, size_t cb, const SocketAddress& addr); + virtual int SendTo(const void *pv, size_t cb, const SocketAddress& addr, + DiffServCodePoint dscp); virtual int Close(); virtual State GetState() const; @@ -101,7 +102,7 @@ class AsyncTCPSocket : public AsyncTCPSocketBase { AsyncTCPSocket(AsyncSocket* socket, bool listen); virtual ~AsyncTCPSocket() {} - virtual int Send(const void* pv, size_t cb); + virtual int Send(const void* pv, size_t cb, DiffServCodePoint dscp); virtual void ProcessInput(char* data, size_t* len); virtual void HandleIncomingConnection(AsyncSocket* socket); diff --git a/chromium/third_party/libjingle/source/talk/base/asyncudpsocket.cc b/chromium/third_party/libjingle/source/talk/base/asyncudpsocket.cc index 6388ce7ce6a..97e5dff9836 100644 --- a/chromium/third_party/libjingle/source/talk/base/asyncudpsocket.cc +++ b/chromium/third_party/libjingle/source/talk/base/asyncudpsocket.cc @@ -75,12 +75,14 @@ SocketAddress AsyncUDPSocket::GetRemoteAddress() const { return socket_->GetRemoteAddress(); } -int AsyncUDPSocket::Send(const void *pv, size_t cb) { +// TODO(mallinath) - Add support of setting DSCP code on AsyncSocket. +int AsyncUDPSocket::Send(const void *pv, size_t cb, DiffServCodePoint dscp) { return socket_->Send(pv, cb); } -int AsyncUDPSocket::SendTo( - const void *pv, size_t cb, const SocketAddress& addr) { +// TODO(mallinath) - Add support of setting DSCP code on AsyncSocket. 
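A minimal sketch (hypothetical helper, not part of this patch) of what the new DiffServCodePoint parameter is for: callers that want QoS marking can pass a code point such as DSCP_EF for voice, while callers that do not care pass DSCP_NO_CHANGE, which is what most call sites in this patch do while socket-level support remains a TODO (a Socket::OPT_DSCP option is also added further down).

#include "talk/base/asyncpacketsocket.h"
#include "talk/base/dscp.h"

// Tags an outgoing packet as expedited-forwarding (voice) traffic.
int SendVoicePacket(talk_base::AsyncPacketSocket* socket,
                    const void* data, size_t len,
                    const talk_base::SocketAddress& remote) {
  return socket->SendTo(data, len, remote, talk_base::DSCP_EF);
}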
+int AsyncUDPSocket::SendTo(const void *pv, size_t cb, + const SocketAddress& addr, DiffServCodePoint dscp) { return socket_->SendTo(pv, cb, addr); } diff --git a/chromium/third_party/libjingle/source/talk/base/asyncudpsocket.h b/chromium/third_party/libjingle/source/talk/base/asyncudpsocket.h index 1bf2ad22988..17e12a26c31 100644 --- a/chromium/third_party/libjingle/source/talk/base/asyncudpsocket.h +++ b/chromium/third_party/libjingle/source/talk/base/asyncudpsocket.h @@ -52,8 +52,9 @@ class AsyncUDPSocket : public AsyncPacketSocket { virtual SocketAddress GetLocalAddress() const; virtual SocketAddress GetRemoteAddress() const; - virtual int Send(const void *pv, size_t cb); - virtual int SendTo(const void *pv, size_t cb, const SocketAddress& addr); + virtual int Send(const void *pv, size_t cb, DiffServCodePoint dscp); + virtual int SendTo(const void *pv, size_t cb, const SocketAddress& addr, + DiffServCodePoint dscp); virtual int Close(); virtual State GetState() const; diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/IceCandidate.java b/chromium/third_party/libjingle/source/talk/base/dscp.h index b5d2dc9da80..c71ee3e932c 100644 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/IceCandidate.java +++ b/chromium/third_party/libjingle/source/talk/base/dscp.h @@ -25,24 +25,38 @@ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ -package org.webrtc; +#ifndef TALK_BASE_DSCP_H_ +#define TALK_BASE_DSCP_H_ -/** - * Representation of a single ICE Candidate, mirroring - * {@code IceCandidateInterface} in the C++ API. - */ -public class IceCandidate { - public final String sdpMid; - public final int sdpMLineIndex; - public final String sdp; +namespace talk_base { +// Differentiated Services Code Point. +// See http://tools.ietf.org/html/rfc2474 for details. +enum DiffServCodePoint { + DSCP_NO_CHANGE = -1, + DSCP_DEFAULT = 0, // Same as DSCP_CS0 + DSCP_CS0 = 0, // The default + DSCP_CS1 = 8, // Bulk/background traffic + DSCP_AF11 = 10, + DSCP_AF12 = 12, + DSCP_AF13 = 14, + DSCP_CS2 = 16, + DSCP_AF21 = 18, + DSCP_AF22 = 20, + DSCP_AF23 = 22, + DSCP_CS3 = 24, + DSCP_AF31 = 26, + DSCP_AF32 = 28, + DSCP_AF33 = 30, + DSCP_CS4 = 32, + DSCP_AF41 = 34, // Video + DSCP_AF42 = 36, // Video + DSCP_AF43 = 38, // Video + DSCP_CS5 = 40, // Video + DSCP_EF = 46, // Voice + DSCP_CS6 = 48, // Voice + DSCP_CS7 = 56, // Control messages +}; - public IceCandidate(String sdpMid, int sdpMLineIndex, String sdp) { - this.sdpMid = sdpMid; - this.sdpMLineIndex = sdpMLineIndex; - this.sdp = sdp; - } +} // namespace talk_base - public String toString() { - return sdpMid + ":" + sdpMLineIndex + ":" + sdp; - } -} + #endif // TALK_BASE_DSCP_H_ diff --git a/chromium/third_party/libjingle/source/talk/base/gunit_prod.h b/chromium/third_party/libjingle/source/talk/base/gunit_prod.h index 6be5c53f970..bf5e88acb4e 100644 --- a/chromium/third_party/libjingle/source/talk/base/gunit_prod.h +++ b/chromium/third_party/libjingle/source/talk/base/gunit_prod.h @@ -28,7 +28,11 @@ #ifndef TALK_BASE_GUNIT_PROD_H_ #define TALK_BASE_GUNIT_PROD_H_ -#if defined(ANDROID) || defined (GTEST_RELATIVE_PATH) +#if defined(ANDROID) +// Android doesn't use gtest at all, so anything that relies on gtest should +// check this define first. 
+#define NO_GTEST +#elif defined (GTEST_RELATIVE_PATH) #include "gtest/gtest_prod.h" #else #include "testing/base/gunit_prod.h" diff --git a/chromium/third_party/libjingle/source/talk/base/macsocketserver_unittest.cc b/chromium/third_party/libjingle/source/talk/base/macsocketserver_unittest.cc index 07cce263c0c..a4a71019dd6 100644 --- a/chromium/third_party/libjingle/source/talk/base/macsocketserver_unittest.cc +++ b/chromium/third_party/libjingle/source/talk/base/macsocketserver_unittest.cc @@ -132,7 +132,8 @@ TEST_F(MacAsyncSocketTest, TestConnectWithDnsLookupIPv6) { SocketTest::TestConnectWithDnsLookupIPv6(); } -TEST_F(MacAsyncSocketTest, TestConnectFailIPv4) { +// BUG=https://code.google.com/p/webrtc/issues/detail?id=2272 +TEST_F(MacAsyncSocketTest, DISABLED_TestConnectFailIPv4) { SocketTest::TestConnectFailIPv4(); } diff --git a/chromium/third_party/libjingle/source/talk/base/messagehandler.cc b/chromium/third_party/libjingle/source/talk/base/messagehandler.cc index 5b3585b1626..16f9a21a165 100644 --- a/chromium/third_party/libjingle/source/talk/base/messagehandler.cc +++ b/chromium/third_party/libjingle/source/talk/base/messagehandler.cc @@ -2,26 +2,26 @@ * libjingle * Copyright 2004--2005, Google Inc. * - * Redistribution and use in source and binary forms, with or without + * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * - * 1. Redistributions of source code must retain the above copyright notice, + * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. - * 3. The name of the author may not be used to endorse or promote products + * 3. The name of the author may not be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED - * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO - * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, - * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR - * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF + * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR + * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ @@ -31,7 +31,7 @@ namespace talk_base { MessageHandler::~MessageHandler() { - MessageQueueManager::Instance()->Clear(this); + MessageQueueManager::Clear(this); } } // namespace talk_base diff --git a/chromium/third_party/libjingle/source/talk/base/messagequeue.cc b/chromium/third_party/libjingle/source/talk/base/messagequeue.cc index 5c406221622..15b700ffe48 100644 --- a/chromium/third_party/libjingle/source/talk/base/messagequeue.cc +++ b/chromium/third_party/libjingle/source/talk/base/messagequeue.cc @@ -42,7 +42,7 @@ const uint32 kMaxMsgLatency = 150; // 150 ms //------------------------------------------------------------------ // MessageQueueManager -MessageQueueManager* MessageQueueManager::instance_; +MessageQueueManager* MessageQueueManager::instance_ = NULL; MessageQueueManager* MessageQueueManager::Instance() { // Note: This is not thread safe, but it is first called before threads are @@ -52,6 +52,10 @@ MessageQueueManager* MessageQueueManager::Instance() { return instance_; } +bool MessageQueueManager::IsInitialized() { + return instance_ != NULL; +} + MessageQueueManager::MessageQueueManager() { } @@ -59,6 +63,9 @@ MessageQueueManager::~MessageQueueManager() { } void MessageQueueManager::Add(MessageQueue *message_queue) { + return Instance()->AddInternal(message_queue); +} +void MessageQueueManager::AddInternal(MessageQueue *message_queue) { // MessageQueueManager methods should be non-reentrant, so we // ASSERT that is the case. If any of these ASSERT, please // contact bpm or jbeda. @@ -68,6 +75,12 @@ void MessageQueueManager::Add(MessageQueue *message_queue) { } void MessageQueueManager::Remove(MessageQueue *message_queue) { + // If there isn't a message queue manager instance, then there isn't a queue + // to remove. + if (!instance_) return; + return Instance()->RemoveInternal(message_queue); +} +void MessageQueueManager::RemoveInternal(MessageQueue *message_queue) { ASSERT(!crit_.CurrentThreadIsOwner()); // See note above. // If this is the last MessageQueue, destroy the manager as well so that // we don't leak this object at program shutdown. As mentioned above, this is @@ -91,6 +104,12 @@ void MessageQueueManager::Remove(MessageQueue *message_queue) { } void MessageQueueManager::Clear(MessageHandler *handler) { + // If there isn't a message queue manager instance, then there aren't any + // queues to remove this handler from. + if (!instance_) return; + return Instance()->ClearInternal(handler); +} +void MessageQueueManager::ClearInternal(MessageHandler *handler) { ASSERT(!crit_.CurrentThreadIsOwner()); // See note above. CritScope cs(&crit_); std::vector<MessageQueue *>::iterator iter; @@ -122,7 +141,7 @@ MessageQueue::~MessageQueue() { // is going away. 
SignalQueueDestroyed(); if (active_) { - MessageQueueManager::Instance()->Remove(this); + MessageQueueManager::Remove(this); Clear(NULL); } if (ss_) { @@ -381,7 +400,7 @@ void MessageQueue::EnsureActive() { ASSERT(crit_.CurrentThreadIsOwner()); if (!active_) { active_ = true; - MessageQueueManager::Instance()->Add(this); + MessageQueueManager::Add(this); } } diff --git a/chromium/third_party/libjingle/source/talk/base/messagequeue.h b/chromium/third_party/libjingle/source/talk/base/messagequeue.h index 331f2073636..7b38ba0082d 100644 --- a/chromium/third_party/libjingle/source/talk/base/messagequeue.h +++ b/chromium/third_party/libjingle/source/talk/base/messagequeue.h @@ -53,16 +53,26 @@ class MessageQueue; class MessageQueueManager { public: - static MessageQueueManager* Instance(); + static void Add(MessageQueue *message_queue); + static void Remove(MessageQueue *message_queue); + static void Clear(MessageHandler *handler); - void Add(MessageQueue *message_queue); - void Remove(MessageQueue *message_queue); - void Clear(MessageHandler *handler); + // For testing purposes, we expose whether or not the MessageQueueManager + // instance has been initialized. It has no other use relative to the rest of + // the functions of this class, which auto-initialize the underlying + // MessageQueueManager instance when necessary. + static bool IsInitialized(); private: + static MessageQueueManager* Instance(); + MessageQueueManager(); ~MessageQueueManager(); + void AddInternal(MessageQueue *message_queue); + void RemoveInternal(MessageQueue *message_queue); + void ClearInternal(MessageHandler *handler); + static MessageQueueManager* instance_; // This list contains 'active' MessageQueues. std::vector<MessageQueue *> message_queues_; diff --git a/chromium/third_party/libjingle/source/talk/base/messagequeue_unittest.cc b/chromium/third_party/libjingle/source/talk/base/messagequeue_unittest.cc index 8e555482222..9ce524ec6ac 100644 --- a/chromium/third_party/libjingle/source/talk/base/messagequeue_unittest.cc +++ b/chromium/third_party/libjingle/source/talk/base/messagequeue_unittest.cc @@ -130,3 +130,28 @@ TEST_F(MessageQueueTest, DiposeHandlerWithPostedMessagePending) { EXPECT_TRUE(deleted); } +struct UnwrapMainThreadScope { + UnwrapMainThreadScope() : rewrap_(Thread::Current() != NULL) { + if (rewrap_) ThreadManager::Instance()->UnwrapCurrentThread(); + } + ~UnwrapMainThreadScope() { + if (rewrap_) ThreadManager::Instance()->WrapCurrentThread(); + } + private: + bool rewrap_; +}; + +TEST(MessageQueueManager, Clear) { + UnwrapMainThreadScope s; + if (MessageQueueManager::IsInitialized()) { + LOG(LS_INFO) << "Unable to run MessageQueueManager::Clear test, since the " + << "MessageQueueManager was already initialized by some " + << "other test in this run."; + return; + } + bool deleted = false; + DeletedMessageHandler* handler = new DeletedMessageHandler(&deleted); + delete handler; + EXPECT_TRUE(deleted); + EXPECT_FALSE(MessageQueueManager::IsInitialized()); +} diff --git a/chromium/third_party/libjingle/source/talk/base/natserver.cc b/chromium/third_party/libjingle/source/talk/base/natserver.cc index 7a3a045099e..483542591e4 100644 --- a/chromium/third_party/libjingle/source/talk/base/natserver.cc +++ b/chromium/third_party/libjingle/source/talk/base/natserver.cc @@ -126,7 +126,8 @@ void NATServer::OnInternalPacket( iter->second->whitelist->insert(dest_addr); // Send the packet to its intended destination. 
- iter->second->socket->SendTo(buf + length, size - length, dest_addr); + iter->second->socket->SendTo(buf + length, size - length, dest_addr, + DSCP_NO_CHANGE); } void NATServer::OnExternalPacket( @@ -155,7 +156,7 @@ void NATServer::OnExternalPacket( // Copy the data part after the address. std::memcpy(real_buf.get() + addrlength, buf, size); server_socket_->SendTo(real_buf.get(), size + addrlength, - iter->second->route.source()); + iter->second->route.source(), DSCP_NO_CHANGE); } void NATServer::Translate(const SocketAddressPair& route) { diff --git a/chromium/third_party/libjingle/source/talk/base/openssladapter.cc b/chromium/third_party/libjingle/source/talk/base/openssladapter.cc index 50391e5c215..af92f0c4534 100644 --- a/chromium/third_party/libjingle/source/talk/base/openssladapter.cc +++ b/chromium/third_party/libjingle/source/talk/base/openssladapter.cc @@ -233,7 +233,10 @@ VerificationCallback OpenSSLAdapter::custom_verify_callback_ = NULL; bool OpenSSLAdapter::InitializeSSL(VerificationCallback callback) { if (!InitializeSSLThread() || !SSL_library_init()) return false; +#if !defined(ADDRESS_SANITIZER) || !defined(OSX) + // Loading the error strings crashes mac_asan. Omit this debugging aid there. SSL_load_error_strings(); +#endif ERR_load_BIO_strings(); OpenSSL_add_all_algorithms(); RAND_poll(); diff --git a/chromium/third_party/libjingle/source/talk/base/physicalsocketserver.cc b/chromium/third_party/libjingle/source/talk/base/physicalsocketserver.cc index 8a1bb5c2656..f14c3bd69c6 100644 --- a/chromium/third_party/libjingle/source/talk/base/physicalsocketserver.cc +++ b/chromium/third_party/libjingle/source/talk/base/physicalsocketserver.cc @@ -1265,7 +1265,14 @@ void PhysicalSocketServer::Remove(Dispatcher *pdispatcher) { DispatcherList::iterator pos = std::find(dispatchers_.begin(), dispatchers_.end(), pdispatcher); - ASSERT(pos != dispatchers_.end()); + // We silently ignore duplicate calls to Add, so we should silently ignore + // the (expected) symmetric calls to Remove. Note that this may still hide + // a real issue, so we at least log a warning about it. + if (pos == dispatchers_.end()) { + LOG(LS_WARNING) << "PhysicalSocketServer asked to remove a unknown " + << "dispatcher, potentially from a duplicate call to Add."; + return; + } size_t index = pos - dispatchers_.begin(); dispatchers_.erase(pos); for (IteratorList::iterator it = iterators_.begin(); it != iterators_.end(); diff --git a/chromium/third_party/libjingle/source/talk/base/socket.h b/chromium/third_party/libjingle/source/talk/base/socket.h index 9932cdada49..e738060f89d 100644 --- a/chromium/third_party/libjingle/source/talk/base/socket.h +++ b/chromium/third_party/libjingle/source/talk/base/socket.h @@ -184,7 +184,8 @@ class Socket { OPT_RCVBUF, // receive buffer size OPT_SNDBUF, // send buffer size OPT_NODELAY, // whether Nagle algorithm is enabled - OPT_IPV6_V6ONLY // Whether the socket is IPv6 only. + OPT_IPV6_V6ONLY, // Whether the socket is IPv6 only. + OPT_DSCP // DSCP code }; virtual int GetOption(Option opt, int* value) = 0; virtual int SetOption(Option opt, int value) = 0; diff --git a/chromium/third_party/libjingle/source/talk/base/testclient.cc b/chromium/third_party/libjingle/source/talk/base/testclient.cc index 0e7625f7365..0ef85183117 100644 --- a/chromium/third_party/libjingle/source/talk/base/testclient.cc +++ b/chromium/third_party/libjingle/source/talk/base/testclient.cc @@ -25,6 +25,7 @@ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ +#include "talk/base/dscp.h" #include "talk/base/testclient.h" #include "talk/base/thread.h" #include "talk/base/timeutils.h" @@ -58,12 +59,12 @@ bool TestClient::CheckConnState(AsyncPacketSocket::State state) { } int TestClient::Send(const char* buf, size_t size) { - return socket_->Send(buf, size); + return socket_->Send(buf, size, DSCP_NO_CHANGE); } int TestClient::SendTo(const char* buf, size_t size, const SocketAddress& dest) { - return socket_->SendTo(buf, size, dest); + return socket_->SendTo(buf, size, dest, DSCP_NO_CHANGE); } TestClient::Packet* TestClient::NextPacket() { diff --git a/chromium/third_party/libjingle/source/talk/base/testechoserver.h b/chromium/third_party/libjingle/source/talk/base/testechoserver.h index 9bb5178c0ac..10466fa4359 100644 --- a/chromium/third_party/libjingle/source/talk/base/testechoserver.h +++ b/chromium/third_party/libjingle/source/talk/base/testechoserver.h @@ -68,7 +68,7 @@ class TestEchoServer : public sigslot::has_slots<> { } void OnPacket(AsyncPacketSocket* socket, const char* buf, size_t size, const SocketAddress& remote_addr) { - socket->Send(buf, size); + socket->Send(buf, size, DSCP_NO_CHANGE); } void OnClose(AsyncPacketSocket* socket, int err) { ClientList::iterator it = diff --git a/chromium/third_party/libjingle/source/talk/base/timeutils_unittest.cc b/chromium/third_party/libjingle/source/talk/base/timeutils_unittest.cc index c90f6a49f20..0fc5eb19c8a 100644 --- a/chromium/third_party/libjingle/source/talk/base/timeutils_unittest.cc +++ b/chromium/third_party/libjingle/source/talk/base/timeutils_unittest.cc @@ -143,7 +143,7 @@ TEST(TimeTest, BoundaryComparison) { EXPECT_EQ(-100, TimeDiff(ts_earlier, ts_later)); } -TEST(TimeTest, CurrentTmTime) { +TEST(TimeTest, DISABLED_CurrentTmTime) { struct tm tm; int microseconds; diff --git a/chromium/third_party/libjingle/source/talk/base/virtualsocket_unittest.cc b/chromium/third_party/libjingle/source/talk/base/virtualsocket_unittest.cc index 244568e5589..f3b13fc0f62 100644 --- a/chromium/third_party/libjingle/source/talk/base/virtualsocket_unittest.cc +++ b/chromium/third_party/libjingle/source/talk/base/virtualsocket_unittest.cc @@ -69,7 +69,7 @@ struct Sender : public MessageHandler { count += size; memcpy(dummy, &cur_time, sizeof(cur_time)); - socket->Send(dummy, size); + socket->Send(dummy, size, DSCP_NO_CHANGE); last_send = cur_time; thread->PostDelayed(NextDelay(), this, 1); @@ -315,12 +315,12 @@ class VirtualSocketServerTest : public testing::Test { EmptySocketAddressWithFamily(initial_addr.family()); // Create client and server - AsyncSocket* client = ss_->CreateAsyncSocket(initial_addr.family(), - SOCK_STREAM); - sink.Monitor(client); - AsyncSocket* server = ss_->CreateAsyncSocket(initial_addr.family(), - SOCK_STREAM); - sink.Monitor(server); + scoped_ptr<AsyncSocket> client(ss_->CreateAsyncSocket(initial_addr.family(), + SOCK_STREAM)); + sink.Monitor(client.get()); + scoped_ptr<AsyncSocket> server(ss_->CreateAsyncSocket(initial_addr.family(), + SOCK_STREAM)); + sink.Monitor(server.get()); // Initiate connect EXPECT_EQ(0, server->Bind(initial_addr)); @@ -330,19 +330,17 @@ class VirtualSocketServerTest : public testing::Test { EXPECT_EQ(0, client->Connect(server->GetLocalAddress())); // Server close before socket enters accept queue - EXPECT_FALSE(sink.Check(server, testing::SSE_READ)); + EXPECT_FALSE(sink.Check(server.get(), testing::SSE_READ)); server->Close(); ss_->ProcessMessagesUntilIdle(); // Result: connection failed EXPECT_EQ(client->GetState(), AsyncSocket::CS_CLOSED); - 
EXPECT_TRUE(sink.Check(client, testing::SSE_ERROR)); + EXPECT_TRUE(sink.Check(client.get(), testing::SSE_ERROR)); - // New server - delete server; - server = ss_->CreateAsyncSocket(initial_addr.family(), SOCK_STREAM); - sink.Monitor(server); + server.reset(ss_->CreateAsyncSocket(initial_addr.family(), SOCK_STREAM)); + sink.Monitor(server.get()); // Initiate connect EXPECT_EQ(0, server->Bind(initial_addr)); @@ -354,19 +352,18 @@ class VirtualSocketServerTest : public testing::Test { ss_->ProcessMessagesUntilIdle(); // Server close while socket is in accept queue - EXPECT_TRUE(sink.Check(server, testing::SSE_READ)); + EXPECT_TRUE(sink.Check(server.get(), testing::SSE_READ)); server->Close(); ss_->ProcessMessagesUntilIdle(); // Result: connection failed EXPECT_EQ(client->GetState(), AsyncSocket::CS_CLOSED); - EXPECT_TRUE(sink.Check(client, testing::SSE_ERROR)); + EXPECT_TRUE(sink.Check(client.get(), testing::SSE_ERROR)); // New server - delete server; - server = ss_->CreateAsyncSocket(initial_addr.family(), SOCK_STREAM); - sink.Monitor(server); + server.reset(ss_->CreateAsyncSocket(initial_addr.family(), SOCK_STREAM)); + sink.Monitor(server.get()); // Initiate connect EXPECT_EQ(0, server->Bind(initial_addr)); @@ -378,10 +375,10 @@ class VirtualSocketServerTest : public testing::Test { ss_->ProcessMessagesUntilIdle(); // Server accepts connection - EXPECT_TRUE(sink.Check(server, testing::SSE_READ)); - AsyncSocket* accepted = server->Accept(&accept_addr); - ASSERT_TRUE(NULL != accepted); - sink.Monitor(accepted); + EXPECT_TRUE(sink.Check(server.get(), testing::SSE_READ)); + scoped_ptr<AsyncSocket> accepted(server->Accept(&accept_addr)); + ASSERT_TRUE(NULL != accepted.get()); + sink.Monitor(accepted.get()); // Client closes before connection complets EXPECT_EQ(accepted->GetState(), AsyncSocket::CS_CONNECTED); @@ -394,8 +391,8 @@ class VirtualSocketServerTest : public testing::Test { // Result: accepted socket closes EXPECT_EQ(accepted->GetState(), AsyncSocket::CS_CLOSED); - EXPECT_TRUE(sink.Check(accepted, testing::SSE_CLOSE)); - EXPECT_FALSE(sink.Check(client, testing::SSE_CLOSE)); + EXPECT_TRUE(sink.Check(accepted.get(), testing::SSE_CLOSE)); + EXPECT_FALSE(sink.Check(client.get(), testing::SSE_CLOSE)); } void CloseTest(const SocketAddress& initial_addr) { @@ -409,8 +406,9 @@ class VirtualSocketServerTest : public testing::Test { EXPECT_EQ(a->GetLocalAddress().family(), initial_addr.family()); - AsyncSocket* b = ss_->CreateAsyncSocket(initial_addr.family(), SOCK_STREAM); - sink.Monitor(b); + scoped_ptr<AsyncSocket> b(ss_->CreateAsyncSocket(initial_addr.family(), + SOCK_STREAM)); + sink.Monitor(b.get()); b->Bind(initial_addr); EXPECT_EQ(b->GetLocalAddress().family(), initial_addr.family()); @@ -423,7 +421,7 @@ class VirtualSocketServerTest : public testing::Test { EXPECT_EQ(a->GetState(), AsyncSocket::CS_CONNECTED); EXPECT_EQ(a->GetRemoteAddress(), b->GetLocalAddress()); - EXPECT_TRUE(sink.Check(b, testing::SSE_OPEN)); + EXPECT_TRUE(sink.Check(b.get(), testing::SSE_OPEN)); EXPECT_EQ(b->GetState(), AsyncSocket::CS_CONNECTED); EXPECT_EQ(b->GetRemoteAddress(), a->GetLocalAddress()); @@ -434,14 +432,15 @@ class VirtualSocketServerTest : public testing::Test { ss_->ProcessMessagesUntilIdle(); char buffer[10]; - EXPECT_FALSE(sink.Check(b, testing::SSE_READ)); + EXPECT_FALSE(sink.Check(b.get(), testing::SSE_READ)); EXPECT_EQ(-1, b->Recv(buffer, 10)); EXPECT_TRUE(sink.Check(a, testing::SSE_CLOSE)); EXPECT_EQ(a->GetState(), AsyncSocket::CS_CLOSED); EXPECT_EQ(a->GetRemoteAddress(), kEmptyAddr); - 
EXPECT_FALSE(sink.Check(b, testing::SSE_CLOSE)); // No signal for Closer + // No signal for Closer + EXPECT_FALSE(sink.Check(b.get(), testing::SSE_CLOSE)); EXPECT_EQ(b->GetState(), AsyncSocket::CS_CLOSED); EXPECT_EQ(b->GetRemoteAddress(), kEmptyAddr); } diff --git a/chromium/third_party/libjingle/source/talk/build/common.gypi b/chromium/third_party/libjingle/source/talk/build/common.gypi index a8fcd57aaa7..50acb39ea95 100644 --- a/chromium/third_party/libjingle/source/talk/build/common.gypi +++ b/chromium/third_party/libjingle/source/talk/build/common.gypi @@ -2,26 +2,26 @@ # libjingle # Copyright 2012, Google Inc. # -# Redistribution and use in source and binary forms, with or without +# Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # -# 1. Redistributions of source code must retain the above copyright notice, +# 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. -# 3. The name of the author may not be used to endorse or promote products +# 3. The name of the author may not be used to endorse or promote products # derived from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED -# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO -# EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; # OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, -# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR -# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF # ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # @@ -36,8 +36,6 @@ # flood of chromium-style warnings. 'clang_use_chrome_plugins%': 0, 'libpeer_target_type%': 'static_library', - # Whether or not to build the ObjectiveC PeerConnection API & tests. - 'libjingle_objc%': 0, 'conditions': [ ['OS=="android" or OS=="linux"', { # TODO(henrike): make sure waterfall bots have $JAVA_HOME configured @@ -110,7 +108,7 @@ 'HAVE_OPENSSL_SSL_H=1', ], }], - ['libjingle_objc==1', { + ['OS=="ios" or (OS=="mac" and target_arch!="ia32")', { 'defines': [ 'CARBON_DEPRECATED=YES', ], diff --git a/chromium/third_party/libjingle/source/talk/examples/android/src/org/appspot/apprtc/AppRTCClient.java b/chromium/third_party/libjingle/source/talk/examples/android/src/org/appspot/apprtc/AppRTCClient.java deleted file mode 100644 index fe415643738..00000000000 --- a/chromium/third_party/libjingle/source/talk/examples/android/src/org/appspot/apprtc/AppRTCClient.java +++ /dev/null @@ -1,432 +0,0 @@ -/* - * libjingle - * Copyright 2013, Google Inc. 
- * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * 3. The name of the author may not be used to endorse or promote products - * derived from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED - * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO - * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, - * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; - * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, - * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR - * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF - * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - -package org.appspot.apprtc; - -import android.app.Activity; -import android.os.AsyncTask; -import android.util.Log; - -import org.json.JSONArray; -import org.json.JSONException; -import org.json.JSONObject; -import org.webrtc.MediaConstraints; -import org.webrtc.PeerConnection; - -import java.io.IOException; -import java.io.InputStream; -import java.net.HttpURLConnection; -import java.net.URL; -import java.net.URLConnection; -import java.util.LinkedList; -import java.util.List; -import java.util.Scanner; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -/** - * Negotiates signaling for chatting with apprtc.appspot.com "rooms". - * Uses the client<->server specifics of the apprtc AppEngine webapp. - * - * To use: create an instance of this object (registering a message handler) and - * call connectToRoom(). Once that's done call sendMessage() and wait for the - * registered handler to be called with received messages. - */ -public class AppRTCClient { - private static final String TAG = "AppRTCClient"; - private GAEChannelClient channelClient; - private final Activity activity; - private final GAEChannelClient.MessageHandler gaeHandler; - private final IceServersObserver iceServersObserver; - - // These members are only read/written under sendQueue's lock. - private LinkedList<String> sendQueue = new LinkedList<String>(); - private AppRTCSignalingParameters appRTCSignalingParameters; - - /** - * Callback fired once the room's signaling parameters specify the set of - * ICE servers to use. - */ - public static interface IceServersObserver { - public void onIceServers(List<PeerConnection.IceServer> iceServers); - } - - public AppRTCClient( - Activity activity, GAEChannelClient.MessageHandler gaeHandler, - IceServersObserver iceServersObserver) { - this.activity = activity; - this.gaeHandler = gaeHandler; - this.iceServersObserver = iceServersObserver; - } - - /** - * Asynchronously connect to an AppRTC room URL, e.g. - * https://apprtc.appspot.com/?r=NNN and register message-handling callbacks - * on its GAE Channel. 
- */ - public void connectToRoom(String url) { - while (url.indexOf('?') < 0) { - // Keep redirecting until we get a room number. - (new RedirectResolver()).execute(url); - return; // RedirectResolver above calls us back with the next URL. - } - (new RoomParameterGetter()).execute(url); - } - - /** - * Disconnect from the GAE Channel. - */ - public void disconnect() { - if (channelClient != null) { - channelClient.close(); - channelClient = null; - } - } - - /** - * Queue a message for sending to the room's channel and send it if already - * connected (other wise queued messages are drained when the channel is - eventually established). - */ - public synchronized void sendMessage(String msg) { - synchronized (sendQueue) { - sendQueue.add(msg); - } - requestQueueDrainInBackground(); - } - - public boolean isInitiator() { - return appRTCSignalingParameters.initiator; - } - - public MediaConstraints pcConstraints() { - return appRTCSignalingParameters.pcConstraints; - } - - public MediaConstraints videoConstraints() { - return appRTCSignalingParameters.videoConstraints; - } - - // Struct holding the signaling parameters of an AppRTC room. - private class AppRTCSignalingParameters { - public final List<PeerConnection.IceServer> iceServers; - public final String gaeBaseHref; - public final String channelToken; - public final String postMessageUrl; - public final boolean initiator; - public final MediaConstraints pcConstraints; - public final MediaConstraints videoConstraints; - - public AppRTCSignalingParameters( - List<PeerConnection.IceServer> iceServers, - String gaeBaseHref, String channelToken, String postMessageUrl, - boolean initiator, MediaConstraints pcConstraints, - MediaConstraints videoConstraints) { - this.iceServers = iceServers; - this.gaeBaseHref = gaeBaseHref; - this.channelToken = channelToken; - this.postMessageUrl = postMessageUrl; - this.initiator = initiator; - this.pcConstraints = pcConstraints; - this.videoConstraints = videoConstraints; - } - } - - // Load the given URL and return the value of the Location header of the - // resulting 302 response. If the result is not a 302, throws. - private class RedirectResolver extends AsyncTask<String, Void, String> { - @Override - protected String doInBackground(String... urls) { - if (urls.length != 1) { - throw new RuntimeException("Must be called with a single URL"); - } - try { - return followRedirect(urls[0]); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - - @Override - protected void onPostExecute(String url) { - connectToRoom(url); - } - - private String followRedirect(String url) throws IOException { - HttpURLConnection connection = (HttpURLConnection) - new URL(url).openConnection(); - connection.setInstanceFollowRedirects(false); - int code = connection.getResponseCode(); - if (code != HttpURLConnection.HTTP_MOVED_TEMP) { - throw new IOException("Unexpected response: " + code + " for " + url + - ", with contents: " + drainStream(connection.getInputStream())); - } - int n = 0; - String name, value; - while ((name = connection.getHeaderFieldKey(n)) != null) { - value = connection.getHeaderField(n); - if (name.equals("Location")) { - return value; - } - ++n; - } - throw new IOException("Didn't find Location header!"); - } - } - - // AsyncTask that converts an AppRTC room URL into the set of signaling - // parameters to use with that room. 
- private class RoomParameterGetter - extends AsyncTask<String, Void, AppRTCSignalingParameters> { - @Override - protected AppRTCSignalingParameters doInBackground(String... urls) { - if (urls.length != 1) { - throw new RuntimeException("Must be called with a single URL"); - } - try { - return getParametersForRoomUrl(urls[0]); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - - @Override - protected void onPostExecute(AppRTCSignalingParameters params) { - channelClient = - new GAEChannelClient(activity, params.channelToken, gaeHandler); - synchronized (sendQueue) { - appRTCSignalingParameters = params; - } - requestQueueDrainInBackground(); - iceServersObserver.onIceServers(appRTCSignalingParameters.iceServers); - } - - // Fetches |url| and fishes the signaling parameters out of the HTML via - // regular expressions. - // - // TODO(fischman): replace this hackery with a dedicated JSON-serving URL in - // apprtc so that this isn't necessary (here and in other future apps that - // want to interop with apprtc). - private AppRTCSignalingParameters getParametersForRoomUrl(String url) - throws IOException { - final Pattern fullRoomPattern = Pattern.compile( - ".*\n *Sorry, this room is full\\..*"); - - String roomHtml = - drainStream((new URL(url)).openConnection().getInputStream()); - - Matcher fullRoomMatcher = fullRoomPattern.matcher(roomHtml); - if (fullRoomMatcher.find()) { - throw new IOException("Room is full!"); - } - - String gaeBaseHref = url.substring(0, url.indexOf('?')); - String token = getVarValue(roomHtml, "channelToken", true); - String postMessageUrl = "/message?r=" + - getVarValue(roomHtml, "roomKey", true) + "&u=" + - getVarValue(roomHtml, "me", true); - boolean initiator = getVarValue(roomHtml, "initiator", false).equals("1"); - LinkedList<PeerConnection.IceServer> iceServers = - iceServersFromPCConfigJSON(getVarValue(roomHtml, "pcConfig", false)); - - boolean isTurnPresent = false; - for (PeerConnection.IceServer server : iceServers) { - if (server.uri.startsWith("turn:")) { - isTurnPresent = true; - break; - } - } - if (!isTurnPresent) { - iceServers.add( - requestTurnServer(getVarValue(roomHtml, "turnUrl", true))); - } - - MediaConstraints pcConstraints = constraintsFromJSON( - getVarValue(roomHtml, "pcConstraints", false)); - Log.d(TAG, "pcConstraints: " + pcConstraints); - - MediaConstraints videoConstraints = constraintsFromJSON( - getVideoConstraints( - getVarValue(roomHtml, "mediaConstraints", false))); - Log.d(TAG, "videoConstraints: " + videoConstraints); - - return new AppRTCSignalingParameters( - iceServers, gaeBaseHref, token, postMessageUrl, initiator, - pcConstraints, videoConstraints); - } - - private String getVideoConstraints(String mediaConstraintsString) { - try { - JSONObject json = new JSONObject(mediaConstraintsString); - JSONObject videoJson = json.optJSONObject("video"); - if (videoJson == null) { - return ""; - } - return videoJson.toString(); - } catch (JSONException e) { - throw new RuntimeException(e); - } - } - - private MediaConstraints constraintsFromJSON(String jsonString) { - try { - MediaConstraints constraints = new MediaConstraints(); - JSONObject json = new JSONObject(jsonString); - JSONObject mandatoryJSON = json.optJSONObject("mandatory"); - if (mandatoryJSON != null) { - JSONArray mandatoryKeys = mandatoryJSON.names(); - if (mandatoryKeys != null) { - for (int i = 0; i < mandatoryKeys.length(); ++i) { - String key = (String) mandatoryKeys.getString(i); - String value = mandatoryJSON.getString(key); - 
constraints.mandatory.add( - new MediaConstraints.KeyValuePair(key, value)); - } - } - } - JSONArray optionalJSON = json.optJSONArray("optional"); - if (optionalJSON != null) { - for (int i = 0; i < optionalJSON.length(); ++i) { - JSONObject keyValueDict = optionalJSON.getJSONObject(i); - String key = keyValueDict.names().getString(0); - String value = keyValueDict.getString(key); - constraints.optional.add( - new MediaConstraints.KeyValuePair(key, value)); - } - } - return constraints; - } catch (JSONException e) { - throw new RuntimeException(e); - } - } - - // Scan |roomHtml| for declaration & assignment of |varName| and return its - // value, optionally stripping outside quotes if |stripQuotes| requests it. - private String getVarValue( - String roomHtml, String varName, boolean stripQuotes) - throws IOException { - final Pattern pattern = Pattern.compile( - ".*\n *var " + varName + " = ([^\n]*);\n.*"); - Matcher matcher = pattern.matcher(roomHtml); - if (!matcher.find()) { - throw new IOException("Missing " + varName + " in HTML: " + roomHtml); - } - String varValue = matcher.group(1); - if (matcher.find()) { - throw new IOException("Too many " + varName + " in HTML: " + roomHtml); - } - if (stripQuotes) { - varValue = varValue.substring(1, varValue.length() - 1); - } - return varValue; - } - - // Requests & returns a TURN ICE Server based on a request URL. Must be run - // off the main thread! - private PeerConnection.IceServer requestTurnServer(String url) { - try { - URLConnection connection = (new URL(url)).openConnection(); - connection.addRequestProperty("user-agent", "Mozilla/5.0"); - connection.addRequestProperty("origin", "https://apprtc.appspot.com"); - String response = drainStream(connection.getInputStream()); - JSONObject responseJSON = new JSONObject(response); - String uri = responseJSON.getJSONArray("uris").getString(0); - String username = responseJSON.getString("username"); - String password = responseJSON.getString("password"); - return new PeerConnection.IceServer(uri, username, password); - } catch (JSONException e) { - throw new RuntimeException(e); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - } - - // Return the list of ICE servers described by a WebRTCPeerConnection - // configuration string. - private LinkedList<PeerConnection.IceServer> iceServersFromPCConfigJSON( - String pcConfig) { - try { - JSONObject json = new JSONObject(pcConfig); - JSONArray servers = json.getJSONArray("iceServers"); - LinkedList<PeerConnection.IceServer> ret = - new LinkedList<PeerConnection.IceServer>(); - for (int i = 0; i < servers.length(); ++i) { - JSONObject server = servers.getJSONObject(i); - String url = server.getString("url"); - String credential = - server.has("credential") ? server.getString("credential") : ""; - ret.add(new PeerConnection.IceServer(url, "", credential)); - } - return ret; - } catch (JSONException e) { - throw new RuntimeException(e); - } - } - - // Request an attempt to drain the send queue, on a background thread. - private void requestQueueDrainInBackground() { - (new AsyncTask<Void, Void, Void>() { - public Void doInBackground(Void... unused) { - maybeDrainQueue(); - return null; - } - }).execute(); - } - - // Send all queued messages if connected to the room. 
- private void maybeDrainQueue() { - synchronized (sendQueue) { - if (appRTCSignalingParameters == null) { - return; - } - try { - for (String msg : sendQueue) { - URLConnection connection = new URL( - appRTCSignalingParameters.gaeBaseHref + - appRTCSignalingParameters.postMessageUrl).openConnection(); - connection.setDoOutput(true); - connection.getOutputStream().write(msg.getBytes("UTF-8")); - if (!connection.getHeaderField(null).startsWith("HTTP/1.1 200 ")) { - throw new IOException( - "Non-200 response to POST: " + connection.getHeaderField(null) + - " for msg: " + msg); - } - } - } catch (IOException e) { - throw new RuntimeException(e); - } - sendQueue.clear(); - } - } - - // Return the contents of an InputStream as a String. - private static String drainStream(InputStream in) { - Scanner s = new Scanner(in).useDelimiter("\\A"); - return s.hasNext() ? s.next() : ""; - } -} diff --git a/chromium/third_party/libjingle/source/talk/examples/android/src/org/appspot/apprtc/AppRTCDemoActivity.java b/chromium/third_party/libjingle/source/talk/examples/android/src/org/appspot/apprtc/AppRTCDemoActivity.java deleted file mode 100644 index 66187538dfd..00000000000 --- a/chromium/third_party/libjingle/source/talk/examples/android/src/org/appspot/apprtc/AppRTCDemoActivity.java +++ /dev/null @@ -1,518 +0,0 @@ -/* - * libjingle - * Copyright 2013, Google Inc. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * 3. The name of the author may not be used to endorse or promote products - * derived from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED - * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO - * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, - * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; - * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, - * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR - * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF - * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- */ - -package org.appspot.apprtc; - -import android.app.Activity; -import android.app.AlertDialog; -import android.content.DialogInterface; -import android.content.Intent; -import android.graphics.Point; -import android.media.AudioManager; -import android.os.Bundle; -import android.os.PowerManager; -import android.util.Log; -import android.webkit.JavascriptInterface; -import android.widget.EditText; -import android.widget.Toast; - -import org.json.JSONException; -import org.json.JSONObject; -import org.webrtc.DataChannel; -import org.webrtc.IceCandidate; -import org.webrtc.Logging; -import org.webrtc.MediaConstraints; -import org.webrtc.MediaStream; -import org.webrtc.PeerConnection; -import org.webrtc.PeerConnectionFactory; -import org.webrtc.SdpObserver; -import org.webrtc.SessionDescription; -import org.webrtc.StatsObserver; -import org.webrtc.StatsReport; -import org.webrtc.VideoCapturer; -import org.webrtc.VideoRenderer; -import org.webrtc.VideoRenderer.I420Frame; -import org.webrtc.VideoSource; -import org.webrtc.VideoTrack; - -import java.util.EnumSet; -import java.util.LinkedList; -import java.util.List; - -/** - * Main Activity of the AppRTCDemo Android app demonstrating interoperability - * between the Android/Java implementation of PeerConnection and the - * apprtc.appspot.com demo webapp. - */ -public class AppRTCDemoActivity extends Activity - implements AppRTCClient.IceServersObserver { - private static final String TAG = "AppRTCDemoActivity"; - private PeerConnection pc; - private final PCObserver pcObserver = new PCObserver(); - private final SDPObserver sdpObserver = new SDPObserver(); - private final GAEChannelClient.MessageHandler gaeHandler = new GAEHandler(); - private AppRTCClient appRtcClient = new AppRTCClient(this, gaeHandler, this); - private VideoStreamsView vsv; - private Toast logToast; - private LinkedList<IceCandidate> queuedRemoteCandidates = - new LinkedList<IceCandidate>(); - // Synchronize on quit[0] to avoid teardown-related crashes. - private final Boolean[] quit = new Boolean[] { false }; - private MediaConstraints sdpMediaConstraints; - private PowerManager.WakeLock wakeLock; - - @Override - public void onCreate(Bundle savedInstanceState) { - super.onCreate(savedInstanceState); - - // Since the error-handling of this demo consists of throwing - // RuntimeExceptions and we assume that'll terminate the app, we install - // this default handler so it's applied to background threads as well. - Thread.setDefaultUncaughtExceptionHandler( - new Thread.UncaughtExceptionHandler() { - public void uncaughtException(Thread t, Throwable e) { - e.printStackTrace(); - System.exit(-1); - } - }); - - // Uncomment to get ALL WebRTC tracing and SENSITIVE libjingle logging. - // Logging.enableTracing( - // "/sdcard/trace.txt", - // EnumSet.of(Logging.TraceLevel.TRACE_ALL), - // Logging.Severity.LS_SENSITIVE); - - PowerManager powerManager = (PowerManager) getSystemService(POWER_SERVICE); - wakeLock = powerManager.newWakeLock( - PowerManager.SCREEN_BRIGHT_WAKE_LOCK, "AppRTCDemo"); - wakeLock.acquire(); - - Point displaySize = new Point(); - getWindowManager().getDefaultDisplay().getSize(displaySize); - vsv = new VideoStreamsView(this, displaySize); - setContentView(vsv); - - abortUnless(PeerConnectionFactory.initializeAndroidGlobals(this), - "Failed to initializeAndroidGlobals"); - - AudioManager audioManager = - ((AudioManager) getSystemService(AUDIO_SERVICE)); - audioManager.setMode(audioManager.isWiredHeadsetOn() ? 
- AudioManager.MODE_IN_CALL : AudioManager.MODE_IN_COMMUNICATION); - audioManager.setSpeakerphoneOn(!audioManager.isWiredHeadsetOn()); - - sdpMediaConstraints = new MediaConstraints(); - sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair( - "OfferToReceiveAudio", "true")); - sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair( - "OfferToReceiveVideo", "true")); - - final Intent intent = getIntent(); - if ("android.intent.action.VIEW".equals(intent.getAction())) { - connectToRoom(intent.getData().toString()); - return; - } - showGetRoomUI(); - } - - private void showGetRoomUI() { - final EditText roomInput = new EditText(this); - roomInput.setText("https://apprtc.appspot.com/?r="); - roomInput.setSelection(roomInput.getText().length()); - DialogInterface.OnClickListener listener = - new DialogInterface.OnClickListener() { - @Override public void onClick(DialogInterface dialog, int which) { - abortUnless(which == DialogInterface.BUTTON_POSITIVE, "lolwat?"); - dialog.dismiss(); - connectToRoom(roomInput.getText().toString()); - } - }; - AlertDialog.Builder builder = new AlertDialog.Builder(this); - builder - .setMessage("Enter room URL").setView(roomInput) - .setPositiveButton("Go!", listener).show(); - } - - private void connectToRoom(String roomUrl) { - logAndToast("Connecting to room..."); - appRtcClient.connectToRoom(roomUrl); - } - - @Override - public void onPause() { - super.onPause(); - vsv.onPause(); - // TODO(fischman): IWBN to support pause/resume, but the WebRTC codebase - // isn't ready for that yet; e.g. - // https://code.google.com/p/webrtc/issues/detail?id=1407 - // Instead, simply exit instead of pausing (the alternative leads to - // system-borking with wedged cameras; e.g. b/8224551) - disconnectAndExit(); - } - - @Override - public void onResume() { - // The onResume() is a lie! See TODO(fischman) in onPause() above. - super.onResume(); - vsv.onResume(); - } - - @Override - public void onIceServers(List<PeerConnection.IceServer> iceServers) { - PeerConnectionFactory factory = new PeerConnectionFactory(); - - pc = factory.createPeerConnection( - iceServers, appRtcClient.pcConstraints(), pcObserver); - - { - final PeerConnection finalPC = pc; - final Runnable repeatedStatsLogger = new Runnable() { - public void run() { - synchronized (quit[0]) { - if (quit[0]) { - return; - } - final Runnable runnableThis = this; - boolean success = finalPC.getStats(new StatsObserver() { - public void onComplete(StatsReport[] reports) { - for (StatsReport report : reports) { - Log.d(TAG, "Stats: " + report.toString()); - } - vsv.postDelayed(runnableThis, 10000); - } - }, null); - if (!success) { - throw new RuntimeException("getStats() return false!"); - } - } - } - }; - vsv.postDelayed(repeatedStatsLogger, 10000); - } - - { - logAndToast("Creating local video source..."); - VideoCapturer capturer = getVideoCapturer(); - VideoSource videoSource = factory.createVideoSource( - capturer, appRtcClient.videoConstraints()); - MediaStream lMS = factory.createLocalMediaStream("ARDAMS"); - VideoTrack videoTrack = factory.createVideoTrack("ARDAMSv0", videoSource); - videoTrack.addRenderer(new VideoRenderer(new VideoCallbacks( - vsv, VideoStreamsView.Endpoint.LOCAL))); - lMS.addTrack(videoTrack); - lMS.addTrack(factory.createAudioTrack("ARDAMSa0")); - pc.addStream(lMS, new MediaConstraints()); - } - logAndToast("Waiting for ICE candidates..."); - } - - // Cycle through likely device names for the camera and return the first - // capturer that works, or crash if none do. 
- private VideoCapturer getVideoCapturer() { - String[] cameraFacing = { "front", "back" }; - int[] cameraIndex = { 0, 1 }; - int[] cameraOrientation = { 0, 90, 180, 270 }; - for (String facing : cameraFacing) { - for (int index : cameraIndex) { - for (int orientation : cameraOrientation) { - String name = "Camera " + index + ", Facing " + facing + - ", Orientation " + orientation; - VideoCapturer capturer = VideoCapturer.create(name); - if (capturer != null) { - logAndToast("Using camera: " + name); - return capturer; - } - } - } - } - throw new RuntimeException("Failed to open capturer"); - } - - @Override - public void onDestroy() { - super.onDestroy(); - } - - // Poor-man's assert(): die with |msg| unless |condition| is true. - private static void abortUnless(boolean condition, String msg) { - if (!condition) { - throw new RuntimeException(msg); - } - } - - // Log |msg| and Toast about it. - private void logAndToast(String msg) { - Log.d(TAG, msg); - if (logToast != null) { - logToast.cancel(); - } - logToast = Toast.makeText(this, msg, Toast.LENGTH_SHORT); - logToast.show(); - } - - // Send |json| to the underlying AppEngine Channel. - private void sendMessage(JSONObject json) { - appRtcClient.sendMessage(json.toString()); - } - - // Put a |key|->|value| mapping in |json|. - private static void jsonPut(JSONObject json, String key, Object value) { - try { - json.put(key, value); - } catch (JSONException e) { - throw new RuntimeException(e); - } - } - - // Implementation detail: observe ICE & stream changes and react accordingly. - private class PCObserver implements PeerConnection.Observer { - @Override public void onIceCandidate(final IceCandidate candidate){ - runOnUiThread(new Runnable() { - public void run() { - JSONObject json = new JSONObject(); - jsonPut(json, "type", "candidate"); - jsonPut(json, "label", candidate.sdpMLineIndex); - jsonPut(json, "id", candidate.sdpMid); - jsonPut(json, "candidate", candidate.sdp); - sendMessage(json); - } - }); - } - - @Override public void onError(){ - runOnUiThread(new Runnable() { - public void run() { - throw new RuntimeException("PeerConnection error!"); - } - }); - } - - @Override public void onSignalingChange( - PeerConnection.SignalingState newState) { - } - - @Override public void onIceConnectionChange( - PeerConnection.IceConnectionState newState) { - } - - @Override public void onIceGatheringChange( - PeerConnection.IceGatheringState newState) { - } - - @Override public void onAddStream(final MediaStream stream){ - runOnUiThread(new Runnable() { - public void run() { - abortUnless(stream.audioTracks.size() == 1 && - stream.videoTracks.size() == 1, - "Weird-looking stream: " + stream); - stream.videoTracks.get(0).addRenderer(new VideoRenderer( - new VideoCallbacks(vsv, VideoStreamsView.Endpoint.REMOTE))); - } - }); - } - - @Override public void onRemoveStream(final MediaStream stream){ - runOnUiThread(new Runnable() { - public void run() { - stream.videoTracks.get(0).dispose(); - } - }); - } - - @Override public void onDataChannel(final DataChannel dc) { - runOnUiThread(new Runnable() { - public void run() { - throw new RuntimeException( - "AppRTC doesn't use data channels, but got: " + dc.label() + - " anyway!"); - } - }); - } - } - - // Implementation detail: handle offer creation/signaling and answer setting, - // as well as adding remote ICE candidates once the answer SDP is set. 
- private class SDPObserver implements SdpObserver { - @Override public void onCreateSuccess(final SessionDescription sdp) { - runOnUiThread(new Runnable() { - public void run() { - logAndToast("Sending " + sdp.type); - JSONObject json = new JSONObject(); - jsonPut(json, "type", sdp.type.canonicalForm()); - jsonPut(json, "sdp", sdp.description); - sendMessage(json); - pc.setLocalDescription(sdpObserver, sdp); - } - }); - } - - @Override public void onSetSuccess() { - runOnUiThread(new Runnable() { - public void run() { - if (appRtcClient.isInitiator()) { - if (pc.getRemoteDescription() != null) { - // We've set our local offer and received & set the remote - // answer, so drain candidates. - drainRemoteCandidates(); - } - } else { - if (pc.getLocalDescription() == null) { - // We just set the remote offer, time to create our answer. - logAndToast("Creating answer"); - pc.createAnswer(SDPObserver.this, sdpMediaConstraints); - } else { - // Sent our answer and set it as local description; drain - // candidates. - drainRemoteCandidates(); - } - } - } - }); - } - - @Override public void onCreateFailure(final String error) { - runOnUiThread(new Runnable() { - public void run() { - throw new RuntimeException("createSDP error: " + error); - } - }); - } - - @Override public void onSetFailure(final String error) { - runOnUiThread(new Runnable() { - public void run() { - throw new RuntimeException("setSDP error: " + error); - } - }); - } - - private void drainRemoteCandidates() { - for (IceCandidate candidate : queuedRemoteCandidates) { - pc.addIceCandidate(candidate); - } - queuedRemoteCandidates = null; - } - } - - // Implementation detail: handler for receiving GAE messages and dispatching - // them appropriately. - private class GAEHandler implements GAEChannelClient.MessageHandler { - @JavascriptInterface public void onOpen() { - if (!appRtcClient.isInitiator()) { - return; - } - logAndToast("Creating offer..."); - pc.createOffer(sdpObserver, sdpMediaConstraints); - } - - @JavascriptInterface public void onMessage(String data) { - try { - JSONObject json = new JSONObject(data); - String type = (String) json.get("type"); - if (type.equals("candidate")) { - IceCandidate candidate = new IceCandidate( - (String) json.get("id"), - json.getInt("label"), - (String) json.get("candidate")); - if (queuedRemoteCandidates != null) { - queuedRemoteCandidates.add(candidate); - } else { - pc.addIceCandidate(candidate); - } - } else if (type.equals("answer") || type.equals("offer")) { - SessionDescription sdp = new SessionDescription( - SessionDescription.Type.fromCanonicalForm(type), - (String) json.get("sdp")); - pc.setRemoteDescription(sdpObserver, sdp); - } else if (type.equals("bye")) { - logAndToast("Remote end hung up; dropping PeerConnection"); - disconnectAndExit(); - } else { - throw new RuntimeException("Unexpected message: " + data); - } - } catch (JSONException e) { - throw new RuntimeException(e); - } - } - - @JavascriptInterface public void onClose() { - disconnectAndExit(); - } - - @JavascriptInterface public void onError(int code, String description) { - disconnectAndExit(); - } - } - - // Disconnect from remote resources, dispose of local resources, and exit. 
- private void disconnectAndExit() { - synchronized (quit[0]) { - if (quit[0]) { - return; - } - quit[0] = true; - wakeLock.release(); - if (pc != null) { - pc.dispose(); - pc = null; - } - if (appRtcClient != null) { - appRtcClient.sendMessage("{\"type\": \"bye\"}"); - appRtcClient.disconnect(); - appRtcClient = null; - } - finish(); - } - } - - // Implementation detail: bridge the VideoRenderer.Callbacks interface to the - // VideoStreamsView implementation. - private class VideoCallbacks implements VideoRenderer.Callbacks { - private final VideoStreamsView view; - private final VideoStreamsView.Endpoint stream; - - public VideoCallbacks( - VideoStreamsView view, VideoStreamsView.Endpoint stream) { - this.view = view; - this.stream = stream; - } - - @Override - public void setSize(final int width, final int height) { - view.queueEvent(new Runnable() { - public void run() { - view.setSize(stream, width, height); - } - }); - } - - @Override - public void renderFrame(I420Frame frame) { - view.queueFrame(stream, frame); - } - } -} diff --git a/chromium/third_party/libjingle/source/talk/examples/android/src/org/appspot/apprtc/FramePool.java b/chromium/third_party/libjingle/source/talk/examples/android/src/org/appspot/apprtc/FramePool.java deleted file mode 100644 index 6f112865053..00000000000 --- a/chromium/third_party/libjingle/source/talk/examples/android/src/org/appspot/apprtc/FramePool.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * libjingle - * Copyright 2013, Google Inc. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * 3. The name of the author may not be used to endorse or promote products - * derived from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED - * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO - * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, - * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; - * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, - * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR - * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF - * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - -package org.appspot.apprtc; - -import org.webrtc.VideoRenderer.I420Frame; - -import java.util.HashMap; -import java.util.LinkedList; - -/** - * This class acts as an allocation pool meant to minimize GC churn caused by - * frame allocation & disposal. The public API comprises of just two methods: - * copyFrame(), which allocates as necessary and copies, and - * returnFrame(), which returns frame ownership to the pool for use by a later - * call to copyFrame(). - * - * This class is thread-safe; calls to copyFrame() and returnFrame() are allowed - * to happen on any thread. 
- */ -class FramePool { - // Maps each summary code (see summarizeFrameDimensions()) to a list of frames - // of that description. - private final HashMap<Long, LinkedList<I420Frame>> availableFrames = - new HashMap<Long, LinkedList<I420Frame>>(); - // Every dimension (e.g. width, height, stride) of a frame must be less than - // this value. - private static final long MAX_DIMENSION = 4096; - - public I420Frame takeFrame(I420Frame source) { - long desc = summarizeFrameDimensions(source); - I420Frame dst = null; - synchronized (availableFrames) { - LinkedList<I420Frame> frames = availableFrames.get(desc); - if (frames == null) { - frames = new LinkedList<I420Frame>(); - availableFrames.put(desc, frames); - } - if (!frames.isEmpty()) { - dst = frames.pop(); - } else { - dst = new I420Frame( - source.width, source.height, source.yuvStrides, null); - } - } - return dst; - } - - public void returnFrame(I420Frame frame) { - long desc = summarizeFrameDimensions(frame); - synchronized (availableFrames) { - LinkedList<I420Frame> frames = availableFrames.get(desc); - if (frames == null) { - throw new IllegalArgumentException("Unexpected frame dimensions"); - } - frames.add(frame); - } - } - - /** Validate that |frame| can be managed by the pool. */ - public static boolean validateDimensions(I420Frame frame) { - return frame.width < MAX_DIMENSION && frame.height < MAX_DIMENSION && - frame.yuvStrides[0] < MAX_DIMENSION && - frame.yuvStrides[1] < MAX_DIMENSION && - frame.yuvStrides[2] < MAX_DIMENSION; - } - - // Return a code summarizing the dimensions of |frame|. Two frames that - // return the same summary are guaranteed to be able to store each others' - // contents. Used like Object.hashCode(), but we need all the bits of a long - // to do a good job, and hashCode() returns int, so we do this. - private static long summarizeFrameDimensions(I420Frame frame) { - long ret = frame.width; - ret = ret * MAX_DIMENSION + frame.height; - ret = ret * MAX_DIMENSION + frame.yuvStrides[0]; - ret = ret * MAX_DIMENSION + frame.yuvStrides[1]; - ret = ret * MAX_DIMENSION + frame.yuvStrides[2]; - return ret; - } -} diff --git a/chromium/third_party/libjingle/source/talk/examples/android/src/org/appspot/apprtc/GAEChannelClient.java b/chromium/third_party/libjingle/source/talk/examples/android/src/org/appspot/apprtc/GAEChannelClient.java deleted file mode 100644 index 29a91134808..00000000000 --- a/chromium/third_party/libjingle/source/talk/examples/android/src/org/appspot/apprtc/GAEChannelClient.java +++ /dev/null @@ -1,173 +0,0 @@ -/* - * libjingle - * Copyright 2013, Google Inc. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * 3. The name of the author may not be used to endorse or promote products - * derived from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED - * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO - * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, - * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; - * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, - * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR - * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF - * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - -package org.appspot.apprtc; - -import android.annotation.SuppressLint; -import android.app.Activity; -import android.util.Log; -import android.webkit.ConsoleMessage; -import android.webkit.JavascriptInterface; -import android.webkit.WebChromeClient; -import android.webkit.WebView; -import android.webkit.WebViewClient; - -/** - * Java-land version of Google AppEngine's JavaScript Channel API: - * https://developers.google.com/appengine/docs/python/channel/javascript - * - * Requires a hosted HTML page that opens the desired channel and dispatches JS - * on{Open,Message,Close,Error}() events to a global object named - * "androidMessageHandler". - */ -public class GAEChannelClient { - private static final String TAG = "GAEChannelClient"; - private WebView webView; - private final ProxyingMessageHandler proxyingMessageHandler; - - /** - * Callback interface for messages delivered on the Google AppEngine channel. - * - * Methods are guaranteed to be invoked on the UI thread of |activity| passed - * to GAEChannelClient's constructor. - */ - public interface MessageHandler { - public void onOpen(); - public void onMessage(String data); - public void onClose(); - public void onError(int code, String description); - } - - /** Asynchronously open an AppEngine channel. */ - @SuppressLint("SetJavaScriptEnabled") - public GAEChannelClient( - Activity activity, String token, MessageHandler handler) { - webView = new WebView(activity); - webView.getSettings().setJavaScriptEnabled(true); - webView.setWebChromeClient(new WebChromeClient() { // Purely for debugging. - public boolean onConsoleMessage (ConsoleMessage msg) { - Log.d(TAG, "console: " + msg.message() + " at " + - msg.sourceId() + ":" + msg.lineNumber()); - return false; - } - }); - webView.setWebViewClient(new WebViewClient() { // Purely for debugging. - public void onReceivedError( - WebView view, int errorCode, String description, - String failingUrl) { - Log.e(TAG, "JS error: " + errorCode + " in " + failingUrl + - ", desc: " + description); - } - }); - proxyingMessageHandler = - new ProxyingMessageHandler(activity, handler, token); - webView.addJavascriptInterface( - proxyingMessageHandler, "androidMessageHandler"); - webView.loadUrl("file:///android_asset/channel.html"); - } - - /** Close the connection to the AppEngine channel. */ - public void close() { - if (webView == null) { - return; - } - proxyingMessageHandler.disconnect(); - webView.removeJavascriptInterface("androidMessageHandler"); - webView.loadUrl("about:blank"); - webView = null; - } - - // Helper class for proxying callbacks from the Java<->JS interaction - // (private, background) thread to the Activity's UI thread. 
- private static class ProxyingMessageHandler { - private final Activity activity; - private final MessageHandler handler; - private final boolean[] disconnected = { false }; - private final String token; - - public - ProxyingMessageHandler(Activity activity, MessageHandler handler, - String token) { - this.activity = activity; - this.handler = handler; - this.token = token; - } - - public void disconnect() { - disconnected[0] = true; - } - - private boolean disconnected() { - return disconnected[0]; - } - - @JavascriptInterface public String getToken() { - return token; - } - - @JavascriptInterface public void onOpen() { - activity.runOnUiThread(new Runnable() { - public void run() { - if (!disconnected()) { - handler.onOpen(); - } - } - }); - } - - @JavascriptInterface public void onMessage(final String data) { - activity.runOnUiThread(new Runnable() { - public void run() { - if (!disconnected()) { - handler.onMessage(data); - } - } - }); - } - - @JavascriptInterface public void onClose() { - activity.runOnUiThread(new Runnable() { - public void run() { - if (!disconnected()) { - handler.onClose(); - } - } - }); - } - - @JavascriptInterface public void onError( - final int code, final String description) { - activity.runOnUiThread(new Runnable() { - public void run() { - if (!disconnected()) { - handler.onError(code, description); - } - } - }); - } - } -} diff --git a/chromium/third_party/libjingle/source/talk/examples/android/src/org/appspot/apprtc/VideoStreamsView.java b/chromium/third_party/libjingle/source/talk/examples/android/src/org/appspot/apprtc/VideoStreamsView.java deleted file mode 100644 index 30c9bf8f207..00000000000 --- a/chromium/third_party/libjingle/source/talk/examples/android/src/org/appspot/apprtc/VideoStreamsView.java +++ /dev/null @@ -1,323 +0,0 @@ -/* - * libjingle - * Copyright 2013, Google Inc. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * 3. The name of the author may not be used to endorse or promote products - * derived from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED - * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO - * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, - * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; - * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, - * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR - * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF - * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- */ - -package org.appspot.apprtc; - -import android.content.Context; -import android.graphics.Point; -import android.graphics.Rect; -import android.opengl.GLES20; -import android.opengl.GLSurfaceView; -import android.util.Log; - -import org.webrtc.VideoRenderer.I420Frame; - -import java.nio.ByteBuffer; -import java.nio.ByteOrder; -import java.nio.FloatBuffer; -import java.util.EnumMap; - -import javax.microedition.khronos.egl.EGLConfig; -import javax.microedition.khronos.opengles.GL10; - -/** - * A GLSurfaceView{,.Renderer} that efficiently renders YUV frames from local & - * remote VideoTracks using the GPU for CSC. Clients will want to call the - * constructor, setSize() and updateFrame() as appropriate, but none of the - * other public methods of this class are of interest to clients (only to system - * classes). - */ -public class VideoStreamsView - extends GLSurfaceView - implements GLSurfaceView.Renderer { - - /** Identify which of the two video streams is being addressed. */ - public static enum Endpoint { LOCAL, REMOTE }; - - private final static String TAG = "VideoStreamsView"; - private EnumMap<Endpoint, Rect> rects = - new EnumMap<Endpoint, Rect>(Endpoint.class); - private Point screenDimensions; - // [0] are local Y,U,V, [1] are remote Y,U,V. - private int[][] yuvTextures = { { -1, -1, -1}, {-1, -1, -1 }}; - private int posLocation = -1; - private long lastFPSLogTime = System.nanoTime(); - private long numFramesSinceLastLog = 0; - private FramePool framePool = new FramePool(); - // Accessed on multiple threads! Must be synchronized. - private EnumMap<Endpoint, I420Frame> framesToRender = - new EnumMap<Endpoint, I420Frame>(Endpoint.class); - - public VideoStreamsView(Context c, Point screenDimensions) { - super(c); - this.screenDimensions = screenDimensions; - setEGLContextClientVersion(2); - setRenderer(this); - setRenderMode(RENDERMODE_WHEN_DIRTY); - } - - /** Queue |frame| to be uploaded. */ - public void queueFrame(final Endpoint stream, I420Frame frame) { - // Paying for the copy of the YUV data here allows CSC and painting time - // to get spent on the render thread instead of the UI thread. - abortUnless(framePool.validateDimensions(frame), "Frame too large!"); - final I420Frame frameCopy = framePool.takeFrame(frame).copyFrom(frame); - boolean needToScheduleRender; - synchronized (framesToRender) { - // A new render needs to be scheduled (via updateFrames()) iff there isn't - // already a render scheduled, which is true iff framesToRender is empty. - needToScheduleRender = framesToRender.isEmpty(); - I420Frame frameToDrop = framesToRender.put(stream, frameCopy); - if (frameToDrop != null) { - framePool.returnFrame(frameToDrop); - } - } - if (needToScheduleRender) { - queueEvent(new Runnable() { - public void run() { - updateFrames(); - } - }); - } - } - - // Upload the planes from |framesToRender| to the textures owned by this View. - private void updateFrames() { - I420Frame localFrame = null; - I420Frame remoteFrame = null; - synchronized (framesToRender) { - localFrame = framesToRender.remove(Endpoint.LOCAL); - remoteFrame = framesToRender.remove(Endpoint.REMOTE); - } - if (localFrame != null) { - texImage2D(localFrame, yuvTextures[0]); - framePool.returnFrame(localFrame); - } - if (remoteFrame != null) { - texImage2D(remoteFrame, yuvTextures[1]); - framePool.returnFrame(remoteFrame); - } - abortUnless(localFrame != null || remoteFrame != null, - "Nothing to render!"); - requestRender(); - } - - /** Inform this View of the dimensions of frames coming from |stream|. 
*/ - public void setSize(Endpoint stream, int width, int height) { - // Generate 3 texture ids for Y/U/V and place them into |textures|, - // allocating enough storage for |width|x|height| pixels. - int[] textures = yuvTextures[stream == Endpoint.LOCAL ? 0 : 1]; - GLES20.glGenTextures(3, textures, 0); - for (int i = 0; i < 3; ++i) { - int w = i == 0 ? width : width / 2; - int h = i == 0 ? height : height / 2; - GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i); - GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[i]); - GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, w, h, 0, - GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, null); - GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, - GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR); - GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, - GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); - GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, - GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); - GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, - GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); - } - checkNoGLES2Error(); - } - - @Override - protected void onMeasure(int unusedX, int unusedY) { - // Go big or go home! - setMeasuredDimension(screenDimensions.x, screenDimensions.y); - } - - @Override - public void onSurfaceChanged(GL10 unused, int width, int height) { - GLES20.glViewport(0, 0, width, height); - checkNoGLES2Error(); - } - - @Override - public void onDrawFrame(GL10 unused) { - GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); - drawRectangle(yuvTextures[1], remoteVertices); - drawRectangle(yuvTextures[0], localVertices); - ++numFramesSinceLastLog; - long now = System.nanoTime(); - if (lastFPSLogTime == -1 || now - lastFPSLogTime > 1e9) { - double fps = numFramesSinceLastLog / ((now - lastFPSLogTime) / 1e9); - Log.d(TAG, "Rendered FPS: " + fps); - lastFPSLogTime = now; - numFramesSinceLastLog = 1; - } - checkNoGLES2Error(); - } - - @Override - public void onSurfaceCreated(GL10 unused, EGLConfig config) { - int program = GLES20.glCreateProgram(); - addShaderTo(GLES20.GL_VERTEX_SHADER, VERTEX_SHADER_STRING, program); - addShaderTo(GLES20.GL_FRAGMENT_SHADER, FRAGMENT_SHADER_STRING, program); - - GLES20.glLinkProgram(program); - int[] result = new int[] { GLES20.GL_FALSE }; - result[0] = GLES20.GL_FALSE; - GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, result, 0); - abortUnless(result[0] == GLES20.GL_TRUE, - GLES20.glGetProgramInfoLog(program)); - GLES20.glUseProgram(program); - - GLES20.glUniform1i(GLES20.glGetUniformLocation(program, "y_tex"), 0); - GLES20.glUniform1i(GLES20.glGetUniformLocation(program, "u_tex"), 1); - GLES20.glUniform1i(GLES20.glGetUniformLocation(program, "v_tex"), 2); - - // Actually set in drawRectangle(), but queried only once here. - posLocation = GLES20.glGetAttribLocation(program, "in_pos"); - - int tcLocation = GLES20.glGetAttribLocation(program, "in_tc"); - GLES20.glEnableVertexAttribArray(tcLocation); - GLES20.glVertexAttribPointer( - tcLocation, 2, GLES20.GL_FLOAT, false, 0, textureCoords); - - GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f); - checkNoGLES2Error(); - } - - // Wrap a float[] in a direct FloatBuffer using native byte order. - private static FloatBuffer directNativeFloatBuffer(float[] array) { - FloatBuffer buffer = ByteBuffer.allocateDirect(array.length * 4).order( - ByteOrder.nativeOrder()).asFloatBuffer(); - buffer.put(array); - buffer.flip(); - return buffer; - } - - // Upload the YUV planes from |frame| to |textures|. 
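The setSize() method above allocates three GL_LUMINANCE textures per stream: the Y plane at full resolution and the U and V planes at half width and half height, the 4:2:0 layout of the I420 frames that texImage2D() below uploads. A minimal C++ sketch of that plane geometry (struct and helper names here are illustrative, not from the tree):

#include <cstddef>
#include <cstdio>

// Plane sizes for an I420 (4:2:0) frame of the given dimensions, matching
// the "w = i == 0 ? width : width / 2" logic in setSize()/texImage2D().
struct I420Planes {
  int y_width, y_height;  // Luma plane: full resolution.
  int c_width, c_height;  // Chroma planes (U and V): half width, half height.
  size_t total_bytes;     // Y + U + V at one byte per sample.
};

I420Planes ComputeI420Planes(int width, int height) {
  I420Planes p;
  p.y_width = width;
  p.y_height = height;
  p.c_width = width / 2;
  p.c_height = height / 2;
  p.total_bytes = static_cast<size_t>(p.y_width) * p.y_height +
                  2 * static_cast<size_t>(p.c_width) * p.c_height;
  return p;
}

int main() {
  // A 640x480 frame: Y is 640x480, U and V are 320x240, 460800 bytes total.
  I420Planes p = ComputeI420Planes(640, 480);
  std::printf("Y %dx%d, U/V %dx%d, %zu bytes\n",
              p.y_width, p.y_height, p.c_width, p.c_height, p.total_bytes);
  return 0;
}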
- private void texImage2D(I420Frame frame, int[] textures) { - for (int i = 0; i < 3; ++i) { - ByteBuffer plane = frame.yuvPlanes[i]; - GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i); - GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[i]); - int w = i == 0 ? frame.width : frame.width / 2; - int h = i == 0 ? frame.height : frame.height / 2; - abortUnless(w == frame.yuvStrides[i], frame.yuvStrides[i] + "!=" + w); - GLES20.glTexImage2D( - GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, w, h, 0, - GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, plane); - } - checkNoGLES2Error(); - } - - // Draw |textures| using |vertices| (X,Y coordinates). - private void drawRectangle(int[] textures, FloatBuffer vertices) { - for (int i = 0; i < 3; ++i) { - GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i); - GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[i]); - } - - GLES20.glVertexAttribPointer( - posLocation, 2, GLES20.GL_FLOAT, false, 0, vertices); - GLES20.glEnableVertexAttribArray(posLocation); - - GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); - checkNoGLES2Error(); - } - - // Compile & attach a |type| shader specified by |source| to |program|. - private static void addShaderTo( - int type, String source, int program) { - int[] result = new int[] { GLES20.GL_FALSE }; - int shader = GLES20.glCreateShader(type); - GLES20.glShaderSource(shader, source); - GLES20.glCompileShader(shader); - GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, result, 0); - abortUnless(result[0] == GLES20.GL_TRUE, - GLES20.glGetShaderInfoLog(shader) + ", source: " + source); - GLES20.glAttachShader(program, shader); - GLES20.glDeleteShader(shader); - checkNoGLES2Error(); - } - - // Poor-man's assert(): die with |msg| unless |condition| is true. - private static void abortUnless(boolean condition, String msg) { - if (!condition) { - throw new RuntimeException(msg); - } - } - - // Assert that no OpenGL ES 2.0 error has been raised. - private static void checkNoGLES2Error() { - int error = GLES20.glGetError(); - abortUnless(error == GLES20.GL_NO_ERROR, "GLES20 error: " + error); - } - - // Remote image should span the full screen. - private static final FloatBuffer remoteVertices = directNativeFloatBuffer( - new float[] { -1, 1, -1, -1, 1, 1, 1, -1 }); - - // Local image should be thumbnailish. - private static final FloatBuffer localVertices = directNativeFloatBuffer( - new float[] { 0.6f, 0.9f, 0.6f, 0.6f, 0.9f, 0.9f, 0.9f, 0.6f }); - - // Texture Coordinates mapping the entire texture. - private static final FloatBuffer textureCoords = directNativeFloatBuffer( - new float[] { 0, 0, 0, 1, 1, 0, 1, 1 }); - - // Pass-through vertex shader. - private static final String VERTEX_SHADER_STRING = - "varying vec2 interp_tc;\n" + - "\n" + - "attribute vec4 in_pos;\n" + - "attribute vec2 in_tc;\n" + - "\n" + - "void main() {\n" + - " gl_Position = in_pos;\n" + - " interp_tc = in_tc;\n" + - "}\n"; - - // YUV to RGB pixel shader. Loads a pixel from each plane and pass through the - // matrix. 
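The FRAGMENT_SHADER_STRING declared just below applies the YCrCb-to-RGB conversion from fourcc.org: R = Y + 1.403 V, G = Y - 0.344 U - 0.714 V, B = Y + 1.77 U, with U and V re-centered on zero. A CPU-side C++ sketch of the same arithmetic on 8-bit samples, using illustrative helper names:

#include <algorithm>
#include <cstdint>

static uint8_t ClampToByte(float v) {
  return static_cast<uint8_t>(std::min(255.f, std::max(0.f, v)));
}

// Same coefficients as the fragment shader below; the shader works on
// normalized 0..1 samples, so its 0.5 offsets become 128 here.
void YuvToRgb(uint8_t y8, uint8_t u8, uint8_t v8,
              uint8_t* r, uint8_t* g, uint8_t* b) {
  float y = static_cast<float>(y8);
  float u = static_cast<float>(u8) - 128.f;
  float v = static_cast<float>(v8) - 128.f;
  *r = ClampToByte(y + 1.403f * v);
  *g = ClampToByte(y - 0.344f * u - 0.714f * v);
  *b = ClampToByte(y + 1.77f * u);
}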
- private static final String FRAGMENT_SHADER_STRING = - "precision mediump float;\n" + - "varying vec2 interp_tc;\n" + - "\n" + - "uniform sampler2D y_tex;\n" + - "uniform sampler2D u_tex;\n" + - "uniform sampler2D v_tex;\n" + - "\n" + - "void main() {\n" + - " float y = texture2D(y_tex, interp_tc).r;\n" + - " float u = texture2D(u_tex, interp_tc).r - .5;\n" + - " float v = texture2D(v_tex, interp_tc).r - .5;\n" + - // CSC according to http://www.fourcc.org/fccyvrgb.php - " gl_FragColor = vec4(y + 1.403 * v, " + - " y - 0.344 * u - 0.714 * v, " + - " y + 1.77 * u, 1);\n" + - "}\n"; -} diff --git a/chromium/third_party/libjingle/source/talk/examples/ios/AppRTCDemo/APPRTCAppClient.m b/chromium/third_party/libjingle/source/talk/examples/ios/AppRTCDemo/APPRTCAppClient.m index 93f0d36a316..99f5166669b 100644 --- a/chromium/third_party/libjingle/source/talk/examples/ios/AppRTCDemo/APPRTCAppClient.m +++ b/chromium/third_party/libjingle/source/talk/examples/ios/AppRTCDemo/APPRTCAppClient.m @@ -34,7 +34,7 @@ @interface APPRTCAppClient () -@property(nonatomic, assign) dispatch_queue_t backgroundQueue; +@property(nonatomic, strong) dispatch_queue_t backgroundQueue; @property(nonatomic, copy) NSString *baseURL; @property(nonatomic, strong) GAEChannelClient *gaeChannel; @property(nonatomic, copy) NSString *postMessageUrl; @@ -52,7 +52,6 @@ - (id)init { if (self = [super init]) { _backgroundQueue = dispatch_queue_create("RTCBackgroundQueue", NULL); - dispatch_retain(_backgroundQueue); _sendQueue = [NSMutableArray array]; // Uncomment to see Request/Response logging. // _verboseLogging = YES; @@ -60,10 +59,6 @@ return self; } -- (void)dealloc { - dispatch_release(_backgroundQueue); -} - #pragma mark - Public methods - (void)connectToRoom:(NSURL *)url { @@ -199,14 +194,17 @@ error:&error]; NSAssert(!error, @"Unable to parse. %@", error.localizedDescription); NSString *username = json[@"username"]; - NSString *turnServer = json[@"turn"]; NSString *password = json[@"password"]; - NSString *fullUrl = - [NSString stringWithFormat:@"turn:%@@%@", username, turnServer]; - RTCICEServer *ICEServer = - [[RTCICEServer alloc] initWithURI:[NSURL URLWithString:fullUrl] + NSArray* uris = json[@"uris"]; + for (int i = 0; i < [uris count]; ++i) { + NSString *turnServer = [uris objectAtIndex:i]; + RTCICEServer *ICEServer = + [[RTCICEServer alloc] initWithURI:[NSURL URLWithString:turnServer] + username:username password:password]; - [ICEServers addObject:ICEServer]; + NSLog(@"Added ICE Server: %@", ICEServer); + [ICEServers addObject:ICEServer]; + } } else { NSLog(@"Unable to get TURN server. 
Error: %@", error.description); } @@ -246,9 +244,10 @@ [NSRegularExpression regularExpressionWithPattern:@"room is full" options:0 error:nil]; - if ([fullRegex numberOfMatchesInString:self.roomHtml - options:0 - range:NSMakeRange(0, [self.roomHtml length])]) { + if ([fullRegex + numberOfMatchesInString:self.roomHtml + options:0 + range:NSMakeRange(0, [self.roomHtml length])]) { [self showMessage:@"Room full"]; return; } @@ -257,7 +256,8 @@ NSString *fullUrl = [[[connection originalRequest] URL] absoluteString]; NSRange queryRange = [fullUrl rangeOfString:@"?"]; self.baseURL = [fullUrl substringToIndex:queryRange.location]; - [self maybeLogMessage:[NSString stringWithFormat:@"Base URL: %@", self.baseURL]]; + [self maybeLogMessage: + [NSString stringWithFormat:@"Base URL: %@", self.baseURL]]; self.token = [self findVar:@"channelToken" strippingQuotes:YES]; if (!self.token) @@ -291,11 +291,15 @@ NSDictionary *json = [NSJSONSerialization JSONObjectWithData:pcData options:0 error:&error]; NSAssert(!error, @"Unable to parse. %@", error.localizedDescription); - NSArray *servers = [json objectForKey:@"ICEServers"]; + NSArray *servers = [json objectForKey:@"iceServers"]; NSMutableArray *ICEServers = [NSMutableArray array]; for (NSDictionary *server in servers) { NSString *url = [server objectForKey:@"url"]; + NSString *username = json[@"username"]; NSString *credential = [server objectForKey:@"credential"]; + if (!username) { + username = @""; + } if (!credential) { credential = @""; } @@ -305,7 +309,9 @@ credential]]; RTCICEServer *ICEServer = [[RTCICEServer alloc] initWithURI:[NSURL URLWithString:url] + username:username password:credential]; + NSLog(@"Added ICE Server: %@", ICEServer); [ICEServers addObject:ICEServer]; } [self updateICEServers:ICEServers withTurnServer:turnServerUrl]; diff --git a/chromium/third_party/libjingle/source/talk/examples/ios/AppRTCDemo/APPRTCAppDelegate.h b/chromium/third_party/libjingle/source/talk/examples/ios/AppRTCDemo/APPRTCAppDelegate.h index ad1c5127600..22754e3ad26 100644 --- a/chromium/third_party/libjingle/source/talk/examples/ios/AppRTCDemo/APPRTCAppDelegate.h +++ b/chromium/third_party/libjingle/source/talk/examples/ios/AppRTCDemo/APPRTCAppDelegate.h @@ -35,7 +35,8 @@ @protocol APPRTCSendMessage<NSObject> - (void)sendData:(NSData *)data; - +// Logging helper. +- (void)displayLogMessage:(NSString *)message; @end @class APPRTCViewController; diff --git a/chromium/third_party/libjingle/source/talk/examples/ios/AppRTCDemo/APPRTCAppDelegate.m b/chromium/third_party/libjingle/source/talk/examples/ios/AppRTCDemo/APPRTCAppDelegate.m index 710f4ad5eca..34aa7520cbf 100644 --- a/chromium/third_party/libjingle/source/talk/examples/ios/AppRTCDemo/APPRTCAppDelegate.m +++ b/chromium/third_party/libjingle/source/talk/examples/ios/AppRTCDemo/APPRTCAppDelegate.m @@ -62,7 +62,7 @@ - (void)peerConnection:(RTCPeerConnection *)peerConnection signalingStateChanged:(RTCSignalingState)stateChanged { - NSLog(@"PCO onSignalingStateChange."); + NSLog(@"PCO onSignalingStateChange: %d", stateChanged); } - (void)peerConnection:(RTCPeerConnection *)peerConnection @@ -119,6 +119,13 @@ - (void)peerConnection:(RTCPeerConnection *)peerConnection iceConnectionChanged:(RTCICEConnectionState)newState { NSLog(@"PCO onIceConnectionChange. 
%d", newState); + if (newState == RTCICEConnectionConnected) + [self displayLogMessage:@"ICE Connection Connected."]; + NSAssert(newState != RTCICEConnectionFailed, @"ICE Connection failed!"); +} + +- (void)displayLogMessage:(NSString *)message { + [_delegate displayLogMessage:message]; } @end @@ -258,8 +265,8 @@ } else if (([value compare:@"offer"] == NSOrderedSame) || ([value compare:@"answer"] == NSOrderedSame)) { NSString *sdpString = [objects objectForKey:@"sdp"]; - RTCSessionDescription *sdp = - [[RTCSessionDescription alloc] initWithType:value sdp:sdpString]; + RTCSessionDescription *sdp = [[RTCSessionDescription alloc] + initWithType:value sdp:[APPRTCAppDelegate preferISAC:sdpString]]; [self.peerConnection setRemoteDescriptionWithDelegate:self sessionDescription:sdp]; [self displayLogMessage:@"PC - setRemoteDescription."]; @@ -283,8 +290,71 @@ #pragma mark - RTCSessionDescriptonDelegate methods +// Match |pattern| to |string| and return the first group of the first +// match, or nil if no match was found. ++ (NSString *)firstMatch:(NSRegularExpression *)pattern + withString:(NSString *)string { + NSTextCheckingResult* result = + [pattern firstMatchInString:string + options:0 + range:NSMakeRange(0, [string length])]; + if (!result) + return nil; + return [string substringWithRange:[result rangeAtIndex:1]]; +} + +// Mangle |origSDP| to prefer the ISAC/16k audio codec. ++ (NSString *)preferISAC:(NSString *)origSDP { + int mLineIndex = -1; + NSString* isac16kRtpMap = nil; + NSArray* lines = [origSDP componentsSeparatedByString:@"\n"]; + NSRegularExpression* isac16kRegex = [NSRegularExpression + regularExpressionWithPattern:@"^a=rtpmap:(\\d+) ISAC/16000[\r]?$" + options:0 + error:nil]; + for (int i = 0; + (i < [lines count]) && (mLineIndex == -1 || isac16kRtpMap == nil); + ++i) { + NSString* line = [lines objectAtIndex:i]; + if ([line hasPrefix:@"m=audio "]) { + mLineIndex = i; + continue; + } + isac16kRtpMap = [self firstMatch:isac16kRegex withString:line]; + } + if (mLineIndex == -1) { + NSLog(@"No m=audio line, so can't prefer iSAC"); + return origSDP; + } + if (isac16kRtpMap == nil) { + NSLog(@"No ISAC/16000 line, so can't prefer iSAC"); + return origSDP; + } + NSArray* origMLineParts = + [[lines objectAtIndex:mLineIndex] componentsSeparatedByString:@" "]; + NSMutableArray* newMLine = + [NSMutableArray arrayWithCapacity:[origMLineParts count]]; + int origPartIndex = 0; + // Format is: m=<media> <port> <proto> <fmt> ... 
+ [newMLine addObject:[origMLineParts objectAtIndex:origPartIndex++]]; + [newMLine addObject:[origMLineParts objectAtIndex:origPartIndex++]]; + [newMLine addObject:[origMLineParts objectAtIndex:origPartIndex++]]; + [newMLine addObject:isac16kRtpMap]; + for (; origPartIndex < [origMLineParts count]; ++origPartIndex) { + if ([isac16kRtpMap compare:[origMLineParts objectAtIndex:origPartIndex]] + != NSOrderedSame) { + [newMLine addObject:[origMLineParts objectAtIndex:origPartIndex]]; + } + } + NSMutableArray* newLines = [NSMutableArray arrayWithCapacity:[lines count]]; + [newLines addObjectsFromArray:lines]; + [newLines replaceObjectAtIndex:mLineIndex + withObject:[newMLine componentsJoinedByString:@" "]]; + return [newLines componentsJoinedByString:@"\n"]; +} + - (void)peerConnection:(RTCPeerConnection *)peerConnection - didCreateSessionDescription:(RTCSessionDescription *)sdp + didCreateSessionDescription:(RTCSessionDescription *)origSdp error:(NSError *)error { if (error) { [self displayLogMessage:@"SDP onFailure."]; @@ -293,6 +363,10 @@ } [self displayLogMessage:@"SDP onSuccess(SDP) - set local description."]; + RTCSessionDescription* sdp = + [[RTCSessionDescription alloc] + initWithType:origSdp.type + sdp:[APPRTCAppDelegate preferISAC:origSdp.description]]; [self.peerConnection setLocalDescriptionWithDelegate:self sessionDescription:sdp]; [self displayLogMessage:@"PC setLocalDescription."]; diff --git a/chromium/third_party/libjingle/source/talk/examples/ios/AppRTCDemo/APPRTCViewController.m b/chromium/third_party/libjingle/source/talk/examples/ios/AppRTCDemo/APPRTCViewController.m index 928686b6853..ab84c0932b4 100644 --- a/chromium/third_party/libjingle/source/talk/examples/ios/AppRTCDemo/APPRTCViewController.m +++ b/chromium/third_party/libjingle/source/talk/examples/ios/AppRTCDemo/APPRTCViewController.m @@ -36,6 +36,7 @@ - (void)viewDidLoad { [super viewDidLoad]; self.textField.delegate = self; + [self.textField becomeFirstResponder]; } - (void)displayText:(NSString *)text { diff --git a/chromium/third_party/libjingle/source/talk/libjingle.gyp b/chromium/third_party/libjingle/source/talk/libjingle.gyp index 15d92d10d98..5250eec1b8f 100755 --- a/chromium/third_party/libjingle/source/talk/libjingle.gyp +++ b/chromium/third_party/libjingle/source/talk/libjingle.gyp @@ -42,7 +42,6 @@ }], ], }], - ['OS=="linux" or OS=="android"', { 'targets': [ { @@ -107,6 +106,7 @@ # and include it here. 
'android_java_files': [ '<(webrtc_modules_dir)/audio_device/android/java/src/org/webrtc/voiceengine/WebRTCAudioDevice.java', + '<(webrtc_modules_dir)/audio_device/android/java/src/org/webrtc/voiceengine/AudioManagerAndroid.java', '<(webrtc_modules_dir)/video_capture/android/java/src/org/webrtc/videoengine/CaptureCapabilityAndroid.java', '<(webrtc_modules_dir)/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureAndroid.java', '<(webrtc_modules_dir)/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java', @@ -150,7 +150,7 @@ }, ], }], - ['libjingle_objc == 1', { + ['OS=="ios" or (OS=="mac" and target_arch!="ia32")', { 'targets': [ { 'target_name': 'libjingle_peerconnection_objc', @@ -229,13 +229,14 @@ 'link_settings': { 'libraries': [ '$(SDKROOT)/System/Library/Frameworks/Foundation.framework', + '-lstdc++', ], }, 'xcode_settings': { 'CLANG_ENABLE_OBJC_ARC': 'YES', }, - } - ] + }, # target libjingle_peerconnection_objc + ], }], ], @@ -591,17 +592,6 @@ ], }], ['OS=="mac"', { - 'conditions': [ - ['libjingle_objc != 1', { - 'link_settings' :{ - 'xcode_settings': { - 'OTHER_LDFLAGS': [ - '-framework Carbon', - ], - }, - }, - }], - ], 'sources': [ 'base/macasyncsocket.cc', 'base/macasyncsocket.h', @@ -619,18 +609,36 @@ ], 'link_settings': { 'libraries': [ - '$(SDKROOT)/usr/lib/libcrypto.dylib', - '$(SDKROOT)/usr/lib/libssl.dylib', + '$(SDKROOT)/usr/lib/libcrypto.dylib', + '$(SDKROOT)/usr/lib/libssl.dylib', ], - 'xcode_settings': { - 'OTHER_LDFLAGS': [ - '-framework Cocoa', - '-framework IOKit', - '-framework Security', - '-framework SystemConfiguration', - ], + }, + 'all_dependent_settings': { + 'link_settings': { + 'xcode_settings': { + 'OTHER_LDFLAGS': [ + '-framework Cocoa', + '-framework Foundation', + '-framework IOKit', + '-framework Security', + '-framework SystemConfiguration', + ], + }, }, }, + 'conditions': [ + ['target_arch=="ia32"', { + 'all_dependent_settings': { + 'link_settings': { + 'xcode_settings': { + 'OTHER_LDFLAGS': [ + '-framework Carbon', + ], + }, + }, + }, + }], + ], }], ['OS=="ios"', { 'sources': [ @@ -639,13 +647,16 @@ 'dependencies': [ '../net/third_party/nss/ssl.gyp:libssl', ], - 'xcode_settings': { - 'OTHER_LDFLAGS': [ - '-framework IOKit', - '-framework Security', - '-framework SystemConfiguration', - '-framework UIKit', - ], + 'all_dependent_settings': { + 'xcode_settings': { + 'OTHER_LDFLAGS': [ + '-framework Foundation', + '-framework IOKit', + '-framework Security', + '-framework SystemConfiguration', + '-framework UIKit', + ], + }, }, }], ['OS=="win"', { @@ -903,12 +914,18 @@ 'media/devices/macdevicemanagermm.mm', ], 'conditions': [ - # TODO(hughv): Investigate if this is needed. 
- [ 'libjingle_objc != 1', { + ['target_arch=="ia32"', { 'sources': [ 'media/devices/carbonvideorenderer.cc', 'media/devices/carbonvideorenderer.h', ], + 'link_settings': { + 'xcode_settings': { + 'OTHER_LDFLAGS': [ + '-framework Carbon', + ], + }, + }, }], ], 'xcode_settings': { @@ -1032,6 +1049,7 @@ 'p2p/base/transportchannelimpl.h', 'p2p/base/transportchannelproxy.cc', 'p2p/base/transportchannelproxy.h', + 'p2p/base/transportdescription.cc', 'p2p/base/transportdescription.h', 'p2p/base/transportdescriptionfactory.cc', 'p2p/base/transportdescriptionfactory.h', @@ -1119,8 +1137,6 @@ 'app/webrtc/jsepsessiondescription.h', 'app/webrtc/localaudiosource.cc', 'app/webrtc/localaudiosource.h', - 'app/webrtc/localvideosource.cc', - 'app/webrtc/localvideosource.h', 'app/webrtc/mediaconstraintsinterface.cc', 'app/webrtc/mediaconstraintsinterface.h', 'app/webrtc/mediastream.cc', @@ -1144,10 +1160,14 @@ 'app/webrtc/portallocatorfactory.cc', 'app/webrtc/portallocatorfactory.h', 'app/webrtc/proxy.h', + 'app/webrtc/remotevideocapturer.cc', + 'app/webrtc/remotevideocapturer.h', 'app/webrtc/statscollector.cc', 'app/webrtc/statscollector.h', 'app/webrtc/statstypes.h', 'app/webrtc/streamcollection.h', + 'app/webrtc/videosource.cc', + 'app/webrtc/videosource.h', 'app/webrtc/videosourceinterface.h', 'app/webrtc/videosourceproxy.h', 'app/webrtc/videotrack.cc', diff --git a/chromium/third_party/libjingle/source/talk/libjingle_examples.gyp b/chromium/third_party/libjingle/source/talk/libjingle_examples.gyp index 78bfe654f13..d4aa4a42712 100755 --- a/chromium/third_party/libjingle/source/talk/libjingle_examples.gyp +++ b/chromium/third_party/libjingle/source/talk/libjingle_examples.gyp @@ -218,7 +218,7 @@ ], # targets }], # OS=="linux" or OS=="win" - ['libjingle_objc==1 and OS=="ios"', { + ['OS=="ios"', { 'targets': [ { 'target_name': 'AppRTCDemo', @@ -276,17 +276,23 @@ # we could pick more intelligently among the keys, but as a # first cut just tell the developer to specify a key identity # explicitly. 
- 'ensure_single_key': '<!(python -c "assert len(\'\'\'<(key_id)\'\'\') > 0 and \'\\n\' not in \'\'\'<(key_id)\'\'\', \'key_id gyp variable needs to be set explicitly because there are multiple codesigning keys, or none!\'")', + 'ensure_single_key': '<!(python -c "assert \'\\n\' not in \'\'\'<(key_id)\'\'\', \'key_id gyp variable needs to be set explicitly because there are multiple codesigning keys!\'")', }, - 'action': [ - '/usr/bin/codesign', '-v', '--force', '--sign', '<(key_id)', - '${BUILT_PRODUCTS_DIR}/AppRTCDemo.app', + 'conditions': [ + ['key_id==""', { + 'action': [ 'echo', 'Skipping signing' ], + }, { + 'action': [ + '/usr/bin/codesign', '-v', '--force', '--sign', '<(key_id)', + '${BUILT_PRODUCTS_DIR}/AppRTCDemo.app', + ], + }], ], }, ], }, # target AppRTCDemo ], # targets - }], # libjingle_objc==1 + }], # OS=="ios" ['OS=="android"', { 'targets': [ diff --git a/chromium/third_party/libjingle/source/talk/libjingle_tests.gyp b/chromium/third_party/libjingle/source/talk/libjingle_tests.gyp index 9ea81afd06f..546767d1fb5 100755 --- a/chromium/third_party/libjingle/source/talk/libjingle_tests.gyp +++ b/chromium/third_party/libjingle/source/talk/libjingle_tests.gyp @@ -379,7 +379,6 @@ 'app/webrtc/dtmfsender_unittest.cc', 'app/webrtc/jsepsessiondescription_unittest.cc', 'app/webrtc/localaudiosource_unittest.cc', - 'app/webrtc/localvideosource_unittest.cc', # 'app/webrtc/mediastream_unittest.cc', # 'app/webrtc/mediastreamhandler_unittest.cc', 'app/webrtc/mediastreamsignaling_unittest.cc', @@ -387,6 +386,7 @@ 'app/webrtc/peerconnectionfactory_unittest.cc', 'app/webrtc/peerconnectioninterface_unittest.cc', # 'app/webrtc/peerconnectionproxy_unittest.cc', + 'app/webrtc/remotevideocapturer_unittest.cc', 'app/webrtc/test/fakeaudiocapturemodule.cc', 'app/webrtc/test/fakeaudiocapturemodule.h', 'app/webrtc/test/fakeaudiocapturemodule_unittest.cc', @@ -397,6 +397,7 @@ 'app/webrtc/test/fakevideotrackrenderer.h', 'app/webrtc/test/mockpeerconnectionobservers.h', 'app/webrtc/test/testsdpstrings.h', + 'app/webrtc/videosource_unittest.cc', 'app/webrtc/videotrack_unittest.cc', 'app/webrtc/webrtcsdp_unittest.cc', 'app/webrtc/webrtcsession_unittest.cc', @@ -464,13 +465,16 @@ }, ], }], - ['libjingle_objc == 1', { + ['OS=="ios" or (OS=="mac" and target_arch!="ia32" and mac_sdk>="10.7")', { + # The >=10.7 above is required to make ARC link cleanly (e.g. as + # opposed to _compile_ cleanly, which the library under test + # does just fine on 10.6 too). 
'targets': [ { + 'target_name': 'libjingle_peerconnection_objc_test', 'variables': { 'infoplist_file': './app/webrtc/objctests/Info.plist', }, - 'target_name': 'libjingle_peerconnection_objc_test', 'type': 'executable', 'mac_bundle': 1, 'mac_bundle_resources': [ @@ -510,7 +514,7 @@ ], }], ], - }, + }, # target libjingle_peerconnection_objc_test ], }], ], diff --git a/chromium/third_party/libjingle/source/talk/media/base/fakemediaengine.h b/chromium/third_party/libjingle/source/talk/media/base/fakemediaengine.h index ded5698ba42..7ef0c9b1ab3 100644 --- a/chromium/third_party/libjingle/source/talk/media/base/fakemediaengine.h +++ b/chromium/third_party/libjingle/source/talk/media/base/fakemediaengine.h @@ -864,8 +864,6 @@ class FakeVideoEngine : public FakeBaseEngine { renderer_ = r; return true; } - bool SetVideoCapturer(VideoCapturer* /*capturer*/) { return true; } - VideoCapturer* GetVideoCapturer() const { return NULL; } bool SetCapture(bool capture) { capture_ = capture; return true; diff --git a/chromium/third_party/libjingle/source/talk/media/base/fakenetworkinterface.h b/chromium/third_party/libjingle/source/talk/media/base/fakenetworkinterface.h index 2fdd1d4a39a..d0f277e8f42 100644 --- a/chromium/third_party/libjingle/source/talk/media/base/fakenetworkinterface.h +++ b/chromium/third_party/libjingle/source/talk/media/base/fakenetworkinterface.h @@ -130,7 +130,8 @@ class FakeNetworkInterface : public MediaChannel::NetworkInterface, int recvbuf_size() const { return recvbuf_size_; } protected: - virtual bool SendPacket(talk_base::Buffer* packet) { + virtual bool SendPacket(talk_base::Buffer* packet, + talk_base::DiffServCodePoint dscp) { talk_base::CritScope cs(&crit_); uint32 cur_ssrc = 0; @@ -164,7 +165,8 @@ class FakeNetworkInterface : public MediaChannel::NetworkInterface, return true; } - virtual bool SendRtcp(talk_base::Buffer* packet) { + virtual bool SendRtcp(talk_base::Buffer* packet, + talk_base::DiffServCodePoint dscp) { talk_base::CritScope cs(&crit_); rtcp_packets_.push_back(*packet); if (!conf_) { diff --git a/chromium/third_party/libjingle/source/talk/media/base/fakevideorenderer.h b/chromium/third_party/libjingle/source/talk/media/base/fakevideorenderer.h index 4000d5e2045..362e592951e 100644 --- a/chromium/third_party/libjingle/source/talk/media/base/fakevideorenderer.h +++ b/chromium/third_party/libjingle/source/talk/media/base/fakevideorenderer.h @@ -28,6 +28,7 @@ #ifndef TALK_MEDIA_BASE_FAKEVIDEORENDERER_H_ #define TALK_MEDIA_BASE_FAKEVIDEORENDERER_H_ +#include "talk/base/logging.h" #include "talk/base/sigslot.h" #include "talk/media/base/videoframe.h" #include "talk/media/base/videorenderer.h" @@ -62,6 +63,13 @@ class FakeVideoRenderer : public VideoRenderer { if (!frame || frame->GetWidth() != static_cast<size_t>(width_) || frame->GetHeight() != static_cast<size_t>(height_)) { + if (!frame) { + LOG(LS_WARNING) << "RenderFrame expected non-null frame."; + } else { + LOG(LS_WARNING) << "RenderFrame expected frame of size " << width_ + << "x" << height_ << " but received frame of size " + << frame->GetWidth() << "x" << frame->GetHeight(); + } ++errors_; return false; } diff --git a/chromium/third_party/libjingle/source/talk/media/base/filemediaengine_unittest.cc b/chromium/third_party/libjingle/source/talk/media/base/filemediaengine_unittest.cc index 703fc118d54..e4d72bbe610 100644 --- a/chromium/third_party/libjingle/source/talk/media/base/filemediaengine_unittest.cc +++ b/chromium/third_party/libjingle/source/talk/media/base/filemediaengine_unittest.cc @@ -58,7 
+58,8 @@ class FileNetworkInterface : public MediaChannel::NetworkInterface { } // Implement pure virtual methods of NetworkInterface. - virtual bool SendPacket(talk_base::Buffer* packet) { + virtual bool SendPacket(talk_base::Buffer* packet, + talk_base::DiffServCodePoint dscp) { if (!packet) return false; if (media_channel_) { @@ -74,11 +75,13 @@ class FileNetworkInterface : public MediaChannel::NetworkInterface { return true; } - virtual bool SendRtcp(talk_base::Buffer* packet) { return false; } + virtual bool SendRtcp(talk_base::Buffer* packet, + talk_base::DiffServCodePoint dscp) { return false; } virtual int SetOption(MediaChannel::NetworkInterface::SocketType type, talk_base::Socket::Option opt, int option) { return 0; } + virtual void SetDefaultDSCPCode(talk_base::DiffServCodePoint dscp) {} size_t num_sent_packets() const { return num_sent_packets_; } diff --git a/chromium/third_party/libjingle/source/talk/media/base/mediachannel.h b/chromium/third_party/libjingle/source/talk/media/base/mediachannel.h index a9e37781b39..7431bc1001f 100644 --- a/chromium/third_party/libjingle/source/talk/media/base/mediachannel.h +++ b/chromium/third_party/libjingle/source/talk/media/base/mediachannel.h @@ -33,6 +33,7 @@ #include "talk/base/basictypes.h" #include "talk/base/buffer.h" +#include "talk/base/dscp.h" #include "talk/base/logging.h" #include "talk/base/sigslot.h" #include "talk/base/socket.h" @@ -236,6 +237,7 @@ struct VideoOptions { adapt_input_to_cpu_usage.SetFrom(change.adapt_input_to_cpu_usage); adapt_cpu_with_smoothing.SetFrom(change.adapt_cpu_with_smoothing); adapt_view_switch.SetFrom(change.adapt_view_switch); + video_adapt_third.SetFrom(change.video_adapt_third); video_noise_reduction.SetFrom(change.video_noise_reduction); video_three_layers.SetFrom(change.video_three_layers); video_enable_camera_list.SetFrom(change.video_enable_camera_list); @@ -245,6 +247,7 @@ struct VideoOptions { video_temporal_layer_screencast.SetFrom( change.video_temporal_layer_screencast); video_leaky_bucket.SetFrom(change.video_leaky_bucket); + cpu_overuse_detection.SetFrom(change.cpu_overuse_detection); conference_mode.SetFrom(change.conference_mode); process_adaptation_threshhold.SetFrom(change.process_adaptation_threshhold); system_low_adaptation_threshhold.SetFrom( @@ -259,6 +262,7 @@ struct VideoOptions { adapt_input_to_cpu_usage == o.adapt_input_to_cpu_usage && adapt_cpu_with_smoothing == o.adapt_cpu_with_smoothing && adapt_view_switch == o.adapt_view_switch && + video_adapt_third == o.video_adapt_third && video_noise_reduction == o.video_noise_reduction && video_three_layers == o.video_three_layers && video_enable_camera_list == o.video_enable_camera_list && @@ -267,6 +271,7 @@ struct VideoOptions { video_watermark == o.video_watermark && video_temporal_layer_screencast == o.video_temporal_layer_screencast && video_leaky_bucket == o.video_leaky_bucket && + cpu_overuse_detection == o.cpu_overuse_detection && conference_mode == o.conference_mode && process_adaptation_threshhold == o.process_adaptation_threshhold && system_low_adaptation_threshhold == @@ -283,16 +288,17 @@ struct VideoOptions { ost << ToStringIfSet("cpu adaption", adapt_input_to_cpu_usage); ost << ToStringIfSet("cpu adaptation smoothing", adapt_cpu_with_smoothing); ost << ToStringIfSet("adapt view switch", adapt_view_switch); + ost << ToStringIfSet("video adapt third", video_adapt_third); ost << ToStringIfSet("noise reduction", video_noise_reduction); ost << ToStringIfSet("3 layers", video_three_layers); ost << ToStringIfSet("camera 
list", video_enable_camera_list); - ost << ToStringIfSet("1 layer screencast", - video_one_layer_screencast); + ost << ToStringIfSet("1 layer screencast", video_one_layer_screencast); ost << ToStringIfSet("high bitrate", video_high_bitrate); ost << ToStringIfSet("watermark", video_watermark); ost << ToStringIfSet("video temporal layer screencast", video_temporal_layer_screencast); ost << ToStringIfSet("leaky bucket", video_leaky_bucket); + ost << ToStringIfSet("cpu overuse detection", cpu_overuse_detection); ost << ToStringIfSet("conference mode", conference_mode); ost << ToStringIfSet("process", process_adaptation_threshhold); ost << ToStringIfSet("low", system_low_adaptation_threshhold); @@ -310,6 +316,8 @@ struct VideoOptions { Settable<bool> adapt_cpu_with_smoothing; // Enable Adapt View Switch? Settable<bool> adapt_view_switch; + // Enable video adapt third? + Settable<bool> video_adapt_third; // Enable denoising? Settable<bool> video_noise_reduction; // Experimental: Enable multi layer? @@ -326,6 +334,10 @@ struct VideoOptions { Settable<bool> video_temporal_layer_screencast; // Enable WebRTC leaky bucket when sending media packets. Settable<bool> video_leaky_bucket; + // Enable WebRTC Cpu Overuse Detection, which is a new version of the CPU + // adaptation algorithm. So this option will override the + // |adapt_input_to_cpu_usage|. + Settable<bool> cpu_overuse_detection; // Use conference mode? Settable<bool> conference_mode; // Threshhold for process cpu adaptation. (Process limit) @@ -403,8 +415,12 @@ class MediaChannel : public sigslot::has_slots<> { class NetworkInterface { public: enum SocketType { ST_RTP, ST_RTCP }; - virtual bool SendPacket(talk_base::Buffer* packet) = 0; - virtual bool SendRtcp(talk_base::Buffer* packet) = 0; + virtual bool SendPacket( + talk_base::Buffer* packet, + talk_base::DiffServCodePoint dscp = talk_base::DSCP_NO_CHANGE) = 0; + virtual bool SendRtcp( + talk_base::Buffer* packet, + talk_base::DiffServCodePoint dscp = talk_base::DSCP_NO_CHANGE) = 0; virtual int SetOption(SocketType type, talk_base::Socket::Option opt, int option) = 0; virtual ~NetworkInterface() {} @@ -508,7 +524,8 @@ struct VoiceSenderInfo { echo_delay_median_ms(0), echo_delay_std_ms(0), echo_return_loss(0), - echo_return_loss_enhancement(0) { + echo_return_loss_enhancement(0), + typing_noise_detected(false) { } uint32 ssrc; @@ -526,6 +543,7 @@ struct VoiceSenderInfo { int echo_delay_std_ms; int echo_return_loss; int echo_return_loss_enhancement; + bool typing_noise_detected; }; struct VoiceReceiverInfo { @@ -849,10 +867,12 @@ class VideoMediaChannel : public MediaChannel { }; enum DataMessageType { - // TODO(pthatcher): Make this enum match the SCTP PPIDs that WebRTC uses? - DMT_CONTROL = 0, - DMT_BINARY = 1, - DMT_TEXT = 2, + // Chrome-Internal use only. See SctpDataMediaChannel for the actual PPID + // values. + DMT_NONE = 0, + DMT_CONTROL = 1, + DMT_BINARY = 2, + DMT_TEXT = 3, }; // Info about data received in DataMediaChannel. 
For use in diff --git a/chromium/third_party/libjingle/source/talk/media/base/mediaengine.h b/chromium/third_party/libjingle/source/talk/media/base/mediaengine.h index 7a1244dfae7..8ebc13b1086 100644 --- a/chromium/third_party/libjingle/source/talk/media/base/mediaengine.h +++ b/chromium/third_party/libjingle/source/talk/media/base/mediaengine.h @@ -259,10 +259,10 @@ class CompositeMediaEngine : public MediaEngineInterface { } virtual void SetVoiceLogging(int min_sev, const char* filter) { - return voice_.SetLogging(min_sev, filter); + voice_.SetLogging(min_sev, filter); } virtual void SetVideoLogging(int min_sev, const char* filter) { - return video_.SetLogging(min_sev, filter); + video_.SetLogging(min_sev, filter); } virtual bool RegisterVoiceProcessor(uint32 ssrc, diff --git a/chromium/third_party/libjingle/source/talk/media/base/testutils.h b/chromium/third_party/libjingle/source/talk/media/base/testutils.h index 7bc7dc3ae02..4d037b7ba38 100644 --- a/chromium/third_party/libjingle/source/talk/media/base/testutils.h +++ b/chromium/third_party/libjingle/source/talk/media/base/testutils.h @@ -28,8 +28,16 @@ #ifndef TALK_MEDIA_BASE_TESTUTILS_H_ #define TALK_MEDIA_BASE_TESTUTILS_H_ +#ifdef LINUX +#include <X11/Xlib.h> +// X defines a few macros that stomp on types that gunit.h uses. +#undef None +#undef Bool +#endif + #include <string> #include <vector> + #if !defined(DISABLE_YUV) #include "libyuv/compare.h" #endif @@ -237,6 +245,37 @@ bool ContainsMatchingCodec(const std::vector<C>& codecs, const C& codec) { return false; } +#define MAYBE_SKIP_SCREENCAST_TEST() \ + if (!cricket::IsScreencastingAvailable()) { \ + LOG(LS_WARNING) << "Skipping test, since it doesn't have the requisite " \ + << "X environment for screen capture."; \ + return; \ + } \ + +#ifdef LINUX +struct XDisplay { + XDisplay() : display_(XOpenDisplay(NULL)) { } + ~XDisplay() { if (display_) XCloseDisplay(display_); } + bool IsValid() const { return display_ != NULL; } + operator Display*() { return display_; } + private: + Display* display_; +}; +#endif + +// Returns true if screencasting is available. When false, anything that uses +// screencasting features may fail. +inline bool IsScreencastingAvailable() { +#ifdef LINUX + XDisplay display; + if (!display.IsValid()) { + LOG(LS_WARNING) << "No X Display available."; + return false; + } +#endif + return true; +} + } // namespace cricket #endif // TALK_MEDIA_BASE_TESTUTILS_H_ diff --git a/chromium/third_party/libjingle/source/talk/media/base/videoadapter.cc b/chromium/third_party/libjingle/source/talk/media/base/videoadapter.cc index cef4248d38a..20114fbd383 100644 --- a/chromium/third_party/libjingle/source/talk/media/base/videoadapter.cc +++ b/chromium/third_party/libjingle/source/talk/media/base/videoadapter.cc @@ -45,86 +45,100 @@ static const float kCpuLoadWeightCoefficient = 0.4f; // The seed value for the cpu load moving average. static const float kCpuLoadInitialAverage = 0.5f; -// TODO(fbarchard): Consider making scale factor table settable, to allow -// application to select quality vs performance tradeoff. -// TODO(fbarchard): Add framerate scaling to tables for 1/2 framerate. -// List of scale factors that adapter will scale by. -#if defined(IOS) || defined(ANDROID) -// Mobile needs 1/4 scale for VGA (640 x 360) to QQVGA (160 x 90) -// or 1/4 scale for HVGA (480 x 270) to QQHVGA (120 x 67) -static const int kMinNumPixels = 120 * 67; -static float kScaleFactors[] = { - 1.f / 1.f, // Full size. - 3.f / 4.f, // 3/4 scale. - 1.f / 2.f, // 1/2 scale. 
- 3.f / 8.f, // 3/8 scale. - 1.f / 4.f, // 1/4 scale. -}; -#else // Desktop needs 1/8 scale for HD (1280 x 720) to QQVGA (160 x 90) -static const int kMinNumPixels = 160 * 100; -static float kScaleFactors[] = { - 1.f / 1.f, // Full size. - 3.f / 4.f, // 3/4 scale. - 1.f / 2.f, // 1/2 scale. - 3.f / 8.f, // 3/8 scale. - 1.f / 4.f, // 1/4 scale. +static const float kScaleFactors[] = { + 1.f / 1.f, // Full size. + 3.f / 4.f, // 3/4 scale. + 1.f / 2.f, // 1/2 scale. + 3.f / 8.f, // 3/8 scale. + 1.f / 4.f, // 1/4 scale. 3.f / 16.f, // 3/16 scale. - 1.f / 8.f // 1/8 scale. + 1.f / 8.f, // 1/8 scale. + 0.f // End of table. }; -#endif -static const int kNumScaleFactors = ARRAY_SIZE(kScaleFactors); +// TODO(fbarchard): Use this table (optionally) for CPU and GD as well. +static const float kViewScaleFactors[] = { + 1.f / 1.f, // Full size. + 3.f / 4.f, // 3/4 scale. + 2.f / 3.f, // 2/3 scale. // Allow 1080p to 720p. + 1.f / 2.f, // 1/2 scale. + 3.f / 8.f, // 3/8 scale. + 1.f / 3.f, // 1/3 scale. // Allow 1080p to 360p. + 1.f / 4.f, // 1/4 scale. + 3.f / 16.f, // 3/16 scale. + 1.f / 8.f, // 1/8 scale. + 0.f // End of table. +}; +const float* VideoAdapter::GetViewScaleFactors() const { + return scale_third_ ? kViewScaleFactors : kScaleFactors; +} + +// For resolutions that would scale down a little instead of up a little, +// bias toward scaling up a little. This will tend to choose 3/4 scale instead +// of 2/3 scale, when the 2/3 is not an exact match. +static const float kUpBias = -0.9f; // Find the scale factor that, when applied to width and height, is closest // to num_pixels. -float VideoAdapter::FindClosestScale(int width, int height, - int target_num_pixels) { +float VideoAdapter::FindScale(const float* scale_factors, + const float upbias, + int width, int height, + int target_num_pixels) { + const float kMinNumPixels = 160 * 90; if (!target_num_pixels) { return 0.f; } - int best_distance = INT_MAX; - int best_index = kNumScaleFactors - 1; // Default to max scale. - for (int i = 0; i < kNumScaleFactors; ++i) { - int test_num_pixels = static_cast<int>(width * kScaleFactors[i] * - height * kScaleFactors[i]); - int diff = test_num_pixels - target_num_pixels; + float best_distance = static_cast<float>(INT_MAX); + float best_scale = 1.f; // Default to unscaled if nothing matches. + float pixels = static_cast<float>(width * height); + for (int i = 0; ; ++i) { + float scale = scale_factors[i]; + float test_num_pixels = pixels * scale * scale; + // Do not consider scale factors that produce too small images. + // Scale factor of 0 at end of table will also exit here. + if (test_num_pixels < kMinNumPixels) { + break; + } + float diff = target_num_pixels - test_num_pixels; + // If resolution is higher than desired, bias the difference based on + // preference for slightly larger for nearest, or avoid completely if + // looking for lower resolutions only. if (diff < 0) { - diff = -diff; + diff = diff * kUpBias; } if (diff < best_distance) { best_distance = diff; - best_index = i; + best_scale = scale; if (best_distance == 0) { // Found exact match. break; } } } - return kScaleFactors[best_index]; + return best_scale; +} + +// Find the closest scale factor. +float VideoAdapter::FindClosestScale(int width, int height, + int target_num_pixels) { + return FindScale(kScaleFactors, kUpBias, + width, height, target_num_pixels); +} + +// Find the closest view scale factor. 
+float VideoAdapter::FindClosestViewScale(int width, int height, + int target_num_pixels) { + return FindScale(GetViewScaleFactors(), kUpBias, + width, height, target_num_pixels); } // Finds the scale factor that, when applied to width and height, produces // fewer than num_pixels. +static const float kUpAvoidBias = -1000000000.f; float VideoAdapter::FindLowerScale(int width, int height, int target_num_pixels) { - if (!target_num_pixels) { - return 0.f; - } - int best_distance = INT_MAX; - int best_index = kNumScaleFactors - 1; // Default to max scale. - for (int i = 0; i < kNumScaleFactors; ++i) { - int test_num_pixels = static_cast<int>(width * kScaleFactors[i] * - height * kScaleFactors[i]); - int diff = target_num_pixels - test_num_pixels; - if (diff >= 0 && diff < best_distance) { - best_distance = diff; - best_index = i; - if (best_distance == 0) { // Found exact match. - break; - } - } - } - return kScaleFactors[best_index]; + return FindScale(GetViewScaleFactors(), kUpAvoidBias, + width, height, target_num_pixels); } // There are several frame sizes used by Adapter. This explains them @@ -147,6 +161,12 @@ float VideoAdapter::FindLowerScale(int width, int height, // Implementation of VideoAdapter VideoAdapter::VideoAdapter() : output_num_pixels_(INT_MAX), + scale_third_(false), + frames_(0), + adapted_frames_(0), + adaption_changes_(0), + previous_width(0), + previous_height(0), black_output_(false), is_black_(false), interval_next_frame_(0) { @@ -155,12 +175,6 @@ VideoAdapter::VideoAdapter() VideoAdapter::~VideoAdapter() { } -void VideoAdapter::SetInputFormat(const VideoFrame& in_frame) { - talk_base::CritScope cs(&critical_section_); - input_format_.width = static_cast<int>(in_frame.GetWidth()); - input_format_.height = static_cast<int>(in_frame.GetHeight()); -} - void VideoAdapter::SetInputFormat(const VideoFormat& format) { talk_base::CritScope cs(&critical_section_); input_format_ = format; @@ -168,6 +182,29 @@ void VideoAdapter::SetInputFormat(const VideoFormat& format) { output_format_.interval, input_format_.interval); } +void CoordinatedVideoAdapter::SetInputFormat(const VideoFormat& format) { + int previous_width = input_format().width; + int previous_height = input_format().height; + bool is_resolution_change = previous_width > 0 && format.width > 0 && + (previous_width != format.width || + previous_height != format.height); + VideoAdapter::SetInputFormat(format); + if (is_resolution_change) { + int width, height; + // Trigger the adaptation logic again, to potentially reset the adaptation + // state for things like view requests that may not longer be capping + // output (or may now cap output). + AdaptToMinimumFormat(&width, &height); + LOG(LS_INFO) << "VAdapt Input Resolution Change: " + << "Previous input resolution: " + << previous_width << "x" << previous_height + << " New input resolution: " + << format.width << "x" << format.height + << " New output resolution: " + << width << "x" << height; + } +} + void VideoAdapter::SetOutputFormat(const VideoFormat& format) { talk_base::CritScope cs(&critical_section_); output_format_ = format; @@ -208,9 +245,13 @@ bool VideoAdapter::AdaptFrame(const VideoFrame* in_frame, if (!in_frame || !out_frame) { return false; } + ++frames_; // Update input to actual frame dimensions. - SetInputFormat(*in_frame); + VideoFormat format(static_cast<int>(in_frame->GetWidth()), + static_cast<int>(in_frame->GetHeight()), + input_format_.interval, input_format_.fourcc); + SetInputFormat(format); // Drop the input frame if necessary. 
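FindScale() above walks a zero-terminated factor table, skips anything that would fall below roughly QQVGA (160x90), and scores each candidate by its distance from the requested pixel count; an overshooting candidate has its distance multiplied by |upbias|, so with kUpBias = -0.9 a slightly-too-large factor beats an equally distant slightly-too-small one, while the huge kUpAvoidBias used by FindLowerScale() effectively rules overshoot out. A self-contained restatement with a worked example (the table is the default one above; the function name is illustrative):

#include <climits>
#include <cstdio>

static const float kTable[] = {
  1.f, 3.f / 4.f, 1.f / 2.f, 3.f / 8.f, 1.f / 4.f, 3.f / 16.f, 1.f / 8.f,
  0.f  // End of table.
};

float ChooseScale(const float* factors, float up_bias,
                  int width, int height, int target_num_pixels) {
  const float kMinNumPixels = 160 * 90;
  if (!target_num_pixels) return 0.f;
  float best_distance = static_cast<float>(INT_MAX);
  float best_scale = 1.f;
  float pixels = static_cast<float>(width) * height;
  for (int i = 0; ; ++i) {
    float scale = factors[i];
    float test = pixels * scale * scale;
    if (test < kMinNumPixels) break;  // Too small, or hit the 0 terminator.
    float diff = target_num_pixels - test;
    if (diff < 0) diff *= up_bias;  // Overshoot: charge only |up_bias| of it.
    if (diff < best_distance) {
      best_distance = diff;
      best_scale = scale;
      if (best_distance == 0) break;  // Exact match.
    }
  }
  return best_scale;
}

int main() {
  // 1080p input, 720p requested: 3/4 overshoots by 244800 * 0.9 = 220320,
  // 1/2 undershoots by 403200, so 3/4 wins (1440x810). The view table adds
  // a 2/3 entry, which hits 1280x720 exactly.
  std::printf("scale = %g\n",
              ChooseScale(kTable, -0.9f, 1920, 1080, 1280 * 720));
  return 0;
}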
bool should_drop = false; @@ -236,8 +277,9 @@ bool VideoAdapter::AdaptFrame(const VideoFrame* in_frame, return true; } + float scale = 1.f; if (output_num_pixels_) { - float scale = VideoAdapter::FindClosestScale( + scale = VideoAdapter::FindClosestViewScale( static_cast<int>(in_frame->GetWidth()), static_cast<int>(in_frame->GetHeight()), output_num_pixels_); @@ -251,9 +293,45 @@ bool VideoAdapter::AdaptFrame(const VideoFrame* in_frame, } *out_frame = output_frame_.get(); + + // Show VAdapt log every 300 frames. (10 seconds) + // TODO(fbarchard): Consider GetLogSeverity() to change interval to less + // for LS_VERBOSE and more for LS_INFO. + bool show = frames_ % 300 == 0; + if (in_frame->GetWidth() != (*out_frame)->GetWidth() || + in_frame->GetHeight() != (*out_frame)->GetHeight()) { + ++adapted_frames_; + } + // TODO(fbarchard): LOG the previous output resolution and track input + // resolution changes as well. Consider dropping the statistics into their + // own class which could be queried publically. + bool changed = false; + if (previous_width && (previous_width != (*out_frame)->GetWidth() || + previous_height != (*out_frame)->GetHeight())) { + show = true; + ++adaption_changes_; + changed = true; + } + if (show) { + // TODO(fbarchard): Reduce to LS_VERBOSE when adapter info is not needed + // in default calls. + LOG(LS_INFO) << "VAdapt Frame: " << adapted_frames_ + << " / " << frames_ + << " Changes: " << adaption_changes_ + << " Input: " << in_frame->GetWidth() + << "x" << in_frame->GetHeight() + << " Scale: " << scale + << " Output: " << (*out_frame)->GetWidth() + << "x" << (*out_frame)->GetHeight() + << " Changed: " << (changed ? "true" : "false"); + } + previous_width = (*out_frame)->GetWidth(); + previous_height = (*out_frame)->GetHeight(); + return true; } +// Scale or Blacken the frame. Returns true if successful. bool VideoAdapter::StretchToOutputFrame(const VideoFrame* in_frame) { int output_width = output_format_.width; int output_height = output_format_.height; @@ -409,37 +487,12 @@ void CoordinatedVideoAdapter::OnEncoderResolutionRequest( << " To: " << new_width << "x" << new_height; } -// A CPU request for new resolution -void CoordinatedVideoAdapter::OnCpuLoadUpdated( - int current_cpus, int max_cpus, float process_load, float system_load) { +// A Bandwidth GD request for new resolution +void CoordinatedVideoAdapter::OnCpuResolutionRequest(AdaptRequest request) { talk_base::CritScope cs(&request_critical_section_); if (!cpu_adaptation_) { return; } - // Update the moving average of system load. Even if we aren't smoothing, - // we'll still calculate this information, in case smoothing is later enabled. - system_load_average_ = kCpuLoadWeightCoefficient * system_load + - (1.0f - kCpuLoadWeightCoefficient) * system_load_average_; - if (cpu_smoothing_) { - system_load = system_load_average_; - } - // If we haven't started taking samples yet, wait until we have at least - // the correct number of samples per the wait time. - if (cpu_adapt_wait_time_ == 0) { - cpu_adapt_wait_time_ = talk_base::TimeAfter(kCpuLoadMinSampleTime); - } - AdaptRequest request = FindCpuRequest(current_cpus, max_cpus, - process_load, system_load); - // Make sure we're not adapting too quickly. 
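OnCpuLoadUpdated(), re-added below with its resolution bookkeeping split out into OnCpuResolutionRequest(), smooths the reported system load with an exponentially weighted moving average, average = 0.4 * sample + 0.6 * average, and feeds the smoothed value to FindCpuRequest() when cpu_smoothing_ is enabled. A small numeric illustration of how that average tracks a sustained load spike (the two constants are the ones defined at the top of this file; the loop itself is only illustrative):

#include <cstdio>

int main() {
  const float kCpuLoadWeightCoefficient = 0.4f;
  const float kCpuLoadInitialAverage = 0.5f;
  float average = kCpuLoadInitialAverage;
  for (int sample = 1; sample <= 5; ++sample) {
    float system_load = 0.9f;  // Sustained high load.
    average = kCpuLoadWeightCoefficient * system_load +
              (1.0f - kCpuLoadWeightCoefficient) * average;
    std::printf("after sample %d: %.3f\n", sample, average);
  }
  // Prints roughly 0.660, 0.756, 0.814, 0.848, 0.869: each update closes
  // about 40% of the remaining gap to the new 0.9 load level.
  return 0;
}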
- if (request != KEEP) { - if (talk_base::TimeIsLater(talk_base::Time(), - cpu_adapt_wait_time_)) { - LOG(LS_VERBOSE) << "VAdapt CPU load high/low but do not adapt until " - << talk_base::TimeUntil(cpu_adapt_wait_time_) << " ms"; - request = KEEP; - } - } - // Update how many times we have downgraded due to the cpu load. switch (request) { case DOWNGRADE: @@ -482,13 +535,46 @@ void CoordinatedVideoAdapter::OnCpuLoadUpdated( LOG(LS_INFO) << "VAdapt CPU Request: " << (DOWNGRADE == request ? "down" : (UPGRADE == request ? "up" : "keep")) - << " Process: " << process_load - << " System: " << system_load << " Steps: " << cpu_downgrade_count_ << " Changed: " << (changed ? "true" : "false") << " To: " << new_width << "x" << new_height; } +// A CPU request for new resolution +// TODO(fbarchard): Move outside adapter. +void CoordinatedVideoAdapter::OnCpuLoadUpdated( + int current_cpus, int max_cpus, float process_load, float system_load) { + talk_base::CritScope cs(&request_critical_section_); + if (!cpu_adaptation_) { + return; + } + // Update the moving average of system load. Even if we aren't smoothing, + // we'll still calculate this information, in case smoothing is later enabled. + system_load_average_ = kCpuLoadWeightCoefficient * system_load + + (1.0f - kCpuLoadWeightCoefficient) * system_load_average_; + if (cpu_smoothing_) { + system_load = system_load_average_; + } + // If we haven't started taking samples yet, wait until we have at least + // the correct number of samples per the wait time. + if (cpu_adapt_wait_time_ == 0) { + cpu_adapt_wait_time_ = talk_base::TimeAfter(kCpuLoadMinSampleTime); + } + AdaptRequest request = FindCpuRequest(current_cpus, max_cpus, + process_load, system_load); + // Make sure we're not adapting too quickly. + if (request != KEEP) { + if (talk_base::TimeIsLater(talk_base::Time(), + cpu_adapt_wait_time_)) { + LOG(LS_VERBOSE) << "VAdapt CPU load high/low but do not adapt until " + << talk_base::TimeUntil(cpu_adapt_wait_time_) << " ms"; + request = KEEP; + } + } + + OnCpuResolutionRequest(request); +} + // Called by cpu adapter on up requests. bool CoordinatedVideoAdapter::IsMinimumFormat(int pixels) { // Find closest scale factor that matches input resolution to min_num_pixels @@ -522,51 +608,46 @@ bool CoordinatedVideoAdapter::AdaptToMinimumFormat(int* new_width, input = new_output; } int old_num_pixels = GetOutputNumPixels(); - // Find resolution that respects ViewRequest or less pixels. - int view_desired_num_pixels = view_desired_num_pixels_; - int min_num_pixels = view_desired_num_pixels_; - if (!input.IsSize0x0()) { - float scale = FindLowerScale(input.width, input.height, min_num_pixels); - min_num_pixels = view_desired_num_pixels = - static_cast<int>(input.width * input.height * scale * scale + .5f); - } - // Reduce resolution further, if necessary, based on encoder bandwidth (GD). + int min_num_pixels = INT_MAX; + adapt_reason_ = 0; + + // Reduce resolution based on encoder bandwidth (GD). if (encoder_desired_num_pixels_ && (encoder_desired_num_pixels_ < min_num_pixels)) { + adapt_reason_ |= ADAPTREASON_BANDWIDTH; min_num_pixels = encoder_desired_num_pixels_; } - // Reduce resolution further, if necessary, based on CPU. + // Reduce resolution based on CPU. 
if (cpu_adaptation_ && cpu_desired_num_pixels_ && - (cpu_desired_num_pixels_ < min_num_pixels)) { + (cpu_desired_num_pixels_ <= min_num_pixels)) { + if (cpu_desired_num_pixels_ < min_num_pixels) { + adapt_reason_ = ADAPTREASON_CPU; + } else { + adapt_reason_ |= ADAPTREASON_CPU; + } min_num_pixels = cpu_desired_num_pixels_; } - - // Determine which factors are keeping adapter resolution low. - // Caveat: Does not consider framerate. - adapt_reason_ = static_cast<AdaptReason>(0); - if (view_desired_num_pixels == min_num_pixels) { - adapt_reason_ |= ADAPTREASON_VIEW; - } - if (encoder_desired_num_pixels_ == min_num_pixels) { - adapt_reason_ |= ADAPTREASON_BANDWIDTH; - } - if (cpu_desired_num_pixels_ == min_num_pixels) { - adapt_reason_ |= ADAPTREASON_CPU; + // Round resolution for GD or CPU to allow 1/2 to map to 9/16. + if (!input.IsSize0x0() && min_num_pixels != INT_MAX) { + float scale = FindClosestScale(input.width, input.height, min_num_pixels); + min_num_pixels = static_cast<int>(input.width * scale + .5f) * + static_cast<int>(input.height * scale + .5f); } - - // Prevent going below QQVGA. - if (min_num_pixels > 0 && min_num_pixels < kMinNumPixels) { - min_num_pixels = kMinNumPixels; + // Reduce resolution based on View Request. + if (view_desired_num_pixels_ <= min_num_pixels) { + if (view_desired_num_pixels_ < min_num_pixels) { + adapt_reason_ = ADAPTREASON_VIEW; + } else { + adapt_reason_ |= ADAPTREASON_VIEW; + } + min_num_pixels = view_desired_num_pixels_; } - SetOutputNumPixels(min_num_pixels); - - // Find closest scale factor that matches input resolution to min_num_pixels - // and set that for output resolution. This is not needed for VideoAdapter, - // but provides feedback to unittests and users on expected resolution. - // Actual resolution is based on input frame. + // Snap to a scale factor. float scale = 1.0f; if (!input.IsSize0x0()) { - scale = FindClosestScale(input.width, input.height, min_num_pixels); + scale = FindLowerScale(input.width, input.height, min_num_pixels); + min_num_pixels = static_cast<int>(input.width * scale + .5f) * + static_cast<int>(input.height * scale + .5f); } if (scale == 1.0f) { adapt_reason_ = 0; @@ -574,6 +655,8 @@ bool CoordinatedVideoAdapter::AdaptToMinimumFormat(int* new_width, *new_width = new_output.width = static_cast<int>(input.width * scale + .5f); *new_height = new_output.height = static_cast<int>(input.height * scale + .5f); + SetOutputNumPixels(min_num_pixels); + new_output.interval = view_desired_interval_; SetOutputFormat(new_output); int new_num_pixels = GetOutputNumPixels(); diff --git a/chromium/third_party/libjingle/source/talk/media/base/videoadapter.h b/chromium/third_party/libjingle/source/talk/media/base/videoadapter.h index c41ac6e5cdc..2bd31d5df5c 100644 --- a/chromium/third_party/libjingle/source/talk/media/base/videoadapter.h +++ b/chromium/third_party/libjingle/source/talk/media/base/videoadapter.h @@ -45,8 +45,7 @@ class VideoAdapter { VideoAdapter(); virtual ~VideoAdapter(); - void SetInputFormat(const VideoFrame& in_frame); - void SetInputFormat(const VideoFormat& format); + virtual void SetInputFormat(const VideoFormat& format); void SetOutputFormat(const VideoFormat& format); // Constrain output resolution to this many pixels overall void SetOutputNumPixels(int num_pixels); @@ -65,16 +64,34 @@ class VideoAdapter { // the output frame. 
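The reworked AdaptToMinimumFormat() above takes the smallest of the encoder (bandwidth), CPU and view pixel targets and records every constraint that is binding in the adapt_reason_ bitmask: a strictly lower target replaces the mask, a tie ORs its reason in. A reduced model of that bookkeeping (the ADAPTREASON_* values below are illustrative, not the ones declared in videoadapter.h, and the scale-factor rounding steps are omitted):

#include <climits>
#include <cstdio>

enum AdaptReason {
  ADAPTREASON_NONE = 0,
  ADAPTREASON_CPU = 1,
  ADAPTREASON_BANDWIDTH = 2,
  ADAPTREASON_VIEW = 4,
};

int PickMinPixels(int bandwidth_pixels, int cpu_pixels, int view_pixels,
                  int* reason) {
  int min_pixels = INT_MAX;
  *reason = ADAPTREASON_NONE;
  if (bandwidth_pixels && bandwidth_pixels < min_pixels) {
    *reason |= ADAPTREASON_BANDWIDTH;
    min_pixels = bandwidth_pixels;
  }
  if (cpu_pixels && cpu_pixels <= min_pixels) {
    *reason = (cpu_pixels < min_pixels) ? ADAPTREASON_CPU
                                        : (*reason | ADAPTREASON_CPU);
    min_pixels = cpu_pixels;
  }
  if (view_pixels <= min_pixels) {
    *reason = (view_pixels < min_pixels) ? ADAPTREASON_VIEW
                                         : (*reason | ADAPTREASON_VIEW);
    min_pixels = view_pixels;
  }
  return min_pixels;
}

int main() {
  int reason = 0;
  // CPU and view both asking for 320x180 while bandwidth allows 640x360:
  // both get reported as the reason for the 320x180 output cap.
  int pixels = PickMinPixels(640 * 360, 320 * 180, 320 * 180, &reason);
  std::printf("pixels=%d reason=%d\n", pixels, reason);  // 57600, CPU|VIEW = 5
  return 0;
}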
bool AdaptFrame(const VideoFrame* in_frame, const VideoFrame** out_frame); + void set_scale_third(bool enable) { + LOG(LS_INFO) << "Video Adapter third scaling is now " + << (enable ? "enabled" : "disabled"); + scale_third_ = enable; + } + bool scale_third() const { return scale_third_; } + protected: float FindClosestScale(int width, int height, int target_num_pixels); + float FindClosestViewScale(int width, int height, int target_num_pixels); float FindLowerScale(int width, int height, int target_num_pixels); private: + const float* GetViewScaleFactors() const; + float FindScale(const float* scale_factors, + const float upbias, int width, int height, + int target_num_pixels); bool StretchToOutputFrame(const VideoFrame* in_frame); VideoFormat input_format_; VideoFormat output_format_; int output_num_pixels_; + bool scale_third_; // True if adapter allows scaling to 1/3 and 2/3. + int frames_; // Number of input frames. + int adapted_frames_; // Number of frames scaled. + int adaption_changes_; // Number of changes in scale factor. + size_t previous_width; // Previous adapter output width. + size_t previous_height; // Previous adapter output height. bool black_output_; // Flag to tell if we need to black output_frame_. bool is_black_; // Flag to tell if output_frame_ is currently black. int64 interval_next_frame_; @@ -102,6 +119,8 @@ class CoordinatedVideoAdapter CoordinatedVideoAdapter(); virtual ~CoordinatedVideoAdapter() {} + virtual void SetInputFormat(const VideoFormat& format); + // Enable or disable video adaptation due to the change of the CPU load. void set_cpu_adaptation(bool enable) { cpu_adaptation_ = enable; } bool cpu_adaptation() const { return cpu_adaptation_; } @@ -176,6 +195,8 @@ class CoordinatedVideoAdapter void OnOutputFormatRequest(const VideoFormat& format); // Handle the resolution request from the encoder due to bandwidth changes. void OnEncoderResolutionRequest(int width, int height, AdaptRequest request); + // Handle the resolution request for CPU overuse. + void OnCpuResolutionRequest(AdaptRequest request); // Handle the CPU load provided by a CPU monitor. void OnCpuLoadUpdated(int current_cpus, int max_cpus, float process_load, float system_load); diff --git a/chromium/third_party/libjingle/source/talk/media/base/videocapturer.cc b/chromium/third_party/libjingle/source/talk/media/base/videocapturer.cc index 3bc23731db0..acab19d6616 100644 --- a/chromium/third_party/libjingle/source/talk/media/base/videocapturer.cc +++ b/chromium/third_party/libjingle/source/talk/media/base/videocapturer.cc @@ -103,10 +103,12 @@ VideoCapturer::VideoCapturer(talk_base::Thread* thread) : thread_(thread) { void VideoCapturer::Construct() { ClearAspectRatio(); enable_camera_list_ = false; + square_pixel_aspect_ratio_ = false; capture_state_ = CS_STOPPED; SignalFrameCaptured.connect(this, &VideoCapturer::OnFrameCaptured); scaled_width_ = 0; scaled_height_ = 0; + screencast_max_pixels_ = 0; muted_ = false; black_frame_count_down_ = kNumBlackFramesOnMute; } @@ -323,24 +325,30 @@ void VideoCapturer::OnFrameCaptured(VideoCapturer*, #if !defined(DISABLE_YUV) if (IsScreencast()) { int scaled_width, scaled_height; - int desired_screencast_fps = capture_format_.get() ? 
- VideoFormat::IntervalToFps(capture_format_->interval) : - kDefaultScreencastFps; - ComputeScale(captured_frame->width, captured_frame->height, - desired_screencast_fps, &scaled_width, &scaled_height); - - if (scaled_width != scaled_width_ || scaled_height != scaled_height_) { - LOG(LS_VERBOSE) << "Scaling Screencast from " - << captured_frame->width << "x" - << captured_frame->height << " to " - << scaled_width << "x" << scaled_height; - scaled_width_ = scaled_width; - scaled_height_ = scaled_height; + if (screencast_max_pixels_ > 0) { + ComputeScaleMaxPixels(captured_frame->width, captured_frame->height, + screencast_max_pixels_, &scaled_width, &scaled_height); + } else { + int desired_screencast_fps = capture_format_.get() ? + VideoFormat::IntervalToFps(capture_format_->interval) : + kDefaultScreencastFps; + ComputeScale(captured_frame->width, captured_frame->height, + desired_screencast_fps, &scaled_width, &scaled_height); } + if (FOURCC_ARGB == captured_frame->fourcc && - (scaled_width != captured_frame->height || - scaled_height != captured_frame->height)) { - CapturedFrame* scaled_frame = const_cast<CapturedFrame*>(captured_frame); + (scaled_width != captured_frame->width || + scaled_height != captured_frame->height)) { + if (scaled_width != scaled_width_ || scaled_height != scaled_height_) { + LOG(LS_INFO) << "Scaling Screencast from " + << captured_frame->width << "x" + << captured_frame->height << " to " + << scaled_width << "x" << scaled_height; + scaled_width_ = scaled_width; + scaled_height_ = scaled_height; + } + CapturedFrame* modified_frame = + const_cast<CapturedFrame*>(captured_frame); // Compute new width such that width * height is less than maximum but // maintains original captured frame aspect ratio. // Round down width to multiple of 4 so odd width won't round up beyond @@ -349,17 +357,88 @@ void VideoCapturer::OnFrameCaptured(VideoCapturer*, libyuv::ARGBScale(reinterpret_cast<const uint8*>(captured_frame->data), captured_frame->width * 4, captured_frame->width, captured_frame->height, - reinterpret_cast<uint8*>(scaled_frame->data), + reinterpret_cast<uint8*>(modified_frame->data), scaled_width * 4, scaled_width, scaled_height, libyuv::kFilterBilinear); - scaled_frame->width = scaled_width; - scaled_frame->height = scaled_height; - scaled_frame->data_size = scaled_width * 4 * scaled_height; + modified_frame->width = scaled_width; + modified_frame->height = scaled_height; + modified_frame->data_size = scaled_width * 4 * scaled_height; } } + + const int kYuy2Bpp = 2; + const int kArgbBpp = 4; + // TODO(fbarchard): Make a helper function to adjust pixels to square. + // TODO(fbarchard): Hook up experiment to scaling. + // TODO(fbarchard): Avoid scale and convert if muted. + // Temporary buffer is scoped here so it will persist until i420_frame.Init() + // makes a copy of the frame, converting to I420. + talk_base::scoped_array<uint8> temp_buffer; + // YUY2 can be scaled vertically using an ARGB scaler. Aspect ratio is only + // a problem on OSX. OSX always converts webcams to YUY2 or UYVY. + bool can_scale = + FOURCC_YUY2 == CanonicalFourCC(captured_frame->fourcc) || + FOURCC_UYVY == CanonicalFourCC(captured_frame->fourcc); + + // If pixels are not square, optionally use vertical scaling to make them + // square. Square pixels simplify the rest of the pipeline, including + // effects and rendering. 
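The trick the following hunk relies on is that a YUY2 image can be handed to an ARGB scaler by halving the pixel width: YUY2 stores 2 bytes per pixel, so two adjacent YUY2 pixels occupy exactly one 4-byte "ARGB" pixel, and a vertical-only resize leaves that horizontal packing untouched. A small stand-alone model of the width and stride bookkeeping (no libyuv call here; names are illustrative):

#include <cstdio>

// Bookkeeping only: the real code feeds these numbers to libyuv::ARGBScale.
struct ArgbScaleArgs {
  int src_stride, src_width, src_height;
  int dst_stride, dst_width, dst_height;
};

// Treat a YUY2 surface as ARGB for a vertical-only resize: 2 bytes per YUY2
// pixel means two YUY2 pixels make one 4-byte "ARGB" pixel, so the width
// passed to the scaler is halved while strides stay in bytes.
static ArgbScaleArgs Yuy2AsArgb(int width, int height, int scaled_height) {
  const int kYuy2Bpp = 2;
  const int kArgbBpp = 4;
  ArgbScaleArgs args;
  args.src_stride = width * kYuy2Bpp;            // Bytes per source row.
  args.src_width = width * kYuy2Bpp / kArgbBpp;  // Half the pixel width.
  args.src_height = height;
  args.dst_stride = width * kYuy2Bpp;            // Width is unchanged here.
  args.dst_width = width * kYuy2Bpp / kArgbBpp;
  args.dst_height = scaled_height;               // Only the height changes.
  return args;
}

int main() {
  // 640x480 with tall 3:8 pixels stretched vertically to 640x1280 square pixels.
  ArgbScaleArgs args = Yuy2AsArgb(640, 480, 1280);
  std::printf("src %dx%d stride %d -> dst %dx%d stride %d\n",
              args.src_width, args.src_height, args.src_stride,
              args.dst_width, args.dst_height, args.dst_stride);
  return 0;
}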
+ if (can_scale && square_pixel_aspect_ratio_ && + captured_frame->pixel_width != captured_frame->pixel_height) { + int scaled_width, scaled_height; + // modified_frame points to the captured_frame but with const casted away + // so it can be modified. + CapturedFrame* modified_frame = const_cast<CapturedFrame*>(captured_frame); + // Compute the frame size that makes pixels square pixel aspect ratio. + ComputeScaleToSquarePixels(captured_frame->width, captured_frame->height, + captured_frame->pixel_width, + captured_frame->pixel_height, + &scaled_width, &scaled_height); + + if (scaled_width != scaled_width_ || scaled_height != scaled_height_) { + LOG(LS_INFO) << "Scaling WebCam from " + << captured_frame->width << "x" + << captured_frame->height << " to " + << scaled_width << "x" << scaled_height + << " for PAR " + << captured_frame->pixel_width << "x" + << captured_frame->pixel_height; + scaled_width_ = scaled_width; + scaled_height_ = scaled_height; + } + const int modified_frame_size = scaled_width * scaled_height * kYuy2Bpp; + uint8* temp_buffer_data; + // Pixels are wide and short; Increasing height. Requires temporary buffer. + if (scaled_height > captured_frame->height) { + temp_buffer.reset(new uint8[modified_frame_size]); + temp_buffer_data = temp_buffer.get(); + } else { + // Pixels are narrow and tall; Decreasing height. Scale will be done + // in place. + temp_buffer_data = reinterpret_cast<uint8*>(captured_frame->data); + } + + // Use ARGBScaler to vertically scale the YUY2 image, adjusting for 16 bpp. + libyuv::ARGBScale(reinterpret_cast<const uint8*>(captured_frame->data), + captured_frame->width * kYuy2Bpp, // Stride for YUY2. + captured_frame->width * kYuy2Bpp / kArgbBpp, // Width. + abs(captured_frame->height), // Height. + temp_buffer_data, + scaled_width * kYuy2Bpp, // Stride for YUY2. + scaled_width * kYuy2Bpp / kArgbBpp, // Width. + abs(scaled_height), // New height. + libyuv::kFilterBilinear); + modified_frame->width = scaled_width; + modified_frame->height = scaled_height; + modified_frame->pixel_width = 1; + modified_frame->pixel_height = 1; + modified_frame->data_size = modified_frame_size; + modified_frame->data = temp_buffer_data; + } #endif // !DISABLE_YUV - // Size to crop captured frame to. This adjusts the captured frames - // aspect ratio to match the final view aspect ratio, considering pixel + + // Size to crop captured frame to. This adjusts the captured frames + // aspect ratio to match the final view aspect ratio, considering pixel // aspect ratio and rotation. The final size may be scaled down by video // adapter to better match ratio_w_ x ratio_h_. // Note that abs() of frame height is passed in, because source may be diff --git a/chromium/third_party/libjingle/source/talk/media/base/videocapturer.h b/chromium/third_party/libjingle/source/talk/media/base/videocapturer.h index 3997976f893..933fc825000 100644 --- a/chromium/third_party/libjingle/source/talk/media/base/videocapturer.h +++ b/chromium/third_party/libjingle/source/talk/media/base/videocapturer.h @@ -236,11 +236,18 @@ class VideoCapturer bool enable_camera_list() { return enable_camera_list_; } + + // Enable scaling to ensure square pixels. + void set_square_pixel_aspect_ratio(bool square_pixel_aspect_ratio) { + square_pixel_aspect_ratio_ = square_pixel_aspect_ratio; + } + bool square_pixel_aspect_ratio() { + return square_pixel_aspect_ratio_; + } + // Signal all capture state changes that are not a direct result of calling // Start(). 
sigslot::signal2<VideoCapturer*, CaptureState> SignalStateChange; - // TODO(hellner): rename |SignalFrameCaptured| to something like - // |SignalRawFrame| or |SignalNativeFrame|. // Frame callbacks are multithreaded to allow disconnect and connect to be // called concurrently. It also ensures that it is safe to call disconnect // at any time which is needed since the signal may be called from an @@ -254,6 +261,17 @@ class VideoCapturer const VideoProcessors& video_processors() const { return video_processors_; } + // If 'screencast_max_pixels' is set greater than zero, screencasts will be + // scaled to be no larger than this value. + // If set to zero, the max pixels will be limited to + // Retina MacBookPro 15" resolution of 2880 x 1800. + // For high fps, maximum pixels limit is set based on common 24" monitor + // resolution of 2048 x 1280. + int screencast_max_pixels() const { return screencast_max_pixels_; } + void set_screencast_max_pixels(int p) { + screencast_max_pixels_ = talk_base::_max(0, p); + } + protected: // Callback attached to SignalFrameCaptured where SignalVideoFrames is called. void OnFrameCaptured(VideoCapturer* video_capturer, @@ -311,8 +329,10 @@ class VideoCapturer int ratio_w_; // View resolution. e.g. 1280 x 720. int ratio_h_; bool enable_camera_list_; + bool square_pixel_aspect_ratio_; // Enable scaling to square pixels. int scaled_width_; // Current output size from ComputeScale. int scaled_height_; + int screencast_max_pixels_; // Downscale screencasts further if requested. bool muted_; int black_frame_count_down_; diff --git a/chromium/third_party/libjingle/source/talk/media/base/videocapturer_unittest.cc b/chromium/third_party/libjingle/source/talk/media/base/videocapturer_unittest.cc index a6ce3ba9b31..82a95fb637d 100644 --- a/chromium/third_party/libjingle/source/talk/media/base/videocapturer_unittest.cc +++ b/chromium/third_party/libjingle/source/talk/media/base/videocapturer_unittest.cc @@ -194,6 +194,40 @@ TEST_F(VideoCapturerTest, CameraOffOnMute) { EXPECT_EQ(33, video_frames_received()); } +TEST_F(VideoCapturerTest, ScreencastScaledMaxPixels) { + capturer_.SetScreencast(true); + + int kWidth = 1280; + int kHeight = 720; + + // Screencasts usually have large weird dimensions and are ARGB. 
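For reference, the arithmetic the test continuing below depends on: with screencast_max_pixels set to 1280 * 720, the 2560 x 1440 capture is capped by ComputeScaleMaxPixels to a same-aspect-ratio size under the budget, which in this case lands exactly on the 1/2 scale factor, so the final FindLowerScale snap changes nothing. A quick stand-alone check of just the capping step (simplified; the real helper also clamps to codec maximum dimensions before snapping):

#include <cmath>
#include <cstdio>

// Simplified model of the capping step in ComputeScaleMaxPixels: shrink to at
// most max_pixels while keeping the aspect ratio.
static void CapToMaxPixels(int width, int height, int max_pixels,
                           int* out_width, int* out_height) {
  if (width * height > max_pixels) {
    width = static_cast<int>(
        std::sqrt(static_cast<float>(max_pixels) * width / height));
    height = max_pixels / width;
  }
  *out_width = width;
  *out_height = height;
}

int main() {
  int width = 0;
  int height = 0;
  // The test's 2x screencast (2560x1440) capped to kWidth * kHeight pixels.
  CapToMaxPixels(2 * 1280, 2 * 720, 1280 * 720, &width, &height);
  std::printf("%dx%d\n", width, height);  // 1280x720, matching renderer.SetSize().
  return 0;
}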
+ std::vector<cricket::VideoFormat> formats; + formats.push_back(cricket::VideoFormat(kWidth, kHeight, + cricket::VideoFormat::FpsToInterval(5), cricket::FOURCC_ARGB)); + formats.push_back(cricket::VideoFormat(2 * kWidth, 2 * kHeight, + cricket::VideoFormat::FpsToInterval(5), cricket::FOURCC_ARGB)); + capturer_.ResetSupportedFormats(formats); + + + EXPECT_EQ(0, capturer_.screencast_max_pixels()); + EXPECT_EQ(cricket::CS_RUNNING, capturer_.Start(cricket::VideoFormat( + 2 * kWidth, + 2 * kHeight, + cricket::VideoFormat::FpsToInterval(30), + cricket::FOURCC_ARGB))); + EXPECT_TRUE(capturer_.IsRunning()); + EXPECT_EQ(0, renderer_.num_rendered_frames()); + renderer_.SetSize(2 * kWidth, 2 * kHeight, 0); + EXPECT_TRUE(capturer_.CaptureFrame()); + EXPECT_EQ(1, renderer_.num_rendered_frames()); + + capturer_.set_screencast_max_pixels(kWidth * kHeight); + renderer_.SetSize(kWidth, kHeight, 0); + EXPECT_TRUE(capturer_.CaptureFrame()); + EXPECT_EQ(2, renderer_.num_rendered_frames()); +} + + TEST_F(VideoCapturerTest, TestFourccMatch) { cricket::VideoFormat desired(640, 480, cricket::VideoFormat::FpsToInterval(30), @@ -681,3 +715,29 @@ TEST_F(VideoCapturerTest, Whitelist) { capturer_.ConstrainSupportedFormats(vga_format); EXPECT_TRUE(HdFormatInList(*capturer_.GetSupportedFormats())); } + +TEST_F(VideoCapturerTest, BlacklistAllFormats) { + cricket::VideoFormat vga_format(640, 480, + cricket::VideoFormat::FpsToInterval(30), + cricket::FOURCC_I420); + std::vector<cricket::VideoFormat> supported_formats; + // Mock a device that only supports HD formats. + supported_formats.push_back(cricket::VideoFormat(1280, 720, + cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420)); + supported_formats.push_back(cricket::VideoFormat(1920, 1080, + cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420)); + capturer_.ResetSupportedFormats(supported_formats); + EXPECT_EQ(2u, capturer_.GetSupportedFormats()->size()); + // Now, enable the list, which would exclude both formats. However, since + // only HD formats are available, we refuse to filter at all, so we don't + // break this camera. + capturer_.set_enable_camera_list(true); + capturer_.ConstrainSupportedFormats(vga_format); + EXPECT_EQ(2u, capturer_.GetSupportedFormats()->size()); + // To make sure it's not just the camera list being broken, add in VGA and + // try again. This time, only the VGA format should be there. + supported_formats.push_back(vga_format); + capturer_.ResetSupportedFormats(supported_formats); + ASSERT_EQ(1u, capturer_.GetSupportedFormats()->size()); + EXPECT_EQ(vga_format.height, capturer_.GetSupportedFormats()->at(0).height); +} diff --git a/chromium/third_party/libjingle/source/talk/media/base/videocommon.cc b/chromium/third_party/libjingle/source/talk/media/base/videocommon.cc index 12f3bb36886..5dd45d73075 100644 --- a/chromium/third_party/libjingle/source/talk/media/base/videocommon.cc +++ b/chromium/third_party/libjingle/source/talk/media/base/videocommon.cc @@ -98,23 +98,17 @@ static float FindLowerScale(int width, int height, int target_num_pixels) { return kScaleFactors[best_index]; } -// Compute a size to scale frames to that is below maximum compression -// and rendering size with the same aspect ratio. -void ComputeScale(int frame_width, int frame_height, int fps, - int* scaled_width, int* scaled_height) { +// Computes a scale less to fit in max_pixels while maintaining aspect ratio. 
+void ComputeScaleMaxPixels(int frame_width, int frame_height, int max_pixels, + int* scaled_width, int* scaled_height) { ASSERT(scaled_width != NULL); ASSERT(scaled_height != NULL); + ASSERT(max_pixels > 0); // For VP8 the values for max width and height can be found here // webrtc/src/video_engine/vie_defines.h (kViEMaxCodecWidth and // kViEMaxCodecHeight) const int kMaxWidth = 4096; const int kMaxHeight = 3072; - // Maximum pixels limit is set to Retina MacBookPro 15" resolution of - // 2880 x 1800 as of 4/18/2013. - // For high fps, maximum pixels limit is set based on common 24" monitor - // resolution of 2048 x 1280 as of 6/13/2013. The Retina resolution is - // therefore reduced to 1440 x 900. - int kMaxPixels = (fps > 5) ? 2048 * 1280 : 2880 * 1800; int new_frame_width = frame_width; int new_frame_height = frame_height; @@ -129,12 +123,12 @@ void ComputeScale(int frame_width, int frame_height, int fps, new_frame_height = kMaxHeight; } // Limit number of pixels. - if (new_frame_width * new_frame_height > kMaxPixels) { + if (new_frame_width * new_frame_height > max_pixels) { // Compute new width such that width * height is less than maximum but // maintains original captured frame aspect ratio. new_frame_width = static_cast<int>(sqrtf(static_cast<float>( - kMaxPixels) * new_frame_width / new_frame_height)); - new_frame_height = kMaxPixels / new_frame_width; + max_pixels) * new_frame_width / new_frame_height)); + new_frame_height = max_pixels / new_frame_width; } // Snap to a scale factor that is less than or equal to target pixels. float scale = FindLowerScale(frame_width, frame_height, @@ -143,6 +137,20 @@ void ComputeScale(int frame_width, int frame_height, int fps, *scaled_height = static_cast<int>(frame_height * scale + .5f); } +// Compute a size to scale frames to that is below maximum compression +// and rendering size with the same aspect ratio. +void ComputeScale(int frame_width, int frame_height, int fps, + int* scaled_width, int* scaled_height) { + // Maximum pixels limit is set to Retina MacBookPro 15" resolution of + // 2880 x 1800 as of 4/18/2013. + // For high fps, maximum pixels limit is set based on common 24" monitor + // resolution of 2048 x 1280 as of 6/13/2013. The Retina resolution is + // therefore reduced to 1440 x 900. + int max_pixels = (fps > 5) ? 2048 * 1280 : 2880 * 1800; + ComputeScaleMaxPixels( + frame_width, frame_height, max_pixels, scaled_width, scaled_height); +} + // Compute size to crop video frame to. // If cropped_format_* is 0, return the frame_* size as is. void ComputeCrop(int cropped_format_width, @@ -209,6 +217,14 @@ void ComputeCrop(int cropped_format_width, } } +// Compute the frame size that makes pixels square pixel aspect ratio. +void ComputeScaleToSquarePixels(int in_width, int in_height, + int pixel_width, int pixel_height, + int* scaled_width, int* scaled_height) { + *scaled_width = in_width; // Keep width the same. 
+ *scaled_height = in_height * pixel_height / pixel_width; +} + // The C++ standard requires a namespace-scope definition of static const // integral types even when they are initialized in the declaration (see // [class.static.data]/4), but MSVC with /Ze is non-conforming and treats that diff --git a/chromium/third_party/libjingle/source/talk/media/base/videocommon.h b/chromium/third_party/libjingle/source/talk/media/base/videocommon.h index 098651f3df8..cf24f6fbb39 100644 --- a/chromium/third_party/libjingle/source/talk/media/base/videocommon.h +++ b/chromium/third_party/libjingle/source/talk/media/base/videocommon.h @@ -25,7 +25,7 @@ // // Common definition for video, including fourcc and VideoFormat. -#ifndef TALK_MEDIA_BASE_VIDEOCOMMON_H_ +#ifndef TALK_MEDIA_BASE_VIDEOCOMMON_H_ // NOLINT #define TALK_MEDIA_BASE_VIDEOCOMMON_H_ #include <string> @@ -147,6 +147,15 @@ inline std::string GetFourccName(uint32 fourcc) { return name; } +// Computes a scale less to fit in max_pixels while maintaining aspect ratio. +void ComputeScaleMaxPixels(int frame_width, int frame_height, int max_pixels, + int* scaled_width, int* scaled_height); + +// For low fps, max pixels limit is set to Retina MacBookPro 15" resolution of +// 2880 x 1800 as of 4/18/2013. +// For high fps, maximum pixels limit is set based on common 24" monitor +// resolution of 2048 x 1280 as of 6/13/2013. The Retina resolution is +// therefore reduced to 1440 x 900. void ComputeScale(int frame_width, int frame_height, int fps, int* scaled_width, int* scaled_height); @@ -158,6 +167,11 @@ void ComputeCrop(int cropped_format_width, int cropped_format_height, int rotation, int* cropped_width, int* cropped_height); +// Compute the frame size that makes pixels square pixel aspect ratio. +void ComputeScaleToSquarePixels(int in_width, int in_height, + int pixel_width, int pixel_height, + int* scaled_width, int* scaled_height); + ////////////////////////////////////////////////////////////////////////////// // Definition of VideoFormat. ////////////////////////////////////////////////////////////////////////////// @@ -239,4 +253,4 @@ struct VideoFormat : VideoFormatPod { } // namespace cricket -#endif // TALK_MEDIA_BASE_VIDEOCOMMON_H_ +#endif // TALK_MEDIA_BASE_VIDEOCOMMON_H_ // NOLINT diff --git a/chromium/third_party/libjingle/source/talk/media/base/videocommon_unittest.cc b/chromium/third_party/libjingle/source/talk/media/base/videocommon_unittest.cc index e9cd26a1729..91228437351 100644 --- a/chromium/third_party/libjingle/source/talk/media/base/videocommon_unittest.cc +++ b/chromium/third_party/libjingle/source/talk/media/base/videocommon_unittest.cc @@ -287,4 +287,26 @@ TEST(VideoCommonTest, TestComputeCrop) { EXPECT_EQ(768, cropped_height); } +TEST(VideoCommonTest, TestComputeScaleToSquarePixels) { + int scaled_width, scaled_height; + + // Pixel aspect ratio is 4:3. Logical aspect ratio is 16:9. Expect scale + // to square pixels with physical aspect ratio of 16:9. + ComputeScaleToSquarePixels(640, 480, + 4, 3, // 4 x 3 pixel aspect ratio + &scaled_width, &scaled_height); + EXPECT_EQ(640, scaled_width); + EXPECT_EQ(360, scaled_height); + + // Pixel aspect ratio is 3:8. Physical aspect ratio is 4:3. Expect scale + // to square pixels with logical aspect ratio of 1:2. + // Note that 640x1280 will be scaled down by video adapter to view request + // of 640*360 and will end up using 320x640. 
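Worked through, the two cases this test covers follow scaled_height = in_height * pixel_height / pixel_width: 480 * 3 / 4 = 360 for wide 4:3 pixels (asserted above) and 480 * 8 / 3 = 1280 for tall 3:8 pixels (the call that follows). A tiny stand-alone copy of that arithmetic, for illustration only and not the real declaration:

#include <cassert>

// Keep the width and let the height absorb the pixel aspect ratio.
static void ToSquarePixels(int in_width, int in_height,
                           int pixel_width, int pixel_height,
                           int* out_width, int* out_height) {
  *out_width = in_width;
  *out_height = in_height * pixel_height / pixel_width;
}

int main() {
  int width = 0;
  int height = 0;
  ToSquarePixels(640, 480, 4, 3, &width, &height);  // Wide 4:3 pixels.
  assert(width == 640 && height == 360);
  ToSquarePixels(640, 480, 3, 8, &width, &height);  // Tall 3:8 pixels.
  assert(width == 640 && height == 1280);
  return 0;
}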
+ ComputeScaleToSquarePixels(640, 480, + 3, 8, // 4 x 3 pixel aspect ratio + &scaled_width, &scaled_height); + EXPECT_EQ(640, scaled_width); + EXPECT_EQ(1280, scaled_height); +} + } // namespace cricket diff --git a/chromium/third_party/libjingle/source/talk/media/base/videoengine_unittest.h b/chromium/third_party/libjingle/source/talk/media/base/videoengine_unittest.h index e9f7612dfa2..d8b9bcb2955 100644 --- a/chromium/third_party/libjingle/source/talk/media/base/videoengine_unittest.h +++ b/chromium/third_party/libjingle/source/talk/media/base/videoengine_unittest.h @@ -23,7 +23,7 @@ // OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF // ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -#ifndef TALK_MEDIA_BASE_VIDEOENGINE_UNITTEST_H_ +#ifndef TALK_MEDIA_BASE_VIDEOENGINE_UNITTEST_H_ // NOLINT #define TALK_MEDIA_BASE_VIDEOENGINE_UNITTEST_H_ #include <string> @@ -1373,8 +1373,10 @@ class VideoMediaChannelTest : public testing::Test, EXPECT_EQ(0, renderer.num_rendered_frames()); EXPECT_TRUE(SendFrame()); - EXPECT_FRAME_ON_RENDERER_WAIT(renderer, 1, codec.width, codec.height, - kTimeout); + EXPECT_TRUE_WAIT(renderer.num_rendered_frames() >= 1 && + codec.width == renderer.width() && + codec.height == renderer.height(), kTimeout); + EXPECT_EQ(0, renderer.errors()); // Registering an external capturer is currently the same as screen casting // (update the test when this changes). @@ -1392,8 +1394,8 @@ class VideoMediaChannelTest : public testing::Test, EXPECT_TRUE(capturer->CaptureCustomFrame(kWidth, kHeight, cricket::FOURCC_ARGB)); EXPECT_TRUE(capturer->CaptureFrame()); - EXPECT_EQ_WAIT(2, renderer.num_rendered_frames(), kTimeout); - EXPECT_TRUE_WAIT(kScaledWidth == renderer.width() && + EXPECT_TRUE_WAIT(renderer.num_rendered_frames() >= 2 && + kScaledWidth == renderer.width() && kScaledHeight == renderer.height(), kTimeout); EXPECT_TRUE(channel_->SetCapturer(kSsrc, NULL)); } @@ -1665,4 +1667,4 @@ class VideoMediaChannelTest : public testing::Test, cricket::FakeVideoRenderer renderer2_; }; -#endif // TALK_MEDIA_BASE_VIDEOENGINE_UNITTEST_H_ +#endif // TALK_MEDIA_BASE_VIDEOENGINE_UNITTEST_H_ NOLINT diff --git a/chromium/third_party/libjingle/source/talk/media/devices/devicemanager.cc b/chromium/third_party/libjingle/source/talk/media/devices/devicemanager.cc index 2ce5eb08b82..6f4aa33ffca 100644 --- a/chromium/third_party/libjingle/source/talk/media/devices/devicemanager.cc +++ b/chromium/third_party/libjingle/source/talk/media/devices/devicemanager.cc @@ -149,9 +149,9 @@ bool DeviceManager::GetAudioOutputDevice(const std::string& name, Device* out) { bool DeviceManager::GetVideoCaptureDevices(std::vector<Device>* devices) { devices->clear(); -#if defined(IOS) - // On iOS, we treat the camera(s) as a single device. Even if there are - // multiple cameras, that's abstracted away at a higher level. +#if defined(ANDROID) || defined(IOS) + // On Android and iOS, we treat the camera(s) as a single device. Even if + // there are multiple cameras, that's abstracted away at a higher level. 
Device dev("camera", "1"); // name and ID devices->push_back(dev); return true; diff --git a/chromium/third_party/libjingle/source/talk/media/sctp/sctpdataengine.cc b/chromium/third_party/libjingle/source/talk/media/sctp/sctpdataengine.cc index 2f9b117b913..3d450c6863b 100644 --- a/chromium/third_party/libjingle/source/talk/media/sctp/sctpdataengine.cc +++ b/chromium/third_party/libjingle/source/talk/media/sctp/sctpdataengine.cc @@ -64,28 +64,58 @@ struct SctpInboundPacket { int flags; }; -// Helper for logging SCTP data. Given a buffer, returns a readable string. +// Helper for logging SCTP messages. static void debug_sctp_printf(const char *format, ...) { char s[255]; va_list ap; va_start(ap, format); vsnprintf(s, sizeof(s), format, ap); - LOG(LS_INFO) << s; - // vprintf(format, ap); + LOG(LS_INFO) << "SCTP: " << s; va_end(ap); } -// Helper for make a string dump of some SCTP data. Used for LOG -// debugging messages. -static std::string SctpDataToDebugString(void* buffer, size_t length, - int dump_type) { - char *dump_buf = usrsctp_dumppacket(buffer, length, dump_type); - if (!dump_buf) { - return ""; - } - std::string s = std::string(dump_buf); - usrsctp_freedumpbuffer(dump_buf); - return s; +// Get the PPID to use for the terminating fragment of this type. +static SctpDataMediaChannel::PayloadProtocolIdentifier GetPpid( + cricket::DataMessageType type) { + switch (type) { + default: + case cricket::DMT_NONE: + return SctpDataMediaChannel::PPID_NONE; + case cricket::DMT_CONTROL: + return SctpDataMediaChannel::PPID_CONTROL; + case cricket::DMT_BINARY: + return SctpDataMediaChannel::PPID_BINARY_LAST; + case cricket::DMT_TEXT: + return SctpDataMediaChannel::PPID_TEXT_LAST; + }; +} + +static bool GetDataMediaType( + SctpDataMediaChannel::PayloadProtocolIdentifier ppid, + cricket::DataMessageType *dest) { + ASSERT(dest != NULL); + switch (ppid) { + case SctpDataMediaChannel::PPID_BINARY_PARTIAL: + case SctpDataMediaChannel::PPID_BINARY_LAST: + *dest = cricket::DMT_BINARY; + return true; + + case SctpDataMediaChannel::PPID_TEXT_PARTIAL: + case SctpDataMediaChannel::PPID_TEXT_LAST: + *dest = cricket::DMT_TEXT; + return true; + + case SctpDataMediaChannel::PPID_CONTROL: + *dest = cricket::DMT_CONTROL; + return true; + + case SctpDataMediaChannel::PPID_NONE: + *dest = cricket::DMT_NONE; + return true; + + default: + return false; + } } // This is the callback usrsctp uses when there's data to send on the network @@ -96,9 +126,7 @@ static int OnSctpOutboundPacket(void* addr, void* data, size_t length, LOG(LS_VERBOSE) << "global OnSctpOutboundPacket():" << "addr: " << addr << "; length: " << length << "; tos: " << std::hex << static_cast<int>(tos) - << "; set_df: " << std::hex << static_cast<int>(set_df) - << "; data:" << SctpDataToDebugString(data, length, - SCTP_DUMP_OUTBOUND); + << "; set_df: " << std::hex << static_cast<int>(set_df); // Note: We have to copy the data; the caller will delete it. talk_base::Buffer* buffer = new talk_base::Buffer(data, length); channel->worker_thread()->Post(channel, MSG_SCTPOUTBOUNDPACKET, @@ -114,37 +142,29 @@ static int OnSctpInboundPacket(struct socket* sock, union sctp_sockstore addr, void* data, size_t length, struct sctp_rcvinfo rcv, int flags, void* ulp_info) { - LOG(LS_VERBOSE) << "global OnSctpInboundPacket... 
Msg of length " - << length << " received via " << addr.sconn.sconn_addr << ":" - << talk_base::NetworkToHost16(addr.sconn.sconn_port) - << " on stream " << rcv.rcv_sid - << " with SSN " << rcv.rcv_ssn - << " and TSN " << rcv.rcv_tsn << ", PPID " - << talk_base::NetworkToHost32(rcv.rcv_ppid) - << ", context " << rcv.rcv_context - << ", data: " << data - << ", ulp_info:" << ulp_info - << ", flags:" << std::hex << flags; SctpDataMediaChannel* channel = static_cast<SctpDataMediaChannel*>(ulp_info); - // The second log call is useful when the defines flags are incorrect. In - // this case, ulp_info ends up being bad and the second log message will - // cause a crash. - LOG(LS_VERBOSE) << "global OnSctpInboundPacket. channel=" - << channel->debug_name() << "..."; // Post data to the channel's receiver thread (copying it). // TODO(ldixon): Unclear if copy is needed as this method is responsible for // memory cleanup. But this does simplify code. - const uint32 native_ppid = talk_base::HostToNetwork32(rcv.rcv_ppid); - SctpInboundPacket* packet = new SctpInboundPacket(); - packet->buffer.SetData(data, length); - packet->params.ssrc = rcv.rcv_sid; - packet->params.seq_num = rcv.rcv_ssn; - packet->params.timestamp = rcv.rcv_tsn; - packet->params.type = - static_cast<cricket::DataMessageType>(native_ppid); - packet->flags = flags; - channel->worker_thread()->Post(channel, MSG_SCTPINBOUNDPACKET, - talk_base::WrapMessageData(packet)); + const SctpDataMediaChannel::PayloadProtocolIdentifier ppid = + static_cast<SctpDataMediaChannel::PayloadProtocolIdentifier>( + talk_base::HostToNetwork32(rcv.rcv_ppid)); + cricket::DataMessageType type = cricket::DMT_NONE; + if (!GetDataMediaType(ppid, &type) && !(flags & MSG_NOTIFICATION)) { + // It's neither a notification nor a recognized data packet. Drop it. + LOG(LS_ERROR) << "Received an unknown PPID " << ppid + << " on an SCTP packet. Dropping."; + } else { + SctpInboundPacket* packet = new SctpInboundPacket; + packet->buffer.SetData(data, length); + packet->params.ssrc = rcv.rcv_sid; + packet->params.seq_num = rcv.rcv_ssn; + packet->params.timestamp = rcv.rcv_tsn; + packet->params.type = type; + packet->flags = flags; + channel->worker_thread()->Post(channel, MSG_SCTPINBOUNDPACKET, + talk_base::WrapMessageData(packet)); + } free(data); return 1; } @@ -181,6 +201,14 @@ SctpDataEngine::SctpDataEngine() { // See: http://lakerest.net/pipermail/sctp-coders/2012-January/009438.html // See: http://svnweb.freebsd.org/base?view=revision&revision=229805 // usrsctp_sysctl_set_sctp_blackhole(2); + + // Set the number of default outgoing streams. This is the number we'll + // send in the SCTP INIT message. The 'appropriate default' in the + // second paragraph of + // http://tools.ietf.org/html/draft-ietf-rtcweb-data-channel-05#section-6.2 + // is cricket::kMaxSctpSid. + usrsctp_sysctl_set_sctp_nr_outgoing_streams_default( + cricket::kMaxSctpSid); } usrsctp_engines_count++; @@ -270,6 +298,13 @@ bool SctpDataMediaChannel::OpenSctpSocket() { return false; } + uint32_t nodelay = 1; + if (usrsctp_setsockopt(sock_, IPPROTO_SCTP, SCTP_NODELAY, &nodelay, + sizeof(nodelay))) { + LOG_ERRNO(LS_ERROR) << debug_name_ << "Failed to set SCTP_NODELAY."; + return false; + } + // Subscribe to SCTP event notifications. int event_types[] = {SCTP_ASSOC_CHANGE, SCTP_PEER_ADDR_CHANGE, @@ -430,7 +465,8 @@ bool SctpDataMediaChannel::SendData( const talk_base::Buffer& payload, SendDataResult* result) { if (result) { - // If we return true, we'll set this to SDR_SUCCESS. 
+ // Preset |result| to assume an error. If SendData succeeds, we'll + // overwrite |*result| once more at the end. *result = SDR_ERROR; } @@ -450,41 +486,36 @@ bool SctpDataMediaChannel::SendData( return false; } - // TODO(ldixon): Experiment with sctp_sendv_spa instead of sctp_sndinfo. e.g. - // struct sctp_sendv_spa spa = {0}; - // spa.sendv_flags |= SCTP_SEND_SNDINFO_VALID; - // spa.sendv_sndinfo.snd_sid = params.ssrc; - // spa.sendv_sndinfo.snd_context = 0; - // spa.sendv_sndinfo.snd_assoc_id = 0; - // TODO(pthatcher): Support different types of protocols (e.g. SSL) and - // messages (e.g. Binary) via SendDataParams. - // spa.sendv_sndinfo.snd_ppid = htonl(PPID_NONE); - // TODO(pthatcher): Support different reliability semantics. - // For reliable: Remove SCTP_UNORDERED. - // For partially-reliable: Add rtx or ttl. - // spa.sendv_sndinfo.snd_flags = SCTP_UNORDERED; - // TODO(phatcher): Try some of these things. - // spa.sendv_flags |= SCTP_SEND_PRINFO_VALID; - // spa.sendv_prinfo.pr_policy = SCTP_PR_SCTP_RTX; - // spa.sendv_prinfo.pr_value = htons(max_retransmit_count); - // spa.sendv_prinfo.pr_policy = SCTP_PR_SCTP_TTL; - // spa.sendv_prinfo.pr_value = htons(max_retransmit_time); // // Send data using SCTP. - sctp_sndinfo sndinfo = {0}; - sndinfo.snd_sid = params.ssrc; - sndinfo.snd_flags = 0; - // TODO(pthatcher): Once data types are added to SendParams, this can be set - // from SendParams. - sndinfo.snd_ppid = talk_base::HostToNetwork32(params.type); - sndinfo.snd_context = 0; - sndinfo.snd_assoc_id = 0; - ssize_t res = usrsctp_sendv(sock_, payload.data(), - static_cast<size_t>(payload.length()), - NULL, 0, &sndinfo, - static_cast<socklen_t>(sizeof(sndinfo)), - SCTP_SENDV_SNDINFO, 0); - if (res < 0) { + ssize_t send_res = 0; // result from usrsctp_sendv. + struct sctp_sendv_spa spa = {0}; + spa.sendv_flags |= SCTP_SEND_SNDINFO_VALID; + spa.sendv_sndinfo.snd_sid = params.ssrc; + spa.sendv_sndinfo.snd_ppid = talk_base::HostToNetwork32( + GetPpid(params.type)); + + // Ordered implies reliable. + if (!params.ordered) { + spa.sendv_sndinfo.snd_flags |= SCTP_UNORDERED; + if (params.max_rtx_count >= 0 || params.max_rtx_ms == 0) { + spa.sendv_flags |= SCTP_SEND_PRINFO_VALID; + spa.sendv_prinfo.pr_policy = SCTP_PR_SCTP_RTX; + spa.sendv_prinfo.pr_value = params.max_rtx_count; + } else { + spa.sendv_flags |= SCTP_SEND_PRINFO_VALID; + spa.sendv_prinfo.pr_policy = SCTP_PR_SCTP_TTL; + spa.sendv_prinfo.pr_value = params.max_rtx_ms; + } + } + + // We don't fragment. + send_res = usrsctp_sendv(sock_, payload.data(), + static_cast<size_t>(payload.length()), + NULL, 0, &spa, + static_cast<socklen_t>(sizeof(spa)), + SCTP_SENDV_SPA, 0); + if (send_res < 0) { if (errno == EWOULDBLOCK) { *result = SDR_BLOCK; LOG(LS_INFO) << debug_name_ << "->SendData(...): EWOULDBLOCK returned"; @@ -496,7 +527,7 @@ bool SctpDataMediaChannel::SendData( return false; } if (result) { - // If we return true, we'll set this to SDR_SUCCESS. + // Only way out now is success. *result = SDR_SUCCESS; } return true; @@ -504,17 +535,13 @@ bool SctpDataMediaChannel::SendData( // Called by network interface when a packet has been received. 
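The reliability handling in the SendData rewrite above reduces to a small decision: ordered implies fully reliable, and an unordered message may additionally carry either a retransmission-count or a time-to-live partial-reliability policy. A stand-alone sketch of just that mapping, using plain enums and structs in place of the usrsctp types and of cricket::SendDataParams:

#include <cstdio>

// Illustrative stand-ins; the real code fills struct sctp_sendv_spa and uses
// the SCTP_UNORDERED flag plus the SCTP_PR_SCTP_RTX / SCTP_PR_SCTP_TTL
// policies from usrsctp.
enum PrPolicy { PR_NONE, PR_RTX, PR_TTL };

struct SendPlan {
  bool unordered;
  PrPolicy policy;
  int policy_value;
};

// ordered / max_rtx_count / max_rtx_ms mirror the fields consulted above.
static SendPlan PlanSend(bool ordered, int max_rtx_count, int max_rtx_ms) {
  SendPlan plan = {false, PR_NONE, 0};
  if (ordered)
    return plan;                       // Ordered implies reliable: no PR info.
  plan.unordered = true;
  if (max_rtx_count >= 0 || max_rtx_ms == 0) {
    plan.policy = PR_RTX;              // Give up after N retransmissions.
    plan.policy_value = max_rtx_count;
  } else {
    plan.policy = PR_TTL;              // Give up once the message is this old.
    plan.policy_value = max_rtx_ms;
  }
  return plan;
}

int main() {
  SendPlan plan = PlanSend(false, -1, 3000);  // Unordered with a 3 s lifetime.
  std::printf("unordered=%d policy=%d value=%d\n",
              plan.unordered, plan.policy, plan.policy_value);
  return 0;
}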
void SctpDataMediaChannel::OnPacketReceived(talk_base::Buffer* packet) { - LOG(LS_VERBOSE) << debug_name_ << "->OnPacketReceived(...): " - << " length=" << packet->length() << "; data=" - << SctpDataToDebugString(packet->data(), packet->length(), - SCTP_DUMP_INBOUND); + LOG(LS_VERBOSE) << debug_name_ << "->OnPacketReceived(...): " << " length=" + << packet->length() << ", sending: " << sending_; // Only give receiving packets to usrsctp after if connected. This enables two // peers to each make a connect call, but for them not to receive an INIT // packet before they have called connect; least the last receiver of the INIT // packet will have called connect, and a connection will be established. if (sending_) { - LOG(LS_VERBOSE) << debug_name_ << "->OnPacketReceived(...):" - << " Passed packet to sctp."; // Pass received packet to SCTP stack. Once processed by usrsctp, the data // will be will be given to the global OnSctpInboundData, and then, // marshalled by a Post and handled with OnMessage. @@ -522,8 +549,6 @@ void SctpDataMediaChannel::OnPacketReceived(talk_base::Buffer* packet) { } else { // TODO(ldixon): Consider caching the packet for very slightly better // reliability. - LOG(LS_INFO) << debug_name_ << "->OnPacketReceived(...):" - << " Threw packet (probably an INIT) away."; } } @@ -532,10 +557,8 @@ void SctpDataMediaChannel::OnInboundPacketFromSctpToChannel( LOG(LS_VERBOSE) << debug_name_ << "->OnInboundPacketFromSctpToChannel(...): " << "Received SCTP data:" << " ssrc=" << packet->params.ssrc - << " data='" << std::string(packet->buffer.data(), - packet->buffer.length()) << " notification: " << (packet->flags & MSG_NOTIFICATION) - << "' length=" << packet->buffer.length(); + << " length=" << packet->buffer.length(); // Sending a packet with data == NULL (no data) is SCTPs "close the // connection" message. This sets sock_ = NULL; if (!packet->buffer.length() || !packet->buffer.data()) { diff --git a/chromium/third_party/libjingle/source/talk/media/sctp/sctpdataengine.h b/chromium/third_party/libjingle/source/talk/media/sctp/sctpdataengine.h index d62eff1aeaa..429016e0e6a 100644 --- a/chromium/third_party/libjingle/source/talk/media/sctp/sctpdataengine.h +++ b/chromium/third_party/libjingle/source/talk/media/sctp/sctpdataengine.h @@ -54,6 +54,10 @@ struct sctp_assoc_change; struct socket; namespace cricket { +// The highest stream ID (Sid) that SCTP allows, and the number of streams we +// tell SCTP we're going to use. +const uint32 kMaxSctpSid = USHRT_MAX; + // A DataEngine that interacts with usrsctp. // // From channel calls, data flows like this: @@ -108,12 +112,14 @@ class SctpDataMediaChannel : public DataMediaChannel, // on top of SCTP. enum PayloadProtocolIdentifier { PPID_NONE = 0, // No protocol is specified. - // Specified by Mozilla. Not clear that this is actually part of the - // standard. Use with caution! - // http://mxr.mozilla.org/mozilla-central/source/netwerk/sctp/datachannel/DataChannelProtocol.h#22 + // Matches the PPIDs in mozilla source and + // https://datatracker.ietf.org/doc/draft-ietf-rtcweb-data-protocol Sec. 9 + // They're not yet assigned by IANA. PPID_CONTROL = 50, - PPID_TEXT = 51, - PPID_BINARY = 52, + PPID_BINARY_PARTIAL = 52, + PPID_BINARY_LAST = 53, + PPID_TEXT_PARTIAL = 54, + PPID_TEXT_LAST = 51 }; // Given a thread which will be used to post messages (received data) to this @@ -208,11 +214,7 @@ class SctpDataMediaChannel : public DataMediaChannel, // related to the ports at the IP level. 
int local_port_; int remote_port_; - // TODO(ldixon): investigate why removing 'struct' makes the compiler - // complain. - // - // The socket created by usrsctp_socket(...). - struct socket* sock_; + struct socket* sock_; // The socket created by usrsctp_socket(...). // sending_ is true iff there is a connected socket. bool sending_; diff --git a/chromium/third_party/libjingle/source/talk/media/sctp/sctpdataengine_unittest.cc b/chromium/third_party/libjingle/source/talk/media/sctp/sctpdataengine_unittest.cc index 2b8787f3aac..363e7dfeb64 100644 --- a/chromium/third_party/libjingle/source/talk/media/sctp/sctpdataengine_unittest.cc +++ b/chromium/third_party/libjingle/source/talk/media/sctp/sctpdataengine_unittest.cc @@ -60,7 +60,8 @@ class SctpFakeNetworkInterface : public cricket::MediaChannel::NetworkInterface, protected: // Called to send raw packet down the wire (e.g. SCTP an packet). - virtual bool SendPacket(talk_base::Buffer* packet) { + virtual bool SendPacket(talk_base::Buffer* packet, + talk_base::DiffServCodePoint dscp) { LOG(LS_VERBOSE) << "SctpFakeNetworkInterface::SendPacket"; // TODO(ldixon): Can/should we use Buffer.TransferTo here? @@ -89,7 +90,8 @@ class SctpFakeNetworkInterface : public cricket::MediaChannel::NetworkInterface, // Unsupported functions required to exist by NetworkInterface. // TODO(ldixon): Refactor parent NetworkInterface class so these are not // required. They are RTC specific and should be in an appropriate subclass. - virtual bool SendRtcp(talk_base::Buffer* packet) { + virtual bool SendRtcp(talk_base::Buffer* packet, + talk_base::DiffServCodePoint dscp) { LOG(LS_WARNING) << "Unsupported: SctpFakeNetworkInterface::SendRtcp."; return false; } @@ -98,6 +100,9 @@ class SctpFakeNetworkInterface : public cricket::MediaChannel::NetworkInterface, LOG(LS_WARNING) << "Unsupported: SctpFakeNetworkInterface::SetOption."; return 0; } + virtual void SetDefaultDSCPCode(talk_base::DiffServCodePoint dscp) { + LOG(LS_WARNING) << "Unsupported: SctpFakeNetworkInterface::SetOption."; + } private: // Not owned by this class. 
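The fake network interface above also shows the interface change this patch tracks: the packet-send hooks now receive a DiffServCodePoint so outgoing media can be DSCP-marked. A cut-down, hypothetical implementer of such an interface (stand-in enum and class names, not the cricket or talk_base declarations):

#include <cstdio>
#include <vector>

// Stand-in for talk_base::DiffServCodePoint; the real values live in libjingle.
enum FakeDscp { DSCP_DEFAULT = 0, DSCP_AF41 = 34, DSCP_EF = 46 };

// Minimal shape of a network interface whose send hooks carry a DSCP marking,
// mirroring the extra parameter added to SendPacket/SendRtcp above.
class LoggingInterface {
 public:
  LoggingInterface() : default_dscp_(DSCP_DEFAULT) {}
  bool SendPacket(const std::vector<unsigned char>& packet, FakeDscp dscp) {
    std::printf("media packet: %d bytes, dscp=%d\n",
                static_cast<int>(packet.size()), dscp);
    return true;
  }
  bool SendRtcp(const std::vector<unsigned char>& packet, FakeDscp dscp) {
    std::printf("rtcp packet: %d bytes, dscp=%d\n",
                static_cast<int>(packet.size()), dscp);
    return true;
  }
  void SetDefaultDscp(FakeDscp dscp) { default_dscp_ = dscp; }

 private:
  FakeDscp default_dscp_;
};

int main() {
  LoggingInterface net;
  net.SetDefaultDscp(DSCP_AF41);
  net.SendPacket(std::vector<unsigned char>(1200, 0), DSCP_AF41);
  net.SendRtcp(std::vector<unsigned char>(64, 0), DSCP_DEFAULT);
  return 0;
}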
diff --git a/chromium/third_party/libjingle/source/talk/media/testdata/1.frame_plus_1.byte b/chromium/third_party/libjingle/source/talk/media/testdata/1.frame_plus_1.byte Binary files differdeleted file mode 100644 index b619edeed23..00000000000 --- a/chromium/third_party/libjingle/source/talk/media/testdata/1.frame_plus_1.byte +++ /dev/null diff --git a/chromium/third_party/libjingle/source/talk/media/testdata/captured-320x240-2s-48.frames b/chromium/third_party/libjingle/source/talk/media/testdata/captured-320x240-2s-48.frames Binary files differdeleted file mode 100644 index 292a1700f15..00000000000 --- a/chromium/third_party/libjingle/source/talk/media/testdata/captured-320x240-2s-48.frames +++ /dev/null diff --git a/chromium/third_party/libjingle/source/talk/media/testdata/h264-svc-99-640x360.rtpdump b/chromium/third_party/libjingle/source/talk/media/testdata/h264-svc-99-640x360.rtpdump Binary files differdeleted file mode 100644 index ffa521df066..00000000000 --- a/chromium/third_party/libjingle/source/talk/media/testdata/h264-svc-99-640x360.rtpdump +++ /dev/null diff --git a/chromium/third_party/libjingle/source/talk/media/testdata/video.rtpdump b/chromium/third_party/libjingle/source/talk/media/testdata/video.rtpdump Binary files differdeleted file mode 100644 index 7be863e5013..00000000000 --- a/chromium/third_party/libjingle/source/talk/media/testdata/video.rtpdump +++ /dev/null diff --git a/chromium/third_party/libjingle/source/talk/media/testdata/voice.rtpdump b/chromium/third_party/libjingle/source/talk/media/testdata/voice.rtpdump Binary files differdeleted file mode 100644 index 8f0ec15c996..00000000000 --- a/chromium/third_party/libjingle/source/talk/media/testdata/voice.rtpdump +++ /dev/null diff --git a/chromium/third_party/libjingle/source/talk/media/webrtc/fakewebrtcvideoengine.h b/chromium/third_party/libjingle/source/talk/media/webrtc/fakewebrtcvideoengine.h index df539048105..31de172a211 100644 --- a/chromium/third_party/libjingle/source/talk/media/webrtc/fakewebrtcvideoengine.h +++ b/chromium/third_party/libjingle/source/talk/media/webrtc/fakewebrtcvideoengine.h @@ -787,10 +787,6 @@ class FakeWebRtcVideoEngine // Not using WEBRTC_STUB due to bool return value virtual bool IsIPv6Enabled(int channel) { return true; } WEBRTC_STUB(SetMTU, (int, unsigned int)); - WEBRTC_STUB(SetPacketTimeoutNotification, (const int, bool, int)); - WEBRTC_STUB(RegisterObserver, (const int, webrtc::ViENetworkObserver&)); - WEBRTC_STUB(SetPeriodicDeadOrAliveStatus, (const int, const bool, - const unsigned int)); // webrtc::ViERender WEBRTC_STUB(RegisterVideoRenderModule, (webrtc::VideoRender&)); diff --git a/chromium/third_party/libjingle/source/talk/media/webrtc/fakewebrtcvoiceengine.h b/chromium/third_party/libjingle/source/talk/media/webrtc/fakewebrtcvoiceengine.h index 4ed38d1d3f6..c3cd786009d 100644 --- a/chromium/third_party/libjingle/source/talk/media/webrtc/fakewebrtcvoiceengine.h +++ b/chromium/third_party/libjingle/source/talk/media/webrtc/fakewebrtcvoiceengine.h @@ -266,9 +266,6 @@ class FakeWebRtcVoiceEngine virtual webrtc::AudioProcessing* audio_processing() OVERRIDE { return NULL; } -#ifndef USE_WEBRTC_DEV_BRANCH - WEBRTC_STUB(MaxNumOfChannels, ()); -#endif WEBRTC_FUNC(CreateChannel, ()) { if (fail_create_channel_) { return -1; @@ -619,18 +616,6 @@ class FakeWebRtcVoiceEngine } WEBRTC_STUB(ReceivedRTCPPacket, (int channel, const void* data, unsigned int length)); - // Not using WEBRTC_STUB due to bool return value - WEBRTC_STUB(SetPacketTimeoutNotification, (int channel, bool 
enable, - int timeoutSeconds)); - WEBRTC_STUB(GetPacketTimeoutNotification, (int channel, bool& enable, - int& timeoutSeconds)); - WEBRTC_STUB(RegisterDeadOrAliveObserver, (int channel, - webrtc::VoEConnectionObserver& observer)); - WEBRTC_STUB(DeRegisterDeadOrAliveObserver, (int channel)); - WEBRTC_STUB(GetPeriodicDeadOrAliveStatus, (int channel, bool& enabled, - int& sampleTimeSeconds)); - WEBRTC_STUB(SetPeriodicDeadOrAliveStatus, (int channel, bool enable, - int sampleTimeSeconds)); // webrtc::VoERTP_RTCP WEBRTC_STUB(RegisterRTPObserver, (int channel, @@ -751,7 +736,7 @@ class FakeWebRtcVoiceEngine // webrtc::VoEVideoSync WEBRTC_STUB(GetPlayoutBufferSize, (int& bufferMs)); WEBRTC_STUB(GetPlayoutTimestamp, (int channel, unsigned int& timestamp)); - WEBRTC_STUB(GetRtpRtcp, (int, webrtc::RtpRtcp*&)); + WEBRTC_STUB(GetRtpRtcp, (int, webrtc::RtpRtcp**, webrtc::RtpReceiver**)); WEBRTC_STUB(SetInitTimestamp, (int channel, unsigned int timestamp)); WEBRTC_STUB(SetInitSequenceNumber, (int channel, short sequenceNumber)); WEBRTC_STUB(SetMinimumPlayoutDelay, (int channel, int delayMs)); diff --git a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtctexturevideoframe.h b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtctexturevideoframe.h index 05b50f74c28..691c8142558 100644 --- a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtctexturevideoframe.h +++ b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtctexturevideoframe.h @@ -31,14 +31,7 @@ #include "talk/base/refcount.h" #include "talk/base/scoped_ref_ptr.h" #include "talk/media/base/videoframe.h" -#ifdef USE_WEBRTC_DEV_BRANCH #include "webrtc/common_video/interface/native_handle.h" -#else -#include "webrtc/common_video/interface/i420_video_frame.h" -// Define NativeHandle to an existing type so we don't need to add lots of -// USE_WEBRTC_DEV_BRANCH. -#define NativeHandle I420VideoFrame -#endif namespace cricket { diff --git a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoengine.cc b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoengine.cc index 873b249b70f..fd7e5bfa3f6 100644 --- a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoengine.cc +++ b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoengine.cc @@ -221,9 +221,7 @@ class WebRtcRenderAdapter : public webrtc::ExternalRenderer { virtual int DeliverFrame(unsigned char* buffer, int buffer_size, uint32_t time_stamp, int64_t render_time -#ifdef USE_WEBRTC_DEV_BRANCH , void* handle -#endif ) { talk_base::CritScope cs(&crit_); frame_rate_tracker_.Update(1); @@ -238,17 +236,13 @@ class WebRtcRenderAdapter : public webrtc::ExternalRenderer { talk_base::kNumNanosecsPerMillisec; // Send the rtp timestamp to renderer as the VideoFrame timestamp. // and the render timestamp as the VideoFrame elapsed_time. 
-#ifdef USE_WEBRTC_DEV_BRANCH if (handle == NULL) { -#endif return DeliverBufferFrame(buffer, buffer_size, render_time_stamp_in_ns, rtp_time_stamp_in_ns); -#ifdef USE_WEBRTC_DEV_BRANCH } else { return DeliverTextureFrame(handle, render_time_stamp_in_ns, rtp_time_stamp_in_ns); } -#endif } virtual bool IsTextureSupported() { return true; } @@ -465,6 +459,46 @@ class WebRtcVideoChannelRecvInfo { DecoderMap registered_decoders_; }; +class WebRtcOveruseObserver : public webrtc::CpuOveruseObserver { + public: + explicit WebRtcOveruseObserver(CoordinatedVideoAdapter* video_adapter) + : video_adapter_(video_adapter), + enabled_(false) { + } + + // TODO(mflodman): Consider sending resolution as part of event, to let + // adapter know what resolution the request is based on. Helps eliminate stale + // data, race conditions. + virtual void OveruseDetected() OVERRIDE { + talk_base::CritScope cs(&crit_); + if (!enabled_) { + return; + } + + video_adapter_->OnCpuResolutionRequest(CoordinatedVideoAdapter::DOWNGRADE); + } + + virtual void NormalUsage() OVERRIDE { + talk_base::CritScope cs(&crit_); + if (!enabled_) { + return; + } + + video_adapter_->OnCpuResolutionRequest(CoordinatedVideoAdapter::UPGRADE); + } + + void Enable(bool enable) { + talk_base::CritScope cs(&crit_); + enabled_ = enable; + } + + private: + CoordinatedVideoAdapter* video_adapter_; + bool enabled_; + talk_base::CriticalSection crit_; +}; + + class WebRtcVideoChannelSendInfo : public sigslot::has_slots<> { public: typedef std::map<int, webrtc::VideoEncoder*> EncoderMap; // key: payload type @@ -481,6 +515,7 @@ class WebRtcVideoChannelSendInfo : public sigslot::has_slots<> { capturer_updated_(false), interval_(0), video_adapter_(new CoordinatedVideoAdapter) { + overuse_observer_.reset(new WebRtcOveruseObserver(video_adapter_.get())); SignalCpuAdaptationUnable.repeat(video_adapter_->SignalCpuAdaptationUnable); if (cpu_monitor) { cpu_monitor->SignalUpdate.connect( @@ -534,6 +569,9 @@ class WebRtcVideoChannelSendInfo : public sigslot::has_slots<> { int CurrentAdaptReason() const { return video_adapter_->adapt_reason(); } + webrtc::CpuOveruseObserver* overuse_observer() { + return overuse_observer_.get(); + } StreamParams* stream_params() { return stream_params_.get(); } void set_stream_params(const StreamParams& sp) { @@ -572,7 +610,7 @@ class WebRtcVideoChannelSendInfo : public sigslot::has_slots<> { } void ApplyCpuOptions(const VideoOptions& options) { - bool cpu_adapt, cpu_smoothing; + bool cpu_adapt, cpu_smoothing, adapt_third; float low, med, high; if (options.adapt_input_to_cpu_usage.Get(&cpu_adapt)) { video_adapter_->set_cpu_adaptation(cpu_adapt); @@ -589,7 +627,16 @@ class WebRtcVideoChannelSendInfo : public sigslot::has_slots<> { if (options.system_high_adaptation_threshhold.Get(&high)) { video_adapter_->set_high_system_threshold(high); } + if (options.video_adapt_third.Get(&adapt_third)) { + video_adapter_->set_scale_third(adapt_third); + } } + + void SetCpuOveruseDetection(bool enable) { + overuse_observer_->Enable(enable); + video_adapter_->set_cpu_adaptation(enable); + } + void ProcessFrame(const VideoFrame& original_frame, bool mute, VideoFrame** processed_frame) { if (!mute) { @@ -642,6 +689,7 @@ class WebRtcVideoChannelSendInfo : public sigslot::has_slots<> { int64 interval_; talk_base::scoped_ptr<CoordinatedVideoAdapter> video_adapter_; + talk_base::scoped_ptr<WebRtcOveruseObserver> overuse_observer_; }; const WebRtcVideoEngine::VideoCodecPref @@ -1175,7 +1223,7 @@ static void AddDefaultFeedbackParams(VideoCodec* codec) 
{ } // Rebuilds the codec list to be only those that are less intensive -// than the specified codec. +// than the specified codec. Prefers internal codec over external. bool WebRtcVideoEngine::RebuildCodecList(const VideoCodec& in_codec) { if (!FindCodec(in_codec)) return false; @@ -1183,32 +1231,12 @@ bool WebRtcVideoEngine::RebuildCodecList(const VideoCodec& in_codec) { video_codecs_.clear(); bool found = false; - std::set<std::string> external_codec_names; - if (encoder_factory_) { - const std::vector<WebRtcVideoEncoderFactory::VideoCodec>& codecs = - encoder_factory_->codecs(); - for (size_t i = 0; i < codecs.size(); ++i) { - if (!found) - found = (in_codec.name == codecs[i].name); - VideoCodec codec( - GetExternalVideoPayloadType(static_cast<int>(i)), - codecs[i].name, - codecs[i].max_width, - codecs[i].max_height, - codecs[i].max_fps, - static_cast<int>(codecs.size() + ARRAY_SIZE(kVideoCodecPrefs) - i)); - AddDefaultFeedbackParams(&codec); - video_codecs_.push_back(codec); - external_codec_names.insert(codecs[i].name); - } - } + std::set<std::string> internal_codec_names; for (size_t i = 0; i < ARRAY_SIZE(kVideoCodecPrefs); ++i) { const VideoCodecPref& pref(kVideoCodecPrefs[i]); if (!found) found = (in_codec.name == pref.name); - bool is_external_codec = external_codec_names.find(pref.name) != - external_codec_names.end(); - if (found && !is_external_codec) { + if (found) { VideoCodec codec(pref.payload_type, pref.name, in_codec.width, in_codec.height, in_codec.framerate, static_cast<int>(ARRAY_SIZE(kVideoCodecPrefs) - i)); @@ -1216,6 +1244,28 @@ bool WebRtcVideoEngine::RebuildCodecList(const VideoCodec& in_codec) { AddDefaultFeedbackParams(&codec); } video_codecs_.push_back(codec); + internal_codec_names.insert(codec.name); + } + } + if (encoder_factory_) { + const std::vector<WebRtcVideoEncoderFactory::VideoCodec>& codecs = + encoder_factory_->codecs(); + for (size_t i = 0; i < codecs.size(); ++i) { + bool is_internal_codec = internal_codec_names.find(codecs[i].name) != + internal_codec_names.end(); + if (!is_internal_codec) { + if (!found) + found = (in_codec.name == codecs[i].name); + VideoCodec codec( + GetExternalVideoPayloadType(static_cast<int>(i)), + codecs[i].name, + codecs[i].max_width, + codecs[i].max_height, + codecs[i].max_fps, + static_cast<int>(codecs.size() + ARRAY_SIZE(kVideoCodecPrefs) - i)); + AddDefaultFeedbackParams(&codec); + video_codecs_.push_back(codec); + } } } ASSERT(found); @@ -2193,7 +2243,10 @@ bool WebRtcVideoMediaChannel::GetStats(VideoMediaInfo* info) { unsigned int ssrc; // Get receiver statistics and build VideoReceiverInfo, if we have data. - if (engine_->vie()->rtp()->GetRemoteSSRC(channel->channel_id(), ssrc) != 0) + // Skip the default channel (ssrc == 0). 
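Stepping back to the RebuildCodecList change earlier in this file: the ordering is now internal codecs first, with an external encoder-factory codec appended only when its name does not collide with an internal one. A stand-alone sketch of that merge, using plain strings in place of cricket::VideoCodec:

#include <cstdio>
#include <set>
#include <string>
#include <vector>

// Merge internal and external codec names: internal codecs keep their order
// and come first, external codecs are appended unless the name is taken.
static std::vector<std::string> MergeCodecs(
    const std::vector<std::string>& internal_codecs,
    const std::vector<std::string>& external_codecs) {
  std::vector<std::string> merged;
  std::set<std::string> internal_names;
  for (size_t i = 0; i < internal_codecs.size(); ++i) {
    merged.push_back(internal_codecs[i]);
    internal_names.insert(internal_codecs[i]);
  }
  for (size_t i = 0; i < external_codecs.size(); ++i) {
    if (internal_names.find(external_codecs[i]) == internal_names.end()) {
      merged.push_back(external_codecs[i]);  // External codecs end up last.
    }
  }
  return merged;
}

int main() {
  std::vector<std::string> internal_list;
  internal_list.push_back("VP8");
  internal_list.push_back("red");
  std::vector<std::string> external_list;
  external_list.push_back("VP8");      // Ignored: collides with the internal VP8.
  external_list.push_back("GENERIC");  // Appended last, as the unit tests expect.
  std::vector<std::string> merged = MergeCodecs(internal_list, external_list);
  for (size_t i = 0; i < merged.size(); ++i)
    std::printf("%s\n", merged[i].c_str());
  return 0;
}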
+ if (engine_->vie()->rtp()->GetRemoteSSRC( + channel->channel_id(), ssrc) != 0 || + ssrc == 0) continue; unsigned int bytes_sent, packets_sent, bytes_recv, packets_recv; @@ -2481,6 +2534,9 @@ bool WebRtcVideoMediaChannel::SetOptions(const VideoOptions &options) { bool buffer_latency_changed = options.buffered_mode_latency.IsSet() && (options_.buffered_mode_latency != options.buffered_mode_latency); + bool cpu_overuse_detection_changed = options.cpu_overuse_detection.IsSet() && + (options_.cpu_overuse_detection != options.cpu_overuse_detection); + bool conference_mode_turned_off = false; if (options_.conference_mode.IsSet() && options.conference_mode.IsSet() && options_.conference_mode.GetWithDefaultIfUnset(false) && @@ -2558,6 +2614,15 @@ bool WebRtcVideoMediaChannel::SetOptions(const VideoOptions &options) { } } } + if (cpu_overuse_detection_changed) { + bool cpu_overuse_detection = + options_.cpu_overuse_detection.GetWithDefaultIfUnset(false); + for (SendChannelMap::iterator iter = send_channels_.begin(); + iter != send_channels_.end(); ++iter) { + WebRtcVideoChannelSendInfo* send_channel = iter->second; + send_channel->SetCpuOveruseDetection(cpu_overuse_detection); + } + } return true; } @@ -2702,8 +2767,8 @@ bool WebRtcVideoMediaChannel::SendFrame( frame_i420.y_pitch = frame_out->GetYPitch(); frame_i420.u_pitch = frame_out->GetUPitch(); frame_i420.v_pitch = frame_out->GetVPitch(); - frame_i420.width = static_cast<unsigned short>(frame_out->GetWidth()); - frame_i420.height = static_cast<unsigned short>(frame_out->GetHeight()); + frame_i420.width = static_cast<uint16>(frame_out->GetWidth()); + frame_i420.height = static_cast<uint16>(frame_out->GetHeight()); int64 timestamp_ntp_ms = 0; // TODO(justinlin): Reenable after Windows issues with clock drift are fixed. @@ -2966,10 +3031,19 @@ bool WebRtcVideoMediaChannel::ConfigureSending(int channel_id, new WebRtcVideoChannelSendInfo(channel_id, vie_capture, external_capture, engine()->cpu_monitor())); + if (engine()->vie()->base()->RegisterCpuOveruseObserver( + channel_id, send_channel->overuse_observer())) { + LOG_RTCERR1(RegisterCpuOveruseObserver, channel_id); + return false; + } send_channel->ApplyCpuOptions(options_); send_channel->SignalCpuAdaptationUnable.connect(this, &WebRtcVideoMediaChannel::OnCpuAdaptationUnable); + if (options_.cpu_overuse_detection.GetWithDefaultIfUnset(false)) { + send_channel->SetCpuOveruseDetection(true); + } + // Register encoder observer for outgoing framerate and bitrate. if (engine()->vie()->codec()->RegisterEncoderObserver( channel_id, *send_channel->encoder_observer()) != 0) { diff --git a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoengine_unittest.cc b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoengine_unittest.cc index 840fcdd0868..04662caf7c1 100644 --- a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoengine_unittest.cc +++ b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoengine_unittest.cc @@ -86,8 +86,7 @@ class FakeViEWrapper : public cricket::ViEWrapper { // Test fixture to test WebRtcVideoEngine with a fake webrtc::VideoEngine. // Useful for testing failure paths. 
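Taken together, the overuse pieces in the hunks above work like this: the cpu_overuse_detection option enables a per-send-channel WebRtcOveruseObserver, and its OveruseDetected and NormalUsage callbacks are forwarded to the video adapter as DOWNGRADE and UPGRADE resolution requests. A condensed stand-alone model of that flow (stand-in class names; the real observer also takes a critical section and is registered with ViEBase):

#include <cstdio>

// Stand-in for CoordinatedVideoAdapter's request enum and entry point.
enum AdaptRequest { UPGRADE, KEEP, DOWNGRADE };

class FakeAdapter {
 public:
  void OnCpuResolutionRequest(AdaptRequest request) {
    std::printf("adapter asked to %s\n",
                request == DOWNGRADE ? "downgrade" : "upgrade");
  }
};

// Mirrors the shape of WebRtcOveruseObserver: callbacks are dropped until the
// observer is enabled, which is what SetCpuOveruseDetection(true) does.
class OveruseObserver {
 public:
  explicit OveruseObserver(FakeAdapter* adapter)
      : adapter_(adapter), enabled_(false) {}
  void Enable(bool enable) { enabled_ = enable; }
  void OveruseDetected() {
    if (enabled_)
      adapter_->OnCpuResolutionRequest(DOWNGRADE);
  }
  void NormalUsage() {
    if (enabled_)
      adapter_->OnCpuResolutionRequest(UPGRADE);
  }

 private:
  FakeAdapter* adapter_;
  bool enabled_;
};

int main() {
  FakeAdapter adapter;
  OveruseObserver observer(&adapter);
  observer.OveruseDetected();  // Ignored: detection not enabled yet.
  observer.Enable(true);       // Corresponds to cpu_overuse_detection=true.
  observer.OveruseDetected();  // Now a downgrade resolution request.
  observer.NormalUsage();      // And an upgrade once load recovers.
  return 0;
}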
-class WebRtcVideoEngineTestFake : - public testing::Test, +class WebRtcVideoEngineTestFake : public testing::Test, public sigslot::has_slots<> { public: WebRtcVideoEngineTestFake() @@ -1759,18 +1758,57 @@ TEST_F(WebRtcVideoEngineTestFake, FeedbackParamsForNonVP8) { EXPECT_TRUE(SetupEngine()); std::vector<cricket::VideoCodec> codecs(engine_.codecs()); - EXPECT_EQ("GENERIC", codecs[0].name); - EXPECT_TRUE(codecs[0].HasFeedbackParam( + // The external codec will appear at last. + size_t pos = codecs.size() - 1; + EXPECT_EQ("GENERIC", codecs[pos].name); + EXPECT_TRUE(codecs[pos].HasFeedbackParam( cricket::FeedbackParam(cricket::kRtcpFbParamNack, cricket::kParamValueEmpty))); - EXPECT_TRUE(codecs[0].HasFeedbackParam( + EXPECT_TRUE(codecs[pos].HasFeedbackParam( cricket::FeedbackParam(cricket::kRtcpFbParamRemb, cricket::kParamValueEmpty))); - EXPECT_TRUE(codecs[0].HasFeedbackParam( + EXPECT_TRUE(codecs[pos].HasFeedbackParam( cricket::FeedbackParam(cricket::kRtcpFbParamCcm, cricket::kRtcpFbCcmParamFir))); } +// Test external codec with be added to the end of the supported codec list. +TEST_F(WebRtcVideoEngineTestFake, ExternalCodecAddedToTheEnd) { + EXPECT_TRUE(SetupEngine()); + + std::vector<cricket::VideoCodec> codecs(engine_.codecs()); + EXPECT_EQ("VP8", codecs[0].name); + + encoder_factory_.AddSupportedVideoCodecType(webrtc::kVideoCodecGeneric, + "GENERIC"); + engine_.SetExternalEncoderFactory(&encoder_factory_); + encoder_factory_.NotifyCodecsAvailable(); + + codecs = engine_.codecs(); + // The external codec will appear at last. + EXPECT_EQ("GENERIC", codecs[codecs.size() - 1].name); +} + +// Test that external codec with be ignored if it has the same name as one of +// the internal codecs. +TEST_F(WebRtcVideoEngineTestFake, ExternalCodecIgnored) { + EXPECT_TRUE(SetupEngine()); + + std::vector<cricket::VideoCodec> internal_codecs(engine_.codecs()); + EXPECT_EQ("VP8", internal_codecs[0].name); + + encoder_factory_.AddSupportedVideoCodecType(webrtc::kVideoCodecVP8, "VP8"); + engine_.SetExternalEncoderFactory(&encoder_factory_); + encoder_factory_.NotifyCodecsAvailable(); + + std::vector<cricket::VideoCodec> codecs = engine_.codecs(); + EXPECT_EQ("VP8", codecs[0].name); + EXPECT_EQ(internal_codecs[0].height, codecs[0].height); + EXPECT_EQ(internal_codecs[0].width, codecs[0].width); + // Verify the last codec is not the external codec. + EXPECT_NE("VP8", codecs[codecs.size() - 1].name); +} + TEST_F(WebRtcVideoEngineTestFake, UpdateEncoderCodecsAfterSetFactory) { engine_.SetExternalEncoderFactory(&encoder_factory_); EXPECT_TRUE(SetupEngine()); diff --git a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvoiceengine.cc b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvoiceengine.cc index 433138562bc..86860389314 100644 --- a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvoiceengine.cc +++ b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvoiceengine.cc @@ -125,16 +125,19 @@ static const int kOpusStereoBitrate = 64000; static const int kOpusMinBitrate = 6000; static const int kOpusMaxBitrate = 510000; -#if defined(CHROMEOS) -// Ensure we open the file in a writeable path on ChromeOS. This workaround -// can be removed when it's possible to specify a filename for audio option -// based AEC dumps. +// Ensure we open the file in a writeable path on ChromeOS and Android. This +// workaround can be removed when it's possible to specify a filename for audio +// option based AEC dumps. 
// // TODO(grunell): Use a string in the options instead of hardcoding it here // and let the embedder choose the filename (crbug.com/264223). // -// NOTE(ajm): Don't use this hardcoded /tmp path on non-ChromeOS platforms. +// NOTE(ajm): Don't use hardcoded paths on platforms not explicitly specified +// below. +#if defined(CHROMEOS) static const char kAecDumpByAudioOptionFilename[] = "/tmp/audio.aecdump"; +#elif defined(ANDROID) +static const char kAecDumpByAudioOptionFilename[] = "/sdcard/audio.aecdump"; #else static const char kAecDumpByAudioOptionFilename[] = "audio.aecdump"; #endif @@ -1482,6 +1485,7 @@ WebRtcVoiceMediaChannel::WebRtcVoiceMediaChannel(WebRtcVoiceEngine *engine) desired_playout_(false), nack_enabled_(false), playout_(false), + typing_noise_detected_(false), desired_send_(SEND_NOTHING), send_(SEND_NOTHING), default_receive_ssrc_(0) { @@ -1619,18 +1623,11 @@ bool WebRtcVoiceMediaChannel::SetRecvCodecs( } bool WebRtcVoiceMediaChannel::SetSendCodecs( - const std::vector<AudioCodec>& codecs) { - // TODO(xians): Break down this function into SetSendCodecs(channel, codecs) - // to support per-channel codecs. - - // Disable DTMF, VAD, and FEC unless we know the other side wants them. - dtmf_allowed_ = false; - for (ChannelMap::iterator iter = send_channels_.begin(); - iter != send_channels_.end(); ++iter) { - engine()->voe()->codec()->SetVADStatus(iter->second.channel, false); - engine()->voe()->rtp()->SetNACKStatus(iter->second.channel, false, 0); - engine()->voe()->rtp()->SetFECStatus(iter->second.channel, false); - } + int channel, const std::vector<AudioCodec>& codecs) { + // Disable VAD, and FEC unless we know the other side wants them. + engine()->voe()->codec()->SetVADStatus(channel, false); + engine()->voe()->rtp()->SetNACKStatus(channel, false, 0); + engine()->voe()->rtp()->SetFECStatus(channel, false); // Scan through the list to figure out the codec to use for sending, along // with the proper configuration for VAD and DTMF. @@ -1687,16 +1684,11 @@ bool WebRtcVoiceMediaChannel::SetSendCodecs( // about it. if (_stricmp(it->name.c_str(), "telephone-event") == 0 || _stricmp(it->name.c_str(), "audio/telephone-event") == 0) { - for (ChannelMap::iterator iter = send_channels_.begin(); - iter != send_channels_.end(); ++iter) { - if (engine()->voe()->dtmf()->SetSendTelephoneEventPayloadType( - iter->second.channel, it->id) == -1) { - LOG_RTCERR2(SetSendTelephoneEventPayloadType, - iter->second.channel, it->id); - return false; - } + if (engine()->voe()->dtmf()->SetSendTelephoneEventPayloadType( + channel, it->id) == -1) { + LOG_RTCERR2(SetSendTelephoneEventPayloadType, channel, it->id); + return false; } - dtmf_allowed_ = true; } // Turn voice activity detection/comfort noise on if supported. @@ -1719,35 +1711,30 @@ bool WebRtcVoiceMediaChannel::SetSendCodecs( << " not supported."; continue; } - // Loop through the existing send channels and set the CN payloadtype - // and the VAD status. - for (ChannelMap::iterator iter = send_channels_.begin(); - iter != send_channels_.end(); ++iter) { - int channel = iter->second.channel; - // The CN payload type for 8000 Hz clockrate is fixed at 13. - if (cn_freq != webrtc::kFreq8000Hz) { - if (engine()->voe()->codec()->SetSendCNPayloadType( - channel, it->id, cn_freq) == -1) { - LOG_RTCERR3(SetSendCNPayloadType, channel, it->id, cn_freq); - // TODO(ajm): This failure condition will be removed from VoE. - // Restore the return here when we update to a new enough webrtc. 
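The SetSendCodecs() refactor in the surrounding webrtcvoiceengine.cc hunks splits the old all-channels loop into a per-channel worker, SetSendCodecs(channel, codecs), plus a thin wrapper that caches the codec list and applies it to each existing send channel; AddSendStream() then configures channels created later from that cache. A compressed sketch of that shape, with an invented FakeVoiceMediaChannel standing in for the VoE plumbing:

#include <cstdio>
#include <map>
#include <string>
#include <vector>

struct AudioCodec { std::string name; int id; };

class FakeVoiceMediaChannel {
 public:
  bool AddSendStream(unsigned int ssrc) {
    int channel = next_channel_++;
    channels_[ssrc] = channel;
    // Late-created channels are configured from the cached codec list.
    return send_codecs_.empty() || SetSendCodecs(channel, send_codecs_);
  }

  // Public entry point: cache the list, then apply it channel by channel.
  bool SetSendCodecs(const std::vector<AudioCodec>& codecs) {
    send_codecs_ = codecs;
    for (auto& kv : channels_)
      if (!SetSendCodecs(kv.second, codecs)) return false;
    return true;
  }

 private:
  // Per-channel worker: everything that used to loop over all channels lives here.
  bool SetSendCodecs(int channel, const std::vector<AudioCodec>& codecs) {
    for (const AudioCodec& c : codecs)
      std::printf("channel %d: offering %s (pt %d)\n", channel, c.name.c_str(), c.id);
    return true;
  }

  int next_channel_ = 0;
  std::map<unsigned int, int> channels_;  // ssrc -> channel id
  std::vector<AudioCodec> send_codecs_;   // cached for channels added later
};

int main() {
  FakeVoiceMediaChannel ch;
  ch.SetSendCodecs({{"ISAC", 103}, {"PCMU", 0}});  // no channels yet: just cached
  ch.AddSendStream(0x1234);                        // applies the cached codecs
}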
- // - // Not returning false because the SetSendCNPayloadType will fail if - // the channel is already sending. - // This can happen if the remote description is applied twice, for - // example in the case of ROAP on top of JSEP, where both side will - // send the offer. - } + // Set the CN payloadtype and the VAD status. + // The CN payload type for 8000 Hz clockrate is fixed at 13. + if (cn_freq != webrtc::kFreq8000Hz) { + if (engine()->voe()->codec()->SetSendCNPayloadType( + channel, it->id, cn_freq) == -1) { + LOG_RTCERR3(SetSendCNPayloadType, channel, it->id, cn_freq); + // TODO(ajm): This failure condition will be removed from VoE. + // Restore the return here when we update to a new enough webrtc. + // + // Not returning false because the SetSendCNPayloadType will fail if + // the channel is already sending. + // This can happen if the remote description is applied twice, for + // example in the case of ROAP on top of JSEP, where both side will + // send the offer. } + } - // Only turn on VAD if we have a CN payload type that matches the - // clockrate for the codec we are going to use. - if (it->clockrate == send_codec.plfreq) { - LOG(LS_INFO) << "Enabling VAD"; - if (engine()->voe()->codec()->SetVADStatus(channel, true) == -1) { - LOG_RTCERR2(SetVADStatus, channel, true); - return false; - } + // Only turn on VAD if we have a CN payload type that matches the + // clockrate for the codec we are going to use. + if (it->clockrate == send_codec.plfreq) { + LOG(LS_INFO) << "Enabling VAD"; + if (engine()->voe()->codec()->SetVADStatus(channel, true) == -1) { + LOG_RTCERR2(SetVADStatus, channel, true); + return false; } } } @@ -1767,28 +1754,22 @@ bool WebRtcVoiceMediaChannel::SetSendCodecs( // Enable redundant encoding of the specified codec. Treat any // failure as a fatal internal error. LOG(LS_INFO) << "Enabling FEC"; - for (ChannelMap::iterator iter = send_channels_.begin(); - iter != send_channels_.end(); ++iter) { - if (engine()->voe()->rtp()->SetFECStatus(iter->second.channel, - true, it->id) == -1) { - LOG_RTCERR3(SetFECStatus, iter->second.channel, true, it->id); - return false; - } + if (engine()->voe()->rtp()->SetFECStatus(channel, true, it->id) == -1) { + LOG_RTCERR3(SetFECStatus, channel, true, it->id); + return false; } } else { send_codec = voe_codec; nack_enabled_ = IsNackEnabled(*it); - SetNack(send_channels_, nack_enabled_); + SetNack(channel, nack_enabled_); } first = false; // Set the codec immediately, since SetVADStatus() depends on whether // the current codec is mono or stereo. - if (!SetSendCodec(send_codec)) + if (!SetSendCodec(channel, send_codec)) return false; } } - SetNack(receive_channels_, nack_enabled_); - // If we're being asked to set an empty list of codecs, due to a buggy client, // choose the most common format: PCMU @@ -1796,10 +1777,39 @@ bool WebRtcVoiceMediaChannel::SetSendCodecs( LOG(LS_WARNING) << "Received empty list of codecs; using PCMU/8000"; AudioCodec codec(0, "PCMU", 8000, 0, 1, 0); engine()->FindWebRtcCodec(codec, &send_codec); - if (!SetSendCodec(send_codec)) + if (!SetSendCodec(channel, send_codec)) return false; } + // Always update the |send_codec_| to the currently set send codec. + send_codec_.reset(new webrtc::CodecInst(send_codec)); + + return true; +} + +bool WebRtcVoiceMediaChannel::SetSendCodecs( + const std::vector<AudioCodec>& codecs) { + dtmf_allowed_ = false; + for (std::vector<AudioCodec>::const_iterator it = codecs.begin(); + it != codecs.end(); ++it) { + // Find the DTMF telephone event "codec". 
+ if (_stricmp(it->name.c_str(), "telephone-event") == 0 || + _stricmp(it->name.c_str(), "audio/telephone-event") == 0) { + dtmf_allowed_ = true; + } + } + + // Cache the codecs in order to configure the channel created later. + send_codecs_ = codecs; + for (ChannelMap::iterator iter = send_channels_.begin(); + iter != send_channels_.end(); ++iter) { + if (!SetSendCodecs(iter->second.channel, codecs)) { + return false; + } + } + + SetNack(receive_channels_, nack_enabled_); + return true; } @@ -1807,17 +1817,16 @@ void WebRtcVoiceMediaChannel::SetNack(const ChannelMap& channels, bool nack_enabled) { for (ChannelMap::const_iterator it = channels.begin(); it != channels.end(); ++it) { - SetNack(it->first, it->second.channel, nack_enabled_); + SetNack(it->second.channel, nack_enabled); } } -void WebRtcVoiceMediaChannel::SetNack(uint32 ssrc, int channel, - bool nack_enabled) { +void WebRtcVoiceMediaChannel::SetNack(int channel, bool nack_enabled) { if (nack_enabled) { - LOG(LS_INFO) << "Enabling NACK for stream " << ssrc; + LOG(LS_INFO) << "Enabling NACK for channel " << channel; engine()->voe()->rtp()->SetNACKStatus(channel, true, kNackMaxPackets); } else { - LOG(LS_INFO) << "Disabling NACK for stream " << ssrc; + LOG(LS_INFO) << "Disabling NACK for channel " << channel; engine()->voe()->rtp()->SetNACKStatus(channel, false, 0); } } @@ -1832,10 +1841,6 @@ bool WebRtcVoiceMediaChannel::SetSendCodec( return false; } - // All SetSendCodec calls were successful. Update the global state - // accordingly. - send_codec_.reset(new webrtc::CodecInst(send_codec)); - return true; } @@ -2085,8 +2090,8 @@ bool WebRtcVoiceMediaChannel::AddSendStream(const StreamParams& sp) { return false; } - // Set the current codec to be used for the new channel. - if (send_codec_ && !SetSendCodec(channel, *send_codec_)) + // Set the current codecs to be used for the new channel. + if (!send_codecs_.empty() && !SetSendCodecs(channel, send_codecs_)) return false; return ChangeSend(channel, desired_send_); @@ -2210,7 +2215,7 @@ bool WebRtcVoiceMediaChannel::AddRecvStream(const StreamParams& sp) { SetPlayout(voe_channel(), false); } } - SetNack(ssrc, channel, nack_enabled_); + SetNack(channel, nack_enabled_); receive_channels_.insert( std::make_pair(ssrc, WebRtcVoiceChannelInfo(channel, NULL))); @@ -2534,7 +2539,24 @@ bool WebRtcVoiceMediaChannel::InsertDtmf(uint32 ssrc, int event, // Send the event. if (flags & cricket::DF_SEND) { - int channel = (ssrc == 0) ? 
voe_channel() : GetSendChannelNum(ssrc); + int channel = -1; + if (ssrc == 0) { + bool default_channel_is_inuse = false; + for (ChannelMap::const_iterator iter = send_channels_.begin(); + iter != send_channels_.end(); ++iter) { + if (IsDefaultChannel(iter->second.channel)) { + default_channel_is_inuse = true; + break; + } + } + if (default_channel_is_inuse) { + channel = voe_channel(); + } else if (!send_channels_.empty()) { + channel = send_channels_.begin()->second.channel; + } + } else { + channel = GetSendChannelNum(ssrc); + } if (channel == -1) { LOG(LS_WARNING) << "InsertDtmf - The specified ssrc " << ssrc << " is not in use."; @@ -2784,6 +2806,7 @@ bool WebRtcVoiceMediaChannel::GetStats(VoiceMediaInfo* info) { sinfo.echo_return_loss_enhancement = echo_return_loss_enhancement; sinfo.echo_delay_median_ms = echo_delay_median_ms; sinfo.echo_delay_std_ms = echo_delay_std_ms; + sinfo.typing_noise_detected = typing_noise_detected_; info->senders.push_back(sinfo); } @@ -2892,6 +2915,13 @@ bool WebRtcVoiceMediaChannel::FindSsrc(int channel_num, uint32* ssrc) { } void WebRtcVoiceMediaChannel::OnError(uint32 ssrc, int error) { +#ifdef USE_WEBRTC_DEV_BRANCH + if (error == VE_TYPING_NOISE_WARNING) { + typing_noise_detected_ = true; + } else if (error == VE_TYPING_NOISE_OFF_WARNING) { + typing_noise_detected_ = false; + } +#endif SignalMediaError(ssrc, WebRtcErrorToChannelError(error)); } diff --git a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvoiceengine.h b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvoiceengine.h index 76f5be7af39..62d3bc10a25 100644 --- a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvoiceengine.h +++ b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvoiceengine.h @@ -388,7 +388,7 @@ class WebRtcVoiceMediaChannel }; typedef std::map<uint32, WebRtcVoiceChannelInfo> ChannelMap; - void SetNack(uint32 ssrc, int channel, bool nack_enabled); + void SetNack(int channel, bool nack_enabled); void SetNack(const ChannelMap& channels, bool nack_enabled); bool SetSendCodec(const webrtc::CodecInst& send_codec); bool SetSendCodec(int channel, const webrtc::CodecInst& send_codec); @@ -403,16 +403,19 @@ class WebRtcVoiceMediaChannel bool IsDefaultChannel(int channel_id) const { return channel_id == voe_channel(); } + bool SetSendCodecs(int channel, const std::vector<AudioCodec>& codecs); talk_base::scoped_ptr<WebRtcSoundclipStream> ringback_tone_; std::set<int> ringback_channels_; // channels playing ringback std::vector<AudioCodec> recv_codecs_; + std::vector<AudioCodec> send_codecs_; talk_base::scoped_ptr<webrtc::CodecInst> send_codec_; AudioOptions options_; bool dtmf_allowed_; bool desired_playout_; bool nack_enabled_; bool playout_; + bool typing_noise_detected_; SendFlags desired_send_; SendFlags send_; diff --git a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvoiceengine_unittest.cc b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvoiceengine_unittest.cc index 31596cd726d..3710e7ebc93 100644 --- a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvoiceengine_unittest.cc +++ b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvoiceengine_unittest.cc @@ -125,7 +125,7 @@ class WebRtcVoiceEngineTestFake : public testing::Test { EXPECT_TRUE(SetupEngine()); // Remove stream added in Setup, which is corresponding to default channel. 
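When InsertDtmf() above is asked to send on ssrc 0, it now picks its target channel in three steps: use the default channel if it is still registered as a send channel, otherwise fall back to the first entry in send_channels_, otherwise report failure. A small sketch of that selection with a plain std::map; the -1 sentinel mirrors the diff, the types are simplified.

#include <cstdio>
#include <map>

const int kNoChannel = -1;  // mirrors the diff's "channel = -1" convention

int PickDtmfChannel(unsigned int ssrc, int default_channel,
                    const std::map<unsigned int, int>& send_channels) {
  if (ssrc != 0) {
    // Explicit ssrc: look it up directly.
    auto it = send_channels.find(ssrc);
    return it == send_channels.end() ? kNoChannel : it->second;
  }
  // ssrc == 0: prefer the default channel if it is still used for sending.
  for (const auto& kv : send_channels)
    if (kv.second == default_channel) return default_channel;
  // Otherwise fall back to the first send channel, if any.
  return send_channels.empty() ? kNoChannel : send_channels.begin()->second;
}

int main() {
  std::map<unsigned int, int> send_channels = {{0x1234u, 7}};
  std::printf("%d\n", PickDtmfChannel(0, /*default_channel=*/3, send_channels));  // 7
  std::printf("%d\n", PickDtmfChannel(0x1234u, 3, send_channels));                // 7
  std::printf("%d\n", PickDtmfChannel(0x9999u, 3, send_channels));                // -1
}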
int default_channel_num = voe_.GetLastChannel(); - uint32 default_send_ssrc; + uint32 default_send_ssrc = 0u; EXPECT_EQ(0, voe_.GetLocalSSRC(default_channel_num, default_send_ssrc)); EXPECT_EQ(kSsrc1, default_send_ssrc); EXPECT_TRUE(channel_->RemoveSendStream(default_send_ssrc)); @@ -143,7 +143,18 @@ class WebRtcVoiceEngineTestFake : public testing::Test { engine_.Terminate(); } - void TestInsertDtmf(uint32 ssrc, int channel_id) { + void TestInsertDtmf(uint32 ssrc, bool caller) { + EXPECT_TRUE(engine_.Init(talk_base::Thread::Current())); + channel_ = engine_.CreateChannel(); + EXPECT_TRUE(channel_ != NULL); + if (caller) { + // if this is a caller, local description will be applied and add the + // send stream. + EXPECT_TRUE(channel_->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrc1))); + } + int channel_id = voe_.GetLastChannel(); + // Test we can only InsertDtmf when the other side supports telephone-event. std::vector<cricket::AudioCodec> codecs; codecs.push_back(kPcmuCodec); @@ -154,6 +165,14 @@ class WebRtcVoiceEngineTestFake : public testing::Test { codecs.push_back(kTelephoneEventCodec); EXPECT_TRUE(channel_->SetSendCodecs(codecs)); EXPECT_TRUE(channel_->CanInsertDtmf()); + + if (!caller) { + // There's no active send channel yet. + EXPECT_FALSE(channel_->InsertDtmf(ssrc, 2, 123, cricket::DF_SEND)); + EXPECT_TRUE(channel_->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrc1))); + } + // Check we fail if the ssrc is invalid. EXPECT_FALSE(channel_->InsertDtmf(-1, 1, 111, cricket::DF_SEND)); @@ -923,8 +942,8 @@ TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecOpusMaxAverageBitrate) { EXPECT_EQ(200000, gcodec.rate); } -// Test that we can enable NACK with opus. -TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecEnableNack) { +// Test that we can enable NACK with opus as caller. +TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecEnableNackAsCaller) { EXPECT_TRUE(SetupEngine()); int channel_num = voe_.GetLastChannel(); std::vector<cricket::AudioCodec> codecs; @@ -936,6 +955,26 @@ TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecEnableNack) { EXPECT_TRUE(voe_.GetNACK(channel_num)); } +// Test that we can enable NACK with opus as callee. +TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecEnableNackAsCallee) { + EXPECT_TRUE(engine_.Init(talk_base::Thread::Current())); + channel_ = engine_.CreateChannel(); + EXPECT_TRUE(channel_ != NULL); + + int channel_num = voe_.GetLastChannel(); + std::vector<cricket::AudioCodec> codecs; + codecs.push_back(kOpusCodec); + codecs[0].AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamNack, + cricket::kParamValueEmpty)); + EXPECT_FALSE(voe_.GetNACK(channel_num)); + EXPECT_TRUE(channel_->SetSendCodecs(codecs)); + EXPECT_FALSE(voe_.GetNACK(channel_num)); + + EXPECT_TRUE(channel_->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrc1))); + EXPECT_TRUE(voe_.GetNACK(channel_num)); +} + // Test that we can enable NACK on receive streams. TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecEnableNackRecvStreams) { EXPECT_TRUE(SetupEngine()); @@ -1136,8 +1175,8 @@ TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecsNoCodecs) { EXPECT_EQ(106, voe_.GetSendTelephoneEventPayloadType(channel_num)); } -// Test that we set VAD and DTMF types correctly. -TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecsCNandDTMF) { +// Test that we set VAD and DTMF types correctly as caller. 
+TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecsCNandDTMFAsCaller) { EXPECT_TRUE(SetupEngine()); int channel_num = voe_.GetLastChannel(); std::vector<cricket::AudioCodec> codecs; @@ -1163,6 +1202,39 @@ TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecsCNandDTMF) { EXPECT_EQ(98, voe_.GetSendTelephoneEventPayloadType(channel_num)); } +// Test that we set VAD and DTMF types correctly as callee. +TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecsCNandDTMFAsCallee) { + EXPECT_TRUE(engine_.Init(talk_base::Thread::Current())); + channel_ = engine_.CreateChannel(); + EXPECT_TRUE(channel_ != NULL); + + int channel_num = voe_.GetLastChannel(); + std::vector<cricket::AudioCodec> codecs; + codecs.push_back(kIsacCodec); + codecs.push_back(kPcmuCodec); + // TODO(juberti): cn 32000 + codecs.push_back(kCn16000Codec); + codecs.push_back(kCn8000Codec); + codecs.push_back(kTelephoneEventCodec); + codecs.push_back(kRedCodec); + codecs[0].id = 96; + codecs[2].id = 97; // wideband CN + codecs[4].id = 98; // DTMF + EXPECT_TRUE(channel_->SetSendCodecs(codecs)); + EXPECT_TRUE(channel_->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrc1))); + + webrtc::CodecInst gcodec; + EXPECT_EQ(0, voe_.GetSendCodec(channel_num, gcodec)); + EXPECT_EQ(96, gcodec.pltype); + EXPECT_STREQ("ISAC", gcodec.plname); + EXPECT_TRUE(voe_.GetVAD(channel_num)); + EXPECT_FALSE(voe_.GetFEC(channel_num)); + EXPECT_EQ(13, voe_.GetSendCNPayloadType(channel_num, false)); + EXPECT_EQ(97, voe_.GetSendCNPayloadType(channel_num, true)); + EXPECT_EQ(98, voe_.GetSendTelephoneEventPayloadType(channel_num)); +} + // Test that we only apply VAD if we have a CN codec that matches the // send codec clockrate. TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecsCNNoMatch) { @@ -1227,8 +1299,8 @@ TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecsCaseInsensitive) { EXPECT_EQ(98, voe_.GetSendTelephoneEventPayloadType(channel_num)); } -// Test that we set up FEC correctly. -TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecsRED) { +// Test that we set up FEC correctly as caller. +TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecsREDAsCaller) { EXPECT_TRUE(SetupEngine()); int channel_num = voe_.GetLastChannel(); std::vector<cricket::AudioCodec> codecs; @@ -1247,6 +1319,31 @@ TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecsRED) { EXPECT_EQ(127, voe_.GetSendFECPayloadType(channel_num)); } +// Test that we set up FEC correctly as callee. +TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecsREDAsCallee) { + EXPECT_TRUE(engine_.Init(talk_base::Thread::Current())); + channel_ = engine_.CreateChannel(); + EXPECT_TRUE(channel_ != NULL); + + int channel_num = voe_.GetLastChannel(); + std::vector<cricket::AudioCodec> codecs; + codecs.push_back(kRedCodec); + codecs.push_back(kIsacCodec); + codecs.push_back(kPcmuCodec); + codecs[0].id = 127; + codecs[0].params[""] = "96/96"; + codecs[1].id = 96; + EXPECT_TRUE(channel_->SetSendCodecs(codecs)); + EXPECT_TRUE(channel_->AddSendStream( + cricket::StreamParams::CreateLegacy(kSsrc1))); + webrtc::CodecInst gcodec; + EXPECT_EQ(0, voe_.GetSendCodec(channel_num, gcodec)); + EXPECT_EQ(96, gcodec.pltype); + EXPECT_STREQ("ISAC", gcodec.plname); + EXPECT_TRUE(voe_.GetFEC(channel_num)); + EXPECT_EQ(127, voe_.GetSendFECPayloadType(channel_num)); +} + // Test that we set up FEC correctly if params are omitted. TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecsREDNoParams) { EXPECT_TRUE(SetupEngine()); @@ -1947,18 +2044,24 @@ TEST_F(WebRtcVoiceEngineTestFake, StreamCleanup) { EXPECT_EQ(0, voe_.GetNumChannels()); } -// Test the InsertDtmf on default send stream. 
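These CN-and-DTMF tests exercise the per-channel codec logic shown earlier: CN at 8000 Hz keeps its fixed payload type 13, any other CN rate is registered through SetSendCNPayloadType(), and VAD is enabled only when the CN clock rate matches the send codec's. A reduced sketch of that decision, with a simplified AudioCodec and printf standing in for the VoE calls:

#include <cstdio>
#include <string>
#include <vector>

struct AudioCodec { std::string name; int id; int clockrate; };

// Decide what the CN handling in SetSendCodecs() ends up doing for one send
// codec: which CN payload type (if any) must be registered, and whether VAD
// should be switched on.
void ConfigureCn(const std::vector<AudioCodec>& offered,
                 const AudioCodec& send_codec) {
  for (const AudioCodec& c : offered) {
    if (c.name != "CN") continue;
    if (c.clockrate != 8000) {
      // Non-8 kHz CN: the offered payload type has to be registered.
      std::printf("SetSendCNPayloadType(pt=%d, freq=%d)\n", c.id, c.clockrate);
    }  // 8 kHz CN keeps the fixed payload type 13, nothing to register.
    if (c.clockrate == send_codec.clockrate) {
      // VAD only makes sense when CN matches the send codec's clock rate.
      std::printf("SetVADStatus(true)\n");
    }
  }
}

int main() {
  std::vector<AudioCodec> offered = {{"ISAC", 103, 16000},
                                     {"CN", 97, 16000},
                                     {"CN", 13, 8000}};
  ConfigureCn(offered, /*send_codec=*/{"ISAC", 103, 16000});
  // Prints: SetSendCNPayloadType(pt=97, freq=16000) then SetVADStatus(true)
}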
-TEST_F(WebRtcVoiceEngineTestFake, InsertDtmfOnDefaultSendStream) { - EXPECT_TRUE(SetupEngine()); - int channel_num = voe_.GetLastChannel(); - TestInsertDtmf(0, channel_num); +// Test the InsertDtmf on default send stream as caller. +TEST_F(WebRtcVoiceEngineTestFake, InsertDtmfOnDefaultSendStreamAsCaller) { + TestInsertDtmf(0, true); } -// Test the InsertDtmf on specified send stream. -TEST_F(WebRtcVoiceEngineTestFake, InsertDtmfOnSendStream) { - EXPECT_TRUE(SetupEngine()); - int channel_num = voe_.GetLastChannel(); - TestInsertDtmf(kSsrc1, channel_num); +// Test the InsertDtmf on default send stream as callee +TEST_F(WebRtcVoiceEngineTestFake, InsertDtmfOnDefaultSendStreamAsCallee) { + TestInsertDtmf(0, false); +} + +// Test the InsertDtmf on specified send stream as caller. +TEST_F(WebRtcVoiceEngineTestFake, InsertDtmfOnSendStreamAsCaller) { + TestInsertDtmf(kSsrc1, true); +} + +// Test the InsertDtmf on specified send stream as callee. +TEST_F(WebRtcVoiceEngineTestFake, InsertDtmfOnSendStreamAsCallee) { + TestInsertDtmf(kSsrc1, false); } // Test that we can play a ringback tone properly in a single-stream call. @@ -2722,8 +2825,6 @@ TEST(WebRtcVoiceEngineTest, HasCorrectCodecs) { EXPECT_TRUE(engine.FindCodec( cricket::AudioCodec(96, "red", 8000, 0, 1, 0))); EXPECT_TRUE(engine.FindCodec( - cricket::AudioCodec(96, "CN", 48000, 0, 1, 0))); - EXPECT_TRUE(engine.FindCodec( cricket::AudioCodec(96, "CN", 32000, 0, 1, 0))); EXPECT_TRUE(engine.FindCodec( cricket::AudioCodec(96, "CN", 16000, 0, 1, 0))); diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/asyncstuntcpsocket.cc b/chromium/third_party/libjingle/source/talk/p2p/base/asyncstuntcpsocket.cc index 2f616410fc7..ec00c048288 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/asyncstuntcpsocket.cc +++ b/chromium/third_party/libjingle/source/talk/p2p/base/asyncstuntcpsocket.cc @@ -65,7 +65,9 @@ AsyncStunTCPSocket::AsyncStunTCPSocket( : talk_base::AsyncTCPSocketBase(socket, listen, kBufSize) { } -int AsyncStunTCPSocket::Send(const void *pv, size_t cb) { +// TODO(mallinath) - Add support of setting DSCP code on AsyncSocket. 
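The Send() overloads in the next hunks gain a talk_base::DiffServCodePoint argument so callers can request a per-packet QoS marking. On a plain POSIX IPv4 UDP socket the same idea reduces to writing the 6-bit DSCP into the upper bits of the old TOS byte before sending. The sketch below shows only that mapping; it is not the libjingle implementation, the enum values are the standard DSCP code points rather than talk_base's, and IPv6 (where the equivalent knob is IPV6_TCLASS) is ignored.

#include <cstdio>
#include <netinet/in.h>
#include <netinet/ip.h>
#include <sys/socket.h>

enum DiffServCodePoint {  // a few common code points; value = 6-bit DSCP
  DSCP_NO_CHANGE = -1,
  DSCP_DEFAULT = 0,
  DSCP_AF41 = 34,         // typically used for interactive video
  DSCP_EF = 46            // typically used for voice
};

// Apply a DSCP marking to an IPv4 socket. DSCP occupies the top six bits of
// the former TOS byte, hence the << 2.
int ApplyDscp(int fd, DiffServCodePoint dscp) {
  if (dscp == DSCP_NO_CHANGE) return 0;
  int tos = static_cast<int>(dscp) << 2;
  return setsockopt(fd, IPPROTO_IP, IP_TOS, &tos, sizeof(tos));
}

int main() {
  int fd = socket(AF_INET, SOCK_DGRAM, 0);
  if (ApplyDscp(fd, DSCP_EF) != 0) std::perror("setsockopt(IP_TOS)");
  // ... sendto() as usual; outgoing packets now carry DSCP 46 (EF).
}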
+int AsyncStunTCPSocket::Send(const void *pv, size_t cb, + talk_base::DiffServCodePoint dscp) { if (cb > kBufSize || cb < kPacketLenSize + kPacketLenOffset) { SetError(EMSGSIZE); return -1; diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/asyncstuntcpsocket.h b/chromium/third_party/libjingle/source/talk/p2p/base/asyncstuntcpsocket.h index 2380c4c0dac..ff748d1f287 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/asyncstuntcpsocket.h +++ b/chromium/third_party/libjingle/source/talk/p2p/base/asyncstuntcpsocket.h @@ -47,7 +47,8 @@ class AsyncStunTCPSocket : public talk_base::AsyncTCPSocketBase { AsyncStunTCPSocket(talk_base::AsyncSocket* socket, bool listen); virtual ~AsyncStunTCPSocket() {} - virtual int Send(const void* pv, size_t cb); + virtual int Send(const void* pv, size_t cb, + talk_base::DiffServCodePoint dscp); virtual void ProcessInput(char* data, size_t* len); virtual void HandleIncomingConnection(talk_base::AsyncSocket* socket); diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/asyncstuntcpsocket_unittest.cc b/chromium/third_party/libjingle/source/talk/p2p/base/asyncstuntcpsocket_unittest.cc index a6757126418..7cb380b0aa4 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/asyncstuntcpsocket_unittest.cc +++ b/chromium/third_party/libjingle/source/talk/p2p/base/asyncstuntcpsocket_unittest.cc @@ -121,7 +121,8 @@ class AsyncStunTCPSocketTest : public testing::Test, } bool Send(const void* data, size_t len) { - size_t ret = send_socket_->Send(reinterpret_cast<const char*>(data), len); + size_t ret = send_socket_->Send( + reinterpret_cast<const char*>(data), len, talk_base::DSCP_NO_CHANGE); vss_->ProcessMessagesUntilIdle(); return (ret == len); } diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/constants.cc b/chromium/third_party/libjingle/source/talk/p2p/base/constants.cc index 12336d41016..27d43096b9d 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/constants.cc +++ b/chromium/third_party/libjingle/source/talk/p2p/base/constants.cc @@ -262,4 +262,10 @@ const char NS_VOICEMAIL[] = "http://www.google.com/session/voicemail"; const buzz::StaticQName QN_VOICEMAIL_REGARDING = { NS_VOICEMAIL, "regarding" }; #endif +// From RFC 4145, SDP setup attribute values. +const char CONNECTIONROLE_ACTIVE_STR[] = "active"; +const char CONNECTIONROLE_PASSIVE_STR[] = "passive"; +const char CONNECTIONROLE_ACTPASS_STR[] = "actpass"; +const char CONNECTIONROLE_HOLDCONN_STR[] = "holdconn"; + } // namespace cricket diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/constants.h b/chromium/third_party/libjingle/source/talk/p2p/base/constants.h index f7e5671b267..99e006a0185 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/constants.h +++ b/chromium/third_party/libjingle/source/talk/p2p/base/constants.h @@ -261,6 +261,12 @@ extern const char NS_VOICEMAIL[]; extern const buzz::StaticQName QN_VOICEMAIL_REGARDING; #endif +// RFC 4145, SDP setup attribute values. 
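The CONNECTIONROLE_*_STR constants above hold the RFC 4145 "setup" attribute values that an SDP writer or parser needs. A hypothetical helper, not part of this patch, that maps those strings onto a role enum could look like:

#include <cstdio>
#include <string>

// Mirrors the roles named in the diff; NONE covers a missing setup attribute.
enum ConnectionRole {
  CONNECTIONROLE_NONE,
  CONNECTIONROLE_ACTIVE,
  CONNECTIONROLE_PASSIVE,
  CONNECTIONROLE_ACTPASS,
  CONNECTIONROLE_HOLDCONN
};

bool StringToConnectionRole(const std::string& value, ConnectionRole* role) {
  if (value == "active")   { *role = CONNECTIONROLE_ACTIVE;   return true; }
  if (value == "passive")  { *role = CONNECTIONROLE_PASSIVE;  return true; }
  if (value == "actpass")  { *role = CONNECTIONROLE_ACTPASS;  return true; }
  if (value == "holdconn") { *role = CONNECTIONROLE_HOLDCONN; return true; }
  return false;  // unknown setup attribute value
}

int main() {
  ConnectionRole role = CONNECTIONROLE_NONE;
  if (StringToConnectionRole("actpass", &role))
    std::printf("parsed role enum value: %d\n", role);  // prints 3
}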
+extern const char CONNECTIONROLE_ACTIVE_STR[]; +extern const char CONNECTIONROLE_PASSIVE_STR[]; +extern const char CONNECTIONROLE_ACTPASS_STR[]; +extern const char CONNECTIONROLE_HOLDCONN_STR[]; + } // namespace cricket #endif // TALK_P2P_BASE_CONSTANTS_H_ diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/dtlstransport.h b/chromium/third_party/libjingle/source/talk/p2p/base/dtlstransport.h index a6e3b82c1f0..93da1033e8b 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/dtlstransport.h +++ b/chromium/third_party/libjingle/source/talk/p2p/base/dtlstransport.h @@ -55,7 +55,6 @@ class DtlsTransport : public Base { ~DtlsTransport() { Base::DestroyAllChannels(); } - virtual void SetIdentity_w(talk_base::SSLIdentity* identity) { identity_ = identity; } @@ -93,6 +92,12 @@ class DtlsTransport : public Base { } virtual bool NegotiateTransportDescription_w(ContentAction local_role) { + if (!Base::local_description() || !Base::remote_description()) { + LOG(LS_INFO) << "Local and Remote description must be set before " + << "transport descriptions are negotiated"; + return false; + } + talk_base::SSLFingerprint* local_fp = Base::local_description()->identity_fingerprint.get(); talk_base::SSLFingerprint* remote_fp = @@ -100,6 +105,74 @@ class DtlsTransport : public Base { if (remote_fp && local_fp) { remote_fingerprint_.reset(new talk_base::SSLFingerprint(*remote_fp)); + + // From RFC 4145, section-4.1, The following are the values that the + // 'setup' attribute can take in an offer/answer exchange: + // Offer Answer + // ________________ + // active passive / holdconn + // passive active / holdconn + // actpass active / passive / holdconn + // holdconn holdconn + // + // Set the role that is most conformant with RFC 5763, Section 5, bullet 1 + // The endpoint MUST use the setup attribute defined in [RFC4145]. + // The endpoint that is the offerer MUST use the setup attribute + // value of setup:actpass and be prepared to receive a client_hello + // before it receives the answer. The answerer MUST use either a + // setup attribute value of setup:active or setup:passive. Note that + // if the answerer uses setup:passive, then the DTLS handshake will + // not begin until the answerer is received, which adds additional + // latency. setup:active allows the answer and the DTLS handshake to + // occur in parallel. Thus, setup:active is RECOMMENDED. Whichever + // party is active MUST initiate a DTLS handshake by sending a + // ClientHello over each flow (host/port quartet). + // IOW - actpass and passive modes should be treated as server and + // active as client. + ConnectionRole local_connection_role = + Base::local_description()->connection_role; + ConnectionRole remote_connection_role = + Base::remote_description()->connection_role; + + bool is_remote_server = false; + if (local_role == CA_OFFER) { + if (local_connection_role != CONNECTIONROLE_ACTPASS) { + LOG(LS_ERROR) << "Offerer must use actpass value for setup attribute"; + return false; + } + + if (remote_connection_role == CONNECTIONROLE_ACTIVE || + remote_connection_role == CONNECTIONROLE_PASSIVE || + remote_connection_role == CONNECTIONROLE_NONE) { + is_remote_server = (remote_connection_role == CONNECTIONROLE_PASSIVE); + } else { + LOG(LS_ERROR) << "Answerer must use either active or passive value " + << "for setup attribute"; + return false; + } + // If remote is NONE or ACTIVE it will act as client. 
+ } else { + if (remote_connection_role != CONNECTIONROLE_ACTPASS && + remote_connection_role != CONNECTIONROLE_NONE) { + LOG(LS_ERROR) << "Offerer must use actpass value for setup attribute"; + return false; + } + + if (local_connection_role == CONNECTIONROLE_ACTIVE || + local_connection_role == CONNECTIONROLE_PASSIVE) { + is_remote_server = (local_connection_role == CONNECTIONROLE_ACTIVE); + } else { + LOG(LS_ERROR) << "Answerer must use either active or passive value " + << "for setup attribute"; + return false; + } + + // If local is passive, local will act as server. + } + + secure_role_ = is_remote_server ? talk_base::SSL_CLIENT : + talk_base::SSL_SERVER; + } else if (local_fp && (local_role == CA_ANSWER)) { LOG(LS_ERROR) << "Local fingerprint supplied when caller didn't offer DTLS"; @@ -128,18 +201,34 @@ class DtlsTransport : public Base { Base::DestroyTransportChannel(base_channel); } + virtual bool GetSslRole_w(talk_base::SSLRole* ssl_role) const { + ASSERT(ssl_role != NULL); + *ssl_role = secure_role_; + return true; + } + private: - virtual void ApplyNegotiatedTransportDescription_w( + virtual bool ApplyNegotiatedTransportDescription_w( TransportChannelImpl* channel) { - channel->SetRemoteFingerprint( + // Set ssl role. Role must be set before fingerprint is applied, which + // initiates DTLS setup. + if (!channel->SetSslRole(secure_role_)) { + LOG(LS_INFO) << "Failed to set ssl role for the channel."; + return false; + } + // Apply remote fingerprint. + if (!channel->SetRemoteFingerprint( remote_fingerprint_->algorithm, reinterpret_cast<const uint8 *>(remote_fingerprint_-> digest.data()), - remote_fingerprint_->digest.length()); - Base::ApplyNegotiatedTransportDescription_w(channel); + remote_fingerprint_->digest.length())) { + return false; + } + return Base::ApplyNegotiatedTransportDescription_w(channel); } talk_base::SSLIdentity* identity_; + talk_base::SSLRole secure_role_; talk_base::scoped_ptr<talk_base::SSLFingerprint> remote_fingerprint_; }; diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/dtlstransportchannel.cc b/chromium/third_party/libjingle/source/talk/p2p/base/dtlstransportchannel.cc index 6cf400c97ec..dead3a550be 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/dtlstransportchannel.cc +++ b/chromium/third_party/libjingle/source/talk/p2p/base/dtlstransportchannel.cc @@ -29,6 +29,7 @@ #include "talk/p2p/base/dtlstransportchannel.h" #include "talk/base/buffer.h" +#include "talk/base/dscp.h" #include "talk/base/messagequeue.h" #include "talk/base/stream.h" #include "talk/base/sslstreamadapter.h" @@ -69,7 +70,8 @@ talk_base::StreamResult StreamInterfaceChannel::Write(const void* data, int* error) { // Always succeeds, since this is an unreliable transport anyway. // TODO: Should this block if channel_'s temporarily unwritable? 
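NegotiateTransportDescription_w() above encodes the RFC 5763 rules: the offerer must send setup:actpass, the answerer replies active or passive, the side that ends up active becomes the DTLS client, and a missing setup attribute (CONNECTIONROLE_NONE) from a legacy peer is accepted (treated like active in an answer, like actpass in an offer). A standalone sketch of that decision as a pure function over simplified enums:

#include <cstdio>

enum ConnectionRole { ROLE_NONE, ROLE_ACTIVE, ROLE_PASSIVE, ROLE_ACTPASS };
enum SslRole { SSL_CLIENT, SSL_SERVER };

// Returns false on a setup-attribute combination the patch rejects.
// |we_are_offerer| corresponds to local_role == CA_OFFER in the diff.
bool NegotiateSslRole(bool we_are_offerer,
                      ConnectionRole local, ConnectionRole remote,
                      SslRole* out) {
  bool remote_is_server = false;
  if (we_are_offerer) {
    if (local != ROLE_ACTPASS) return false;               // offer must be actpass
    if (remote != ROLE_ACTIVE && remote != ROLE_PASSIVE &&
        remote != ROLE_NONE) return false;                 // answer: active/passive
    remote_is_server = (remote == ROLE_PASSIVE);           // NONE behaves as active
  } else {
    if (remote != ROLE_ACTPASS && remote != ROLE_NONE) return false;  // legacy offer ok
    if (local != ROLE_ACTIVE && local != ROLE_PASSIVE) return false;
    remote_is_server = (local == ROLE_ACTIVE);             // the active side is the client
  }
  *out = remote_is_server ? SSL_CLIENT : SSL_SERVER;
  return true;
}

int main() {
  SslRole role;
  NegotiateSslRole(true, ROLE_ACTPASS, ROLE_ACTIVE, &role);
  std::printf("offerer with active answer -> %s\n",
              role == SSL_SERVER ? "SSL_SERVER" : "SSL_CLIENT");  // SSL_SERVER
}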
- channel_->SendPacket(static_cast<const char*>(data), data_len); + channel_->SendPacket( + static_cast<const char*>(data), data_len, talk_base::DSCP_NO_CHANGE); if (written) { *written = data_len; } @@ -102,7 +104,7 @@ DtlsTransportChannelWrapper::DtlsTransportChannelWrapper( downward_(NULL), dtls_state_(STATE_NONE), local_identity_(NULL), - dtls_role_(talk_base::SSL_CLIENT) { + ssl_role_(talk_base::SSL_CLIENT) { channel_->SignalReadableState.connect(this, &DtlsTransportChannelWrapper::OnReadableState); channel_->SignalWritableState.connect(this, @@ -171,18 +173,22 @@ bool DtlsTransportChannelWrapper::SetLocalIdentity( return true; } -void DtlsTransportChannelWrapper::SetIceRole(IceRole role) { - // TODO(ekr@rtfm.com): Forbid this if Connect() has been called. - ASSERT(dtls_state_ < STATE_ACCEPTED); +bool DtlsTransportChannelWrapper::SetSslRole(talk_base::SSLRole role) { + if (dtls_state_ == STATE_OPEN) { + if (ssl_role_ != role) { + LOG(LS_ERROR) << "SSL Role can't be reversed after the session is setup."; + return false; + } + return true; + } - // Set the role that is most conformant with RFC 5763, Section 5, bullet 1: - // The endpoint that is the offerer MUST [...] be prepared to receive - // a client_hello before it receives the answer. - // (IOW, the offerer is the server, and the answerer is the client). - dtls_role_ = (role == ICEROLE_CONTROLLING) ? - talk_base::SSL_SERVER : talk_base::SSL_CLIENT; + ssl_role_ = role; + return true; +} - channel_->SetIceRole(role); +bool DtlsTransportChannelWrapper::GetSslRole(talk_base::SSLRole* role) const { + *role = ssl_role_; + return true; } bool DtlsTransportChannelWrapper::SetRemoteFingerprint( @@ -201,12 +207,12 @@ bool DtlsTransportChannelWrapper::SetRemoteFingerprint( // hasn't been called. if (dtls_state_ > STATE_OFFERED || (dtls_state_ == STATE_NONE && !digest_alg.empty())) { - LOG_J(LS_ERROR, this) << "Can't set DTLS remote settings in this state"; + LOG_J(LS_ERROR, this) << "Can't set DTLS remote settings in this state."; return false; } if (digest_alg.empty()) { - LOG_J(LS_INFO, this) << "Other side didn't support DTLS"; + LOG_J(LS_INFO, this) << "Other side didn't support DTLS."; dtls_state_ = STATE_NONE; return true; } @@ -230,7 +236,7 @@ bool DtlsTransportChannelWrapper::SetupDtls() { dtls_.reset(talk_base::SSLStreamAdapter::Create(downward)); if (!dtls_) { - LOG_J(LS_ERROR, this) << "Failed to create DTLS adapter"; + LOG_J(LS_ERROR, this) << "Failed to create DTLS adapter."; delete downward; return false; } @@ -239,27 +245,27 @@ bool DtlsTransportChannelWrapper::SetupDtls() { dtls_->SetIdentity(local_identity_->GetReference()); dtls_->SetMode(talk_base::SSL_MODE_DTLS); - dtls_->SetServerRole(dtls_role_); + dtls_->SetServerRole(ssl_role_); dtls_->SignalEvent.connect(this, &DtlsTransportChannelWrapper::OnDtlsEvent); if (!dtls_->SetPeerCertificateDigest( remote_fingerprint_algorithm_, reinterpret_cast<unsigned char *>(remote_fingerprint_value_.data()), remote_fingerprint_value_.length())) { - LOG_J(LS_ERROR, this) << "Couldn't set DTLS certificate digest"; + LOG_J(LS_ERROR, this) << "Couldn't set DTLS certificate digest."; return false; } // Set up DTLS-SRTP, if it's been enabled. 
if (!srtp_ciphers_.empty()) { if (!dtls_->SetDtlsSrtpCiphers(srtp_ciphers_)) { - LOG_J(LS_ERROR, this) << "Couldn't set DTLS-SRTP ciphers"; + LOG_J(LS_ERROR, this) << "Couldn't set DTLS-SRTP ciphers."; return false; } } else { - LOG_J(LS_INFO, this) << "Not using DTLS"; + LOG_J(LS_INFO, this) << "Not using DTLS."; } - LOG_J(LS_INFO, this) << "DTLS setup complete"; + LOG_J(LS_INFO, this) << "DTLS setup complete."; return true; } @@ -293,6 +299,7 @@ bool DtlsTransportChannelWrapper::GetSrtpCipher(std::string* cipher) { // Called from upper layers to send a media packet. int DtlsTransportChannelWrapper::SendPacket(const char* data, size_t size, + talk_base::DiffServCodePoint dscp, int flags) { int result = -1; @@ -317,7 +324,7 @@ int DtlsTransportChannelWrapper::SendPacket(const char* data, size_t size, break; } - result = channel_->SendPacket(data, size); + result = channel_->SendPacket(data, size, dscp); } else { result = (dtls_->WriteAll(data, size, NULL, NULL) == talk_base::SR_SUCCESS) ? static_cast<int>(size) : -1; @@ -325,7 +332,7 @@ int DtlsTransportChannelWrapper::SendPacket(const char* data, size_t size, break; // Not doing DTLS. case STATE_NONE: - result = channel_->SendPacket(data, size); + result = channel_->SendPacket(data, size, dscp); break; case STATE_CLOSED: // Can't send anything when we're closed. @@ -349,7 +356,7 @@ void DtlsTransportChannelWrapper::OnReadableState(TransportChannel* channel) { ASSERT(talk_base::Thread::Current() == worker_thread_); ASSERT(channel == channel_); LOG_J(LS_VERBOSE, this) - << "DTLSTransportChannelWrapper: channel readable state changed"; + << "DTLSTransportChannelWrapper: channel readable state changed."; if (dtls_state_ == STATE_NONE || dtls_state_ == STATE_OPEN) { set_readable(channel_->readable()); @@ -361,7 +368,7 @@ void DtlsTransportChannelWrapper::OnWritableState(TransportChannel* channel) { ASSERT(talk_base::Thread::Current() == worker_thread_); ASSERT(channel == channel_); LOG_J(LS_VERBOSE, this) - << "DTLSTransportChannelWrapper: channel writable state changed"; + << "DTLSTransportChannelWrapper: channel writable state changed."; switch (dtls_state_) { case STATE_NONE: @@ -416,13 +423,13 @@ void DtlsTransportChannelWrapper::OnReadPacket(TransportChannel* channel, // decide to take this as evidence that the other // side is ready to do DTLS and start the handshake // on our end - LOG_J(LS_WARNING, this) << "Received packet before we know if we are doing " - << "DTLS or not; dropping"; + LOG_J(LS_WARNING, this) << "Received packet before we know if we are " + << "doing DTLS or not; dropping."; break; case STATE_ACCEPTED: // Drop packets received before DTLS has actually started - LOG_J(LS_INFO, this) << "Dropping packet received before DTLS started"; + LOG_J(LS_INFO, this) << "Dropping packet received before DTLS started."; break; case STATE_STARTED: @@ -431,19 +438,20 @@ void DtlsTransportChannelWrapper::OnReadPacket(TransportChannel* channel, // Is this potentially a DTLS packet? if (IsDtlsPacket(data, size)) { if (!HandleDtlsPacket(data, size)) { - LOG_J(LS_ERROR, this) << "Failed to handle DTLS packet"; + LOG_J(LS_ERROR, this) << "Failed to handle DTLS packet."; return; } } else { // Not a DTLS packet; our handshake should be complete by now. if (dtls_state_ != STATE_OPEN) { - LOG_J(LS_ERROR, this) << "Received non-DTLS packet before DTLS complete"; + LOG_J(LS_ERROR, this) << "Received non-DTLS packet before DTLS " + << "complete."; return; } // And it had better be a SRTP packet. 
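One detail of the SendPacket() switch above is how the new dscp argument travels: it is forwarded on the not-doing-DTLS path and on the SRTP-bypass path, while packets tunnelled through the DTLS stream come back out via StreamInterfaceChannel::Write(), which sends with DSCP_NO_CHANGE. A condensed sketch of that routing; the state names follow the diff, everything else is simplified.

#include <cstddef>
#include <cstdio>

enum Dscp { DSCP_NO_CHANGE = -1, DSCP_AF41 = 34 };
enum DtlsState { STATE_NONE, STATE_OPEN };
const int PF_SRTP_BYPASS = 0x1;

struct IceChannel {  // stands in for the wrapped P2P channel
  int SendPacket(const char*, size_t len, Dscp dscp) {
    std::printf("ice send, dscp=%d\n", static_cast<int>(dscp));
    return static_cast<int>(len);
  }
};

struct DtlsWrapper {
  DtlsState state = STATE_NONE;
  IceChannel ice;

  int SendPacket(const char* data, size_t len, Dscp dscp, int flags) {
    if (state == STATE_NONE)                    // DTLS not in use: pass dscp through
      return ice.SendPacket(data, len, dscp);
    if (state == STATE_OPEN && (flags & PF_SRTP_BYPASS))
      return ice.SendPacket(data, len, dscp);   // SRTP bypass: pass dscp through
    // Tunnelled through DTLS: the stream adapter's Write() ends up calling
    // the wrapped channel with DSCP_NO_CHANGE, so no marking is applied here.
    return ice.SendPacket(data, len, DSCP_NO_CHANGE);
  }
};

int main() {
  DtlsWrapper w;
  w.SendPacket("x", 1, DSCP_AF41, 0);                // dscp=34
  w.state = STATE_OPEN;
  w.SendPacket("x", 1, DSCP_AF41, PF_SRTP_BYPASS);   // dscp=34
  w.SendPacket("x", 1, DSCP_AF41, 0);                // dscp=-1 (no change)
}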
if (!IsRtpPacket(data, size)) { - LOG_J(LS_ERROR, this) << "Received unexpected non-DTLS packet"; + LOG_J(LS_ERROR, this) << "Received unexpected non-DTLS packet."; return; } @@ -472,7 +480,7 @@ void DtlsTransportChannelWrapper::OnDtlsEvent(talk_base::StreamInterface* dtls, ASSERT(dtls == dtls_.get()); if (sig & talk_base::SE_OPEN) { // This is the first time. - LOG_J(LS_INFO, this) << "DTLS handshake complete"; + LOG_J(LS_INFO, this) << "DTLS handshake complete."; if (dtls_->GetState() == talk_base::SS_OPEN) { // The check for OPEN shouldn't be necessary but let's make // sure we don't accidentally frob the state if it's closed. diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/dtlstransportchannel.h b/chromium/third_party/libjingle/source/talk/p2p/base/dtlstransportchannel.h index 8321024df22..aec8c7ac428 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/dtlstransportchannel.h +++ b/chromium/third_party/libjingle/source/talk/p2p/base/dtlstransportchannel.h @@ -121,8 +121,9 @@ class DtlsTransportChannelWrapper : public TransportChannelImpl { TransportChannelImpl* channel); virtual ~DtlsTransportChannelWrapper(); - virtual void SetIceRole(IceRole ice_role); - // Returns current transport role of the channel. + virtual void SetIceRole(IceRole role) { + channel_->SetIceRole(role); + } virtual IceRole GetIceRole() const { return channel_->GetIceRole(); } @@ -134,7 +135,9 @@ class DtlsTransportChannelWrapper : public TransportChannelImpl { virtual bool IsDtlsActive() const { return dtls_state_ != STATE_NONE; } // Called to send a packet (via DTLS, if turned on). - virtual int SendPacket(const char* data, size_t size, int flags); + virtual int SendPacket(const char* data, size_t size, + talk_base::DiffServCodePoint dscp, + int flags); // TransportChannel calls that we forward to the wrapped transport. virtual int SetOption(talk_base::Socket::Option opt, int value) { @@ -158,6 +161,9 @@ class DtlsTransportChannelWrapper : public TransportChannelImpl { // Find out which DTLS-SRTP cipher was negotiated virtual bool GetSrtpCipher(std::string* cipher); + virtual bool GetSslRole(talk_base::SSLRole* role) const; + virtual bool SetSslRole(talk_base::SSLRole role); + // Once DTLS has established (i.e., this channel is writable), this method // extracts the keys negotiated during the DTLS handshake, for use in external // encryption. DTLS-SRTP uses this to extract the needed SRTP keys. @@ -234,7 +240,7 @@ class DtlsTransportChannelWrapper : public TransportChannelImpl { std::vector<std::string> srtp_ciphers_; // SRTP ciphers to use with DTLS. 
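OnReadPacket() above demultiplexes DTLS records from SRTP on a shared transport using the first byte, following the RFC 5764 convention: DTLS content types fall in 20..63, while RTP and RTCP start with version bits 10, i.e. 128..191 (compare IsRtpLeadByte() in the unit test further down). A tiny sketch of those two predicates:

#include <cstddef>
#include <cstdint>
#include <cstdio>

// Demultiplexing rule for DTLS and (S)RTP sharing one transport.
bool LooksLikeDtls(const uint8_t* data, size_t len) {
  return len > 0 && data[0] > 19 && data[0] < 64;   // DTLS record content types
}
bool LooksLikeRtp(const uint8_t* data, size_t len) {
  return len > 0 && (data[0] & 0xC0) == 0x80;       // RTP/RTCP version bits '10'
}

int main() {
  uint8_t dtls_hello[] = {22, 0xFE, 0xFD};  // content type 22 = handshake, DTLS 1.2
  uint8_t rtp[] = {0x80, 0x60};             // version 2, payload type 96
  std::printf("%d %d\n", LooksLikeDtls(dtls_hello, 3), LooksLikeRtp(rtp, 2));  // 1 1
}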
State dtls_state_; talk_base::SSLIdentity* local_identity_; - talk_base::SSLRole dtls_role_; + talk_base::SSLRole ssl_role_; talk_base::Buffer remote_fingerprint_value_; std::string remote_fingerprint_algorithm_; diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/dtlstransportchannel_unittest.cc b/chromium/third_party/libjingle/source/talk/p2p/base/dtlstransportchannel_unittest.cc index c46839f5ece..267d60be167 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/dtlstransportchannel_unittest.cc +++ b/chromium/third_party/libjingle/source/talk/p2p/base/dtlstransportchannel_unittest.cc @@ -29,6 +29,7 @@ #include <set> #include "talk/base/common.h" +#include "talk/base/dscp.h" #include "talk/base/gunit.h" #include "talk/base/helpers.h" #include "talk/base/scoped_ptr.h" @@ -56,6 +57,10 @@ static bool IsRtpLeadByte(uint8 b) { return ((b & 0xC0) == 0x80); } +using cricket::ConnectionRole; + +enum Flags { NF_REOFFER = 0x1, NF_EXPECT_FAILURE = 0x2 }; + class DtlsTestClient : public sigslot::has_slots<> { public: DtlsTestClient(const std::string& name, @@ -77,6 +82,7 @@ class DtlsTestClient : public sigslot::has_slots<> { void CreateIdentity() { identity_.reset(talk_base::SSLIdentity::Generate(name_)); } + talk_base::SSLIdentity* identity() { return identity_.get(); } void SetupSrtp() { ASSERT(identity_.get() != NULL); use_dtls_srtp_ = true; @@ -108,6 +114,9 @@ class DtlsTestClient : public sigslot::has_slots<> { this, &DtlsTestClient::OnFakeTransportChannelReadPacket); } } + + cricket::Transport* transport() { return transport_.get(); } + cricket::FakeTransportChannel* GetFakeChannel(int component) { cricket::TransportChannelImpl* ch = transport_->GetChannel(component); cricket::DtlsTransportChannelWrapper* wrapper = @@ -118,13 +127,20 @@ class DtlsTestClient : public sigslot::has_slots<> { // Offer DTLS if we have an identity; pass in a remote fingerprint only if // both sides support DTLS. - void Negotiate(DtlsTestClient* peer) { - Negotiate(identity_.get(), (identity_) ? peer->identity_.get() : NULL); + void Negotiate(DtlsTestClient* peer, cricket::ContentAction action, + ConnectionRole local_role, ConnectionRole remote_role, + int flags) { + Negotiate(identity_.get(), (identity_) ? peer->identity_.get() : NULL, + action, local_role, remote_role, flags); } // Allow any DTLS configuration to be specified (including invalid ones). void Negotiate(talk_base::SSLIdentity* local_identity, - talk_base::SSLIdentity* remote_identity) { + talk_base::SSLIdentity* remote_identity, + cricket::ContentAction action, + ConnectionRole local_role, + ConnectionRole remote_role, + int flags) { talk_base::scoped_ptr<talk_base::SSLFingerprint> local_fingerprint; talk_base::scoped_ptr<talk_base::SSLFingerprint> remote_fingerprint; if (local_identity) { @@ -137,7 +153,9 @@ class DtlsTestClient : public sigslot::has_slots<> { talk_base::DIGEST_SHA_1, remote_identity)); ASSERT_TRUE(remote_fingerprint.get() != NULL); } - if (use_dtls_srtp_) { + + if (use_dtls_srtp_ && !(flags & NF_REOFFER)) { + // SRTP ciphers will be set only in the beginning. 
for (std::vector<cricket::DtlsTransportChannelWrapper*>::iterator it = channels_.begin(); it != channels_.end(); ++it) { std::vector<std::string> ciphers; @@ -150,17 +168,32 @@ class DtlsTestClient : public sigslot::has_slots<> { cricket::NS_GINGLE_P2P : cricket::NS_JINGLE_ICE_UDP; cricket::TransportDescription local_desc( transport_type, std::vector<std::string>(), kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, local_fingerprint.get(), + cricket::ICEMODE_FULL, local_role, + // If remote if the offerer and has no DTLS support, answer will be + // without any fingerprint. + (action == cricket::CA_ANSWER && !remote_identity) ? + NULL : local_fingerprint.get(), cricket::Candidates()); - ASSERT_TRUE(transport_->SetLocalTransportDescription(local_desc, - cricket::CA_OFFER)); + cricket::TransportDescription remote_desc( transport_type, std::vector<std::string>(), kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, remote_fingerprint.get(), + cricket::ICEMODE_FULL, remote_role, remote_fingerprint.get(), cricket::Candidates()); - ASSERT_TRUE(transport_->SetRemoteTransportDescription(remote_desc, - cricket::CA_ANSWER)); + bool expect_success = (flags & NF_EXPECT_FAILURE) ? false : true; + // If |expect_success| is false, expect SRTD or SLTD to fail when + // content action is CA_ANSWER. + if (action == cricket::CA_OFFER) { + ASSERT_TRUE(transport_->SetLocalTransportDescription( + local_desc, cricket::CA_OFFER)); + ASSERT_EQ(expect_success, transport_->SetRemoteTransportDescription( + remote_desc, cricket::CA_ANSWER)); + } else { + ASSERT_TRUE(transport_->SetRemoteTransportDescription( + remote_desc, cricket::CA_OFFER)); + ASSERT_EQ(expect_success, transport_->SetLocalTransportDescription( + local_desc, cricket::CA_ANSWER)); + } negotiated_dtls_ = (local_identity && remote_identity); } @@ -212,7 +245,8 @@ class DtlsTestClient : public sigslot::has_slots<> { // Only set the bypass flag if we've activated DTLS. int flags = (identity_.get() && srtp) ? cricket::PF_SRTP_BYPASS : 0; - int rv = channels_[channel]->SendPacket(packet.get(), size, flags); + int rv = channels_[channel]->SendPacket( + packet.get(), size, talk_base::DSCP_NO_CHANGE, flags); ASSERT_GT(rv, 0); ASSERT_EQ(size, static_cast<size_t>(rv)); ++sent; @@ -373,8 +407,8 @@ class DtlsTransportChannelTest : public testing::Test { use_dtls_srtp_ = true; } - bool Connect() { - Negotiate(); + bool Connect(ConnectionRole client1_role, ConnectionRole client2_role) { + Negotiate(client1_role, client2_role); bool rv = client1_.Connect(&client2_); EXPECT_TRUE(rv); @@ -387,8 +421,20 @@ class DtlsTransportChannelTest : public testing::Test { // Check that we used the right roles. if (use_dtls_) { - client1_.CheckRole(talk_base::SSL_SERVER); - client2_.CheckRole(talk_base::SSL_CLIENT); + talk_base::SSLRole client1_ssl_role = + (client1_role == cricket::CONNECTIONROLE_ACTIVE || + (client2_role == cricket::CONNECTIONROLE_PASSIVE && + client1_role == cricket::CONNECTIONROLE_ACTPASS)) ? + talk_base::SSL_CLIENT : talk_base::SSL_SERVER; + + talk_base::SSLRole client2_ssl_role = + (client2_role == cricket::CONNECTIONROLE_ACTIVE || + (client1_role == cricket::CONNECTIONROLE_PASSIVE && + client2_role == cricket::CONNECTIONROLE_ACTPASS)) ? + talk_base::SSL_CLIENT : talk_base::SSL_SERVER; + + client1_.CheckRole(client1_ssl_role); + client2_.CheckRole(client2_ssl_role); } // Check that we negotiated the right ciphers. 
@@ -402,11 +448,55 @@ class DtlsTransportChannelTest : public testing::Test { return true; } + + bool Connect() { + // By default, Client1 will be Server and Client2 will be Client. + return Connect(cricket::CONNECTIONROLE_ACTPASS, + cricket::CONNECTIONROLE_ACTIVE); + } + void Negotiate() { + Negotiate(cricket::CONNECTIONROLE_ACTPASS, cricket::CONNECTIONROLE_ACTIVE); + } + + void Negotiate(ConnectionRole client1_role, ConnectionRole client2_role) { + client1_.SetupChannels(channel_ct_, cricket::ICEROLE_CONTROLLING); + client2_.SetupChannels(channel_ct_, cricket::ICEROLE_CONTROLLED); + // Expect success from SLTD and SRTD. + client1_.Negotiate(&client2_, cricket::CA_OFFER, + client1_role, client2_role, 0); + client2_.Negotiate(&client1_, cricket::CA_ANSWER, + client2_role, client1_role, 0); + } + + // Negotiate with legacy client |client2|. Legacy client doesn't use setup + // attributes, except NONE. + void NegotiateWithLegacy() { client1_.SetupChannels(channel_ct_, cricket::ICEROLE_CONTROLLING); client2_.SetupChannels(channel_ct_, cricket::ICEROLE_CONTROLLED); - client2_.Negotiate(&client1_); - client1_.Negotiate(&client2_); + // Expect success from SLTD and SRTD. + client1_.Negotiate(&client2_, cricket::CA_OFFER, + cricket::CONNECTIONROLE_ACTPASS, + cricket::CONNECTIONROLE_NONE, 0); + client2_.Negotiate(&client1_, cricket::CA_ANSWER, + cricket::CONNECTIONROLE_ACTIVE, + cricket::CONNECTIONROLE_NONE, 0); + } + + void Renegotiate(DtlsTestClient* reoffer_initiator, + ConnectionRole client1_role, ConnectionRole client2_role, + int flags) { + if (reoffer_initiator == &client1_) { + client1_.Negotiate(&client2_, cricket::CA_OFFER, + client1_role, client2_role, flags); + client2_.Negotiate(&client1_, cricket::CA_ANSWER, + client2_role, client1_role, flags); + } else { + client2_.Negotiate(&client1_, cricket::CA_OFFER, + client2_role, client1_role, flags); + client1_.Negotiate(&client2_, cricket::CA_ANSWER, + client1_role, client2_role, flags); + } } void TestTransfer(size_t channel, size_t size, size_t count, bool srtp) { @@ -568,3 +658,96 @@ TEST_F(DtlsTransportChannelTest, TestTransferDtlsSrtpDemux) { TestTransfer(0, 1000, 100, false); TestTransfer(0, 1000, 100, true); } + +// Testing when the remote is passive. +TEST_F(DtlsTransportChannelTest, TestTransferDtlsAnswererIsPassive) { + MAYBE_SKIP_TEST(HaveDtlsSrtp); + SetChannelCount(2); + PrepareDtls(true, true); + PrepareDtlsSrtp(true, true); + ASSERT_TRUE(Connect(cricket::CONNECTIONROLE_ACTPASS, + cricket::CONNECTIONROLE_PASSIVE)); + TestTransfer(0, 1000, 100, true); + TestTransfer(1, 1000, 100, true); +} + +// Testing with the legacy DTLS client which doesn't use setup attribute. +// In this case legacy is the answerer. +TEST_F(DtlsTransportChannelTest, TestDtlsSetupWithLegacyAsAnswerer) { + MAYBE_SKIP_TEST(HaveDtlsSrtp); + PrepareDtls(true, true); + NegotiateWithLegacy(); + talk_base::SSLRole channel1_role; + talk_base::SSLRole channel2_role; + EXPECT_TRUE(client1_.transport()->GetSslRole(&channel1_role)); + EXPECT_TRUE(client2_.transport()->GetSslRole(&channel2_role)); + EXPECT_EQ(talk_base::SSL_SERVER, channel1_role); + EXPECT_EQ(talk_base::SSL_CLIENT, channel2_role); +} + +// Testing re offer/answer after the session is estbalished. Roles will be +// kept same as of the previous negotiation. +TEST_F(DtlsTransportChannelTest, TestDtlsReOfferFromOfferer) { + MAYBE_SKIP_TEST(HaveDtlsSrtp); + SetChannelCount(2); + PrepareDtls(true, true); + PrepareDtlsSrtp(true, true); + // Initial role for client1 is ACTPASS and client2 is ACTIVE. 
+ ASSERT_TRUE(Connect(cricket::CONNECTIONROLE_ACTPASS, + cricket::CONNECTIONROLE_ACTIVE)); + TestTransfer(0, 1000, 100, true); + TestTransfer(1, 1000, 100, true); + // Using input roles for the re-offer. + Renegotiate(&client1_, cricket::CONNECTIONROLE_ACTPASS, + cricket::CONNECTIONROLE_ACTIVE, NF_REOFFER); + TestTransfer(0, 1000, 100, true); + TestTransfer(1, 1000, 100, true); +} + +TEST_F(DtlsTransportChannelTest, TestDtlsReOfferFromAnswerer) { + MAYBE_SKIP_TEST(HaveDtlsSrtp); + SetChannelCount(2); + PrepareDtls(true, true); + PrepareDtlsSrtp(true, true); + // Initial role for client1 is ACTPASS and client2 is ACTIVE. + ASSERT_TRUE(Connect(cricket::CONNECTIONROLE_ACTPASS, + cricket::CONNECTIONROLE_ACTIVE)); + TestTransfer(0, 1000, 100, true); + TestTransfer(1, 1000, 100, true); + // Using input roles for the re-offer. + Renegotiate(&client2_, cricket::CONNECTIONROLE_PASSIVE, + cricket::CONNECTIONROLE_ACTPASS, NF_REOFFER); + TestTransfer(0, 1000, 100, true); + TestTransfer(1, 1000, 100, true); +} + +// Test that any change in role after the intial setup will result in failure. +TEST_F(DtlsTransportChannelTest, TestDtlsRoleReversal) { + MAYBE_SKIP_TEST(HaveDtlsSrtp); + SetChannelCount(2); + PrepareDtls(true, true); + PrepareDtlsSrtp(true, true); + ASSERT_TRUE(Connect(cricket::CONNECTIONROLE_ACTPASS, + cricket::CONNECTIONROLE_PASSIVE)); + + // Renegotiate from client2 with actpass and client1 as active. + Renegotiate(&client2_, cricket::CONNECTIONROLE_ACTPASS, + cricket::CONNECTIONROLE_ACTIVE, + NF_REOFFER | NF_EXPECT_FAILURE); +} + +// Test that using different setup attributes which results in similar ssl +// role as the initial negotiation will result in success. +TEST_F(DtlsTransportChannelTest, TestDtlsReOfferWithDifferentSetupAttr) { + MAYBE_SKIP_TEST(HaveDtlsSrtp); + SetChannelCount(2); + PrepareDtls(true, true); + PrepareDtlsSrtp(true, true); + ASSERT_TRUE(Connect(cricket::CONNECTIONROLE_ACTPASS, + cricket::CONNECTIONROLE_PASSIVE)); + // Renegotiate from client2 with actpass and client1 as active. 
+ Renegotiate(&client2_, cricket::CONNECTIONROLE_ACTIVE, + cricket::CONNECTIONROLE_ACTPASS, NF_REOFFER); + TestTransfer(0, 1000, 100, true); + TestTransfer(1, 1000, 100, true); +} diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/fakesession.h b/chromium/third_party/libjingle/source/talk/p2p/base/fakesession.h index 6b96c60e630..d162950a3bb 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/fakesession.h +++ b/chromium/third_party/libjingle/source/talk/p2p/base/fakesession.h @@ -71,7 +71,8 @@ class FakeTransportChannel : public TransportChannelImpl, tiebreaker_(0), ice_proto_(ICEPROTO_HYBRID), remote_ice_mode_(ICEMODE_FULL), - dtls_fingerprint_("", NULL, 0) { + dtls_fingerprint_("", NULL, 0), + ssl_role_(talk_base::SSL_CLIENT) { } ~FakeTransportChannel() { Reset(); @@ -117,6 +118,14 @@ class FakeTransportChannel : public TransportChannelImpl, dtls_fingerprint_ = talk_base::SSLFingerprint(alg, digest, digest_len); return true; } + virtual bool SetSslRole(talk_base::SSLRole role) { + ssl_role_ = role; + return true; + } + virtual bool GetSslRole(talk_base::SSLRole* role) const { + *role = ssl_role_; + return true; + } virtual void Connect() { if (state_ == STATE_INIT) { @@ -160,7 +169,8 @@ class FakeTransportChannel : public TransportChannelImpl, } } - virtual int SendPacket(const char* data, size_t len, int flags) { + virtual int SendPacket(const char* data, size_t len, + talk_base::DiffServCodePoint dscp, int flags) { if (state_ != STATE_CONNECTED) { return -1; } @@ -199,7 +209,6 @@ class FakeTransportChannel : public TransportChannelImpl, bool SetLocalIdentity(talk_base::SSLIdentity* identity) { identity_ = identity; - return true; } @@ -275,6 +284,7 @@ class FakeTransportChannel : public TransportChannelImpl, std::string remote_ice_pwd_; IceMode remote_ice_mode_; talk_base::SSLFingerprint dtls_fingerprint_; + talk_base::SSLRole ssl_role_; }; // Fake transport class, which can be passed to anything that needs a Transport. diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/p2ptransportchannel.cc b/chromium/third_party/libjingle/source/talk/p2p/base/p2ptransportchannel.cc index 7a72d1008ed..d45a66c40b6 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/p2ptransportchannel.cc +++ b/chromium/third_party/libjingle/source/talk/p2p/base/p2ptransportchannel.cc @@ -518,7 +518,20 @@ void P2PTransportChannel::OnUnknownAddress( // request came from. // There shouldn't be an existing connection with this remote address. - ASSERT(port->GetConnection(new_remote_candidate.address()) == NULL); + // When ports are muxed, this channel might get multiple unknown addres + // signals. In that case if the connection is already exists, we should + // simply ignore the signal othewise send server error. + if (port->GetConnection(new_remote_candidate.address()) && port_muxed) { + LOG(LS_INFO) << "Connection already exist for PeerReflexive candidate: " + << new_remote_candidate.ToString(); + return; + } else if (port->GetConnection(new_remote_candidate.address())) { + ASSERT(false); + port->SendBindingErrorResponse(stun_msg, address, + STUN_ERROR_SERVER_ERROR, + STUN_ERROR_REASON_SERVER_ERROR); + return; + } Connection* connection = port->CreateConnection( new_remote_candidate, cricket::PortInterface::ORIGIN_THIS_PORT); @@ -773,7 +786,9 @@ int P2PTransportChannel::SetOption(talk_base::Socket::Option opt, int value) { } // Send data to the other side, using our best connection. 
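The OnUnknownAddress() change above distinguishes two cases when a STUN binding request arrives from an address that already has a connection: with muxed ports this is expected and is silently ignored, while without muxing it is answered with a STUN 500 server error (the ASSERT is kept for debug builds). A small sketch of that branching:

#include <cstdio>

enum class Verdict { kIgnore, kServerError, kCreateConnection };

// Mirrors the branching added to OnUnknownAddress(): with muxed ports a second
// binding from an already-known remote address is simply ignored; without
// muxing it indicates an unexpected state, so the port answers with a 500.
Verdict HandleUnknownAddress(bool port_muxed, bool connection_exists) {
  if (connection_exists && port_muxed) return Verdict::kIgnore;
  if (connection_exists) return Verdict::kServerError;
  return Verdict::kCreateConnection;
}

int main() {
  std::printf("%d\n", static_cast<int>(HandleUnknownAddress(true, true)));    // 0 ignore
  std::printf("%d\n", static_cast<int>(HandleUnknownAddress(false, true)));   // 1 error
  std::printf("%d\n", static_cast<int>(HandleUnknownAddress(false, false)));  // 2 create
}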
-int P2PTransportChannel::SendPacket(const char *data, size_t len, int flags) { +int P2PTransportChannel::SendPacket(const char *data, size_t len, + talk_base::DiffServCodePoint dscp, + int flags) { ASSERT(worker_thread_ == talk_base::Thread::Current()); if (flags != 0) { error_ = EINVAL; @@ -783,7 +798,7 @@ int P2PTransportChannel::SendPacket(const char *data, size_t len, int flags) { error_ = EWOULDBLOCK; return -1; } - int sent = best_connection_->Send(data, len); + int sent = best_connection_->Send(data, len, dscp); if (sent <= 0) { ASSERT(sent < 0); error_ = best_connection_->GetError(); @@ -823,6 +838,14 @@ bool P2PTransportChannel::GetStats(ConnectionInfos *infos) { return true; } +talk_base::DiffServCodePoint P2PTransportChannel::DefaultDscpValue() const { + OptionMap::const_iterator it = options_.find(talk_base::Socket::OPT_DSCP); + if (it == options_.end()) { + return talk_base::DSCP_NO_CHANGE; + } + return static_cast<talk_base::DiffServCodePoint> (it->second); +} + // Begin allocate (or immediately re-allocate, if MSG_ALLOCATE pending) void P2PTransportChannel::Allocate() { // Time for a new allocator, lets make sure we have a signalling channel diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/p2ptransportchannel.h b/chromium/third_party/libjingle/source/talk/p2p/base/p2ptransportchannel.h index 74a4483801f..2fc718641fb 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/p2ptransportchannel.h +++ b/chromium/third_party/libjingle/source/talk/p2p/base/p2ptransportchannel.h @@ -90,7 +90,8 @@ class P2PTransportChannel : public TransportChannelImpl, virtual void OnCandidate(const Candidate& candidate); // From TransportChannel: - virtual int SendPacket(const char *data, size_t len, int flags); + virtual int SendPacket(const char *data, size_t len, + talk_base::DiffServCodePoint dscp, int flags); virtual int SetOption(talk_base::Socket::Option opt, int value); virtual int GetError() { return error_; } virtual bool GetStats(std::vector<ConnectionInfo>* stats); @@ -104,6 +105,54 @@ class P2PTransportChannel : public TransportChannelImpl, IceMode remote_ice_mode() const { return remote_ice_mode_; } + // DTLS methods. + virtual bool IsDtlsActive() const { return false; } + + // Default implementation. + virtual bool GetSslRole(talk_base::SSLRole* role) const { + return false; + } + + virtual bool SetSslRole(talk_base::SSLRole role) { + return false; + } + + // Set up the ciphers to use for DTLS-SRTP. + virtual bool SetSrtpCiphers(const std::vector<std::string>& ciphers) { + return false; + } + + // Find out which DTLS-SRTP cipher was negotiated + virtual bool GetSrtpCipher(std::string* cipher) { + return false; + } + + // Allows key material to be extracted for external encryption. + virtual bool ExportKeyingMaterial( + const std::string& label, + const uint8* context, + size_t context_len, + bool use_context, + uint8* result, + size_t result_len) { + return false; + } + + virtual bool SetLocalIdentity(talk_base::SSLIdentity* identity) { + return false; + } + + // Set DTLS Remote fingerprint. Must be after local identity set. + virtual bool SetRemoteFingerprint( + const std::string& digest_alg, + const uint8* digest, + size_t digest_len) { + return false; + } + + // Helper method used only in unittest. 
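DefaultDscpValue(), defined in the p2ptransportchannel.cc hunk above and declared just below, simply looks the DSCP socket option up in the channel's cached option map and falls back to "leave packets unmarked". A sketch of that lookup; OPT_DSCP's numeric value here is invented, the real key lives in talk_base::Socket.

#include <cstdio>
#include <map>

enum Dscp { DSCP_NO_CHANGE = -1, DSCP_AF41 = 34 };
const int OPT_DSCP = 10;  // hypothetical option key for this sketch

// Look the DSCP option up in the cached option map; default to no marking.
Dscp DefaultDscpValue(const std::map<int, int>& options) {
  auto it = options.find(OPT_DSCP);
  if (it == options.end()) return DSCP_NO_CHANGE;
  return static_cast<Dscp>(it->second);
}

int main() {
  std::map<int, int> options;
  std::printf("%d\n", DefaultDscpValue(options));  // -1: nothing was set
  options[OPT_DSCP] = DSCP_AF41;
  std::printf("%d\n", DefaultDscpValue(options));  // 34
}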
+ talk_base::DiffServCodePoint DefaultDscpValue() const; + private: talk_base::Thread* thread() { return worker_thread_; } PortAllocatorSession* allocator_session() { diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/p2ptransportchannel_unittest.cc b/chromium/third_party/libjingle/source/talk/p2p/base/p2ptransportchannel_unittest.cc index 32504debba0..e3cddc0e22e 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/p2ptransportchannel_unittest.cc +++ b/chromium/third_party/libjingle/source/talk/p2p/base/p2ptransportchannel_unittest.cc @@ -25,6 +25,7 @@ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ +#include "talk/base/dscp.h" #include "talk/base/fakenetwork.h" #include "talk/base/firewallsocketserver.h" #include "talk/base/gunit.h" @@ -620,7 +621,7 @@ class P2PTransportChannelTestBase : public testing::Test, } int SendData(cricket::TransportChannel* channel, const char* data, size_t len) { - return channel->SendPacket(data, len, 0); + return channel->SendPacket(data, len, talk_base::DSCP_NO_CHANGE, 0); } bool CheckDataOnChannel(cricket::TransportChannel* channel, const char* data, int len) { @@ -1022,13 +1023,15 @@ const P2PTransportChannelTest::Result* #define FLAKY_P2P_TEST(x, y) \ P2P_TEST_DECLARATION(x, y, DISABLED_) +// TODO(holmer): Disabled due to randomly failing on webrtc buildbots. +// Issue: webrtc/2383 #define P2P_TEST_SET(x) \ P2P_TEST(x, OPEN) \ - P2P_TEST(x, NAT_FULL_CONE) \ - P2P_TEST(x, NAT_ADDR_RESTRICTED) \ + FLAKY_P2P_TEST(x, NAT_FULL_CONE) \ + FLAKY_P2P_TEST(x, NAT_ADDR_RESTRICTED) \ P2P_TEST(x, NAT_PORT_RESTRICTED) \ P2P_TEST(x, NAT_SYMMETRIC) \ - P2P_TEST(x, NAT_DOUBLE_CONE) \ + FLAKY_P2P_TEST(x, NAT_DOUBLE_CONE) \ P2P_TEST(x, NAT_SYMMETRIC_THEN_CONE) \ P2P_TEST(x, BLOCK_UDP) \ P2P_TEST(x, BLOCK_UDP_AND_INCOMING_TCP) \ @@ -1144,7 +1147,7 @@ TEST_F(P2PTransportChannelTest, GetStats) { } // Test that we properly handle getting a STUN error due to slow signaling. -TEST_F(P2PTransportChannelTest, SlowSignaling) { +TEST_F(P2PTransportChannelTest, DISABLED_SlowSignaling) { ConfigureEndpoints(OPEN, NAT_SYMMETRIC, kDefaultPortAllocatorFlags, kDefaultPortAllocatorFlags, @@ -1266,6 +1269,150 @@ TEST_F(P2PTransportChannelTest, TestTcpConnectionsFromActiveToPassive) { DestroyChannels(); } +TEST_F(P2PTransportChannelTest, TestBundleAllocatorToBundleAllocator) { + AddAddress(0, kPublicAddrs[0]); + AddAddress(1, kPublicAddrs[1]); + SetAllocatorFlags(0, cricket::PORTALLOCATOR_ENABLE_BUNDLE); + SetAllocatorFlags(1, cricket::PORTALLOCATOR_ENABLE_BUNDLE); + + CreateChannels(2); + + EXPECT_TRUE_WAIT(ep1_ch1()->readable() && + ep1_ch1()->writable() && + ep2_ch1()->readable() && + ep2_ch1()->writable(), + 1000); + EXPECT_TRUE(ep1_ch1()->best_connection() && + ep2_ch1()->best_connection()); + + EXPECT_FALSE(ep1_ch2()->readable()); + EXPECT_FALSE(ep1_ch2()->writable()); + EXPECT_FALSE(ep2_ch2()->readable()); + EXPECT_FALSE(ep2_ch2()->writable()); + + TestSendRecv(1); // Only 1 channel is writable per Endpoint. + DestroyChannels(); +} + +TEST_F(P2PTransportChannelTest, TestBundleAllocatorToNonBundleAllocator) { + AddAddress(0, kPublicAddrs[0]); + AddAddress(1, kPublicAddrs[1]); + // Enable BUNDLE flag at one side. 
+ SetAllocatorFlags(0, cricket::PORTALLOCATOR_ENABLE_BUNDLE); + + CreateChannels(2); + + EXPECT_TRUE_WAIT(ep1_ch1()->readable() && + ep1_ch1()->writable() && + ep2_ch1()->readable() && + ep2_ch1()->writable(), + 1000); + EXPECT_TRUE_WAIT(ep1_ch2()->readable() && + ep1_ch2()->writable() && + ep2_ch2()->readable() && + ep2_ch2()->writable(), + 1000); + + EXPECT_TRUE(ep1_ch1()->best_connection() && + ep2_ch1()->best_connection()); + EXPECT_TRUE(ep1_ch2()->best_connection() && + ep2_ch2()->best_connection()); + + TestSendRecv(2); + DestroyChannels(); +} + +TEST_F(P2PTransportChannelTest, TestIceRoleConflictWithoutBundle) { + AddAddress(0, kPublicAddrs[0]); + AddAddress(1, kPublicAddrs[1]); + TestSignalRoleConflict(); +} + +TEST_F(P2PTransportChannelTest, TestIceRoleConflictWithBundle) { + AddAddress(0, kPublicAddrs[0]); + AddAddress(1, kPublicAddrs[1]); + SetAllocatorFlags(0, cricket::PORTALLOCATOR_ENABLE_BUNDLE); + SetAllocatorFlags(1, cricket::PORTALLOCATOR_ENABLE_BUNDLE); + TestSignalRoleConflict(); +} + +// Tests that the ice configs (protocol, tiebreaker and role) can be passed +// down to ports. +TEST_F(P2PTransportChannelTest, TestIceConfigWillPassDownToPort) { + AddAddress(0, kPublicAddrs[0]); + AddAddress(1, kPublicAddrs[1]); + + SetIceRole(0, cricket::ICEROLE_CONTROLLING); + SetIceProtocol(0, cricket::ICEPROTO_GOOGLE); + SetIceTiebreaker(0, kTiebreaker1); + SetIceRole(1, cricket::ICEROLE_CONTROLLING); + SetIceProtocol(1, cricket::ICEPROTO_RFC5245); + SetIceTiebreaker(1, kTiebreaker2); + + CreateChannels(1); + + EXPECT_EQ_WAIT(2u, ep1_ch1()->ports().size(), 1000); + + const std::vector<cricket::PortInterface *> ports_before = ep1_ch1()->ports(); + for (size_t i = 0; i < ports_before.size(); ++i) { + EXPECT_EQ(cricket::ICEROLE_CONTROLLING, ports_before[i]->GetIceRole()); + EXPECT_EQ(cricket::ICEPROTO_GOOGLE, ports_before[i]->IceProtocol()); + EXPECT_EQ(kTiebreaker1, ports_before[i]->IceTiebreaker()); + } + + ep1_ch1()->SetIceRole(cricket::ICEROLE_CONTROLLED); + ep1_ch1()->SetIceProtocolType(cricket::ICEPROTO_RFC5245); + ep1_ch1()->SetIceTiebreaker(kTiebreaker2); + + const std::vector<cricket::PortInterface *> ports_after = ep1_ch1()->ports(); + for (size_t i = 0; i < ports_after.size(); ++i) { + EXPECT_EQ(cricket::ICEROLE_CONTROLLED, ports_before[i]->GetIceRole()); + EXPECT_EQ(cricket::ICEPROTO_RFC5245, ports_before[i]->IceProtocol()); + // SetIceTiebreaker after Connect() has been called will fail. So expect the + // original value. + EXPECT_EQ(kTiebreaker1, ports_before[i]->IceTiebreaker()); + } + + EXPECT_TRUE_WAIT(ep1_ch1()->readable() && + ep1_ch1()->writable() && + ep2_ch1()->readable() && + ep2_ch1()->writable(), + 1000); + + EXPECT_TRUE(ep1_ch1()->best_connection() && + ep2_ch1()->best_connection()); + + TestSendRecv(1); +} + +// Verify that we can set DSCP value and retrieve properly from P2PTC. 
+TEST_F(P2PTransportChannelTest, TestDefaultDscpValue) { + AddAddress(0, kPublicAddrs[0]); + AddAddress(1, kPublicAddrs[1]); + + CreateChannels(1); + EXPECT_EQ(talk_base::DSCP_NO_CHANGE, + GetEndpoint(0)->cd1_.ch_->DefaultDscpValue()); + EXPECT_EQ(talk_base::DSCP_NO_CHANGE, + GetEndpoint(1)->cd1_.ch_->DefaultDscpValue()); + GetEndpoint(0)->cd1_.ch_->SetOption( + talk_base::Socket::OPT_DSCP, talk_base::DSCP_CS6); + GetEndpoint(1)->cd1_.ch_->SetOption( + talk_base::Socket::OPT_DSCP, talk_base::DSCP_CS6); + EXPECT_EQ(talk_base::DSCP_CS6, + GetEndpoint(0)->cd1_.ch_->DefaultDscpValue()); + EXPECT_EQ(talk_base::DSCP_CS6, + GetEndpoint(1)->cd1_.ch_->DefaultDscpValue()); + GetEndpoint(0)->cd1_.ch_->SetOption( + talk_base::Socket::OPT_DSCP, talk_base::DSCP_AF41); + GetEndpoint(1)->cd1_.ch_->SetOption( + talk_base::Socket::OPT_DSCP, talk_base::DSCP_AF41); + EXPECT_EQ(talk_base::DSCP_AF41, + GetEndpoint(0)->cd1_.ch_->DefaultDscpValue()); + EXPECT_EQ(talk_base::DSCP_AF41, + GetEndpoint(1)->cd1_.ch_->DefaultDscpValue()); +} + // Test what happens when we have 2 users behind the same NAT. This can lead // to interesting behavior because the STUN server will only give out the // address of the outermost NAT. @@ -1306,7 +1453,7 @@ class P2PTransportChannelMultihomedTest : public P2PTransportChannelTestBase { }; // Test that we can establish connectivity when both peers are multihomed. -TEST_F(P2PTransportChannelMultihomedTest, TestBasic) { +TEST_F(P2PTransportChannelMultihomedTest, DISABLED_TestBasic) { AddAddress(0, kPublicAddrs[0]); AddAddress(0, kAlternateAddrs[0]); AddAddress(1, kPublicAddrs[1]); @@ -1387,119 +1534,3 @@ TEST_F(P2PTransportChannelMultihomedTest, TestDrain) { DestroyChannels(); } - -TEST_F(P2PTransportChannelTest, TestBundleAllocatorToBundleAllocator) { - AddAddress(0, kPublicAddrs[0]); - AddAddress(1, kPublicAddrs[1]); - SetAllocatorFlags(0, cricket::PORTALLOCATOR_ENABLE_BUNDLE); - SetAllocatorFlags(1, cricket::PORTALLOCATOR_ENABLE_BUNDLE); - - CreateChannels(2); - - EXPECT_TRUE_WAIT(ep1_ch1()->readable() && - ep1_ch1()->writable() && - ep2_ch1()->readable() && - ep2_ch1()->writable(), - 1000); - EXPECT_TRUE(ep1_ch1()->best_connection() && - ep2_ch1()->best_connection()); - - EXPECT_FALSE(ep1_ch2()->readable()); - EXPECT_FALSE(ep1_ch2()->writable()); - EXPECT_FALSE(ep2_ch2()->readable()); - EXPECT_FALSE(ep2_ch2()->writable()); - - TestSendRecv(1); // Only 1 channel is writable per Endpoint. - DestroyChannels(); -} - -TEST_F(P2PTransportChannelTest, TestBundleAllocatorToNonBundleAllocator) { - AddAddress(0, kPublicAddrs[0]); - AddAddress(1, kPublicAddrs[1]); - // Enable BUNDLE flag at one side. 
- SetAllocatorFlags(0, cricket::PORTALLOCATOR_ENABLE_BUNDLE); - - CreateChannels(2); - - EXPECT_TRUE_WAIT(ep1_ch1()->readable() && - ep1_ch1()->writable() && - ep2_ch1()->readable() && - ep2_ch1()->writable(), - 1000); - EXPECT_TRUE_WAIT(ep1_ch2()->readable() && - ep1_ch2()->writable() && - ep2_ch2()->readable() && - ep2_ch2()->writable(), - 1000); - - EXPECT_TRUE(ep1_ch1()->best_connection() && - ep2_ch1()->best_connection()); - EXPECT_TRUE(ep1_ch2()->best_connection() && - ep2_ch2()->best_connection()); - - TestSendRecv(2); - DestroyChannels(); -} - -TEST_F(P2PTransportChannelTest, TestIceRoleConflictWithoutBundle) { - AddAddress(0, kPublicAddrs[0]); - AddAddress(1, kPublicAddrs[1]); - TestSignalRoleConflict(); -} - -TEST_F(P2PTransportChannelTest, TestIceRoleConflictWithBundle) { - AddAddress(0, kPublicAddrs[0]); - AddAddress(1, kPublicAddrs[1]); - SetAllocatorFlags(0, cricket::PORTALLOCATOR_ENABLE_BUNDLE); - SetAllocatorFlags(1, cricket::PORTALLOCATOR_ENABLE_BUNDLE); - TestSignalRoleConflict(); -} - -// Tests that the ice configs (protocol, tiebreaker and role) can be passed -// down to ports. -TEST_F(P2PTransportChannelTest, TestIceConfigWillPassDownToPort) { - AddAddress(0, kPublicAddrs[0]); - AddAddress(1, kPublicAddrs[1]); - - SetIceRole(0, cricket::ICEROLE_CONTROLLING); - SetIceProtocol(0, cricket::ICEPROTO_GOOGLE); - SetIceTiebreaker(0, kTiebreaker1); - SetIceRole(1, cricket::ICEROLE_CONTROLLING); - SetIceProtocol(1, cricket::ICEPROTO_RFC5245); - SetIceTiebreaker(1, kTiebreaker2); - - CreateChannels(1); - - EXPECT_EQ_WAIT(2u, ep1_ch1()->ports().size(), 1000); - - const std::vector<cricket::PortInterface *> ports_before = ep1_ch1()->ports(); - for (size_t i = 0; i < ports_before.size(); ++i) { - EXPECT_EQ(cricket::ICEROLE_CONTROLLING, ports_before[i]->GetIceRole()); - EXPECT_EQ(cricket::ICEPROTO_GOOGLE, ports_before[i]->IceProtocol()); - EXPECT_EQ(kTiebreaker1, ports_before[i]->IceTiebreaker()); - } - - ep1_ch1()->SetIceRole(cricket::ICEROLE_CONTROLLED); - ep1_ch1()->SetIceProtocolType(cricket::ICEPROTO_RFC5245); - ep1_ch1()->SetIceTiebreaker(kTiebreaker2); - - const std::vector<cricket::PortInterface *> ports_after = ep1_ch1()->ports(); - for (size_t i = 0; i < ports_after.size(); ++i) { - EXPECT_EQ(cricket::ICEROLE_CONTROLLED, ports_before[i]->GetIceRole()); - EXPECT_EQ(cricket::ICEPROTO_RFC5245, ports_before[i]->IceProtocol()); - // SetIceTiebreaker after Connect() has been called will fail. So expect the - // original value. - EXPECT_EQ(kTiebreaker1, ports_before[i]->IceTiebreaker()); - } - - EXPECT_TRUE_WAIT(ep1_ch1()->readable() && - ep1_ch1()->writable() && - ep2_ch1()->readable() && - ep2_ch1()->writable(), - 1000); - - EXPECT_TRUE(ep1_ch1()->best_connection() && - ep2_ch1()->best_connection()); - - TestSendRecv(1); -} diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/port.cc b/chromium/third_party/libjingle/source/talk/p2p/base/port.cc index b2bfea1859c..6e688dace56 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/port.cc +++ b/chromium/third_party/libjingle/source/talk/p2p/base/port.cc @@ -112,7 +112,7 @@ const int RTT_RATIO = 3; // 3 : 1 // The delay before we begin checking if this port is useless. const int kPortTimeoutDelay = 30 * 1000; // 30 seconds -const uint32 MSG_CHECKTIMEOUT = 1; +// Used by the Connection. 
const uint32 MSG_DELETE = 1; } @@ -181,7 +181,8 @@ Port::Port(talk_base::Thread* thread, talk_base::Network* network, ice_protocol_(ICEPROTO_GOOGLE), ice_role_(ICEROLE_UNKNOWN), tiebreaker_(0), - shared_socket_(true) { + shared_socket_(true), + default_dscp_(talk_base::DSCP_NO_CHANGE) { Construct(); } @@ -207,7 +208,8 @@ Port::Port(talk_base::Thread* thread, const std::string& type, ice_protocol_(ICEPROTO_GOOGLE), ice_role_(ICEROLE_UNKNOWN), tiebreaker_(0), - shared_socket_(false) { + shared_socket_(false), + default_dscp_(talk_base::DSCP_NO_CHANGE) { ASSERT(factory_ != NULL); Construct(); } @@ -606,7 +608,7 @@ void Port::SendBindingResponse(StunMessage* request, // Send the response message. talk_base::ByteBuffer buf; response.Write(&buf); - if (SendTo(buf.Data(), buf.Length(), addr, false) < 0) { + if (SendTo(buf.Data(), buf.Length(), addr, DefaultDscpValue(), false) < 0) { LOG_J(LS_ERROR, this) << "Failed to send STUN ping response to " << addr.ToSensitiveString(); } @@ -660,7 +662,7 @@ void Port::SendBindingErrorResponse(StunMessage* request, // Send the response message. talk_base::ByteBuffer buf; response.Write(&buf); - SendTo(buf.Data(), buf.Length(), addr, false); + SendTo(buf.Data(), buf.Length(), addr, DefaultDscpValue(), false); LOG_J(LS_INFO, this) << "Sending STUN binding error: reason=" << reason << " to " << addr.ToSensitiveString(); } @@ -916,7 +918,8 @@ void Connection::set_use_candidate_attr(bool enable) { void Connection::OnSendStunPacket(const void* data, size_t size, StunRequest* req) { - if (port_->SendTo(data, size, remote_candidate_.address(), false) < 0) { + if (port_->SendTo(data, size, remote_candidate_.address(), + port_->DefaultDscpValue(), false) < 0) { LOG_J(LS_WARNING, this) << "Failed to send STUN ping " << req->id(); } } @@ -1069,7 +1072,12 @@ void Connection::UpdateState(uint32 now) { // test we can do is a simple window. // If other side has not sent ping after connection has become readable, use // |last_data_received_| as the indication. - if ((read_state_ == STATE_READABLE) && + // If remote endpoint is doing RFC 5245, it's not required to send ping + // after connection is established. If this connection is serving a data + // channel, it may not be in a position to send media continuously. Do not + // mark connection timeout if it's in RFC5245 mode. + // Below check will be performed with end point if it's doing google-ice. 
+ if (port_->IsGoogleIce() && (read_state_ == STATE_READABLE) && (last_ping_received_ + CONNECTION_READ_TIMEOUT <= now) && (last_data_received_ + CONNECTION_READ_TIMEOUT <= now)) { LOG_J(LS_INFO, this) << "Unreadable after " @@ -1384,12 +1392,13 @@ ProxyConnection::ProxyConnection(Port* port, size_t index, : Connection(port, index, candidate), error_(0) { } -int ProxyConnection::Send(const void* data, size_t size) { +int ProxyConnection::Send(const void* data, size_t size, + talk_base::DiffServCodePoint dscp) { if (write_state_ == STATE_WRITE_INIT || write_state_ == STATE_WRITE_TIMEOUT) { error_ = EWOULDBLOCK; return SOCKET_ERROR; } - int sent = port_->SendTo(data, size, remote_candidate_.address(), true); + int sent = port_->SendTo(data, size, remote_candidate_.address(), dscp, true); if (sent <= 0) { ASSERT(sent < 0); error_ = port_->GetError(); diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/port.h b/chromium/third_party/libjingle/source/talk/p2p/base/port.h index f533f627b6f..7b89e5546e8 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/port.h +++ b/chromium/third_party/libjingle/source/talk/p2p/base/port.h @@ -304,7 +304,17 @@ class Port : public PortInterface, public talk_base::MessageHandler, // Returns if Google ICE protocol is used. bool IsGoogleIce() const; + // Returns default DSCP value. + talk_base::DiffServCodePoint DefaultDscpValue() const { + return default_dscp_; + } + protected: + enum { + MSG_CHECKTIMEOUT = 0, + MSG_FIRST_AVAILABLE + }; + void set_type(const std::string& type) { type_ = type; } // Fills in the local address of the port. void AddAddress(const talk_base::SocketAddress& address, @@ -334,6 +344,11 @@ class Port : public PortInterface, public talk_base::MessageHandler, // Checks if the address in addr is compatible with the port's ip. bool IsCompatibleAddress(const talk_base::SocketAddress& addr); + // Default DSCP value for this port. Set by TransportChannel. + void SetDefaultDscpValue(talk_base::DiffServCodePoint dscp) { + default_dscp_ = dscp; + } + private: void Construct(); // Called when one of our connections deletes itself. @@ -372,7 +387,9 @@ class Port : public PortInterface, public talk_base::MessageHandler, IceRole ice_role_; uint64 tiebreaker_; bool shared_socket_; - + // DSCP value for ICE/STUN messages. Set by the P2PTransportChannel after + // port becomes ready. + talk_base::DiffServCodePoint default_dscp_; // Information to use when going through a proxy. std::string user_agent_; talk_base::ProxyInfo proxy_; @@ -447,7 +464,8 @@ class Connection : public talk_base::MessageHandler, // The connection can send and receive packets asynchronously. This matches // the interface of AsyncPacketSocket, which may use UDP or TCP under the // covers. 
- virtual int Send(const void* data, size_t size) = 0; + virtual int Send(const void* data, size_t size, + talk_base::DiffServCodePoint dscp) = 0; // Error if Send() returns < 0 virtual int GetError() = 0; @@ -576,7 +594,8 @@ class ProxyConnection : public Connection { public: ProxyConnection(Port* port, size_t index, const Candidate& candidate); - virtual int Send(const void* data, size_t size); + virtual int Send(const void* data, size_t size, + talk_base::DiffServCodePoint dscp); virtual int GetError() { return error_; } private: diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/port_unittest.cc b/chromium/third_party/libjingle/source/talk/p2p/base/port_unittest.cc index cecefdaeebf..d3e02ac9f58 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/port_unittest.cc +++ b/chromium/third_party/libjingle/source/talk/p2p/base/port_unittest.cc @@ -172,7 +172,7 @@ class TestPort : public Port { } virtual int SendTo( const void* data, size_t size, const talk_base::SocketAddress& addr, - bool payload) { + talk_base::DiffServCodePoint dscp, bool payload) { if (!payload) { IceMessage* msg = new IceMessage; ByteBuffer* buf = new ByteBuffer(static_cast<const char*>(data), size); @@ -787,10 +787,12 @@ class FakeAsyncPacketSocket : public AsyncPacketSocket { } // Send a packet. - virtual int Send(const void *pv, size_t cb) { + virtual int Send(const void *pv, size_t cb, + talk_base::DiffServCodePoint dscp) { return static_cast<int>(cb); } - virtual int SendTo(const void *pv, size_t cb, const SocketAddress& addr) { + virtual int SendTo(const void *pv, size_t cb, const SocketAddress& addr, + talk_base::DiffServCodePoint dscp) { return static_cast<int>(cb); } virtual int Close() { @@ -1224,6 +1226,26 @@ TEST_F(PortTest, TestSkipCrossFamilyUdp) { TestCrossFamilyPorts(SOCK_DGRAM); } +// This test verifies that the DSCP value set through the SetOption interface +// can be retrieved through DefaultDscpValue. +TEST_F(PortTest, TestDefaultDscpValue) { + talk_base::scoped_ptr<UDPPort> udpport(CreateUdpPort(kLocalAddr1)); + udpport->SetOption(talk_base::Socket::OPT_DSCP, talk_base::DSCP_CS6); + EXPECT_EQ(talk_base::DSCP_CS6, udpport->DefaultDscpValue()); + talk_base::scoped_ptr<TCPPort> tcpport(CreateTcpPort(kLocalAddr1)); + tcpport->SetOption(talk_base::Socket::OPT_DSCP, talk_base::DSCP_AF31); + EXPECT_EQ(talk_base::DSCP_AF31, tcpport->DefaultDscpValue()); + talk_base::scoped_ptr<StunPort> stunport( + CreateStunPort(kLocalAddr1, nat_socket_factory1())); + stunport->SetOption(talk_base::Socket::OPT_DSCP, talk_base::DSCP_AF41); + EXPECT_EQ(talk_base::DSCP_AF41, stunport->DefaultDscpValue()); + talk_base::scoped_ptr<TurnPort> turnport(CreateTurnPort( + kLocalAddr1, nat_socket_factory1(), PROTO_UDP, PROTO_UDP)); + turnport->SetOption(talk_base::Socket::OPT_DSCP, talk_base::DSCP_CS7); + EXPECT_EQ(talk_base::DSCP_CS7, turnport->DefaultDscpValue()); + // TODO(mallinath) - Test DSCP through GetOption. +} + // Test sending STUN messages in GICE format. TEST_F(PortTest, TestSendStunMessageAsGice) { talk_base::scoped_ptr<TestPort> lport( @@ -2127,14 +2149,16 @@ TEST_F(PortTest, TestWritableState) { // Data should be unsendable until the connection is accepted. char data[] = "abcd"; int data_size = ARRAY_SIZE(data); - EXPECT_EQ(SOCKET_ERROR, ch1.conn()->Send(data, data_size)); + EXPECT_EQ(SOCKET_ERROR, + ch1.conn()->Send(data, data_size, talk_base::DSCP_NO_CHANGE)); // Accept the connection to return the binding response, transition to // writable, and allow data to be sent.
ch2.AcceptConnection(); EXPECT_EQ_WAIT(Connection::STATE_WRITABLE, ch1.conn()->write_state(), kTimeout); - EXPECT_EQ(data_size, ch1.conn()->Send(data, data_size)); + EXPECT_EQ(data_size, + ch1.conn()->Send(data, data_size, talk_base::DSCP_NO_CHANGE)); // Ask the connection to update state as if enough time has passed to lose // full writability and 5 pings went unresponded to. We'll accomplish the @@ -2147,7 +2171,8 @@ TEST_F(PortTest, TestWritableState) { EXPECT_EQ(Connection::STATE_WRITE_UNRELIABLE, ch1.conn()->write_state()); // Data should be able to be sent in this state. - EXPECT_EQ(data_size, ch1.conn()->Send(data, data_size)); + EXPECT_EQ(data_size, + ch1.conn()->Send(data, data_size, talk_base::DSCP_NO_CHANGE)); // And now allow the other side to process the pings and send binding // responses. @@ -2164,7 +2189,8 @@ TEST_F(PortTest, TestWritableState) { EXPECT_EQ(Connection::STATE_WRITE_TIMEOUT, ch1.conn()->write_state()); // Now that the connection has completely timed out, data send should fail. - EXPECT_EQ(SOCKET_ERROR, ch1.conn()->Send(data, data_size)); + EXPECT_EQ(SOCKET_ERROR, + ch1.conn()->Send(data, data_size, talk_base::DSCP_NO_CHANGE)); ch1.Stop(); ch2.Stop(); diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/portallocatorsessionproxy_unittest.cc b/chromium/third_party/libjingle/source/talk/p2p/base/portallocatorsessionproxy_unittest.cc index 2d3ec3dbe61..689fb968d5d 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/portallocatorsessionproxy_unittest.cc +++ b/chromium/third_party/libjingle/source/talk/p2p/base/portallocatorsessionproxy_unittest.cc @@ -59,7 +59,9 @@ class TestSessionChannel : public sigslot::has_slots<> { proxy_session_->SignalPortReady.connect( this, &TestSessionChannel::OnPortReady); } - virtual ~TestSessionChannel() {} + virtual ~TestSessionChannel() { + delete proxy_session_; + } void OnCandidatesReady(PortAllocatorSession* session, const std::vector<Candidate>& candidates) { EXPECT_EQ(proxy_session_, session); @@ -102,10 +104,11 @@ class PortAllocatorSessionProxyTest : public testing::Test { PortAllocatorSessionProxyTest() : socket_factory_(talk_base::Thread::Current()), allocator_(talk_base::Thread::Current(), NULL), - session_(talk_base::Thread::Current(), &socket_factory_, - "test content", 1, - kIceUfrag0, kIcePwd0), - session_muxer_(new PortAllocatorSessionMuxer(&session_)) { + session_(new cricket::FakePortAllocatorSession( + talk_base::Thread::Current(), &socket_factory_, + "test content", 1, + kIceUfrag0, kIcePwd0)), + session_muxer_(new PortAllocatorSessionMuxer(session_)) { } virtual ~PortAllocatorSessionProxyTest() {} void RegisterSessionProxy(PortAllocatorSessionProxy* proxy) { @@ -124,7 +127,7 @@ class PortAllocatorSessionProxyTest : public testing::Test { protected: talk_base::BasicPacketSocketFactory socket_factory_; cricket::FakePortAllocator allocator_; - cricket::FakePortAllocatorSession session_; + cricket::FakePortAllocatorSession* session_; // Muxer object will be delete itself after all registered session proxies // are deleted. PortAllocatorSessionMuxer* session_muxer_; @@ -143,7 +146,7 @@ TEST_F(PortAllocatorSessionProxyTest, TestLateBinding) { EXPECT_EQ_WAIT(1, channel1->candidates_count(), 1000); EXPECT_EQ(1, channel1->ports_count()); EXPECT_TRUE(channel1->allocation_complete()); - EXPECT_EQ(1, session_.port_config_count()); + EXPECT_EQ(1, session_->port_config_count()); // Creating another PortAllocatorSessionProxy and it also should receive // already happened events. 
PortAllocatorSessionProxy* proxy = @@ -154,7 +157,7 @@ TEST_F(PortAllocatorSessionProxyTest, TestLateBinding) { EXPECT_EQ_WAIT(1, channel2->candidates_count(), 1000); EXPECT_EQ(1, channel2->ports_count()); EXPECT_TRUE_WAIT(channel2->allocation_complete(), 1000); - EXPECT_EQ(1, session_.port_config_count()); + EXPECT_EQ(1, session_->port_config_count()); delete channel1; delete channel2; } diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/portinterface.h b/chromium/third_party/libjingle/source/talk/p2p/base/portinterface.h index b956f9abe50..6ea63466c9d 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/portinterface.h +++ b/chromium/third_party/libjingle/source/talk/p2p/base/portinterface.h @@ -30,6 +30,7 @@ #include <string> +#include "talk/base/dscp.h" #include "talk/base/socketaddress.h" #include "talk/p2p/base/transport.h" @@ -90,16 +91,16 @@ class PortInterface { // Functions on the underlying socket(s). virtual int SetOption(talk_base::Socket::Option opt, int value) = 0; - virtual int GetError() = 0; - virtual int GetOption(talk_base::Socket::Option opt, int* value) = 0; + virtual int GetError() = 0; virtual const std::vector<Candidate>& Candidates() const = 0; // Sends the given packet to the given address, provided that the address is // that of a connection or an address that has sent to us already. virtual int SendTo(const void* data, size_t size, - const talk_base::SocketAddress& addr, bool payload) = 0; + const talk_base::SocketAddress& addr, + talk_base::DiffServCodePoint dscp, bool payload) = 0; // Indicates that we received a successful STUN binding request from an // address that doesn't correspond to any current connection. To turn this diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/portproxy.cc b/chromium/third_party/libjingle/source/talk/p2p/base/portproxy.cc index 2c6119aa5d3..eae39f1612d 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/portproxy.cc +++ b/chromium/third_party/libjingle/source/talk/p2p/base/portproxy.cc @@ -97,9 +97,10 @@ Connection* PortProxy::CreateConnection(const Candidate& remote_candidate, int PortProxy::SendTo(const void* data, size_t size, const talk_base::SocketAddress& addr, + talk_base::DiffServCodePoint dscp, bool payload) { ASSERT(impl_ != NULL); - return impl_->SendTo(data, size, addr, payload); + return impl_->SendTo(data, size, addr, dscp, payload); } int PortProxy::SetOption(talk_base::Socket::Option opt, @@ -114,7 +115,6 @@ int PortProxy::GetOption(talk_base::Socket::Option opt, return impl_->GetOption(opt, value); } - int PortProxy::GetError() { ASSERT(impl_ != NULL); return impl_->GetError(); diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/portproxy.h b/chromium/third_party/libjingle/source/talk/p2p/base/portproxy.h index 25808ea361a..da326646dcc 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/portproxy.h +++ b/chromium/third_party/libjingle/source/talk/p2p/base/portproxy.h @@ -68,7 +68,9 @@ class PortProxy : public PortInterface, public sigslot::has_slots<> { const talk_base::SocketAddress& remote_addr); virtual int SendTo(const void* data, size_t size, - const talk_base::SocketAddress& addr, bool payload); + const talk_base::SocketAddress& addr, + talk_base::DiffServCodePoint dscp, + bool payload); virtual int SetOption(talk_base::Socket::Option opt, int value); virtual int GetOption(talk_base::Socket::Option opt, int* value); virtual int GetError(); diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/pseudotcp.cc 
b/chromium/third_party/libjingle/source/talk/p2p/base/pseudotcp.cc index 6a2c1d26b7e..b647fbf3dec 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/pseudotcp.cc +++ b/chromium/third_party/libjingle/source/talk/p2p/base/pseudotcp.cc @@ -36,6 +36,7 @@ #include "talk/base/byteorder.h" #include "talk/base/common.h" #include "talk/base/logging.h" +#include "talk/base/scoped_ptr.h" #include "talk/base/socket.h" #include "talk/base/stringutils.h" #include "talk/base/timeutils.h" @@ -538,25 +539,24 @@ IPseudoTcpNotify::WriteResult PseudoTcp::packet(uint32 seq, uint8 flags, uint32 now = Now(); - uint8 buffer[MAX_PACKET]; - long_to_bytes(m_conv, buffer); - long_to_bytes(seq, buffer + 4); - long_to_bytes(m_rcv_nxt, buffer + 8); + talk_base::scoped_array<uint8> buffer(new uint8[MAX_PACKET]); + long_to_bytes(m_conv, buffer.get()); + long_to_bytes(seq, buffer.get() + 4); + long_to_bytes(m_rcv_nxt, buffer.get() + 8); buffer[12] = 0; buffer[13] = flags; - short_to_bytes(static_cast<uint16>(m_rcv_wnd >> m_rwnd_scale), buffer + 14); + short_to_bytes( + static_cast<uint16>(m_rcv_wnd >> m_rwnd_scale), buffer.get() + 14); // Timestamp computations - long_to_bytes(now, buffer + 16); - long_to_bytes(m_ts_recent, buffer + 20); + long_to_bytes(now, buffer.get() + 16); + long_to_bytes(m_ts_recent, buffer.get() + 20); m_ts_lastack = m_rcv_nxt; if (len) { size_t bytes_read = 0; - talk_base::StreamResult result = m_sbuf.ReadOffset(buffer + HEADER_SIZE, - len, - offset, - &bytes_read); + talk_base::StreamResult result = m_sbuf.ReadOffset( + buffer.get() + HEADER_SIZE, len, offset, &bytes_read); UNUSED(result); ASSERT(result == talk_base::SR_SUCCESS); ASSERT(static_cast<uint32>(bytes_read) == len); @@ -573,7 +573,8 @@ IPseudoTcpNotify::WriteResult PseudoTcp::packet(uint32 seq, uint8 flags, << "><LEN=" << len << ">"; #endif // _DEBUGMSG - IPseudoTcpNotify::WriteResult wres = m_notify->TcpWritePacket(this, reinterpret_cast<char *>(buffer), len + HEADER_SIZE); + IPseudoTcpNotify::WriteResult wres = m_notify->TcpWritePacket( + this, reinterpret_cast<char *>(buffer.get()), len + HEADER_SIZE); // Note: When len is 0, this is an ACK packet. We don't read the return value for those, // and thus we won't retry. So go ahead and treat the packet as a success (basically simulate // as if it were dropped), which will prevent our timers from being messed up. 
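The change that recurs throughout this patch is the extra talk_base::DiffServCodePoint argument on every Send/SendTo/SendPacket path, together with ports caching the value passed via Socket::OPT_DSCP so that STUN traffic sent on their behalf can fall back to a default DSCP. A simplified, self-contained sketch of that plumbing follows; it is an illustration only, with the enum values, the FakeDscpChannel class, and its option map standing in for the real talk_base and cricket types.

#include <cstddef>
#include <map>

// Stand-ins for talk_base::DiffServCodePoint and talk_base::Socket::Option;
// the real definitions live in talk/base/dscp.h and talk/base/socket.h.
enum DiffServCodePoint { DSCP_NO_CHANGE = -1, DSCP_AF41 = 34, DSCP_CS6 = 48 };
enum SocketOption { OPT_DSCP = 1 };

class FakeDscpChannel {
 public:
  // Mirrors P2PTransportChannel::SetOption(): the DSCP choice is cached in an
  // option map instead of being applied to a socket directly.
  int SetOption(SocketOption opt, int value) {
    options_[opt] = value;
    return 0;
  }

  // Mirrors P2PTransportChannel::DefaultDscpValue(): report the cached value,
  // or DSCP_NO_CHANGE when the option was never set.
  DiffServCodePoint DefaultDscpValue() const {
    std::map<SocketOption, int>::const_iterator it = options_.find(OPT_DSCP);
    if (it == options_.end())
      return DSCP_NO_CHANGE;
    return static_cast<DiffServCodePoint>(it->second);
  }

  // Mirrors the new SendPacket() shape: callers pass a per-packet DSCP, which
  // a real channel would forward to Connection::Send() and Port::SendTo().
  int SendPacket(const char* data, size_t len, DiffServCodePoint dscp,
                 int flags) {
    (void)data;
    (void)dscp;
    return flags == 0 ? static_cast<int>(len) : -1;
  }

 private:
  std::map<SocketOption, int> options_;
};

int main() {
  FakeDscpChannel channel;
  channel.SetOption(OPT_DSCP, DSCP_CS6);
  return channel.DefaultDscpValue() == DSCP_CS6 ? 0 : 1;
}

Callers with no per-packet preference pass DSCP_NO_CHANGE, which is what the updated unit tests in this patch do.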
diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/rawtransportchannel.cc b/chromium/third_party/libjingle/source/talk/p2p/base/rawtransportchannel.cc index 54adab13ca9..ec225029b8b 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/rawtransportchannel.cc +++ b/chromium/third_party/libjingle/source/talk/p2p/base/rawtransportchannel.cc @@ -74,14 +74,16 @@ RawTransportChannel::~RawTransportChannel() { delete allocator_session_; } -int RawTransportChannel::SendPacket(const char *data, size_t size, int flags) { +int RawTransportChannel::SendPacket(const char *data, size_t size, + talk_base::DiffServCodePoint dscp, + int flags) { if (port_ == NULL) return -1; if (remote_address_.IsNil()) return -1; if (flags != 0) return -1; - return port_->SendTo(data, size, remote_address_, true); + return port_->SendTo(data, size, remote_address_, dscp, true); } int RawTransportChannel::SetOption(talk_base::Socket::Option opt, int value) { diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/rawtransportchannel.h b/chromium/third_party/libjingle/source/talk/p2p/base/rawtransportchannel.h index 0f606b72797..2aac2b5edf6 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/rawtransportchannel.h +++ b/chromium/third_party/libjingle/source/talk/p2p/base/rawtransportchannel.h @@ -64,7 +64,8 @@ class RawTransportChannel : public TransportChannelImpl, virtual ~RawTransportChannel(); // Implementation of normal channel packet sending. - virtual int SendPacket(const char *data, size_t len, int flags); + virtual int SendPacket(const char *data, size_t len, + talk_base::DiffServCodePoint dscp, int flags); virtual int SetOption(talk_base::Socket::Option opt, int value); virtual int GetError(); @@ -101,6 +102,55 @@ class RawTransportChannel : public TransportChannelImpl, virtual void SetIcePwd(const std::string& ice_pwd) {} virtual void SetRemoteIceMode(IceMode mode) {} + virtual bool GetStats(ConnectionInfos* infos) { + return false; + } + + // DTLS methods. + virtual bool IsDtlsActive() const { return false; } + + // Default implementation. + virtual bool GetSslRole(talk_base::SSLRole* role) const { + return false; + } + + virtual bool SetSslRole(talk_base::SSLRole role) { + return false; + } + + // Set up the ciphers to use for DTLS-SRTP. + virtual bool SetSrtpCiphers(const std::vector<std::string>& ciphers) { + return false; + } + + // Find out which DTLS-SRTP cipher was negotiated + virtual bool GetSrtpCipher(std::string* cipher) { + return false; + } + + // Allows key material to be extracted for external encryption. + virtual bool ExportKeyingMaterial( + const std::string& label, + const uint8* context, + size_t context_len, + bool use_context, + uint8* result, + size_t result_len) { + return false; + } + + virtual bool SetLocalIdentity(talk_base::SSLIdentity* identity) { + return false; + } + + // Set DTLS Remote fingerprint. Must be after local identity set. 
+ virtual bool SetRemoteFingerprint( + const std::string& digest_alg, + const uint8* digest, + size_t digest_len) { + return false; + } + private: RawTransport* raw_transport_; talk_base::Thread *worker_thread_; diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/relayport.cc b/chromium/third_party/libjingle/source/talk/p2p/base/relayport.cc index 0cd40e5aec5..ff8c07c5514 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/relayport.cc +++ b/chromium/third_party/libjingle/source/talk/p2p/base/relayport.cc @@ -67,7 +67,7 @@ class RelayConnection : public sigslot::has_slots<> { bool CheckResponse(StunMessage* msg); // Sends data to the relay server. - int Send(const void* pv, size_t cb); + int Send(const void* pv, size_t cb, talk_base::DiffServCodePoint dscp); // Sends a STUN allocate request message to the relay server. void SendAllocateRequest(RelayEntry* entry, int delay); @@ -123,7 +123,8 @@ class RelayEntry : public talk_base::MessageHandler, // Sends a packet to the given destination address using the socket of this // entry. This will wrap the packet in STUN if necessary. int SendTo(const void* data, size_t size, - const talk_base::SocketAddress& addr); + const talk_base::SocketAddress& addr, + talk_base::DiffServCodePoint dscp); // Schedules a keep-alive allocate request. void ScheduleKeepAlive(); @@ -163,7 +164,8 @@ class RelayEntry : public talk_base::MessageHandler, // Sends the given data on the socket to the server with no wrapping. This // returns the number of bytes written or -1 if an error occurred. - int SendPacket(const void* data, size_t size); + int SendPacket(const void* data, size_t size, + talk_base::DiffServCodePoint dscp); }; // Handles an allocate request for a particular RelayEntry. @@ -300,7 +302,9 @@ Connection* RelayPort::CreateConnection(const Candidate& address, } int RelayPort::SendTo(const void* data, size_t size, - const talk_base::SocketAddress& addr, bool payload) { + const talk_base::SocketAddress& addr, + talk_base::DiffServCodePoint dscp, + bool payload) { // Try to find an entry for this specific address. Note that the first entry // created was not given an address initially, so it can be set to the first // address that comes along. @@ -341,7 +345,7 @@ int RelayPort::SendTo(const void* data, size_t size, } // Send the actual contents to the server using the usual mechanism. - int sent = entry->SendTo(data, size, addr); + int sent = entry->SendTo(data, size, addr, dscp); if (sent <= 0) { ASSERT(sent < 0); error_ = entry->GetError(); @@ -354,6 +358,14 @@ int RelayPort::SendTo(const void* data, size_t size, int RelayPort::SetOption(talk_base::Socket::Option opt, int value) { int result = 0; + // DSCP option is not passed to the socket. + // TODO(mallinath) - After we have the support on socket, + // remove this specialization. + if (opt == talk_base::Socket::OPT_DSCP) { + SetDefaultDscpValue(static_cast<talk_base::DiffServCodePoint>(value)); + return result; + } + for (size_t i = 0; i < entries_.size(); ++i) { if (entries_[i]->SetSocketOption(opt, value) < 0) { result = -1; @@ -418,7 +430,9 @@ bool RelayConnection::CheckResponse(StunMessage* msg) { void RelayConnection::OnSendPacket(const void* data, size_t size, StunRequest* req) { - int sent = socket_->SendTo(data, size, GetAddress()); + // TODO(mallinath) Find a way to get DSCP value from Port. 
+ int sent = socket_->SendTo( + data, size, GetAddress(), talk_base::DSCP_NO_CHANGE); if (sent <= 0) { LOG(LS_VERBOSE) << "OnSendPacket: failed sending to " << GetAddress() << std::strerror(socket_->GetError()); @@ -426,8 +440,9 @@ void RelayConnection::OnSendPacket(const void* data, size_t size, } } -int RelayConnection::Send(const void* pv, size_t cb) { - return socket_->SendTo(pv, cb, GetAddress()); +int RelayConnection::Send(const void* pv, size_t cb, + talk_base::DiffServCodePoint dscp) { + return socket_->SendTo(pv, cb, GetAddress(), dscp); } void RelayConnection::SendAllocateRequest(RelayEntry* entry, int delay) { @@ -546,11 +561,12 @@ void RelayEntry::OnConnect(const talk_base::SocketAddress& mapped_addr, } int RelayEntry::SendTo(const void* data, size_t size, - const talk_base::SocketAddress& addr) { + const talk_base::SocketAddress& addr, + talk_base::DiffServCodePoint dscp) { // If this connection is locked to the address given, then we can send the // packet with no wrapper. if (locked_ && (ext_addr_ == addr)) - return SendPacket(data, size); + return SendPacket(data, size, dscp); // Otherwise, we must wrap the given data in a STUN SEND request so that we // can communicate the destination address to the server. @@ -598,7 +614,7 @@ int RelayEntry::SendTo(const void* data, size_t size, talk_base::ByteBuffer buf; request.Write(&buf); - return SendPacket(buf.Data(), buf.Length()); + return SendPacket(buf.Data(), buf.Length(), dscp); } void RelayEntry::ScheduleKeepAlive() { @@ -744,12 +760,13 @@ void RelayEntry::OnReadyToSend(talk_base::AsyncPacketSocket* socket) { } } -int RelayEntry::SendPacket(const void* data, size_t size) { +int RelayEntry::SendPacket(const void* data, size_t size, + talk_base::DiffServCodePoint dscp) { int sent = 0; if (current_connection_) { // We are connected, no need to send packets anywhere other than to // the current connection. - sent = current_connection_->Send(data, size); + sent = current_connection_->Send(data, size, dscp); } return sent; } diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/relayport.h b/chromium/third_party/libjingle/source/talk/p2p/base/relayport.h index a2bfb74425b..c15e7e01069 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/relayport.h +++ b/chromium/third_party/libjingle/source/talk/p2p/base/relayport.h @@ -92,7 +92,9 @@ class RelayPort : public Port { void SetReady(); virtual int SendTo(const void* data, size_t size, - const talk_base::SocketAddress& addr, bool payload); + const talk_base::SocketAddress& addr, + talk_base::DiffServCodePoint dscp, + bool payload); // Dispatches the given packet to the port or connection as appropriate. void OnReadPacket(const char* data, size_t size, diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/relayserver.cc b/chromium/third_party/libjingle/source/talk/p2p/base/relayserver.cc index 95aa08c39c2..c2cf472d3ba 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/relayserver.cc +++ b/chromium/third_party/libjingle/source/talk/p2p/base/relayserver.cc @@ -51,7 +51,7 @@ static const uint32 kMessageAcceptConnection = 1; // Calls SendTo on the given socket and logs any bad results.
void Send(talk_base::AsyncPacketSocket* socket, const char* bytes, size_t size, const talk_base::SocketAddress& addr) { - int result = socket->SendTo(bytes, size, addr); + int result = socket->SendTo(bytes, size, addr, talk_base::DSCP_NO_CHANGE); if (result < static_cast<int>(size)) { LOG(LS_ERROR) << "SendTo wrote only " << result << " of " << size << " bytes"; diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/session.cc b/chromium/third_party/libjingle/source/talk/p2p/base/session.cc index 74eda019c34..3128393c349 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/session.cc +++ b/chromium/third_party/libjingle/source/talk/p2p/base/session.cc @@ -999,9 +999,10 @@ TransportInfos Session::GetEmptyTransportInfos( TransportInfos tinfos; for (ContentInfos::const_iterator content = contents.begin(); content != contents.end(); ++content) { - tinfos.push_back( - TransportInfo(content->name, - TransportDescription(transport_type(), Candidates()))); + tinfos.push_back(TransportInfo(content->name, + TransportDescription(transport_type(), + std::string(), + std::string()))); } return tinfos; } @@ -1558,7 +1559,9 @@ bool Session::SendTransportInfoMessage(const TransportProxy* transproxy, const Candidates& candidates, SessionError* error) { return SendTransportInfoMessage(TransportInfo(transproxy->content_name(), - TransportDescription(transproxy->type(), candidates)), error); + TransportDescription(transproxy->type(), std::vector<std::string>(), + std::string(), std::string(), ICEMODE_FULL, + CONNECTIONROLE_NONE, NULL, candidates)), error); } bool Session::WriteSessionAction(SignalingProtocol protocol, diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/session_unittest.cc b/chromium/third_party/libjingle/source/talk/p2p/base/session_unittest.cc index 1d072ae0a99..b64e7374221 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/session_unittest.cc +++ b/chromium/third_party/libjingle/source/talk/p2p/base/session_unittest.cc @@ -32,6 +32,7 @@ #include "talk/base/base64.h" #include "talk/base/common.h" +#include "talk/base/dscp.h" #include "talk/base/gunit.h" #include "talk/base/helpers.h" #include "talk/base/logging.h" @@ -713,7 +714,7 @@ cricket::SessionDescription* NewTestSessionDescription( new TestContentDescription(gingle_content_type, content_type_a)); cricket::TransportDescription desc(cricket::NS_GINGLE_P2P, - cricket::Candidates()); + std::string(), std::string()); offer->AddTransportInfo(cricket::TransportInfo(content_name_a, desc)); if (content_name_a != content_name_b) { @@ -735,7 +736,7 @@ cricket::SessionDescription* NewTestSessionDescription( offer->AddTransportInfo(cricket::TransportInfo (content_name, cricket::TransportDescription( cricket::NS_GINGLE_P2P, - cricket::Candidates()))); + std::string(), std::string()))); return offer; } @@ -830,7 +831,7 @@ struct ChannelHandler : sigslot::has_slots<> { std::string data_with_id(name); data_with_id += data; int result = channel->SendPacket(data_with_id.c_str(), data_with_id.size(), - 0); + talk_base::DSCP_NO_CHANGE, 0); EXPECT_EQ(static_cast<int>(data_with_id.size()), result); } diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/sessionmanager.cc b/chromium/third_party/libjingle/source/talk/p2p/base/sessionmanager.cc index 7aa52b35968..15b745239d4 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/sessionmanager.cc +++ b/chromium/third_party/libjingle/source/talk/p2p/base/sessionmanager.cc @@ -210,6 +210,12 @@ void SessionManager::OnIncomingResponse(const 
buzz::XmlElement* orig_stanza, } Session* session = FindSession(msg.sid, msg.to); + if (!session) { + // Also try the QN_FROM in the response stanza, in case we sent the request + // to a bare JID but got the response from a full JID. + std::string ack_from = response_stanza->Attr(buzz::QN_FROM); + session = FindSession(msg.sid, ack_from); + } if (session) { session->OnIncomingResponse(orig_stanza, response_stanza, msg); } diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/sessionmessages.cc b/chromium/third_party/libjingle/source/talk/p2p/base/sessionmessages.cc index 031c3d6f6aa..7a03d765064 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/sessionmessages.cc +++ b/chromium/third_party/libjingle/source/talk/p2p/base/sessionmessages.cc @@ -359,7 +359,7 @@ bool ParseGingleTransportInfos(const buzz::XmlElement* action_elem, // If we don't have media, no need to separate the candidates. if (!has_audio && !has_video) { TransportInfo tinfo(CN_OTHER, - TransportDescription(NS_GINGLE_P2P, Candidates())); + TransportDescription(NS_GINGLE_P2P, std::string(), std::string())); if (!ParseGingleCandidates(action_elem, trans_parsers, translators, CN_OTHER, &tinfo.description.candidates, error)) { @@ -371,10 +371,12 @@ bool ParseGingleTransportInfos(const buzz::XmlElement* action_elem, } // If we have media, separate the candidates. - TransportInfo audio_tinfo(CN_AUDIO, - TransportDescription(NS_GINGLE_P2P, Candidates())); - TransportInfo video_tinfo(CN_VIDEO, - TransportDescription(NS_GINGLE_P2P, Candidates())); + TransportInfo audio_tinfo( + CN_AUDIO, + TransportDescription(NS_GINGLE_P2P, std::string(), std::string())); + TransportInfo video_tinfo( + CN_VIDEO, + TransportDescription(NS_GINGLE_P2P, std::string(), std::string())); for (const buzz::XmlElement* candidate_elem = action_elem->FirstElement(); candidate_elem != NULL; candidate_elem = candidate_elem->NextElement()) { diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/stunport.cc b/chromium/third_party/libjingle/source/talk/p2p/base/stunport.cc index e182a51cbae..5e0e5002970 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/stunport.cc +++ b/chromium/third_party/libjingle/source/talk/p2p/base/stunport.cc @@ -216,8 +216,10 @@ Connection* UDPPort::CreateConnection(const Candidate& address, } int UDPPort::SendTo(const void* data, size_t size, - const talk_base::SocketAddress& addr, bool payload) { - int sent = socket_->SendTo(data, size, addr); + const talk_base::SocketAddress& addr, + talk_base::DiffServCodePoint dscp, + bool payload) { + int sent = socket_->SendTo(data, size, addr, dscp); if (sent < 0) { error_ = socket_->GetError(); LOG_J(LS_ERROR, this) << "UDP send of " << size @@ -227,6 +229,12 @@ int UDPPort::SendTo(const void* data, size_t size, } int UDPPort::SetOption(talk_base::Socket::Option opt, int value) { + // TODO(mallinath) - After we have the support on socket, + // remove this specialization. + if (opt == talk_base::Socket::OPT_DSCP) { + SetDefaultDscpValue(static_cast<talk_base::DiffServCodePoint>(value)); + return 0; + } return socket_->SetOption(opt, value); } @@ -254,8 +262,7 @@ void UDPPort::OnReadPacket(talk_base::AsyncPacketSocket* socket, // Even if the response doesn't match one of our outstanding requests, we // will eat it because it might be a response to a retransmitted packet, and // we already cleared the request when we got the first response. 
- ASSERT(!server_addr_.IsUnresolved()); - if (remote_addr == server_addr_) { + if (!server_addr_.IsUnresolved() && remote_addr == server_addr_) { requests_.CheckResponse(data, size); return; } @@ -346,7 +353,7 @@ void UDPPort::SetResult(bool success) { // TODO: merge this with SendTo above. void UDPPort::OnSendPacket(const void* data, size_t size, StunRequest* req) { StunBindingRequest* sreq = static_cast<StunBindingRequest*>(req); - if (socket_->SendTo(data, size, sreq->server_addr()) < 0) + if (socket_->SendTo(data, size, sreq->server_addr(), DefaultDscpValue()) < 0) PLOG(LERROR, socket_->GetError()) << "sendto"; } diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/stunport.h b/chromium/third_party/libjingle/source/talk/p2p/base/stunport.h index 3f982d57be2..7cfed4b7cb8 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/stunport.h +++ b/chromium/third_party/libjingle/source/talk/p2p/base/stunport.h @@ -121,7 +121,9 @@ class UDPPort : public Port { bool Init(); virtual int SendTo(const void* data, size_t size, - const talk_base::SocketAddress& addr, bool payload); + const talk_base::SocketAddress& addr, + talk_base::DiffServCodePoint dscp, + bool payload); void OnLocalAddressReady(talk_base::AsyncPacketSocket* socket, const talk_base::SocketAddress& address); diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/stunport_unittest.cc b/chromium/third_party/libjingle/source/talk/p2p/base/stunport_unittest.cc index ba36c480ec9..3c1c6836f52 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/stunport_unittest.cc +++ b/chromium/third_party/libjingle/source/talk/p2p/base/stunport_unittest.cc @@ -71,10 +71,36 @@ class StunPortTest : public testing::Test, &StunPortTest::OnPortError); } + void CreateSharedStunPort(const talk_base::SocketAddress& server_addr) { + socket_.reset(socket_factory_.CreateUdpSocket( + talk_base::SocketAddress(kLocalAddr.ipaddr(), 0), 0, 0)); + ASSERT_TRUE(socket_ != NULL); + socket_->SignalReadPacket.connect(this, &StunPortTest::OnReadPacket); + stun_port_.reset(cricket::UDPPort::Create( + talk_base::Thread::Current(), &network_, socket_.get(), + talk_base::CreateRandomString(16), talk_base::CreateRandomString(22))); + ASSERT_TRUE(stun_port_ != NULL); + stun_port_->set_server_addr(server_addr); + stun_port_->SignalPortComplete.connect(this, + &StunPortTest::OnPortComplete); + stun_port_->SignalPortError.connect(this, + &StunPortTest::OnPortError); + } + void PrepareAddress() { stun_port_->PrepareAddress(); } + void OnReadPacket(talk_base::AsyncPacketSocket* socket, const char* data, + size_t size, const talk_base::SocketAddress& remote_addr) { + stun_port_->HandleIncomingPacket(socket, data, size, remote_addr); + } + + void SendData(const char* data, size_t len) { + stun_port_->HandleIncomingPacket( + socket_.get(), data, len, talk_base::SocketAddress("22.22.22.22", 0)); + } + protected: static void SetUpTestCase() { // Ensure the RNG is inited. 
@@ -96,8 +122,9 @@ class StunPortTest : public testing::Test, private: talk_base::Network network_; talk_base::BasicPacketSocketFactory socket_factory_; - talk_base::scoped_ptr<cricket::StunPort> stun_port_; + talk_base::scoped_ptr<cricket::UDPPort> stun_port_; talk_base::scoped_ptr<cricket::TestStunServer> stun_server_; + talk_base::scoped_ptr<talk_base::AsyncPacketSocket> socket_; bool done_; bool error_; int stun_keepalive_delay_; @@ -164,3 +191,28 @@ TEST_F(StunPortTest, TestKeepAliveResponse) { ASSERT_EQ(1U, port()->Candidates().size()); } +// Test that a local candidate can be generated using a shared socket. +TEST_F(StunPortTest, TestSharedSocketPrepareAddress) { + CreateSharedStunPort(kStunAddr); + PrepareAddress(); + EXPECT_TRUE_WAIT(done(), kTimeoutMs); + ASSERT_EQ(1U, port()->Candidates().size()); + EXPECT_TRUE(kLocalAddr.EqualIPs(port()->Candidates()[0].address())); +} + +// Test that we still a get a local candidate with invalid stun server hostname. +// Also verifing that UDPPort can receive packets when stun address can't be +// resolved. +TEST_F(StunPortTest, TestSharedSocketPrepareAddressInvalidHostname) { + CreateSharedStunPort(kBadHostnameAddr); + PrepareAddress(); + EXPECT_TRUE_WAIT(done(), kTimeoutMs); + ASSERT_EQ(1U, port()->Candidates().size()); + EXPECT_TRUE(kLocalAddr.EqualIPs(port()->Candidates()[0].address())); + + // Send data to port after it's ready. This is to make sure, UDP port can + // handle data with unresolved stun server address. + std::string data = "some random data, sending to cricket::Port."; + SendData(data.c_str(), data.length()); + // No crash is success. +} diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/stunserver.cc b/chromium/third_party/libjingle/source/talk/p2p/base/stunserver.cc index 05292e8e181..80719b4aa5e 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/stunserver.cc +++ b/chromium/third_party/libjingle/source/talk/p2p/base/stunserver.cc @@ -102,7 +102,8 @@ void StunServer::SendResponse( const StunMessage& msg, const talk_base::SocketAddress& addr) { talk_base::ByteBuffer buf; msg.Write(&buf); - if (socket_->SendTo(buf.Data(), buf.Length(), addr) < 0) + if (socket_->SendTo( + buf.Data(), buf.Length(), addr, talk_base::DSCP_NO_CHANGE) < 0) LOG_ERR(LS_ERROR) << "sendto"; } diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/tcpport.cc b/chromium/third_party/libjingle/source/talk/p2p/base/tcpport.cc index 037abeccc88..11334c6225e 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/tcpport.cc +++ b/chromium/third_party/libjingle/source/talk/p2p/base/tcpport.cc @@ -67,6 +67,10 @@ bool TCPPort::Init() { TCPPort::~TCPPort() { delete socket_; + std::list<Incoming>::iterator it; + for (it = incoming_.begin(); it != incoming_.end(); ++it) + delete it->socket; + incoming_.clear(); } Connection* TCPPort::CreateConnection(const Candidate& address, @@ -130,7 +134,9 @@ void TCPPort::PrepareAddress() { } int TCPPort::SendTo(const void* data, size_t size, - const talk_base::SocketAddress& addr, bool payload) { + const talk_base::SocketAddress& addr, + talk_base::DiffServCodePoint dscp, + bool payload) { talk_base::AsyncPacketSocket * socket = NULL; if (TCPConnection * conn = static_cast<TCPConnection*>(GetConnection(addr))) { socket = conn->socket(); @@ -143,7 +149,7 @@ int TCPPort::SendTo(const void* data, size_t size, return -1; // TODO: Set error_ } - int sent = socket->Send(data, size); + int sent = socket->Send(data, size, dscp); if (sent < 0) { error_ = socket->GetError(); 
LOG_J(LS_ERROR, this) << "TCP send of " << size @@ -161,6 +167,14 @@ int TCPPort::GetOption(talk_base::Socket::Option opt, int* value) { } int TCPPort::SetOption(talk_base::Socket::Option opt, int value) { + // If we are setting DSCP value, pass value to base Port and return. + // TODO(mallinath) - After we have the support on socket, + // remove this specialization. + if (opt == talk_base::Socket::OPT_DSCP) { + SetDefaultDscpValue(static_cast<talk_base::DiffServCodePoint>(value)); + return 0; + } + if (socket_) { return socket_->SetOption(opt, value); } else { @@ -257,7 +271,8 @@ TCPConnection::~TCPConnection() { delete socket_; } -int TCPConnection::Send(const void* data, size_t size) { +int TCPConnection::Send(const void* data, size_t size, + talk_base::DiffServCodePoint dscp) { if (!socket_) { error_ = ENOTCONN; return SOCKET_ERROR; @@ -268,7 +283,7 @@ int TCPConnection::Send(const void* data, size_t size) { error_ = EWOULDBLOCK; return SOCKET_ERROR; } - int sent = socket_->Send(data, size); + int sent = socket_->Send(data, size, dscp); if (sent < 0) { error_ = socket_->GetError(); } else { diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/tcpport.h b/chromium/third_party/libjingle/source/talk/p2p/base/tcpport.h index 8136176664c..599d3c66bef 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/tcpport.h +++ b/chromium/third_party/libjingle/source/talk/p2p/base/tcpport.h @@ -82,7 +82,9 @@ class TCPPort : public Port { // Handles sending using the local TCP socket. virtual int SendTo(const void* data, size_t size, - const talk_base::SocketAddress& addr, bool payload); + const talk_base::SocketAddress& addr, + talk_base::DiffServCodePoint dscp, + bool payload); // Accepts incoming TCP connection. void OnNewConnection(talk_base::AsyncPacketSocket* socket, @@ -124,7 +126,8 @@ class TCPConnection : public Connection { talk_base::AsyncPacketSocket* socket = 0); virtual ~TCPConnection(); - virtual int Send(const void* data, size_t size); + virtual int Send(const void* data, size_t size, + talk_base::DiffServCodePoint dscp); virtual int GetError(); talk_base::AsyncPacketSocket* socket() { return socket_; } diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/transport.cc b/chromium/third_party/libjingle/source/talk/p2p/base/transport.cc index 6ccd90b02dd..3e4ad704068 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/transport.cc +++ b/chromium/third_party/libjingle/source/talk/p2p/base/transport.cc @@ -27,6 +27,7 @@ #include "talk/p2p/base/transport.h" +#include "talk/base/bind.h" #include "talk/base/common.h" #include "talk/base/logging.h" #include "talk/p2p/base/candidate.h" @@ -39,27 +40,19 @@ namespace cricket { +using talk_base::Bind; + enum { - MSG_CREATECHANNEL = 1, - MSG_DESTROYCHANNEL = 2, - MSG_DESTROYALLCHANNELS = 3, - MSG_CONNECTCHANNELS = 4, - MSG_RESETCHANNELS = 5, - MSG_ONSIGNALINGREADY = 6, - MSG_ONREMOTECANDIDATE = 7, - MSG_READSTATE = 8, - MSG_WRITESTATE = 9, - MSG_REQUESTSIGNALING = 10, - MSG_CANDIDATEREADY = 11, - MSG_ROUTECHANGE = 12, - MSG_CONNECTING = 13, - MSG_CANDIDATEALLOCATIONCOMPLETE = 14, - MSG_ROLECONFLICT = 15, - MSG_SETICEROLE = 16, - MSG_SETLOCALDESCRIPTION = 17, - MSG_SETREMOTEDESCRIPTION = 18, - MSG_GETSTATS = 19, - MSG_SETIDENTITY = 20, + MSG_ONSIGNALINGREADY = 1, + MSG_ONREMOTECANDIDATE, + MSG_READSTATE, + MSG_WRITESTATE, + MSG_REQUESTSIGNALING, + MSG_CANDIDATEREADY, + MSG_ROUTECHANGE, + MSG_CONNECTING, + MSG_CANDIDATEALLOCATIONCOMPLETE, + MSG_ROLECONFLICT, }; struct ChannelParams : public 
talk_base::MessageData { @@ -80,36 +73,6 @@ struct ChannelParams : public talk_base::MessageData { Candidate* candidate; }; -struct TransportDescriptionParams : public talk_base::MessageData { - TransportDescriptionParams(const TransportDescription& desc, - ContentAction action) - : desc(desc), action(action), result(false) {} - const TransportDescription& desc; - ContentAction action; - bool result; -}; - -struct IceRoleParam : public talk_base::MessageData { - explicit IceRoleParam(IceRole role) : role(role) {} - - IceRole role; -}; - -struct StatsParam : public talk_base::MessageData { - explicit StatsParam(TransportStats* stats) - : stats(stats), result(false) {} - - TransportStats* stats; - bool result; -}; - -struct IdentityParam : public talk_base::MessageData { - explicit IdentityParam(talk_base::SSLIdentity* identity) - : identity(identity) {} - - talk_base::SSLIdentity* identity; -}; - Transport::Transport(talk_base::Thread* signaling_thread, talk_base::Thread* worker_thread, const std::string& content_name, @@ -137,33 +100,28 @@ Transport::~Transport() { } void Transport::SetIceRole(IceRole role) { - IceRoleParam param(role); - worker_thread()->Send(this, MSG_SETICEROLE, ¶m); + worker_thread_->Invoke<void>(Bind(&Transport::SetIceRole_w, this, role)); } void Transport::SetIdentity(talk_base::SSLIdentity* identity) { - IdentityParam params(identity); - worker_thread()->Send(this, MSG_SETIDENTITY, ¶ms); + worker_thread_->Invoke<void>(Bind(&Transport::SetIdentity_w, this, identity)); } bool Transport::SetLocalTransportDescription( const TransportDescription& description, ContentAction action) { - TransportDescriptionParams params(description, action); - worker_thread()->Send(this, MSG_SETLOCALDESCRIPTION, ¶ms); - return params.result; + return worker_thread_->Invoke<bool>(Bind( + &Transport::SetLocalTransportDescription_w, this, description, action)); } bool Transport::SetRemoteTransportDescription( const TransportDescription& description, ContentAction action) { - TransportDescriptionParams params(description, action); - worker_thread()->Send(this, MSG_SETREMOTEDESCRIPTION, ¶ms); - return params.result; + return worker_thread_->Invoke<bool>(Bind( + &Transport::SetRemoteTransportDescription_w, this, description, action)); } TransportChannelImpl* Transport::CreateChannel(int component) { - ChannelParams params(component); - worker_thread()->Send(this, MSG_CREATECHANNEL, ¶ms); - return params.channel; + return worker_thread_->Invoke<TransportChannelImpl*>(Bind( + &Transport::CreateChannel_w, this, component)); } TransportChannelImpl* Transport::CreateChannel_w(int component) { @@ -235,8 +193,8 @@ bool Transport::HasChannels() { } void Transport::DestroyChannel(int component) { - ChannelParams params(component); - worker_thread()->Send(this, MSG_DESTROYCHANNEL, ¶ms); + worker_thread_->Invoke<void>(Bind( + &Transport::DestroyChannel_w, this, component)); } void Transport::DestroyChannel_w(int component) { @@ -270,7 +228,7 @@ void Transport::DestroyChannel_w(int component) { void Transport::ConnectChannels() { ASSERT(signaling_thread()->IsCurrent()); - worker_thread()->Send(this, MSG_CONNECTCHANNELS, NULL); + worker_thread_->Invoke<void>(Bind(&Transport::ConnectChannels_w, this)); } void Transport::ConnectChannels_w() { @@ -293,7 +251,8 @@ void Transport::ConnectChannels_w() { TransportDescription desc(NS_GINGLE_P2P, std::vector<std::string>(), talk_base::CreateRandomString(ICE_UFRAG_LENGTH), talk_base::CreateRandomString(ICE_PWD_LENGTH), - ICEMODE_FULL, NULL, Candidates()); + 
ICEMODE_FULL, CONNECTIONROLE_NONE, NULL, + Candidates()); SetLocalTransportDescription_w(desc, CA_OFFER); } @@ -310,7 +269,8 @@ void Transport::OnConnecting_s() { void Transport::DestroyAllChannels() { ASSERT(signaling_thread()->IsCurrent()); - worker_thread()->Send(this, MSG_DESTROYALLCHANNELS, NULL); + worker_thread_->Invoke<void>( + Bind(&Transport::DestroyAllChannels_w, this)); worker_thread()->Clear(this); signaling_thread()->Clear(this); destroyed_ = true; @@ -338,7 +298,7 @@ void Transport::DestroyAllChannels_w() { void Transport::ResetChannels() { ASSERT(signaling_thread()->IsCurrent()); - worker_thread()->Send(this, MSG_RESETCHANNELS, NULL); + worker_thread_->Invoke<void>(Bind(&Transport::ResetChannels_w, this)); } void Transport::ResetChannels_w() { @@ -402,9 +362,8 @@ bool Transport::VerifyCandidate(const Candidate& cand, std::string* error) { bool Transport::GetStats(TransportStats* stats) { ASSERT(signaling_thread()->IsCurrent()); - StatsParam params(stats); - worker_thread()->Send(this, MSG_GETSTATS, &params); - return params.result; + return worker_thread_->Invoke<bool>(Bind( + &Transport::GetStats_w, this, stats)); } bool Transport::GetStats_w(TransportStats* stats) { @@ -424,6 +383,11 @@ bool Transport::GetStats_w(TransportStats* stats) { return true; } +bool Transport::GetSslRole(talk_base::SSLRole* ssl_role) const { + return worker_thread_->Invoke<bool>(Bind( + &Transport::GetSslRole_w, this, ssl_role)); +} + void Transport::OnRemoteCandidates(const std::vector<Candidate>& candidates) { for (std::vector<Candidate>::const_iterator iter = candidates.begin(); iter != candidates.end(); @@ -668,19 +632,20 @@ bool Transport::ApplyRemoteTransportDescription_w(TransportChannelImpl* ch) { return true; } -void Transport::ApplyNegotiatedTransportDescription_w( +bool Transport::ApplyNegotiatedTransportDescription_w( TransportChannelImpl* channel) { channel->SetIceProtocolType(protocol_); channel->SetRemoteIceMode(remote_ice_mode_); + return true; } -bool Transport::NegotiateTransportDescription_w(ContentAction local_role_) { +bool Transport::NegotiateTransportDescription_w(ContentAction local_role) { // TODO(ekr@rtfm.com): This is ICE-specific stuff. Refactor into // P2PTransport. 
const TransportDescription* offer; const TransportDescription* answer; - if (local_role_ == CA_OFFER) { + if (local_role == CA_OFFER) { offer = local_description_.get(); answer = remote_description_.get(); } else { @@ -724,32 +689,14 @@ bool Transport::NegotiateTransportDescription_w(ContentAction local_role_) { for (ChannelMap::iterator iter = channels_.begin(); iter != channels_.end(); ++iter) { - ApplyNegotiatedTransportDescription_w(iter->second.get()); + if (!ApplyNegotiatedTransportDescription_w(iter->second.get())) + return false; } return true; } void Transport::OnMessage(talk_base::Message* msg) { switch (msg->message_id) { - case MSG_CREATECHANNEL: { - ChannelParams* params = static_cast<ChannelParams*>(msg->pdata); - params->channel = CreateChannel_w(params->component); - } - break; - case MSG_DESTROYCHANNEL: { - ChannelParams* params = static_cast<ChannelParams*>(msg->pdata); - DestroyChannel_w(params->component); - } - break; - case MSG_CONNECTCHANNELS: - ConnectChannels_w(); - break; - case MSG_RESETCHANNELS: - ResetChannels_w(); - break; - case MSG_DESTROYALLCHANNELS: - DestroyAllChannels_w(); - break; case MSG_ONSIGNALINGREADY: CallChannels_w(&TransportChannelImpl::OnSignalingReady); break; @@ -789,36 +736,6 @@ void Transport::OnMessage(talk_base::Message* msg) { case MSG_ROLECONFLICT: SignalRoleConflict(); break; - case MSG_SETICEROLE: { - IceRoleParam* param = - static_cast<IceRoleParam*>(msg->pdata); - SetIceRole_w(param->role); - } - break; - case MSG_SETLOCALDESCRIPTION: { - TransportDescriptionParams* params = - static_cast<TransportDescriptionParams*>(msg->pdata); - params->result = SetLocalTransportDescription_w(params->desc, - params->action); - } - break; - case MSG_SETREMOTEDESCRIPTION: { - TransportDescriptionParams* params = - static_cast<TransportDescriptionParams*>(msg->pdata); - params->result = SetRemoteTransportDescription_w(params->desc, - params->action); - } - break; - case MSG_GETSTATS: { - StatsParam* params = static_cast<StatsParam*>(msg->pdata); - params->result = GetStats_w(params->stats); - } - break; - case MSG_SETIDENTITY: { - IdentityParam* params = static_cast<IdentityParam*>(msg->pdata); - SetIdentity_w(params->identity); - } - break; } } diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/transport.h b/chromium/third_party/libjingle/source/talk/p2p/base/transport.h index 63c37343abe..381215f5d1c 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/transport.h +++ b/chromium/third_party/libjingle/source/talk/p2p/base/transport.h @@ -52,6 +52,7 @@ #include "talk/base/criticalsection.h" #include "talk/base/messagequeue.h" #include "talk/base/sigslot.h" +#include "talk/base/sslstreamadapter.h" #include "talk/p2p/base/candidate.h" #include "talk/p2p/base/constants.h" #include "talk/p2p/base/sessiondescription.h" @@ -323,6 +324,8 @@ class Transport : public talk_base::MessageHandler, // Forwards the signal from TransportChannel to BaseSession. sigslot::signal0<> SignalRoleConflict; + virtual bool GetSslRole(talk_base::SSLRole* ssl_role) const; + protected: // These are called by Create/DestroyChannel above in order to create or // destroy the appropriate type of channel. @@ -366,9 +369,13 @@ class Transport : public talk_base::MessageHandler, // Pushes down the transport parameters obtained via negotiation. // Derived classes can set their specific parameters here, but must call the // base as well. 
- virtual void ApplyNegotiatedTransportDescription_w( + virtual bool ApplyNegotiatedTransportDescription_w( TransportChannelImpl* channel); + virtual bool GetSslRole_w(talk_base::SSLRole* ssl_role) const { + return false; + } + private: struct ChannelMapEntry { ChannelMapEntry() : impl_(NULL), candidates_allocated_(false), ref_(0) {} diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/transport_unittest.cc b/chromium/third_party/libjingle/source/talk/p2p/base/transport_unittest.cc index f9bebdfa535..e3b7badfff7 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/transport_unittest.cc +++ b/chromium/third_party/libjingle/source/talk/p2p/base/transport_unittest.cc @@ -145,8 +145,7 @@ TEST_F(TransportTest, TestChannelIceParameters) { transport_->SetIceRole(cricket::ICEROLE_CONTROLLING); transport_->SetIceTiebreaker(99U); cricket::TransportDescription local_desc( - cricket::NS_JINGLE_ICE_UDP, std::vector<std::string>(), - kIceUfrag1, kIcePwd1, cricket::ICEMODE_FULL, NULL, cricket::Candidates()); + cricket::NS_JINGLE_ICE_UDP, kIceUfrag1, kIcePwd1); ASSERT_TRUE(transport_->SetLocalTransportDescription(local_desc, cricket::CA_OFFER)); EXPECT_EQ(cricket::ICEROLE_CONTROLLING, transport_->ice_role()); @@ -157,8 +156,7 @@ TEST_F(TransportTest, TestChannelIceParameters) { EXPECT_EQ(kIcePwd1, channel_->ice_pwd()); cricket::TransportDescription remote_desc( - cricket::NS_JINGLE_ICE_UDP, std::vector<std::string>(), - kIceUfrag1, kIcePwd1, cricket::ICEMODE_FULL, NULL, cricket::Candidates()); + cricket::NS_JINGLE_ICE_UDP, kIceUfrag1, kIcePwd1); ASSERT_TRUE(transport_->SetRemoteTransportDescription(remote_desc, cricket::CA_ANSWER)); EXPECT_EQ(cricket::ICEROLE_CONTROLLING, channel_->GetIceRole()); @@ -177,12 +175,12 @@ TEST_F(TransportTest, TestSetRemoteIceLiteInOffer) { transport_->SetIceRole(cricket::ICEROLE_CONTROLLED); cricket::TransportDescription remote_desc( cricket::NS_JINGLE_ICE_UDP, std::vector<std::string>(), - kIceUfrag1, kIcePwd1, cricket::ICEMODE_LITE, NULL, cricket::Candidates()); + kIceUfrag1, kIcePwd1, cricket::ICEMODE_LITE, + cricket::CONNECTIONROLE_ACTPASS, NULL, cricket::Candidates()); ASSERT_TRUE(transport_->SetRemoteTransportDescription(remote_desc, cricket::CA_OFFER)); cricket::TransportDescription local_desc( - cricket::NS_JINGLE_ICE_UDP, std::vector<std::string>(), - kIceUfrag1, kIcePwd1, cricket::ICEMODE_FULL, NULL, cricket::Candidates()); + cricket::NS_JINGLE_ICE_UDP, kIceUfrag1, kIcePwd1); ASSERT_TRUE(transport_->SetLocalTransportDescription(local_desc, cricket::CA_ANSWER)); EXPECT_EQ(cricket::ICEROLE_CONTROLLING, transport_->ice_role()); @@ -195,8 +193,7 @@ TEST_F(TransportTest, TestSetRemoteIceLiteInOffer) { TEST_F(TransportTest, TestSetRemoteIceLiteInAnswer) { transport_->SetIceRole(cricket::ICEROLE_CONTROLLING); cricket::TransportDescription local_desc( - cricket::NS_JINGLE_ICE_UDP, std::vector<std::string>(), - kIceUfrag1, kIcePwd1, cricket::ICEMODE_FULL, NULL, cricket::Candidates()); + cricket::NS_JINGLE_ICE_UDP, kIceUfrag1, kIcePwd1); ASSERT_TRUE(transport_->SetLocalTransportDescription(local_desc, cricket::CA_OFFER)); EXPECT_EQ(cricket::ICEROLE_CONTROLLING, transport_->ice_role()); @@ -206,7 +203,8 @@ TEST_F(TransportTest, TestSetRemoteIceLiteInAnswer) { EXPECT_EQ(cricket::ICEMODE_FULL, channel_->remote_ice_mode()); cricket::TransportDescription remote_desc( cricket::NS_JINGLE_ICE_UDP, std::vector<std::string>(), - kIceUfrag1, kIcePwd1, cricket::ICEMODE_LITE, NULL, cricket::Candidates()); + kIceUfrag1, kIcePwd1, cricket::ICEMODE_LITE, + 
cricket::CONNECTIONROLE_NONE, NULL, cricket::Candidates()); ASSERT_TRUE(transport_->SetRemoteTransportDescription(remote_desc, cricket::CA_ANSWER)); EXPECT_EQ(cricket::ICEROLE_CONTROLLING, channel_->GetIceRole()); diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/transportchannel.h b/chromium/third_party/libjingle/source/talk/p2p/base/transportchannel.h index 2b09f54b83a..85fff7a9f92 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/transportchannel.h +++ b/chromium/third_party/libjingle/source/talk/p2p/base/transportchannel.h @@ -32,6 +32,7 @@ #include <vector> #include "talk/base/basictypes.h" +#include "talk/base/dscp.h" #include "talk/base/sigslot.h" #include "talk/base/socket.h" #include "talk/base/sslidentity.h" @@ -80,7 +81,9 @@ class TransportChannel : public sigslot::has_slots<> { // Attempts to send the given packet. The return value is < 0 on failure. // TODO: Remove the default argument once channel code is updated. - virtual int SendPacket(const char* data, size_t len, int flags = 0) = 0; + virtual int SendPacket(const char* data, size_t len, + talk_base::DiffServCodePoint dscp, + int flags = 0) = 0; // Sets a socket option on this channel. Note that not all options are // supported by all transport types. @@ -89,30 +92,20 @@ class TransportChannel : public sigslot::has_slots<> { // Returns the most recent error that occurred on this channel. virtual int GetError() = 0; - // TODO(mallinath) - Move this to TransportChannelImpl, after channel.cc - // no longer needs it. - // Returns current transportchannel ICE role. - virtual IceRole GetIceRole() const = 0; - // Returns the current stats for this connection. - virtual bool GetStats(ConnectionInfos* infos) { - return false; - } + virtual bool GetStats(ConnectionInfos* infos) = 0; // Is DTLS active? - virtual bool IsDtlsActive() const { - return false; - } + virtual bool IsDtlsActive() const = 0; + + // Default implementation. + virtual bool GetSslRole(talk_base::SSLRole* role) const = 0; // Set up the ciphers to use for DTLS-SRTP. - virtual bool SetSrtpCiphers(const std::vector<std::string>& ciphers) { - return false; - } + virtual bool SetSrtpCiphers(const std::vector<std::string>& ciphers) = 0; // Find out which DTLS-SRTP cipher was negotiated - virtual bool GetSrtpCipher(std::string* cipher) { - return false; - } + virtual bool GetSrtpCipher(std::string* cipher) = 0; // Allows key material to be extracted for external encryption. virtual bool ExportKeyingMaterial(const std::string& label, @@ -120,9 +113,7 @@ class TransportChannel : public sigslot::has_slots<> { size_t context_len, bool use_context, uint8* result, - size_t result_len) { - return false; - } + size_t result_len) = 0; // Signalled each time a packet is received on this channel. sigslot::signal4<TransportChannel*, const char*, diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/transportchannelimpl.h b/chromium/third_party/libjingle/source/talk/p2p/base/transportchannelimpl.h index 32e2471f120..cde2441307b 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/transportchannelimpl.h +++ b/chromium/third_party/libjingle/source/talk/p2p/base/transportchannelimpl.h @@ -50,6 +50,7 @@ class TransportChannelImpl : public TransportChannel { virtual Transport* GetTransport() = 0; // For ICE channels. + virtual IceRole GetIceRole() const = 0; virtual void SetIceRole(IceRole role) = 0; virtual void SetIceTiebreaker(uint64 tiebreaker) = 0; // To toggle G-ICE/ICE. 
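
The transport.cc hunks above replace the per-operation message enums and MessageData structs (TransportDescriptionParams, IceRoleParam, StatsParam, IdentityParam) with synchronous worker_thread_->Invoke<T>(Bind(...)) calls, so a result no longer has to travel back through a params->result field. As a rough illustration of what such a blocking cross-thread invoke involves, here is a minimal self-contained sketch built on std::packaged_task and a plain worker loop; SimpleInvoker and its members are invented names for this example, and this is not the talk_base::Thread implementation.

// Sketch of a blocking cross-thread Invoke, in the spirit of
// talk_base::Thread::Invoke<T>(Bind(...)). Names are illustrative only.
#include <condition_variable>
#include <functional>
#include <future>
#include <memory>
#include <mutex>
#include <queue>
#include <thread>

class SimpleInvoker {
 public:
  SimpleInvoker() : running_(true), worker_(&SimpleInvoker::Run, this) {}
  ~SimpleInvoker() {
    {
      std::lock_guard<std::mutex> lock(mutex_);
      running_ = false;
    }
    wake_.notify_one();
    worker_.join();
  }

  // Runs |task| on the worker thread and blocks the caller until it has
  // finished, handing back the return value; this is the contract the patch
  // relies on when it writes worker_thread_->Invoke<bool>(Bind(...)).
  template <typename R>
  R Invoke(std::function<R()> task) {
    auto packaged =
        std::make_shared<std::packaged_task<R()>>(std::move(task));
    std::future<R> result = packaged->get_future();
    Post([packaged] { (*packaged)(); });
    return result.get();  // Blocks, as the old Thread::Send round trip did.
  }

 private:
  void Post(std::function<void()> job) {
    {
      std::lock_guard<std::mutex> lock(mutex_);
      queue_.push(std::move(job));
    }
    wake_.notify_one();
  }

  void Run() {
    for (;;) {
      std::function<void()> job;
      {
        std::unique_lock<std::mutex> lock(mutex_);
        wake_.wait(lock, [this] { return !queue_.empty() || !running_; });
        if (!running_ && queue_.empty())
          return;
        job = std::move(queue_.front());
        queue_.pop();
      }
      job();  // Executed on the worker thread.
    }
  }

  std::mutex mutex_;
  std::condition_variable wake_;
  std::queue<std::function<void()> > queue_;
  bool running_;
  std::thread worker_;
};

With a helper like this, a call such as invoker.Invoke<bool>([&] { return SetLocalTransportDescription_w(desc, action); }) mirrors the shape of the call sites the patch introduces, while Invoke<void> covers cases such as SetIceRole.
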
@@ -93,16 +94,14 @@ class TransportChannelImpl : public TransportChannel { // DTLS methods // Set DTLS local identity. - virtual bool SetLocalIdentity(talk_base::SSLIdentity* identity) { - return false; - } + virtual bool SetLocalIdentity(talk_base::SSLIdentity* identity) = 0; // Set DTLS Remote fingerprint. Must be after local identity set. virtual bool SetRemoteFingerprint(const std::string& digest_alg, const uint8* digest, - size_t digest_len) { - return false; - } + size_t digest_len) = 0; + + virtual bool SetSslRole(talk_base::SSLRole role) = 0; // TransportChannel is forwarding this signal from PortAllocatorSession. sigslot::signal1<TransportChannelImpl*> SignalCandidatesAllocationDone; diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/transportchannelproxy.cc b/chromium/third_party/libjingle/source/talk/p2p/base/transportchannelproxy.cc index b25f75751d6..04b32ce6499 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/transportchannelproxy.cc +++ b/chromium/third_party/libjingle/source/talk/p2p/base/transportchannelproxy.cc @@ -93,13 +93,15 @@ void TransportChannelProxy::SetImplementation(TransportChannelImpl* impl) { worker_thread_->Post(this, MSG_UPDATESTATE); } -int TransportChannelProxy::SendPacket(const char* data, size_t len, int flags) { +int TransportChannelProxy::SendPacket(const char* data, size_t len, + talk_base::DiffServCodePoint dscp, + int flags) { ASSERT(talk_base::Thread::Current() == worker_thread_); // Fail if we don't have an impl yet. if (!impl_) { return -1; } - return impl_->SendPacket(data, len, flags); + return impl_->SendPacket(data, len, dscp, flags); } int TransportChannelProxy::SetOption(talk_base::Socket::Option opt, int value) { @@ -135,6 +137,22 @@ bool TransportChannelProxy::IsDtlsActive() const { return impl_->IsDtlsActive(); } +bool TransportChannelProxy::GetSslRole(talk_base::SSLRole* role) const { + ASSERT(talk_base::Thread::Current() == worker_thread_); + if (!impl_) { + return false; + } + return impl_->GetSslRole(role); +} + +bool TransportChannelProxy::SetSslRole(talk_base::SSLRole role) { + ASSERT(talk_base::Thread::Current() == worker_thread_); + if (!impl_) { + return false; + } + return impl_->SetSslRole(role); +} + bool TransportChannelProxy::SetSrtpCiphers(const std::vector<std::string>& ciphers) { ASSERT(talk_base::Thread::Current() == worker_thread_); diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/transportchannelproxy.h b/chromium/third_party/libjingle/source/talk/p2p/base/transportchannelproxy.h index 828c0ae05b4..29f46634198 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/transportchannelproxy.h +++ b/chromium/third_party/libjingle/source/talk/p2p/base/transportchannelproxy.h @@ -63,12 +63,16 @@ class TransportChannelProxy : public TransportChannel, // Implementation of the TransportChannel interface. These simply forward to // the implementation. 
- virtual int SendPacket(const char* data, size_t len, int flags); + virtual int SendPacket(const char* data, size_t len, + talk_base::DiffServCodePoint dscp, + int flags); virtual int SetOption(talk_base::Socket::Option opt, int value); virtual int GetError(); virtual IceRole GetIceRole() const; virtual bool GetStats(ConnectionInfos* infos); virtual bool IsDtlsActive() const; + virtual bool GetSslRole(talk_base::SSLRole* role) const; + virtual bool SetSslRole(talk_base::SSLRole role); virtual bool SetSrtpCiphers(const std::vector<std::string>& ciphers); virtual bool GetSrtpCipher(std::string* cipher); virtual bool ExportKeyingMaterial(const std::string& label, diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/MediaSource.java b/chromium/third_party/libjingle/source/talk/p2p/base/transportdescription.cc index 29490490006..56873427284 100644 --- a/chromium/third_party/libjingle/source/talk/app/webrtc/java/src/org/webrtc/MediaSource.java +++ b/chromium/third_party/libjingle/source/talk/p2p/base/transportdescription.cc @@ -1,6 +1,6 @@ /* * libjingle - * Copyright 2013, Google Inc. + * Copyright 2013 Google Inc. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: @@ -25,31 +25,28 @@ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ +#include "talk/p2p/base/transportdescription.h" -package org.webrtc; +#include "talk/p2p/base/constants.h" -/** Java wrapper for a C++ MediaSourceInterface. */ -public class MediaSource { - /** Tracks MediaSourceInterface.SourceState */ - public enum State { - INITIALIZING, LIVE, ENDED, MUTED - } - - final long nativeSource; // Package-protected for PeerConnectionFactory. +namespace cricket { - public MediaSource(long nativeSource) { - this.nativeSource = nativeSource; - } +bool StringToConnectionRole(const std::string& role_str, ConnectionRole* role) { + const char* const roles[] = { + CONNECTIONROLE_ACTIVE_STR, + CONNECTIONROLE_PASSIVE_STR, + CONNECTIONROLE_ACTPASS_STR, + CONNECTIONROLE_HOLDCONN_STR + }; - public State state() { - return nativeState(nativeSource); - } - - void dispose() { - free(nativeSource); + for (size_t i = 0; i < ARRAY_SIZE(roles); ++i) { + if (stricmp(roles[i], role_str.c_str()) == 0) { + *role = static_cast<ConnectionRole>(CONNECTIONROLE_ACTIVE + i); + return true; + } } + return false; +} - private static native State nativeState(long pointer); +} // namespace cricket - private static native void free(long nativeSource); -} diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/transportdescription.h b/chromium/third_party/libjingle/source/talk/p2p/base/transportdescription.h index 92da8d66c29..64fbb89a808 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/transportdescription.h +++ b/chromium/third_party/libjingle/source/talk/p2p/base/transportdescription.h @@ -75,6 +75,26 @@ enum IceMode { ICEMODE_LITE // As defined in http://tools.ietf.org/html/rfc5245#section-4.2 }; +// RFC 4145 - http://tools.ietf.org/html/rfc4145#section-4 +// 'active': The endpoint will initiate an outgoing connection. +// 'passive': The endpoint will accept an incoming connection. +// 'actpass': The endpoint is willing to accept an incoming +// connection or to initiate an outgoing connection. 
+enum ConnectionRole { + CONNECTIONROLE_NONE = 0, + CONNECTIONROLE_ACTIVE, + CONNECTIONROLE_PASSIVE, + CONNECTIONROLE_ACTPASS, + CONNECTIONROLE_HOLDCONN, +}; + +extern const char CONNECTIONROLE_ACTIVE_STR[]; +extern const char CONNECTIONROLE_PASSIVE_STR[]; +extern const char CONNECTIONROLE_ACTPASS_STR[]; +extern const char CONNECTIONROLE_HOLDCONN_STR[]; + +bool StringToConnectionRole(const std::string& role_str, ConnectionRole* role); + typedef std::vector<Candidate> Candidates; struct TransportDescription { @@ -85,6 +105,7 @@ struct TransportDescription { const std::string& ice_ufrag, const std::string& ice_pwd, IceMode ice_mode, + ConnectionRole role, const talk_base::SSLFingerprint* identity_fingerprint, const Candidates& candidates) : transport_type(transport_type), @@ -92,19 +113,24 @@ struct TransportDescription { ice_ufrag(ice_ufrag), ice_pwd(ice_pwd), ice_mode(ice_mode), + connection_role(role), identity_fingerprint(CopyFingerprint(identity_fingerprint)), candidates(candidates) {} TransportDescription(const std::string& transport_type, - const Candidates& candidates) + const std::string& ice_ufrag, + const std::string& ice_pwd) : transport_type(transport_type), + ice_ufrag(ice_ufrag), + ice_pwd(ice_pwd), ice_mode(ICEMODE_FULL), - candidates(candidates) {} + connection_role(CONNECTIONROLE_NONE) {} TransportDescription(const TransportDescription& from) : transport_type(from.transport_type), transport_options(from.transport_options), ice_ufrag(from.ice_ufrag), ice_pwd(from.ice_pwd), ice_mode(from.ice_mode), + connection_role(from.connection_role), identity_fingerprint(CopyFingerprint(from.identity_fingerprint.get())), candidates(from.candidates) {} @@ -118,6 +144,7 @@ struct TransportDescription { ice_ufrag = from.ice_ufrag; ice_pwd = from.ice_pwd; ice_mode = from.ice_mode; + connection_role = from.connection_role; identity_fingerprint.reset(CopyFingerprint( from.identity_fingerprint.get())); @@ -147,6 +174,7 @@ struct TransportDescription { std::string ice_ufrag; std::string ice_pwd; IceMode ice_mode; + ConnectionRole connection_role; talk_base::scoped_ptr<talk_base::SSLFingerprint> identity_fingerprint; Candidates candidates; diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/transportdescriptionfactory.cc b/chromium/third_party/libjingle/source/talk/p2p/base/transportdescriptionfactory.cc index 8fbfff144fa..0c129437ccb 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/transportdescriptionfactory.cc +++ b/chromium/third_party/libjingle/source/talk/p2p/base/transportdescriptionfactory.cc @@ -73,10 +73,12 @@ TransportDescription* TransportDescriptionFactory::CreateOffer( // If we are trying to establish a secure transport, add a fingerprint. if (secure_ == SEC_ENABLED || secure_ == SEC_REQUIRED) { // Fail if we can't create the fingerprint. - if (!CreateIdentityDigest(desc.get())) { + // If we are the initiator set role to "actpass". + if (!SetSecurityInfo(desc.get(), CONNECTIONROLE_ACTPASS)) { return NULL; } } + return desc.release(); } @@ -101,7 +103,7 @@ TransportDescription* TransportDescriptionFactory::CreateAnswer( desc->transport_type = NS_GINGLE_P2P; // Offer is hybrid, we support GICE: use GICE. } else if ((!offer || offer->transport_type == NS_GINGLE_P2P) && - (protocol_ == ICEPROTO_HYBRID || protocol_ == ICEPROTO_GOOGLE)) { + (protocol_ == ICEPROTO_HYBRID || protocol_ == ICEPROTO_GOOGLE)) { // Offer is GICE, we support hybrid or GICE: use GICE. 
desc->transport_type = NS_GINGLE_P2P; } else { @@ -126,7 +128,11 @@ TransportDescription* TransportDescriptionFactory::CreateAnswer( // The offer supports DTLS, so answer with DTLS, as long as we support it. if (secure_ == SEC_ENABLED || secure_ == SEC_REQUIRED) { // Fail if we can't create the fingerprint. - if (!CreateIdentityDigest(desc.get())) { + // Setting DTLS role to active. + ConnectionRole role = (options.prefer_passive_role) ? + CONNECTIONROLE_PASSIVE : CONNECTIONROLE_ACTIVE; + + if (!SetSecurityInfo(desc.get(), role)) { return NULL; } } @@ -140,8 +146,8 @@ TransportDescription* TransportDescriptionFactory::CreateAnswer( return desc.release(); } -bool TransportDescriptionFactory::CreateIdentityDigest( - TransportDescription* desc) const { +bool TransportDescriptionFactory::SetSecurityInfo( + TransportDescription* desc, ConnectionRole role) const { if (!identity_) { LOG(LS_ERROR) << "Cannot create identity digest with no identity"; return false; @@ -154,6 +160,8 @@ bool TransportDescriptionFactory::CreateIdentityDigest( return false; } + // Assign security role. + desc->connection_role = role; return true; } diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/transportdescriptionfactory.h b/chromium/third_party/libjingle/source/talk/p2p/base/transportdescriptionfactory.h index 32836f3e814..ddf27998188 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/transportdescriptionfactory.h +++ b/chromium/third_party/libjingle/source/talk/p2p/base/transportdescriptionfactory.h @@ -37,8 +37,9 @@ class SSLIdentity; namespace cricket { struct TransportOptions { - TransportOptions() : ice_restart(false) {} + TransportOptions() : ice_restart(false), prefer_passive_role(false) {} bool ice_restart; + bool prefer_passive_role; }; // Creates transport descriptions according to the supplied configuration. @@ -71,7 +72,8 @@ class TransportDescriptionFactory { const TransportDescription* current_description) const; private: - bool CreateIdentityDigest(TransportDescription* description) const; + bool SetSecurityInfo(TransportDescription* description, + ConnectionRole role) const; TransportProtocol protocol_; SecurePolicy secure_; diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/turnport.cc b/chromium/third_party/libjingle/source/talk/p2p/base/turnport.cc index a302b713524..35e51fc2da8 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/turnport.cc +++ b/chromium/third_party/libjingle/source/talk/p2p/base/turnport.cc @@ -52,10 +52,6 @@ static const int TURN_PERMISSION_TIMEOUT = 5 * 60 * 1000; // 5 minutes static const size_t TURN_CHANNEL_HEADER_SIZE = 4U; -enum { - MSG_PORT_ERROR = 1 -}; - inline bool IsTurnChannelData(uint16 msg_type) { return ((msg_type & 0xC000) == 0x4000); // MSB are 0b01 } @@ -156,7 +152,8 @@ class TurnEntry : public sigslot::has_slots<> { void SendChannelBindRequest(int delay); // Sends a packet to the given destination address. // This will wrap the packet in STUN if necessary. - int Send(const void* data, size_t size, bool payload); + int Send(const void* data, size_t size, bool payload, + talk_base::DiffServCodePoint dscp); void OnCreatePermissionSuccess(); void OnCreatePermissionError(StunMessage* response, int code); @@ -296,6 +293,14 @@ Connection* TurnPort::CreateConnection(const Candidate& address, } int TurnPort::SetOption(talk_base::Socket::Option opt, int value) { + // DSCP option is not passed to the socket. + // TODO(mallinath) - After we have the support on socket, + // remove this specialization. 
+ if (opt == talk_base::Socket::OPT_DSCP) { + SetDefaultDscpValue(static_cast<talk_base::DiffServCodePoint>(value)); + return 0; + } + if (!socket_) { // If socket is not created yet, these options will be applied during socket // creation. @@ -318,6 +323,7 @@ int TurnPort::GetError() { int TurnPort::SendTo(const void* data, size_t size, const talk_base::SocketAddress& addr, + talk_base::DiffServCodePoint dscp, bool payload) { // Try to find an entry for this specific address; we should have one. TurnEntry* entry = FindEntry(addr); @@ -332,7 +338,7 @@ int TurnPort::SendTo(const void* data, size_t size, } // Send the actual contents to the server using the usual mechanism. - int sent = entry->Send(data, size, payload); + int sent = entry->Send(data, size, payload, dscp); if (sent <= 0) { return SOCKET_ERROR; } @@ -406,7 +412,7 @@ void TurnPort::OnResolveResult(talk_base::SignalThread* signal_thread) { void TurnPort::OnSendStunPacket(const void* data, size_t size, StunRequest* request) { - if (Send(data, size) < 0) { + if (Send(data, size, DefaultDscpValue()) < 0) { LOG_J(LS_ERROR, this) << "Failed to send TURN message, err=" << socket_->GetError(); } @@ -431,15 +437,16 @@ void TurnPort::OnAllocateError() { // We will send SignalPortError asynchronously as this can be sent during // port initialization. This way it will not be blocking other port // creation. - thread()->Post(this, MSG_PORT_ERROR); + thread()->Post(this, MSG_ERROR); } void TurnPort::OnMessage(talk_base::Message* message) { - if (message->message_id == MSG_PORT_ERROR) { + if (message->message_id == MSG_ERROR) { SignalPortError(this); - } else { - Port::OnMessage(message); + return; } + + Port::OnMessage(message); } void TurnPort::OnAllocateRequestTimeout() { @@ -557,8 +564,9 @@ void TurnPort::AddRequestAuthInfo(StunMessage* msg) { VERIFY(msg->AddMessageIntegrity(hash())); } -int TurnPort::Send(const void* data, size_t len) { - return socket_->SendTo(data, len, server_address_.address); +int TurnPort::Send(const void* data, size_t len, + talk_base::DiffServCodePoint dscp) { + return socket_->SendTo(data, len, server_address_.address, dscp); } void TurnPort::UpdateHash() { @@ -890,7 +898,8 @@ void TurnEntry::SendChannelBindRequest(int delay) { port_, this, channel_id_, ext_addr_), delay); } -int TurnEntry::Send(const void* data, size_t size, bool payload) { +int TurnEntry::Send(const void* data, size_t size, bool payload, + talk_base::DiffServCodePoint dscp) { talk_base::ByteBuffer buf; if (state_ != STATE_BOUND) { // If we haven't bound the channel yet, we have to use a Send Indication. 
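
Both this turnport.cc hunk and the tcpport.cc hunk earlier intercept talk_base::Socket::OPT_DSCP at the Port level, and the TODOs note that the value should eventually be applied on the socket itself rather than carried alongside every SendTo/Send call. For orientation, here is a minimal sketch of what socket-level DSCP marking typically looks like on a POSIX IPv4 socket; SetDscpOnSocket is an invented helper name and is not part of libjingle.

// Sketch: applying a DiffServ code point directly to a socket.
// The 6-bit DSCP sits in the upper bits of the IPv4 TOS byte (the low two
// bits are ECN), so the raw option value is dscp << 2.
#include <netinet/in.h>
#include <sys/socket.h>

int SetDscpOnSocket(int fd, int dscp) {
  int tos = dscp << 2;  // e.g. DSCP AF41 (34) becomes TOS byte 0x88.
  // For an IPv6 socket the equivalent option is IPV6_TCLASS.
  return setsockopt(fd, IPPROTO_IP, IP_TOS, &tos, sizeof(tos));
}

Until that support lands, the ports cache the value via SetDefaultDscpValue() and thread a dscp argument through SendTo(), Send() and SendPacket(), which is what most of the signature changes in this patch are doing.
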
@@ -915,7 +924,7 @@ int TurnEntry::Send(const void* data, size_t size, bool payload) { buf.WriteUInt16(static_cast<uint16>(size)); buf.WriteBytes(reinterpret_cast<const char*>(data), size); } - return port_->Send(buf.Data(), buf.Length()); + return port_->Send(buf.Data(), buf.Length(), dscp); } void TurnEntry::OnCreatePermissionSuccess() { diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/turnport.h b/chromium/third_party/libjingle/source/talk/p2p/base/turnport.h index fa23d53475b..4462b0c8c99 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/turnport.h +++ b/chromium/third_party/libjingle/source/talk/p2p/base/turnport.h @@ -74,6 +74,7 @@ class TurnPort : public Port { const Candidate& c, PortInterface::CandidateOrigin origin); virtual int SendTo(const void* data, size_t size, const talk_base::SocketAddress& addr, + talk_base::DiffServCodePoint dscp, bool payload); virtual int SetOption(talk_base::Socket::Option opt, int value); virtual int GetOption(talk_base::Socket::Option opt, int* value); @@ -106,6 +107,8 @@ class TurnPort : public Port { const RelayCredentials& credentials); private: + enum { MSG_ERROR = MSG_FIRST_AVAILABLE }; + typedef std::list<TurnEntry*> EntryList; typedef std::map<talk_base::Socket::Option, int> SocketOptionsMap; @@ -138,7 +141,7 @@ class TurnPort : public Port { bool ScheduleRefresh(int lifetime); void SendRequest(StunRequest* request, int delay); - int Send(const void* data, size_t size); + int Send(const void* data, size_t size, talk_base::DiffServCodePoint dscp); void UpdateHash(); bool UpdateNonce(StunMessage* response); diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/turnport_unittest.cc b/chromium/third_party/libjingle/source/talk/p2p/base/turnport_unittest.cc index 6304ce67898..726175c5c2f 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/turnport_unittest.cc +++ b/chromium/third_party/libjingle/source/talk/p2p/base/turnport_unittest.cc @@ -27,6 +27,7 @@ #include "talk/base/asynctcpsocket.h" #include "talk/base/buffer.h" +#include "talk/base/dscp.h" #include "talk/base/firewallsocketserver.h" #include "talk/base/logging.h" #include "talk/base/gunit.h" @@ -217,8 +218,8 @@ class TurnPortTest : public testing::Test, for (size_t j = 0; j < i + 1; ++j) { buf[j] = 0xFF - j; } - conn1->Send(buf, i + 1); - conn2->Send(buf, i + 1); + conn1->Send(buf, i + 1, talk_base::DSCP_NO_CHANGE); + conn2->Send(buf, i + 1, talk_base::DSCP_NO_CHANGE); main_->ProcessMessages(0); } diff --git a/chromium/third_party/libjingle/source/talk/p2p/base/turnserver.cc b/chromium/third_party/libjingle/source/talk/p2p/base/turnserver.cc index 8260f3dbe71..17ecf3507c6 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/base/turnserver.cc +++ b/chromium/third_party/libjingle/source/talk/p2p/base/turnserver.cc @@ -564,7 +564,8 @@ void TurnServer::SendStun(Connection* conn, StunMessage* msg) { void TurnServer::Send(Connection* conn, const talk_base::ByteBuffer& buf) { - conn->socket()->SendTo(buf.Data(), buf.Length(), conn->src()); + conn->socket()->SendTo(buf.Data(), buf.Length(), conn->src(), + talk_base::DSCP_NO_CHANGE); } void TurnServer::OnAllocationDestroyed(Allocation* allocation) { @@ -936,7 +937,7 @@ void TurnServer::Allocation::SendErrorResponse(const TurnMessage* req, int code, void TurnServer::Allocation::SendExternal(const void* data, size_t size, const talk_base::SocketAddress& peer) { - external_socket_->SendTo(data, size, peer); + external_socket_->SendTo(data, size, peer, talk_base::DSCP_NO_CHANGE); } void 
TurnServer::Allocation::OnMessage(talk_base::Message* msg) { diff --git a/chromium/third_party/libjingle/source/talk/p2p/client/httpportallocator.h b/chromium/third_party/libjingle/source/talk/p2p/client/httpportallocator.h index cb4c8f82bba..a0ef3b722de 100644 --- a/chromium/third_party/libjingle/source/talk/p2p/client/httpportallocator.h +++ b/chromium/third_party/libjingle/source/talk/p2p/client/httpportallocator.h @@ -32,7 +32,6 @@ #include <string> #include <vector> -#include "talk/base/gunit_prod.h" #include "talk/p2p/client/basicportallocator.h" class HttpPortAllocatorTest_TestSessionRequestUrl_Test; @@ -129,6 +128,9 @@ class HttpPortAllocatorSessionBase : public BasicPortAllocatorSession { virtual void SendSessionRequest(const std::string& host, int port) = 0; virtual void ReceiveSessionResponse(const std::string& response); + // Made public for testing. Should be protected. + std::string GetSessionRequestUrl(); + protected: virtual void GetPortConfigurations(); void TryCreateRelaySession(); @@ -137,11 +139,7 @@ class HttpPortAllocatorSessionBase : public BasicPortAllocatorSession { BasicPortAllocatorSession::allocator()); } - std::string GetSessionRequestUrl(); - private: - FRIEND_TEST(::HttpPortAllocatorTest, TestSessionRequestUrl); - std::vector<std::string> relay_hosts_; std::vector<talk_base::SocketAddress> stun_hosts_; std::string relay_token_; diff --git a/chromium/third_party/libjingle/source/talk/session/media/channel.cc b/chromium/third_party/libjingle/source/talk/session/media/channel.cc index 1bce2acdccb..f6259e9a34b 100644 --- a/chromium/third_party/libjingle/source/talk/session/media/channel.cc +++ b/chromium/third_party/libjingle/source/talk/session/media/channel.cc @@ -30,6 +30,7 @@ #include "talk/base/buffer.h" #include "talk/base/byteorder.h" #include "talk/base/common.h" +#include "talk/base/dscp.h" #include "talk/base/logging.h" #include "talk/media/base/rtputils.h" #include "talk/p2p/base/transportchannel.h" @@ -55,6 +56,8 @@ enum { MSG_SETRENDERER, MSG_ADDRECVSTREAM, MSG_REMOVERECVSTREAM, + MSG_ADDSENDSTREAM, + MSG_REMOVESENDSTREAM, MSG_SETRINGBACKTONE, MSG_PLAYRINGBACKTONE, MSG_SETMAXSENDBANDWIDTH, @@ -74,7 +77,7 @@ enum { MSG_DATARECEIVED, MSG_SETCAPTURER, MSG_ISSCREENCASTING, - MSG_SCREENCASTFPS, + MSG_GETSCREENCASTDETAILS, MSG_SETSCREENCASTFACTORY, MSG_FIRSTPACKETRECEIVED, MSG_SESSION_ERROR, @@ -187,6 +190,7 @@ struct VideoStatsMessageData : public talk_base::MessageData { struct PacketMessageData : public talk_base::MessageData { talk_base::Buffer packet; + talk_base::DiffServCodePoint dscp; }; struct AudioRenderMessageData: public talk_base::MessageData { @@ -334,12 +338,14 @@ struct IsScreencastingMessageData : public talk_base::MessageData { bool result; }; -struct ScreencastFpsMessageData : public talk_base::MessageData { - explicit ScreencastFpsMessageData(uint32 s) - : ssrc(s), result(0) { +struct VideoChannel::ScreencastDetailsMessageData : + public talk_base::MessageData { + explicit ScreencastDetailsMessageData(uint32 s) + : ssrc(s), fps(0), screencast_max_pixels(0) { } uint32 ssrc; - int result; + int fps; + int screencast_max_pixels; }; struct SetScreenCaptureFactoryMessageData : public talk_base::MessageData { @@ -480,6 +486,18 @@ bool BaseChannel::RemoveRecvStream(uint32 ssrc) { return data.result; } +bool BaseChannel::AddSendStream(const StreamParams& sp) { + StreamMessageData data(sp); + Send(MSG_ADDSENDSTREAM, &data); + return data.result; +} + +bool BaseChannel::RemoveSendStream(uint32 ssrc) { + SsrcMessageData data(ssrc); + 
Send(MSG_REMOVESENDSTREAM, &data); + return data.result; +} + bool BaseChannel::SetLocalContent(const MediaContentDescription* content, ContentAction action) { SetContentData data(content, action); @@ -550,12 +568,14 @@ bool BaseChannel::IsReadyToSend() const { was_ever_writable(); } -bool BaseChannel::SendPacket(talk_base::Buffer* packet) { - return SendPacket(false, packet); +bool BaseChannel::SendPacket(talk_base::Buffer* packet, + talk_base::DiffServCodePoint dscp) { + return SendPacket(false, packet, dscp); } -bool BaseChannel::SendRtcp(talk_base::Buffer* packet) { - return SendPacket(true, packet); +bool BaseChannel::SendRtcp(talk_base::Buffer* packet, + talk_base::DiffServCodePoint dscp) { + return SendPacket(true, packet, dscp); } int BaseChannel::SetOption(SocketType type, talk_base::Socket::Option opt, @@ -619,7 +639,8 @@ bool BaseChannel::PacketIsRtcp(const TransportChannel* channel, rtcp_mux_filter_.DemuxRtcp(data, static_cast<int>(len))); } -bool BaseChannel::SendPacket(bool rtcp, talk_base::Buffer* packet) { +bool BaseChannel::SendPacket(bool rtcp, talk_base::Buffer* packet, + talk_base::DiffServCodePoint dscp) { // Unless we're sending optimistically, we only allow packets through when we // are completely writable. if (!optimistic_data_send_ && !writable_) { @@ -638,6 +659,7 @@ bool BaseChannel::SendPacket(bool rtcp, talk_base::Buffer* packet) { int message_id = (!rtcp) ? MSG_RTPPACKET : MSG_RTCPPACKET; PacketMessageData* data = new PacketMessageData; packet->TransferTo(&data->packet); + data->dscp = dscp; worker_thread_->Post(this, message_id, data); return true; } @@ -715,7 +737,7 @@ bool BaseChannel::SendPacket(bool rtcp, talk_base::Buffer* packet) { } // Bon voyage. - int ret = channel->SendPacket(packet->data(), packet->length(), + int ret = channel->SendPacket(packet->data(), packet->length(), dscp, (secure() && secure_dtls()) ? 
PF_SRTP_BYPASS : 0); if (ret != static_cast<int>(packet->length())) { if (channel->GetError() == EWOULDBLOCK) { @@ -1003,8 +1025,13 @@ bool BaseChannel::SetupDtlsSrtp(bool rtcp_channel) { &dtls_buffer[offset], SRTP_MASTER_KEY_SALT_LEN); std::vector<unsigned char> *send_key, *recv_key; + talk_base::SSLRole role; + if (!channel->GetSslRole(&role)) { + LOG(LS_WARNING) << "GetSslRole failed"; + return false; + } - if (channel->GetIceRole() == ICEROLE_CONTROLLING) { + if (role == talk_base::SSL_SERVER) { send_key = &server_write_key; recv_key = &client_write_key; } else { @@ -1144,6 +1171,16 @@ bool BaseChannel::RemoveRecvStream_w(uint32 ssrc) { return media_channel()->RemoveRecvStream(ssrc); } +bool BaseChannel::AddSendStream_w(const StreamParams& sp) { + ASSERT(worker_thread() == talk_base::Thread::Current()); + return media_channel()->AddSendStream(sp); +} + +bool BaseChannel::RemoveSendStream_w(uint32 ssrc) { + ASSERT(worker_thread() == talk_base::Thread::Current()); + return media_channel()->RemoveSendStream(ssrc); +} + bool BaseChannel::UpdateLocalStreams_w(const std::vector<StreamParams>& streams, ContentAction action) { if (!VERIFY(action == CA_OFFER || action == CA_ANSWER || @@ -1354,6 +1391,16 @@ void BaseChannel::OnMessage(talk_base::Message *pmsg) { data->result = RemoveRecvStream_w(data->ssrc); break; } + case MSG_ADDSENDSTREAM: { + StreamMessageData* data = static_cast<StreamMessageData*>(pmsg->pdata); + data->result = AddSendStream_w(data->sp); + break; + } + case MSG_REMOVESENDSTREAM: { + SsrcMessageData* data = static_cast<SsrcMessageData*>(pmsg->pdata); + data->result = RemoveSendStream_w(data->ssrc); + break; + } case MSG_SETMAXSENDBANDWIDTH: { SetBandwidthData* data = static_cast<SetBandwidthData*>(pmsg->pdata); data->result = SetMaxSendBandwidth_w(data->value); @@ -1363,7 +1410,7 @@ void BaseChannel::OnMessage(talk_base::Message *pmsg) { case MSG_RTPPACKET: case MSG_RTCPPACKET: { PacketMessageData* data = static_cast<PacketMessageData*>(pmsg->pdata); - SendPacket(pmsg->message_id == MSG_RTCPPACKET, &data->packet); + SendPacket(pmsg->message_id == MSG_RTCPPACKET, &data->packet, data->dscp); delete data; // because it is Posted break; } @@ -1959,10 +2006,16 @@ bool VideoChannel::IsScreencasting() { return data.result; } -int VideoChannel::ScreencastFps(uint32 ssrc) { - ScreencastFpsMessageData data(ssrc); - Send(MSG_SCREENCASTFPS, &data); - return data.result; +int VideoChannel::GetScreencastFps(uint32 ssrc) { + ScreencastDetailsMessageData data(ssrc); + Send(MSG_GETSCREENCASTDETAILS, &data); + return data.fps; +} + +int VideoChannel::GetScreencastMaxPixels(uint32 ssrc) { + ScreencastDetailsMessageData data(ssrc); + Send(MSG_GETSCREENCASTDETAILS, &data); + return data.screencast_max_pixels; } bool VideoChannel::SendIntraFrame() { @@ -2179,14 +2232,16 @@ bool VideoChannel::IsScreencasting_w() const { return !screencast_capturers_.empty(); } -int VideoChannel::ScreencastFps_w(uint32 ssrc) const { - ScreencastMap::const_iterator iter = screencast_capturers_.find(ssrc); +void VideoChannel::ScreencastDetails_w( + ScreencastDetailsMessageData* data) const { + ScreencastMap::const_iterator iter = screencast_capturers_.find(data->ssrc); if (iter == screencast_capturers_.end()) { - return 0; + return; } VideoCapturer* capturer = iter->second; const VideoFormat* video_format = capturer->GetCaptureFormat(); - return VideoFormat::IntervalToFps(video_format->interval); + data->fps = VideoFormat::IntervalToFps(video_format->interval); + data->screencast_max_pixels = 
capturer->screencast_max_pixels(); } void VideoChannel::SetScreenCaptureFactory_w( @@ -2257,10 +2312,10 @@ void VideoChannel::OnMessage(talk_base::Message *pmsg) { data->result = IsScreencasting_w(); break; } - case MSG_SCREENCASTFPS: { - ScreencastFpsMessageData* data = - static_cast<ScreencastFpsMessageData*>(pmsg->pdata); - data->result = ScreencastFps_w(data->ssrc); + case MSG_GETSCREENCASTDETAILS: { + ScreencastDetailsMessageData* data = + static_cast<ScreencastDetailsMessageData*>(pmsg->pdata); + ScreencastDetails_w(data); break; } case MSG_SENDINTRAFRAME: { diff --git a/chromium/third_party/libjingle/source/talk/session/media/channel.h b/chromium/third_party/libjingle/source/talk/session/media/channel.h index eccadd32d60..0d66be9a90d 100644 --- a/chromium/third_party/libjingle/source/talk/session/media/channel.h +++ b/chromium/third_party/libjingle/source/talk/session/media/channel.h @@ -119,6 +119,8 @@ class BaseChannel // Multiplexing bool AddRecvStream(const StreamParams& sp); bool RemoveRecvStream(uint32 ssrc); + bool AddSendStream(const StreamParams& sp); + bool RemoveSendStream(uint32 ssrc); // Monitoring void StartConnectionMonitor(int cms); @@ -249,8 +251,10 @@ class BaseChannel void FlushRtcpMessages(); // NetworkInterface implementation, called by MediaEngine - virtual bool SendPacket(talk_base::Buffer* packet); - virtual bool SendRtcp(talk_base::Buffer* packet); + virtual bool SendPacket(talk_base::Buffer* packet, + talk_base::DiffServCodePoint dscp); + virtual bool SendRtcp(talk_base::Buffer* packet, + talk_base::DiffServCodePoint dscp); virtual int SetOption(SocketType type, talk_base::Socket::Option o, int val); // From TransportChannel @@ -261,7 +265,8 @@ class BaseChannel bool PacketIsRtcp(const TransportChannel* channel, const char* data, size_t len); - bool SendPacket(bool rtcp, talk_base::Buffer* packet); + bool SendPacket(bool rtcp, talk_base::Buffer* packet, + talk_base::DiffServCodePoint dscp); virtual bool WantsPacket(bool rtcp, talk_base::Buffer* packet); void HandlePacket(bool rtcp, talk_base::Buffer* packet); @@ -277,6 +282,8 @@ class BaseChannel void ChannelNotWritable_w(); bool AddRecvStream_w(const StreamParams& sp); bool RemoveRecvStream_w(uint32 ssrc); + bool AddSendStream_w(const StreamParams& sp); + bool RemoveSendStream_w(uint32 ssrc); virtual bool ShouldSetupDtlsSrtp() const; // Do the DTLS key expansion and impose it on the SRTP/SRTCP filters. // |rtcp_channel| indicates whether to set up the RTP or RTCP filter. @@ -488,13 +495,13 @@ class VideoChannel : public BaseChannel { // TODO(pthatcher): Refactor to use a "capture id" instead of an // ssrc here as the "key". VideoCapturer* AddScreencast(uint32 ssrc, const ScreencastId& id); - VideoCapturer* GetScreencastCapturer(uint32 ssrc); bool SetCapturer(uint32 ssrc, VideoCapturer* capturer); bool RemoveScreencast(uint32 ssrc); // True if we've added a screencast. Doesn't matter if the capturer // has been started or not. bool IsScreencasting(); - int ScreencastFps(uint32 ssrc); + int GetScreencastFps(uint32 ssrc); + int GetScreencastMaxPixels(uint32 ssrc); // Get statistics about the current media session. 
bool GetStats(VideoMediaInfo* stats); @@ -525,6 +532,7 @@ class VideoChannel : public BaseChannel { private: typedef std::map<uint32, VideoCapturer*> ScreencastMap; + struct ScreencastDetailsMessageData; // overrides from BaseChannel virtual void ChangeState(); @@ -544,12 +552,11 @@ class VideoChannel : public BaseChannel { void SetRenderer_w(uint32 ssrc, VideoRenderer* renderer); VideoCapturer* AddScreencast_w(uint32 ssrc, const ScreencastId& id); - VideoCapturer* GetScreencastCapturer_w(uint32 ssrc); bool SetCapturer_w(uint32 ssrc, VideoCapturer* capturer); bool RemoveScreencast_w(uint32 ssrc); void OnScreencastWindowEvent_s(uint32 ssrc, talk_base::WindowEvent we); bool IsScreencasting_w() const; - int ScreencastFps_w(uint32 ssrc) const; + void ScreencastDetails_w(ScreencastDetailsMessageData* d) const; void SetScreenCaptureFactory_w( ScreenCapturerFactory* screencapture_factory); bool GetStats_w(VideoMediaInfo* stats); @@ -590,11 +597,6 @@ class DataChannel : public BaseChannel { ~DataChannel(); bool Init(); - // downcasts a MediaChannel - virtual DataMediaChannel* media_channel() const { - return static_cast<DataMediaChannel*>(BaseChannel::media_channel()); - } - virtual bool SendData(const SendDataParams& params, const talk_base::Buffer& payload, SendDataResult* result); @@ -616,6 +618,12 @@ class DataChannel : public BaseChannel { // both local and remote descriptions are set, and the channel is unblocked. sigslot::signal1<bool> SignalReadyToSendData; + protected: + // downcasts a MediaChannel. + virtual DataMediaChannel* media_channel() const { + return static_cast<DataMediaChannel*>(BaseChannel::media_channel()); + } + private: struct SendDataMessageData : public talk_base::MessageData { SendDataMessageData(const SendDataParams& params, diff --git a/chromium/third_party/libjingle/source/talk/session/media/channel_unittest.cc b/chromium/third_party/libjingle/source/talk/session/media/channel_unittest.cc index ff03b49805f..fda89b3c6ed 100644 --- a/chromium/third_party/libjingle/source/talk/session/media/channel_unittest.cc +++ b/chromium/third_party/libjingle/source/talk/session/media/channel_unittest.cc @@ -1794,7 +1794,7 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> { transport_channel->SignalReadPacket( transport_channel, reinterpret_cast<const char*>(kBadPacket), sizeof(kBadPacket), 0); - EXPECT_EQ_WAIT(T::MediaChannel::ERROR_PLAY_SRTP_AUTH_FAILED, error_, 500); + EXPECT_EQ_WAIT(T::MediaChannel::ERROR_PLAY_SRTP_ERROR, error_, 500); } void TestOnReadyToSend() { @@ -2189,7 +2189,7 @@ TEST_F(VoiceChannelTest, TestVoiceSpecificMuteStream) { } // Test that keyboard automute works correctly and signals upwards. 
-TEST_F(VoiceChannelTest, TestKeyboardMute) { +TEST_F(VoiceChannelTest, DISABLED_TestKeyboardMute) { CreateChannels(0, 0); EXPECT_FALSE(media_channel1_->IsStreamMuted(0)); EXPECT_EQ(cricket::VoiceMediaChannel::ERROR_NONE, error_); diff --git a/chromium/third_party/libjingle/source/talk/session/media/channelmanager.cc b/chromium/third_party/libjingle/source/talk/session/media/channelmanager.cc index c16b066be11..36c71832da3 100644 --- a/chromium/third_party/libjingle/source/talk/session/media/channelmanager.cc +++ b/chromium/third_party/libjingle/source/talk/session/media/channelmanager.cc @@ -670,7 +670,16 @@ VideoCapturer* ChannelManager::CreateVideoCapturer() { } return NULL; } - return device_manager_->CreateVideoCapturer(device); + VideoCapturer* capturer = device_manager_->CreateVideoCapturer(device); + if (capturer && default_video_encoder_config_.max_codec.id != 0) { + // For now, use the aspect ratio of the default_video_encoder_config_, + // which may be different than the native aspect ratio of the start + // format the camera may use. + capturer->UpdateAspectRatio( + default_video_encoder_config_.max_codec.width, + default_video_encoder_config_.max_codec.height); + } + return capturer; } bool ChannelManager::SetCaptureDevice_w(const Device* cam_device) { diff --git a/chromium/third_party/libjingle/source/talk/session/media/channelmanager.h b/chromium/third_party/libjingle/source/talk/session/media/channelmanager.h index b1967bfcd50..04af5e19632 100644 --- a/chromium/third_party/libjingle/source/talk/session/media/channelmanager.h +++ b/chromium/third_party/libjingle/source/talk/session/media/channelmanager.h @@ -163,9 +163,6 @@ class ChannelManager : public talk_base::MessageHandler, bool monitoring() const { return monitoring_; } // Sets the local renderer where to renderer the local camera. bool SetLocalRenderer(VideoRenderer* renderer); - // Sets the externally provided video capturer. The ssrc is the ssrc of the - // (video) stream for which the video capturer should be set. - bool SetVideoCapturer(VideoCapturer* capturer); bool capturing() const { return capturing_; } // Configures the logging output of the mediaengine(s). diff --git a/chromium/third_party/libjingle/source/talk/session/media/channelmanager_unittest.cc b/chromium/third_party/libjingle/source/talk/session/media/channelmanager_unittest.cc index 32321ebcd01..6f7c7687154 100644 --- a/chromium/third_party/libjingle/source/talk/session/media/channelmanager_unittest.cc +++ b/chromium/third_party/libjingle/source/talk/session/media/channelmanager_unittest.cc @@ -149,7 +149,8 @@ TEST_F(ChannelManagerTest, CreateDestroyChannels) { } // Test that we can create and destroy a voice and video channel with a worker. 
-TEST_F(ChannelManagerTest, CreateDestroyChannelsOnThread) { +// BUG=https://code.google.com/p/webrtc/issues/detail?id=2355 +TEST_F(ChannelManagerTest, DISABLED_CreateDestroyChannelsOnThread) { worker_.Start(); EXPECT_TRUE(cm_->set_worker_thread(&worker_)); EXPECT_TRUE(cm_->Init()); @@ -203,6 +204,45 @@ TEST_F(ChannelManagerTest, SetDefaultVideoEncoderConfig) { EXPECT_EQ(config, fme_->default_video_encoder_config()); } +struct GetCapturerFrameSize : public sigslot::has_slots<> { + void OnVideoFrame(VideoCapturer* capturer, const VideoFrame* frame) { + width = frame->GetWidth(); + height = frame->GetHeight(); + } + GetCapturerFrameSize(VideoCapturer* capturer) : width(0), height(0) { + capturer->SignalVideoFrame.connect(this, + &GetCapturerFrameSize::OnVideoFrame); + static_cast<FakeVideoCapturer*>(capturer)->CaptureFrame(); + } + size_t width; + size_t height; +}; + +TEST_F(ChannelManagerTest, DefaultCapturerAspectRatio) { + VideoCodec codec(100, "VP8", 640, 360, 30, 0); + VideoFormat format(640, 360, 33, FOURCC_ANY); + VideoEncoderConfig config(codec, 1, 2); + EXPECT_TRUE(cm_->Init()); + // A capturer created before the default encoder config is set will have no + // set aspect ratio, so it'll be 4:3 (based on the fake video capture impl). + VideoCapturer* capturer = cm_->CreateVideoCapturer(); + ASSERT_TRUE(capturer != NULL); + EXPECT_EQ(CS_RUNNING, capturer->Start(format)); + GetCapturerFrameSize size(capturer); + EXPECT_EQ(640u, size.width); + EXPECT_EQ(480u, size.height); + delete capturer; + // Try again, but with the encoder config set to 16:9. + EXPECT_TRUE(cm_->SetDefaultVideoEncoderConfig(config)); + capturer = cm_->CreateVideoCapturer(); + ASSERT_TRUE(capturer != NULL); + EXPECT_EQ(CS_RUNNING, capturer->Start(format)); + GetCapturerFrameSize cropped_size(capturer); + EXPECT_EQ(640u, cropped_size.width); + EXPECT_EQ(360u, cropped_size.height); + delete capturer; +} + // Test that SetDefaultVideoCodec passes through the right values. TEST_F(ChannelManagerTest, SetDefaultVideoCodecBeforeInit) { cricket::VideoCodec codec(96, "G264", 1280, 720, 60, 0); diff --git a/chromium/third_party/libjingle/source/talk/session/media/mediasession.cc b/chromium/third_party/libjingle/source/talk/session/media/mediasession.cc index 1215008b05d..85612308211 100644 --- a/chromium/third_party/libjingle/source/talk/session/media/mediasession.cc +++ b/chromium/third_party/libjingle/source/talk/session/media/mediasession.cc @@ -38,6 +38,7 @@ #include "talk/base/stringutils.h" #include "talk/media/base/constants.h" #include "talk/media/base/cryptoparams.h" +#include "talk/media/sctp/sctpdataengine.h" #include "talk/p2p/base/constants.h" #include "talk/session/media/channelmanager.h" #include "talk/session/media/srtpfilter.h" diff --git a/chromium/third_party/libjingle/source/talk/session/media/mediasession.h b/chromium/third_party/libjingle/source/talk/session/media/mediasession.h index 327480466a7..5dfc765e7d2 100644 --- a/chromium/third_party/libjingle/source/talk/session/media/mediasession.h +++ b/chromium/third_party/libjingle/source/talk/session/media/mediasession.h @@ -83,10 +83,6 @@ extern const char kMediaProtocolDtlsSctp[]; // Options to control how session descriptions are generated. const int kAutoBandwidth = -1; const int kBufferedModeDisabled = 0; -// TODO(pthatcher): This is imposed by usrsctp lib. I have no idea -// why it is 9. Figure out why, and make it bigger, hopefully up to -// 2^16-1. 
-const uint32 kMaxSctpSid = 9; struct MediaSessionOptions { MediaSessionOptions() : diff --git a/chromium/third_party/libjingle/source/talk/session/media/mediasession_unittest.cc b/chromium/third_party/libjingle/source/talk/session/media/mediasession_unittest.cc index 6e04915704a..f2e576ca92c 100644 --- a/chromium/third_party/libjingle/source/talk/session/media/mediasession_unittest.cc +++ b/chromium/third_party/libjingle/source/talk/session/media/mediasession_unittest.cc @@ -219,25 +219,19 @@ class MediaSessionDescriptionFactoryTest : public testing::Test { current_desc.reset(new SessionDescription()); EXPECT_TRUE(current_desc->AddTransportInfo( TransportInfo("audio", - TransportDescription("", std::vector<std::string>(), + TransportDescription("", current_audio_ufrag, - current_audio_pwd, - cricket::ICEMODE_FULL, - NULL, Candidates())))); + current_audio_pwd)))); EXPECT_TRUE(current_desc->AddTransportInfo( TransportInfo("video", - TransportDescription("", std::vector<std::string>(), + TransportDescription("", current_video_ufrag, - current_video_pwd, - cricket::ICEMODE_FULL, - NULL, Candidates())))); + current_video_pwd)))); EXPECT_TRUE(current_desc->AddTransportInfo( TransportInfo("data", - TransportDescription("", std::vector<std::string>(), + TransportDescription("", current_data_ufrag, - current_data_pwd, - cricket::ICEMODE_FULL, - NULL, Candidates())))); + current_data_pwd)))); } if (offer) { desc.reset(f1_.CreateOffer(options, current_desc.get())); diff --git a/chromium/third_party/libjingle/source/talk/session/media/mediasessionclient.cc b/chromium/third_party/libjingle/source/talk/session/media/mediasessionclient.cc index b54891e8ccb..246592c617f 100644 --- a/chromium/third_party/libjingle/source/talk/session/media/mediasessionclient.cc +++ b/chromium/third_party/libjingle/source/talk/session/media/mediasessionclient.cc @@ -35,6 +35,7 @@ #include "talk/base/stringutils.h" #include "talk/media/base/cryptoparams.h" #include "talk/media/base/capturemanager.h" +#include "talk/media/sctp/sctpdataengine.h" #include "talk/p2p/base/constants.h" #include "talk/p2p/base/parsing.h" #include "talk/session/media/mediamessages.h" diff --git a/chromium/third_party/libjingle/source/talk/session/media/srtpfilter.h b/chromium/third_party/libjingle/source/talk/session/media/srtpfilter.h index 9b48dcd957c..b6a269952a4 100644 --- a/chromium/third_party/libjingle/source/talk/session/media/srtpfilter.h +++ b/chromium/third_party/libjingle/source/talk/session/media/srtpfilter.h @@ -271,7 +271,10 @@ class SrtpStat { error(in_error) { } bool operator <(const FailureKey& key) const { - return ssrc < key.ssrc || mode < key.mode || error < key.error; + return + (ssrc < key.ssrc) || + (ssrc == key.ssrc && mode < key.mode) || + (ssrc == key.ssrc && mode == key.mode && error < key.error); } uint32 ssrc; SrtpFilter::Mode mode; diff --git a/chromium/third_party/libjingle/source/talk/session/tunnel/pseudotcpchannel.cc b/chromium/third_party/libjingle/source/talk/session/tunnel/pseudotcpchannel.cc index 8b9a19f0b32..92e9e0ea5d0 100644 --- a/chromium/third_party/libjingle/source/talk/session/tunnel/pseudotcpchannel.cc +++ b/chromium/third_party/libjingle/source/talk/session/tunnel/pseudotcpchannel.cc @@ -502,7 +502,7 @@ IPseudoTcpNotify::WriteResult PseudoTcpChannel::TcpWritePacket( ASSERT(cs_.CurrentThreadIsOwner()); ASSERT(tcp == tcp_); ASSERT(NULL != channel_); - int sent = channel_->SendPacket(buffer, len); + int sent = channel_->SendPacket(buffer, len, talk_base::DSCP_NO_CHANGE); if (sent > 0) { 
//LOG_F(LS_VERBOSE) << "(" << sent << ") Sent"; return IPseudoTcpNotify::WR_SUCCESS; diff --git a/chromium/third_party/libjingle/source/talk/xmpp/constants.cc b/chromium/third_party/libjingle/source/talk/xmpp/constants.cc index 196a1ec2c45..c56796bc9e7 100644 --- a/chromium/third_party/libjingle/source/talk/xmpp/constants.cc +++ b/chromium/third_party/libjingle/source/talk/xmpp/constants.cc @@ -363,6 +363,7 @@ const StaticQName QN_ATTR_STATUS = { STR_EMPTY, "status" }; // Presence connection status const char STR_PSTN_CONFERENCE_STATUS_CONNECTING[] = "connecting"; +const char STR_PSTN_CONFERENCE_STATUS_JOINING[] = "joining"; const char STR_PSTN_CONFERENCE_STATUS_CONNECTED[] = "connected"; const char STR_PSTN_CONFERENCE_STATUS_HANGUP[] = "hangup"; diff --git a/chromium/third_party/libjingle/source/talk/xmpp/constants.h b/chromium/third_party/libjingle/source/talk/xmpp/constants.h index cd6d2b7c2c4..c53abb5b86f 100644 --- a/chromium/third_party/libjingle/source/talk/xmpp/constants.h +++ b/chromium/third_party/libjingle/source/talk/xmpp/constants.h @@ -322,6 +322,7 @@ extern const StaticQName QN_ATTR_STATUS; // Presence connection status extern const char STR_PSTN_CONFERENCE_STATUS_CONNECTING[]; +extern const char STR_PSTN_CONFERENCE_STATUS_JOINING[]; extern const char STR_PSTN_CONFERENCE_STATUS_CONNECTED[]; extern const char STR_PSTN_CONFERENCE_STATUS_HANGUP[]; diff --git a/chromium/third_party/libjingle/source/talk/xmpp/hangoutpubsubclient.cc b/chromium/third_party/libjingle/source/talk/xmpp/hangoutpubsubclient.cc index edbf4dddbeb..b6669a10865 100644 --- a/chromium/third_party/libjingle/source/talk/xmpp/hangoutpubsubclient.cc +++ b/chromium/third_party/libjingle/source/talk/xmpp/hangoutpubsubclient.cc @@ -238,7 +238,10 @@ class PubSubStateClient : public sigslot::has_slots<> { } PubSubStateChange<C> change; - change.publisher_nick = info.publisher_nick; + if (!retracted) { + // Retracts do not have publisher information. + change.publisher_nick = info.publisher_nick; + } change.published_nick = info.published_nick; change.old_state = old_state; change.new_state = new_state; diff --git a/chromium/third_party/libjingle/source/talk/xmpp/rostermodule.h b/chromium/third_party/libjingle/source/talk/xmpp/rostermodule.h index eafd5954446..7e14dc1314b 100644 --- a/chromium/third_party/libjingle/source/talk/xmpp/rostermodule.h +++ b/chromium/third_party/libjingle/source/talk/xmpp/rostermodule.h @@ -81,9 +81,14 @@ enum XmppPresenceAvailable { enum XmppPresenceConnectionStatus { XMPP_CONNECTION_STATUS_UNKNOWN = 0, + // Status set by the server while the user is being rung. XMPP_CONNECTION_STATUS_CONNECTING = 1, - XMPP_CONNECTION_STATUS_CONNECTED = 2, - XMPP_CONNECTION_STATUS_HANGUP = 3, + // Status set by the client when the user has accepted the ring but before + // the client has joined the call. + XMPP_CONNECTION_STATUS_JOINING = 2, + // Status set by the client as part of joining the call. + XMPP_CONNECTION_STATUS_CONNECTED = 3, + XMPP_CONNECTION_STATUS_HANGUP = 4, }; //! 
Presence Information diff --git a/chromium/third_party/libjingle/source/talk/xmpp/rostermoduleimpl.cc b/chromium/third_party/libjingle/source/talk/xmpp/rostermoduleimpl.cc index 24228803286..31b3abdf0fb 100644 --- a/chromium/third_party/libjingle/source/talk/xmpp/rostermoduleimpl.cc +++ b/chromium/third_party/libjingle/source/talk/xmpp/rostermoduleimpl.cc @@ -300,6 +300,8 @@ XmppPresenceImpl::connection_status() const { return XMPP_CONNECTION_STATUS_CONNECTING; else if (status == STR_PSTN_CONFERENCE_STATUS_CONNECTED) return XMPP_CONNECTION_STATUS_CONNECTED; + else if (status == STR_PSTN_CONFERENCE_STATUS_JOINING) + return XMPP_CONNECTION_STATUS_JOINING; else if (status == STR_PSTN_CONFERENCE_STATUS_HANGUP) return XMPP_CONNECTION_STATUS_HANGUP; } @@ -349,8 +351,11 @@ XmppPresenceImpl::set_raw_xml(const XmlElement * xml) { xml->Name() != QN_PRESENCE) return XMPP_RETURN_BADARGUMENT; - raw_xml_.reset(new XmlElement(*xml)); + const std::string& type = xml->Attr(QN_TYPE); + if (type != STR_EMPTY && type != "unavailable") + return XMPP_RETURN_BADARGUMENT; + raw_xml_.reset(new XmlElement(*xml)); return XMPP_RETURN_OK; }
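
A small but easy-to-miss change further up is the SrtpStat::FailureKey comparator fix in srtpfilter.h: the old expression ssrc < key.ssrc || mode < key.mode || error < key.error is not a strict weak ordering, which std::map requires, and the patch rewrites it as a field-by-field lexicographic comparison. The same ordering can be expressed more compactly with std::tie; the struct below is only a stand-in for FailureKey, sketched here to show the idiom.

#include <cstdint>
#include <tuple>

struct FailureKeyLike {
  uint32_t ssrc;
  int mode;   // stands in for SrtpFilter::Mode
  int error;  // stands in for SrtpFilter::Error

  bool operator<(const FailureKeyLike& other) const {
    // std::tie compares element by element and only moves on to the next
    // field when the earlier ones are equal, which is exactly the chain the
    // patch spells out by hand.
    return std::tie(ssrc, mode, error) <
           std::tie(other.ssrc, other.mode, other.error);
  }
};
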
