Diffstat (limited to 'chromium/content/browser/renderer_host/media')
-rw-r--r-- chromium/content/browser/renderer_host/media/DEPS | 9
-rw-r--r-- chromium/content/browser/renderer_host/media/OWNERS | 26
-rw-r--r-- chromium/content/browser/renderer_host/media/audio_input_device_manager.cc | 237
-rw-r--r-- chromium/content/browser/renderer_host/media/audio_input_device_manager.h | 100
-rw-r--r-- chromium/content/browser/renderer_host/media/audio_input_device_manager_unittest.cc | 291
-rw-r--r-- chromium/content/browser/renderer_host/media/audio_input_renderer_host.cc | 407
-rw-r--r-- chromium/content/browser/renderer_host/media/audio_input_renderer_host.h | 162
-rw-r--r-- chromium/content/browser/renderer_host/media/audio_input_sync_writer.cc | 82
-rw-r--r-- chromium/content/browser/renderer_host/media/audio_input_sync_writer.h | 60
-rw-r--r-- chromium/content/browser/renderer_host/media/audio_mirroring_manager.cc | 164
-rw-r--r-- chromium/content/browser/renderer_host/media/audio_mirroring_manager.h | 108
-rw-r--r-- chromium/content/browser/renderer_host/media/audio_mirroring_manager_unittest.cc | 234
-rw-r--r-- chromium/content/browser/renderer_host/media/audio_renderer_host.cc | 477
-rw-r--r-- chromium/content/browser/renderer_host/media/audio_renderer_host.h | 162
-rw-r--r-- chromium/content/browser/renderer_host/media/audio_renderer_host_unittest.cc | 423
-rw-r--r-- chromium/content/browser/renderer_host/media/audio_sync_reader.cc | 200
-rw-r--r-- chromium/content/browser/renderer_host/media/audio_sync_reader.h | 91
-rw-r--r-- chromium/content/browser/renderer_host/media/desktop_capture_device.cc | 462
-rw-r--r-- chromium/content/browser/renderer_host/media/desktop_capture_device.h | 58
-rw-r--r-- chromium/content/browser/renderer_host/media/desktop_capture_device_unittest.cc | 271
-rw-r--r-- chromium/content/browser/renderer_host/media/device_request_message_filter.cc | 219
-rw-r--r-- chromium/content/browser/renderer_host/media/device_request_message_filter.h | 78
-rw-r--r-- chromium/content/browser/renderer_host/media/device_request_message_filter_unittest.cc | 304
-rw-r--r-- chromium/content/browser/renderer_host/media/media_stream_dispatcher_host.cc | 224
-rw-r--r-- chromium/content/browser/renderer_host/media/media_stream_dispatcher_host.h | 95
-rw-r--r-- chromium/content/browser/renderer_host/media/media_stream_dispatcher_host_unittest.cc | 353
-rw-r--r-- chromium/content/browser/renderer_host/media/media_stream_manager.cc | 1115
-rw-r--r-- chromium/content/browser/renderer_host/media/media_stream_manager.h | 265
-rw-r--r-- chromium/content/browser/renderer_host/media/media_stream_manager_unittest.cc | 174
-rw-r--r-- chromium/content/browser/renderer_host/media/media_stream_provider.h | 94
-rw-r--r-- chromium/content/browser/renderer_host/media/media_stream_requester.h | 44
-rw-r--r-- chromium/content/browser/renderer_host/media/media_stream_ui_controller_unittest.cc | 170
-rw-r--r-- chromium/content/browser/renderer_host/media/media_stream_ui_proxy.cc | 216
-rw-r--r-- chromium/content/browser/renderer_host/media/media_stream_ui_proxy.h | 88
-rw-r--r-- chromium/content/browser/renderer_host/media/media_stream_ui_proxy_unittest.cc | 219
-rw-r--r-- chromium/content/browser/renderer_host/media/midi_dispatcher_host.cc | 63
-rw-r--r-- chromium/content/browser/renderer_host/media/midi_dispatcher_host.h | 48
-rw-r--r-- chromium/content/browser/renderer_host/media/midi_host.cc | 167
-rw-r--r-- chromium/content/browser/renderer_host/media/midi_host.h | 79
-rw-r--r-- chromium/content/browser/renderer_host/media/mock_media_observer.cc | 17
-rw-r--r-- chromium/content/browser/renderer_host/media/mock_media_observer.h | 60
-rw-r--r-- chromium/content/browser/renderer_host/media/peer_connection_tracker_host.cc | 69
-rw-r--r-- chromium/content/browser/renderer_host/media/peer_connection_tracker_host.h | 49
-rw-r--r-- chromium/content/browser/renderer_host/media/video_capture_buffer_pool.cc | 209
-rw-r--r-- chromium/content/browser/renderer_host/media/video_capture_buffer_pool.h | 136
-rw-r--r-- chromium/content/browser/renderer_host/media/video_capture_buffer_pool_unittest.cc | 159
-rw-r--r-- chromium/content/browser/renderer_host/media/video_capture_controller.cc | 732
-rw-r--r-- chromium/content/browser/renderer_host/media/video_capture_controller.h | 164
-rw-r--r-- chromium/content/browser/renderer_host/media/video_capture_controller_event_handler.cc | 23
-rw-r--r-- chromium/content/browser/renderer_host/media/video_capture_controller_event_handler.h | 65
-rw-r--r-- chromium/content/browser/renderer_host/media/video_capture_controller_unittest.cc | 265
-rw-r--r-- chromium/content/browser/renderer_host/media/video_capture_host.cc | 315
-rw-r--r-- chromium/content/browser/renderer_host/media/video_capture_host.h | 161
-rw-r--r-- chromium/content/browser/renderer_host/media/video_capture_host_unittest.cc | 371
-rw-r--r-- chromium/content/browser/renderer_host/media/video_capture_manager.cc | 593
-rw-r--r-- chromium/content/browser/renderer_host/media/video_capture_manager.h | 170
-rw-r--r-- chromium/content/browser/renderer_host/media/video_capture_manager_unittest.cc | 272
-rw-r--r-- chromium/content/browser/renderer_host/media/video_capture_oracle.cc | 165
-rw-r--r-- chromium/content/browser/renderer_host/media/video_capture_oracle.h | 107
-rw-r--r-- chromium/content/browser/renderer_host/media/video_capture_oracle_unittest.cc | 478
-rw-r--r-- chromium/content/browser/renderer_host/media/web_contents_audio_input_stream.cc | 349
-rw-r--r-- chromium/content/browser/renderer_host/media/web_contents_audio_input_stream.h | 92
-rw-r--r-- chromium/content/browser/renderer_host/media/web_contents_audio_input_stream_unittest.cc | 513
-rw-r--r-- chromium/content/browser/renderer_host/media/web_contents_capture_util.cc | 59
-rw-r--r-- chromium/content/browser/renderer_host/media/web_contents_capture_util.h | 35
-rw-r--r-- chromium/content/browser/renderer_host/media/web_contents_tracker.cc | 102
-rw-r--r-- chromium/content/browser/renderer_host/media/web_contents_tracker.h | 86
-rw-r--r-- chromium/content/browser/renderer_host/media/web_contents_video_capture_device.cc | 1278
-rw-r--r-- chromium/content/browser/renderer_host/media/web_contents_video_capture_device.h | 73
-rw-r--r-- chromium/content/browser/renderer_host/media/web_contents_video_capture_device_unittest.cc | 797
-rw-r--r-- chromium/content/browser/renderer_host/media/webrtc_identity_service_host.cc | 87
-rw-r--r-- chromium/content/browser/renderer_host/media/webrtc_identity_service_host.h | 63
-rw-r--r-- chromium/content/browser/renderer_host/media/webrtc_identity_service_host_unittest.cc | 187
73 files changed, 16340 insertions, 0 deletions
diff --git a/chromium/content/browser/renderer_host/media/DEPS b/chromium/content/browser/renderer_host/media/DEPS
new file mode 100644
index 00000000000..6f4140f1e51
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/DEPS
@@ -0,0 +1,9 @@
+include_rules = [
+ "+media",
+
+ # TODO: this is temporary, this directory doesn't belong under renderer_host
+ # since it depends on web_contents.
+ "+content/browser/web_contents",
+ "+content/public/browser/web_contents.h",
+ "+content/public/browser/web_contents_view.h",
+]
diff --git a/chromium/content/browser/renderer_host/media/OWNERS b/chromium/content/browser/renderer_host/media/OWNERS
new file mode 100644
index 00000000000..45ac6611129
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/OWNERS
@@ -0,0 +1,26 @@
+acolwell@chromium.org
+dalecurtis@chromium.org
+ddorwin@chromium.org
+fischman@chromium.org
+scherkus@chromium.org
+shadi@chromium.org
+tommi@chromium.org
+vrk@chromium.org
+wjia@chromium.org
+xhwang@chromium.org
+xians@chromium.org
+
+# Tab capture OWNERS.
+per-file audio*=miu@chromium.org
+per-file web_contents*=hclam@chromium.org
+per-file web_contents*=justinlin@chromium.org
+per-file web_contents*=miu@chromium.org
+per-file web_contents*=nick@chromium.org
+per-file video_capture_oracle*=hclam@chromium.org
+per-file video_capture_oracle*=justinlin@chromium.org
+per-file video_capture_oracle*=miu@chromium.org
+per-file video_capture_oracle*=nick@chromium.org
+
+# Screen capture OWNERS.
+per-file desktop_capture_*=sergeyu@chromium.org
+per-file desktop_capture_*=wez@chromium.org
diff --git a/chromium/content/browser/renderer_host/media/audio_input_device_manager.cc b/chromium/content/browser/renderer_host/media/audio_input_device_manager.cc
new file mode 100644
index 00000000000..b4959567a31
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/audio_input_device_manager.cc
@@ -0,0 +1,237 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "content/browser/renderer_host/media/audio_input_device_manager.h"
+
+#include "base/bind.h"
+#include "base/memory/scoped_ptr.h"
+#include "content/public/browser/browser_thread.h"
+#include "content/public/common/media_stream_request.h"
+#include "media/audio/audio_device_name.h"
+#include "media/audio/audio_input_ipc.h"
+#include "media/audio/audio_manager_base.h"
+#include "media/audio/audio_parameters.h"
+#include "media/base/channel_layout.h"
+#include "media/base/scoped_histogram_timer.h"
+
+namespace content {
+
+const int AudioInputDeviceManager::kFakeOpenSessionId = 1;
+
+namespace {
+// Starting id for the first capture session.
+const int kFirstSessionId = AudioInputDeviceManager::kFakeOpenSessionId + 1;
+}
+
+AudioInputDeviceManager::AudioInputDeviceManager(
+ media::AudioManager* audio_manager)
+ : listener_(NULL),
+ next_capture_session_id_(kFirstSessionId),
+ use_fake_device_(false),
+ audio_manager_(audio_manager) {
+ // TODO(xians): Remove this fake_device after the unittests do not need it.
+ StreamDeviceInfo fake_device(MEDIA_DEVICE_AUDIO_CAPTURE,
+ media::AudioManagerBase::kDefaultDeviceName,
+ media::AudioManagerBase::kDefaultDeviceId,
+ 44100, media::CHANNEL_LAYOUT_STEREO, false);
+ fake_device.session_id = kFakeOpenSessionId;
+ devices_.push_back(fake_device);
+}
+
+AudioInputDeviceManager::~AudioInputDeviceManager() {
+}
+
+const StreamDeviceInfo* AudioInputDeviceManager::GetOpenedDeviceInfoById(
+ int session_id) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ StreamDeviceList::iterator device = GetDevice(session_id);
+ if (device == devices_.end())
+ return NULL;
+
+ return &(*device);
+}
+
+void AudioInputDeviceManager::Register(
+ MediaStreamProviderListener* listener,
+ base::MessageLoopProxy* device_thread_loop) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ DCHECK(!listener_);
+ DCHECK(!device_loop_.get());
+ listener_ = listener;
+ device_loop_ = device_thread_loop;
+}
+
+void AudioInputDeviceManager::Unregister() {
+ DCHECK(listener_);
+ listener_ = NULL;
+}
+
+void AudioInputDeviceManager::EnumerateDevices(MediaStreamType stream_type) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ DCHECK(listener_);
+
+ device_loop_->PostTask(
+ FROM_HERE,
+ base::Bind(&AudioInputDeviceManager::EnumerateOnDeviceThread,
+ this, stream_type));
+}
+
+int AudioInputDeviceManager::Open(const StreamDeviceInfo& device) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ // Generate a new id for this device.
+ int session_id = next_capture_session_id_++;
+ device_loop_->PostTask(
+ FROM_HERE,
+ base::Bind(&AudioInputDeviceManager::OpenOnDeviceThread,
+ this, session_id, device));
+
+ return session_id;
+}
+
+void AudioInputDeviceManager::Close(int session_id) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ DCHECK(listener_);
+ StreamDeviceList::iterator device = GetDevice(session_id);
+ if (device == devices_.end())
+ return;
+ const MediaStreamType stream_type = device->device.type;
+ if (session_id != kFakeOpenSessionId)
+ devices_.erase(device);
+
+ // Post a callback through the listener on IO thread since
+ // MediaStreamManager is expecting the callback asynchronously.
+ BrowserThread::PostTask(BrowserThread::IO,
+ FROM_HERE,
+ base::Bind(&AudioInputDeviceManager::ClosedOnIOThread,
+ this, stream_type, session_id));
+}
+
+void AudioInputDeviceManager::UseFakeDevice() {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ use_fake_device_ = true;
+}
+
+bool AudioInputDeviceManager::ShouldUseFakeDevice() const {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ return use_fake_device_;
+}
+
+void AudioInputDeviceManager::EnumerateOnDeviceThread(
+ MediaStreamType stream_type) {
+ SCOPED_UMA_HISTOGRAM_TIMER(
+ "Media.AudioInputDeviceManager.EnumerateOnDeviceThreadTime");
+ DCHECK(IsOnDeviceThread());
+
+ media::AudioDeviceNames device_names;
+
+ switch (stream_type) {
+ case MEDIA_DEVICE_AUDIO_CAPTURE:
+ // AudioManager is guaranteed to outlive MediaStreamManager in
+ // BrowserMainloop.
+ audio_manager_->GetAudioInputDeviceNames(&device_names);
+ break;
+
+ default:
+ NOTREACHED();
+ break;
+ }
+
+ scoped_ptr<StreamDeviceInfoArray> devices(new StreamDeviceInfoArray());
+ for (media::AudioDeviceNames::iterator it = device_names.begin();
+ it != device_names.end(); ++it) {
+ // Add device information to device vector.
+ devices->push_back(StreamDeviceInfo(
+ stream_type, it->device_name, it->unique_id, false));
+ }
+
+ // If the |use_fake_device_| flag is on, inject the fake device if there is
+ // no available device on the OS.
+ if (use_fake_device_ && devices->empty()) {
+ devices->push_back(StreamDeviceInfo(
+ stream_type, media::AudioManagerBase::kDefaultDeviceName,
+ media::AudioManagerBase::kDefaultDeviceId, false));
+ }
+
+ // Return the device list through the listener by posting a task on
+ // IO thread since MediaStreamManager handles the callback asynchronously.
+ BrowserThread::PostTask(
+ BrowserThread::IO,
+ FROM_HERE,
+ base::Bind(&AudioInputDeviceManager::DevicesEnumeratedOnIOThread,
+ this, stream_type, base::Passed(&devices)));
+}
+
+void AudioInputDeviceManager::OpenOnDeviceThread(
+ int session_id, const StreamDeviceInfo& info) {
+ SCOPED_UMA_HISTOGRAM_TIMER(
+ "Media.AudioInputDeviceManager.OpenOnDeviceThreadTime");
+ DCHECK(IsOnDeviceThread());
+
+ StreamDeviceInfo out(info.device.type, info.device.name, info.device.id,
+ 0, 0, false);
+ out.session_id = session_id;
+ if (use_fake_device_) {
+ // Don't need to query the hardware information if using fake device.
+ out.device.sample_rate = 44100;
+ out.device.channel_layout = media::CHANNEL_LAYOUT_STEREO;
+ } else {
+ // Get the preferred sample rate and channel configuration for the
+ // audio device.
+ media::AudioParameters params =
+ audio_manager_->GetInputStreamParameters(info.device.id);
+ out.device.sample_rate = params.sample_rate();
+ out.device.channel_layout = params.channel_layout();
+ }
+
+ // Return the |session_id| through the listener by posting a task on
+ // IO thread since MediaStreamManager handles the callback asynchronously.
+ BrowserThread::PostTask(BrowserThread::IO,
+ FROM_HERE,
+ base::Bind(&AudioInputDeviceManager::OpenedOnIOThread,
+ this, session_id, out));
+}
+
+void AudioInputDeviceManager::DevicesEnumeratedOnIOThread(
+ MediaStreamType stream_type,
+ scoped_ptr<StreamDeviceInfoArray> devices) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ // Ensure that |devices| gets deleted on exit.
+ if (listener_)
+ listener_->DevicesEnumerated(stream_type, *devices);
+}
+
+void AudioInputDeviceManager::OpenedOnIOThread(int session_id,
+ const StreamDeviceInfo& info) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ DCHECK_EQ(session_id, info.session_id);
+ DCHECK(GetDevice(session_id) == devices_.end());
+ devices_.push_back(info);
+
+ if (listener_)
+ listener_->Opened(info.device.type, session_id);
+}
+
+void AudioInputDeviceManager::ClosedOnIOThread(MediaStreamType stream_type,
+ int session_id) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ if (listener_)
+ listener_->Closed(stream_type, session_id);
+}
+
+bool AudioInputDeviceManager::IsOnDeviceThread() const {
+ return device_loop_->BelongsToCurrentThread();
+}
+
+AudioInputDeviceManager::StreamDeviceList::iterator
+AudioInputDeviceManager::GetDevice(int session_id) {
+ for (StreamDeviceList::iterator i(devices_.begin()); i != devices_.end();
+ ++i) {
+ if (i->session_id == session_id)
+ return i;
+ }
+
+ return devices_.end();
+}
+
+} // namespace content
diff --git a/chromium/content/browser/renderer_host/media/audio_input_device_manager.h b/chromium/content/browser/renderer_host/media/audio_input_device_manager.h
new file mode 100644
index 00000000000..133673f2a5f
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/audio_input_device_manager.h
@@ -0,0 +1,100 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// AudioInputDeviceManager manages the audio input devices. In particular it
+// communicates with MediaStreamManager and AudioInputRendererHost on the
+// browser IO thread, handles queries like
+// enumerate/open/close/GetOpenedDeviceInfoById from MediaStreamManager and
+// GetOpenedDeviceInfoById from AudioInputRendererHost.
+// The work for enumerate/open/close is handled asynchronously on Media Stream
+// device thread, while GetOpenedDeviceInfoById is synchronous on the IO thread.
+
+#ifndef CONTENT_BROWSER_RENDERER_HOST_MEDIA_AUDIO_INPUT_DEVICE_MANAGER_H_
+#define CONTENT_BROWSER_RENDERER_HOST_MEDIA_AUDIO_INPUT_DEVICE_MANAGER_H_
+
+#include <map>
+
+#include "base/basictypes.h"
+#include "base/memory/ref_counted.h"
+#include "base/threading/thread.h"
+#include "content/browser/renderer_host/media/media_stream_provider.h"
+#include "content/common/content_export.h"
+#include "content/common/media/media_stream_options.h"
+#include "content/public/common/media_stream_request.h"
+
+namespace media {
+class AudioManager;
+}
+
+namespace content {
+
+class CONTENT_EXPORT AudioInputDeviceManager : public MediaStreamProvider {
+ public:
+ // Calling Start() with this kFakeOpenSessionId will open the default device,
+ // even though Open() has not been called. This is used to be able to use the
+ // AudioInputDeviceManager before MediaStream is implemented.
+ // TODO(xians): Remove it when the webrtc unittest does not need it any more.
+ static const int kFakeOpenSessionId;
+
+ explicit AudioInputDeviceManager(media::AudioManager* audio_manager);
+
+ // Gets the opened device info by |session_id|. Returns NULL if the device
+ // is not opened, otherwise the opened device. Called on IO thread.
+ const StreamDeviceInfo* GetOpenedDeviceInfoById(int session_id);
+
+ // MediaStreamProvider implementation, called on IO thread.
+ virtual void Register(MediaStreamProviderListener* listener,
+ base::MessageLoopProxy* device_thread_loop) OVERRIDE;
+ virtual void Unregister() OVERRIDE;
+ virtual void EnumerateDevices(MediaStreamType stream_type) OVERRIDE;
+ virtual int Open(const StreamDeviceInfo& device) OVERRIDE;
+ virtual void Close(int session_id) OVERRIDE;
+
+ void UseFakeDevice();
+ bool ShouldUseFakeDevice() const;
+
+ private:
+ typedef std::vector<StreamDeviceInfo> StreamDeviceList;
+ virtual ~AudioInputDeviceManager();
+
+ // Enumerates audio input devices on media stream device thread.
+ void EnumerateOnDeviceThread(MediaStreamType stream_type);
+ // Opens the device on media stream device thread.
+ void OpenOnDeviceThread(int session_id, const StreamDeviceInfo& info);
+
+ // Callback used by EnumerateOnDeviceThread(), called with a list of
+ // enumerated devices on IO thread.
+ void DevicesEnumeratedOnIOThread(MediaStreamType stream_type,
+ scoped_ptr<StreamDeviceInfoArray> devices);
+ // Callback used by OpenOnDeviceThread(), called with the session_id
+ // referencing the opened device on IO thread.
+ void OpenedOnIOThread(int session_id, const StreamDeviceInfo& info);
+ // Callback used by CloseOnDeviceThread(), called with the session_id
+ // referencing the closed device on IO thread.
+ void ClosedOnIOThread(MediaStreamType type, int session_id);
+
+ // Verifies that the calling thread is media stream device thread.
+ bool IsOnDeviceThread() const;
+
+ // Helper to return iterator to the device referenced by |session_id|. If no
+ // device is found, it will return devices_.end().
+ StreamDeviceList::iterator GetDevice(int session_id);
+
+ // Only accessed on Browser::IO thread.
+ MediaStreamProviderListener* listener_;
+ int next_capture_session_id_;
+ bool use_fake_device_;
+ StreamDeviceList devices_;
+
+ media::AudioManager* const audio_manager_; // Weak.
+
+ // The message loop of media stream device thread that this object runs on.
+ scoped_refptr<base::MessageLoopProxy> device_loop_;
+
+ DISALLOW_COPY_AND_ASSIGN(AudioInputDeviceManager);
+};
+
+} // namespace content
+
+#endif // CONTENT_BROWSER_RENDERER_HOST_MEDIA_AUDIO_INPUT_DEVICE_MANAGER_H_
diff --git a/chromium/content/browser/renderer_host/media/audio_input_device_manager_unittest.cc b/chromium/content/browser/renderer_host/media/audio_input_device_manager_unittest.cc
new file mode 100644
index 00000000000..03b31d2845e
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/audio_input_device_manager_unittest.cc
@@ -0,0 +1,291 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <string>
+
+#include "base/bind.h"
+#include "base/memory/ref_counted.h"
+#include "base/memory/scoped_ptr.h"
+#include "base/message_loop/message_loop.h"
+#include "content/browser/browser_thread_impl.h"
+#include "content/browser/renderer_host/media/audio_input_device_manager.h"
+#include "content/public/common/media_stream_request.h"
+#include "media/audio/audio_manager_base.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using testing::_;
+using testing::InSequence;
+using testing::SaveArg;
+using testing::Return;
+
+namespace content {
+
+class MockAudioInputDeviceManagerListener
+ : public MediaStreamProviderListener {
+ public:
+ MockAudioInputDeviceManagerListener() {}
+ virtual ~MockAudioInputDeviceManagerListener() {}
+
+ MOCK_METHOD2(Opened, void(MediaStreamType, const int));
+ MOCK_METHOD2(Closed, void(MediaStreamType, const int));
+ MOCK_METHOD2(DevicesEnumerated, void(MediaStreamType,
+ const StreamDeviceInfoArray&));
+ MOCK_METHOD3(Error, void(MediaStreamType, int, MediaStreamProviderError));
+
+ StreamDeviceInfoArray devices_;
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(MockAudioInputDeviceManagerListener);
+};
+
+class AudioInputDeviceManagerTest : public testing::Test {
+ public:
+ AudioInputDeviceManagerTest() {}
+
+ // Returns true iff machine has an audio input device.
+ bool CanRunAudioInputDeviceTests() {
+ return audio_manager_->HasAudioInputDevices();
+ }
+
+ protected:
+ virtual void SetUp() OVERRIDE {
+ // The test must run on Browser::IO.
+ message_loop_.reset(new base::MessageLoop(base::MessageLoop::TYPE_IO));
+ io_thread_.reset(new BrowserThreadImpl(BrowserThread::IO,
+ message_loop_.get()));
+ audio_manager_.reset(media::AudioManager::Create());
+ manager_ = new AudioInputDeviceManager(audio_manager_.get());
+ audio_input_listener_.reset(new MockAudioInputDeviceManagerListener());
+ manager_->Register(audio_input_listener_.get(),
+ message_loop_->message_loop_proxy().get());
+
+ // Gets the enumerated device list from the AudioInputDeviceManager.
+ manager_->EnumerateDevices(MEDIA_DEVICE_AUDIO_CAPTURE);
+ EXPECT_CALL(*audio_input_listener_,
+ DevicesEnumerated(MEDIA_DEVICE_AUDIO_CAPTURE, _))
+ .Times(1)
+ .WillOnce(SaveArg<1>(&devices_));
+
+ // Wait until we get the list.
+ message_loop_->RunUntilIdle();
+ }
+
+ virtual void TearDown() OVERRIDE {
+ manager_->Unregister();
+ io_thread_.reset();
+ }
+
+ scoped_ptr<base::MessageLoop> message_loop_;
+ scoped_ptr<BrowserThreadImpl> io_thread_;
+ scoped_refptr<AudioInputDeviceManager> manager_;
+ scoped_ptr<MockAudioInputDeviceManagerListener> audio_input_listener_;
+ scoped_ptr<media::AudioManager> audio_manager_;
+ StreamDeviceInfoArray devices_;
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(AudioInputDeviceManagerTest);
+};
+
+// Opens and closes the devices.
+TEST_F(AudioInputDeviceManagerTest, OpenAndCloseDevice) {
+ if (!CanRunAudioInputDeviceTests())
+ return;
+
+ ASSERT_FALSE(devices_.empty());
+
+ InSequence s;
+
+ for (StreamDeviceInfoArray::const_iterator iter = devices_.begin();
+ iter != devices_.end(); ++iter) {
+ // Opens/closes the devices.
+ int session_id = manager_->Open(*iter);
+
+ // Expected mock call with expected return value.
+ EXPECT_CALL(*audio_input_listener_,
+ Opened(MEDIA_DEVICE_AUDIO_CAPTURE, session_id))
+ .Times(1);
+ // Waits for the callback.
+ message_loop_->RunUntilIdle();
+
+ manager_->Close(session_id);
+ EXPECT_CALL(*audio_input_listener_,
+ Closed(MEDIA_DEVICE_AUDIO_CAPTURE, session_id))
+ .Times(1);
+
+ // Waits for the callback.
+ message_loop_->RunUntilIdle();
+ }
+}
+
+// Opens multiple devices at one time and closes them later.
+TEST_F(AudioInputDeviceManagerTest, OpenMultipleDevices) {
+ if (!CanRunAudioInputDeviceTests())
+ return;
+
+ ASSERT_FALSE(devices_.empty());
+
+ InSequence s;
+
+ int index = 0;
+ scoped_ptr<int[]> session_id(new int[devices_.size()]);
+
+ // Opens the devices in a loop.
+ for (StreamDeviceInfoArray::const_iterator iter = devices_.begin();
+ iter != devices_.end(); ++iter, ++index) {
+ // Opens the devices.
+ session_id[index] = manager_->Open(*iter);
+
+ // Expected mock call with expected returned value.
+ EXPECT_CALL(*audio_input_listener_,
+ Opened(MEDIA_DEVICE_AUDIO_CAPTURE, session_id[index]))
+ .Times(1);
+
+ // Waits for the callback.
+ message_loop_->RunUntilIdle();
+ }
+
+ // Checks if the session_ids are unique.
+ for (size_t i = 0; i < devices_.size() - 1; ++i) {
+ for (size_t k = i + 1; k < devices_.size(); ++k) {
+ EXPECT_TRUE(session_id[i] != session_id[k]);
+ }
+ }
+
+ for (size_t i = 0; i < devices_.size(); ++i) {
+ // Closes the devices.
+ manager_->Close(session_id[i]);
+ EXPECT_CALL(*audio_input_listener_,
+ Closed(MEDIA_DEVICE_AUDIO_CAPTURE, session_id[i]))
+ .Times(1);
+
+ // Waits for the callback.
+ message_loop_->RunUntilIdle();
+ }
+}
+
+// Opens a non-existing device.
+TEST_F(AudioInputDeviceManagerTest, OpenNotExistingDevice) {
+ if (!CanRunAudioInputDeviceTests())
+ return;
+ InSequence s;
+
+ MediaStreamType stream_type = MEDIA_DEVICE_AUDIO_CAPTURE;
+ std::string device_name("device_doesnt_exist");
+ std::string device_id("id_doesnt_exist");
+ int sample_rate(0);
+ int channel_config(0);
+ StreamDeviceInfo dummy_device(
+ stream_type, device_name, device_id, sample_rate, channel_config, false);
+
+ int session_id = manager_->Open(dummy_device);
+ EXPECT_CALL(*audio_input_listener_,
+ Opened(MEDIA_DEVICE_AUDIO_CAPTURE, session_id))
+ .Times(1);
+
+ // Waits for the callback.
+ message_loop_->RunUntilIdle();
+}
+
+// Opens default device twice.
+TEST_F(AudioInputDeviceManagerTest, OpenDeviceTwice) {
+ if (!CanRunAudioInputDeviceTests())
+ return;
+
+ ASSERT_FALSE(devices_.empty());
+
+ InSequence s;
+
+ // Opens and closes the default device twice.
+ int first_session_id = manager_->Open(devices_.front());
+ int second_session_id = manager_->Open(devices_.front());
+
+ // Expected mock calls with expected returned values.
+ EXPECT_NE(first_session_id, second_session_id);
+ EXPECT_CALL(*audio_input_listener_,
+ Opened(MEDIA_DEVICE_AUDIO_CAPTURE, first_session_id))
+ .Times(1);
+ EXPECT_CALL(*audio_input_listener_,
+ Opened(MEDIA_DEVICE_AUDIO_CAPTURE, second_session_id))
+ .Times(1);
+ // Waits for the callback.
+ message_loop_->RunUntilIdle();
+
+ manager_->Close(first_session_id);
+ manager_->Close(second_session_id);
+ EXPECT_CALL(*audio_input_listener_,
+ Closed(MEDIA_DEVICE_AUDIO_CAPTURE, first_session_id))
+ .Times(1);
+ EXPECT_CALL(*audio_input_listener_,
+ Closed(MEDIA_DEVICE_AUDIO_CAPTURE, second_session_id))
+ .Times(1);
+ // Waits for the callback.
+ message_loop_->RunUntilIdle();
+}
+
+// Accesses then closes the sessions after opening the devices.
+TEST_F(AudioInputDeviceManagerTest, AccessAndCloseSession) {
+ if (!CanRunAudioInputDeviceTests())
+ return;
+
+ ASSERT_FALSE(devices_.empty());
+
+ InSequence s;
+
+ int index = 0;
+ scoped_ptr<int[]> session_id(new int[devices_.size()]);
+
+ // Loops through the devices and calls Open()/Close()/GetOpenedDeviceInfoById
+ // for each device.
+ for (StreamDeviceInfoArray::const_iterator iter = devices_.begin();
+ iter != devices_.end(); ++iter, ++index) {
+ // Note that no DeviceStopped() notification for Event Handler as we have
+ // stopped the device before calling close.
+ session_id[index] = manager_->Open(*iter);
+ EXPECT_CALL(*audio_input_listener_,
+ Opened(MEDIA_DEVICE_AUDIO_CAPTURE, session_id[index]))
+ .Times(1);
+ message_loop_->RunUntilIdle();
+
+ const StreamDeviceInfo* info = manager_->GetOpenedDeviceInfoById(
+ session_id[index]);
+ DCHECK(info);
+ EXPECT_EQ(iter->device.id, info->device.id);
+ manager_->Close(session_id[index]);
+ EXPECT_CALL(*audio_input_listener_,
+ Closed(MEDIA_DEVICE_AUDIO_CAPTURE, session_id[index]))
+ .Times(1);
+ message_loop_->RunUntilIdle();
+ }
+}
+
+// Access an invalid session.
+TEST_F(AudioInputDeviceManagerTest, AccessInvalidSession) {
+ if (!CanRunAudioInputDeviceTests())
+ return;
+ InSequence s;
+
+ // Opens the first device.
+ StreamDeviceInfoArray::const_iterator iter = devices_.begin();
+ int session_id = manager_->Open(*iter);
+ EXPECT_CALL(*audio_input_listener_,
+ Opened(MEDIA_DEVICE_AUDIO_CAPTURE, session_id))
+ .Times(1);
+ message_loop_->RunUntilIdle();
+
+ // Access a non-opened device.
+ // This should fail and return an empty StreamDeviceInfo.
+ int invalid_session_id = session_id + 1;
+ const StreamDeviceInfo* info =
+ manager_->GetOpenedDeviceInfoById(invalid_session_id);
+ DCHECK(!info);
+
+ manager_->Close(session_id);
+ EXPECT_CALL(*audio_input_listener_,
+ Closed(MEDIA_DEVICE_AUDIO_CAPTURE, session_id))
+ .Times(1);
+ message_loop_->RunUntilIdle();
+}
+
+} // namespace content
diff --git a/chromium/content/browser/renderer_host/media/audio_input_renderer_host.cc b/chromium/content/browser/renderer_host/media/audio_input_renderer_host.cc
new file mode 100644
index 00000000000..4a2f731a81c
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/audio_input_renderer_host.cc
@@ -0,0 +1,407 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "content/browser/renderer_host/media/audio_input_renderer_host.h"
+
+#include "base/bind.h"
+#include "base/memory/shared_memory.h"
+#include "base/metrics/histogram.h"
+#include "base/process/process.h"
+#include "content/browser/renderer_host/media/audio_input_device_manager.h"
+#include "content/browser/renderer_host/media/audio_input_sync_writer.h"
+#include "content/browser/renderer_host/media/media_stream_manager.h"
+#include "content/browser/renderer_host/media/web_contents_audio_input_stream.h"
+#include "content/browser/renderer_host/media/web_contents_capture_util.h"
+#include "media/audio/audio_manager_base.h"
+
+namespace content {
+
+struct AudioInputRendererHost::AudioEntry {
+ AudioEntry();
+ ~AudioEntry();
+
+ // The AudioInputController that manages the audio input stream.
+ scoped_refptr<media::AudioInputController> controller;
+
+ // The audio input stream ID in the render view.
+ int stream_id;
+
+ // Shared memory for transmission of the audio data. It has
+ // |shared_memory_segment_count| equal-length segments.
+ base::SharedMemory shared_memory;
+ int shared_memory_segment_count;
+
+ // The synchronous writer to be used by the controller. We have the
+ // ownership of the writer.
+ scoped_ptr<media::AudioInputController::SyncWriter> writer;
+
+ // Set to true after we called Close() for the controller.
+ bool pending_close;
+};
+
+AudioInputRendererHost::AudioEntry::AudioEntry()
+ : stream_id(0),
+ shared_memory_segment_count(0),
+ pending_close(false) {
+}
+
+AudioInputRendererHost::AudioEntry::~AudioEntry() {}
+
+AudioInputRendererHost::AudioInputRendererHost(
+ media::AudioManager* audio_manager,
+ MediaStreamManager* media_stream_manager,
+ AudioMirroringManager* audio_mirroring_manager)
+ : audio_manager_(audio_manager),
+ media_stream_manager_(media_stream_manager),
+ audio_mirroring_manager_(audio_mirroring_manager) {
+}
+
+AudioInputRendererHost::~AudioInputRendererHost() {
+ DCHECK(audio_entries_.empty());
+}
+
+void AudioInputRendererHost::OnChannelClosing() {
+ BrowserMessageFilter::OnChannelClosing();
+
+ // Since the IPC channel is gone, close all requested audio streams.
+ DeleteEntries();
+}
+
+void AudioInputRendererHost::OnDestruct() const {
+ BrowserThread::DeleteOnIOThread::Destruct(this);
+}
+
+void AudioInputRendererHost::OnCreated(
+ media::AudioInputController* controller) {
+ BrowserThread::PostTask(
+ BrowserThread::IO,
+ FROM_HERE,
+ base::Bind(
+ &AudioInputRendererHost::DoCompleteCreation,
+ this,
+ make_scoped_refptr(controller)));
+}
+
+void AudioInputRendererHost::OnRecording(
+ media::AudioInputController* controller) {
+ BrowserThread::PostTask(
+ BrowserThread::IO,
+ FROM_HERE,
+ base::Bind(
+ &AudioInputRendererHost::DoSendRecordingMessage,
+ this,
+ make_scoped_refptr(controller)));
+}
+
+void AudioInputRendererHost::OnError(media::AudioInputController* controller) {
+ BrowserThread::PostTask(
+ BrowserThread::IO,
+ FROM_HERE,
+ base::Bind(
+ &AudioInputRendererHost::DoHandleError,
+ this,
+ make_scoped_refptr(controller)));
+}
+
+void AudioInputRendererHost::OnData(media::AudioInputController* controller,
+ const uint8* data,
+ uint32 size) {
+ NOTREACHED() << "Only low-latency mode is supported.";
+}
+
+void AudioInputRendererHost::DoCompleteCreation(
+ media::AudioInputController* controller) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ AudioEntry* entry = LookupByController(controller);
+ if (!entry)
+ return;
+
+ if (!PeerHandle()) {
+ NOTREACHED() << "Renderer process handle is invalid.";
+ DeleteEntryOnError(entry);
+ return;
+ }
+
+ if (!entry->controller->LowLatencyMode()) {
+ NOTREACHED() << "Only low-latency mode is supported.";
+ DeleteEntryOnError(entry);
+ return;
+ }
+
+ // Once the audio stream is created then complete the creation process by
+ // mapping shared memory and sharing with the renderer process.
+ base::SharedMemoryHandle foreign_memory_handle;
+ if (!entry->shared_memory.ShareToProcess(PeerHandle(),
+ &foreign_memory_handle)) {
+ // If we failed to map and share the shared memory then close the audio
+ // stream and send an error message.
+ DeleteEntryOnError(entry);
+ return;
+ }
+
+ AudioInputSyncWriter* writer =
+ static_cast<AudioInputSyncWriter*>(entry->writer.get());
+
+#if defined(OS_WIN)
+ base::SyncSocket::Handle foreign_socket_handle;
+#else
+ base::FileDescriptor foreign_socket_handle;
+#endif
+
+ // If we failed to prepare the sync socket for the renderer then we fail
+ // the construction of audio input stream.
+ if (!writer->PrepareForeignSocketHandle(PeerHandle(),
+ &foreign_socket_handle)) {
+ DeleteEntryOnError(entry);
+ return;
+ }
+
+ Send(new AudioInputMsg_NotifyStreamCreated(entry->stream_id,
+ foreign_memory_handle, foreign_socket_handle,
+ entry->shared_memory.requested_size(),
+ entry->shared_memory_segment_count));
+}
+
+void AudioInputRendererHost::DoSendRecordingMessage(
+ media::AudioInputController* controller) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ // TODO(henrika): See crbug.com/115262 for details on why this method
+ // should be implemented.
+}
+
+void AudioInputRendererHost::DoHandleError(
+ media::AudioInputController* controller) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ AudioEntry* entry = LookupByController(controller);
+ if (!entry)
+ return;
+
+ DeleteEntryOnError(entry);
+}
+
+bool AudioInputRendererHost::OnMessageReceived(const IPC::Message& message,
+ bool* message_was_ok) {
+ bool handled = true;
+ IPC_BEGIN_MESSAGE_MAP_EX(AudioInputRendererHost, message, *message_was_ok)
+ IPC_MESSAGE_HANDLER(AudioInputHostMsg_CreateStream, OnCreateStream)
+ IPC_MESSAGE_HANDLER(AudioInputHostMsg_RecordStream, OnRecordStream)
+ IPC_MESSAGE_HANDLER(AudioInputHostMsg_CloseStream, OnCloseStream)
+ IPC_MESSAGE_HANDLER(AudioInputHostMsg_SetVolume, OnSetVolume)
+ IPC_MESSAGE_UNHANDLED(handled = false)
+ IPC_END_MESSAGE_MAP_EX()
+
+ return handled;
+}
+
+void AudioInputRendererHost::OnCreateStream(
+ int stream_id,
+ int render_view_id,
+ int session_id,
+ const AudioInputHostMsg_CreateStream_Config& config) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ DVLOG(1) << "AudioInputRendererHost@" << this
+ << "::OnCreateStream(stream_id=" << stream_id
+ << ", render_view_id=" << render_view_id
+ << ", session_id=" << session_id << ")";
+ DCHECK_GT(render_view_id, 0);
+
+ // media::AudioParameters is validated in the deserializer.
+ if (LookupById(stream_id) != NULL) {
+ SendErrorMessage(stream_id);
+ return;
+ }
+
+ media::AudioParameters audio_params(config.params);
+ if (media_stream_manager_->audio_input_device_manager()->
+ ShouldUseFakeDevice()) {
+ audio_params.Reset(
+ media::AudioParameters::AUDIO_FAKE,
+ config.params.channel_layout(), config.params.channels(), 0,
+ config.params.sample_rate(), config.params.bits_per_sample(),
+ config.params.frames_per_buffer());
+ }
+
+ // Check if we have the permission to open the device and which device to use.
+ std::string device_id = media::AudioManagerBase::kDefaultDeviceId;
+ if (audio_params.format() != media::AudioParameters::AUDIO_FAKE) {
+ const StreamDeviceInfo* info = media_stream_manager_->
+ audio_input_device_manager()->GetOpenedDeviceInfoById(session_id);
+ if (!info) {
+ SendErrorMessage(stream_id);
+ DLOG(WARNING) << "No permission has been granted to input stream with "
+ << "session_id=" << session_id;
+ return;
+ }
+
+ device_id = info->device.id;
+ }
+
+ // Create a new AudioEntry structure.
+ scoped_ptr<AudioEntry> entry(new AudioEntry());
+
+ const uint32 segment_size = (sizeof(media::AudioInputBufferParameters) +
+ audio_params.GetBytesPerBuffer());
+ entry->shared_memory_segment_count = config.shared_memory_count;
+
+ // Create the shared memory and share it with the renderer process
+ // using a new SyncWriter object.
+ if (!entry->shared_memory.CreateAndMapAnonymous(
+ segment_size * entry->shared_memory_segment_count)) {
+ // If creation of shared memory failed then send an error message.
+ SendErrorMessage(stream_id);
+ return;
+ }
+
+ scoped_ptr<AudioInputSyncWriter> writer(
+ new AudioInputSyncWriter(&entry->shared_memory,
+ entry->shared_memory_segment_count));
+
+ if (!writer->Init()) {
+ SendErrorMessage(stream_id);
+ return;
+ }
+
+ // If we have successfully created the SyncWriter then assign it to the
+ // entry and construct an AudioInputController.
+ entry->writer.reset(writer.release());
+ if (WebContentsCaptureUtil::IsWebContentsDeviceId(device_id)) {
+ entry->controller = media::AudioInputController::CreateForStream(
+ audio_manager_->GetMessageLoop(),
+ this,
+ WebContentsAudioInputStream::Create(
+ device_id, audio_params, audio_manager_->GetWorkerLoop(),
+ audio_mirroring_manager_),
+ entry->writer.get());
+ } else {
+ // TODO(henrika): replace CreateLowLatency() with Create() as soon
+ // as satish has ensured that Speech Input also uses the default low-
+ // latency path. See crbug.com/112472 for details.
+ entry->controller = media::AudioInputController::CreateLowLatency(
+ audio_manager_,
+ this,
+ audio_params,
+ device_id,
+ entry->writer.get());
+ }
+
+ if (!entry->controller.get()) {
+ SendErrorMessage(stream_id);
+ return;
+ }
+
+ // Set the initial AGC state for the audio input stream. Note that the AGC
+ // is only supported in AUDIO_PCM_LOW_LATENCY mode.
+ if (config.params.format() == media::AudioParameters::AUDIO_PCM_LOW_LATENCY)
+ entry->controller->SetAutomaticGainControl(config.automatic_gain_control);
+
+ // Since the controller was created successfully, create an entry and add it
+ // to the map.
+ entry->stream_id = stream_id;
+ audio_entries_.insert(std::make_pair(stream_id, entry.release()));
+}
+
+void AudioInputRendererHost::OnRecordStream(int stream_id) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ AudioEntry* entry = LookupById(stream_id);
+ if (!entry) {
+ SendErrorMessage(stream_id);
+ return;
+ }
+
+ entry->controller->Record();
+}
+
+void AudioInputRendererHost::OnCloseStream(int stream_id) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ AudioEntry* entry = LookupById(stream_id);
+
+ if (entry)
+ CloseAndDeleteStream(entry);
+}
+
+void AudioInputRendererHost::OnSetVolume(int stream_id, double volume) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ AudioEntry* entry = LookupById(stream_id);
+ if (!entry) {
+ SendErrorMessage(stream_id);
+ return;
+ }
+
+ entry->controller->SetVolume(volume);
+}
+
+void AudioInputRendererHost::SendErrorMessage(int stream_id) {
+ Send(new AudioInputMsg_NotifyStreamStateChanged(
+ stream_id, media::AudioInputIPCDelegate::kError));
+}
+
+void AudioInputRendererHost::DeleteEntries() {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ for (AudioEntryMap::iterator i = audio_entries_.begin();
+ i != audio_entries_.end(); ++i) {
+ CloseAndDeleteStream(i->second);
+ }
+}
+
+void AudioInputRendererHost::CloseAndDeleteStream(AudioEntry* entry) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ if (!entry->pending_close) {
+ entry->controller->Close(base::Bind(&AudioInputRendererHost::DeleteEntry,
+ this, entry));
+ entry->pending_close = true;
+ }
+}
+
+void AudioInputRendererHost::DeleteEntry(AudioEntry* entry) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ // Delete the entry when this method goes out of scope.
+ scoped_ptr<AudioEntry> entry_deleter(entry);
+
+ // Erase the entry from the map.
+ audio_entries_.erase(entry->stream_id);
+}
+
+void AudioInputRendererHost::DeleteEntryOnError(AudioEntry* entry) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ // Sends the error message first before we close the stream because
+ // |entry| is destroyed in DeleteEntry().
+ SendErrorMessage(entry->stream_id);
+ CloseAndDeleteStream(entry);
+}
+
+AudioInputRendererHost::AudioEntry* AudioInputRendererHost::LookupById(
+ int stream_id) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ AudioEntryMap::iterator i = audio_entries_.find(stream_id);
+ if (i != audio_entries_.end())
+ return i->second;
+ return NULL;
+}
+
+AudioInputRendererHost::AudioEntry* AudioInputRendererHost::LookupByController(
+ media::AudioInputController* controller) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ // Iterate the map of entries.
+ // TODO(hclam): Implement a faster look up method.
+ for (AudioEntryMap::iterator i = audio_entries_.begin();
+ i != audio_entries_.end(); ++i) {
+ if (controller == i->second->controller.get())
+ return i->second;
+ }
+ return NULL;
+}
+
+} // namespace content
diff --git a/chromium/content/browser/renderer_host/media/audio_input_renderer_host.h b/chromium/content/browser/renderer_host/media/audio_input_renderer_host.h
new file mode 100644
index 00000000000..d16ebfad86a
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/audio_input_renderer_host.h
@@ -0,0 +1,162 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// AudioInputRendererHost serves audio-related requests from the audio capturer,
+// which lives inside the render process, and provides access to audio hardware.
+//
+// Create stream sequence (AudioInputController = AIC):
+//
+// AudioInputHostMsg_CreateStream -> OnCreateStream -> AIC::CreateLowLatency ->
+// <- AudioInputMsg_NotifyStreamCreated <- DoCompleteCreation <- OnCreated <-
+//
+// Close stream sequence:
+//
+// AudioInputHostMsg_CloseStream -> OnCloseStream -> AIC::Close ->
+//
+// This class is owned by BrowserRenderProcessHost and instantiated on UI
+// thread. All other operations and method calls happen on IO thread, so we
+// need to be extra careful about the lifetime of this object.
+//
+// To ensure low latency audio, a SyncSocket pair is used to signal buffer
+// readiness without having to route messages using the IO thread.
+
+#ifndef CONTENT_BROWSER_RENDERER_HOST_MEDIA_AUDIO_INPUT_RENDERER_HOST_H_
+#define CONTENT_BROWSER_RENDERER_HOST_MEDIA_AUDIO_INPUT_RENDERER_HOST_H_
+
+#include <map>
+#include <string>
+
+#include "base/compiler_specific.h"
+#include "base/gtest_prod_util.h"
+#include "base/memory/ref_counted.h"
+#include "base/memory/scoped_ptr.h"
+#include "base/memory/shared_memory.h"
+#include "base/process/process.h"
+#include "base/sequenced_task_runner_helpers.h"
+#include "content/common/media/audio_messages.h"
+#include "content/public/browser/browser_message_filter.h"
+#include "content/public/browser/browser_thread.h"
+#include "media/audio/audio_input_controller.h"
+#include "media/audio/audio_io.h"
+#include "media/audio/simple_sources.h"
+
+namespace media {
+class AudioManager;
+class AudioParameters;
+}
+
+namespace content {
+class AudioMirroringManager;
+class MediaStreamManager;
+
+class CONTENT_EXPORT AudioInputRendererHost
+ : public BrowserMessageFilter,
+ public media::AudioInputController::EventHandler {
+ public:
+ // Called from UI thread from the owner of this object.
+ AudioInputRendererHost(
+ media::AudioManager* audio_manager,
+ MediaStreamManager* media_stream_manager,
+ AudioMirroringManager* audio_mirroring_manager);
+
+ // BrowserMessageFilter implementation.
+ virtual void OnChannelClosing() OVERRIDE;
+ virtual void OnDestruct() const OVERRIDE;
+ virtual bool OnMessageReceived(const IPC::Message& message,
+ bool* message_was_ok) OVERRIDE;
+
+ // AudioInputController::EventHandler implementation.
+ virtual void OnCreated(media::AudioInputController* controller) OVERRIDE;
+ virtual void OnRecording(media::AudioInputController* controller) OVERRIDE;
+ virtual void OnError(media::AudioInputController* controller) OVERRIDE;
+ virtual void OnData(media::AudioInputController* controller,
+ const uint8* data,
+ uint32 size) OVERRIDE;
+
+ private:
+ // TODO(henrika): extend test suite (compare AudioRenderHost)
+ friend class BrowserThread;
+ friend class base::DeleteHelper<AudioInputRendererHost>;
+
+ struct AudioEntry;
+ typedef std::map<int, AudioEntry*> AudioEntryMap;
+
+ virtual ~AudioInputRendererHost();
+
+ // Methods called on IO thread ----------------------------------------------
+
+ // Audio related IPC message handlers.
+
+ // Creates an audio input stream with the specified format whose data is
+ // consumed by an entity in the render view referenced by |render_view_id|.
+ // |session_id| is used to find out which device to use for the stream.
+ // Upon success/failure, the peer is notified via the
+ // NotifyStreamCreated message.
+ void OnCreateStream(int stream_id,
+ int render_view_id,
+ int session_id,
+ const AudioInputHostMsg_CreateStream_Config& config);
+
+ // Record the audio input stream referenced by |stream_id|.
+ void OnRecordStream(int stream_id);
+
+ // Close the audio stream referenced by |stream_id|.
+ void OnCloseStream(int stream_id);
+
+ // Set the volume of the audio stream referenced by |stream_id|.
+ void OnSetVolume(int stream_id, double volume);
+
+ // Complete the process of creating an audio input stream. This will set up
+ // the shared memory or shared socket in low latency mode and send the
+ // NotifyStreamCreated message to the peer.
+ void DoCompleteCreation(media::AudioInputController* controller);
+
+ // Send a state change message to the renderer.
+ void DoSendRecordingMessage(media::AudioInputController* controller);
+
+ // Handle error coming from audio stream.
+ void DoHandleError(media::AudioInputController* controller);
+
+ // Send an error message to the renderer.
+ void SendErrorMessage(int stream_id);
+
+ // Delete all audio entries and close all audio streams.
+ void DeleteEntries();
+
+ // Closes the stream. The stream is then deleted in DeleteEntry() after it
+ // is closed.
+ void CloseAndDeleteStream(AudioEntry* entry);
+
+ // Delete an audio entry and close the related audio stream.
+ void DeleteEntry(AudioEntry* entry);
+
+ // Delete audio entry and close the related audio input stream.
+ void DeleteEntryOnError(AudioEntry* entry);
+
+ // A helper method to look up an AudioEntry identified by |stream_id|.
+ // Returns NULL if not found.
+ AudioEntry* LookupById(int stream_id);
+
+ // Search for an AudioEntry that holds a reference to |controller|.
+ // This method is used to look up an AudioEntry after a controller
+ // event is received.
+ AudioEntry* LookupByController(media::AudioInputController* controller);
+
+ // Used to create an AudioInputController.
+ media::AudioManager* audio_manager_;
+
+ // Used to access to AudioInputDeviceManager.
+ MediaStreamManager* media_stream_manager_;
+
+ AudioMirroringManager* audio_mirroring_manager_;
+
+ // A map of stream IDs to audio sources.
+ AudioEntryMap audio_entries_;
+
+ DISALLOW_COPY_AND_ASSIGN(AudioInputRendererHost);
+};
+
+} // namespace content
+
+#endif // CONTENT_BROWSER_RENDERER_HOST_MEDIA_AUDIO_INPUT_RENDERER_HOST_H_
diff --git a/chromium/content/browser/renderer_host/media/audio_input_sync_writer.cc b/chromium/content/browser/renderer_host/media/audio_input_sync_writer.cc
new file mode 100644
index 00000000000..572abf3e1c6
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/audio_input_sync_writer.cc
@@ -0,0 +1,82 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "content/browser/renderer_host/media/audio_input_sync_writer.h"
+
+#include <algorithm>
+
+#include "base/memory/shared_memory.h"
+
+namespace content {
+
+AudioInputSyncWriter::AudioInputSyncWriter(
+ base::SharedMemory* shared_memory,
+ int shared_memory_segment_count)
+ : shared_memory_(shared_memory),
+ shared_memory_segment_count_(shared_memory_segment_count),
+ current_segment_id_(0) {
+ DCHECK_GT(shared_memory_segment_count, 0);
+ DCHECK_EQ(shared_memory->requested_size() % shared_memory_segment_count, 0u);
+ shared_memory_segment_size_ =
+ shared_memory->requested_size() / shared_memory_segment_count;
+}
+
+AudioInputSyncWriter::~AudioInputSyncWriter() {}
+
+// TODO(henrika): Combine into one method (including Write).
+void AudioInputSyncWriter::UpdateRecordedBytes(uint32 bytes) {
+ socket_->Send(&bytes, sizeof(bytes));
+}
+
+uint32 AudioInputSyncWriter::Write(
+ const void* data, uint32 size, double volume) {
+ uint8* ptr = static_cast<uint8*>(shared_memory_->memory());
+ ptr += current_segment_id_ * shared_memory_segment_size_;
+ media::AudioInputBuffer* buffer =
+ reinterpret_cast<media::AudioInputBuffer*>(ptr);
+ buffer->params.volume = volume;
+ buffer->params.size = size;
+ memcpy(buffer->audio, data, size);
+
+ if (++current_segment_id_ >= shared_memory_segment_count_)
+ current_segment_id_ = 0;
+
+ return size;
+}
+
+void AudioInputSyncWriter::Close() {
+ socket_->Close();
+}
+
+bool AudioInputSyncWriter::Init() {
+ socket_.reset(new base::CancelableSyncSocket());
+ foreign_socket_.reset(new base::CancelableSyncSocket());
+ return base::CancelableSyncSocket::CreatePair(socket_.get(),
+ foreign_socket_.get());
+}
+
+#if defined(OS_WIN)
+
+bool AudioInputSyncWriter::PrepareForeignSocketHandle(
+ base::ProcessHandle process_handle,
+ base::SyncSocket::Handle* foreign_handle) {
+ ::DuplicateHandle(GetCurrentProcess(), foreign_socket_->handle(),
+ process_handle, foreign_handle,
+ 0, FALSE, DUPLICATE_SAME_ACCESS);
+ return (*foreign_handle != 0);
+}
+
+#else
+
+bool AudioInputSyncWriter::PrepareForeignSocketHandle(
+ base::ProcessHandle process_handle,
+ base::FileDescriptor* foreign_handle) {
+ foreign_handle->fd = foreign_socket_->handle();
+ foreign_handle->auto_close = false;
+ return (foreign_handle->fd != -1);
+}
+
+#endif
+
+} // namespace content
diff --git a/chromium/content/browser/renderer_host/media/audio_input_sync_writer.h b/chromium/content/browser/renderer_host/media/audio_input_sync_writer.h
new file mode 100644
index 00000000000..4cfe9e3f396
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/audio_input_sync_writer.h
@@ -0,0 +1,60 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CONTENT_BROWSER_RENDERER_HOST_MEDIA_AUDIO_INPUT_SYNC_WRITER_H_
+#define CONTENT_BROWSER_RENDERER_HOST_MEDIA_AUDIO_INPUT_SYNC_WRITER_H_
+
+#include "base/file_descriptor_posix.h"
+#include "base/process/process.h"
+#include "base/sync_socket.h"
+#include "media/audio/audio_input_controller.h"
+
+namespace base {
+class SharedMemory;
+}
+
+namespace content {
+// An AudioInputController::SyncWriter implementation using SyncSocket. This
+// is used by AudioInputController to provide a low latency data source for
+// transmitting audio packets between the browser process and the renderer
+// process.
+class AudioInputSyncWriter : public media::AudioInputController::SyncWriter {
+ public:
+ explicit AudioInputSyncWriter(base::SharedMemory* shared_memory,
+ int shared_memory_segment_count);
+
+ virtual ~AudioInputSyncWriter();
+
+ // media::AudioInputController::SyncWriter implementation.
+ virtual void UpdateRecordedBytes(uint32 bytes) OVERRIDE;
+ virtual uint32 Write(const void* data, uint32 size, double volume) OVERRIDE;
+ virtual void Close() OVERRIDE;
+
+ bool Init();
+ bool PrepareForeignSocketHandle(base::ProcessHandle process_handle,
+#if defined(OS_WIN)
+ base::SyncSocket::Handle* foreign_handle);
+#else
+ base::FileDescriptor* foreign_handle);
+#endif
+
+ private:
+ base::SharedMemory* shared_memory_;
+ uint32 shared_memory_segment_size_;
+ uint32 shared_memory_segment_count_;
+ uint32 current_segment_id_;
+
+ // Socket for transmitting audio data.
+ scoped_ptr<base::CancelableSyncSocket> socket_;
+
+ // Socket to be used by the renderer. The reference is released after
+ // PrepareForeignSocketHandle() has been called successfully.
+ scoped_ptr<base::CancelableSyncSocket> foreign_socket_;
+
+ DISALLOW_IMPLICIT_CONSTRUCTORS(AudioInputSyncWriter);
+};
+
+} // namespace content
+
+#endif // CONTENT_BROWSER_RENDERER_HOST_MEDIA_AUDIO_INPUT_SYNC_WRITER_H_
diff --git a/chromium/content/browser/renderer_host/media/audio_mirroring_manager.cc b/chromium/content/browser/renderer_host/media/audio_mirroring_manager.cc
new file mode 100644
index 00000000000..8a2bc4b0b5c
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/audio_mirroring_manager.cc
@@ -0,0 +1,164 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "content/browser/renderer_host/media/audio_mirroring_manager.h"
+
+#include "content/public/browser/browser_thread.h"
+
+namespace content {
+
+namespace {
+
+// Debug utility to make sure methods of AudioMirroringManager are not invoked
+// more than once in a single call stack. In release builds, this compiles to
+// nothing and gets completely optimized out.
+class ReentrancyGuard {
+ public:
+#ifdef NDEBUG
+ ReentrancyGuard() {}
+ ~ReentrancyGuard() {}
+#else
+ ReentrancyGuard() {
+ DCHECK(!inside_a_method_);
+ inside_a_method_ = true;
+ }
+ ~ReentrancyGuard() {
+ inside_a_method_ = false;
+ }
+
+ static bool inside_a_method_; // Safe to be static, since AMM is a singleton.
+#endif
+};
+
+#ifndef NDEBUG
+bool ReentrancyGuard::inside_a_method_ = false;
+#endif
+
+} // namespace
+
+AudioMirroringManager::AudioMirroringManager() {}
+
+AudioMirroringManager::~AudioMirroringManager() {
+ DCHECK(diverters_.empty());
+ DCHECK(sessions_.empty());
+}
+
+void AudioMirroringManager::AddDiverter(
+ int render_process_id, int render_view_id, Diverter* diverter) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ ReentrancyGuard guard;
+ DCHECK(diverter);
+
+ // DCHECK(diverter not already in diverters_ under any key)
+#ifndef NDEBUG
+ for (DiverterMap::const_iterator it = diverters_.begin();
+ it != diverters_.end(); ++it) {
+ DCHECK_NE(diverter, it->second);
+ }
+#endif
+
+ // Add the diverter to the set of active diverters.
+ const Target target(render_process_id, render_view_id);
+ diverters_.insert(std::make_pair(target, diverter));
+
+ // If a mirroring session is active, start diverting the audio stream
+ // immediately.
+ SessionMap::iterator session_it = sessions_.find(target);
+ if (session_it != sessions_.end()) {
+ diverter->StartDiverting(
+ session_it->second->AddInput(diverter->GetAudioParameters()));
+ }
+}
+
+void AudioMirroringManager::RemoveDiverter(
+ int render_process_id, int render_view_id, Diverter* diverter) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ ReentrancyGuard guard;
+
+ // Stop diverting the audio stream if a mirroring session is active.
+ const Target target(render_process_id, render_view_id);
+ SessionMap::iterator session_it = sessions_.find(target);
+ if (session_it != sessions_.end())
+ diverter->StopDiverting();
+
+ // Remove the diverter from the set of active diverters.
+ for (DiverterMap::iterator it = diverters_.lower_bound(target);
+ it != diverters_.end() && it->first == target; ++it) {
+ if (it->second == diverter) {
+ diverters_.erase(it);
+ break;
+ }
+ }
+}
+
+void AudioMirroringManager::StartMirroring(
+ int render_process_id, int render_view_id,
+ MirroringDestination* destination) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ ReentrancyGuard guard;
+ DCHECK(destination);
+
+ // Insert an entry into the set of active mirroring sessions. If a mirroring
+ // session is already active for |render_process_id| + |render_view_id|,
+ // replace the entry.
+ const Target target(render_process_id, render_view_id);
+ SessionMap::iterator session_it = sessions_.find(target);
+ MirroringDestination* old_destination;
+ if (session_it == sessions_.end()) {
+ old_destination = NULL;
+ sessions_.insert(std::make_pair(target, destination));
+
+ DVLOG(1) << "Start mirroring render_process_id:render_view_id="
+ << render_process_id << ':' << render_view_id
+ << " --> MirroringDestination@" << destination;
+ } else {
+ old_destination = session_it->second;
+ session_it->second = destination;
+
+ DVLOG(1) << "Switch mirroring of render_process_id:render_view_id="
+ << render_process_id << ':' << render_view_id
+ << " MirroringDestination@" << old_destination
+ << " --> MirroringDestination@" << destination;
+ }
+
+  // Divert audio streams coming from |target| to |destination|. If streams
+  // were already being diverted to |old_destination|, stop that diversion
+  // first.
+ for (DiverterMap::iterator it = diverters_.lower_bound(target);
+ it != diverters_.end() && it->first == target; ++it) {
+ Diverter* const diverter = it->second;
+ if (old_destination)
+ diverter->StopDiverting();
+ diverter->StartDiverting(
+ destination->AddInput(diverter->GetAudioParameters()));
+ }
+}
+
+void AudioMirroringManager::StopMirroring(
+ int render_process_id, int render_view_id,
+ MirroringDestination* destination) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ ReentrancyGuard guard;
+
+ // Stop mirroring if there is an active session *and* the destination
+ // matches.
+ const Target target(render_process_id, render_view_id);
+ SessionMap::iterator session_it = sessions_.find(target);
+ if (session_it == sessions_.end() || destination != session_it->second)
+ return;
+
+ DVLOG(1) << "Stop mirroring render_process_id:render_view_id="
+ << render_process_id << ':' << render_view_id
+ << " --> MirroringDestination@" << destination;
+
+ // Stop diverting each audio stream in the mirroring session being stopped.
+ for (DiverterMap::iterator it = diverters_.lower_bound(target);
+ it != diverters_.end() && it->first == target; ++it) {
+ it->second->StopDiverting();
+ }
+
+ // Remove the entry from the set of active mirroring sessions.
+ sessions_.erase(session_it);
+}
+
+} // namespace content
diff --git a/chromium/content/browser/renderer_host/media/audio_mirroring_manager.h b/chromium/content/browser/renderer_host/media/audio_mirroring_manager.h
new file mode 100644
index 00000000000..0db4f17539f
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/audio_mirroring_manager.h
@@ -0,0 +1,108 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// AudioMirroringManager is a singleton object that maintains a set of active
+// audio mirroring destinations and auto-connects/disconnects audio streams
+// to/from those destinations. It is meant to be used exclusively on the IO
+// BrowserThread.
+//
+// How it works:
+//
+// 1. AudioRendererHost gets a CreateStream message from the render process
+// and, among other things, creates an AudioOutputController to control the
+// audio data flow between the render and browser processes.
+// 2. At some point, AudioRendererHost receives an "associate with render
+// view" message. Among other actions, it registers the
+// AudioOutputController with AudioMirroringManager (as a Diverter).
+// 3. A user request to mirror all the audio for a single RenderView is made.
+// A MirroringDestination is created, and StartMirroring() is called to
+// begin the mirroring session. This causes AudioMirroringManager to
+// instruct any matching Diverters to divert their audio data to the
+// MirroringDestination.
+//
+// #2 and #3 above may occur in any order, as it is the job of
+// AudioMirroringManager to realize when the players can be "matched up."
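+//
+// A minimal usage sketch (FakeDestination is hypothetical and shown only to
+// illustrate the wiring; all calls must be made on the IO BrowserThread):
+//
+//   class FakeDestination
+//       : public AudioMirroringManager::MirroringDestination {
+//    public:
+//     virtual media::AudioOutputStream* AddInput(
+//         const media::AudioParameters& params) OVERRIDE {
+//       return ...;  // A stream that consumes the diverted audio data.
+//     }
+//   };
+//
+//   FakeDestination destination;
+//   mirroring_manager->StartMirroring(render_process_id, render_view_id,
+//                                     &destination);
+//   // Later, when the capture ends:
+//   mirroring_manager->StopMirroring(render_process_id, render_view_id,
+//                                    &destination);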
+
+#ifndef CONTENT_BROWSER_RENDERER_HOST_MEDIA_AUDIO_MIRRORING_MANAGER_H_
+#define CONTENT_BROWSER_RENDERER_HOST_MEDIA_AUDIO_MIRRORING_MANAGER_H_
+
+#include <map>
+#include <utility>
+
+#include "base/basictypes.h"
+#include "content/common/content_export.h"
+#include "media/audio/audio_source_diverter.h"
+
+namespace media {
+class AudioOutputStream;
+}
+
+namespace content {
+
+class CONTENT_EXPORT AudioMirroringManager {
+ public:
+ // Interface for diverting audio data to an alternative AudioOutputStream.
+ typedef media::AudioSourceDiverter Diverter;
+
+ // Interface to be implemented by audio mirroring destinations. See comments
+ // for StartMirroring() and StopMirroring() below.
+ class MirroringDestination {
+ public:
+ // Create a consumer of audio data in the format specified by |params|, and
+ // connect it as an input to mirroring. When Close() is called on the
+ // returned AudioOutputStream, the input is disconnected and the object
+ // becomes invalid.
+ virtual media::AudioOutputStream* AddInput(
+ const media::AudioParameters& params) = 0;
+
+ protected:
+ virtual ~MirroringDestination() {}
+ };
+
+ AudioMirroringManager();
+
+ virtual ~AudioMirroringManager();
+
+ // Add/Remove a diverter for an audio stream with a known RenderView target
+ // (represented by |render_process_id| + |render_view_id|). Multiple
+ // diverters may be added for the same target. |diverter| must live until
+ // after RemoveDiverter() is called.
+ //
+ // Re-entrancy warning: These methods should not be called by a Diverter
+ // during a Start/StopDiverting() invocation.
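+  // (In debug builds, the ReentrancyGuard in the .cc file DCHECKs this.)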
+ virtual void AddDiverter(int render_process_id, int render_view_id,
+ Diverter* diverter);
+ virtual void RemoveDiverter(int render_process_id, int render_view_id,
+ Diverter* diverter);
+
+ // Start/stop mirroring all audio output streams associated with a RenderView
+ // target (represented by |render_process_id| + |render_view_id|) to
+ // |destination|. |destination| must live until after StopMirroring() is
+ // called.
+ virtual void StartMirroring(int render_process_id, int render_view_id,
+ MirroringDestination* destination);
+ virtual void StopMirroring(int render_process_id, int render_view_id,
+ MirroringDestination* destination);
+
+ private:
+ // A mirroring target is a RenderView identified by a
+ // <render_process_id, render_view_id> pair.
+ typedef std::pair<int, int> Target;
+
+ // Note: Objects in these maps are not owned.
+ typedef std::multimap<Target, Diverter*> DiverterMap;
+ typedef std::map<Target, MirroringDestination*> SessionMap;
+
+ // Currently-active divertable audio streams.
+ DiverterMap diverters_;
+
+ // Currently-active mirroring sessions.
+ SessionMap sessions_;
+
+ DISALLOW_COPY_AND_ASSIGN(AudioMirroringManager);
+};
+
+} // namespace content
+
+#endif // CONTENT_BROWSER_RENDERER_HOST_MEDIA_AUDIO_MIRRORING_MANAGER_H_
diff --git a/chromium/content/browser/renderer_host/media/audio_mirroring_manager_unittest.cc b/chromium/content/browser/renderer_host/media/audio_mirroring_manager_unittest.cc
new file mode 100644
index 00000000000..2468b2c48c9
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/audio_mirroring_manager_unittest.cc
@@ -0,0 +1,234 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "content/browser/renderer_host/media/audio_mirroring_manager.h"
+
+#include <map>
+
+#include "base/bind.h"
+#include "base/bind_helpers.h"
+#include "base/message_loop/message_loop.h"
+#include "base/synchronization/waitable_event.h"
+#include "content/browser/browser_thread_impl.h"
+#include "media/audio/audio_parameters.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using media::AudioOutputStream;
+using media::AudioParameters;
+using testing::_;
+using testing::NotNull;
+using testing::Ref;
+using testing::Return;
+using testing::ReturnRef;
+
+namespace content {
+
+namespace {
+
+class MockDiverter : public AudioMirroringManager::Diverter {
+ public:
+ MOCK_METHOD0(GetAudioParameters, const AudioParameters&());
+ MOCK_METHOD1(StartDiverting, void(AudioOutputStream*));
+ MOCK_METHOD0(StopDiverting, void());
+};
+
+class MockMirroringDestination
+ : public AudioMirroringManager::MirroringDestination {
+ public:
+ MOCK_METHOD1(AddInput,
+ media::AudioOutputStream*(const media::AudioParameters& params));
+};
+
+} // namespace
+
+class AudioMirroringManagerTest : public testing::Test {
+ public:
+ AudioMirroringManagerTest()
+ : message_loop_(base::MessageLoop::TYPE_IO),
+ io_thread_(BrowserThread::IO, &message_loop_),
+ params_(AudioParameters::AUDIO_FAKE, media::CHANNEL_LAYOUT_STEREO,
+ AudioParameters::kAudioCDSampleRate, 16,
+ AudioParameters::kAudioCDSampleRate / 10) {}
+
+ MockDiverter* CreateStream(
+ int render_process_id, int render_view_id, int expected_times_diverted) {
+ MockDiverter* const diverter = new MockDiverter();
+ if (expected_times_diverted > 0) {
+ EXPECT_CALL(*diverter, GetAudioParameters())
+ .Times(expected_times_diverted)
+ .WillRepeatedly(ReturnRef(params_));
+ EXPECT_CALL(*diverter, StartDiverting(NotNull()))
+ .Times(expected_times_diverted);
+ EXPECT_CALL(*diverter, StopDiverting())
+ .Times(expected_times_diverted);
+ }
+
+ mirroring_manager_.AddDiverter(render_process_id, render_view_id, diverter);
+
+ return diverter;
+ }
+
+ void KillStream(
+ int render_process_id, int render_view_id, MockDiverter* diverter) {
+ mirroring_manager_.RemoveDiverter(
+ render_process_id, render_view_id, diverter);
+
+ delete diverter;
+ }
+
+ MockMirroringDestination* StartMirroringTo(
+ int render_process_id, int render_view_id, int expected_inputs_added) {
+ MockMirroringDestination* const dest = new MockMirroringDestination();
+ if (expected_inputs_added > 0) {
+ static AudioOutputStream* const kNonNullPointer =
+ reinterpret_cast<AudioOutputStream*>(0x11111110);
+ EXPECT_CALL(*dest, AddInput(Ref(params_)))
+ .Times(expected_inputs_added)
+ .WillRepeatedly(Return(kNonNullPointer));
+ }
+
+ mirroring_manager_.StartMirroring(render_process_id, render_view_id, dest);
+
+ return dest;
+ }
+
+ void StopMirroringTo(int render_process_id, int render_view_id,
+ MockMirroringDestination* dest) {
+ mirroring_manager_.StopMirroring(render_process_id, render_view_id, dest);
+
+ delete dest;
+  }
+
+ private:
+ base::MessageLoop message_loop_;
+ BrowserThreadImpl io_thread_;
+ AudioParameters params_;
+ AudioMirroringManager mirroring_manager_;
+
+ DISALLOW_COPY_AND_ASSIGN(AudioMirroringManagerTest);
+};
+
+namespace {
+const int kRenderProcessId = 123;
+const int kRenderViewId = 456;
+const int kAnotherRenderProcessId = 789;
+const int kAnotherRenderViewId = 1234;
+const int kYetAnotherRenderProcessId = 4560;
+const int kYetAnotherRenderViewId = 7890;
+}
+
+TEST_F(AudioMirroringManagerTest, MirroringSessionOfNothing) {
+ MockMirroringDestination* const destination =
+ StartMirroringTo(kRenderProcessId, kRenderViewId, 0);
+ StopMirroringTo(kRenderProcessId, kRenderViewId, destination);
+}
+
+TEST_F(AudioMirroringManagerTest, TwoMirroringSessionsOfNothing) {
+ MockMirroringDestination* const destination =
+ StartMirroringTo(kRenderProcessId, kRenderViewId, 0);
+ StopMirroringTo(kRenderProcessId, kRenderViewId, destination);
+
+ MockMirroringDestination* const another_destination =
+ StartMirroringTo(kAnotherRenderProcessId, kAnotherRenderViewId, 0);
+ StopMirroringTo(kAnotherRenderProcessId, kAnotherRenderViewId,
+ another_destination);
+}
+
+TEST_F(AudioMirroringManagerTest, SwitchMirroringDestinationNoStreams) {
+ MockMirroringDestination* const destination =
+ StartMirroringTo(kRenderProcessId, kRenderViewId, 0);
+ MockMirroringDestination* const new_destination =
+ StartMirroringTo(kRenderProcessId, kRenderViewId, 0);
+ StopMirroringTo(kRenderProcessId, kRenderViewId, destination);
+ StopMirroringTo(kRenderProcessId, kRenderViewId, new_destination);
+}
+
+TEST_F(AudioMirroringManagerTest, StreamLifetimeAroundMirroringSession) {
+ MockDiverter* const stream = CreateStream(kRenderProcessId, kRenderViewId, 1);
+ MockMirroringDestination* const destination =
+ StartMirroringTo(kRenderProcessId, kRenderViewId, 1);
+ StopMirroringTo(kRenderProcessId, kRenderViewId, destination);
+ KillStream(kRenderProcessId, kRenderViewId, stream);
+}
+
+TEST_F(AudioMirroringManagerTest, StreamLifetimeWithinMirroringSession) {
+ MockMirroringDestination* const destination =
+ StartMirroringTo(kRenderProcessId, kRenderViewId, 1);
+ MockDiverter* const stream = CreateStream(kRenderProcessId, kRenderViewId, 1);
+ KillStream(kRenderProcessId, kRenderViewId, stream);
+ StopMirroringTo(kRenderProcessId, kRenderViewId, destination);
+}
+
+TEST_F(AudioMirroringManagerTest, StreamLifetimeAroundTwoMirroringSessions) {
+ MockDiverter* const stream = CreateStream(kRenderProcessId, kRenderViewId, 2);
+ MockMirroringDestination* const destination =
+ StartMirroringTo(kRenderProcessId, kRenderViewId, 1);
+ StopMirroringTo(kRenderProcessId, kRenderViewId, destination);
+ MockMirroringDestination* const new_destination =
+ StartMirroringTo(kRenderProcessId, kRenderViewId, 1);
+ StopMirroringTo(kRenderProcessId, kRenderViewId, new_destination);
+ KillStream(kRenderProcessId, kRenderViewId, stream);
+}
+
+TEST_F(AudioMirroringManagerTest, StreamLifetimeWithinTwoMirroringSessions) {
+ MockMirroringDestination* const destination =
+ StartMirroringTo(kRenderProcessId, kRenderViewId, 1);
+ MockDiverter* const stream = CreateStream(kRenderProcessId, kRenderViewId, 2);
+ StopMirroringTo(kRenderProcessId, kRenderViewId, destination);
+ MockMirroringDestination* const new_destination =
+ StartMirroringTo(kRenderProcessId, kRenderViewId, 1);
+ KillStream(kRenderProcessId, kRenderViewId, stream);
+ StopMirroringTo(kRenderProcessId, kRenderViewId, new_destination);
+}
+
+TEST_F(AudioMirroringManagerTest, MultipleStreamsInOneMirroringSession) {
+ MockDiverter* const stream1 =
+ CreateStream(kRenderProcessId, kRenderViewId, 1);
+ MockMirroringDestination* const destination =
+ StartMirroringTo(kRenderProcessId, kRenderViewId, 3);
+ MockDiverter* const stream2 =
+ CreateStream(kRenderProcessId, kRenderViewId, 1);
+ MockDiverter* const stream3 =
+ CreateStream(kRenderProcessId, kRenderViewId, 1);
+ KillStream(kRenderProcessId, kRenderViewId, stream2);
+ StopMirroringTo(kRenderProcessId, kRenderViewId, destination);
+ KillStream(kRenderProcessId, kRenderViewId, stream3);
+ KillStream(kRenderProcessId, kRenderViewId, stream1);
+}
+
+// A random interleaving of operations for three separate targets, each of which
+// has one stream mirrored to one destination.
+TEST_F(AudioMirroringManagerTest, ThreeSeparateMirroringSessions) {
+ MockDiverter* const stream =
+ CreateStream(kRenderProcessId, kRenderViewId, 1);
+ MockMirroringDestination* const destination =
+ StartMirroringTo(kRenderProcessId, kRenderViewId, 1);
+
+ MockMirroringDestination* const another_destination =
+ StartMirroringTo(kAnotherRenderProcessId, kAnotherRenderViewId, 1);
+ MockDiverter* const another_stream =
+ CreateStream(kAnotherRenderProcessId, kAnotherRenderViewId, 1);
+
+ KillStream(kRenderProcessId, kRenderViewId, stream);
+
+ MockDiverter* const yet_another_stream =
+ CreateStream(kYetAnotherRenderProcessId, kYetAnotherRenderViewId, 1);
+ MockMirroringDestination* const yet_another_destination =
+ StartMirroringTo(kYetAnotherRenderProcessId, kYetAnotherRenderViewId, 1);
+
+ StopMirroringTo(kAnotherRenderProcessId, kAnotherRenderViewId,
+ another_destination);
+
+ StopMirroringTo(kYetAnotherRenderProcessId, kYetAnotherRenderViewId,
+ yet_another_destination);
+
+ StopMirroringTo(kRenderProcessId, kRenderViewId, destination);
+
+ KillStream(kAnotherRenderProcessId, kAnotherRenderViewId, another_stream);
+ KillStream(kYetAnotherRenderProcessId, kYetAnotherRenderViewId,
+ yet_another_stream);
+}
+
+} // namespace content
diff --git a/chromium/content/browser/renderer_host/media/audio_renderer_host.cc b/chromium/content/browser/renderer_host/media/audio_renderer_host.cc
new file mode 100644
index 00000000000..53f2eb2ae90
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/audio_renderer_host.cc
@@ -0,0 +1,477 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "content/browser/renderer_host/media/audio_renderer_host.h"
+
+#include "base/bind.h"
+#include "base/bind_helpers.h"
+#include "base/command_line.h"
+#include "base/memory/shared_memory.h"
+#include "base/metrics/histogram.h"
+#include "base/process/process.h"
+#include "content/browser/browser_main_loop.h"
+#include "content/browser/media/media_internals.h"
+#include "content/browser/renderer_host/media/audio_input_device_manager.h"
+#include "content/browser/renderer_host/media/audio_mirroring_manager.h"
+#include "content/browser/renderer_host/media/audio_sync_reader.h"
+#include "content/browser/renderer_host/media/media_stream_manager.h"
+#include "content/common/media/audio_messages.h"
+#include "content/public/browser/content_browser_client.h"
+#include "content/public/browser/media_observer.h"
+#include "content/public/common/content_switches.h"
+#include "media/audio/audio_manager_base.h"
+#include "media/audio/shared_memory_util.h"
+#include "media/base/audio_bus.h"
+#include "media/base/limits.h"
+
+using media::AudioBus;
+
+namespace content {
+
+class AudioRendererHost::AudioEntry
+ : public media::AudioOutputController::EventHandler {
+ public:
+ AudioEntry(AudioRendererHost* host,
+ int stream_id,
+ int render_view_id,
+ const media::AudioParameters& params,
+ const std::string& input_device_id,
+ scoped_ptr<base::SharedMemory> shared_memory,
+ scoped_ptr<media::AudioOutputController::SyncReader> reader);
+ virtual ~AudioEntry();
+
+ int stream_id() const {
+ return stream_id_;
+ }
+
+ int render_view_id() const {
+ return render_view_id_;
+ }
+
+ media::AudioOutputController* controller() const { return controller_.get(); }
+
+ base::SharedMemory* shared_memory() {
+ return shared_memory_.get();
+ }
+
+ media::AudioOutputController::SyncReader* reader() const {
+ return reader_.get();
+ }
+
+ private:
+ // media::AudioOutputController::EventHandler implementation.
+ virtual void OnCreated() OVERRIDE;
+ virtual void OnPlaying() OVERRIDE;
+ virtual void OnPowerMeasured(float power_dbfs, bool clipped) OVERRIDE;
+ virtual void OnPaused() OVERRIDE;
+ virtual void OnError() OVERRIDE;
+ virtual void OnDeviceChange(int new_buffer_size, int new_sample_rate)
+ OVERRIDE;
+
+ AudioRendererHost* const host_;
+ const int stream_id_;
+
+ // The routing ID of the source render view.
+ const int render_view_id_;
+
+ // The AudioOutputController that manages the audio stream.
+ const scoped_refptr<media::AudioOutputController> controller_;
+
+ // Shared memory for transmission of the audio data.
+ const scoped_ptr<base::SharedMemory> shared_memory_;
+
+ // The synchronous reader to be used by the controller.
+ const scoped_ptr<media::AudioOutputController::SyncReader> reader_;
+};
+
+AudioRendererHost::AudioEntry::AudioEntry(
+ AudioRendererHost* host, int stream_id, int render_view_id,
+ const media::AudioParameters& params,
+ const std::string& input_device_id,
+ scoped_ptr<base::SharedMemory> shared_memory,
+ scoped_ptr<media::AudioOutputController::SyncReader> reader)
+ : host_(host),
+ stream_id_(stream_id),
+ render_view_id_(render_view_id),
+ controller_(media::AudioOutputController::Create(
+ host->audio_manager_, this, params, input_device_id, reader.get())),
+ shared_memory_(shared_memory.Pass()),
+ reader_(reader.Pass()) {
+ DCHECK(controller_.get());
+}
+
+AudioRendererHost::AudioEntry::~AudioEntry() {}
+
+///////////////////////////////////////////////////////////////////////////////
+// AudioRendererHost implementations.
+AudioRendererHost::AudioRendererHost(
+ int render_process_id,
+ media::AudioManager* audio_manager,
+ AudioMirroringManager* mirroring_manager,
+ MediaInternals* media_internals,
+ MediaStreamManager* media_stream_manager)
+ : render_process_id_(render_process_id),
+ audio_manager_(audio_manager),
+ mirroring_manager_(mirroring_manager),
+ media_internals_(media_internals),
+ media_stream_manager_(media_stream_manager) {
+ DCHECK(audio_manager_);
+ DCHECK(media_stream_manager_);
+}
+
+AudioRendererHost::~AudioRendererHost() {
+ DCHECK(audio_entries_.empty());
+}
+
+void AudioRendererHost::OnChannelClosing() {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ BrowserMessageFilter::OnChannelClosing();
+
+ // Since the IPC channel is gone, close all requested audio streams.
+ while (!audio_entries_.empty()) {
+ // Note: OnCloseStream() removes the entries from audio_entries_.
+ OnCloseStream(audio_entries_.begin()->first);
+ }
+}
+
+void AudioRendererHost::OnDestruct() const {
+ BrowserThread::DeleteOnIOThread::Destruct(this);
+}
+
+void AudioRendererHost::AudioEntry::OnCreated() {
+ BrowserThread::PostTask(
+ BrowserThread::IO,
+ FROM_HERE,
+ base::Bind(&AudioRendererHost::DoCompleteCreation, host_, stream_id_));
+}
+
+void AudioRendererHost::AudioEntry::OnPlaying() {
+ BrowserThread::PostTask(
+ BrowserThread::IO,
+ FROM_HERE,
+ base::Bind(
+ base::IgnoreResult(&AudioRendererHost::Send), host_,
+ new AudioMsg_NotifyStreamStateChanged(
+ stream_id_, media::AudioOutputIPCDelegate::kPlaying)));
+}
+
+void AudioRendererHost::AudioEntry::OnPowerMeasured(float power_dbfs,
+ bool clipped) {
+ BrowserThread::PostTask(
+ BrowserThread::IO,
+ FROM_HERE,
+ base::Bind(&AudioRendererHost::DoNotifyAudioPowerLevel, host_,
+ stream_id_, power_dbfs, clipped));
+}
+
+void AudioRendererHost::AudioEntry::OnPaused() {
+ BrowserThread::PostTask(
+ BrowserThread::IO,
+ FROM_HERE,
+ base::Bind(
+ base::IgnoreResult(&AudioRendererHost::Send), host_,
+ new AudioMsg_NotifyStreamStateChanged(
+ stream_id_, media::AudioOutputIPCDelegate::kPaused)));
+}
+
+void AudioRendererHost::AudioEntry::OnError() {
+ BrowserThread::PostTask(
+ BrowserThread::IO,
+ FROM_HERE,
+ base::Bind(&AudioRendererHost::ReportErrorAndClose, host_, stream_id_));
+}
+
+void AudioRendererHost::AudioEntry::OnDeviceChange(int new_buffer_size,
+ int new_sample_rate) {
+ BrowserThread::PostTask(
+ BrowserThread::IO,
+ FROM_HERE,
+ base::Bind(base::IgnoreResult(&AudioRendererHost::Send), host_,
+ new AudioMsg_NotifyDeviceChanged(
+ stream_id_, new_buffer_size, new_sample_rate)));
+}
+
+void AudioRendererHost::DoCompleteCreation(int stream_id) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ if (!PeerHandle()) {
+ NOTREACHED() << "Renderer process handle is invalid.";
+ ReportErrorAndClose(stream_id);
+ return;
+ }
+
+ AudioEntry* const entry = LookupById(stream_id);
+ if (!entry) {
+ ReportErrorAndClose(stream_id);
+ return;
+ }
+
+  // Once the audio stream is created, complete the creation process by
+  // mapping the shared memory and sharing it with the renderer process.
+ base::SharedMemoryHandle foreign_memory_handle;
+ if (!entry->shared_memory()->ShareToProcess(PeerHandle(),
+ &foreign_memory_handle)) {
+    // If we failed to map and share the shared memory, then close the audio
+    // stream and send an error message.
+ ReportErrorAndClose(entry->stream_id());
+ return;
+ }
+
+ AudioSyncReader* reader = static_cast<AudioSyncReader*>(entry->reader());
+
+#if defined(OS_WIN)
+ base::SyncSocket::Handle foreign_socket_handle;
+#else
+ base::FileDescriptor foreign_socket_handle;
+#endif
+
+  // If we failed to prepare the sync socket for the renderer, then we fail
+  // the construction of the audio stream.
+ if (!reader->PrepareForeignSocketHandle(PeerHandle(),
+ &foreign_socket_handle)) {
+ ReportErrorAndClose(entry->stream_id());
+ return;
+ }
+
+ Send(new AudioMsg_NotifyStreamCreated(
+ entry->stream_id(),
+ foreign_memory_handle,
+ foreign_socket_handle,
+ media::PacketSizeInBytes(entry->shared_memory()->requested_size())));
+}
+
+void AudioRendererHost::DoNotifyAudioPowerLevel(int stream_id,
+ float power_dbfs,
+ bool clipped) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ MediaObserver* const media_observer =
+ GetContentClient()->browser()->GetMediaObserver();
+ if (media_observer) {
+ if (CommandLine::ForCurrentProcess()->HasSwitch(
+ switches::kEnableAudibleNotifications)) {
+ AudioEntry* const entry = LookupById(stream_id);
+ if (entry) {
+ media_observer->OnAudioStreamPlayingChanged(
+ render_process_id_, entry->render_view_id(), entry->stream_id(),
+ true, power_dbfs, clipped);
+ }
+ }
+ }
+}
+
+///////////////////////////////////////////////////////////////////////////////
+// IPC Messages handler
+bool AudioRendererHost::OnMessageReceived(const IPC::Message& message,
+ bool* message_was_ok) {
+ bool handled = true;
+ IPC_BEGIN_MESSAGE_MAP_EX(AudioRendererHost, message, *message_was_ok)
+ IPC_MESSAGE_HANDLER(AudioHostMsg_CreateStream, OnCreateStream)
+ IPC_MESSAGE_HANDLER(AudioHostMsg_PlayStream, OnPlayStream)
+ IPC_MESSAGE_HANDLER(AudioHostMsg_PauseStream, OnPauseStream)
+ IPC_MESSAGE_HANDLER(AudioHostMsg_CloseStream, OnCloseStream)
+ IPC_MESSAGE_HANDLER(AudioHostMsg_SetVolume, OnSetVolume)
+ IPC_MESSAGE_UNHANDLED(handled = false)
+ IPC_END_MESSAGE_MAP_EX()
+
+ return handled;
+}
+
+void AudioRendererHost::OnCreateStream(
+ int stream_id, int render_view_id, int session_id,
+ const media::AudioParameters& params) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ DVLOG(1) << "AudioRendererHost@" << this
+ << "::OnCreateStream(stream_id=" << stream_id
+ << ", render_view_id=" << render_view_id
+ << ", session_id=" << session_id << ")";
+ DCHECK_GT(render_view_id, 0);
+
+ // media::AudioParameters is validated in the deserializer.
+ int input_channels = params.input_channels();
+ if (input_channels < 0 ||
+ input_channels > media::limits::kMaxChannels ||
+ LookupById(stream_id) != NULL) {
+ SendErrorMessage(stream_id);
+ return;
+ }
+
+  // When |input_channels| is non-zero, the client is creating a unified IO
+  // stream, which also opens the input device mapped to |session_id|.
+ std::string input_device_id;
+ if (input_channels > 0) {
+ const StreamDeviceInfo* info = media_stream_manager_->
+ audio_input_device_manager()->GetOpenedDeviceInfoById(session_id);
+ if (!info) {
+ SendErrorMessage(stream_id);
+ DLOG(WARNING) << "No permission has been granted to input stream with "
+ << "session_id=" << session_id;
+ return;
+ }
+
+ input_device_id = info->device.id;
+ }
+
+ // Calculate output and input memory size.
+ int output_memory_size = AudioBus::CalculateMemorySize(params);
+ int frames = params.frames_per_buffer();
+ int input_memory_size =
+ AudioBus::CalculateMemorySize(input_channels, frames);
+
+  // Create the shared memory and share it with the renderer process. For
+  // synchronized I/O (input_channels > 0), extra memory is allocated after
+  // the output data to hold the input data.
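+  // The resulting layout of the shared memory region is, in order: the
+  // output AudioBus data, the optional input AudioBus data, and the extra
+  // bookkeeping bytes added by TotalSharedMemorySizeInBytes() below (used to
+  // communicate the actual data size; see media/audio/shared_memory_util.h).
+  // AudioSyncReader wraps AudioBus objects directly over this memory.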
+ uint32 io_buffer_size = output_memory_size + input_memory_size;
+ uint32 shared_memory_size =
+ media::TotalSharedMemorySizeInBytes(io_buffer_size);
+ scoped_ptr<base::SharedMemory> shared_memory(new base::SharedMemory());
+ if (!shared_memory->CreateAndMapAnonymous(shared_memory_size)) {
+ SendErrorMessage(stream_id);
+ return;
+ }
+
+ scoped_ptr<AudioSyncReader> reader(
+ new AudioSyncReader(shared_memory.get(), params, input_channels));
+ if (!reader->Init()) {
+ SendErrorMessage(stream_id);
+ return;
+ }
+
+ MediaObserver* const media_observer =
+ GetContentClient()->browser()->GetMediaObserver();
+ if (media_observer)
+ media_observer->OnCreatingAudioStream(render_process_id_, render_view_id);
+
+ scoped_ptr<AudioEntry> entry(new AudioEntry(
+ this, stream_id, render_view_id, params, input_device_id,
+ shared_memory.Pass(),
+ reader.PassAs<media::AudioOutputController::SyncReader>()));
+ if (mirroring_manager_) {
+ mirroring_manager_->AddDiverter(
+ render_process_id_, entry->render_view_id(), entry->controller());
+ }
+ audio_entries_.insert(std::make_pair(stream_id, entry.release()));
+ if (media_internals_)
+ media_internals_->OnSetAudioStreamStatus(this, stream_id, "created");
+}
+
+void AudioRendererHost::OnPlayStream(int stream_id) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ AudioEntry* entry = LookupById(stream_id);
+ if (!entry) {
+ SendErrorMessage(stream_id);
+ return;
+ }
+
+ entry->controller()->Play();
+ if (media_internals_)
+ media_internals_->OnSetAudioStreamPlaying(this, stream_id, true);
+}
+
+void AudioRendererHost::OnPauseStream(int stream_id) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ AudioEntry* entry = LookupById(stream_id);
+ if (!entry) {
+ SendErrorMessage(stream_id);
+ return;
+ }
+
+ entry->controller()->Pause();
+ if (media_internals_)
+ media_internals_->OnSetAudioStreamPlaying(this, stream_id, false);
+}
+
+void AudioRendererHost::OnSetVolume(int stream_id, double volume) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ AudioEntry* entry = LookupById(stream_id);
+ if (!entry) {
+ SendErrorMessage(stream_id);
+ return;
+ }
+
+ // Make sure the volume is valid.
+ if (volume < 0 || volume > 1.0)
+ return;
+ entry->controller()->SetVolume(volume);
+ if (media_internals_)
+ media_internals_->OnSetAudioStreamVolume(this, stream_id, volume);
+}
+
+void AudioRendererHost::SendErrorMessage(int stream_id) {
+ Send(new AudioMsg_NotifyStreamStateChanged(
+ stream_id, media::AudioOutputIPCDelegate::kError));
+}
+
+void AudioRendererHost::OnCloseStream(int stream_id) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+  // Prevent outstanding callbacks from attempting to close/delete the same
+  // AudioEntry twice.
+ AudioEntryMap::iterator i = audio_entries_.find(stream_id);
+ if (i == audio_entries_.end())
+ return;
+ scoped_ptr<AudioEntry> entry(i->second);
+ audio_entries_.erase(i);
+
+ media::AudioOutputController* const controller = entry->controller();
+ if (mirroring_manager_) {
+ mirroring_manager_->RemoveDiverter(
+ render_process_id_, entry->render_view_id(), controller);
+ }
+ controller->Close(
+ base::Bind(&AudioRendererHost::DeleteEntry, this, base::Passed(&entry)));
+
+ if (media_internals_)
+ media_internals_->OnSetAudioStreamStatus(this, stream_id, "closed");
+}
+
+void AudioRendererHost::DeleteEntry(scoped_ptr<AudioEntry> entry) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+  // At this point, send the final notification about the audio playback
+  // state.
+ MediaObserver* const media_observer =
+ GetContentClient()->browser()->GetMediaObserver();
+ if (media_observer) {
+ media_observer->OnAudioStreamPlayingChanged(
+ render_process_id_, entry->render_view_id(), entry->stream_id(),
+ false, -std::numeric_limits<float>::infinity(), false);
+ }
+
+  // Notify MediaInternals that the stream has been deleted.
+ if (media_internals_)
+ media_internals_->OnDeleteAudioStream(this, entry->stream_id());
+
+ // Note: |entry| will be deleted upon leaving this scope.
+}
+
+void AudioRendererHost::ReportErrorAndClose(int stream_id) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ // Make sure this isn't a stray callback executing after the stream has been
+ // closed, so error notifications aren't sent after clients believe the stream
+ // is closed.
+ if (!LookupById(stream_id))
+ return;
+
+ SendErrorMessage(stream_id);
+
+ if (media_internals_)
+ media_internals_->OnSetAudioStreamStatus(this, stream_id, "error");
+
+ OnCloseStream(stream_id);
+}
+
+AudioRendererHost::AudioEntry* AudioRendererHost::LookupById(int stream_id) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ AudioEntryMap::const_iterator i = audio_entries_.find(stream_id);
+ return i != audio_entries_.end() ? i->second : NULL;
+}
+
+} // namespace content
diff --git a/chromium/content/browser/renderer_host/media/audio_renderer_host.h b/chromium/content/browser/renderer_host/media/audio_renderer_host.h
new file mode 100644
index 00000000000..47dbee9c0c1
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/audio_renderer_host.h
@@ -0,0 +1,162 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// AudioRendererHost serves audio-related requests from AudioRenderer, which
+// lives inside the render process, and provides access to audio hardware.
+//
+// This class is owned by BrowserRenderProcessHost and instantiated on the UI
+// thread, but all other operations and method calls happen on the IO thread,
+// so we need to be extra careful about the lifetime of this object. The
+// AudioManager singleton is created on the IO thread, and audio output
+// streams are also created on the IO thread, so they must be destroyed on
+// the IO thread as well. After this class is created, an OnInitialized() task
+// is posted to the IO thread, in which the AudioManager singleton is created.
+//
+// Here's an example of a typical IPC dialog for audio:
+//
+// Renderer AudioRendererHost
+// | |
+// | CreateStream > |
+// | < NotifyStreamCreated |
+// | |
+// | PlayStream > |
+// | < NotifyStreamStateChanged | kAudioStreamPlaying
+// | |
+// | PauseStream > |
+// | < NotifyStreamStateChanged | kAudioStreamPaused
+// | |
+// | PlayStream > |
+// | < NotifyStreamStateChanged | kAudioStreamPlaying
+// | ... |
+// | CloseStream > |
+// v v
+
+// A SyncSocket pair is used to signal buffer readiness between processes.
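+//
+// The audio samples themselves never travel over IPC. NotifyStreamCreated
+// hands the renderer a shared-memory handle and one end of the SyncSocket
+// pair; the browser pulls rendered data out of that shared memory via
+// AudioSyncReader and uses the socket to tell the renderer how many bytes
+// are pending (see AudioSyncReader::UpdatePendingBytes()).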
+
+#ifndef CONTENT_BROWSER_RENDERER_HOST_MEDIA_AUDIO_RENDERER_HOST_H_
+#define CONTENT_BROWSER_RENDERER_HOST_MEDIA_AUDIO_RENDERER_HOST_H_
+
+#include <map>
+
+#include "base/gtest_prod_util.h"
+#include "base/memory/ref_counted.h"
+#include "base/memory/scoped_ptr.h"
+#include "base/process/process.h"
+#include "base/sequenced_task_runner_helpers.h"
+#include "content/common/content_export.h"
+#include "content/public/browser/browser_message_filter.h"
+#include "content/public/browser/browser_thread.h"
+#include "media/audio/audio_io.h"
+#include "media/audio/audio_output_controller.h"
+#include "media/audio/simple_sources.h"
+
+namespace media {
+class AudioManager;
+class AudioParameters;
+}
+
+namespace content {
+
+class AudioMirroringManager;
+class MediaInternals;
+class MediaStreamManager;
+class ResourceContext;
+
+class CONTENT_EXPORT AudioRendererHost : public BrowserMessageFilter {
+ public:
+ // Called from UI thread from the owner of this object.
+ AudioRendererHost(int render_process_id,
+ media::AudioManager* audio_manager,
+ AudioMirroringManager* mirroring_manager,
+ MediaInternals* media_internals,
+ MediaStreamManager* media_stream_manager);
+
+ // BrowserMessageFilter implementation.
+ virtual void OnChannelClosing() OVERRIDE;
+ virtual void OnDestruct() const OVERRIDE;
+ virtual bool OnMessageReceived(const IPC::Message& message,
+ bool* message_was_ok) OVERRIDE;
+
+ private:
+ friend class AudioRendererHostTest;
+ friend class BrowserThread;
+ friend class base::DeleteHelper<AudioRendererHost>;
+ friend class MockAudioRendererHost;
+ FRIEND_TEST_ALL_PREFIXES(AudioRendererHostTest, CreateMockStream);
+ FRIEND_TEST_ALL_PREFIXES(AudioRendererHostTest, MockStreamDataConversation);
+
+ class AudioEntry;
+ typedef std::map<int, AudioEntry*> AudioEntryMap;
+
+ virtual ~AudioRendererHost();
+
+ // Methods called on IO thread ----------------------------------------------
+
+ // Audio related IPC message handlers.
+
+ // Creates an audio output stream with the specified format whose data is
+ // produced by an entity in the render view referenced by |render_view_id|.
+  // |session_id| is used for unified IO to determine which input device
+  // should be opened for the stream. For clients that do not use unified IO,
+  // |session_id| is ignored.
+ // Upon success/failure, the peer is notified via the NotifyStreamCreated
+ // message.
+ void OnCreateStream(int stream_id,
+ int render_view_id,
+ int session_id,
+ const media::AudioParameters& params);
+
+ // Play the audio stream referenced by |stream_id|.
+ void OnPlayStream(int stream_id);
+
+ // Pause the audio stream referenced by |stream_id|.
+ void OnPauseStream(int stream_id);
+
+ // Close the audio stream referenced by |stream_id|.
+ void OnCloseStream(int stream_id);
+
+ // Set the volume of the audio stream referenced by |stream_id|.
+ void OnSetVolume(int stream_id, double volume);
+
+ // Complete the process of creating an audio stream. This will set up the
+ // shared memory or shared socket in low latency mode and send the
+ // NotifyStreamCreated message to the peer.
+ void DoCompleteCreation(int stream_id);
+
+ // Propagate measured power level of the audio signal to MediaObserver.
+ void DoNotifyAudioPowerLevel(int stream_id, float power_dbfs, bool clipped);
+
+ // Send an error message to the renderer.
+ void SendErrorMessage(int stream_id);
+
+ // Delete an audio entry, notifying observers first. This is called by
+ // AudioOutputController after it has closed.
+ void DeleteEntry(scoped_ptr<AudioEntry> entry);
+
+ // Send an error message to the renderer, then close the stream.
+ void ReportErrorAndClose(int stream_id);
+
+  // A helper method to look up an AudioEntry identified by |stream_id|.
+  // Returns NULL if not found.
+ AudioEntry* LookupById(int stream_id);
+
+ // ID of the RenderProcessHost that owns this instance.
+ const int render_process_id_;
+
+ media::AudioManager* const audio_manager_;
+ AudioMirroringManager* const mirroring_manager_;
+ MediaInternals* const media_internals_;
+
+  // Used to access the AudioInputDeviceManager.
+ MediaStreamManager* media_stream_manager_;
+
+ // A map of stream IDs to audio sources.
+ AudioEntryMap audio_entries_;
+
+ DISALLOW_COPY_AND_ASSIGN(AudioRendererHost);
+};
+
+} // namespace content
+
+#endif // CONTENT_BROWSER_RENDERER_HOST_MEDIA_AUDIO_RENDERER_HOST_H_
diff --git a/chromium/content/browser/renderer_host/media/audio_renderer_host_unittest.cc b/chromium/content/browser/renderer_host/media/audio_renderer_host_unittest.cc
new file mode 100644
index 00000000000..42fecc07a96
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/audio_renderer_host_unittest.cc
@@ -0,0 +1,423 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "base/bind.h"
+#include "base/environment.h"
+#include "base/memory/scoped_ptr.h"
+#include "base/message_loop/message_loop.h"
+#include "base/sync_socket.h"
+#include "content/browser/browser_thread_impl.h"
+#include "content/browser/renderer_host/media/audio_input_device_manager.h"
+#include "content/browser/renderer_host/media/audio_mirroring_manager.h"
+#include "content/browser/renderer_host/media/audio_renderer_host.h"
+#include "content/browser/renderer_host/media/media_stream_manager.h"
+#include "content/browser/renderer_host/media/mock_media_observer.h"
+#include "content/common/media/audio_messages.h"
+#include "content/common/media/media_stream_options.h"
+#include "ipc/ipc_message_utils.h"
+#include "media/audio/audio_manager.h"
+#include "media/audio/audio_manager_base.h"
+#include "media/audio/fake_audio_output_stream.h"
+#include "net/url_request/url_request_context.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using ::testing::_;
+using ::testing::Assign;
+using ::testing::DoAll;
+using ::testing::NotNull;
+
+namespace content {
+
+static const int kRenderProcessId = 1;
+static const int kRenderViewId = 4;
+static const int kStreamId = 50;
+
+class MockAudioMirroringManager : public AudioMirroringManager {
+ public:
+ MockAudioMirroringManager() {}
+ virtual ~MockAudioMirroringManager() {}
+
+ MOCK_METHOD3(AddDiverter,
+ void(int render_process_id, int render_view_id,
+ Diverter* diverter));
+ MOCK_METHOD3(RemoveDiverter,
+ void(int render_process_id, int render_view_id,
+ Diverter* diverter));
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(MockAudioMirroringManager);
+};
+
+class MockAudioRendererHost : public AudioRendererHost {
+ public:
+ explicit MockAudioRendererHost(
+ media::AudioManager* audio_manager,
+ AudioMirroringManager* mirroring_manager,
+ MediaInternals* media_internals,
+ MediaStreamManager* media_stream_manager)
+ : AudioRendererHost(kRenderProcessId,
+ audio_manager,
+ mirroring_manager,
+ media_internals,
+ media_stream_manager),
+ shared_memory_length_(0) {
+ }
+
+ // A list of mock methods.
+ MOCK_METHOD2(OnStreamCreated,
+ void(int stream_id, int length));
+ MOCK_METHOD1(OnStreamPlaying, void(int stream_id));
+ MOCK_METHOD1(OnStreamPaused, void(int stream_id));
+ MOCK_METHOD1(OnStreamError, void(int stream_id));
+
+ private:
+ virtual ~MockAudioRendererHost() {
+ // Make sure all audio streams have been deleted.
+ EXPECT_TRUE(audio_entries_.empty());
+ }
+
+ // This method is used to dispatch IPC messages to the renderer. We intercept
+ // these messages here and dispatch to our mock methods to verify the
+ // conversation between this object and the renderer.
+ virtual bool Send(IPC::Message* message) {
+ CHECK(message);
+
+    // In this method we dispatch the messages to the corresponding handlers
+    // as if we were the renderer.
+ bool handled = true;
+ IPC_BEGIN_MESSAGE_MAP(MockAudioRendererHost, *message)
+ IPC_MESSAGE_HANDLER(AudioMsg_NotifyStreamCreated,
+ OnStreamCreated)
+ IPC_MESSAGE_HANDLER(AudioMsg_NotifyStreamStateChanged,
+ OnStreamStateChanged)
+ IPC_MESSAGE_UNHANDLED(handled = false)
+ IPC_END_MESSAGE_MAP()
+ EXPECT_TRUE(handled);
+
+ delete message;
+ return true;
+ }
+
+ void OnStreamCreated(const IPC::Message& msg, int stream_id,
+ base::SharedMemoryHandle handle,
+#if defined(OS_WIN)
+ base::SyncSocket::Handle socket_handle,
+#else
+ base::FileDescriptor socket_descriptor,
+#endif
+ uint32 length) {
+ // Maps the shared memory.
+ shared_memory_.reset(new base::SharedMemory(handle, false));
+ CHECK(shared_memory_->Map(length));
+ CHECK(shared_memory_->memory());
+ shared_memory_length_ = length;
+
+ // Create the SyncSocket using the handle.
+ base::SyncSocket::Handle sync_socket_handle;
+#if defined(OS_WIN)
+ sync_socket_handle = socket_handle;
+#else
+ sync_socket_handle = socket_descriptor.fd;
+#endif
+ sync_socket_.reset(new base::SyncSocket(sync_socket_handle));
+
+ // And then delegate the call to the mock method.
+ OnStreamCreated(stream_id, length);
+ }
+
+ void OnStreamStateChanged(const IPC::Message& msg, int stream_id,
+ media::AudioOutputIPCDelegate::State state) {
+ switch (state) {
+ case media::AudioOutputIPCDelegate::kPlaying:
+ OnStreamPlaying(stream_id);
+ break;
+ case media::AudioOutputIPCDelegate::kPaused:
+ OnStreamPaused(stream_id);
+ break;
+ case media::AudioOutputIPCDelegate::kError:
+ OnStreamError(stream_id);
+ break;
+ default:
+ FAIL() << "Unknown stream state";
+ break;
+ }
+ }
+
+ scoped_ptr<base::SharedMemory> shared_memory_;
+ scoped_ptr<base::SyncSocket> sync_socket_;
+ uint32 shared_memory_length_;
+
+ DISALLOW_COPY_AND_ASSIGN(MockAudioRendererHost);
+};
+
+ACTION_P(QuitMessageLoop, message_loop) {
+ message_loop->PostTask(FROM_HERE, base::MessageLoop::QuitClosure());
+}
+
+class AudioRendererHostTest : public testing::Test {
+ public:
+ AudioRendererHostTest() : is_stream_active_(false) {}
+
+ protected:
+ virtual void SetUp() {
+ // Create a message loop so AudioRendererHost can use it.
+ message_loop_.reset(new base::MessageLoop(base::MessageLoop::TYPE_IO));
+
+ // Claim to be on both the UI and IO threads to pass all the DCHECKS.
+ io_thread_.reset(new BrowserThreadImpl(BrowserThread::IO,
+ message_loop_.get()));
+ ui_thread_.reset(new BrowserThreadImpl(BrowserThread::UI,
+ message_loop_.get()));
+ audio_manager_.reset(media::AudioManager::Create());
+ media_stream_manager_.reset(new MediaStreamManager(audio_manager_.get()));
+ media_stream_manager_->UseFakeDevice();
+ observer_.reset(new MockMediaInternals());
+ host_ = new MockAudioRendererHost(
+ audio_manager_.get(), &mirroring_manager_, observer_.get(),
+ media_stream_manager_.get());
+
+ // Simulate IPC channel connected.
+ host_->OnChannelConnected(base::GetCurrentProcId());
+ }
+
+ virtual void TearDown() {
+ // Simulate closing the IPC channel.
+ host_->OnChannelClosing();
+
+    // Release the reference to the mock object. The object will be destroyed
+    // on message_loop_.
+ host_ = NULL;
+
+ // We need to continue running message_loop_ to complete all destructions.
+ SyncWithAudioThread();
+ audio_manager_.reset();
+
+ // Make sure the stream has been deleted before continuing.
+ while (is_stream_active_)
+ message_loop_->Run();
+
+ io_thread_.reset();
+ ui_thread_.reset();
+
+ // Delete the IO message loop. This will cause the MediaStreamManager to be
+ // notified so it will stop its device thread and device managers.
+ message_loop_.reset();
+ }
+
+ void Create(bool unified_stream) {
+ EXPECT_CALL(*observer_,
+ OnSetAudioStreamStatus(_, kStreamId, "created"));
+ EXPECT_CALL(*host_.get(), OnStreamCreated(kStreamId, _))
+ .WillOnce(DoAll(Assign(&is_stream_active_, true),
+ QuitMessageLoop(message_loop_.get())));
+ EXPECT_CALL(mirroring_manager_,
+ AddDiverter(kRenderProcessId, kRenderViewId, NotNull()))
+ .RetiresOnSaturation();
+
+    // Send a create stream message to the AudioRendererHost and wait until
+    // we receive the created message.
+ int session_id;
+ media::AudioParameters params;
+ if (unified_stream) {
+ // Use AudioInputDeviceManager::kFakeOpenSessionId as the session id to
+ // pass the permission check.
+ session_id = AudioInputDeviceManager::kFakeOpenSessionId;
+ params = media::AudioParameters(
+ media::AudioParameters::AUDIO_FAKE,
+ media::CHANNEL_LAYOUT_STEREO,
+ 2,
+ media::AudioParameters::kAudioCDSampleRate, 16,
+ media::AudioParameters::kAudioCDSampleRate / 10);
+ } else {
+ session_id = 0;
+ params = media::AudioParameters(
+ media::AudioParameters::AUDIO_FAKE,
+ media::CHANNEL_LAYOUT_STEREO,
+ media::AudioParameters::kAudioCDSampleRate, 16,
+ media::AudioParameters::kAudioCDSampleRate / 10);
+ }
+ host_->OnCreateStream(kStreamId, kRenderViewId, session_id, params);
+ message_loop_->Run();
+
+ // At some point in the future, a corresponding RemoveDiverter() call must
+ // be made.
+ EXPECT_CALL(mirroring_manager_,
+ RemoveDiverter(kRenderProcessId, kRenderViewId, NotNull()))
+ .RetiresOnSaturation();
+
+ // All created streams should ultimately be closed.
+ EXPECT_CALL(*observer_,
+ OnSetAudioStreamStatus(_, kStreamId, "closed"));
+
+ // Expect the audio stream will be deleted at some later point.
+ EXPECT_CALL(*observer_, OnDeleteAudioStream(_, kStreamId))
+ .WillOnce(DoAll(Assign(&is_stream_active_, false),
+ QuitMessageLoop(message_loop_.get())));
+ }
+
+ void Close() {
+ // Send a message to AudioRendererHost to tell it we want to close the
+ // stream.
+ host_->OnCloseStream(kStreamId);
+ if (is_stream_active_)
+ message_loop_->Run();
+ else
+ message_loop_->RunUntilIdle();
+ }
+
+ void Play() {
+ EXPECT_CALL(*observer_,
+ OnSetAudioStreamPlaying(_, kStreamId, true));
+ EXPECT_CALL(*host_.get(), OnStreamPlaying(kStreamId))
+ .WillOnce(QuitMessageLoop(message_loop_.get()));
+
+ host_->OnPlayStream(kStreamId);
+ message_loop_->Run();
+ }
+
+ void Pause() {
+ EXPECT_CALL(*observer_,
+ OnSetAudioStreamPlaying(_, kStreamId, false));
+ EXPECT_CALL(*host_.get(), OnStreamPaused(kStreamId))
+ .WillOnce(QuitMessageLoop(message_loop_.get()));
+
+ host_->OnPauseStream(kStreamId);
+ message_loop_->Run();
+ }
+
+ void SetVolume(double volume) {
+ EXPECT_CALL(*observer_,
+ OnSetAudioStreamVolume(_, kStreamId, volume));
+
+ host_->OnSetVolume(kStreamId, volume);
+ message_loop_->RunUntilIdle();
+ }
+
+ void SimulateError() {
+ EXPECT_CALL(*observer_,
+ OnSetAudioStreamStatus(_, kStreamId, "error"));
+ EXPECT_EQ(1u, host_->audio_entries_.size())
+ << "Calls Create() before calling this method";
+
+ // Expect an error signal sent through IPC.
+ EXPECT_CALL(*host_.get(), OnStreamError(kStreamId));
+
+ // Simulate an error sent from the audio device.
+ host_->ReportErrorAndClose(kStreamId);
+ SyncWithAudioThread();
+
+ // Expect the audio stream record is removed.
+ EXPECT_EQ(0u, host_->audio_entries_.size());
+ }
+
+ // Called on the audio thread.
+ static void PostQuitMessageLoop(base::MessageLoop* message_loop) {
+ message_loop->PostTask(FROM_HERE, base::MessageLoop::QuitClosure());
+ }
+
+ // Called on the main thread.
+ static void PostQuitOnAudioThread(media::AudioManager* audio_manager,
+ base::MessageLoop* message_loop) {
+ audio_manager->GetMessageLoop()->PostTask(FROM_HERE,
+ base::Bind(&PostQuitMessageLoop, message_loop));
+ }
+
+  // SyncWithAudioThread() waits until all pending tasks on the audio thread
+  // are executed while also processing pending tasks in message_loop_ on the
+  // current thread. It is used to synchronize with the audio thread when we
+  // are closing an audio stream.
+ void SyncWithAudioThread() {
+    // Don't use a scoped_refptr to addref the media::AudioManager when
+    // posting to the thread that the manager itself owns.
+ message_loop_->PostTask(
+ FROM_HERE, base::Bind(&PostQuitOnAudioThread,
+ base::Unretained(audio_manager_.get()),
+ message_loop_.get()));
+ message_loop_->Run();
+ }
+
+ private:
+ scoped_ptr<MockMediaInternals> observer_;
+ MockAudioMirroringManager mirroring_manager_;
+ scoped_refptr<MockAudioRendererHost> host_;
+ scoped_ptr<base::MessageLoop> message_loop_;
+ scoped_ptr<BrowserThreadImpl> io_thread_;
+ scoped_ptr<BrowserThreadImpl> ui_thread_;
+ scoped_ptr<media::AudioManager> audio_manager_;
+ scoped_ptr<MediaStreamManager> media_stream_manager_;
+
+ bool is_stream_active_;
+
+ DISALLOW_COPY_AND_ASSIGN(AudioRendererHostTest);
+};
+
+TEST_F(AudioRendererHostTest, CreateAndClose) {
+ Create(false);
+ Close();
+}
+
+// Simulate the case where a stream is not properly closed.
+TEST_F(AudioRendererHostTest, CreateAndShutdown) {
+ Create(false);
+}
+
+TEST_F(AudioRendererHostTest, CreatePlayAndClose) {
+ Create(false);
+ Play();
+ Close();
+}
+
+TEST_F(AudioRendererHostTest, CreatePlayPauseAndClose) {
+ Create(false);
+ Play();
+ Pause();
+ Close();
+}
+
+TEST_F(AudioRendererHostTest, SetVolume) {
+ Create(false);
+ SetVolume(0.5);
+ Play();
+ Pause();
+ Close();
+}
+
+// Simulate the case where a stream is not properly closed.
+TEST_F(AudioRendererHostTest, CreatePlayAndShutdown) {
+ Create(false);
+ Play();
+}
+
+// Simulate the case where a stream is not properly closed.
+TEST_F(AudioRendererHostTest, CreatePlayPauseAndShutdown) {
+ Create(false);
+ Play();
+ Pause();
+}
+
+TEST_F(AudioRendererHostTest, SimulateError) {
+ Create(false);
+ Play();
+ SimulateError();
+}
+
+// Simulate the case where an error is generated in the browser process and
+// the audio device is closed, but the render process tries to close the
+// audio stream again.
+TEST_F(AudioRendererHostTest, SimulateErrorAndClose) {
+ Create(false);
+ Play();
+ SimulateError();
+ Close();
+}
+
+TEST_F(AudioRendererHostTest, CreateUnifiedStreamAndClose) {
+ Create(true);
+ Close();
+}
+
+// TODO(hclam): Add tests for data conversation in low latency mode.
+
+} // namespace content
diff --git a/chromium/content/browser/renderer_host/media/audio_sync_reader.cc b/chromium/content/browser/renderer_host/media/audio_sync_reader.cc
new file mode 100644
index 00000000000..dea8ae2a207
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/audio_sync_reader.cc
@@ -0,0 +1,200 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "content/browser/renderer_host/media/audio_sync_reader.h"
+
+#include <algorithm>
+
+#include "base/command_line.h"
+#include "base/memory/shared_memory.h"
+#include "base/metrics/histogram.h"
+#include "content/public/common/content_switches.h"
+#include "media/audio/audio_buffers_state.h"
+#include "media/audio/audio_parameters.h"
+#include "media/audio/shared_memory_util.h"
+
+using media::AudioBus;
+
+namespace content {
+
+AudioSyncReader::AudioSyncReader(base::SharedMemory* shared_memory,
+ const media::AudioParameters& params,
+ int input_channels)
+ : shared_memory_(shared_memory),
+ input_channels_(input_channels),
+ mute_audio_(CommandLine::ForCurrentProcess()->HasSwitch(
+ switches::kMuteAudio)),
+ renderer_callback_count_(0),
+ renderer_missed_callback_count_(0) {
+ packet_size_ = media::PacketSizeInBytes(shared_memory_->requested_size());
+ int input_memory_size = 0;
+ int output_memory_size = AudioBus::CalculateMemorySize(params);
+ if (input_channels_ > 0) {
+ // The input storage is after the output storage.
+ int frames = params.frames_per_buffer();
+ input_memory_size = AudioBus::CalculateMemorySize(input_channels_, frames);
+ char* input_data =
+ static_cast<char*>(shared_memory_->memory()) + output_memory_size;
+ input_bus_ = AudioBus::WrapMemory(input_channels_, frames, input_data);
+ }
+ DCHECK_EQ(packet_size_, output_memory_size + input_memory_size);
+ output_bus_ = AudioBus::WrapMemory(params, shared_memory->memory());
+}
+
+AudioSyncReader::~AudioSyncReader() {
+ if (!renderer_callback_count_)
+ return;
+
+ // Recording the percentage of deadline misses gives us a rough overview of
+ // how many users might be running into audio glitches.
+ int percentage_missed =
+ 100.0 * renderer_missed_callback_count_ / renderer_callback_count_;
+ UMA_HISTOGRAM_PERCENTAGE(
+ "Media.AudioRendererMissedDeadline", percentage_missed);
+}
+
+bool AudioSyncReader::DataReady() {
+ return !media::IsUnknownDataSize(shared_memory_, packet_size_);
+}
+
+// media::AudioOutputController::SyncReader implementations.
+void AudioSyncReader::UpdatePendingBytes(uint32 bytes) {
+ if (bytes != static_cast<uint32>(media::kPauseMark)) {
+    // Store an unknown data length into the buffer, so that we can later
+    // find out whether data has become available.
+ media::SetUnknownDataSize(shared_memory_, packet_size_);
+ }
+
+ if (socket_) {
+ socket_->Send(&bytes, sizeof(bytes));
+ }
+}
+
+int AudioSyncReader::Read(bool block, const AudioBus* source, AudioBus* dest) {
+ ++renderer_callback_count_;
+ if (!DataReady()) {
+ ++renderer_missed_callback_count_;
+
+ if (block)
+ WaitTillDataReady();
+ }
+
+ // Copy optional synchronized live audio input for consumption by renderer
+ // process.
+ if (source && input_bus_) {
+ DCHECK_EQ(source->channels(), input_bus_->channels());
+ // TODO(crogers): In some cases with device and sample-rate changes
+ // it's possible for an AOR to insert a resampler in the path.
+ // Because this is used with the Web Audio API, it'd be better
+ // to bypass the device change handling in AOR and instead let
+ // the renderer-side Web Audio code deal with this.
+ if (source->frames() == input_bus_->frames() &&
+ source->channels() == input_bus_->channels())
+ source->CopyTo(input_bus_.get());
+ else
+ input_bus_->Zero();
+ }
+
+  // Retrieve the actual number of bytes available from the shared memory. If
+  // the renderer has not completed rendering, this value will be invalid
+  // (still the marker stored in UpdatePendingBytes() above) and must be
+  // sanitized.
+ // TODO(dalecurtis): Technically this is not the exact size. Due to channel
+ // padding for alignment, there may be more data available than this; AudioBus
+ // will automatically do the right thing during CopyTo(). Rename this method
+ // to GetActualFrameCount().
+ uint32 size = media::GetActualDataSizeInBytes(shared_memory_, packet_size_);
+
+  // Compute the actual number of frames read. It's important to sanitize
+  // this value for a couple of reasons. First, it might still be the unknown
+  // data size marker. Second, the shared memory comes from a potentially
+  // untrusted source.
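+  // For example, with 2 channels of 4-byte samples, a |size| of 3528 bytes
+  // corresponds to 441 frames.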
+ int frames =
+ size / (sizeof(*output_bus_->channel(0)) * output_bus_->channels());
+ if (frames < 0)
+ frames = 0;
+ else if (frames > output_bus_->frames())
+ frames = output_bus_->frames();
+
+ if (mute_audio_) {
+ dest->Zero();
+ } else {
+ // Copy data from the shared memory into the caller's AudioBus.
+ output_bus_->CopyTo(dest);
+
+ // Zero out any unfilled frames in the destination bus.
+ dest->ZeroFramesPartial(frames, dest->frames() - frames);
+ }
+
+  // Zero out the entire output buffer to avoid stuttering/repeating buffers
+  // in the anomalous case where the renderer is unable to keep up with
+  // real-time.
+ output_bus_->Zero();
+
+  // Reset the buffer to the "unknown data size" marker in case the renderer
+  // never writes the length itself; this also lets DataReady() decide whether
+  // the next Read() needs to yield.
+ media::SetUnknownDataSize(shared_memory_, packet_size_);
+
+ // Return the actual number of frames read.
+ return frames;
+}
+
+void AudioSyncReader::Close() {
+ if (socket_) {
+ socket_->Close();
+ }
+}
+
+bool AudioSyncReader::Init() {
+ socket_.reset(new base::CancelableSyncSocket());
+ foreign_socket_.reset(new base::CancelableSyncSocket());
+ return base::CancelableSyncSocket::CreatePair(socket_.get(),
+ foreign_socket_.get());
+}
+
+#if defined(OS_WIN)
+bool AudioSyncReader::PrepareForeignSocketHandle(
+ base::ProcessHandle process_handle,
+ base::SyncSocket::Handle* foreign_handle) {
+ ::DuplicateHandle(GetCurrentProcess(), foreign_socket_->handle(),
+ process_handle, foreign_handle,
+ 0, FALSE, DUPLICATE_SAME_ACCESS);
+  return *foreign_handle != 0;
+}
+#else
+bool AudioSyncReader::PrepareForeignSocketHandle(
+ base::ProcessHandle process_handle,
+ base::FileDescriptor* foreign_handle) {
+ foreign_handle->fd = foreign_socket_->handle();
+ foreign_handle->auto_close = false;
+  return foreign_handle->fd != -1;
+}
+#endif
+
+void AudioSyncReader::WaitTillDataReady() {
+ base::TimeTicks start = base::TimeTicks::Now();
+ const base::TimeDelta kMaxWait = base::TimeDelta::FromMilliseconds(20);
+#if defined(OS_WIN)
+ // Sleep(0) on Windows lets the other threads run.
+ const base::TimeDelta kSleep = base::TimeDelta::FromMilliseconds(0);
+#else
+  // We want to sleep for a bit here, as otherwise a backgrounded renderer won't
+  // get enough CPU to send the data and the high-priority thread in the browser
+  // will use up a core, causing even more skips.
+ const base::TimeDelta kSleep = base::TimeDelta::FromMilliseconds(2);
+#endif
+ base::TimeDelta time_since_start;
+ do {
+ base::PlatformThread::Sleep(kSleep);
+ time_since_start = base::TimeTicks::Now() - start;
+ } while (!DataReady() && time_since_start < kMaxWait);
+ UMA_HISTOGRAM_CUSTOM_TIMES("Media.AudioOutputControllerDataNotReady",
+ time_since_start,
+ base::TimeDelta::FromMilliseconds(1),
+ base::TimeDelta::FromMilliseconds(1000),
+ 50);
+}
+
+} // namespace content
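As an aside on the Read() path above: the byte count reported back through shared memory is untrusted and may still hold the unknown-data-size marker, so it must be clamped before use. Below is a minimal, standalone sketch of that clamping, using plain integers instead of the real AudioBus and shared-memory types; the constants are illustrative only and not taken from the patch.

// Illustrative sketch only; not part of the patch. Mirrors the frame-count
// sanitization performed in AudioSyncReader::Read().
#include <cstdint>
#include <cstdio>

// Converts a byte count reported via shared memory into a frame count,
// clamped to [0, max_frames]. The value is untrusted: it may still be the
// unknown-data-size marker or arbitrary data from a compromised renderer.
int SanitizeFrameCount(uint32_t reported_bytes,
                       int bytes_per_sample,
                       int channels,
                       int max_frames) {
  int64_t frames =
      static_cast<int64_t>(reported_bytes) / (bytes_per_sample * channels);
  if (frames < 0)
    frames = 0;
  if (frames > max_frames)
    frames = max_frames;
  return static_cast<int>(frames);
}

int main() {
  // 480 frames of stereo float samples == 3840 bytes.
  std::printf("%d\n", SanitizeFrameCount(3840, sizeof(float), 2, 480));  // 480
  // An absurd value from an untrusted source is clamped to the bus size.
  std::printf("%d\n", SanitizeFrameCount(0xFFFFFFFFu, sizeof(float), 2, 480));  // 480
  return 0;
}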
diff --git a/chromium/content/browser/renderer_host/media/audio_sync_reader.h b/chromium/content/browser/renderer_host/media/audio_sync_reader.h
new file mode 100644
index 00000000000..fdfbf81a601
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/audio_sync_reader.h
@@ -0,0 +1,91 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CONTENT_BROWSER_RENDERER_HOST_MEDIA_AUDIO_SYNC_READER_H_
+#define CONTENT_BROWSER_RENDERER_HOST_MEDIA_AUDIO_SYNC_READER_H_
+
+#include "base/file_descriptor_posix.h"
+#include "base/process/process.h"
+#include "base/sync_socket.h"
+#include "base/synchronization/lock.h"
+#include "base/time/time.h"
+#include "media/audio/audio_output_controller.h"
+#include "media/base/audio_bus.h"
+
+namespace base {
+class SharedMemory;
+}
+
+namespace content {
+
+// An AudioOutputController::SyncReader implementation using SyncSocket. This
+// is used by AudioOutputController to provide a low latency data source for
+// transmitting audio packets between the browser process and the renderer
+// process.
+class AudioSyncReader : public media::AudioOutputController::SyncReader {
+ public:
+ AudioSyncReader(base::SharedMemory* shared_memory,
+ const media::AudioParameters& params,
+ int input_channels);
+
+ virtual ~AudioSyncReader();
+
+ // media::AudioOutputController::SyncReader implementations.
+ virtual void UpdatePendingBytes(uint32 bytes) OVERRIDE;
+ virtual int Read(bool block,
+ const media::AudioBus* source,
+ media::AudioBus* dest) OVERRIDE;
+ virtual void Close() OVERRIDE;
+
+ bool Init();
+ bool PrepareForeignSocketHandle(base::ProcessHandle process_handle,
+#if defined(OS_WIN)
+ base::SyncSocket::Handle* foreign_handle);
+#else
+ base::FileDescriptor* foreign_handle);
+#endif
+
+ private:
+ // Indicates whether the renderer has data available for reading.
+ bool DataReady();
+
+ // Blocks until DataReady() is true or a timeout expires.
+ void WaitTillDataReady();
+
+ base::SharedMemory* shared_memory_;
+
+ // Number of input channels for synchronized I/O.
+ int input_channels_;
+
+ // Mutes all incoming samples. This is used to prevent audible sound
+ // during automated testing.
+ bool mute_audio_;
+
+ // Socket for transmitting audio data.
+ scoped_ptr<base::CancelableSyncSocket> socket_;
+
+  // Socket to be used by the renderer. The reference is released after
+  // PrepareForeignSocketHandle() has been called and returned successfully.
+ scoped_ptr<base::CancelableSyncSocket> foreign_socket_;
+
+ // Shared memory wrapper used for transferring audio data to Read() callers.
+ scoped_ptr<media::AudioBus> output_bus_;
+
+ // Shared memory wrapper used for transferring audio data from Read() callers.
+ scoped_ptr<media::AudioBus> input_bus_;
+
+ // Maximum amount of audio data which can be transferred in one Read() call.
+ int packet_size_;
+
+ // Track the number of times the renderer missed its real-time deadline and
+ // report a UMA stat during destruction.
+ size_t renderer_callback_count_;
+ size_t renderer_missed_callback_count_;
+
+ DISALLOW_COPY_AND_ASSIGN(AudioSyncReader);
+};
+
+} // namespace content
+
+#endif // CONTENT_BROWSER_RENDERER_HOST_MEDIA_AUDIO_SYNC_READER_H_
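For orientation, here is a hedged sketch of how the browser side might wire up the class declared above: Init() creates the CancelableSyncSocket pair and PrepareForeignSocketHandle() duplicates the renderer's end. The surrounding setup (shared memory, AudioParameters, the renderer process handle) is assumed to exist elsewhere, e.g. in AudioRendererHost; this is not code from the patch.

// Sketch under the assumptions stated above; error handling is minimal.
#include "base/memory/scoped_ptr.h"
#include "content/browser/renderer_host/media/audio_sync_reader.h"

bool SetUpReader(base::SharedMemory* shared_memory,
                 const media::AudioParameters& params,
                 base::ProcessHandle renderer_process,
#if defined(OS_WIN)
                 base::SyncSocket::Handle* foreign_handle,
#else
                 base::FileDescriptor* foreign_handle,
#endif
                 scoped_ptr<content::AudioSyncReader>* out_reader) {
  scoped_ptr<content::AudioSyncReader> reader(
      new content::AudioSyncReader(shared_memory, params,
                                   0 /* no synchronized input */));
  // Init() creates the CancelableSyncSocket pair used for low-latency
  // signalling between the browser and the renderer.
  if (!reader->Init())
    return false;
  // Hand the renderer its end of the socket pair.
  if (!reader->PrepareForeignSocketHandle(renderer_process, foreign_handle))
    return false;
  *out_reader = reader.Pass();
  return true;
}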
diff --git a/chromium/content/browser/renderer_host/media/desktop_capture_device.cc b/chromium/content/browser/renderer_host/media/desktop_capture_device.cc
new file mode 100644
index 00000000000..9549633e80d
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/desktop_capture_device.cc
@@ -0,0 +1,462 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "content/browser/renderer_host/media/desktop_capture_device.h"
+
+#include "base/bind.h"
+#include "base/location.h"
+#include "base/logging.h"
+#include "base/sequenced_task_runner.h"
+#include "base/strings/string_number_conversions.h"
+#include "base/synchronization/lock.h"
+#include "base/threading/sequenced_worker_pool.h"
+#include "content/public/browser/browser_thread.h"
+#include "content/public/common/desktop_media_id.h"
+#include "media/base/video_util.h"
+#include "third_party/libyuv/include/libyuv/scale_argb.h"
+#include "third_party/webrtc/modules/desktop_capture/desktop_capturer.h"
+#include "third_party/webrtc/modules/desktop_capture/desktop_frame.h"
+#include "third_party/webrtc/modules/desktop_capture/screen_capturer.h"
+#include "third_party/webrtc/modules/desktop_capture/window_capturer.h"
+
+namespace content {
+
+namespace {
+
+const int kBytesPerPixel = 4;
+
+webrtc::DesktopRect ComputeLetterboxRect(
+ const webrtc::DesktopSize& max_size,
+ const webrtc::DesktopSize& source_size) {
+ gfx::Rect result = media::ComputeLetterboxRegion(
+ gfx::Rect(0, 0, max_size.width(), max_size.height()),
+ gfx::Size(source_size.width(), source_size.height()));
+ return webrtc::DesktopRect::MakeLTRB(
+ result.x(), result.y(), result.right(), result.bottom());
+}
+
+} // namespace
+
+class DesktopCaptureDevice::Core
+ : public base::RefCountedThreadSafe<Core>,
+ public webrtc::DesktopCapturer::Callback {
+ public:
+ Core(scoped_refptr<base::SequencedTaskRunner> task_runner,
+ scoped_ptr<webrtc::DesktopCapturer> capturer);
+
+ // Implementation of VideoCaptureDevice methods.
+ void Allocate(const media::VideoCaptureCapability& capture_format,
+ EventHandler* event_handler);
+ void Start();
+ void Stop();
+ void DeAllocate();
+
+ private:
+ friend class base::RefCountedThreadSafe<Core>;
+ virtual ~Core();
+
+ // webrtc::DesktopCapturer::Callback interface
+ virtual webrtc::SharedMemory* CreateSharedMemory(size_t size) OVERRIDE;
+ virtual void OnCaptureCompleted(webrtc::DesktopFrame* frame) OVERRIDE;
+
+ // Helper methods that run on the |task_runner_|. Posted from the
+ // corresponding public methods.
+ void DoAllocate(const media::VideoCaptureCapability& capture_format);
+ void DoStart();
+ void DoStop();
+ void DoDeAllocate();
+
+  // Chooses new output properties based on the supplied source size and the
+  // properties passed to Allocate(), and dispatches OnFrameInfo[Changed]
+  // notifications.
+ void RefreshCaptureFormat(const webrtc::DesktopSize& frame_size);
+
+ // Helper to schedule capture tasks.
+ void ScheduleCaptureTimer();
+
+  // Method that is scheduled on |task_runner_| to be called at regular
+  // intervals to capture a frame.
+ void OnCaptureTimer();
+
+ // Captures a single frame.
+ void DoCapture();
+
+ // Task runner used for capturing operations.
+ scoped_refptr<base::SequencedTaskRunner> task_runner_;
+
+ // The underlying DesktopCapturer instance used to capture frames.
+ scoped_ptr<webrtc::DesktopCapturer> desktop_capturer_;
+
+ // |event_handler_lock_| must be locked whenever |event_handler_| is used.
+ // It's necessary because DeAllocate() needs to reset it on the calling thread
+ // to ensure that the event handler is not called once DeAllocate() returns.
+ base::Lock event_handler_lock_;
+ EventHandler* event_handler_;
+
+ // Requested video capture format (width, height, frame rate, etc).
+ media::VideoCaptureCapability requested_format_;
+
+ // Actual video capture format being generated.
+ media::VideoCaptureCapability capture_format_;
+
+ // Size of frame most recently captured from the source.
+ webrtc::DesktopSize previous_frame_size_;
+
+ // DesktopFrame into which captured frames are down-scaled and/or letterboxed,
+ // depending upon the caller's requested capture capabilities. If frames can
+ // be returned to the caller directly then this is NULL.
+ scoped_ptr<webrtc::DesktopFrame> output_frame_;
+
+ // Sub-rectangle of |output_frame_| into which the source will be scaled
+ // and/or letterboxed.
+ webrtc::DesktopRect output_rect_;
+
+ // True between DoStart() and DoStop(). Can't just check |event_handler_|
+ // because |event_handler_| is used on the caller thread.
+ bool started_;
+
+  // True when a delayed OnCaptureTimer() task has been posted on
+  // |task_runner_|.
+ bool capture_task_posted_;
+
+  // True while waiting for |desktop_capturer_| to capture the current frame.
+ bool capture_in_progress_;
+
+ DISALLOW_COPY_AND_ASSIGN(Core);
+};
+
+DesktopCaptureDevice::Core::Core(
+ scoped_refptr<base::SequencedTaskRunner> task_runner,
+ scoped_ptr<webrtc::DesktopCapturer> capturer)
+ : task_runner_(task_runner),
+ desktop_capturer_(capturer.Pass()),
+ event_handler_(NULL),
+ started_(false),
+ capture_task_posted_(false),
+ capture_in_progress_(false) {
+}
+
+DesktopCaptureDevice::Core::~Core() {
+}
+
+void DesktopCaptureDevice::Core::Allocate(
+ const media::VideoCaptureCapability& capture_format,
+ EventHandler* event_handler) {
+ DCHECK_GT(capture_format.width, 0);
+ DCHECK_GT(capture_format.height, 0);
+ DCHECK_GT(capture_format.frame_rate, 0);
+
+ {
+ base::AutoLock auto_lock(event_handler_lock_);
+ event_handler_ = event_handler;
+ }
+
+ task_runner_->PostTask(
+ FROM_HERE,
+ base::Bind(&Core::DoAllocate, this, capture_format));
+}
+
+void DesktopCaptureDevice::Core::Start() {
+ task_runner_->PostTask(
+ FROM_HERE, base::Bind(&Core::DoStart, this));
+}
+
+void DesktopCaptureDevice::Core::Stop() {
+ task_runner_->PostTask(
+ FROM_HERE, base::Bind(&Core::DoStop, this));
+}
+
+void DesktopCaptureDevice::Core::DeAllocate() {
+ {
+ base::AutoLock auto_lock(event_handler_lock_);
+ event_handler_ = NULL;
+ }
+ task_runner_->PostTask(FROM_HERE, base::Bind(&Core::DoDeAllocate, this));
+}
+
+webrtc::SharedMemory*
+DesktopCaptureDevice::Core::CreateSharedMemory(size_t size) {
+ return NULL;
+}
+
+void DesktopCaptureDevice::Core::OnCaptureCompleted(
+ webrtc::DesktopFrame* frame) {
+ DCHECK(task_runner_->RunsTasksOnCurrentThread());
+ DCHECK(capture_in_progress_);
+
+ capture_in_progress_ = false;
+
+ if (!frame) {
+ LOG(ERROR) << "Failed to capture a frame.";
+ event_handler_->OnError();
+ return;
+ }
+
+ scoped_ptr<webrtc::DesktopFrame> owned_frame(frame);
+
+ // Handle initial frame size and size changes.
+ RefreshCaptureFormat(frame->size());
+
+ if (!started_)
+ return;
+
+ webrtc::DesktopSize output_size(capture_format_.width,
+ capture_format_.height);
+ size_t output_bytes = output_size.width() * output_size.height() *
+ webrtc::DesktopFrame::kBytesPerPixel;
+ const uint8_t* output_data = NULL;
+
+ if (frame->size().equals(output_size)) {
+ // If the captured frame matches the output size, we can return the pixel
+ // data directly, without scaling.
+ output_data = frame->data();
+ } else {
+ // Otherwise we need to down-scale and/or letterbox to the target format.
+
+ // Allocate a buffer of the correct size to scale the frame into.
+ // |output_frame_| is cleared whenever |output_rect_| changes, so we don't
+ // need to worry about clearing out stale pixel data in letterboxed areas.
+ if (!output_frame_) {
+ output_frame_.reset(new webrtc::BasicDesktopFrame(output_size));
+ memset(output_frame_->data(), 0, output_bytes);
+ }
+ DCHECK(output_frame_->size().equals(output_size));
+
+ // TODO(wez): Optimize this to scale only changed portions of the output,
+ // using ARGBScaleClip().
+ uint8_t* output_rect_data = output_frame_->data() +
+ output_frame_->stride() * output_rect_.top() +
+ webrtc::DesktopFrame::kBytesPerPixel * output_rect_.left();
+ libyuv::ARGBScale(frame->data(), frame->stride(),
+ frame->size().width(), frame->size().height(),
+ output_rect_data, output_frame_->stride(),
+ output_rect_.width(), output_rect_.height(),
+ libyuv::kFilterBilinear);
+ output_data = output_frame_->data();
+ }
+
+ base::AutoLock auto_lock(event_handler_lock_);
+ if (event_handler_) {
+ event_handler_->OnIncomingCapturedFrame(output_data, output_bytes,
+ base::Time::Now(), 0, false, false);
+ }
+}
+
+void DesktopCaptureDevice::Core::DoAllocate(
+ const media::VideoCaptureCapability& capture_format) {
+ DCHECK(task_runner_->RunsTasksOnCurrentThread());
+ DCHECK(desktop_capturer_);
+
+ requested_format_ = capture_format;
+
+ // Store requested frame rate and calculate expected delay.
+ capture_format_.frame_rate = requested_format_.frame_rate;
+ capture_format_.expected_capture_delay =
+ base::Time::kMillisecondsPerSecond / requested_format_.frame_rate;
+
+  // Support dynamic changes in resolution only if the requester also does.
+ if (requested_format_.frame_size_type ==
+ media::VariableResolutionVideoCaptureDevice) {
+ capture_format_.frame_size_type =
+ media::VariableResolutionVideoCaptureDevice;
+ }
+
+ // This capturer always outputs ARGB, non-interlaced.
+ capture_format_.color = media::VideoCaptureCapability::kARGB;
+ capture_format_.interlaced = false;
+
+ desktop_capturer_->Start(this);
+
+  // Capture the first frame, so that we can invoke the OnFrameInfo() callback.
+ DoCapture();
+}
+
+void DesktopCaptureDevice::Core::DoStart() {
+ DCHECK(task_runner_->RunsTasksOnCurrentThread());
+ started_ = true;
+ if (!capture_task_posted_) {
+ ScheduleCaptureTimer();
+ DoCapture();
+ }
+}
+
+void DesktopCaptureDevice::Core::DoStop() {
+ DCHECK(task_runner_->RunsTasksOnCurrentThread());
+ started_ = false;
+ output_frame_.reset();
+ previous_frame_size_.set(0, 0);
+}
+
+void DesktopCaptureDevice::Core::DoDeAllocate() {
+ DCHECK(task_runner_->RunsTasksOnCurrentThread());
+ DoStop();
+ desktop_capturer_.reset();
+}
+
+void DesktopCaptureDevice::Core::RefreshCaptureFormat(
+ const webrtc::DesktopSize& frame_size) {
+ if (previous_frame_size_.equals(frame_size))
+ return;
+
+  // Clear the output frame, if any, since it will need either resizing or
+  // clearing of stale data in the letterbox areas anyway.
+ output_frame_.reset();
+
+ if (previous_frame_size_.is_empty() ||
+ requested_format_.frame_size_type ==
+ media::VariableResolutionVideoCaptureDevice) {
+    // If this is the first frame, or the receiver supports variable resolution,
+    // then determine the output size by treating the requested width & height
+    // as maxima.
+ if (frame_size.width() > requested_format_.width ||
+ frame_size.height() > requested_format_.height) {
+ output_rect_ = ComputeLetterboxRect(
+ webrtc::DesktopSize(requested_format_.width,
+ requested_format_.height),
+ frame_size);
+ output_rect_.Translate(-output_rect_.left(), -output_rect_.top());
+ } else {
+ output_rect_ = webrtc::DesktopRect::MakeSize(frame_size);
+ }
+ capture_format_.width = output_rect_.width();
+ capture_format_.height = output_rect_.height();
+
+ {
+ base::AutoLock auto_lock(event_handler_lock_);
+ if (event_handler_) {
+ if (previous_frame_size_.is_empty()) {
+ event_handler_->OnFrameInfo(capture_format_);
+ } else {
+ event_handler_->OnFrameInfoChanged(capture_format_);
+ }
+ }
+ }
+ } else {
+ // Otherwise the output frame size cannot change, so just scale and
+ // letterbox.
+ output_rect_ = ComputeLetterboxRect(
+ webrtc::DesktopSize(capture_format_.width, capture_format_.height),
+ frame_size);
+ }
+
+ previous_frame_size_ = frame_size;
+}
+
+void DesktopCaptureDevice::Core::ScheduleCaptureTimer() {
+ DCHECK(!capture_task_posted_);
+ capture_task_posted_ = true;
+ task_runner_->PostDelayedTask(
+ FROM_HERE, base::Bind(&Core::OnCaptureTimer, this),
+ base::TimeDelta::FromSeconds(1) / capture_format_.frame_rate);
+}
+
+void DesktopCaptureDevice::Core::OnCaptureTimer() {
+ DCHECK(capture_task_posted_);
+ capture_task_posted_ = false;
+
+ if (!started_)
+ return;
+
+ // Schedule a task for the next frame.
+ ScheduleCaptureTimer();
+ DoCapture();
+}
+
+void DesktopCaptureDevice::Core::DoCapture() {
+ DCHECK(task_runner_->RunsTasksOnCurrentThread());
+ DCHECK(!capture_in_progress_);
+
+ capture_in_progress_ = true;
+ desktop_capturer_->Capture(webrtc::DesktopRegion());
+
+ // Currently only synchronous implementations of DesktopCapturer are
+ // supported.
+ DCHECK(!capture_in_progress_);
+}
+
+// static
+scoped_ptr<media::VideoCaptureDevice> DesktopCaptureDevice::Create(
+ const DesktopMediaID& source) {
+ scoped_refptr<base::SequencedWorkerPool> blocking_pool =
+ BrowserThread::GetBlockingPool();
+ scoped_refptr<base::SequencedTaskRunner> task_runner =
+ blocking_pool->GetSequencedTaskRunner(
+ blocking_pool->GetSequenceToken());
+
+ switch (source.type) {
+ case DesktopMediaID::TYPE_SCREEN: {
+ scoped_ptr<webrtc::DesktopCapturer> capturer;
+
+#if defined(OS_CHROMEOS) && !defined(ARCH_CPU_ARMEL) && defined(USE_X11)
+      // ScreenCapturerX11 polls by default, due to poor driver support for
+      // DAMAGE. ChromeOS' drivers [can be patched to] support DAMAGE properly,
+      // so use it. However, the ARM driver does not seem to support it
+      // properly, so disable it on ARM. See http://crbug.com/230105.
+ capturer.reset(webrtc::ScreenCapturer::CreateWithXDamage(true));
+#elif defined(OS_WIN)
+ // ScreenCapturerWin disables Aero by default. We don't want it disabled
+ // for WebRTC screen capture, though.
+ capturer.reset(
+ webrtc::ScreenCapturer::CreateWithDisableAero(false));
+#else
+ capturer.reset(webrtc::ScreenCapturer::Create());
+#endif
+
+ return scoped_ptr<media::VideoCaptureDevice>(new DesktopCaptureDevice(
+ task_runner, capturer.Pass()));
+ }
+
+ case DesktopMediaID::TYPE_WINDOW: {
+ scoped_ptr<webrtc::WindowCapturer> capturer(
+ webrtc::WindowCapturer::Create());
+
+ if (!capturer || !capturer->SelectWindow(source.id)) {
+ return scoped_ptr<media::VideoCaptureDevice>();
+ }
+
+ return scoped_ptr<media::VideoCaptureDevice>(new DesktopCaptureDevice(
+ task_runner, capturer.PassAs<webrtc::DesktopCapturer>()));
+ }
+
+ default: {
+ NOTREACHED();
+ return scoped_ptr<media::VideoCaptureDevice>();
+ }
+ }
+}
+
+DesktopCaptureDevice::DesktopCaptureDevice(
+ scoped_refptr<base::SequencedTaskRunner> task_runner,
+ scoped_ptr<webrtc::DesktopCapturer> capturer)
+ : core_(new Core(task_runner, capturer.Pass())),
+ name_("", "") {
+}
+
+DesktopCaptureDevice::~DesktopCaptureDevice() {
+ DeAllocate();
+}
+
+void DesktopCaptureDevice::Allocate(
+ const media::VideoCaptureCapability& capture_format,
+ EventHandler* event_handler) {
+ core_->Allocate(capture_format, event_handler);
+}
+
+void DesktopCaptureDevice::Start() {
+ core_->Start();
+}
+
+void DesktopCaptureDevice::Stop() {
+ core_->Stop();
+}
+
+void DesktopCaptureDevice::DeAllocate() {
+ core_->DeAllocate();
+}
+
+const media::VideoCaptureDevice::Name& DesktopCaptureDevice::device_name() {
+ return name_;
+}
+
+} // namespace content
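The scaling path above relies on ComputeLetterboxRect(), which wraps media::ComputeLetterboxRegion. For readers unfamiliar with the math, here is a plain-integer approximation of the aspect-preserving fit it computes; it is illustrative only and not the actual implementation.

#include <cstdint>
#include <cstdio>

struct Rect { int x, y, w, h; };

// Fits |src_w| x |src_h| inside |max_w| x |max_h| while preserving the aspect
// ratio, and centres the result (black bars fill the rest).
Rect Letterbox(int max_w, int max_h, int src_w, int src_h) {
  int64_t w = max_w, h = max_h;
  if (static_cast<int64_t>(src_w) * max_h > static_cast<int64_t>(src_h) * max_w)
    h = static_cast<int64_t>(src_h) * max_w / src_w;  // bars on top/bottom
  else
    w = static_cast<int64_t>(src_w) * max_h / src_h;  // bars on left/right
  Rect r;
  r.w = static_cast<int>(w);
  r.h = static_cast<int>(h);
  r.x = (max_w - r.w) / 2;
  r.y = (max_h - r.h) / 2;
  return r;
}

int main() {
  // A 1920x1080 desktop scaled into a 640x480 request: 640x360 at (0, 60).
  Rect r = Letterbox(640, 480, 1920, 1080);
  std::printf("%d,%d %dx%d\n", r.x, r.y, r.w, r.h);
  return 0;
}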
diff --git a/chromium/content/browser/renderer_host/media/desktop_capture_device.h b/chromium/content/browser/renderer_host/media/desktop_capture_device.h
new file mode 100644
index 00000000000..84422d3c5c9
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/desktop_capture_device.h
@@ -0,0 +1,58 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CONTENT_BROWSER_RENDERER_HOST_MEDIA_DESKTOP_CAPTURE_DEVICE_H_
+#define CONTENT_BROWSER_RENDERER_HOST_MEDIA_DESKTOP_CAPTURE_DEVICE_H_
+
+#include "base/memory/ref_counted.h"
+#include "base/memory/scoped_ptr.h"
+#include "content/common/content_export.h"
+#include "media/video/capture/video_capture_device.h"
+
+namespace base {
+class SequencedTaskRunner;
+} // namespace base
+
+namespace webrtc {
+class DesktopCapturer;
+} // namespace webrtc
+
+namespace content {
+
+struct DesktopMediaID;
+
+// DesktopCaptureDevice implements VideoCaptureDevice for screens and windows.
+// It's essentially an adapter between webrtc::DesktopCapturer and
+// VideoCaptureDevice.
+class CONTENT_EXPORT DesktopCaptureDevice : public media::VideoCaptureDevice {
+ public:
+  // Creates a capturer for the specified |source| and then creates a
+  // DesktopCaptureDevice for it. May return NULL on failure (e.g. if the
+  // requested window was destroyed).
+ static scoped_ptr<media::VideoCaptureDevice> Create(
+ const DesktopMediaID& source);
+
+ DesktopCaptureDevice(scoped_refptr<base::SequencedTaskRunner> task_runner,
+ scoped_ptr<webrtc::DesktopCapturer> desktop_capturer);
+ virtual ~DesktopCaptureDevice();
+
+ // VideoCaptureDevice interface.
+ virtual void Allocate(const media::VideoCaptureCapability& capture_format,
+ EventHandler* observer) OVERRIDE;
+ virtual void Start() OVERRIDE;
+ virtual void Stop() OVERRIDE;
+ virtual void DeAllocate() OVERRIDE;
+ virtual const Name& device_name() OVERRIDE;
+
+ private:
+ class Core;
+ scoped_refptr<Core> core_;
+ Name name_;
+
+ DISALLOW_COPY_AND_ASSIGN(DesktopCaptureDevice);
+};
+
+} // namespace content
+
+#endif // CONTENT_BROWSER_RENDERER_HOST_MEDIA_DESKTOP_CAPTURE_DEVICE_H_
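A hedged usage sketch for the interface above, showing the expected Create() → Allocate() → Start() lifecycle for a full-screen source. The DesktopMediaID field names (|type|, |id|) are inferred from their use in the .cc file, and the VideoCaptureCapability constructor arguments mirror the ones used in the unit tests below; treat the specifics as assumptions rather than documented API.

#include "content/browser/renderer_host/media/desktop_capture_device.h"
#include "content/public/common/desktop_media_id.h"

void StartScreenCapture(media::VideoCaptureDevice::EventHandler* observer) {
  content::DesktopMediaID source;
  source.type = content::DesktopMediaID::TYPE_SCREEN;

  scoped_ptr<media::VideoCaptureDevice> device =
      content::DesktopCaptureDevice::Create(source);
  if (!device)
    return;  // e.g. no capturer is available on this platform.

  media::VideoCaptureCapability format(
      640, 480, 30, media::VideoCaptureCapability::kI420, 0, false,
      media::ConstantResolutionVideoCaptureDevice);
  device->Allocate(format, observer);
  device->Start();
  // ... later: device->Stop(); device->DeAllocate();
}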
diff --git a/chromium/content/browser/renderer_host/media/desktop_capture_device_unittest.cc b/chromium/content/browser/renderer_host/media/desktop_capture_device_unittest.cc
new file mode 100644
index 00000000000..cf050f5a210
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/desktop_capture_device_unittest.cc
@@ -0,0 +1,271 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "content/browser/renderer_host/media/desktop_capture_device.h"
+
+#include "base/basictypes.h"
+#include "base/sequenced_task_runner.h"
+#include "base/synchronization/waitable_event.h"
+#include "base/test/test_timeouts.h"
+#include "base/threading/sequenced_worker_pool.h"
+#include "base/time/time.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "third_party/webrtc/modules/desktop_capture/desktop_frame.h"
+#include "third_party/webrtc/modules/desktop_capture/desktop_geometry.h"
+#include "third_party/webrtc/modules/desktop_capture/screen_capturer.h"
+
+using ::testing::_;
+using ::testing::AnyNumber;
+using ::testing::DoAll;
+using ::testing::Expectation;
+using ::testing::InvokeWithoutArgs;
+using ::testing::SaveArg;
+
+namespace content {
+
+namespace {
+
+MATCHER_P2(EqualsCaptureCapability, width, height, "") {
+ return arg.width == width && arg.height == height;
+}
+
+const int kTestFrameWidth1 = 100;
+const int kTestFrameHeight1 = 100;
+const int kTestFrameWidth2 = 200;
+const int kTestFrameHeight2 = 150;
+const int kBufferSize = kTestFrameWidth2 * kTestFrameHeight2 * 4;
+
+const int kFrameRate = 30;
+
+class MockFrameObserver : public media::VideoCaptureDevice::EventHandler {
+ public:
+ MOCK_METHOD0(ReserveOutputBuffer, scoped_refptr<media::VideoFrame>());
+ MOCK_METHOD0(OnError, void());
+ MOCK_METHOD1(OnFrameInfo, void(const media::VideoCaptureCapability& info));
+ MOCK_METHOD1(OnFrameInfoChanged,
+ void(const media::VideoCaptureCapability& info));
+ MOCK_METHOD6(OnIncomingCapturedFrame, void(const uint8* data,
+ int length,
+ base::Time timestamp,
+ int rotation,
+ bool flip_vert,
+ bool flip_horiz));
+ MOCK_METHOD2(OnIncomingCapturedVideoFrame,
+ void(const scoped_refptr<media::VideoFrame>& frame,
+ base::Time timestamp));
+};
+
+// TODO(sergeyu): Move this to a separate file where it can be reused.
+class FakeScreenCapturer : public webrtc::ScreenCapturer {
+ public:
+ FakeScreenCapturer()
+ : callback_(NULL),
+ frame_index_(0) {
+ }
+ virtual ~FakeScreenCapturer() {}
+
+ // VideoFrameCapturer interface.
+ virtual void Start(Callback* callback) OVERRIDE {
+ callback_ = callback;
+ }
+
+ virtual void Capture(const webrtc::DesktopRegion& region) OVERRIDE {
+ webrtc::DesktopSize size;
+ if (frame_index_ % 2 == 0) {
+ size = webrtc::DesktopSize(kTestFrameWidth1, kTestFrameHeight1);
+ } else {
+ size = webrtc::DesktopSize(kTestFrameWidth2, kTestFrameHeight2);
+ }
+ frame_index_++;
+ callback_->OnCaptureCompleted(new webrtc::BasicDesktopFrame(size));
+ }
+
+ virtual void SetMouseShapeObserver(
+ MouseShapeObserver* mouse_shape_observer) OVERRIDE {
+ }
+
+ private:
+ Callback* callback_;
+ int frame_index_;
+};
+
+class DesktopCaptureDeviceTest : public testing::Test {
+ public:
+ virtual void SetUp() OVERRIDE {
+ worker_pool_ = new base::SequencedWorkerPool(3, "TestCaptureThread");
+ }
+
+ protected:
+ scoped_refptr<base::SequencedWorkerPool> worker_pool_;
+};
+
+} // namespace
+
+// There is currently no screen capturer implementation for Ozone, so disable
+// the test that uses a real screen capturer instead of FakeScreenCapturer.
+// http://crbug.com/260318
+#if defined(USE_OZONE)
+#define MAYBE_Capture DISABLED_Capture
+#else
+#define MAYBE_Capture Capture
+#endif
+TEST_F(DesktopCaptureDeviceTest, MAYBE_Capture) {
+ scoped_ptr<webrtc::DesktopCapturer> capturer(
+ webrtc::ScreenCapturer::Create());
+ DesktopCaptureDevice capture_device(
+ worker_pool_->GetSequencedTaskRunner(worker_pool_->GetSequenceToken()),
+ capturer.Pass());
+ media::VideoCaptureCapability caps;
+ base::WaitableEvent done_event(false, false);
+ int frame_size;
+
+ MockFrameObserver frame_observer;
+ EXPECT_CALL(frame_observer, OnFrameInfo(_))
+ .WillOnce(SaveArg<0>(&caps));
+ EXPECT_CALL(frame_observer, OnError())
+ .Times(0);
+ EXPECT_CALL(frame_observer, OnIncomingCapturedFrame(_, _, _, _, _, _))
+ .WillRepeatedly(DoAll(
+ SaveArg<1>(&frame_size),
+ InvokeWithoutArgs(&done_event, &base::WaitableEvent::Signal)));
+
+ media::VideoCaptureCapability capture_format(
+ 640, 480, kFrameRate, media::VideoCaptureCapability::kI420, 0, false,
+ media::ConstantResolutionVideoCaptureDevice);
+ capture_device.Allocate(capture_format, &frame_observer);
+ capture_device.Start();
+ EXPECT_TRUE(done_event.TimedWait(TestTimeouts::action_max_timeout()));
+ capture_device.Stop();
+ capture_device.DeAllocate();
+
+ EXPECT_GT(caps.width, 0);
+ EXPECT_GT(caps.height, 0);
+ EXPECT_EQ(kFrameRate, caps.frame_rate);
+ EXPECT_EQ(media::VideoCaptureCapability::kARGB, caps.color);
+ EXPECT_FALSE(caps.interlaced);
+
+ EXPECT_EQ(caps.width * caps.height * 4, frame_size);
+}
+
+// Test that screen capturer behaves correctly if the source frame size changes
+// but the caller cannot cope with variable resolution output.
+TEST_F(DesktopCaptureDeviceTest, ScreenResolutionChangeConstantResolution) {
+ FakeScreenCapturer* mock_capturer = new FakeScreenCapturer();
+
+ DesktopCaptureDevice capture_device(
+ worker_pool_->GetSequencedTaskRunner(worker_pool_->GetSequenceToken()),
+ scoped_ptr<webrtc::DesktopCapturer>(mock_capturer));
+
+ media::VideoCaptureCapability caps;
+ base::WaitableEvent done_event(false, false);
+ int frame_size;
+
+ MockFrameObserver frame_observer;
+ Expectation frame_info_called = EXPECT_CALL(frame_observer, OnFrameInfo(_))
+ .WillOnce(SaveArg<0>(&caps));
+ EXPECT_CALL(frame_observer, OnFrameInfoChanged(_))
+ .Times(0);
+ EXPECT_CALL(frame_observer, OnError())
+ .Times(0);
+ EXPECT_CALL(frame_observer, OnIncomingCapturedFrame(_, _, _, _, _, _))
+ .After(frame_info_called)
+ .WillRepeatedly(DoAll(
+ SaveArg<1>(&frame_size),
+ InvokeWithoutArgs(&done_event, &base::WaitableEvent::Signal)));
+
+ media::VideoCaptureCapability capture_format(
+ kTestFrameWidth1,
+ kTestFrameHeight1,
+ kFrameRate,
+ media::VideoCaptureCapability::kI420,
+ 0,
+ false,
+ media::ConstantResolutionVideoCaptureDevice);
+
+ capture_device.Allocate(capture_format, &frame_observer);
+ capture_device.Start();
+
+ // Capture at least two frames, to ensure that the source frame size has
+ // changed while capturing.
+ EXPECT_TRUE(done_event.TimedWait(TestTimeouts::action_max_timeout()));
+ done_event.Reset();
+ EXPECT_TRUE(done_event.TimedWait(TestTimeouts::action_max_timeout()));
+
+ capture_device.Stop();
+ capture_device.DeAllocate();
+
+ EXPECT_EQ(kTestFrameWidth1, caps.width);
+ EXPECT_EQ(kTestFrameHeight1, caps.height);
+ EXPECT_EQ(kFrameRate, caps.frame_rate);
+ EXPECT_EQ(media::VideoCaptureCapability::kARGB, caps.color);
+ EXPECT_FALSE(caps.interlaced);
+
+ EXPECT_EQ(caps.width * caps.height * 4, frame_size);
+}
+
+// Test that screen capturer behaves correctly if the source frame size changes
+// and the caller can cope with variable resolution output.
+TEST_F(DesktopCaptureDeviceTest, ScreenResolutionChangeVariableResolution) {
+ FakeScreenCapturer* mock_capturer = new FakeScreenCapturer();
+
+ DesktopCaptureDevice capture_device(
+ worker_pool_->GetSequencedTaskRunner(worker_pool_->GetSequenceToken()),
+ scoped_ptr<webrtc::DesktopCapturer>(mock_capturer));
+
+ media::VideoCaptureCapability caps;
+ base::WaitableEvent done_event(false, false);
+
+ MockFrameObserver frame_observer;
+ Expectation frame_info_called = EXPECT_CALL(frame_observer, OnFrameInfo(_))
+ .WillOnce(SaveArg<0>(&caps));
+ Expectation first_info_changed = EXPECT_CALL(frame_observer,
+ OnFrameInfoChanged(EqualsCaptureCapability(kTestFrameWidth2,
+ kTestFrameHeight2)))
+ .After(frame_info_called);
+ Expectation second_info_changed = EXPECT_CALL(frame_observer,
+ OnFrameInfoChanged(EqualsCaptureCapability(kTestFrameWidth1,
+ kTestFrameHeight1)))
+ .After(first_info_changed);
+ EXPECT_CALL(frame_observer, OnFrameInfoChanged(_))
+ .Times(AnyNumber())
+ .After(second_info_changed);
+ EXPECT_CALL(frame_observer, OnError())
+ .Times(0);
+ EXPECT_CALL(frame_observer, OnIncomingCapturedFrame(_, _, _, _, _, _))
+ .After(frame_info_called)
+ .WillRepeatedly(
+ InvokeWithoutArgs(&done_event, &base::WaitableEvent::Signal));
+
+ media::VideoCaptureCapability capture_format(
+ kTestFrameWidth2,
+ kTestFrameHeight2,
+ kFrameRate,
+ media::VideoCaptureCapability::kI420,
+ 0,
+ false,
+ media::VariableResolutionVideoCaptureDevice);
+
+ capture_device.Allocate(capture_format, &frame_observer);
+ capture_device.Start();
+
+ // Capture at least three frames, to ensure that the source frame size has
+ // changed at least twice while capturing.
+ EXPECT_TRUE(done_event.TimedWait(TestTimeouts::action_max_timeout()));
+ done_event.Reset();
+ EXPECT_TRUE(done_event.TimedWait(TestTimeouts::action_max_timeout()));
+ done_event.Reset();
+ EXPECT_TRUE(done_event.TimedWait(TestTimeouts::action_max_timeout()));
+
+ capture_device.Stop();
+ capture_device.DeAllocate();
+
+ EXPECT_EQ(kTestFrameWidth1, caps.width);
+ EXPECT_EQ(kTestFrameHeight1, caps.height);
+ EXPECT_EQ(kFrameRate, caps.frame_rate);
+ EXPECT_EQ(media::VideoCaptureCapability::kARGB, caps.color);
+ EXPECT_FALSE(caps.interlaced);
+}
+
+} // namespace content
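The resolution-change tests above lean heavily on gmock's Expectation/.After() mechanism to assert that OnFrameInfo() precedes any frame delivery. Below is a minimal, self-contained illustration of that pattern, using a stand-in interface rather than a content/ class.

#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"

using ::testing::_;
using ::testing::Expectation;

class FrameSink {
 public:
  virtual ~FrameSink() {}
  virtual void OnFrameInfo(int width, int height) = 0;
  virtual void OnFrame(int size) = 0;
};

class MockFrameSink : public FrameSink {
 public:
  MOCK_METHOD2(OnFrameInfo, void(int width, int height));
  MOCK_METHOD1(OnFrame, void(int size));
};

TEST(OrderingSketch, FrameInfoBeforeFrames) {
  MockFrameSink sink;
  // The Expectation handle lets later expectations declare a partial order.
  Expectation info = EXPECT_CALL(sink, OnFrameInfo(_, _)).Times(1);
  EXPECT_CALL(sink, OnFrame(_)).After(info);

  sink.OnFrameInfo(640, 480);
  sink.OnFrame(640 * 480 * 4);
}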
diff --git a/chromium/content/browser/renderer_host/media/device_request_message_filter.cc b/chromium/content/browser/renderer_host/media/device_request_message_filter.cc
new file mode 100644
index 00000000000..770e800d402
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/device_request_message_filter.cc
@@ -0,0 +1,219 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "content/browser/renderer_host/media/device_request_message_filter.h"
+
+#include "base/strings/string_number_conversions.h"
+#include "base/strings/string_util.h"
+#include "content/browser/browser_main_loop.h"
+#include "content/browser/renderer_host/media/media_stream_manager.h"
+#include "content/common/media/media_stream_messages.h"
+#include "content/public/browser/resource_context.h"
+#include "crypto/hmac.h"
+
+// Clears the MediaStreamDevice.name from all devices in |devices|.
+static void ClearDeviceLabels(content::StreamDeviceInfoArray* devices) {
+ for (content::StreamDeviceInfoArray::iterator device_itr = devices->begin();
+ device_itr != devices->end();
+ ++device_itr) {
+ device_itr->device.name.clear();
+ }
+}
+
+namespace content {
+
+DeviceRequestMessageFilter::DeviceRequestMessageFilter(
+ ResourceContext* resource_context,
+ MediaStreamManager* media_stream_manager)
+ : resource_context_(resource_context),
+ media_stream_manager_(media_stream_manager) {
+ DCHECK(resource_context);
+ DCHECK(media_stream_manager);
+}
+
+DeviceRequestMessageFilter::~DeviceRequestMessageFilter() {
+ DCHECK(requests_.empty());
+}
+
+struct DeviceRequestMessageFilter::DeviceRequest {
+ DeviceRequest(int request_id,
+ const GURL& origin,
+ const std::string& audio_devices_label,
+ const std::string& video_devices_label)
+ : request_id(request_id),
+ origin(origin),
+ has_audio_returned(false),
+ has_video_returned(false),
+ audio_devices_label(audio_devices_label),
+ video_devices_label(video_devices_label) {}
+
+ int request_id;
+ GURL origin;
+ bool has_audio_returned;
+ bool has_video_returned;
+ std::string audio_devices_label;
+ std::string video_devices_label;
+ StreamDeviceInfoArray audio_devices;
+ StreamDeviceInfoArray video_devices;
+};
+
+void DeviceRequestMessageFilter::StreamGenerated(
+ const std::string& label,
+ const StreamDeviceInfoArray& audio_devices,
+ const StreamDeviceInfoArray& video_devices) {
+ NOTIMPLEMENTED();
+}
+
+void DeviceRequestMessageFilter::StreamGenerationFailed(
+ const std::string& label) {
+ NOTIMPLEMENTED();
+}
+
+void DeviceRequestMessageFilter::StopGeneratedStream(
+ const std::string& label) {
+ NOTIMPLEMENTED();
+}
+
+void DeviceRequestMessageFilter::DevicesEnumerated(
+ const std::string& label,
+ const StreamDeviceInfoArray& new_devices) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ // Look up the DeviceRequest by id.
+ DeviceRequestList::iterator request_it = requests_.begin();
+ for (; request_it != requests_.end(); ++request_it) {
+ if (label == request_it->audio_devices_label ||
+ label == request_it->video_devices_label) {
+ break;
+ }
+ }
+ DCHECK(request_it != requests_.end());
+
+ StreamDeviceInfoArray* audio_devices = &request_it->audio_devices;
+ StreamDeviceInfoArray* video_devices = &request_it->video_devices;
+
+ // Store hmac'd device ids instead of raw device ids.
+ if (label == request_it->audio_devices_label) {
+ request_it->has_audio_returned = true;
+ DCHECK(audio_devices->empty());
+ HmacDeviceIds(request_it->origin, new_devices, audio_devices);
+ } else {
+ DCHECK(label == request_it->video_devices_label);
+ request_it->has_video_returned = true;
+ DCHECK(video_devices->empty());
+ HmacDeviceIds(request_it->origin, new_devices, video_devices);
+ }
+
+ if (!request_it->has_audio_returned || !request_it->has_video_returned) {
+ // Wait for the rest of the devices to complete.
+ return;
+ }
+
+ // Query for mic and camera permissions.
+ if (!resource_context_->AllowMicAccess(request_it->origin))
+ ClearDeviceLabels(audio_devices);
+ if (!resource_context_->AllowCameraAccess(request_it->origin))
+ ClearDeviceLabels(video_devices);
+
+ // Both audio and video devices are ready for copying.
+ StreamDeviceInfoArray all_devices = *audio_devices;
+ all_devices.insert(
+ all_devices.end(), video_devices->begin(), video_devices->end());
+
+ Send(new MediaStreamMsg_GetSourcesACK(request_it->request_id, all_devices));
+
+ // TODO(vrk): Rename StopGeneratedStream() to CancelDeviceRequest().
+ media_stream_manager_->StopGeneratedStream(request_it->audio_devices_label);
+ media_stream_manager_->StopGeneratedStream(request_it->video_devices_label);
+ requests_.erase(request_it);
+}
+
+void DeviceRequestMessageFilter::DeviceOpened(
+ const std::string& label,
+ const StreamDeviceInfo& video_device) {
+ NOTIMPLEMENTED();
+}
+
+bool DeviceRequestMessageFilter::OnMessageReceived(const IPC::Message& message,
+ bool* message_was_ok) {
+ bool handled = true;
+ IPC_BEGIN_MESSAGE_MAP_EX(DeviceRequestMessageFilter, message, *message_was_ok)
+ IPC_MESSAGE_HANDLER(MediaStreamHostMsg_GetSources, OnGetSources)
+ IPC_MESSAGE_UNHANDLED(handled = false)
+ IPC_END_MESSAGE_MAP_EX()
+ return handled;
+}
+
+void DeviceRequestMessageFilter::OnChannelClosing() {
+ BrowserMessageFilter::OnChannelClosing();
+
+ // Since the IPC channel is gone, cancel outstanding device requests.
+ for (DeviceRequestList::iterator it = requests_.begin();
+ it != requests_.end();
+ ++it) {
+ // TODO(vrk): Rename StopGeneratedStream() to CancelDeviceRequest().
+ media_stream_manager_->StopGeneratedStream(it->audio_devices_label);
+ media_stream_manager_->StopGeneratedStream(it->video_devices_label);
+ }
+ requests_.clear();
+}
+
+void DeviceRequestMessageFilter::HmacDeviceIds(
+ const GURL& origin,
+ const StreamDeviceInfoArray& raw_devices,
+ StreamDeviceInfoArray* devices_with_guids) {
+ DCHECK(devices_with_guids);
+
+  // Replace raw ids with HMAC'd ids before returning them to the renderer
+  // process.
+ for (StreamDeviceInfoArray::const_iterator device_itr = raw_devices.begin();
+ device_itr != raw_devices.end();
+ ++device_itr) {
+ crypto::HMAC hmac(crypto::HMAC::SHA256);
+ const size_t digest_length = hmac.DigestLength();
+ std::vector<uint8> digest(digest_length);
+ bool result = hmac.Init(origin.spec()) &&
+ hmac.Sign(device_itr->device.id, &digest[0], digest.size());
+ DCHECK(result);
+ if (result) {
+ StreamDeviceInfo current_device_info = *device_itr;
+ current_device_info.device.id =
+ StringToLowerASCII(base::HexEncode(&digest[0], digest.size()));
+ devices_with_guids->push_back(current_device_info);
+ }
+ }
+}
+
+bool DeviceRequestMessageFilter::DoesRawIdMatchGuid(
+ const GURL& security_origin,
+ const std::string& device_guid,
+ const std::string& raw_device_id) {
+ crypto::HMAC hmac(crypto::HMAC::SHA256);
+ bool result = hmac.Init(security_origin.spec());
+ DCHECK(result);
+ std::vector<uint8> converted_guid;
+ base::HexStringToBytes(device_guid, &converted_guid);
+ return hmac.Verify(
+ raw_device_id,
+ base::StringPiece(reinterpret_cast<const char*>(&converted_guid[0]),
+ converted_guid.size()));
+}
+
+void DeviceRequestMessageFilter::OnGetSources(int request_id,
+ const GURL& security_origin) {
+ // Make request to get audio devices.
+ const std::string& audio_label = media_stream_manager_->EnumerateDevices(
+ this, -1, -1, -1, MEDIA_DEVICE_AUDIO_CAPTURE, security_origin);
+ DCHECK(!audio_label.empty());
+
+ // Make request for video devices.
+ const std::string& video_label = media_stream_manager_->EnumerateDevices(
+ this, -1, -1, -1, MEDIA_DEVICE_VIDEO_CAPTURE, security_origin);
+ DCHECK(!video_label.empty());
+
+ requests_.push_back(DeviceRequest(
+ request_id, security_origin, audio_label, video_label));
+}
+
+} // namespace content
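The HMAC handling in HmacDeviceIds() and DoesRawIdMatchGuid() above is the security-sensitive core of this filter: raw device ids are replaced with per-origin HMAC-SHA256 digests before they are sent to the renderer. Here is a reduced sketch of the same hashing, reusing the crypto::HMAC and base string calls that already appear in this file; it is illustrative rather than shared production code.

#include <string>
#include <vector>

#include "base/basictypes.h"
#include "base/strings/string_number_conversions.h"
#include "base/strings/string_util.h"
#include "crypto/hmac.h"
#include "url/gurl.h"

// Returns the lower-case hex HMAC-SHA256 of |raw_device_id|, keyed on the
// requesting origin, or an empty string on failure.
std::string HashDeviceIdForOrigin(const GURL& origin,
                                  const std::string& raw_device_id) {
  crypto::HMAC hmac(crypto::HMAC::SHA256);
  std::vector<uint8> digest(hmac.DigestLength());
  if (!hmac.Init(origin.spec()) ||
      !hmac.Sign(raw_device_id, &digest[0], digest.size()))
    return std::string();
  return StringToLowerASCII(base::HexEncode(&digest[0], digest.size()));
}

// True if |guid| is the per-origin hash of |raw_device_id|; the same check
// that DoesRawIdMatchGuid() performs via crypto::HMAC::Verify().
bool GuidMatches(const GURL& origin,
                 const std::string& guid,
                 const std::string& raw_device_id) {
  return HashDeviceIdForOrigin(origin, raw_device_id) == guid;
}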
diff --git a/chromium/content/browser/renderer_host/media/device_request_message_filter.h b/chromium/content/browser/renderer_host/media/device_request_message_filter.h
new file mode 100644
index 00000000000..048e2157871
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/device_request_message_filter.h
@@ -0,0 +1,78 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CONTENT_BROWSER_RENDERER_HOST_MEDIA_MEDIA_STREAM_CENTER_HOST_H_
+#define CONTENT_BROWSER_RENDERER_HOST_MEDIA_MEDIA_STREAM_CENTER_HOST_H_
+
+#include <map>
+#include <string>
+
+#include "base/synchronization/lock.h"
+#include "content/browser/renderer_host/media/media_stream_requester.h"
+#include "content/common/content_export.h"
+#include "content/public/browser/browser_message_filter.h"
+
+namespace content {
+
+class MediaStreamManager;
+class ResourceContext;
+
+// DeviceRequestMessageFilter is used to delegate device-enumeration requests
+// from the MediaStreamCenter.
+class CONTENT_EXPORT DeviceRequestMessageFilter : public BrowserMessageFilter,
+ public MediaStreamRequester {
+ public:
+ DeviceRequestMessageFilter(ResourceContext* resource_context,
+ MediaStreamManager* media_stream_manager);
+
+ // MediaStreamRequester implementation.
+ // TODO(vrk): Replace MediaStreamRequester interface with a single callback so
+ // we don't have to override all these callbacks we don't care about.
+ // (crbug.com/249476)
+ virtual void StreamGenerated(
+ const std::string& label, const StreamDeviceInfoArray& audio_devices,
+ const StreamDeviceInfoArray& video_devices) OVERRIDE;
+ virtual void StreamGenerationFailed(const std::string& label) OVERRIDE;
+ virtual void StopGeneratedStream(const std::string& label) OVERRIDE;
+ virtual void DeviceOpened(const std::string& label,
+ const StreamDeviceInfo& video_device) OVERRIDE;
+ // DevicesEnumerated() is the only callback we're interested in.
+ virtual void DevicesEnumerated(const std::string& label,
+ const StreamDeviceInfoArray& devices) OVERRIDE;
+
+ // BrowserMessageFilter implementation.
+ virtual bool OnMessageReceived(const IPC::Message& message,
+ bool* message_was_ok) OVERRIDE;
+ virtual void OnChannelClosing() OVERRIDE;
+
+ // Helper method that checks whether the GUID generated by
+ // DeviceRequestMessageFilter matches the given |raw_device_id|.
+ static bool DoesRawIdMatchGuid(const GURL& security_origin,
+ const std::string& device_guid,
+ const std::string& raw_device_id);
+
+ protected:
+ virtual ~DeviceRequestMessageFilter();
+
+ private:
+ void OnGetSources(int request_id, const GURL& security_origin);
+ void HmacDeviceIds(const GURL& origin,
+ const StreamDeviceInfoArray& raw_devices,
+ StreamDeviceInfoArray* devices_with_guids);
+
+  // Owned by ProfileIOData, which is guaranteed to outlive this filter.
+ ResourceContext* resource_context_;
+ MediaStreamManager* media_stream_manager_;
+
+ struct DeviceRequest;
+ typedef std::vector<DeviceRequest> DeviceRequestList;
+ // List of all requests.
+ DeviceRequestList requests_;
+
+ DISALLOW_COPY_AND_ASSIGN(DeviceRequestMessageFilter);
+};
+
+} // namespace content
+
+#endif // CONTENT_BROWSER_RENDERER_HOST_MEDIA_MEDIA_STREAM_CENTER_HOST_H_
diff --git a/chromium/content/browser/renderer_host/media/device_request_message_filter_unittest.cc b/chromium/content/browser/renderer_host/media/device_request_message_filter_unittest.cc
new file mode 100644
index 00000000000..04a7bb69eea
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/device_request_message_filter_unittest.cc
@@ -0,0 +1,304 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "base/strings/string_number_conversions.h"
+#include "content/browser/renderer_host/media/device_request_message_filter.h"
+#include "content/browser/renderer_host/media/media_stream_manager.h"
+#include "content/common/media/media_stream_messages.h"
+#include "content/public/test/mock_resource_context.h"
+#include "content/public/test/test_browser_thread.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using ::testing::_;
+using ::testing::Invoke;
+
+namespace content {
+
+static const std::string kAudioLabel = "audio_label";
+static const std::string kVideoLabel = "video_label";
+
+class MockMediaStreamManager : public MediaStreamManager {
+ public:
+ MockMediaStreamManager() {}
+
+ virtual ~MockMediaStreamManager() {}
+
+ MOCK_METHOD6(EnumerateDevices,
+ std::string(MediaStreamRequester* requester,
+ int render_process_id,
+ int render_view_id,
+ int page_request_id,
+ MediaStreamType type,
+ const GURL& security_origin));
+ MOCK_METHOD1(StopGeneratedStream, void(const std::string& label));
+
+ std::string DoEnumerateDevices(MediaStreamRequester* requester,
+ int render_process_id,
+ int render_view_id,
+ int page_request_id,
+ MediaStreamType type,
+ const GURL& security_origin) {
+ if (type == MEDIA_DEVICE_AUDIO_CAPTURE) {
+ return kAudioLabel;
+ } else {
+ return kVideoLabel;
+ }
+ }
+};
+
+class MockDeviceRequestMessageFilter : public DeviceRequestMessageFilter {
+ public:
+ MockDeviceRequestMessageFilter(MockResourceContext* context,
+ MockMediaStreamManager* manager)
+ : DeviceRequestMessageFilter(context, manager), received_id_(-1) {}
+ StreamDeviceInfoArray requested_devices() { return requested_devices_; }
+ int received_id() { return received_id_; }
+
+ private:
+ virtual ~MockDeviceRequestMessageFilter() {}
+
+ // Override the Send() method to intercept the message that we're sending to
+ // the renderer.
+ virtual bool Send(IPC::Message* reply_msg) OVERRIDE {
+ CHECK(reply_msg);
+
+ bool handled = true;
+ IPC_BEGIN_MESSAGE_MAP(MockDeviceRequestMessageFilter, *reply_msg)
+ IPC_MESSAGE_HANDLER(MediaStreamMsg_GetSourcesACK, SaveDevices)
+ IPC_MESSAGE_UNHANDLED(handled = false)
+ IPC_END_MESSAGE_MAP()
+ EXPECT_TRUE(handled);
+
+ delete reply_msg;
+ return true;
+ }
+
+ void SaveDevices(int request_id, const StreamDeviceInfoArray& devices) {
+ received_id_ = request_id;
+ requested_devices_ = devices;
+ }
+
+ int received_id_;
+ StreamDeviceInfoArray requested_devices_;
+};
+
+class DeviceRequestMessageFilterTest : public testing::Test {
+ public:
+ DeviceRequestMessageFilterTest() : next_device_id_(0) {}
+
+ void RunTest(int number_audio_devices, int number_video_devices) {
+ AddAudioDevices(number_audio_devices);
+ AddVideoDevices(number_video_devices);
+ GURL origin("https://test.com");
+ EXPECT_CALL(*media_stream_manager_,
+ EnumerateDevices(_, _, _, _, MEDIA_DEVICE_AUDIO_CAPTURE, _))
+ .Times(1);
+ EXPECT_CALL(*media_stream_manager_,
+ EnumerateDevices(_, _, _, _, MEDIA_DEVICE_VIDEO_CAPTURE, _))
+ .Times(1);
+    // Send a message to get devices. Should trigger 2 EnumerateDevices() requests.
+ const int kRequestId = 123;
+ SendGetSourcesMessage(kRequestId, origin);
+
+    // Run the audio callback. Because there's still an outstanding video
+    // request, the requested device list should not be populated yet.
+ FireAudioDeviceCallback();
+ EXPECT_EQ(0u, host_->requested_devices().size());
+
+    // After the video device callback fires, the requested device list should
+    // be populated.
+ EXPECT_CALL(*media_stream_manager_, StopGeneratedStream(kAudioLabel))
+ .Times(1);
+ EXPECT_CALL(*media_stream_manager_, StopGeneratedStream(kVideoLabel))
+ .Times(1);
+ FireVideoDeviceCallback();
+ EXPECT_EQ(static_cast<size_t>(number_audio_devices + number_video_devices),
+ host_->requested_devices().size());
+
+ EXPECT_EQ(kRequestId, host_->received_id());
+ // Check to make sure no devices have raw ids.
+ EXPECT_FALSE(DoesContainRawIds(host_->requested_devices()));
+
+ // Check to make sure every GUID produced matches a raw device id.
+ EXPECT_TRUE(DoesEveryDeviceMapToRawId(host_->requested_devices(), origin));
+ }
+
+ bool AreLabelsPresent(MediaStreamType type) {
+ const StreamDeviceInfoArray& devices = host_->requested_devices();
+ for (size_t i = 0; i < devices.size(); i++) {
+ if (devices[i].device.type == type && !devices[i].device.name.empty())
+ return true;
+ }
+ return false;
+ }
+
+ protected:
+ virtual ~DeviceRequestMessageFilterTest() {}
+
+ virtual void SetUp() OVERRIDE {
+ message_loop_.reset(new base::MessageLoop(base::MessageLoop::TYPE_IO));
+ io_thread_.reset(
+ new TestBrowserThread(BrowserThread::IO, message_loop_.get()));
+
+ media_stream_manager_.reset(new MockMediaStreamManager());
+ ON_CALL(*media_stream_manager_, EnumerateDevices(_, _, _, _, _, _))
+ .WillByDefault(Invoke(media_stream_manager_.get(),
+ &MockMediaStreamManager::DoEnumerateDevices));
+
+ resource_context_.reset(new MockResourceContext(NULL));
+ host_ = new MockDeviceRequestMessageFilter(resource_context_.get(),
+ media_stream_manager_.get());
+ }
+
+ scoped_refptr<MockDeviceRequestMessageFilter> host_;
+ scoped_ptr<MockMediaStreamManager> media_stream_manager_;
+ scoped_ptr<MockResourceContext> resource_context_;
+ StreamDeviceInfoArray physical_audio_devices_;
+ StreamDeviceInfoArray physical_video_devices_;
+ scoped_ptr<base::MessageLoop> message_loop_;
+ scoped_ptr<TestBrowserThread> io_thread_;
+
+ private:
+ void AddAudioDevices(int number_of_devices) {
+ for (int i = 0; i < number_of_devices; i++) {
+ physical_audio_devices_.push_back(
+ StreamDeviceInfo(MEDIA_DEVICE_AUDIO_CAPTURE,
+ "/dev/audio/" + base::IntToString(next_device_id_),
+ "Audio Device" + base::IntToString(next_device_id_),
+ false));
+ next_device_id_++;
+ }
+ }
+
+ void AddVideoDevices(int number_of_devices) {
+ for (int i = 0; i < number_of_devices; i++) {
+ physical_video_devices_.push_back(
+ StreamDeviceInfo(MEDIA_DEVICE_VIDEO_CAPTURE,
+ "/dev/video/" + base::IntToString(next_device_id_),
+ "Video Device" + base::IntToString(next_device_id_),
+ false));
+ next_device_id_++;
+ }
+ }
+
+ void SendGetSourcesMessage(int request_id, const GURL& origin) {
+ // Since we're not actually sending IPC messages, this is a throw-away
+ // value.
+ bool message_was_ok;
+ host_->OnMessageReceived(MediaStreamHostMsg_GetSources(request_id, origin),
+ &message_was_ok);
+ }
+
+ void FireAudioDeviceCallback() {
+ host_->DevicesEnumerated(kAudioLabel, physical_audio_devices_);
+ }
+
+ void FireVideoDeviceCallback() {
+ host_->DevicesEnumerated(kVideoLabel, physical_video_devices_);
+ }
+
+ bool DoesContainRawIds(const StreamDeviceInfoArray& devices) {
+ for (size_t i = 0; i < devices.size(); i++) {
+ for (size_t j = 0; j < physical_audio_devices_.size(); ++j) {
+ if (physical_audio_devices_[j].device.id == devices[i].device.id)
+ return true;
+ }
+ for (size_t j = 0; j < physical_video_devices_.size(); ++j) {
+ if (physical_video_devices_[j].device.id == devices[i].device.id)
+ return true;
+ }
+ }
+ return false;
+ }
+
+ bool DoesEveryDeviceMapToRawId(const StreamDeviceInfoArray& devices,
+ const GURL& origin) {
+ for (size_t i = 0; i < devices.size(); i++) {
+ bool found_match = false;
+ for (size_t j = 0; j < physical_audio_devices_.size(); ++j) {
+ if (DeviceRequestMessageFilter::DoesRawIdMatchGuid(
+ origin,
+ devices[i].device.id,
+ physical_audio_devices_[j].device.id)) {
+ EXPECT_FALSE(found_match);
+ found_match = true;
+ }
+ }
+ for (size_t j = 0; j < physical_video_devices_.size(); ++j) {
+ if (DeviceRequestMessageFilter::DoesRawIdMatchGuid(
+ origin,
+ devices[i].device.id,
+ physical_video_devices_[j].device.id)) {
+ EXPECT_FALSE(found_match);
+ found_match = true;
+ }
+ }
+ if (!found_match)
+ return false;
+ }
+ return true;
+ }
+
+ int next_device_id_;
+};
+
+TEST_F(DeviceRequestMessageFilterTest, TestGetSources_AudioAndVideoDevices) {
+ // Runs through test with 1 audio and 1 video device.
+ RunTest(1, 1);
+}
+
+TEST_F(DeviceRequestMessageFilterTest,
+ TestGetSources_MultipleAudioAndVideoDevices) {
+ // Runs through test with 3 audio devices and 2 video devices.
+ RunTest(3, 2);
+}
+
+TEST_F(DeviceRequestMessageFilterTest, TestGetSources_NoVideoDevices) {
+ // Runs through test with 4 audio devices and 0 video devices.
+ RunTest(4, 0);
+}
+
+TEST_F(DeviceRequestMessageFilterTest, TestGetSources_NoAudioDevices) {
+ // Runs through test with 0 audio devices and 3 video devices.
+ RunTest(0, 3);
+}
+
+TEST_F(DeviceRequestMessageFilterTest, TestGetSources_NoDevices) {
+ // Runs through test with no devices.
+ RunTest(0, 0);
+}
+
+TEST_F(DeviceRequestMessageFilterTest, TestGetSources_DenyMicDenyCamera) {
+ resource_context_->set_mic_access(false);
+ resource_context_->set_camera_access(false);
+ RunTest(3, 3);
+ EXPECT_FALSE(AreLabelsPresent(MEDIA_DEVICE_AUDIO_CAPTURE));
+ EXPECT_FALSE(AreLabelsPresent(MEDIA_DEVICE_VIDEO_CAPTURE));
+}
+
+TEST_F(DeviceRequestMessageFilterTest, TestGetSources_AllowMicDenyCamera) {
+ resource_context_->set_mic_access(true);
+ resource_context_->set_camera_access(false);
+ RunTest(3, 3);
+ EXPECT_TRUE(AreLabelsPresent(MEDIA_DEVICE_AUDIO_CAPTURE));
+ EXPECT_FALSE(AreLabelsPresent(MEDIA_DEVICE_VIDEO_CAPTURE));
+}
+
+TEST_F(DeviceRequestMessageFilterTest, TestGetSources_DenyMicAllowCamera) {
+ resource_context_->set_mic_access(false);
+ resource_context_->set_camera_access(true);
+ RunTest(3, 3);
+ EXPECT_FALSE(AreLabelsPresent(MEDIA_DEVICE_AUDIO_CAPTURE));
+ EXPECT_TRUE(AreLabelsPresent(MEDIA_DEVICE_VIDEO_CAPTURE));
+}
+
+TEST_F(DeviceRequestMessageFilterTest, TestGetSources_AllowMicAllowCamera) {
+ resource_context_->set_mic_access(true);
+ resource_context_->set_camera_access(true);
+ RunTest(3, 3);
+ EXPECT_TRUE(AreLabelsPresent(MEDIA_DEVICE_AUDIO_CAPTURE));
+ EXPECT_TRUE(AreLabelsPresent(MEDIA_DEVICE_VIDEO_CAPTURE));
+}
+
+}  // namespace content
diff --git a/chromium/content/browser/renderer_host/media/media_stream_dispatcher_host.cc b/chromium/content/browser/renderer_host/media/media_stream_dispatcher_host.cc
new file mode 100644
index 00000000000..9135b719960
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/media_stream_dispatcher_host.cc
@@ -0,0 +1,224 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "content/browser/renderer_host/media/media_stream_dispatcher_host.h"
+
+#include "content/browser/browser_main_loop.h"
+#include "content/browser/renderer_host/media/web_contents_capture_util.h"
+#include "content/common/media/media_stream_messages.h"
+#include "content/common/media/media_stream_options.h"
+#include "url/gurl.h"
+
+namespace content {
+
+MediaStreamDispatcherHost::MediaStreamDispatcherHost(
+ int render_process_id,
+ MediaStreamManager* media_stream_manager)
+ : render_process_id_(render_process_id),
+ media_stream_manager_(media_stream_manager) {
+}
+
+void MediaStreamDispatcherHost::StreamGenerated(
+ const std::string& label,
+ const StreamDeviceInfoArray& audio_devices,
+ const StreamDeviceInfoArray& video_devices) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ DVLOG(1) << "MediaStreamDispatcherHost::StreamGenerated("
+ << ", {label = " << label << "})";
+
+ StreamMap::iterator it = streams_.find(label);
+ DCHECK(it != streams_.end());
+ StreamRequest request = it->second;
+
+ Send(new MediaStreamMsg_StreamGenerated(
+ request.render_view_id, request.page_request_id, label, audio_devices,
+ video_devices));
+}
+
+void MediaStreamDispatcherHost::StreamGenerationFailed(
+ const std::string& label) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ DVLOG(1) << "MediaStreamDispatcherHost::StreamGenerationFailed("
+ << ", {label = " << label << "})";
+
+ StreamMap::iterator it = streams_.find(label);
+ DCHECK(it != streams_.end());
+ StreamRequest request = it->second;
+ streams_.erase(it);
+
+ Send(new MediaStreamMsg_StreamGenerationFailed(request.render_view_id,
+ request.page_request_id));
+}
+
+void MediaStreamDispatcherHost::StopGeneratedStream(
+ const std::string& label) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ DVLOG(1) << "MediaStreamDispatcherHost::StopGeneratedStream("
+ << ", {label = " << label << "})";
+
+ StreamMap::iterator it = streams_.find(label);
+ DCHECK(it != streams_.end());
+ StreamRequest request = it->second;
+ streams_.erase(it);
+
+ Send(new MediaStreamMsg_StopGeneratedStream(request.render_view_id, label));
+}
+
+void MediaStreamDispatcherHost::DevicesEnumerated(
+ const std::string& label,
+ const StreamDeviceInfoArray& devices) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ DVLOG(1) << "MediaStreamDispatcherHost::DevicesEnumerated("
+ << ", {label = " << label << "})";
+
+ StreamMap::iterator it = streams_.find(label);
+ DCHECK(it != streams_.end());
+ StreamRequest request = it->second;
+
+ Send(new MediaStreamMsg_DevicesEnumerated(
+ request.render_view_id, request.page_request_id, label, devices));
+}
+
+void MediaStreamDispatcherHost::DeviceOpened(
+ const std::string& label,
+ const StreamDeviceInfo& video_device) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ DVLOG(1) << "MediaStreamDispatcherHost::DeviceOpened("
+ << ", {label = " << label << "})";
+
+ StreamMap::iterator it = streams_.find(label);
+ DCHECK(it != streams_.end());
+ StreamRequest request = it->second;
+
+ Send(new MediaStreamMsg_DeviceOpened(
+ request.render_view_id, request.page_request_id, label, video_device));
+}
+
+bool MediaStreamDispatcherHost::OnMessageReceived(
+ const IPC::Message& message, bool* message_was_ok) {
+ bool handled = true;
+ IPC_BEGIN_MESSAGE_MAP_EX(MediaStreamDispatcherHost, message, *message_was_ok)
+ IPC_MESSAGE_HANDLER(MediaStreamHostMsg_GenerateStream, OnGenerateStream)
+ IPC_MESSAGE_HANDLER(MediaStreamHostMsg_CancelGenerateStream,
+ OnCancelGenerateStream)
+ IPC_MESSAGE_HANDLER(MediaStreamHostMsg_StopGeneratedStream,
+ OnStopGeneratedStream)
+ IPC_MESSAGE_HANDLER(MediaStreamHostMsg_EnumerateDevices,
+ OnEnumerateDevices)
+ IPC_MESSAGE_HANDLER(MediaStreamHostMsg_OpenDevice,
+ OnOpenDevice)
+ IPC_MESSAGE_UNHANDLED(handled = false)
+ IPC_END_MESSAGE_MAP_EX()
+ return handled;
+}
+
+void MediaStreamDispatcherHost::OnChannelClosing() {
+ BrowserMessageFilter::OnChannelClosing();
+ DVLOG(1) << "MediaStreamDispatcherHost::OnChannelClosing";
+
+ // Since the IPC channel is gone, close all requesting/requested streams.
+ for (StreamMap::iterator it = streams_.begin();
+ it != streams_.end();
+ ++it) {
+ std::string label = it->first;
+ media_stream_manager_->StopGeneratedStream(label);
+ }
+ // Clear the map after we have stopped all the streams.
+ streams_.clear();
+}
+
+MediaStreamDispatcherHost::~MediaStreamDispatcherHost() {
+ DCHECK(streams_.empty());
+}
+
+void MediaStreamDispatcherHost::OnGenerateStream(
+ int render_view_id,
+ int page_request_id,
+ const StreamOptions& components,
+ const GURL& security_origin) {
+ DVLOG(1) << "MediaStreamDispatcherHost::OnGenerateStream("
+ << render_view_id << ", "
+ << page_request_id << ", ["
+ << " audio:" << components.audio_type
+ << " video:" << components.video_type
+ << " ], "
+ << security_origin.spec() << ")";
+
+ const std::string& label = media_stream_manager_->GenerateStream(
+ this, render_process_id_, render_view_id, page_request_id,
+ components, security_origin);
+ if (label.empty()) {
+ Send(new MediaStreamMsg_StreamGenerationFailed(render_view_id,
+ page_request_id));
+ } else {
+ streams_[label] = StreamRequest(render_view_id, page_request_id);
+ }
+}
+
+void MediaStreamDispatcherHost::OnCancelGenerateStream(int render_view_id,
+ int page_request_id) {
+ DVLOG(1) << "MediaStreamDispatcherHost::OnCancelGenerateStream("
+ << render_view_id << ", "
+ << page_request_id << ")";
+
+ for (StreamMap::iterator it = streams_.begin(); it != streams_.end(); ++it) {
+ if (it->second.render_view_id == render_view_id &&
+ it->second.page_request_id == page_request_id) {
+ media_stream_manager_->CancelRequest(it->first);
+ }
+ }
+}
+
+void MediaStreamDispatcherHost::OnStopGeneratedStream(
+ int render_view_id, const std::string& label) {
+ DVLOG(1) << "MediaStreamDispatcherHost::OnStopGeneratedStream("
+ << ", {label = " << label << "})";
+
+ StreamMap::iterator it = streams_.find(label);
+ if (it == streams_.end())
+ return;
+
+ media_stream_manager_->StopGeneratedStream(label);
+ streams_.erase(it);
+}
+
+void MediaStreamDispatcherHost::OnEnumerateDevices(
+ int render_view_id,
+ int page_request_id,
+ MediaStreamType type,
+ const GURL& security_origin) {
+ DVLOG(1) << "MediaStreamDispatcherHost::OnEnumerateDevices("
+ << render_view_id << ", "
+ << page_request_id << ", "
+ << type << ", "
+ << security_origin.spec() << ")";
+
+ const std::string& label = media_stream_manager_->EnumerateDevices(
+ this, render_process_id_, render_view_id, page_request_id,
+ type, security_origin);
+ DCHECK(!label.empty());
+ streams_[label] = StreamRequest(render_view_id, page_request_id);
+}
+
+void MediaStreamDispatcherHost::OnOpenDevice(
+ int render_view_id,
+ int page_request_id,
+ const std::string& device_id,
+ MediaStreamType type,
+ const GURL& security_origin) {
+ DVLOG(1) << "MediaStreamDispatcherHost::OnOpenDevice("
+ << render_view_id << ", "
+ << page_request_id << ", device_id: "
+ << device_id.c_str() << ", type: "
+ << type << ", "
+ << security_origin.spec() << ")";
+
+ const std::string& label = media_stream_manager_->OpenDevice(
+ this, render_process_id_, render_view_id, page_request_id,
+ device_id, type, security_origin);
+ DCHECK(!label.empty());
+ streams_[label] = StreamRequest(render_view_id, page_request_id);
+}
+
+} // namespace content
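The dispatcher host above keeps a label-keyed map of outstanding requests so that asynchronous callbacks (generated, failed, stopped) can be routed back to the right render view. A minimal standalone sketch of that bookkeeping pattern, with illustrative names rather than the Chromium types:

// Simplified, hypothetical sketch of the label -> request bookkeeping.
#include <cstddef>
#include <iostream>
#include <map>
#include <string>

struct PendingRequest {
  int render_view_id = 0;
  int page_request_id = 0;
};

class StreamBookkeeper {
 public:
  void Add(const std::string& label, int render_view_id, int page_request_id) {
    streams_[label] = {render_view_id, page_request_id};
  }

  // Returns true and fills |out| if the label is known; the entry is erased,
  // mirroring how StreamGenerationFailed() above drops the request.
  bool TakeOnFailure(const std::string& label, PendingRequest* out) {
    auto it = streams_.find(label);
    if (it == streams_.end())
      return false;
    *out = it->second;
    streams_.erase(it);
    return true;
  }

  std::size_t size() const { return streams_.size(); }

 private:
  std::map<std::string, PendingRequest> streams_;
};

int main() {
  StreamBookkeeper streams;
  streams.Add("label-1", /*render_view_id=*/6, /*page_request_id=*/7);
  PendingRequest request;
  if (streams.TakeOnFailure("label-1", &request))
    std::cout << "notify view " << request.render_view_id << "\n";
  return 0;
}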
diff --git a/chromium/content/browser/renderer_host/media/media_stream_dispatcher_host.h b/chromium/content/browser/renderer_host/media/media_stream_dispatcher_host.h
new file mode 100644
index 00000000000..cfc69137a39
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/media_stream_dispatcher_host.h
@@ -0,0 +1,95 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CONTENT_BROWSER_RENDERER_HOST_MEDIA_MEDIA_STREAM_DISPATCHER_HOST_H_
+#define CONTENT_BROWSER_RENDERER_HOST_MEDIA_MEDIA_STREAM_DISPATCHER_HOST_H_
+
+#include <map>
+#include <string>
+#include <utility>
+
+#include "content/browser/renderer_host/media/media_stream_manager.h"
+#include "content/browser/renderer_host/media/media_stream_requester.h"
+#include "content/common/content_export.h"
+#include "content/common/media/media_stream_options.h"
+#include "content/public/browser/browser_message_filter.h"
+
+namespace content {
+class MediaStreamManager;
+
+// MediaStreamDispatcherHost is a delegate for Media Stream API messages used by
+// MediaStreamImpl. It's the complement of MediaStreamDispatcher
+// (owned by RenderView).
+class CONTENT_EXPORT MediaStreamDispatcherHost : public BrowserMessageFilter,
+ public MediaStreamRequester {
+ public:
+ MediaStreamDispatcherHost(int render_process_id,
+ MediaStreamManager* media_stream_manager);
+
+ // MediaStreamRequester implementation.
+ virtual void StreamGenerated(
+ const std::string& label,
+ const StreamDeviceInfoArray& audio_devices,
+ const StreamDeviceInfoArray& video_devices) OVERRIDE;
+ virtual void StreamGenerationFailed(const std::string& label) OVERRIDE;
+ virtual void StopGeneratedStream(const std::string& label) OVERRIDE;
+ virtual void DevicesEnumerated(const std::string& label,
+ const StreamDeviceInfoArray& devices) OVERRIDE;
+ virtual void DeviceOpened(const std::string& label,
+ const StreamDeviceInfo& video_device) OVERRIDE;
+
+ // BrowserMessageFilter implementation.
+ virtual bool OnMessageReceived(const IPC::Message& message,
+ bool* message_was_ok) OVERRIDE;
+ virtual void OnChannelClosing() OVERRIDE;
+
+ protected:
+ virtual ~MediaStreamDispatcherHost();
+
+ private:
+ friend class MockMediaStreamDispatcherHost;
+
+ void OnGenerateStream(int render_view_id,
+ int page_request_id,
+ const StreamOptions& components,
+ const GURL& security_origin);
+ void OnCancelGenerateStream(int render_view_id,
+ int page_request_id);
+ void OnStopGeneratedStream(int render_view_id, const std::string& label);
+
+ void OnEnumerateDevices(int render_view_id,
+ int page_request_id,
+ MediaStreamType type,
+ const GURL& security_origin);
+
+ void OnOpenDevice(int render_view_id,
+ int page_request_id,
+ const std::string& device_id,
+ MediaStreamType type,
+ const GURL& security_origin);
+
+ int render_process_id_;
+ MediaStreamManager* media_stream_manager_;
+
+ struct StreamRequest {
+ StreamRequest() : render_view_id(0), page_request_id(0) {}
+    StreamRequest(int render_view_id, int page_request_id)
+        : render_view_id(render_view_id),
+          page_request_id(page_request_id) {
+ }
+ int render_view_id;
+ // Id of the request generated by MediaStreamDispatcher.
+ int page_request_id;
+ };
+
+ typedef std::map<std::string, StreamRequest> StreamMap;
+ // Streams generated for this host.
+ StreamMap streams_;
+
+ DISALLOW_COPY_AND_ASSIGN(MediaStreamDispatcherHost);
+};
+
+} // namespace content
+
+#endif // CONTENT_BROWSER_RENDERER_HOST_MEDIA_MEDIA_STREAM_DISPATCHER_HOST_H_
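The header above gives the class two roles: a BrowserMessageFilter for IPC and a MediaStreamRequester for callbacks from the manager. A simplified sketch of that interface split, with illustrative names only (not the real Chromium declarations):

// Hypothetical sketch: the manager talks to an abstract requester interface;
// the host implements it and turns the callbacks into outgoing "messages".
#include <iostream>
#include <string>
#include <vector>

// Stand-in for MediaStreamRequester.
class Requester {
 public:
  virtual ~Requester() = default;
  virtual void StreamGenerated(const std::string& label,
                               const std::vector<std::string>& devices) = 0;
  virtual void StreamGenerationFailed(const std::string& label) = 0;
};

// Stand-in for the dispatcher host.
class LoggingHost : public Requester {
 public:
  void StreamGenerated(const std::string& label,
                       const std::vector<std::string>& devices) override {
    std::cout << "send StreamGenerated(" << label << ", "
              << devices.size() << " devices)\n";
  }
  void StreamGenerationFailed(const std::string& label) override {
    std::cout << "send StreamGenerationFailed(" << label << ")\n";
  }
};

int main() {
  LoggingHost host;
  Requester* requester = &host;  // The manager only sees this interface.
  requester->StreamGenerated("label-1", {"fake-video-device"});
  return 0;
}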
diff --git a/chromium/content/browser/renderer_host/media/media_stream_dispatcher_host_unittest.cc b/chromium/content/browser/renderer_host/media/media_stream_dispatcher_host_unittest.cc
new file mode 100644
index 00000000000..05804266035
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/media_stream_dispatcher_host_unittest.cc
@@ -0,0 +1,353 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <string>
+
+#include "base/bind.h"
+#include "base/callback_helpers.h"
+#include "base/message_loop/message_loop.h"
+#include "base/run_loop.h"
+#include "content/browser/browser_thread_impl.h"
+#include "content/browser/renderer_host/media/media_stream_dispatcher_host.h"
+#include "content/browser/renderer_host/media/media_stream_manager.h"
+#include "content/browser/renderer_host/media/media_stream_ui_proxy.h"
+#include "content/browser/renderer_host/media/video_capture_manager.h"
+#include "content/common/media/media_stream_messages.h"
+#include "content/common/media/media_stream_options.h"
+#include "content/public/test/mock_resource_context.h"
+#include "content/public/test/test_browser_thread_bundle.h"
+#include "content/test/test_content_browser_client.h"
+#include "content/test/test_content_client.h"
+#include "ipc/ipc_message_macros.h"
+#include "media/audio/audio_manager.h"
+#include "media/video/capture/fake_video_capture_device.h"
+#include "net/url_request/url_request_context.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using ::testing::_;
+using ::testing::DeleteArg;
+using ::testing::DoAll;
+using ::testing::Return;
+using ::testing::SaveArg;
+
+const int kProcessId = 5;
+const int kRenderId = 6;
+const int kPageRequestId = 7;
+
+namespace content {
+
+class MockMediaStreamDispatcherHost : public MediaStreamDispatcherHost,
+ public TestContentBrowserClient {
+ public:
+ MockMediaStreamDispatcherHost(
+ const scoped_refptr<base::MessageLoopProxy>& message_loop,
+ MediaStreamManager* manager)
+ : MediaStreamDispatcherHost(kProcessId, manager),
+ message_loop_(message_loop) {}
+
+ // A list of mock methods.
+ MOCK_METHOD4(OnStreamGenerated,
+ void(int routing_id, int request_id, int audio_array_size,
+ int video_array_size));
+ MOCK_METHOD2(OnStreamGenerationFailed, void(int routing_id, int request_id));
+ MOCK_METHOD1(OnStopGeneratedStreamFromBrowser,
+ void(int routing_id));
+
+ // Accessor to private functions.
+ void OnGenerateStream(int page_request_id,
+ const StreamOptions& components,
+ const base::Closure& quit_closure) {
+ quit_closure_ = quit_closure;
+ MediaStreamDispatcherHost::OnGenerateStream(
+ kRenderId, page_request_id, components, GURL());
+ }
+
+ void OnStopGeneratedStream(const std::string& label) {
+ MediaStreamDispatcherHost::OnStopGeneratedStream(kRenderId, label);
+ }
+
+  // Returns the number of streams that have been opened or are being opened.
+ size_t NumberOfStreams() {
+ return streams_.size();
+ }
+
+ std::string label_;
+ StreamDeviceInfoArray audio_devices_;
+ StreamDeviceInfoArray video_devices_;
+
+ private:
+ virtual ~MockMediaStreamDispatcherHost() {}
+
+ // This method is used to dispatch IPC messages to the renderer. We intercept
+ // these messages here and dispatch to our mock methods to verify the
+ // conversation between this object and the renderer.
+ virtual bool Send(IPC::Message* message) OVERRIDE {
+ CHECK(message);
+
+    // In this method we dispatch the messages to the corresponding handlers
+    // as if we were the renderer.
+ bool handled = true;
+ IPC_BEGIN_MESSAGE_MAP(MockMediaStreamDispatcherHost, *message)
+ IPC_MESSAGE_HANDLER(MediaStreamMsg_StreamGenerated, OnStreamGenerated)
+ IPC_MESSAGE_HANDLER(MediaStreamMsg_StreamGenerationFailed,
+ OnStreamGenerationFailed)
+ IPC_MESSAGE_HANDLER(MediaStreamMsg_StopGeneratedStream,
+ OnStopGeneratedStreamFromBrowser)
+ IPC_MESSAGE_UNHANDLED(handled = false)
+ IPC_END_MESSAGE_MAP()
+ EXPECT_TRUE(handled);
+
+ delete message;
+ return true;
+ }
+
+ // These handler methods do minimal things and delegate to the mock methods.
+ void OnStreamGenerated(
+ const IPC::Message& msg,
+ int request_id,
+ std::string label,
+ StreamDeviceInfoArray audio_device_list,
+ StreamDeviceInfoArray video_device_list) {
+ OnStreamGenerated(msg.routing_id(), request_id, audio_device_list.size(),
+ video_device_list.size());
+    // Notify that the event has occurred.
+ message_loop_->PostTask(FROM_HERE, base::ResetAndReturn(&quit_closure_));
+ label_ = label;
+ audio_devices_ = audio_device_list;
+ video_devices_ = video_device_list;
+ }
+
+ void OnStreamGenerationFailed(const IPC::Message& msg, int request_id) {
+ OnStreamGenerationFailed(msg.routing_id(), request_id);
+ if (!quit_closure_.is_null())
+ message_loop_->PostTask(FROM_HERE, base::ResetAndReturn(&quit_closure_));
+    label_ = "";
+ }
+
+ void OnStopGeneratedStreamFromBrowser(const IPC::Message& msg,
+ const std::string& label) {
+ OnStopGeneratedStreamFromBrowser(msg.routing_id());
+    // Notify that the event has occurred.
+ if (!quit_closure_.is_null())
+ message_loop_->PostTask(FROM_HERE, base::ResetAndReturn(&quit_closure_));
+ label_ = "";
+ }
+
+ scoped_refptr<base::MessageLoopProxy> message_loop_;
+ base::Closure quit_closure_;
+};
+
+class MockMediaStreamUIProxy : public FakeMediaStreamUIProxy {
+ public:
+ MOCK_METHOD1(OnStarted, void(const base::Closure& stop));
+};
+
+class MediaStreamDispatcherHostTest : public testing::Test {
+ public:
+ MediaStreamDispatcherHostTest()
+ : old_browser_client_(NULL),
+ thread_bundle_(content::TestBrowserThreadBundle::IO_MAINLOOP) {
+ // Create our own MediaStreamManager.
+ audio_manager_.reset(media::AudioManager::Create());
+ media_stream_manager_.reset(new MediaStreamManager(audio_manager_.get()));
+ // Make sure we use fake devices to avoid long delays.
+ media_stream_manager_->UseFakeDevice();
+
+ host_ = new MockMediaStreamDispatcherHost(base::MessageLoopProxy::current(),
+ media_stream_manager_.get());
+
+ // Use the fake content client and browser.
+ content_client_.reset(new TestContentClient());
+ SetContentClient(content_client_.get());
+ old_browser_client_ = SetBrowserClientForTesting(host_.get());
+ }
+
+ virtual ~MediaStreamDispatcherHostTest() {
+ // Recover the old browser client and content client.
+ SetBrowserClientForTesting(old_browser_client_);
+ content_client_.reset();
+ media_stream_manager_->WillDestroyCurrentMessageLoop();
+ }
+
+ protected:
+ virtual void SetupFakeUI(bool expect_started) {
+ scoped_ptr<MockMediaStreamUIProxy> stream_ui(new MockMediaStreamUIProxy());
+ if (expect_started) {
+ EXPECT_CALL(*stream_ui, OnStarted(_));
+ }
+ media_stream_manager_->UseFakeUI(
+ stream_ui.PassAs<FakeMediaStreamUIProxy>());
+ }
+
+ void GenerateStreamAndWaitForResult(int page_request_id,
+ const StreamOptions& options) {
+ base::RunLoop run_loop;
+ host_->OnGenerateStream(page_request_id, options, run_loop.QuitClosure());
+ run_loop.Run();
+ }
+
+ scoped_refptr<MockMediaStreamDispatcherHost> host_;
+ scoped_ptr<media::AudioManager> audio_manager_;
+ scoped_ptr<MediaStreamManager> media_stream_manager_;
+ ContentBrowserClient* old_browser_client_;
+ scoped_ptr<ContentClient> content_client_;
+ content::TestBrowserThreadBundle thread_bundle_;
+};
+
+TEST_F(MediaStreamDispatcherHostTest, GenerateStream) {
+ StreamOptions options(MEDIA_NO_SERVICE, MEDIA_DEVICE_VIDEO_CAPTURE);
+
+ SetupFakeUI(true);
+ EXPECT_CALL(*host_.get(), OnStreamGenerated(kRenderId, kPageRequestId, 0, 1));
+ GenerateStreamAndWaitForResult(kPageRequestId, options);
+
+ std::string label = host_->label_;
+
+ EXPECT_EQ(host_->audio_devices_.size(), 0u);
+ EXPECT_EQ(host_->video_devices_.size(), 1u);
+ EXPECT_EQ(host_->NumberOfStreams(), 1u);
+
+ host_->OnStopGeneratedStream(label);
+ EXPECT_EQ(host_->NumberOfStreams(), 0u);
+}
+
+TEST_F(MediaStreamDispatcherHostTest, GenerateThreeStreams) {
+  // This test opens three video capture devices. Two fake devices exist, so
+  // the last call to |Open()| is expected to open the first device again, but
+  // with a different label.
+ StreamOptions options(MEDIA_NO_SERVICE, MEDIA_DEVICE_VIDEO_CAPTURE);
+
+ // Generate first stream.
+ SetupFakeUI(true);
+ EXPECT_CALL(*host_.get(), OnStreamGenerated(kRenderId, kPageRequestId, 0, 1));
+ GenerateStreamAndWaitForResult(kPageRequestId, options);
+
+ // Check the latest generated stream.
+ EXPECT_EQ(host_->audio_devices_.size(), 0u);
+ EXPECT_EQ(host_->video_devices_.size(), 1u);
+ std::string label1 = host_->label_;
+ std::string device_id1 = host_->video_devices_.front().device.id;
+
+  // Check that we now have one opened stream.
+ EXPECT_EQ(host_->NumberOfStreams(), 1u);
+
+ // Generate second stream.
+ SetupFakeUI(true);
+ EXPECT_CALL(*host_.get(),
+ OnStreamGenerated(kRenderId, kPageRequestId + 1, 0, 1));
+ GenerateStreamAndWaitForResult(kPageRequestId + 1, options);
+
+ // Check the latest generated stream.
+ EXPECT_EQ(host_->audio_devices_.size(), 0u);
+ EXPECT_EQ(host_->video_devices_.size(), 1u);
+ std::string label2 = host_->label_;
+ std::string device_id2 = host_->video_devices_.front().device.id;
+ EXPECT_EQ(device_id1, device_id2);
+ EXPECT_NE(label1, label2);
+
+ // Check that we now have two opened streams.
+ EXPECT_EQ(2u, host_->NumberOfStreams());
+
+ // Generate third stream.
+ SetupFakeUI(true);
+ EXPECT_CALL(*host_.get(),
+ OnStreamGenerated(kRenderId, kPageRequestId + 2, 0, 1));
+ GenerateStreamAndWaitForResult(kPageRequestId + 2, options);
+
+ // Check the latest generated stream.
+ EXPECT_EQ(host_->audio_devices_.size(), 0u);
+ EXPECT_EQ(host_->video_devices_.size(), 1u);
+ std::string label3 = host_->label_;
+ std::string device_id3 = host_->video_devices_.front().device.id;
+ EXPECT_EQ(device_id1, device_id3);
+ EXPECT_NE(label1, label3);
+ EXPECT_NE(label2, label3);
+
+ // Check that we now have three opened streams.
+ EXPECT_EQ(host_->NumberOfStreams(), 3u);
+
+ host_->OnStopGeneratedStream(label1);
+ host_->OnStopGeneratedStream(label2);
+ host_->OnStopGeneratedStream(label3);
+ EXPECT_EQ(host_->NumberOfStreams(), 0u);
+}
+
+TEST_F(MediaStreamDispatcherHostTest, FailOpenVideoDevice) {
+ StreamOptions options(MEDIA_NO_SERVICE, MEDIA_DEVICE_VIDEO_CAPTURE);
+
+ media::FakeVideoCaptureDevice::SetFailNextCreate();
+ SetupFakeUI(false);
+ EXPECT_CALL(*host_.get(),
+ OnStreamGenerationFailed(kRenderId, kPageRequestId));
+ GenerateStreamAndWaitForResult(kPageRequestId, options);
+}
+
+TEST_F(MediaStreamDispatcherHostTest, CancelPendingStreamsOnChannelClosing) {
+ StreamOptions options(MEDIA_NO_SERVICE, MEDIA_DEVICE_VIDEO_CAPTURE);
+
+ base::RunLoop run_loop;
+
+ // Create multiple GenerateStream requests.
+ size_t streams = 5;
+ for (size_t i = 1; i <= streams; ++i) {
+ host_->OnGenerateStream(
+ kPageRequestId + i, options, run_loop.QuitClosure());
+ EXPECT_EQ(host_->NumberOfStreams(), i);
+ }
+
+  // Call OnChannelClosing() to cancel all the pending requests.
+ host_->OnChannelClosing();
+ run_loop.RunUntilIdle();
+
+ // Streams should have been cleaned up.
+ EXPECT_EQ(host_->NumberOfStreams(), 0u);
+}
+
+TEST_F(MediaStreamDispatcherHostTest, StopGeneratedStreamsOnChannelClosing) {
+ StreamOptions options(MEDIA_NO_SERVICE, MEDIA_DEVICE_VIDEO_CAPTURE);
+
+ // Create first group of streams.
+ size_t generated_streams = 3;
+ for (size_t i = 0; i < generated_streams; ++i) {
+ SetupFakeUI(true);
+ EXPECT_CALL(*host_.get(),
+ OnStreamGenerated(kRenderId, kPageRequestId + i, 0, 1));
+ GenerateStreamAndWaitForResult(kPageRequestId + i, options);
+ }
+ EXPECT_EQ(host_->NumberOfStreams(), generated_streams);
+
+  // Call OnChannelClosing() to cancel all the pending/generated streams.
+ host_->OnChannelClosing();
+ base::RunLoop().RunUntilIdle();
+
+ // Streams should have been cleaned up.
+ EXPECT_EQ(host_->NumberOfStreams(), 0u);
+}
+
+TEST_F(MediaStreamDispatcherHostTest, CloseFromUI) {
+ StreamOptions options(MEDIA_NO_SERVICE, MEDIA_DEVICE_VIDEO_CAPTURE);
+
+ base::Closure close_callback;
+ scoped_ptr<MockMediaStreamUIProxy> stream_ui(new MockMediaStreamUIProxy());
+ EXPECT_CALL(*stream_ui, OnStarted(_))
+ .WillOnce(SaveArg<0>(&close_callback));
+ media_stream_manager_->UseFakeUI(stream_ui.PassAs<FakeMediaStreamUIProxy>());
+
+ EXPECT_CALL(*host_.get(), OnStreamGenerated(kRenderId, kPageRequestId, 0, 1));
+ EXPECT_CALL(*host_.get(), OnStopGeneratedStreamFromBrowser(kRenderId));
+ GenerateStreamAndWaitForResult(kPageRequestId, options);
+
+ EXPECT_EQ(host_->audio_devices_.size(), 0u);
+ EXPECT_EQ(host_->video_devices_.size(), 1u);
+ EXPECT_EQ(host_->NumberOfStreams(), 1u);
+
+ ASSERT_FALSE(close_callback.is_null());
+ close_callback.Run();
+ base::RunLoop().RunUntilIdle();
+
+ EXPECT_EQ(host_->NumberOfStreams(), 0u);
+}
+
+}  // namespace content
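The tests above block on base::RunLoop until the mocked Send() observes the expected reply and runs the saved quit closure. A plain-C++ analogue of that wait-for-reply pattern, assuming a toy single-threaded task queue instead of the real message loop (illustrative only):

// Hypothetical sketch: hand out a quit closure, post the "reply" as a task,
// and run until the closure has been executed.
#include <functional>
#include <iostream>
#include <queue>

class MiniRunLoop {
 public:
  std::function<void()> QuitClosure() {
    return [this] { quit_ = true; };
  }
  void PostTask(std::function<void()> task) { tasks_.push(std::move(task)); }
  void Run() {
    while (!quit_ && !tasks_.empty()) {
      auto task = std::move(tasks_.front());
      tasks_.pop();
      task();
    }
  }

 private:
  std::queue<std::function<void()>> tasks_;
  bool quit_ = false;
};

int main() {
  MiniRunLoop loop;
  std::function<void()> quit = loop.QuitClosure();

  // Simulate the asynchronous StreamGenerated reply arriving later.
  loop.PostTask([&] {
    std::cout << "StreamGenerated received\n";
    quit();  // Equivalent of posting the saved quit_closure_ above.
  });

  loop.Run();  // Returns once the reply has been handled.
  return 0;
}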
diff --git a/chromium/content/browser/renderer_host/media/media_stream_manager.cc b/chromium/content/browser/renderer_host/media/media_stream_manager.cc
new file mode 100644
index 00000000000..449331198bd
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/media_stream_manager.cc
@@ -0,0 +1,1115 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "content/browser/renderer_host/media/media_stream_manager.h"
+
+#include <list>
+
+#include "base/bind.h"
+#include "base/command_line.h"
+#include "base/compiler_specific.h"
+#include "base/logging.h"
+#include "base/rand_util.h"
+#include "base/threading/thread.h"
+#include "content/browser/renderer_host/media/audio_input_device_manager.h"
+#include "content/browser/renderer_host/media/device_request_message_filter.h"
+#include "content/browser/renderer_host/media/media_stream_requester.h"
+#include "content/browser/renderer_host/media/media_stream_ui_proxy.h"
+#include "content/browser/renderer_host/media/video_capture_manager.h"
+#include "content/browser/renderer_host/media/web_contents_capture_util.h"
+#include "content/public/browser/browser_thread.h"
+#include "content/public/browser/content_browser_client.h"
+#include "content/public/browser/media_observer.h"
+#include "content/public/browser/media_request_state.h"
+#include "content/public/common/content_switches.h"
+#include "content/public/common/media_stream_request.h"
+#include "media/audio/audio_manager_base.h"
+#include "media/audio/audio_parameters.h"
+#include "media/base/channel_layout.h"
+#include "url/gurl.h"
+
+#if defined(OS_WIN)
+#include "base/win/scoped_com_initializer.h"
+#endif
+
+namespace content {
+
+// Creates a random label used to identify requests.
+static std::string RandomLabel() {
+ // An earlier PeerConnection spec,
+ // http://dev.w3.org/2011/webrtc/editor/webrtc.html, specified the
+ // MediaStream::label alphabet as containing 36 characters from
+ // range: U+0021, U+0023 to U+0027, U+002A to U+002B, U+002D to U+002E,
+ // U+0030 to U+0039, U+0041 to U+005A, U+005E to U+007E.
+ // Here we use a safe subset.
+ static const char kAlphabet[] = "0123456789"
+ "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
+
+ std::string label(36, ' ');
+ for (size_t i = 0; i < label.size(); ++i) {
+ int random_char = base::RandGenerator(sizeof(kAlphabet) - 1);
+ label[i] = kAlphabet[random_char];
+ }
+ return label;
+}
+
+// Returns true if |stream_type| is requested as audio or video in |request|.
+static bool Requested(const MediaStreamRequest& request,
+ MediaStreamType stream_type) {
+ return (request.audio_type == stream_type ||
+ request.video_type == stream_type);
+}
+
+// TODO(xians): Merge DeviceRequest with MediaStreamRequest.
+class MediaStreamManager::DeviceRequest {
+ public:
+ DeviceRequest(MediaStreamRequester* requester,
+ const MediaStreamRequest& request)
+ : requester(requester),
+ request(request),
+ state_(NUM_MEDIA_TYPES, MEDIA_REQUEST_STATE_NOT_REQUESTED) {
+ }
+
+ ~DeviceRequest() {}
+
+ // Update the request state and notify observers.
+ void SetState(MediaStreamType stream_type, MediaRequestState new_state) {
+ if (stream_type == NUM_MEDIA_TYPES) {
+ for (int i = MEDIA_NO_SERVICE + 1; i < NUM_MEDIA_TYPES; ++i) {
+ const MediaStreamType stream_type = static_cast<MediaStreamType>(i);
+ state_[stream_type] = new_state;
+ }
+ } else {
+ state_[stream_type] = new_state;
+ }
+
+ if (request.video_type != MEDIA_TAB_VIDEO_CAPTURE &&
+ request.audio_type != MEDIA_TAB_AUDIO_CAPTURE &&
+ new_state != MEDIA_REQUEST_STATE_CLOSING) {
+ return;
+ }
+
+ MediaObserver* media_observer =
+ GetContentClient()->browser()->GetMediaObserver();
+ if (media_observer == NULL)
+ return;
+
+ // If we appended a device_id scheme, we want to remove it when notifying
+ // observers which may be in different modules since this scheme is only
+ // used internally within the content module.
+ std::string device_id =
+ WebContentsCaptureUtil::StripWebContentsDeviceScheme(
+ request.tab_capture_device_id);
+
+ media_observer->OnMediaRequestStateChanged(
+ request.render_process_id, request.render_view_id,
+ request.page_request_id,
+ MediaStreamDevice(stream_type, device_id, device_id), new_state);
+ }
+
+ MediaRequestState state(MediaStreamType stream_type) const {
+ return state_[stream_type];
+ }
+
+ MediaStreamRequester* const requester; // Can be NULL.
+ MediaStreamRequest request;
+
+ StreamDeviceInfoArray devices;
+
+  // Callback used to tell the requester which audio/video devices have been
+  // selected. It can be null if the requester is not interested in the
+  // result. Currently it is only used by the |DEVICE_ACCESS| request type.
+ MediaStreamManager::MediaRequestResponseCallback callback;
+
+ scoped_ptr<MediaStreamUIProxy> ui_proxy;
+
+ private:
+ std::vector<MediaRequestState> state_;
+};
+
+MediaStreamManager::EnumerationCache::EnumerationCache()
+ : valid(false) {
+}
+
+MediaStreamManager::EnumerationCache::~EnumerationCache() {
+}
+
+MediaStreamManager::MediaStreamManager()
+ : audio_manager_(NULL),
+ monitoring_started_(false),
+ io_loop_(NULL),
+ use_fake_ui_(false) {}
+
+MediaStreamManager::MediaStreamManager(media::AudioManager* audio_manager)
+ : audio_manager_(audio_manager),
+ monitoring_started_(false),
+ io_loop_(NULL),
+ use_fake_ui_(false) {
+ DCHECK(audio_manager_);
+ memset(active_enumeration_ref_count_, 0,
+ sizeof(active_enumeration_ref_count_));
+
+  // Some unit tests create the MSM on the IO thread and assume that the
+  // initialization is done synchronously.
+ if (BrowserThread::CurrentlyOn(BrowserThread::IO)) {
+ InitializeDeviceManagersOnIOThread();
+ } else {
+ BrowserThread::PostTask(
+ BrowserThread::IO, FROM_HERE,
+ base::Bind(&MediaStreamManager::InitializeDeviceManagersOnIOThread,
+ base::Unretained(this)));
+ }
+}
+
+MediaStreamManager::~MediaStreamManager() {
+ DCHECK(requests_.empty());
+ DCHECK(!device_thread_.get());
+}
+
+VideoCaptureManager* MediaStreamManager::video_capture_manager() {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ DCHECK(video_capture_manager_.get());
+ return video_capture_manager_.get();
+}
+
+AudioInputDeviceManager* MediaStreamManager::audio_input_device_manager() {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ DCHECK(audio_input_device_manager_.get());
+ return audio_input_device_manager_.get();
+}
+
+std::string MediaStreamManager::MakeMediaAccessRequest(
+ int render_process_id,
+ int render_view_id,
+ int page_request_id,
+ const StreamOptions& options,
+ const GURL& security_origin,
+ const MediaRequestResponseCallback& callback) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ // Create a new request based on options.
+ MediaStreamRequest stream_request(
+ render_process_id, render_view_id, page_request_id, std::string(),
+ security_origin, MEDIA_DEVICE_ACCESS, std::string(), std::string(),
+ options.audio_type, options.video_type);
+ DeviceRequest* request = new DeviceRequest(NULL, stream_request);
+ const std::string& label = AddRequest(request);
+
+ request->callback = callback;
+
+ HandleRequest(label);
+
+ return label;
+}
+
+std::string MediaStreamManager::GenerateStream(
+ MediaStreamRequester* requester,
+ int render_process_id,
+ int render_view_id,
+ int page_request_id,
+ const StreamOptions& options,
+ const GURL& security_origin) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ if (CommandLine::ForCurrentProcess()->HasSwitch(
+ switches::kUseFakeDeviceForMediaStream)) {
+ UseFakeDevice();
+ }
+ if (CommandLine::ForCurrentProcess()->HasSwitch(
+ switches::kUseFakeUIForMediaStream)) {
+ UseFakeUI(scoped_ptr<FakeMediaStreamUIProxy>());
+ }
+
+ int target_render_process_id = render_process_id;
+ int target_render_view_id = render_view_id;
+ std::string tab_capture_device_id;
+
+ // Customize options for a WebContents based capture.
+ if (options.audio_type == MEDIA_TAB_AUDIO_CAPTURE ||
+ options.video_type == MEDIA_TAB_VIDEO_CAPTURE) {
+ // TODO(justinlin): Can't plumb audio mirroring using stream type right
+ // now, so plumbing by device_id. Will revisit once it's refactored.
+ // http://crbug.com/163100
+ tab_capture_device_id =
+ WebContentsCaptureUtil::AppendWebContentsDeviceScheme(
+ !options.video_device_id.empty() ?
+ options.video_device_id : options.audio_device_id);
+
+ bool has_valid_device_id = WebContentsCaptureUtil::ExtractTabCaptureTarget(
+ tab_capture_device_id, &target_render_process_id,
+ &target_render_view_id);
+ if (!has_valid_device_id ||
+ (options.audio_type != MEDIA_TAB_AUDIO_CAPTURE &&
+ options.audio_type != MEDIA_NO_SERVICE) ||
+ (options.video_type != MEDIA_TAB_VIDEO_CAPTURE &&
+ options.video_type != MEDIA_NO_SERVICE)) {
+ LOG(ERROR) << "Invalid request.";
+ return std::string();
+ }
+ }
+
+ std::string translated_audio_device_id;
+ std::string translated_video_device_id;
+ if (options.audio_type == MEDIA_DEVICE_AUDIO_CAPTURE) {
+ bool found_match = TranslateGUIDToRawId(
+ MEDIA_DEVICE_AUDIO_CAPTURE, security_origin, options.audio_device_id,
+ &translated_audio_device_id);
+ DCHECK(found_match || translated_audio_device_id.empty());
+ }
+
+ if (options.video_type == MEDIA_DEVICE_VIDEO_CAPTURE) {
+ bool found_match = TranslateGUIDToRawId(
+ MEDIA_DEVICE_VIDEO_CAPTURE, security_origin, options.video_device_id,
+ &translated_video_device_id);
+ DCHECK(found_match || translated_video_device_id.empty());
+ }
+
+ if (options.video_type == MEDIA_DESKTOP_VIDEO_CAPTURE ||
+ options.audio_type == MEDIA_SYSTEM_AUDIO_CAPTURE) {
+ // For screen capture we only support two valid combinations:
+ // (1) screen video capture only, or
+ // (2) screen video capture with system audio capture.
+ if (options.video_type != MEDIA_DESKTOP_VIDEO_CAPTURE ||
+ (options.audio_type != MEDIA_NO_SERVICE &&
+ options.audio_type != MEDIA_SYSTEM_AUDIO_CAPTURE)) {
+ // TODO(sergeyu): Surface error message to the calling JS code.
+ LOG(ERROR) << "Invalid screen capture request.";
+ return std::string();
+ }
+ translated_video_device_id = options.video_device_id;
+ }
+
+ // Create a new request based on options.
+ MediaStreamRequest stream_request(
+ target_render_process_id, target_render_view_id, page_request_id,
+ tab_capture_device_id, security_origin, MEDIA_GENERATE_STREAM,
+ translated_audio_device_id, translated_video_device_id,
+ options.audio_type, options.video_type);
+ DeviceRequest* request = new DeviceRequest(requester, stream_request);
+ const std::string& label = AddRequest(request);
+ HandleRequest(label);
+ return label;
+}
+
+void MediaStreamManager::CancelRequest(const std::string& label) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ DeviceRequests::iterator it = requests_.find(label);
+ if (it != requests_.end()) {
+ if (!RequestDone(*it->second)) {
+ // TODO(xians): update the |state| to STATE_DONE to trigger a state
+ // changed notification to UI before deleting the request?
+ scoped_ptr<DeviceRequest> request(it->second);
+ RemoveRequest(it);
+ for (int i = MEDIA_NO_SERVICE + 1; i < NUM_MEDIA_TYPES; ++i) {
+ const MediaStreamType stream_type = static_cast<MediaStreamType>(i);
+ MediaStreamProvider* device_manager = GetDeviceManager(stream_type);
+ if (!device_manager)
+ continue;
+ if (request->state(stream_type) != MEDIA_REQUEST_STATE_OPENING &&
+ request->state(stream_type) != MEDIA_REQUEST_STATE_DONE) {
+ continue;
+ }
+ for (StreamDeviceInfoArray::const_iterator device_it =
+ request->devices.begin();
+ device_it != request->devices.end(); ++device_it) {
+ if (device_it->device.type == stream_type) {
+ device_manager->Close(device_it->session_id);
+ }
+ }
+ }
+      // Cancel the request if it is still pending on the UI side.
+ request->SetState(NUM_MEDIA_TYPES, MEDIA_REQUEST_STATE_CLOSING);
+ } else {
+ StopGeneratedStream(label);
+ }
+ }
+}
+
+void MediaStreamManager::StopGeneratedStream(const std::string& label) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ // Find the request and close all open devices for the request.
+ DeviceRequests::iterator it = requests_.find(label);
+ if (it != requests_.end()) {
+ if (it->second->request.request_type == MEDIA_ENUMERATE_DEVICES) {
+ StopEnumerateDevices(label);
+ return;
+ }
+
+ scoped_ptr<DeviceRequest> request(it->second);
+ RemoveRequest(it);
+ for (StreamDeviceInfoArray::const_iterator device_it =
+ request->devices.begin();
+ device_it != request->devices.end(); ++device_it) {
+ GetDeviceManager(device_it->device.type)->Close(device_it->session_id);
+ }
+ if (request->request.request_type == MEDIA_GENERATE_STREAM &&
+ RequestDone(*request)) {
+ // Notify observers that this device is being closed.
+ for (int i = MEDIA_NO_SERVICE + 1; i != NUM_MEDIA_TYPES; ++i) {
+ if (request->state(static_cast<MediaStreamType>(i)) !=
+ MEDIA_REQUEST_STATE_NOT_REQUESTED) {
+ request->SetState(static_cast<MediaStreamType>(i),
+ MEDIA_REQUEST_STATE_CLOSING);
+ }
+ }
+ }
+ }
+}
+
+std::string MediaStreamManager::EnumerateDevices(
+ MediaStreamRequester* requester,
+ int render_process_id,
+ int render_view_id,
+ int page_request_id,
+ MediaStreamType type,
+ const GURL& security_origin) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ DCHECK(type == MEDIA_DEVICE_AUDIO_CAPTURE ||
+ type == MEDIA_DEVICE_VIDEO_CAPTURE);
+
+ // When the requester is NULL, the request is made by the UI to ensure MSM
+ // starts monitoring devices.
+ if (!requester) {
+ if (!monitoring_started_)
+ StartMonitoring();
+
+ return std::string();
+ }
+
+ // Create a new request.
+ StreamOptions options;
+ EnumerationCache* cache = NULL;
+ if (type == MEDIA_DEVICE_AUDIO_CAPTURE) {
+ options.audio_type = type;
+ cache = &audio_enumeration_cache_;
+ } else if (type == MEDIA_DEVICE_VIDEO_CAPTURE) {
+ options.video_type = type;
+ cache = &video_enumeration_cache_;
+ } else {
+ NOTREACHED();
+ return std::string();
+ }
+
+ MediaStreamRequest stream_request(
+ render_process_id, render_view_id, page_request_id, std::string(),
+ security_origin, MEDIA_ENUMERATE_DEVICES, std::string(), std::string(),
+ options.audio_type, options.video_type);
+ DeviceRequest* request = new DeviceRequest(requester, stream_request);
+ const std::string& label = AddRequest(request);
+
+ if (cache->valid) {
+ // Cached device list of this type exists. Just send it out.
+ request->SetState(type, MEDIA_REQUEST_STATE_REQUESTED);
+
+    // Need to post a task since the requester won't have the label until
+    // this function returns.
+ BrowserThread::PostTask(
+ BrowserThread::IO, FROM_HERE,
+ base::Bind(&MediaStreamManager::SendCachedDeviceList,
+ base::Unretained(this), cache, label));
+ } else {
+ StartEnumeration(request);
+ }
+
+ return label;
+}
+
+void MediaStreamManager::StopEnumerateDevices(const std::string& label) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ DeviceRequests::iterator it = requests_.find(label);
+ if (it != requests_.end()) {
+ DCHECK_EQ(it->second->request.request_type, MEDIA_ENUMERATE_DEVICES);
+ // Delete the DeviceRequest.
+ scoped_ptr<DeviceRequest> request(it->second);
+ RemoveRequest(it);
+ }
+}
+
+std::string MediaStreamManager::OpenDevice(
+ MediaStreamRequester* requester,
+ int render_process_id,
+ int render_view_id,
+ int page_request_id,
+ const std::string& device_id,
+ MediaStreamType type,
+ const GURL& security_origin) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ DCHECK(type == MEDIA_DEVICE_AUDIO_CAPTURE ||
+ type == MEDIA_DEVICE_VIDEO_CAPTURE);
+
+ // Create a new request.
+ StreamOptions options;
+ if (IsAudioMediaType(type)) {
+ options.audio_type = type;
+ options.audio_device_id = device_id;
+ } else if (IsVideoMediaType(type)) {
+ options.video_type = type;
+ options.video_device_id = device_id;
+ } else {
+ NOTREACHED();
+ return std::string();
+ }
+
+ MediaStreamRequest stream_request(
+ render_process_id, render_view_id, page_request_id, std::string(),
+ security_origin, MEDIA_OPEN_DEVICE, options.audio_device_id,
+ options.video_device_id, options.audio_type, options.video_type);
+ DeviceRequest* request = new DeviceRequest(requester, stream_request);
+ const std::string& label = AddRequest(request);
+ StartEnumeration(request);
+
+ return label;
+}
+
+void MediaStreamManager::SendCachedDeviceList(
+ EnumerationCache* cache,
+ const std::string& label) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ if (cache->valid) {
+ DeviceRequests::iterator it = requests_.find(label);
+ if (it != requests_.end()) {
+ it->second->requester->DevicesEnumerated(label, cache->devices);
+ }
+ }
+}
+
+void MediaStreamManager::StartMonitoring() {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ if (!base::SystemMonitor::Get())
+ return;
+
+ if (!monitoring_started_) {
+ monitoring_started_ = true;
+ base::SystemMonitor::Get()->AddDevicesChangedObserver(this);
+
+ // Enumerate both the audio and video devices to cache the device lists
+ // and send them to media observer.
+ ++active_enumeration_ref_count_[MEDIA_DEVICE_AUDIO_CAPTURE];
+ audio_input_device_manager_->EnumerateDevices(MEDIA_DEVICE_AUDIO_CAPTURE);
+ ++active_enumeration_ref_count_[MEDIA_DEVICE_VIDEO_CAPTURE];
+ video_capture_manager_->EnumerateDevices(MEDIA_DEVICE_VIDEO_CAPTURE);
+ }
+}
+
+void MediaStreamManager::StopMonitoring() {
+ DCHECK_EQ(base::MessageLoop::current(), io_loop_);
+ if (monitoring_started_) {
+ base::SystemMonitor::Get()->RemoveDevicesChangedObserver(this);
+ monitoring_started_ = false;
+ ClearEnumerationCache(&audio_enumeration_cache_);
+ ClearEnumerationCache(&video_enumeration_cache_);
+ }
+}
+
+bool MediaStreamManager::TranslateGUIDToRawId(MediaStreamType stream_type,
+ const GURL& security_origin,
+ const std::string& device_guid,
+ std::string* raw_device_id) {
+ DCHECK(stream_type == MEDIA_DEVICE_AUDIO_CAPTURE ||
+ stream_type == MEDIA_DEVICE_VIDEO_CAPTURE);
+ if (device_guid.empty())
+ return false;
+
+ EnumerationCache* cache =
+ stream_type == MEDIA_DEVICE_AUDIO_CAPTURE ?
+ &audio_enumeration_cache_ : &video_enumeration_cache_;
+
+ // If device monitoring hasn't started, the |device_guid| is not valid.
+ if (!cache->valid)
+ return false;
+
+ for (StreamDeviceInfoArray::const_iterator it = cache->devices.begin();
+ it != cache->devices.end();
+ ++it) {
+ if (DeviceRequestMessageFilter::DoesRawIdMatchGuid(
+ security_origin, device_guid, it->device.id)) {
+ *raw_device_id = it->device.id;
+ return true;
+ }
+ }
+ return false;
+}
+
+void MediaStreamManager::ClearEnumerationCache(EnumerationCache* cache) {
+ DCHECK_EQ(base::MessageLoop::current(), io_loop_);
+ cache->valid = false;
+}
+
+void MediaStreamManager::StartEnumeration(DeviceRequest* request) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ // Start monitoring the devices when doing the first enumeration.
+ if (!monitoring_started_ && base::SystemMonitor::Get()) {
+ StartMonitoring();
+ }
+
+ // Start enumeration for devices of all requested device types.
+ for (int i = MEDIA_NO_SERVICE + 1; i < NUM_MEDIA_TYPES; ++i) {
+ const MediaStreamType stream_type = static_cast<MediaStreamType>(i);
+ if (Requested(request->request, stream_type)) {
+ request->SetState(stream_type, MEDIA_REQUEST_STATE_REQUESTED);
+ DCHECK_GE(active_enumeration_ref_count_[stream_type], 0);
+ if (active_enumeration_ref_count_[stream_type] == 0) {
+ ++active_enumeration_ref_count_[stream_type];
+ GetDeviceManager(stream_type)->EnumerateDevices(stream_type);
+ }
+ }
+ }
+}
+
+std::string MediaStreamManager::AddRequest(DeviceRequest* request) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ // Create a label for this request and verify it is unique.
+ std::string unique_label;
+ do {
+ unique_label = RandomLabel();
+ } while (requests_.find(unique_label) != requests_.end());
+
+ requests_.insert(std::make_pair(unique_label, request));
+
+ return unique_label;
+}
+
+void MediaStreamManager::RemoveRequest(DeviceRequests::iterator it) {
+ requests_.erase(it);
+}
+
+void MediaStreamManager::PostRequestToUI(const std::string& label) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ DeviceRequest* request = requests_[label];
+
+ if (use_fake_ui_) {
+ if (!fake_ui_)
+ fake_ui_.reset(new FakeMediaStreamUIProxy());
+
+ MediaStreamDevices devices;
+ if (audio_enumeration_cache_.valid) {
+ for (StreamDeviceInfoArray::const_iterator it =
+ audio_enumeration_cache_.devices.begin();
+ it != audio_enumeration_cache_.devices.end(); ++it) {
+ devices.push_back(it->device);
+ }
+ }
+ if (video_enumeration_cache_.valid) {
+ for (StreamDeviceInfoArray::const_iterator it =
+ video_enumeration_cache_.devices.begin();
+ it != video_enumeration_cache_.devices.end(); ++it) {
+ devices.push_back(it->device);
+ }
+ }
+
+ fake_ui_->SetAvailableDevices(devices);
+
+ request->ui_proxy = fake_ui_.Pass();
+ } else {
+ request->ui_proxy = MediaStreamUIProxy::Create();
+ }
+
+ request->ui_proxy->RequestAccess(
+ request->request,
+ base::Bind(&MediaStreamManager::HandleAccessRequestResponse,
+ base::Unretained(this), label));
+}
+
+void MediaStreamManager::HandleRequest(const std::string& label) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ DeviceRequest* request = requests_[label];
+
+ const MediaStreamType audio_type = request->request.audio_type;
+ const MediaStreamType video_type = request->request.video_type;
+
+ bool is_web_contents_capture =
+ audio_type == MEDIA_TAB_AUDIO_CAPTURE ||
+ video_type == MEDIA_TAB_VIDEO_CAPTURE;
+
+ bool is_screen_capture =
+ video_type == MEDIA_DESKTOP_VIDEO_CAPTURE;
+
+ if (!is_web_contents_capture &&
+ !is_screen_capture &&
+ ((IsAudioMediaType(audio_type) && !audio_enumeration_cache_.valid) ||
+ (IsVideoMediaType(video_type) && !video_enumeration_cache_.valid))) {
+    // Enumerate the devices if there are no valid device lists to use.
+ StartEnumeration(request);
+ return;
+ }
+
+  // No need to do new device enumerations; post the request to the UI
+  // immediately.
+ if (IsAudioMediaType(audio_type))
+ request->SetState(audio_type, MEDIA_REQUEST_STATE_PENDING_APPROVAL);
+ if (IsVideoMediaType(video_type))
+ request->SetState(video_type, MEDIA_REQUEST_STATE_PENDING_APPROVAL);
+
+ PostRequestToUI(label);
+}
+
+void MediaStreamManager::InitializeDeviceManagersOnIOThread() {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ if (device_thread_)
+ return;
+
+ device_thread_.reset(new base::Thread("MediaStreamDeviceThread"));
+#if defined(OS_WIN)
+ device_thread_->init_com_with_mta(true);
+#endif
+ CHECK(device_thread_->Start());
+
+ audio_input_device_manager_ = new AudioInputDeviceManager(audio_manager_);
+ audio_input_device_manager_->Register(
+ this, device_thread_->message_loop_proxy().get());
+
+ video_capture_manager_ = new VideoCaptureManager();
+ video_capture_manager_->Register(this,
+ device_thread_->message_loop_proxy().get());
+
+ // We want to be notified of IO message loop destruction to delete the thread
+ // and the device managers.
+ io_loop_ = base::MessageLoop::current();
+ io_loop_->AddDestructionObserver(this);
+}
+
+void MediaStreamManager::Opened(MediaStreamType stream_type,
+ int capture_session_id) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ // Find the request containing this device and mark it as used.
+ DeviceRequest* request = NULL;
+ StreamDeviceInfoArray* devices = NULL;
+ std::string label;
+ for (DeviceRequests::iterator request_it = requests_.begin();
+ request_it != requests_.end() && request == NULL; ++request_it) {
+ devices = &(request_it->second->devices);
+ for (StreamDeviceInfoArray::iterator device_it = devices->begin();
+ device_it != devices->end(); ++device_it) {
+ if (device_it->device.type == stream_type &&
+ device_it->session_id == capture_session_id) {
+ // We've found the request.
+ device_it->in_use = true;
+ label = request_it->first;
+ request = request_it->second;
+ break;
+ }
+ }
+ }
+ if (request == NULL) {
+ // The request doesn't exist.
+ return;
+ }
+
+ DCHECK_NE(request->state(stream_type), MEDIA_REQUEST_STATE_REQUESTED);
+
+ // Check if all devices for this stream type are opened. Update the state if
+ // they are.
+ for (StreamDeviceInfoArray::iterator device_it = devices->begin();
+ device_it != devices->end(); ++device_it) {
+ if (device_it->device.type != stream_type) {
+ continue;
+ }
+ if (device_it->in_use == false) {
+ // Wait for more devices to be opened before we're done.
+ return;
+ }
+ }
+
+ request->SetState(stream_type, MEDIA_REQUEST_STATE_DONE);
+
+ if (!RequestDone(*request)) {
+ // This stream_type is done, but not the other type.
+ return;
+ }
+
+ switch (request->request.request_type) {
+ case MEDIA_OPEN_DEVICE:
+ request->requester->DeviceOpened(label, devices->front());
+ break;
+ case MEDIA_GENERATE_STREAM: {
+ // Partition the array of devices into audio vs video.
+ StreamDeviceInfoArray audio_devices, video_devices;
+ for (StreamDeviceInfoArray::iterator device_it = devices->begin();
+ device_it != devices->end(); ++device_it) {
+ if (IsAudioMediaType(device_it->device.type)) {
+ // Store the native audio parameters in the device struct.
+ // TODO(xians): Handle the tab capture sample rate/channel layout
+ // in AudioInputDeviceManager::Open().
+ if (device_it->device.type != content::MEDIA_TAB_AUDIO_CAPTURE) {
+ const StreamDeviceInfo* info =
+ audio_input_device_manager_->GetOpenedDeviceInfoById(
+ device_it->session_id);
+ DCHECK_EQ(info->device.id, device_it->device.id);
+ device_it->device.sample_rate = info->device.sample_rate;
+ device_it->device.channel_layout = info->device.channel_layout;
+ }
+ audio_devices.push_back(*device_it);
+ } else if (IsVideoMediaType(device_it->device.type)) {
+ video_devices.push_back(*device_it);
+ } else {
+ NOTREACHED();
+ }
+ }
+
+ request->requester->StreamGenerated(label, audio_devices, video_devices);
+ request->ui_proxy->OnStarted(
+ base::Bind(&MediaStreamManager::StopStreamFromUI,
+ base::Unretained(this), label));
+ break;
+ }
+ default:
+ NOTREACHED();
+ break;
+ }
+}
+
+void MediaStreamManager::Closed(MediaStreamType stream_type,
+ int capture_session_id) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+}
+
+void MediaStreamManager::DevicesEnumerated(
+ MediaStreamType stream_type, const StreamDeviceInfoArray& devices) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+  // Only update the cached device list when it has changed.
+ bool need_update_clients = false;
+ EnumerationCache* cache =
+ stream_type == MEDIA_DEVICE_AUDIO_CAPTURE ?
+ &audio_enumeration_cache_ : &video_enumeration_cache_;
+ if (!cache->valid ||
+ devices.size() != cache->devices.size() ||
+ !std::equal(devices.begin(), devices.end(), cache->devices.begin(),
+ StreamDeviceInfo::IsEqual)) {
+ cache->valid = true;
+ cache->devices = devices;
+ need_update_clients = true;
+ }
+
+ if (need_update_clients && monitoring_started_)
+ NotifyDevicesChanged(stream_type, devices);
+
+ // Publish the result for all requests waiting for device list(s).
+ // Find the requests waiting for this device list, store their labels and
+ // release the iterator before calling device settings. We might get a call
+ // back from device_settings that will need to iterate through devices.
+ std::list<std::string> label_list;
+ for (DeviceRequests::iterator it = requests_.begin(); it != requests_.end();
+ ++it) {
+ if (it->second->state(stream_type) == MEDIA_REQUEST_STATE_REQUESTED &&
+ Requested(it->second->request, stream_type)) {
+ if (it->second->request.request_type != MEDIA_ENUMERATE_DEVICES)
+ it->second->SetState(stream_type, MEDIA_REQUEST_STATE_PENDING_APPROVAL);
+ label_list.push_back(it->first);
+ }
+ }
+ for (std::list<std::string>::iterator it = label_list.begin();
+ it != label_list.end(); ++it) {
+ DeviceRequest* request = requests_[*it];
+ switch (request->request.request_type) {
+ case MEDIA_ENUMERATE_DEVICES:
+ if (need_update_clients && request->requester)
+ request->requester->DevicesEnumerated(*it, devices);
+ break;
+ default:
+ if (request->state(request->request.audio_type) ==
+ MEDIA_REQUEST_STATE_REQUESTED ||
+ request->state(request->request.video_type) ==
+ MEDIA_REQUEST_STATE_REQUESTED) {
+          // We are doing enumeration for another type of media; wait until it
+          // is all done before posting the request to the UI, because the UI
+          // needs the device lists to handle the request.
+ break;
+ }
+
+ // Post the request to UI for permission approval.
+ PostRequestToUI(*it);
+ break;
+ }
+ }
+ label_list.clear();
+ --active_enumeration_ref_count_[stream_type];
+ DCHECK_GE(active_enumeration_ref_count_[stream_type], 0);
+}
+
+void MediaStreamManager::Error(MediaStreamType stream_type,
+ int capture_session_id,
+ MediaStreamProviderError error) {
+ // Find the device for the error call.
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ for (DeviceRequests::iterator it = requests_.begin(); it != requests_.end();
+ ++it) {
+ StreamDeviceInfoArray& devices = it->second->devices;
+
+ // TODO(miu): BUG. It's possible for the audio (or video) device array in
+ // the "requester" to become out-of-sync with the order of devices we have
+ // here. See http://crbug.com/147650
+ int audio_device_idx = -1;
+ int video_device_idx = -1;
+ for (StreamDeviceInfoArray::iterator device_it = devices.begin();
+ device_it != devices.end(); ++device_it) {
+ if (IsAudioMediaType(device_it->device.type)) {
+ ++audio_device_idx;
+ } else if (IsVideoMediaType(device_it->device.type)) {
+ ++video_device_idx;
+ } else {
+ NOTREACHED();
+ continue;
+ }
+ if (device_it->device.type != stream_type ||
+ device_it->session_id != capture_session_id) {
+ continue;
+ }
+ // We've found the failing device. Find the error case:
+ // An error should only be reported to the MediaStreamManager if
+ // the request has not been fulfilled yet.
+ DCHECK(it->second->state(stream_type) != MEDIA_REQUEST_STATE_DONE);
+ if (it->second->state(stream_type) != MEDIA_REQUEST_STATE_DONE) {
+ // Request is not done, devices are not opened in this case.
+ if (devices.size() <= 1) {
+ scoped_ptr<DeviceRequest> request(it->second);
+ // 1. Device not opened and no other devices for this request ->
+ // signal stream error and remove the request.
+ if (request->requester)
+ request->requester->StreamGenerationFailed(it->first);
+
+ RemoveRequest(it);
+ } else {
+ // 2. Not opened but other devices exists for this request -> remove
+ // device from list, but don't signal an error.
+ devices.erase(device_it); // NOTE: This invalidates device_it!
+ }
+ }
+ return;
+ }
+ }
+}
+
+void MediaStreamManager::HandleAccessRequestResponse(
+ const std::string& label,
+ const MediaStreamDevices& devices) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ DeviceRequests::iterator request_it = requests_.find(label);
+ if (request_it == requests_.end()) {
+ return;
+ }
+
+ // Handle the case when the request was denied.
+ if (devices.empty()) {
+ // Notify the users about the request result.
+ scoped_ptr<DeviceRequest> request(request_it->second);
+ if (request->requester)
+ request->requester->StreamGenerationFailed(label);
+
+ if (request->request.request_type == MEDIA_DEVICE_ACCESS &&
+ !request->callback.is_null()) {
+ request->callback.Run(MediaStreamDevices(), request->ui_proxy.Pass());
+ }
+
+ RemoveRequest(request_it);
+ return;
+ }
+
+ if (request_it->second->request.request_type == MEDIA_DEVICE_ACCESS) {
+ scoped_ptr<DeviceRequest> request(request_it->second);
+ if (!request->callback.is_null())
+ request->callback.Run(devices, request->ui_proxy.Pass());
+
+ // Delete the request since it is done.
+ RemoveRequest(request_it);
+ return;
+ }
+
+ // Process all newly-accepted devices for this request.
+ DeviceRequest* request = request_it->second;
+ bool found_audio = false;
+ bool found_video = false;
+ for (MediaStreamDevices::const_iterator device_it = devices.begin();
+ device_it != devices.end(); ++device_it) {
+ StreamDeviceInfo device_info;
+ device_info.device = *device_it;
+
+ // TODO(justinlin): Nicer way to do this?
+ // Re-append the device's id since we lost it when posting request to UI.
+ if (device_info.device.type == content::MEDIA_TAB_VIDEO_CAPTURE ||
+ device_info.device.type == content::MEDIA_TAB_AUDIO_CAPTURE) {
+ device_info.device.id = request->request.tab_capture_device_id;
+
+ // Initialize the sample_rate and channel_layout here since for audio
+ // mirroring, we don't go through EnumerateDevices where these are usually
+ // initialized.
+ if (device_info.device.type == content::MEDIA_TAB_AUDIO_CAPTURE) {
+ const media::AudioParameters parameters =
+ audio_manager_->GetDefaultOutputStreamParameters();
+ int sample_rate = parameters.sample_rate();
+        // If we weren't able to get the native sample rate, or the sample_rate
+        // is outside the valid range for input devices, set reasonable defaults.
+ if (sample_rate <= 0 || sample_rate > 96000)
+ sample_rate = 44100;
+
+ device_info.device.sample_rate = sample_rate;
+ device_info.device.channel_layout = media::CHANNEL_LAYOUT_STEREO;
+ }
+ }
+
+ // Set in_use to false to be able to track if this device has been
+ // opened. in_use might be true if the device type can be used in more
+ // than one session.
+ device_info.in_use = false;
+
+ device_info.session_id =
+ GetDeviceManager(device_info.device.type)->Open(device_info);
+ request->SetState(device_info.device.type, MEDIA_REQUEST_STATE_OPENING);
+ request->devices.push_back(device_info);
+
+ if (device_info.device.type == request->request.audio_type) {
+ found_audio = true;
+ } else if (device_info.device.type == request->request.video_type) {
+ found_video = true;
+ }
+ }
+
+ // Check whether we've received all stream types requested.
+ if (!found_audio && IsAudioMediaType(request->request.audio_type))
+ request->SetState(request->request.audio_type, MEDIA_REQUEST_STATE_ERROR);
+
+ if (!found_video && IsVideoMediaType(request->request.video_type))
+ request->SetState(request->request.video_type, MEDIA_REQUEST_STATE_ERROR);
+}
+
+void MediaStreamManager::StopStreamFromUI(const std::string& label) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ DeviceRequests::iterator it = requests_.find(label);
+ if (it == requests_.end())
+ return;
+
+ // Notify renderers that the stream has been stopped.
+ if (it->second->requester)
+ it->second->requester->StopGeneratedStream(label);
+
+ StopGeneratedStream(label);
+}
+
+void MediaStreamManager::UseFakeDevice() {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ video_capture_manager()->UseFakeDevice();
+ audio_input_device_manager()->UseFakeDevice();
+}
+
+void MediaStreamManager::UseFakeUI(scoped_ptr<FakeMediaStreamUIProxy> fake_ui) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ use_fake_ui_ = true;
+ fake_ui_ = fake_ui.Pass();
+}
+
+void MediaStreamManager::WillDestroyCurrentMessageLoop() {
+ DCHECK_EQ(base::MessageLoop::current(), io_loop_);
+ DCHECK(requests_.empty());
+ if (device_thread_) {
+ StopMonitoring();
+
+ video_capture_manager_->Unregister();
+ audio_input_device_manager_->Unregister();
+ device_thread_.reset();
+ }
+
+ audio_input_device_manager_ = NULL;
+ video_capture_manager_ = NULL;
+}
+
+void MediaStreamManager::NotifyDevicesChanged(
+ MediaStreamType stream_type,
+ const StreamDeviceInfoArray& devices) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ MediaObserver* media_observer =
+ GetContentClient()->browser()->GetMediaObserver();
+ if (media_observer == NULL)
+ return;
+
+ // Map the devices to MediaStreamDevices.
+ MediaStreamDevices new_devices;
+ for (StreamDeviceInfoArray::const_iterator it = devices.begin();
+ it != devices.end(); ++it) {
+ new_devices.push_back(it->device);
+ }
+
+ if (IsAudioMediaType(stream_type)) {
+ media_observer->OnAudioCaptureDevicesChanged(new_devices);
+ } else if (IsVideoMediaType(stream_type)) {
+ media_observer->OnVideoCaptureDevicesChanged(new_devices);
+ } else {
+ NOTREACHED();
+ }
+}
+
+bool MediaStreamManager::RequestDone(const DeviceRequest& request) const {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ const bool requested_audio = IsAudioMediaType(request.request.audio_type);
+ const bool requested_video = IsVideoMediaType(request.request.video_type);
+
+ const bool audio_done =
+ !requested_audio ||
+ request.state(request.request.audio_type) ==
+ MEDIA_REQUEST_STATE_DONE ||
+ request.state(request.request.audio_type) ==
+ MEDIA_REQUEST_STATE_ERROR;
+ if (!audio_done)
+ return false;
+
+ const bool video_done =
+ !requested_video ||
+ request.state(request.request.video_type) ==
+ MEDIA_REQUEST_STATE_DONE ||
+ request.state(request.request.video_type) ==
+ MEDIA_REQUEST_STATE_ERROR;
+ if (!video_done)
+ return false;
+
+ for (StreamDeviceInfoArray::const_iterator it = request.devices.begin();
+ it != request.devices.end(); ++it) {
+ if (it->in_use == false)
+ return false;
+ }
+
+ return true;
+}
+
+MediaStreamProvider* MediaStreamManager::GetDeviceManager(
+ MediaStreamType stream_type) {
+ if (IsVideoMediaType(stream_type)) {
+ return video_capture_manager();
+ } else if (IsAudioMediaType(stream_type)) {
+ return audio_input_device_manager();
+ }
+ NOTREACHED();
+ return NULL;
+}
+
+void MediaStreamManager::OnDevicesChanged(
+ base::SystemMonitor::DeviceType device_type) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ // NOTE: This method is only called in response to physical audio/video device
+ // changes (from the operating system).
+
+ MediaStreamType stream_type;
+ if (device_type == base::SystemMonitor::DEVTYPE_AUDIO_CAPTURE) {
+ stream_type = MEDIA_DEVICE_AUDIO_CAPTURE;
+ } else if (device_type == base::SystemMonitor::DEVTYPE_VIDEO_CAPTURE) {
+ stream_type = MEDIA_DEVICE_VIDEO_CAPTURE;
+ } else {
+ return; // Uninteresting device change.
+ }
+
+  // Always start a new enumeration even if one is already in progress,
+  // because the in-flight enumeration may have been issued before this
+  // device change.
+ ++active_enumeration_ref_count_[stream_type];
+ GetDeviceManager(stream_type)->EnumerateDevices(stream_type);
+}
+
+} // namespace content
diff --git a/chromium/content/browser/renderer_host/media/media_stream_manager.h b/chromium/content/browser/renderer_host/media/media_stream_manager.h
new file mode 100644
index 00000000000..5a444ef4403
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/media_stream_manager.h
@@ -0,0 +1,265 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// MediaStreamManager is used to open/enumerate media capture devices (audio
+// and video). Call flow:
+// 1. GenerateStream is called when a render process wants to use a capture
+// device.
+// 2. MediaStreamManager will ask MediaStreamUIController for permission to
+// use devices and for which device to use.
+// 3. MediaStreamManager will request the corresponding media device manager(s)
+// to enumerate available devices. The result will be given to
+// MediaStreamUIController.
+// 4. MediaStreamUIController will, by posting the request to the UI, let the
+//    user select which devices to use and send a callback to
+//    MediaStreamManager with the result.
+// 5. MediaStreamManager will call the proper media device manager to open the
+// device and let the MediaStreamRequester know it has been done.
+
+// When enumeration and open are done as separate operations,
+// MediaStreamUIController is not involved as it is in the steps above.
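+//
+// Illustrative sketch of the generate-stream flow on the IO thread
+// (|requester| is a hypothetical MediaStreamRequester implementation, and the
+// other arguments are assumed to come from the render host):
+//
+//   StreamOptions components(MEDIA_DEVICE_AUDIO_CAPTURE,
+//                            MEDIA_DEVICE_VIDEO_CAPTURE);
+//   std::string label = media_stream_manager->GenerateStream(
+//       &requester, render_process_id, render_view_id, page_request_id,
+//       components, security_origin);
+//   // |requester| is notified through StreamGenerated() or
+//   // StreamGenerationFailed(); the stream is closed with
+//   // StopGeneratedStream(label).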
+
+#ifndef CONTENT_BROWSER_RENDERER_HOST_MEDIA_MEDIA_STREAM_MANAGER_H_
+#define CONTENT_BROWSER_RENDERER_HOST_MEDIA_MEDIA_STREAM_MANAGER_H_
+
+#include <map>
+#include <string>
+
+#include "base/basictypes.h"
+#include "base/memory/ref_counted.h"
+#include "base/memory/scoped_ptr.h"
+#include "base/message_loop/message_loop.h"
+#include "base/system_monitor/system_monitor.h"
+#include "content/browser/renderer_host/media/media_stream_provider.h"
+#include "content/common/content_export.h"
+#include "content/common/media/media_stream_options.h"
+
+namespace base {
+class Thread;
+}
+
+namespace media {
+class AudioManager;
+}
+
+namespace content {
+
+class AudioInputDeviceManager;
+class FakeMediaStreamUIProxy;
+class MediaStreamDeviceSettings;
+class MediaStreamRequester;
+class MediaStreamUIProxy;
+class VideoCaptureManager;
+
+// MediaStreamManager is used to generate and close media streams; it does not
+// start the media flow itself.
+// Classes requesting new media streams are answered through the
+// MediaStreamRequester interface.
+class CONTENT_EXPORT MediaStreamManager
+ : public MediaStreamProviderListener,
+ public base::MessageLoop::DestructionObserver,
+ public base::SystemMonitor::DevicesChangedObserver {
+ public:
+  // Callback to deliver the result of a media access request. The request is
+  // identified by the label returned when the request was made.
+ typedef base::Callback<void(const MediaStreamDevices& devices,
+ scoped_ptr<MediaStreamUIProxy> ui)>
+ MediaRequestResponseCallback;
+
+ explicit MediaStreamManager(media::AudioManager* audio_manager);
+ virtual ~MediaStreamManager();
+
+ // Used to access VideoCaptureManager.
+ VideoCaptureManager* video_capture_manager();
+
+ // Used to access AudioInputDeviceManager.
+ AudioInputDeviceManager* audio_input_device_manager();
+
+ // Creates a new media access request which is identified by a unique string
+ // that's returned to the caller. This will trigger the infobar and ask users
+ // for access to the device. |render_process_id| and |render_view_id| refer
+ // to the view where the infobar will appear to the user. |callback| is
+  // used to send the selected devices to the client. An empty device list is
+  // returned if the user denies access.
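+  //
+  // Illustrative sketch (mirrors the usage in the unit tests; OnResponse is a
+  // hypothetical free function with the MediaRequestResponseCallback
+  // signature):
+  //
+  //   std::string label = manager->MakeMediaAccessRequest(
+  //       render_process_id, render_view_id, page_request_id,
+  //       StreamOptions(MEDIA_DEVICE_AUDIO_CAPTURE,
+  //                     MEDIA_DEVICE_VIDEO_CAPTURE),
+  //       security_origin, base::Bind(&OnResponse));
+  //   // The request can be aborted with CancelRequest(label) before the
+  //   // callback runs.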
+ std::string MakeMediaAccessRequest(
+ int render_process_id,
+ int render_view_id,
+ int page_request_id,
+ const StreamOptions& components,
+ const GURL& security_origin,
+ const MediaRequestResponseCallback& callback);
+
+ // GenerateStream opens new media devices according to |components|. It
+ // creates a new request which is identified by a unique string that's
+ // returned to the caller. |render_process_id| and |render_view_id| refer to
+ // the view where the infobar will appear to the user.
+ std::string GenerateStream(MediaStreamRequester* requester,
+ int render_process_id,
+ int render_view_id,
+ int page_request_id,
+ const StreamOptions& components,
+ const GURL& security_origin);
+
+ void CancelRequest(const std::string& label);
+
+ // Closes generated stream.
+ virtual void StopGeneratedStream(const std::string& label);
+
+ // Gets a list of devices of |type|, which must be MEDIA_DEVICE_AUDIO_CAPTURE
+ // or MEDIA_DEVICE_VIDEO_CAPTURE.
+ // The request is identified using the string returned to the caller.
+ // When the |requester| is NULL, MediaStreamManager will enumerate both audio
+ // and video devices and also start monitoring device changes, such as
+ // plug/unplug. The new device lists will be delivered via media observer to
+ // MediaCaptureDevicesDispatcher.
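+  //
+  // Illustrative sketch (|requester| is a hypothetical MediaStreamRequester
+  // implementation; results arrive via
+  // MediaStreamRequester::DevicesEnumerated()):
+  //
+  //   std::string label = manager->EnumerateDevices(
+  //       &requester, render_process_id, render_view_id, page_request_id,
+  //       MEDIA_DEVICE_VIDEO_CAPTURE, security_origin);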
+ virtual std::string EnumerateDevices(MediaStreamRequester* requester,
+ int render_process_id,
+ int render_view_id,
+ int page_request_id,
+ MediaStreamType type,
+ const GURL& security_origin);
+
+ // Open a device identified by |device_id|. |type| must be either
+ // MEDIA_DEVICE_AUDIO_CAPTURE or MEDIA_DEVICE_VIDEO_CAPTURE.
+  // The request is identified using the string returned to the caller.
+ std::string OpenDevice(MediaStreamRequester* requester,
+ int render_process_id,
+ int render_view_id,
+ int page_request_id,
+ const std::string& device_id,
+ MediaStreamType type,
+ const GURL& security_origin);
+
+ // Implements MediaStreamProviderListener.
+ virtual void Opened(MediaStreamType stream_type,
+ int capture_session_id) OVERRIDE;
+ virtual void Closed(MediaStreamType stream_type,
+ int capture_session_id) OVERRIDE;
+ virtual void DevicesEnumerated(MediaStreamType stream_type,
+ const StreamDeviceInfoArray& devices) OVERRIDE;
+ virtual void Error(MediaStreamType stream_type,
+ int capture_session_id,
+ MediaStreamProviderError error) OVERRIDE;
+
+ // Implements base::SystemMonitor::DevicesChangedObserver.
+ virtual void OnDevicesChanged(
+ base::SystemMonitor::DeviceType device_type) OVERRIDE;
+
+  // Used by unit tests to make sure fake devices are used instead of real
+  // devices, which is needed for server-based testing and for tests that pass
+  // --use-fake-device-for-media-stream.
+ void UseFakeDevice();
+
+  // Called by tests to specify a fake UI that should be used for the next
+  // generated stream (or when using --use-fake-ui-for-media-stream).
+ void UseFakeUI(scoped_ptr<FakeMediaStreamUIProxy> fake_ui);
+
+  // This object gets deleted on the UI thread after the IO thread has been
+  // destroyed, so we need to know when the IO thread is being destroyed so
+  // that we can delete the VideoCaptureManager and AudioInputDeviceManager.
+ // We also must call this function explicitly in tests which use
+ // TestBrowserThreadBundle, because the notification happens too late in that
+ // case (see http://crbug.com/247525#c14).
+ virtual void WillDestroyCurrentMessageLoop() OVERRIDE;
+
+ protected:
+ // Used for testing.
+ MediaStreamManager();
+
+ private:
+ // Contains all data needed to keep track of requests.
+ class DeviceRequest;
+
+ // Cache enumerated device list.
+ struct EnumerationCache {
+ EnumerationCache();
+ ~EnumerationCache();
+
+ bool valid;
+ StreamDeviceInfoArray devices;
+ };
+
+ typedef std::map<std::string, DeviceRequest*> DeviceRequests;
+
+ // Initializes the device managers on IO thread. Auto-starts the device
+ // thread and registers this as a listener with the device managers.
+ void InitializeDeviceManagersOnIOThread();
+
+ // Helper for sending up-to-date device lists to media observer when a
+ // capture device is plugged in or unplugged.
+ void NotifyDevicesChanged(MediaStreamType stream_type,
+ const StreamDeviceInfoArray& devices);
+
+ void HandleAccessRequestResponse(const std::string& label,
+ const MediaStreamDevices& devices);
+ void StopStreamFromUI(const std::string& label);
+
+ // Helpers.
+ bool RequestDone(const DeviceRequest& request) const;
+ MediaStreamProvider* GetDeviceManager(MediaStreamType stream_type);
+ void StartEnumeration(DeviceRequest* request);
+ std::string AddRequest(DeviceRequest* request);
+ void RemoveRequest(DeviceRequests::iterator it);
+ void ClearEnumerationCache(EnumerationCache* cache);
+ void PostRequestToUI(const std::string& label);
+ void HandleRequest(const std::string& label);
+
+ // Sends cached device list to a client corresponding to the request
+ // identified by |label|.
+ void SendCachedDeviceList(EnumerationCache* cache, const std::string& label);
+
+  // Stops the device enumeration request identified by |label|.
+ void StopEnumerateDevices(const std::string& label);
+
+ // Helpers to start and stop monitoring devices.
+ void StartMonitoring();
+ void StopMonitoring();
+
+  // Finds the raw device id corresponding to the given |device_guid| and
+  // stores it in |raw_device_id|. Returns true if a raw device id matched the
+  // given |device_guid|, false if nothing matched it.
+ bool TranslateGUIDToRawId(
+ MediaStreamType stream_type,
+ const GURL& security_origin,
+ const std::string& device_guid,
+ std::string* raw_device_id);
+
+ // Device thread shared by VideoCaptureManager and AudioInputDeviceManager.
+ scoped_ptr<base::Thread> device_thread_;
+
+ media::AudioManager* const audio_manager_; // not owned
+ scoped_refptr<AudioInputDeviceManager> audio_input_device_manager_;
+ scoped_refptr<VideoCaptureManager> video_capture_manager_;
+
+ // Indicator of device monitoring state.
+ bool monitoring_started_;
+
+ // Stores most recently enumerated device lists. The cache is cleared when
+ // monitoring is stopped or there is no request for that type of device.
+ EnumerationCache audio_enumeration_cache_;
+ EnumerationCache video_enumeration_cache_;
+
+ // Keeps track of live enumeration commands sent to VideoCaptureManager or
+ // AudioInputDeviceManager, in order to only enumerate when necessary.
+ int active_enumeration_ref_count_[NUM_MEDIA_TYPES];
+
+  // All non-closed requests.
+ DeviceRequests requests_;
+
+  // Holds a pointer to the IO message loop to check that the device thread
+  // and managers are deleted on the right thread.
+ base::MessageLoop* io_loop_;
+
+ bool screen_capture_active_;
+
+ bool use_fake_ui_;
+ scoped_ptr<FakeMediaStreamUIProxy> fake_ui_;
+
+ DISALLOW_COPY_AND_ASSIGN(MediaStreamManager);
+};
+
+} // namespace content
+
+#endif // CONTENT_BROWSER_RENDERER_HOST_MEDIA_MEDIA_STREAM_MANAGER_H_
diff --git a/chromium/content/browser/renderer_host/media/media_stream_manager_unittest.cc b/chromium/content/browser/renderer_host/media/media_stream_manager_unittest.cc
new file mode 100644
index 00000000000..91413710020
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/media_stream_manager_unittest.cc
@@ -0,0 +1,174 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <string>
+
+#include "base/bind.h"
+#include "base/message_loop/message_loop.h"
+#include "base/run_loop.h"
+#include "content/browser/browser_thread_impl.h"
+#include "content/browser/renderer_host/media/media_stream_manager.h"
+#include "content/browser/renderer_host/media/media_stream_ui_proxy.h"
+#include "content/common/media/media_stream_options.h"
+#include "content/public/test/test_browser_thread_bundle.h"
+#include "media/audio/audio_manager_base.h"
+#if defined(OS_ANDROID)
+#include "media/audio/android/audio_manager_android.h"
+#elif defined(OS_LINUX) || defined(OS_OPENBSD)
+#include "media/audio/linux/audio_manager_linux.h"
+#elif defined(OS_MACOSX)
+#include "media/audio/mac/audio_manager_mac.h"
+#elif defined(OS_WIN)
+#include "media/audio/win/audio_manager_win.h"
+#endif
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using testing::_;
+
+namespace content {
+
+#if defined(OS_LINUX) || defined(OS_OPENBSD)
+typedef media::AudioManagerLinux AudioManagerPlatform;
+#elif defined(OS_MACOSX)
+typedef media::AudioManagerMac AudioManagerPlatform;
+#elif defined(OS_WIN)
+typedef media::AudioManagerWin AudioManagerPlatform;
+#elif defined(OS_ANDROID)
+typedef media::AudioManagerAndroid AudioManagerPlatform;
+#endif
+
+
+// This class mocks the audio manager and overrides the
+// GetAudioInputDeviceNames() method to ensure that we can run our tests on
+// the buildbots.
+class MockAudioManager : public AudioManagerPlatform {
+ public:
+ MockAudioManager() {}
+ virtual ~MockAudioManager() {}
+
+ virtual void GetAudioInputDeviceNames(
+ media::AudioDeviceNames* device_names) OVERRIDE {
+ if (HasAudioInputDevices()) {
+ AudioManagerBase::GetAudioInputDeviceNames(device_names);
+ } else {
+ device_names->push_back(media::AudioDeviceName("fake_device_name",
+ "fake_device_id"));
+ }
+ }
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(MockAudioManager);
+};
+
+class MediaStreamManagerTest : public ::testing::Test {
+ public:
+ MediaStreamManagerTest()
+ : thread_bundle_(content::TestBrowserThreadBundle::IO_MAINLOOP),
+ message_loop_(base::MessageLoopProxy::current()) {
+ // Create our own MediaStreamManager.
+ audio_manager_.reset(new MockAudioManager());
+ media_stream_manager_.reset(new MediaStreamManager(audio_manager_.get()));
+
+ // Use fake devices in order to run on the bots.
+ media_stream_manager_->UseFakeDevice();
+ }
+
+ virtual ~MediaStreamManagerTest() {
+ media_stream_manager_->WillDestroyCurrentMessageLoop();
+ }
+
+ MOCK_METHOD1(Response, void(int index));
+ void ResponseCallback(int index,
+ const MediaStreamDevices& devices,
+ scoped_ptr<MediaStreamUIProxy> ui_proxy) {
+ Response(index);
+ message_loop_->PostTask(FROM_HERE, run_loop_.QuitClosure());
+ }
+
+ protected:
+ std::string MakeMediaAccessRequest(int index) {
+ const int render_process_id = 1;
+ const int render_view_id = 1;
+ const int page_request_id = 1;
+ StreamOptions components(MEDIA_DEVICE_AUDIO_CAPTURE,
+ MEDIA_DEVICE_VIDEO_CAPTURE);
+ const GURL security_origin;
+ MediaStreamManager::MediaRequestResponseCallback callback =
+ base::Bind(&MediaStreamManagerTest::ResponseCallback,
+ base::Unretained(this), index);
+ return media_stream_manager_->MakeMediaAccessRequest(render_process_id,
+ render_view_id,
+ page_request_id,
+ components,
+ security_origin,
+ callback);
+ }
+
+ scoped_ptr<media::AudioManager> audio_manager_;
+ scoped_ptr<MediaStreamManager> media_stream_manager_;
+ content::TestBrowserThreadBundle thread_bundle_;
+ scoped_refptr<base::MessageLoopProxy> message_loop_;
+ base::RunLoop run_loop_;
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(MediaStreamManagerTest);
+};
+
+TEST_F(MediaStreamManagerTest, MakeMediaAccessRequest) {
+ MakeMediaAccessRequest(0);
+
+  // Expect the callback to be triggered; it quits the test.
+ EXPECT_CALL(*this, Response(0));
+ run_loop_.Run();
+}
+
+TEST_F(MediaStreamManagerTest, MakeAndCancelMediaAccessRequest) {
+ std::string label = MakeMediaAccessRequest(0);
+ // No callback is expected.
+ media_stream_manager_->CancelRequest(label);
+}
+
+TEST_F(MediaStreamManagerTest, MakeMultipleRequests) {
+ // First request.
+ std::string label1 = MakeMediaAccessRequest(0);
+
+ // Second request.
+ int render_process_id = 2;
+ int render_view_id = 2;
+ int page_request_id = 2;
+ StreamOptions components(MEDIA_DEVICE_AUDIO_CAPTURE,
+ MEDIA_DEVICE_VIDEO_CAPTURE);
+ GURL security_origin;
+ MediaStreamManager::MediaRequestResponseCallback callback =
+ base::Bind(&MediaStreamManagerTest::ResponseCallback,
+ base::Unretained(this), 1);
+ std::string label2 = media_stream_manager_->MakeMediaAccessRequest(
+ render_process_id,
+ render_view_id,
+ page_request_id,
+ components,
+ security_origin,
+ callback);
+
+  // Expect the callbacks from both requests to be triggered; the test quits
+  // once they have run. Note that the callbacks might arrive in a different
+  // order depending on the values of the labels.
+ EXPECT_CALL(*this, Response(0));
+ EXPECT_CALL(*this, Response(1));
+ run_loop_.Run();
+}
+
+TEST_F(MediaStreamManagerTest, MakeAndCancelMultipleRequests) {
+ std::string label1 = MakeMediaAccessRequest(0);
+ std::string label2 = MakeMediaAccessRequest(1);
+ media_stream_manager_->CancelRequest(label1);
+
+  // Expect the callback from the second request to be triggered; it quits
+  // the test.
+ EXPECT_CALL(*this, Response(1));
+ run_loop_.Run();
+}
+
+} // namespace content
diff --git a/chromium/content/browser/renderer_host/media/media_stream_provider.h b/chromium/content/browser/renderer_host/media/media_stream_provider.h
new file mode 100644
index 00000000000..7f0ff17eb34
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/media_stream_provider.h
@@ -0,0 +1,94 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// MediaStreamProvider is used to capture media of the types defined in
+// MediaStreamType. There is only one MediaStreamProvider instance per media
+// type, and a MediaStreamProvider instance can have only one registered
+// listener.
+// MediaStreamProvider implementations are expected to be called on the
+// browser IO thread, and the listener is called on the same thread.
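+//
+// Illustrative sketch of how a manager drives a provider (|listener| is a
+// pointer to a hypothetical MediaStreamProviderListener implementation and
+// |device_message_loop| is the device thread's message loop proxy):
+//
+//   provider->Register(listener, device_message_loop);
+//   provider->EnumerateDevices(MEDIA_DEVICE_VIDEO_CAPTURE);
+//   // The listener receives DevicesEnumerated() with the result.
+//   int session_id = provider->Open(device_info);
+//   // The listener receives Opened() once the device is open.
+//   provider->Close(session_id);
+//   provider->Unregister();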
+
+#ifndef CONTENT_BROWSER_RENDERER_HOST_MEDIA_MEDIA_STREAM_PROVIDER_H_
+#define CONTENT_BROWSER_RENDERER_HOST_MEDIA_MEDIA_STREAM_PROVIDER_H_
+
+#include <list>
+#include <string>
+
+#include "base/memory/ref_counted.h"
+#include "content/common/content_export.h"
+#include "content/common/media/media_stream_options.h"
+
+namespace base {
+class MessageLoopProxy;
+}
+
+namespace content {
+
+enum MediaStreamProviderError {
+ kMediaStreamOk = 0,
+ kInvalidMediaStreamType,
+ kInvalidSession,
+ kUnknownSession,
+ kDeviceNotAvailable,
+ kDeviceAlreadyInUse,
+ kUnknownError
+};
+
+enum { kInvalidMediaCaptureSessionId = 0xFFFFFFFF };
+
+// Callback class used by MediaStreamProvider.
+class CONTENT_EXPORT MediaStreamProviderListener {
+ public:
+ // Called by a MediaStreamProvider when a stream has been opened.
+ virtual void Opened(MediaStreamType stream_type,
+ int capture_session_id) = 0;
+
+ // Called by a MediaStreamProvider when a stream has been closed.
+ virtual void Closed(MediaStreamType stream_type,
+ int capture_session_id) = 0;
+
+  // Called by a MediaStreamProvider when the available devices have been
+  // enumerated.
+ virtual void DevicesEnumerated(MediaStreamType stream_type,
+ const StreamDeviceInfoArray& devices) = 0;
+
+  // Called by a MediaStreamProvider when an error has occurred.
+ virtual void Error(MediaStreamType stream_type,
+ int capture_session_id,
+ MediaStreamProviderError error) = 0;
+
+ protected:
+ virtual ~MediaStreamProviderListener() {}
+};
+
+// Implemented by a manager class providing captured media.
+class CONTENT_EXPORT MediaStreamProvider
+ : public base::RefCountedThreadSafe<MediaStreamProvider> {
+ public:
+ // Registers a listener and a device message loop.
+ virtual void Register(MediaStreamProviderListener* listener,
+ base::MessageLoopProxy* device_thread_loop) = 0;
+
+ // Unregisters the previously registered listener.
+ virtual void Unregister() = 0;
+
+ // Enumerates existing capture devices and calls |DevicesEnumerated|.
+ virtual void EnumerateDevices(MediaStreamType stream_type) = 0;
+
+ // Opens the specified device. The device is not started and it is still
+ // possible for other applications to open the device before the device is
+ // started. |Opened| is called when the device is opened.
+ // kInvalidMediaCaptureSessionId is returned on error.
+ virtual int Open(const StreamDeviceInfo& device) = 0;
+
+ // Closes the specified device and calls |Closed| when done.
+ virtual void Close(int capture_session_id) = 0;
+
+ protected:
+ friend class base::RefCountedThreadSafe<MediaStreamProvider>;
+ virtual ~MediaStreamProvider() {}
+};
+
+} // namespace content
+
+#endif // CONTENT_BROWSER_RENDERER_HOST_MEDIA_MEDIA_STREAM_PROVIDER_H_
diff --git a/chromium/content/browser/renderer_host/media/media_stream_requester.h b/chromium/content/browser/renderer_host/media/media_stream_requester.h
new file mode 100644
index 00000000000..50858fa3393
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/media_stream_requester.h
@@ -0,0 +1,44 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CONTENT_BROWSER_RENDERER_HOST_MEDIA_MEDIA_STREAM_REQUESTER_H_
+#define CONTENT_BROWSER_RENDERER_HOST_MEDIA_MEDIA_STREAM_REQUESTER_H_
+
+#include <string>
+
+#include "content/common/content_export.h"
+#include "content/common/media/media_stream_options.h"
+
+namespace content {
+
+// MediaStreamRequester must be implemented by the class requesting a new media
+// stream to be opened. MediaStreamManager will use this interface to signal
+// success and error for a request.
+class CONTENT_EXPORT MediaStreamRequester {
+ public:
+ // Called as a reply of a successful call to GenerateStream.
+ virtual void StreamGenerated(const std::string& label,
+ const StreamDeviceInfoArray& audio_devices,
+ const StreamDeviceInfoArray& video_devices) = 0;
+ // Called if GenerateStream failed.
+ virtual void StreamGenerationFailed(const std::string& label) = 0;
+
+ // Called if stream has been stopped by user request.
+ virtual void StopGeneratedStream(const std::string& label) = 0;
+
+ // Called as a reply of a successful call to EnumerateDevices.
+ virtual void DevicesEnumerated(const std::string& label,
+ const StreamDeviceInfoArray& devices) = 0;
+ // Called as a reply of a successful call to OpenDevice.
+ virtual void DeviceOpened(const std::string& label,
+ const StreamDeviceInfo& device_info) = 0;
+
+ protected:
+ virtual ~MediaStreamRequester() {
+ }
+};
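+
+// Illustrative sketch of a minimal (hypothetical) implementation:
+//
+//   class MyRequester : public MediaStreamRequester {
+//    public:
+//     virtual void StreamGenerated(
+//         const std::string& label,
+//         const StreamDeviceInfoArray& audio_devices,
+//         const StreamDeviceInfoArray& video_devices) OVERRIDE {}
+//     virtual void StreamGenerationFailed(const std::string& label) OVERRIDE {}
+//     virtual void StopGeneratedStream(const std::string& label) OVERRIDE {}
+//     virtual void DevicesEnumerated(
+//         const std::string& label,
+//         const StreamDeviceInfoArray& devices) OVERRIDE {}
+//     virtual void DeviceOpened(const std::string& label,
+//                               const StreamDeviceInfo& info) OVERRIDE {}
+//   };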
+
+} // namespace content
+
+#endif // CONTENT_BROWSER_RENDERER_HOST_MEDIA_MEDIA_STREAM_REQUESTER_H_
diff --git a/chromium/content/browser/renderer_host/media/media_stream_ui_controller_unittest.cc b/chromium/content/browser/renderer_host/media/media_stream_ui_controller_unittest.cc
new file mode 100644
index 00000000000..00a2cc25270
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/media_stream_ui_controller_unittest.cc
@@ -0,0 +1,170 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <string>
+
+#include "base/bind.h"
+#include "base/message_loop/message_loop.h"
+#include "content/browser/browser_thread_impl.h"
+#include "content/browser/renderer_host/media/media_stream_settings_requester.h"
+#include "content/browser/renderer_host/media/media_stream_ui_controller.h"
+#include "content/common/media/media_stream_options.h"
+#include "content/public/common/media_stream_request.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using testing::_;
+
+namespace content {
+
+class MediaStreamDeviceUIControllerTest
+ : public ::testing::Test,
+ public SettingsRequester {
+ public:
+ MediaStreamDeviceUIControllerTest() {}
+
+ // Mock implementation of SettingsRequester.
+ // TODO(sergeyu): Move mock SettingsRequester to a separate class.
+ MOCK_METHOD2(DevicesAccepted, void(
+ const std::string&, const StreamDeviceInfoArray&));
+ MOCK_METHOD1(SettingsError, void(const std::string&));
+ MOCK_METHOD1(StopStreamFromUI, void(const std::string&));
+ void GetAvailableDevices(MediaStreamDevices* devices) OVERRIDE {
+ devices->push_back(MediaStreamDevice(MEDIA_DEVICE_AUDIO_CAPTURE,
+ "mic",
+ "mic_id",
+ 0,
+ 0));
+ devices->push_back(MediaStreamDevice(MEDIA_DEVICE_VIDEO_CAPTURE,
+ "camera",
+ "camera_id"));
+ }
+
+ protected:
+ virtual void SetUp() {
+ message_loop_.reset(new base::MessageLoop(base::MessageLoop::TYPE_IO));
+ ui_thread_.reset(new BrowserThreadImpl(BrowserThread::UI,
+ message_loop_.get()));
+ io_thread_.reset(new BrowserThreadImpl(BrowserThread::IO,
+ message_loop_.get()));
+ ui_controller_.reset(new MediaStreamUIController(this));
+ }
+
+ virtual void TearDown() {
+ message_loop_->RunUntilIdle();
+ }
+
+ void CreateDummyRequest(const std::string& label, bool audio, bool video) {
+ int dummy_render_process_id = 1;
+ int dummy_render_view_id = 1;
+ StreamOptions components(
+ audio ? MEDIA_DEVICE_AUDIO_CAPTURE : MEDIA_NO_SERVICE,
+ video ? MEDIA_DEVICE_VIDEO_CAPTURE : MEDIA_NO_SERVICE);
+ GURL security_origin;
+ ui_controller_->MakeUIRequest(label,
+ dummy_render_process_id,
+ dummy_render_view_id,
+ components,
+ security_origin,
+ MEDIA_GENERATE_STREAM,
+ std::string());
+ }
+
+ scoped_ptr<base::MessageLoop> message_loop_;
+ scoped_ptr<BrowserThreadImpl> ui_thread_;
+ scoped_ptr<BrowserThreadImpl> io_thread_;
+ scoped_ptr<MediaStreamUIController> ui_controller_;
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(MediaStreamDeviceUIControllerTest);
+};
+
+TEST_F(MediaStreamDeviceUIControllerTest, GenerateRequest) {
+ const std::string label = "dummy_label";
+ CreateDummyRequest(label, true, false);
+
+ // Expecting an error callback triggered by the non-existing
+ // RenderViewHostImpl.
+ EXPECT_CALL(*this, SettingsError(label));
+}
+
+TEST_F(MediaStreamDeviceUIControllerTest, GenerateAndRemoveRequest) {
+ const std::string label = "label";
+ CreateDummyRequest(label, true, false);
+
+  // Removing the current request should not crash.
+ ui_controller_->CancelUIRequest(label);
+}
+
+TEST_F(MediaStreamDeviceUIControllerTest, HandleRequestUsingFakeUI) {
+ ui_controller_->UseFakeUI(scoped_ptr<MediaStreamUI>());
+
+ const std::string label = "label";
+ CreateDummyRequest(label, true, true);
+
+  // The request should be accepted via the fake UI.
+ EXPECT_CALL(*this, DevicesAccepted(label, _));
+
+ message_loop_->RunUntilIdle();
+
+ ui_controller_->NotifyUIIndicatorDevicesClosed(label);
+}
+
+TEST_F(MediaStreamDeviceUIControllerTest, CreateRequestsAndCancelTheFirst) {
+ ui_controller_->UseFakeUI(scoped_ptr<MediaStreamUI>());
+
+ // Create the first audio request.
+ const std::string label_1 = "label_1";
+ CreateDummyRequest(label_1, true, false);
+
+ // Create the second video request.
+ const std::string label_2 = "label_2";
+ CreateDummyRequest(label_2, false, true);
+
+ // Create the third audio and video request.
+ const std::string label_3 = "label_3";
+ CreateDummyRequest(label_3, true, true);
+
+ // Remove the first request which has been brought to the UI.
+ ui_controller_->CancelUIRequest(label_1);
+
+ // We should get callbacks from the rest of the requests.
+ EXPECT_CALL(*this, DevicesAccepted(label_2, _));
+ EXPECT_CALL(*this, DevicesAccepted(label_3, _));
+
+ message_loop_->RunUntilIdle();
+
+ ui_controller_->NotifyUIIndicatorDevicesClosed(label_2);
+ ui_controller_->NotifyUIIndicatorDevicesClosed(label_3);
+}
+
+TEST_F(MediaStreamDeviceUIControllerTest, CreateRequestsAndCancelTheLast) {
+ ui_controller_->UseFakeUI(scoped_ptr<MediaStreamUI>());
+
+ // Create the first audio request.
+ const std::string label_1 = "label_1";
+ CreateDummyRequest(label_1, true, false);
+
+ // Create the second video request.
+ const std::string label_2 = "label_2";
+ CreateDummyRequest(label_2, false, true);
+
+ // Create the third audio and video request.
+ const std::string label_3 = "label_3";
+ CreateDummyRequest(label_3, true, true);
+
+ // Remove the last request which is pending in the queue.
+ ui_controller_->CancelUIRequest(label_3);
+
+ // We should get callbacks from the rest of the requests.
+ EXPECT_CALL(*this, DevicesAccepted(label_1, _));
+ EXPECT_CALL(*this, DevicesAccepted(label_2, _));
+
+ message_loop_->RunUntilIdle();
+
+ ui_controller_->NotifyUIIndicatorDevicesClosed(label_1);
+ ui_controller_->NotifyUIIndicatorDevicesClosed(label_2);
+}
+
+} // namespace content
diff --git a/chromium/content/browser/renderer_host/media/media_stream_ui_proxy.cc b/chromium/content/browser/renderer_host/media/media_stream_ui_proxy.cc
new file mode 100644
index 00000000000..3e4edbc0ac0
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/media_stream_ui_proxy.cc
@@ -0,0 +1,216 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "content/browser/renderer_host/media/media_stream_ui_proxy.h"
+
+#include "content/browser/renderer_host/render_view_host_delegate.h"
+#include "content/browser/renderer_host/render_view_host_impl.h"
+#include "content/public/browser/browser_thread.h"
+#include "media/video/capture/fake_video_capture_device.h"
+
+namespace content {
+
+class MediaStreamUIProxy::Core {
+ public:
+ explicit Core(const base::WeakPtr<MediaStreamUIProxy>& proxy,
+ RenderViewHostDelegate* test_render_delegate);
+ ~Core();
+
+ void RequestAccess(const MediaStreamRequest& request);
+ void OnStarted();
+
+ private:
+ void ProcessAccessRequestResponse(const MediaStreamDevices& devices,
+ scoped_ptr<MediaStreamUI> stream_ui);
+ void ProcessStopRequestFromUI();
+
+ base::WeakPtr<MediaStreamUIProxy> proxy_;
+ scoped_ptr<MediaStreamUI> ui_;
+
+ RenderViewHostDelegate* const test_render_delegate_;
+
+  // A WeakPtr<> is passed to RequestMediaAccessPermission() because there is
+  // no way to cancel media requests.
+ base::WeakPtrFactory<Core> weak_factory_;
+
+ DISALLOW_COPY_AND_ASSIGN(Core);
+};
+
+MediaStreamUIProxy::Core::Core(const base::WeakPtr<MediaStreamUIProxy>& proxy,
+ RenderViewHostDelegate* test_render_delegate)
+ : proxy_(proxy),
+ test_render_delegate_(test_render_delegate),
+ weak_factory_(this) {
+}
+
+MediaStreamUIProxy::Core::~Core() {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
+}
+
+void MediaStreamUIProxy::Core::RequestAccess(
+ const MediaStreamRequest& request) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
+
+ RenderViewHostDelegate* render_delegate;
+
+ if (test_render_delegate_) {
+ render_delegate = test_render_delegate_;
+ } else {
+ RenderViewHostImpl* host = RenderViewHostImpl::FromID(
+ request.render_process_id, request.render_view_id);
+
+ // Tab may have gone away.
+ if (!host || !host->GetDelegate()) {
+ ProcessAccessRequestResponse(
+ MediaStreamDevices(), scoped_ptr<MediaStreamUI>());
+ return;
+ }
+
+ render_delegate = host->GetDelegate();
+ }
+
+ render_delegate->RequestMediaAccessPermission(
+ request, base::Bind(&Core::ProcessAccessRequestResponse,
+ weak_factory_.GetWeakPtr()));
+}
+
+void MediaStreamUIProxy::Core::OnStarted() {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
+ if (ui_) {
+ ui_->OnStarted(base::Bind(&Core::ProcessStopRequestFromUI,
+ base::Unretained(this)));
+ }
+}
+
+void MediaStreamUIProxy::Core::ProcessAccessRequestResponse(
+ const MediaStreamDevices& devices,
+ scoped_ptr<MediaStreamUI> stream_ui) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
+
+ ui_ = stream_ui.Pass();
+ BrowserThread::PostTask(
+ BrowserThread::IO, FROM_HERE,
+ base::Bind(&MediaStreamUIProxy::ProcessAccessRequestResponse,
+ proxy_, devices));
+}
+
+void MediaStreamUIProxy::Core::ProcessStopRequestFromUI() {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
+
+ BrowserThread::PostTask(
+ BrowserThread::IO, FROM_HERE,
+ base::Bind(&MediaStreamUIProxy::ProcessStopRequestFromUI, proxy_));
+}
+
+// static
+scoped_ptr<MediaStreamUIProxy> MediaStreamUIProxy::Create() {
+ return scoped_ptr<MediaStreamUIProxy>(new MediaStreamUIProxy(NULL));
+}
+
+// static
+scoped_ptr<MediaStreamUIProxy> MediaStreamUIProxy::CreateForTests(
+ RenderViewHostDelegate* render_delegate) {
+ return scoped_ptr<MediaStreamUIProxy>(
+ new MediaStreamUIProxy(render_delegate));
+}
+
+MediaStreamUIProxy::MediaStreamUIProxy(
+ RenderViewHostDelegate* test_render_delegate)
+ : weak_factory_(this) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ core_.reset(new Core(weak_factory_.GetWeakPtr(), test_render_delegate));
+}
+
+MediaStreamUIProxy::~MediaStreamUIProxy() {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ BrowserThread::DeleteSoon(BrowserThread::UI, FROM_HERE, core_.release());
+}
+
+void MediaStreamUIProxy::RequestAccess(
+ const MediaStreamRequest& request,
+ const ResponseCallback& response_callback) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ response_callback_ = response_callback;
+ BrowserThread::PostTask(
+ BrowserThread::UI, FROM_HERE,
+ base::Bind(&Core::RequestAccess, base::Unretained(core_.get()), request));
+}
+
+void MediaStreamUIProxy::OnStarted(const base::Closure& stop_callback) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ stop_callback_ = stop_callback;
+ BrowserThread::PostTask(
+ BrowserThread::UI, FROM_HERE,
+ base::Bind(&Core::OnStarted, base::Unretained(core_.get())));
+}
+
+void MediaStreamUIProxy::ProcessAccessRequestResponse(
+ const MediaStreamDevices& devices) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ DCHECK(!response_callback_.is_null());
+
+ ResponseCallback cb = response_callback_;
+ response_callback_.Reset();
+ cb.Run(devices);
+}
+
+void MediaStreamUIProxy::ProcessStopRequestFromUI() {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ DCHECK(!stop_callback_.is_null());
+
+ base::Closure cb = stop_callback_;
+ stop_callback_.Reset();
+ cb.Run();
+}
+
+FakeMediaStreamUIProxy::FakeMediaStreamUIProxy()
+ : MediaStreamUIProxy(NULL) {
+}
+
+FakeMediaStreamUIProxy::~FakeMediaStreamUIProxy() {}
+
+void FakeMediaStreamUIProxy::SetAvailableDevices(
+ const MediaStreamDevices& devices) {
+ devices_ = devices;
+}
+
+void FakeMediaStreamUIProxy::RequestAccess(
+ const MediaStreamRequest& request,
+ const ResponseCallback& response_callback) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ response_callback_ = response_callback;
+
+ MediaStreamDevices devices_to_use;
+ bool accepted_audio = false;
+ bool accepted_video = false;
+ // Use the first capture device of the same media type in the list for the
+ // fake UI.
+ for (MediaStreamDevices::const_iterator it = devices_.begin();
+ it != devices_.end(); ++it) {
+ if (!accepted_audio &&
+ IsAudioMediaType(request.audio_type) &&
+ IsAudioMediaType(it->type)) {
+ devices_to_use.push_back(*it);
+ accepted_audio = true;
+ } else if (!accepted_video &&
+ IsVideoMediaType(request.video_type) &&
+ IsVideoMediaType(it->type)) {
+ devices_to_use.push_back(*it);
+ accepted_video = true;
+ }
+ }
+
+ BrowserThread::PostTask(
+ BrowserThread::IO, FROM_HERE,
+ base::Bind(&MediaStreamUIProxy::ProcessAccessRequestResponse,
+ weak_factory_.GetWeakPtr(), devices_to_use));
+}
+
+void FakeMediaStreamUIProxy::OnStarted(const base::Closure& stop_callback) {
+}
+
+} // namespace content
diff --git a/chromium/content/browser/renderer_host/media/media_stream_ui_proxy.h b/chromium/content/browser/renderer_host/media/media_stream_ui_proxy.h
new file mode 100644
index 00000000000..62bbeb84495
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/media_stream_ui_proxy.h
@@ -0,0 +1,88 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CONTENT_BROWSER_RENDERER_HOST_MEDIA_MEDIA_STREAM_UI_PROXY_H_
+#define CONTENT_BROWSER_RENDERER_HOST_MEDIA_MEDIA_STREAM_UI_PROXY_H_
+
+#include "base/basictypes.h"
+#include "base/callback.h"
+#include "base/memory/scoped_ptr.h"
+#include "base/memory/weak_ptr.h"
+#include "content/public/common/media_stream_request.h"
+
+namespace content {
+
+class RenderViewHostDelegate;
+
+// MediaStreamUIProxy proxies calls to the media stream UI between the IO
+// thread and the UI thread. One instance of this class is created per
+// MediaStream object. It must be created, used, and destroyed on the IO
+// thread.
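+//
+// Illustrative sketch of typical use on the IO thread (|request| is a
+// populated MediaStreamRequest; the bound callbacks are hypothetical):
+//
+//   scoped_ptr<MediaStreamUIProxy> proxy = MediaStreamUIProxy::Create();
+//   proxy->RequestAccess(
+//       request, base::Bind(&OnAccessRequestResponse));
+//   // After the devices have been opened:
+//   proxy->OnStarted(base::Bind(&OnStopRequestedFromUI));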
+class CONTENT_EXPORT MediaStreamUIProxy {
+ public:
+ typedef base::Callback<
+ void (const MediaStreamDevices& devices)> ResponseCallback;
+
+ static scoped_ptr<MediaStreamUIProxy> Create();
+ static scoped_ptr<MediaStreamUIProxy> CreateForTests(
+ RenderViewHostDelegate* render_delegate);
+
+ virtual ~MediaStreamUIProxy();
+
+ // Requests access for the MediaStream by calling
+ // WebContentsDelegate::RequestMediaAccessPermission(). The specified
+ // |response_callback| is called when the WebContentsDelegate approves or
+ // denies request.
+ virtual void RequestAccess(const MediaStreamRequest& request,
+ const ResponseCallback& response_callback);
+
+  // Notifies the UI that the MediaStream has been started. Must be called
+  // after access has been approved using RequestAccess(). |stop_callback| is
+  // called on the IO thread after the user requests the stream to be stopped.
+ virtual void OnStarted(const base::Closure& stop_callback);
+
+ void SetRenderViewHostDelegateForTests(RenderViewHostDelegate* delegate);
+
+ protected:
+ MediaStreamUIProxy(RenderViewHostDelegate* test_render_delegate);
+
+ private:
+ class Core;
+ friend class Core;
+ friend class FakeMediaStreamUIProxy;
+
+ void ProcessAccessRequestResponse(const MediaStreamDevices& devices);
+ void ProcessStopRequestFromUI();
+
+ scoped_ptr<Core> core_;
+ ResponseCallback response_callback_;
+ base::Closure stop_callback_;
+
+ base::WeakPtrFactory<MediaStreamUIProxy> weak_factory_;
+
+ DISALLOW_COPY_AND_ASSIGN(MediaStreamUIProxy);
+};
+
+class CONTENT_EXPORT FakeMediaStreamUIProxy : public MediaStreamUIProxy {
+ public:
+ explicit FakeMediaStreamUIProxy();
+ virtual ~FakeMediaStreamUIProxy();
+
+ void SetAvailableDevices(const MediaStreamDevices& devices);
+
+ // MediaStreamUIProxy overrides.
+ virtual void RequestAccess(
+ const MediaStreamRequest& request,
+ const ResponseCallback& response_callback) OVERRIDE;
+ virtual void OnStarted(const base::Closure& stop_callback) OVERRIDE;
+
+ private:
+ MediaStreamDevices devices_;
+
+ DISALLOW_COPY_AND_ASSIGN(FakeMediaStreamUIProxy);
+};
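+
+// Illustrative sketch of test usage (the device constructor arguments follow
+// the type/name/id form used in the unit tests; MediaStreamManager::UseFakeUI()
+// takes ownership of the fake proxy):
+//
+//   scoped_ptr<FakeMediaStreamUIProxy> fake_ui(new FakeMediaStreamUIProxy());
+//   MediaStreamDevices devices;
+//   devices.push_back(
+//       MediaStreamDevice(MEDIA_DEVICE_AUDIO_CAPTURE, "Mic", "Mic"));
+//   fake_ui->SetAvailableDevices(devices);
+//   media_stream_manager->UseFakeUI(fake_ui.Pass());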
+
+} // namespace content
+
+#endif // CONTENT_BROWSER_RENDERER_HOST_MEDIA_MEDIA_STREAM_UI_PROXY_H_
diff --git a/chromium/content/browser/renderer_host/media/media_stream_ui_proxy_unittest.cc b/chromium/content/browser/renderer_host/media/media_stream_ui_proxy_unittest.cc
new file mode 100644
index 00000000000..82219a6e872
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/media_stream_ui_proxy_unittest.cc
@@ -0,0 +1,219 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "content/browser/renderer_host/media/media_stream_ui_proxy.h"
+
+#include "base/message_loop/message_loop.h"
+#include "content/browser/renderer_host/render_view_host_delegate.h"
+#include "content/public/common/renderer_preferences.h"
+#include "content/public/test/test_browser_thread.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "ui/gfx/rect.h"
+
+using testing::_;
+using testing::SaveArg;
+
+namespace content {
+namespace {
+
+class MockRenderViewHostDelegate : public RenderViewHostDelegate {
+ public:
+ MOCK_METHOD2(RequestMediaAccessPermission,
+ void(const MediaStreamRequest& request,
+ const MediaResponseCallback& callback));
+
+ // Stubs for pure virtual methods we don't care about.
+ virtual gfx::Rect GetRootWindowResizerRect() const OVERRIDE {
+ NOTREACHED();
+ return gfx::Rect();
+ }
+ virtual RendererPreferences GetRendererPrefs(
+ BrowserContext* browser_context) const OVERRIDE {
+ NOTREACHED();
+ return RendererPreferences();
+ }
+};
+
+class MockResponseCallback {
+ public:
+ MOCK_METHOD1(OnAccessRequestResponse,
+ void(const MediaStreamDevices& devices));
+};
+
+class MockMediaStreamUI : public MediaStreamUI {
+ public:
+ MOCK_METHOD1(OnStarted, void(const base::Closure& stop));
+};
+
+class MockStopStreamHandler {
+ public:
+ MOCK_METHOD0(OnStop, void());
+};
+
+
+} // namespace
+
+class MediaStreamUIProxyTest : public testing::Test {
+ public:
+ MediaStreamUIProxyTest()
+ : ui_thread_(BrowserThread::UI, &message_loop_),
+ io_thread_(BrowserThread::IO, &message_loop_) {
+ proxy_ = MediaStreamUIProxy::CreateForTests(&delegate_);
+ }
+
+ virtual ~MediaStreamUIProxyTest() {
+ proxy_.reset();
+ message_loop_.RunUntilIdle();
+ }
+
+ protected:
+ base::MessageLoop message_loop_;
+ TestBrowserThread ui_thread_;
+ TestBrowserThread io_thread_;
+
+ MockRenderViewHostDelegate delegate_;
+ MockResponseCallback response_callback_;
+ scoped_ptr<MediaStreamUIProxy> proxy_;
+};
+
+MATCHER_P(SameRequest, expected, "") {
+ return
+ expected.render_process_id == arg.render_process_id &&
+ expected.render_view_id == arg.render_view_id &&
+ expected.tab_capture_device_id == arg.tab_capture_device_id &&
+ expected.security_origin == arg.security_origin &&
+ expected.request_type == arg.request_type &&
+ expected.requested_audio_device_id == arg.requested_audio_device_id &&
+ expected.requested_video_device_id == arg.requested_video_device_id &&
+ expected.audio_type == arg.audio_type &&
+ expected.video_type == arg.video_type;
+}
+
+TEST_F(MediaStreamUIProxyTest, Deny) {
+ MediaStreamRequest request(0, 0, 0, std::string(), GURL("http://origin/"),
+ MEDIA_GENERATE_STREAM, std::string(),
+ std::string(),
+ MEDIA_DEVICE_AUDIO_CAPTURE,
+ MEDIA_DEVICE_VIDEO_CAPTURE);
+ proxy_->RequestAccess(
+ request, base::Bind(&MockResponseCallback::OnAccessRequestResponse,
+ base::Unretained(&response_callback_)));
+ MediaResponseCallback callback;
+ EXPECT_CALL(delegate_, RequestMediaAccessPermission(SameRequest(request), _))
+ .WillOnce(SaveArg<1>(&callback));
+ message_loop_.RunUntilIdle();
+ ASSERT_FALSE(callback.is_null());
+
+ MediaStreamDevices devices;
+ callback.Run(devices, scoped_ptr<MediaStreamUI>());
+
+ MediaStreamDevices response;
+ EXPECT_CALL(response_callback_, OnAccessRequestResponse(_))
+ .WillOnce(SaveArg<0>(&response));
+ message_loop_.RunUntilIdle();
+
+ EXPECT_TRUE(response.empty());
+}
+
+TEST_F(MediaStreamUIProxyTest, AcceptAndStart) {
+ MediaStreamRequest request(0, 0, 0, std::string(), GURL("http://origin/"),
+ MEDIA_GENERATE_STREAM, std::string(),
+ std::string(),
+ MEDIA_DEVICE_AUDIO_CAPTURE,
+ MEDIA_DEVICE_VIDEO_CAPTURE);
+ proxy_->RequestAccess(
+ request, base::Bind(&MockResponseCallback::OnAccessRequestResponse,
+ base::Unretained(&response_callback_)));
+ MediaResponseCallback callback;
+ EXPECT_CALL(delegate_, RequestMediaAccessPermission(SameRequest(request), _))
+ .WillOnce(SaveArg<1>(&callback));
+ message_loop_.RunUntilIdle();
+ ASSERT_FALSE(callback.is_null());
+
+ MediaStreamDevices devices;
+ devices.push_back(
+ MediaStreamDevice(MEDIA_DEVICE_AUDIO_CAPTURE, "Mic", "Mic"));
+ scoped_ptr<MockMediaStreamUI> ui(new MockMediaStreamUI());
+ EXPECT_CALL(*ui, OnStarted(_));
+ callback.Run(devices, ui.PassAs<MediaStreamUI>());
+
+ MediaStreamDevices response;
+ EXPECT_CALL(response_callback_, OnAccessRequestResponse(_))
+ .WillOnce(SaveArg<0>(&response));
+ message_loop_.RunUntilIdle();
+
+ EXPECT_FALSE(response.empty());
+
+ proxy_->OnStarted(base::Closure());
+ message_loop_.RunUntilIdle();
+}
+
+// Verify that the proxy can be deleted before the request is processed.
+TEST_F(MediaStreamUIProxyTest, DeleteBeforeAccepted) {
+ MediaStreamRequest request(0, 0, 0, std::string(), GURL("http://origin/"),
+ MEDIA_GENERATE_STREAM, std::string(),
+ std::string(),
+ MEDIA_DEVICE_AUDIO_CAPTURE,
+ MEDIA_DEVICE_VIDEO_CAPTURE);
+ proxy_->RequestAccess(
+ request, base::Bind(&MockResponseCallback::OnAccessRequestResponse,
+ base::Unretained(&response_callback_)));
+ MediaResponseCallback callback;
+ EXPECT_CALL(delegate_, RequestMediaAccessPermission(SameRequest(request), _))
+ .WillOnce(SaveArg<1>(&callback));
+ message_loop_.RunUntilIdle();
+ ASSERT_FALSE(callback.is_null());
+
+ proxy_.reset();
+
+ MediaStreamDevices devices;
+ scoped_ptr<MediaStreamUI> ui;
+ callback.Run(devices, ui.Pass());
+}
+
+TEST_F(MediaStreamUIProxyTest, StopFromUI) {
+ MediaStreamRequest request(0, 0, 0, std::string(), GURL("http://origin/"),
+ MEDIA_GENERATE_STREAM, std::string(),
+ std::string(),
+ MEDIA_DEVICE_AUDIO_CAPTURE,
+ MEDIA_DEVICE_VIDEO_CAPTURE);
+ proxy_->RequestAccess(
+ request, base::Bind(&MockResponseCallback::OnAccessRequestResponse,
+ base::Unretained(&response_callback_)));
+ MediaResponseCallback callback;
+ EXPECT_CALL(delegate_, RequestMediaAccessPermission(SameRequest(request), _))
+ .WillOnce(SaveArg<1>(&callback));
+ message_loop_.RunUntilIdle();
+ ASSERT_FALSE(callback.is_null());
+
+ base::Closure stop_callback;
+
+ MediaStreamDevices devices;
+ devices.push_back(
+ MediaStreamDevice(MEDIA_DEVICE_AUDIO_CAPTURE, "Mic", "Mic"));
+ scoped_ptr<MockMediaStreamUI> ui(new MockMediaStreamUI());
+ EXPECT_CALL(*ui, OnStarted(_))
+ .WillOnce(SaveArg<0>(&stop_callback));
+ callback.Run(devices, ui.PassAs<MediaStreamUI>());
+
+ MediaStreamDevices response;
+ EXPECT_CALL(response_callback_, OnAccessRequestResponse(_))
+ .WillOnce(SaveArg<0>(&response));
+ message_loop_.RunUntilIdle();
+
+ EXPECT_FALSE(response.empty());
+
+ MockStopStreamHandler stop_handler;
+ proxy_->OnStarted(base::Bind(&MockStopStreamHandler::OnStop,
+ base::Unretained(&stop_handler)));
+ message_loop_.RunUntilIdle();
+
+ ASSERT_FALSE(stop_callback.is_null());
+ EXPECT_CALL(stop_handler, OnStop());
+ stop_callback.Run();
+ message_loop_.RunUntilIdle();
+}
+
+}  // namespace content
diff --git a/chromium/content/browser/renderer_host/media/midi_dispatcher_host.cc b/chromium/content/browser/renderer_host/media/midi_dispatcher_host.cc
new file mode 100644
index 00000000000..8ecbabcd47d
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/midi_dispatcher_host.cc
@@ -0,0 +1,63 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "content/browser/renderer_host/media/midi_dispatcher_host.h"
+
+#include "base/bind.h"
+#include "content/browser/renderer_host/render_view_host_impl.h"
+#include "content/common/media/midi_messages.h"
+#include "content/public/browser/browser_context.h"
+#include "content/public/browser/browser_thread.h"
+#include "url/gurl.h"
+
+namespace content {
+
+MIDIDispatcherHost::MIDIDispatcherHost(int render_process_id,
+ BrowserContext* browser_context)
+ : render_process_id_(render_process_id),
+ browser_context_(browser_context) {
+}
+
+MIDIDispatcherHost::~MIDIDispatcherHost() {
+}
+
+bool MIDIDispatcherHost::OnMessageReceived(const IPC::Message& message,
+ bool* message_was_ok) {
+ bool handled = true;
+ IPC_BEGIN_MESSAGE_MAP_EX(MIDIDispatcherHost, message, *message_was_ok)
+ IPC_MESSAGE_HANDLER(MIDIHostMsg_RequestSysExPermission,
+ OnRequestSysExPermission)
+ IPC_MESSAGE_UNHANDLED(handled = false)
+ IPC_END_MESSAGE_MAP_EX()
+ return handled;
+}
+
+void MIDIDispatcherHost::OverrideThreadForMessage(
+ const IPC::Message& message, BrowserThread::ID* thread) {
+ if (message.type() == MIDIHostMsg_RequestSysExPermission::ID)
+ *thread = BrowserThread::UI;
+}
+
+void MIDIDispatcherHost::OnRequestSysExPermission(int render_view_id,
+ int client_id,
+ const GURL& origin) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
+
+ browser_context_->RequestMIDISysExPermission(
+ render_process_id_,
+ render_view_id,
+ origin,
+ base::Bind(&MIDIDispatcherHost::WasSysExPermissionGranted,
+ base::Unretained(this),
+ render_view_id,
+ client_id));
+}
+
+void MIDIDispatcherHost::WasSysExPermissionGranted(int render_view_id,
+ int client_id,
+ bool success) {
+ Send(new MIDIMsg_SysExPermissionApproved(render_view_id, client_id, success));
+}
+
+} // namespace content
diff --git a/chromium/content/browser/renderer_host/media/midi_dispatcher_host.h b/chromium/content/browser/renderer_host/media/midi_dispatcher_host.h
new file mode 100644
index 00000000000..ee861551b44
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/midi_dispatcher_host.h
@@ -0,0 +1,48 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CONTENT_BROWSER_RENDERER_HOST_MEDIA_MIDI_DISPATCHER_HOST_H_
+#define CONTENT_BROWSER_RENDERER_HOST_MEDIA_MIDI_DISPATCHER_HOST_H_
+
+#include "content/public/browser/browser_message_filter.h"
+
+class GURL;
+
+namespace content {
+
+class BrowserContext;
+
+// MIDIDispatcherHost handles permissions for using system exclusive messages.
+// It works as a BrowserMessageFilter to handle IPC messages exchanged with
+// MIDIDispatcher, which runs as a RenderViewObserver in the renderer.
+class MIDIDispatcherHost : public BrowserMessageFilter {
+ public:
+ MIDIDispatcherHost(int render_process_id, BrowserContext* browser_context);
+
+ // BrowserMessageFilter implementation.
+ virtual bool OnMessageReceived(const IPC::Message& message,
+ bool* message_was_ok) OVERRIDE;
+ virtual void OverrideThreadForMessage(
+ const IPC::Message& message, BrowserThread::ID* thread) OVERRIDE;
+
+ protected:
+ virtual ~MIDIDispatcherHost();
+
+ private:
+ void OnRequestSysExPermission(int render_view_id,
+ int client_id,
+ const GURL& origin);
+ void WasSysExPermissionGranted(int render_view_id,
+ int client_id,
+ bool success);
+
+ int render_process_id_;
+ BrowserContext* browser_context_;
+
+ DISALLOW_COPY_AND_ASSIGN(MIDIDispatcherHost);
+};
+
+} // namespace content
+
+#endif // CONTENT_BROWSER_RENDERER_HOST_MEDIA_MIDI_DISPATCHER_HOST_H_
diff --git a/chromium/content/browser/renderer_host/media/midi_host.cc b/chromium/content/browser/renderer_host/media/midi_host.cc
new file mode 100644
index 00000000000..6ed473afeff
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/midi_host.cc
@@ -0,0 +1,167 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "content/browser/renderer_host/media/midi_host.h"
+
+#include "base/bind.h"
+#include "base/bind_helpers.h"
+#include "base/debug/trace_event.h"
+#include "base/process/process.h"
+#include "content/browser/browser_main_loop.h"
+#include "content/browser/media/media_internals.h"
+#include "content/common/media/midi_messages.h"
+#include "content/public/browser/content_browser_client.h"
+#include "content/public/browser/media_observer.h"
+#include "media/midi/midi_manager.h"
+
+using media::MIDIManager;
+using media::MIDIPortInfoList;
+
+// The total number of bytes which we're allowed to send to the OS
+// before knowing that they have been successfully sent.
+static const size_t kMaxInFlightBytes = 10 * 1024 * 1024; // 10 MB.
+
+// We keep track of the number of bytes successfully sent to
+// the hardware. Every once in a while we report back to the renderer
+// the number of bytes sent since the last report. This threshold determines
+// how many bytes will be sent before reporting back to the renderer.
+static const size_t kAcknowledgementThresholdBytes = 1024 * 1024; // 1 MB.
+
+static const uint8 kSysExMessage = 0xf0;
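+
+// Note: the checks below compare the first data byte against kSysExMessage
+// with ">=", so all system messages (status bytes 0xf0 through 0xff), not
+// only System Exclusive, are currently filtered out.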
+
+namespace content {
+
+MIDIHost::MIDIHost(media::MIDIManager* midi_manager)
+ : midi_manager_(midi_manager),
+ sent_bytes_in_flight_(0),
+ bytes_sent_since_last_acknowledgement_(0) {
+}
+
+MIDIHost::~MIDIHost() {
+ if (midi_manager_)
+ midi_manager_->EndSession(this);
+}
+
+void MIDIHost::OnChannelClosing() {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ BrowserMessageFilter::OnChannelClosing();
+}
+
+void MIDIHost::OnDestruct() const {
+ BrowserThread::DeleteOnIOThread::Destruct(this);
+}
+
+///////////////////////////////////////////////////////////////////////////////
+// IPC Messages handler
+bool MIDIHost::OnMessageReceived(const IPC::Message& message,
+ bool* message_was_ok) {
+ bool handled = true;
+ IPC_BEGIN_MESSAGE_MAP_EX(MIDIHost, message, *message_was_ok)
+ IPC_MESSAGE_HANDLER(MIDIHostMsg_StartSession, OnStartSession)
+ IPC_MESSAGE_HANDLER(MIDIHostMsg_SendData, OnSendData)
+ IPC_MESSAGE_UNHANDLED(handled = false)
+ IPC_END_MESSAGE_MAP_EX()
+
+ return handled;
+}
+
+void MIDIHost::OnStartSession(int client_id) {
+ MIDIPortInfoList input_ports;
+ MIDIPortInfoList output_ports;
+
+ // Initialize devices and register to receive MIDI data.
+ bool success = false;
+ if (midi_manager_) {
+ success = midi_manager_->StartSession(this);
+ if (success) {
+ input_ports = midi_manager_->input_ports();
+ output_ports = midi_manager_->output_ports();
+ }
+ }
+
+ Send(new MIDIMsg_SessionStarted(
+ client_id,
+ success,
+ input_ports,
+ output_ports));
+}
+
+void MIDIHost::OnSendData(int port,
+ const std::vector<uint8>& data,
+ double timestamp) {
+ if (!midi_manager_)
+ return;
+
+ base::AutoLock auto_lock(in_flight_lock_);
+
+ // Sanity check that we won't send too much.
+ if (sent_bytes_in_flight_ > kMaxInFlightBytes ||
+ data.size() > kMaxInFlightBytes ||
+ data.size() + sent_bytes_in_flight_ > kMaxInFlightBytes)
+ return;
+
+ // For now disallow all System Exclusive messages even if we
+ // have permission.
+ // TODO(toyoshim): allow System Exclusive if browser has granted
+ // this client access. We'll likely need to pass a GURL
+ // here to compare against our permissions.
+ if (data.size() > 0 && data[0] >= kSysExMessage)
+ return;
+
+#if defined(OS_ANDROID)
+ // TODO(toyoshim): figure out why data() method does not compile on Android.
+ NOTIMPLEMENTED();
+#else
+ midi_manager_->DispatchSendMIDIData(
+ this,
+ port,
+ data.data(),
+ data.size(),
+ timestamp);
+#endif
+
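+  // Account for the bytes now in flight; AccumulateMIDIBytesSent() subtracts
+  // them once the platform reports that they have been sent.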
+ sent_bytes_in_flight_ += data.size();
+}
+
+void MIDIHost::ReceiveMIDIData(
+ int port_index,
+ const uint8* data,
+ size_t length,
+ double timestamp) {
+ TRACE_EVENT0("midi", "MIDIHost::ReceiveMIDIData");
+
+ // For now disallow all System Exclusive messages even if we
+ // have permission.
+ // TODO(toyoshim): allow System Exclusive if browser has granted
+ // this client access. We'll likely need to pass a GURL
+ // here to compare against our permissions.
+ if (length > 0 && data[0] >= kSysExMessage)
+ return;
+
+ // Send to the renderer.
+ std::vector<uint8> v(data, data + length);
+ Send(new MIDIMsg_DataReceived(port_index, v, timestamp));
+}
+
+void MIDIHost::AccumulateMIDIBytesSent(size_t n) {
+ {
+ base::AutoLock auto_lock(in_flight_lock_);
+ if (n <= sent_bytes_in_flight_)
+ sent_bytes_in_flight_ -= n;
+ }
+
+ if (bytes_sent_since_last_acknowledgement_ + n >=
+ bytes_sent_since_last_acknowledgement_)
+ bytes_sent_since_last_acknowledgement_ += n;
+
+ if (bytes_sent_since_last_acknowledgement_ >=
+ kAcknowledgementThresholdBytes) {
+ Send(new MIDIMsg_AcknowledgeSentData(
+ bytes_sent_since_last_acknowledgement_));
+ bytes_sent_since_last_acknowledgement_ = 0;
+ }
+}
+
+} // namespace content
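
A minimal standalone sketch of the in-flight throttling that the constants at the top of midi_host.cc describe, assuming a hypothetical InFlightThrottle helper (the class and method names are illustrative, not part of the patch); it only mirrors the overflow-safe cap check in MIDIHost::OnSendData() and the decrement in MIDIHost::AccumulateMIDIBytesSent().

// Hypothetical helper, not part of the patch above.
#include <cstddef>

class InFlightThrottle {
 public:
  InFlightThrottle() : sent_bytes_in_flight_(0) {}

  // Same cap as kMaxInFlightBytes in midi_host.cc (10 MB).
  static const size_t kMaxInFlightBytes = 10 * 1024 * 1024;

  // Mirrors the guard in MIDIHost::OnSendData(): refuse a send that would
  // push the unacknowledged byte count past the cap. The first two checks
  // keep the sum in the third from overflowing.
  bool CanSend(size_t message_size) const {
    return sent_bytes_in_flight_ <= kMaxInFlightBytes &&
           message_size <= kMaxInFlightBytes &&
           sent_bytes_in_flight_ + message_size <= kMaxInFlightBytes;
  }

  void OnSent(size_t message_size) { sent_bytes_in_flight_ += message_size; }

  // Mirrors MIDIHost::AccumulateMIDIBytesSent(): bytes confirmed by the
  // platform MIDI layer reduce the in-flight count.
  void OnCompleted(size_t n) {
    if (n <= sent_bytes_in_flight_)
      sent_bytes_in_flight_ -= n;
  }

 private:
  size_t sent_bytes_in_flight_;
};
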
diff --git a/chromium/content/browser/renderer_host/media/midi_host.h b/chromium/content/browser/renderer_host/media/midi_host.h
new file mode 100644
index 00000000000..f6b2813264e
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/midi_host.h
@@ -0,0 +1,79 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CONTENT_BROWSER_RENDERER_HOST_MEDIA_MIDI_HOST_H_
+#define CONTENT_BROWSER_RENDERER_HOST_MEDIA_MIDI_HOST_H_
+
+#include "base/memory/ref_counted.h"
+#include "base/memory/scoped_ptr.h"
+#include "content/common/content_export.h"
+#include "content/public/browser/browser_message_filter.h"
+#include "content/public/browser/browser_thread.h"
+#include "media/midi/midi_manager.h"
+
+namespace media {
+class MIDIManager;
+}
+
+namespace content {
+
+class CONTENT_EXPORT MIDIHost
+ : public BrowserMessageFilter,
+ public media::MIDIManagerClient {
+ public:
+  // Called on the UI thread by the owner of this object.
+ MIDIHost(media::MIDIManager* midi_manager);
+
+ // BrowserMessageFilter implementation.
+ virtual void OnChannelClosing() OVERRIDE;
+ virtual void OnDestruct() const OVERRIDE;
+ virtual bool OnMessageReceived(const IPC::Message& message,
+ bool* message_was_ok) OVERRIDE;
+
+ // MIDIManagerClient implementation.
+ virtual void ReceiveMIDIData(
+ int port_index,
+ const uint8* data,
+ size_t length,
+ double timestamp) OVERRIDE;
+ virtual void AccumulateMIDIBytesSent(size_t n) OVERRIDE;
+
+ // Start session to access MIDI hardware.
+ void OnStartSession(int client_id);
+
+ // Data to be sent to a MIDI output port.
+ void OnSendData(int port,
+ const std::vector<uint8>& data,
+ double timestamp);
+
+ private:
+ friend class base::DeleteHelper<MIDIHost>;
+ friend class BrowserThread;
+
+ virtual ~MIDIHost();
+
+ // |midi_manager_| talks to the platform-specific MIDI APIs.
+ // It can be NULL if the platform (or our current implementation)
+ // does not support MIDI. If not supported then a call to
+  // OnStartSession() will always refuse access and a call to
+ // OnSendData() will do nothing.
+ media::MIDIManager* const midi_manager_;
+
+ // The number of bytes sent to the platform-specific MIDI sending
+ // system, but not yet completed.
+ size_t sent_bytes_in_flight_;
+
+ // The number of bytes successfully sent since the last time
+ // we've acknowledged back to the renderer.
+ size_t bytes_sent_since_last_acknowledgement_;
+
+ // Protects access to |sent_bytes_in_flight_|.
+ base::Lock in_flight_lock_;
+
+ DISALLOW_COPY_AND_ASSIGN(MIDIHost);
+};
+
+} // namespace content
+
+#endif // CONTENT_BROWSER_RENDERER_HOST_MEDIA_MIDI_HOST_H_
diff --git a/chromium/content/browser/renderer_host/media/mock_media_observer.cc b/chromium/content/browser/renderer_host/media/mock_media_observer.cc
new file mode 100644
index 00000000000..f6d4e56cbab
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/mock_media_observer.cc
@@ -0,0 +1,17 @@
+// Copyright (c) 2011 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "content/browser/renderer_host/media/mock_media_observer.h"
+
+namespace content {
+
+MockMediaObserver::MockMediaObserver() {}
+
+MockMediaObserver::~MockMediaObserver() {}
+
+MockMediaInternals::MockMediaInternals() {}
+
+MockMediaInternals::~MockMediaInternals() {}
+
+} // namespace content
diff --git a/chromium/content/browser/renderer_host/media/mock_media_observer.h b/chromium/content/browser/renderer_host/media/mock_media_observer.h
new file mode 100644
index 00000000000..fc3734744ea
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/mock_media_observer.h
@@ -0,0 +1,60 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CONTENT_BROWSER_RENDERER_HOST_MEDIA_MOCK_MEDIA_OBSERVER_H_
+#define CONTENT_BROWSER_RENDERER_HOST_MEDIA_MOCK_MEDIA_OBSERVER_H_
+
+#include <string>
+
+#include "base/basictypes.h"
+#include "content/browser/media/media_internals.h"
+#include "content/public/browser/media_observer.h"
+#include "media/base/media_log_event.h"
+#include "testing/gmock/include/gmock/gmock.h"
+
+namespace content {
+
+class MockMediaObserver : public MediaObserver {
+ public:
+ MockMediaObserver();
+ virtual ~MockMediaObserver();
+
+ MOCK_METHOD1(OnAudioCaptureDevicesChanged,
+ void(const MediaStreamDevices& devices));
+ MOCK_METHOD1(OnVideoCaptureDevicesChanged,
+ void(const MediaStreamDevices& devices));
+ MOCK_METHOD5(OnMediaRequestStateChanged,
+ void(int render_process_id, int render_view_id,
+ int page_request_id,
+ const MediaStreamDevice& device,
+ const MediaRequestState state));
+ MOCK_METHOD6(OnAudioStreamPlayingChanged,
+ void(int render_process_id,
+ int render_view_id,
+ int stream_id,
+ bool is_playing,
+ float power_dbfs,
+ bool clipped));
+};
+
+class MockMediaInternals : public MediaInternals {
+ public:
+ MockMediaInternals();
+ virtual ~MockMediaInternals();
+
+ MOCK_METHOD2(OnDeleteAudioStream,
+ void(void* host, int stream_id));
+ MOCK_METHOD3(OnSetAudioStreamPlaying,
+ void(void* host, int stream_id, bool playing));
+ MOCK_METHOD3(OnSetAudioStreamStatus,
+ void(void* host, int stream_id, const std::string& status));
+ MOCK_METHOD3(OnSetAudioStreamVolume,
+ void(void* host, int stream_id, double volume));
+ MOCK_METHOD2(OnMediaEvent,
+ void(int source, const media::MediaLogEvent& event));
+};
+
+} // namespace content
+
+#endif // CONTENT_BROWSER_RENDERER_HOST_MEDIA_MOCK_MEDIA_OBSERVER_H_
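
A minimal sketch, under the usual gtest/gmock conventions, of how a test might use the mock declared above; the test name is illustrative, and the direct call on the mock stands in for whatever code under test would normally fire the callback.

// Hypothetical test body, not part of the patch above.
#include "content/browser/renderer_host/media/mock_media_observer.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"

namespace content {

TEST(MockMediaObserverSketch, NotifiesOnAudioDeviceChange) {
  MockMediaObserver observer;

  // Expect exactly one notification, with any device list.
  EXPECT_CALL(observer, OnAudioCaptureDevicesChanged(testing::_)).Times(1);

  // In a real test the code under test would trigger this callback; here the
  // mock is invoked directly just to satisfy the expectation.
  observer.OnAudioCaptureDevicesChanged(MediaStreamDevices());
}

}  // namespace content
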
diff --git a/chromium/content/browser/renderer_host/media/peer_connection_tracker_host.cc b/chromium/content/browser/renderer_host/media/peer_connection_tracker_host.cc
new file mode 100644
index 00000000000..b986055e700
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/peer_connection_tracker_host.cc
@@ -0,0 +1,69 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+#include "content/browser/renderer_host/media/peer_connection_tracker_host.h"
+
+#include "content/browser/media/webrtc_internals.h"
+#include "content/common/media/peer_connection_tracker_messages.h"
+
+namespace content {
+
+PeerConnectionTrackerHost::PeerConnectionTrackerHost(int render_process_id)
+ : render_process_id_(render_process_id) {}
+
+bool PeerConnectionTrackerHost::OnMessageReceived(const IPC::Message& message,
+ bool* message_was_ok) {
+ bool handled = true;
+
+ IPC_BEGIN_MESSAGE_MAP_EX(PeerConnectionTrackerHost, message, *message_was_ok)
+ IPC_MESSAGE_HANDLER(PeerConnectionTrackerHost_AddPeerConnection,
+ OnAddPeerConnection)
+ IPC_MESSAGE_HANDLER(PeerConnectionTrackerHost_RemovePeerConnection,
+ OnRemovePeerConnection)
+ IPC_MESSAGE_HANDLER(PeerConnectionTrackerHost_UpdatePeerConnection,
+ OnUpdatePeerConnection)
+ IPC_MESSAGE_HANDLER(PeerConnectionTrackerHost_AddStats, OnAddStats)
+ IPC_MESSAGE_UNHANDLED(handled = false)
+ IPC_END_MESSAGE_MAP_EX()
+ return handled;
+}
+
+void PeerConnectionTrackerHost::OverrideThreadForMessage(
+ const IPC::Message& message, BrowserThread::ID* thread) {
+ if (IPC_MESSAGE_CLASS(message) == PeerConnectionTrackerMsgStart)
+ *thread = BrowserThread::UI;
+}
+
+PeerConnectionTrackerHost::~PeerConnectionTrackerHost() {
+}
+
+void PeerConnectionTrackerHost::OnAddPeerConnection(
+ const PeerConnectionInfo& info) {
+ WebRTCInternals::GetInstance()->OnAddPeerConnection(
+ render_process_id_,
+ peer_pid(),
+ info.lid,
+ info.url,
+ info.servers,
+ info.constraints);
+}
+
+void PeerConnectionTrackerHost::OnRemovePeerConnection(int lid) {
+ WebRTCInternals::GetInstance()->OnRemovePeerConnection(peer_pid(), lid);
+}
+
+void PeerConnectionTrackerHost::OnUpdatePeerConnection(
+ int lid, const std::string& type, const std::string& value) {
+ WebRTCInternals::GetInstance()->OnUpdatePeerConnection(
+ peer_pid(),
+ lid,
+ type,
+ value);
+}
+
+void PeerConnectionTrackerHost::OnAddStats(int lid,
+ const base::ListValue& value) {
+ WebRTCInternals::GetInstance()->OnAddStats(peer_pid(), lid, value);
+}
+
+} // namespace content
diff --git a/chromium/content/browser/renderer_host/media/peer_connection_tracker_host.h b/chromium/content/browser/renderer_host/media/peer_connection_tracker_host.h
new file mode 100644
index 00000000000..2803e7be602
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/peer_connection_tracker_host.h
@@ -0,0 +1,49 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CONTENT_BROWSER_RENDERER_HOST_MEDIA_PEER_CONNECTION_TRACKER_HOST_H_
+#define CONTENT_BROWSER_RENDERER_HOST_MEDIA_PEER_CONNECTION_TRACKER_HOST_H_
+
+#include "content/public/browser/browser_message_filter.h"
+
+struct PeerConnectionInfo;
+
+namespace base {
+class ListValue;
+} // namespace base
+
+namespace content {
+
+// This class is the host for PeerConnectionTracker in the browser process
+// managed by RenderProcessHostImpl. It passes IPC messages between
+// WebRTCInternals and PeerConnectionTracker.
+class PeerConnectionTrackerHost : public BrowserMessageFilter {
+ public:
+ PeerConnectionTrackerHost(int render_process_id);
+
+ // content::BrowserMessageFilter override.
+ virtual bool OnMessageReceived(const IPC::Message& message,
+ bool* message_was_ok) OVERRIDE;
+ virtual void OverrideThreadForMessage(const IPC::Message& message,
+ BrowserThread::ID* thread) OVERRIDE;
+
+ protected:
+ virtual ~PeerConnectionTrackerHost();
+
+ private:
+ // Handlers for peer connection messages coming from the renderer.
+ void OnAddPeerConnection(const PeerConnectionInfo& info);
+ void OnRemovePeerConnection(int lid);
+ void OnUpdatePeerConnection(
+ int lid, const std::string& type, const std::string& value);
+ void OnAddStats(int lid, const base::ListValue& value);
+
+ int render_process_id_;
+
+ DISALLOW_COPY_AND_ASSIGN(PeerConnectionTrackerHost);
+};
+
+} // namespace content
+
+#endif // CONTENT_BROWSER_RENDERER_HOST_MEDIA_PEER_CONNECTION_TRACKER_HOST_H_
diff --git a/chromium/content/browser/renderer_host/media/video_capture_buffer_pool.cc b/chromium/content/browser/renderer_host/media/video_capture_buffer_pool.cc
new file mode 100644
index 00000000000..207f86a99f3
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/video_capture_buffer_pool.cc
@@ -0,0 +1,209 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "content/browser/renderer_host/media/video_capture_buffer_pool.h"
+
+#include "base/bind.h"
+#include "base/callback.h"
+#include "base/logging.h"
+#include "media/base/video_frame.h"
+#include "media/base/video_util.h"
+
+namespace content {
+
+VideoCaptureBufferPool::VideoCaptureBufferPool(size_t size, int count)
+ : size_(size),
+ count_(count) {
+}
+
+VideoCaptureBufferPool::~VideoCaptureBufferPool() {
+}
+
+bool VideoCaptureBufferPool::Allocate() {
+ base::AutoLock lock(lock_);
+ DCHECK(!IsAllocated());
+ buffers_.resize(count_);
+ for (int buffer_id = 0; buffer_id < count(); ++buffer_id) {
+ Buffer* buffer = new Buffer();
+ buffers_[buffer_id] = buffer;
+ if (!buffer->shared_memory.CreateAndMapAnonymous(GetMemorySize()))
+ return false;
+ }
+ return true;
+}
+
+base::SharedMemoryHandle VideoCaptureBufferPool::ShareToProcess(
+ int buffer_id,
+ base::ProcessHandle process_handle) {
+ base::AutoLock lock(lock_);
+ DCHECK(IsAllocated());
+ DCHECK(buffer_id >= 0);
+ DCHECK(buffer_id < count_);
+ Buffer* buffer = buffers_[buffer_id];
+ base::SharedMemoryHandle remote_handle;
+ buffer->shared_memory.ShareToProcess(process_handle, &remote_handle);
+ return remote_handle;
+}
+
+base::SharedMemoryHandle VideoCaptureBufferPool::GetHandle(int buffer_id) {
+ base::AutoLock lock(lock_);
+ DCHECK(IsAllocated());
+ DCHECK(buffer_id >= 0);
+ DCHECK(buffer_id < count_);
+ return buffers_[buffer_id]->shared_memory.handle();
+}
+
+void* VideoCaptureBufferPool::GetMemory(int buffer_id) {
+ base::AutoLock lock(lock_);
+ DCHECK(IsAllocated());
+ DCHECK(buffer_id >= 0);
+ DCHECK(buffer_id < count_);
+ return buffers_[buffer_id]->shared_memory.memory();
+}
+
+int VideoCaptureBufferPool::ReserveForProducer() {
+ base::AutoLock lock(lock_);
+ return ReserveForProducerInternal();
+}
+
+void VideoCaptureBufferPool::RelinquishProducerReservation(int buffer_id) {
+ base::AutoLock lock(lock_);
+ DCHECK(buffer_id >= 0);
+ DCHECK(buffer_id < count());
+ Buffer* buffer = buffers_[buffer_id];
+ DCHECK(buffer->held_by_producer);
+ buffer->held_by_producer = false;
+}
+
+void VideoCaptureBufferPool::HoldForConsumers(
+ int buffer_id,
+ int num_clients) {
+ base::AutoLock lock(lock_);
+ DCHECK(buffer_id >= 0);
+ DCHECK(buffer_id < count());
+ DCHECK(IsAllocated());
+ Buffer* buffer = buffers_[buffer_id];
+ DCHECK(buffer->held_by_producer);
+ DCHECK(!buffer->consumer_hold_count);
+
+ buffer->consumer_hold_count = num_clients;
+ // Note: |held_by_producer| will stay true until
+ // RelinquishProducerReservation() (usually called by destructor of the object
+  // wrapping this buffer, e.g. a media::VideoFrame).
+}
+
+void VideoCaptureBufferPool::RelinquishConsumerHold(int buffer_id,
+ int num_clients) {
+ base::AutoLock lock(lock_);
+ DCHECK(buffer_id >= 0);
+ DCHECK(buffer_id < count());
+ DCHECK_GT(num_clients, 0);
+ DCHECK(IsAllocated());
+ Buffer* buffer = buffers_[buffer_id];
+ DCHECK_GE(buffer->consumer_hold_count, num_clients);
+
+ buffer->consumer_hold_count -= num_clients;
+}
+
+// State query functions.
+size_t VideoCaptureBufferPool::GetMemorySize() const {
+ // No need to take |lock_| currently.
+ return size_;
+}
+
+int VideoCaptureBufferPool::RecognizeReservedBuffer(
+ base::SharedMemoryHandle maybe_belongs_to_pool) {
+ base::AutoLock lock(lock_);
+ for (int buffer_id = 0; buffer_id < count(); ++buffer_id) {
+ Buffer* buffer = buffers_[buffer_id];
+ if (buffer->shared_memory.handle() == maybe_belongs_to_pool) {
+ DCHECK(buffer->held_by_producer);
+ return buffer_id;
+ }
+ }
+ return -1; // Buffer is not from our pool.
+}
+
+scoped_refptr<media::VideoFrame> VideoCaptureBufferPool::ReserveI420VideoFrame(
+ const gfx::Size& size,
+ int rotation) {
+ if (static_cast<size_t>(size.GetArea() * 3 / 2) != GetMemorySize())
+ return NULL;
+
+ base::AutoLock lock(lock_);
+
+ int buffer_id = ReserveForProducerInternal();
+ if (buffer_id < 0)
+ return NULL;
+
+ base::Closure disposal_handler = base::Bind(
+ &VideoCaptureBufferPool::RelinquishProducerReservation,
+ this,
+ buffer_id);
+
+ Buffer* buffer = buffers_[buffer_id];
+ // Wrap the buffer in a VideoFrame container.
+ scoped_refptr<media::VideoFrame> frame =
+ media::VideoFrame::WrapExternalSharedMemory(
+ media::VideoFrame::I420,
+ size,
+ gfx::Rect(size),
+ size,
+ static_cast<uint8*>(buffer->shared_memory.memory()),
+ buffer->shared_memory.handle(),
+ base::TimeDelta(),
+ disposal_handler);
+
+ if (buffer->rotation != rotation) {
+ // TODO(nick): Generalize the |rotation| mechanism.
+ media::FillYUV(frame.get(), 0, 128, 128);
+ buffer->rotation = rotation;
+ }
+
+ return frame;
+}
+
+bool VideoCaptureBufferPool::IsAnyBufferHeldForConsumers() {
+ base::AutoLock lock(lock_);
+ for (int buffer_id = 0; buffer_id < count(); ++buffer_id) {
+ Buffer* buffer = buffers_[buffer_id];
+ if (buffer->consumer_hold_count > 0)
+ return true;
+ }
+ return false;
+}
+
+VideoCaptureBufferPool::Buffer::Buffer()
+ : rotation(0),
+ held_by_producer(false),
+ consumer_hold_count(0) {}
+
+int VideoCaptureBufferPool::ReserveForProducerInternal() {
+ lock_.AssertAcquired();
+ DCHECK(IsAllocated());
+
+ int buffer_id = -1;
+ for (int candidate_id = 0; candidate_id < count(); ++candidate_id) {
+ Buffer* candidate = buffers_[candidate_id];
+ if (!candidate->consumer_hold_count && !candidate->held_by_producer) {
+ buffer_id = candidate_id;
+ break;
+ }
+ }
+ if (buffer_id == -1)
+ return -1;
+
+ Buffer* buffer = buffers_[buffer_id];
+ CHECK_GE(buffer->shared_memory.requested_size(), size_);
+ buffer->held_by_producer = true;
+ return buffer_id;
+}
+
+bool VideoCaptureBufferPool::IsAllocated() const {
+ lock_.AssertAcquired();
+ return !buffers_.empty();
+}
+
+} // namespace content
+
diff --git a/chromium/content/browser/renderer_host/media/video_capture_buffer_pool.h b/chromium/content/browser/renderer_host/media/video_capture_buffer_pool.h
new file mode 100644
index 00000000000..6d9607737dc
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/video_capture_buffer_pool.h
@@ -0,0 +1,136 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CONTENT_BROWSER_RENDERER_HOST_MEDIA_VIDEO_CAPTURE_BUFFER_POOL_H_
+#define CONTENT_BROWSER_RENDERER_HOST_MEDIA_VIDEO_CAPTURE_BUFFER_POOL_H_
+
+#include "base/basictypes.h"
+#include "base/memory/ref_counted.h"
+#include "base/memory/scoped_vector.h"
+#include "base/memory/shared_memory.h"
+#include "base/process/process.h"
+#include "base/synchronization/lock.h"
+#include "content/common/content_export.h"
+#include "ui/gfx/size.h"
+
+namespace media {
+
+class VideoFrame;
+
+} // namespace media
+
+namespace content {
+
+// A thread-safe class that does the bookkeeping and lifetime management for a
+// pool of shared-memory pixel buffers cycled between an in-process producer
+// (e.g. a VideoCaptureDevice) and a set of out-of-process consumers. The pool
+// is intended to be allocated and orchestrated by a VideoCaptureController, but
+// is designed to outlive the controller if necessary.
+//
+// Buffers are identified by an int value called |buffer_id|. Callers may depend
+// on the buffer IDs being dense in the range [0, count()), so long as the
+// Allocate() step succeeded. -1 is never a valid ID, and is returned by some
+// methods to indicate failure. Producers get a buffer by calling
+// ReserveForProducer(), and may pass on their ownership to the consumer by
+// calling HoldForConsumers(), or drop the buffer (without further
+// processing) by calling RelinquishProducerReservation(). Consumers signal
+// that they are done with the buffer by calling RelinquishConsumerHold().
+class CONTENT_EXPORT VideoCaptureBufferPool
+ : public base::RefCountedThreadSafe<VideoCaptureBufferPool> {
+ public:
+ VideoCaptureBufferPool(size_t size, int count);
+
+ // One-time initialization to allocate the shared memory buffers. Returns true
+ // on success.
+ bool Allocate();
+
+ // One-time (per client/per-buffer) initialization to share a particular
+ // buffer to a process.
+ base::SharedMemoryHandle ShareToProcess(int buffer_id,
+ base::ProcessHandle process_handle);
+
+ // Get the shared memory handle for a particular buffer index.
+ base::SharedMemoryHandle GetHandle(int buffer_id);
+
+ // Get the mapped buffer memory for a particular buffer index.
+ void* GetMemory(int buffer_id);
+
+ // Locate the index of a buffer (if any) that's not in use by the producer or
+ // consumers, and reserve it. The buffer remains reserved (and writable by the
+ // producer) until ownership is transferred either to the consumer via
+ // HoldForConsumers(), or back to the pool with
+ // RelinquishProducerReservation().
+ int ReserveForProducer();
+
+ // Indicate that a buffer held for the producer should be returned back to the
+ // pool without passing on to the consumer. This effectively is the opposite
+ // of ReserveForProducer().
+ void RelinquishProducerReservation(int buffer_id);
+
+ // Transfer a buffer from producer to consumer ownership.
+ // |buffer_id| must be a buffer index previously returned by
+ // ReserveForProducer(), and not already passed to HoldForConsumers().
+ void HoldForConsumers(int buffer_id, int num_clients);
+
+ // Indicate that one or more consumers are done with a particular buffer. This
+ // effectively is the opposite of HoldForConsumers(). Once the consumers are
+ // done, a buffer is returned to the pool for reuse.
+ void RelinquishConsumerHold(int buffer_id, int num_clients);
+
+ // Detect whether a particular SharedMemoryHandle is exported by a buffer that
+ // belongs to this pool -- that is, whether it was allocated by an earlier
+  // call to ReserveForProducer(). If so, return its buffer_id (a value in the
+ // range [0, count())). If not, return -1, indicating the buffer is not
+ // recognized (it may be a valid frame, but we didn't allocate it).
+ int RecognizeReservedBuffer(base::SharedMemoryHandle maybe_belongs_to_pool);
+
+ // Utility functions to return a buffer wrapped in a useful type.
+ scoped_refptr<media::VideoFrame> ReserveI420VideoFrame(const gfx::Size& size,
+ int rotation);
+
+ int count() const { return count_; }
+ size_t GetMemorySize() const;
+ bool IsAnyBufferHeldForConsumers();
+
+ private:
+ friend class base::RefCountedThreadSafe<VideoCaptureBufferPool>;
+
+ // Per-buffer state.
+ struct Buffer {
+ Buffer();
+
+ // The memory created to be shared with renderer processes.
+ base::SharedMemory shared_memory;
+
+ // Rotation in degrees of the buffer.
+ int rotation;
+
+ // Tracks whether this buffer is currently referenced by the producer.
+ bool held_by_producer;
+
+ // Number of consumer processes which hold this shared memory.
+ int consumer_hold_count;
+ };
+
+ virtual ~VideoCaptureBufferPool();
+
+ int ReserveForProducerInternal();
+
+ bool IsAllocated() const;
+
+ // Protects |buffers_| and contents thereof.
+ base::Lock lock_;
+
+  // The buffers, indexed by |buffer_id| in the range [0, count()).
+ ScopedVector<Buffer> buffers_;
+
+ const size_t size_;
+ const int count_;
+
+ DISALLOW_IMPLICIT_CONSTRUCTORS(VideoCaptureBufferPool);
+};
+
+} // namespace content
+
+#endif // CONTENT_BROWSER_RENDERER_HOST_MEDIA_VIDEO_CAPTURE_BUFFER_POOL_H_
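
A minimal usage sketch of the producer/consumer cycle documented in the class comment above, assuming a single in-process producer and one consumer; the function name and the VGA-sized constants are illustrative only.

// Hypothetical usage, not part of the patch above.
#include "base/memory/ref_counted.h"
#include "content/browser/renderer_host/media/video_capture_buffer_pool.h"

namespace content {

void BufferPoolLifecycleSketch() {
  const size_t kFrameBytes = 640 * 480 * 3 / 2;  // One I420 VGA frame.
  scoped_refptr<VideoCaptureBufferPool> pool =
      new VideoCaptureBufferPool(kFrameBytes, 3 /* count */);
  if (!pool->Allocate())
    return;  // Shared-memory allocation failed.

  // The producer reserves a free buffer and fills it through GetMemory().
  int buffer_id = pool->ReserveForProducer();
  if (buffer_id < 0)
    return;  // Every buffer is currently held.
  void* pixels = pool->GetMemory(buffer_id);
  (void)pixels;  // ... write captured pixel data here ...

  // Hand the filled buffer to one consumer, then drop the producer hold; the
  // buffer cannot be re-reserved until the consumer is done with it.
  pool->HoldForConsumers(buffer_id, 1 /* num_clients */);
  pool->RelinquishProducerReservation(buffer_id);

  // The consumer signals completion, returning the buffer to the free set.
  pool->RelinquishConsumerHold(buffer_id, 1 /* num_clients */);
}

}  // namespace content
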
diff --git a/chromium/content/browser/renderer_host/media/video_capture_buffer_pool_unittest.cc b/chromium/content/browser/renderer_host/media/video_capture_buffer_pool_unittest.cc
new file mode 100644
index 00000000000..67a8be4c761
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/video_capture_buffer_pool_unittest.cc
@@ -0,0 +1,159 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Unit test for VideoCaptureBufferPool.
+
+#include "content/browser/renderer_host/media/video_capture_buffer_pool.h"
+
+#include "base/bind.h"
+#include "base/memory/ref_counted.h"
+#include "base/memory/scoped_ptr.h"
+#include "content/browser/renderer_host/media/video_capture_controller.h"
+#include "media/base/video_frame.h"
+#include "media/base/video_util.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace content {
+
+TEST(VideoCaptureBufferPoolTest, BufferPool) {
+ const gfx::Size size = gfx::Size(640, 480);
+ scoped_refptr<media::VideoFrame> non_pool_frame =
+ media::VideoFrame::CreateFrame(media::VideoFrame::YV12, size,
+ gfx::Rect(size), size, base::TimeDelta());
+ scoped_refptr<VideoCaptureBufferPool> pool =
+ new VideoCaptureBufferPool(size.GetArea() * 3 / 2, 3);
+
+ ASSERT_EQ(460800u, pool->GetMemorySize());
+ ASSERT_TRUE(pool->Allocate());
+
+ scoped_refptr<media::VideoFrame> frame1 =
+ pool->ReserveI420VideoFrame(size, 0);
+ ASSERT_TRUE(NULL != frame1.get());
+ ASSERT_EQ(size, frame1->coded_size());
+ scoped_refptr<media::VideoFrame> frame2 =
+ pool->ReserveI420VideoFrame(size, 0);
+ ASSERT_TRUE(NULL != frame2.get());
+ ASSERT_EQ(size, frame2->coded_size());
+ scoped_refptr<media::VideoFrame> frame3 =
+ pool->ReserveI420VideoFrame(size, 0);
+ ASSERT_TRUE(NULL != frame3.get());
+
+ // Touch the memory.
+ media::FillYUV(frame1.get(), 0x11, 0x22, 0x33);
+ media::FillYUV(frame2.get(), 0x44, 0x55, 0x66);
+ media::FillYUV(frame3.get(), 0x77, 0x88, 0x99);
+
+ // Fourth frame should fail.
+ ASSERT_EQ(NULL, pool->ReserveI420VideoFrame(size, 0).get())
+ << "Pool should be empty";
+
+ // Release 1st frame and retry; this should succeed.
+ frame1 = NULL;
+ scoped_refptr<media::VideoFrame> frame4 =
+ pool->ReserveI420VideoFrame(size, 0);
+ ASSERT_TRUE(NULL != frame4.get());
+
+ ASSERT_EQ(NULL, pool->ReserveI420VideoFrame(size, 0).get())
+ << "Pool should be empty";
+
+ // Validate the IDs
+ int buffer_id2 =
+ pool->RecognizeReservedBuffer(frame2->shared_memory_handle());
+ ASSERT_LE(0, buffer_id2);
+ int buffer_id3 =
+ pool->RecognizeReservedBuffer(frame3->shared_memory_handle());
+ ASSERT_LE(0, buffer_id3);
+ int buffer_id4 =
+ pool->RecognizeReservedBuffer(frame4->shared_memory_handle());
+ ASSERT_LE(0, buffer_id4);
+ int buffer_id_non_pool =
+ pool->RecognizeReservedBuffer(non_pool_frame->shared_memory_handle());
+ ASSERT_GT(0, buffer_id_non_pool);
+
+ ASSERT_FALSE(pool->IsAnyBufferHeldForConsumers());
+
+ // Deliver a frame.
+ pool->HoldForConsumers(buffer_id3, 2);
+
+ ASSERT_TRUE(pool->IsAnyBufferHeldForConsumers());
+ ASSERT_EQ(NULL, pool->ReserveI420VideoFrame(size, 0).get())
+ << "Pool should be empty";
+ frame3 = NULL; // Old producer releases frame. Should be a noop.
+ ASSERT_TRUE(pool->IsAnyBufferHeldForConsumers());
+ ASSERT_EQ(NULL, pool->ReserveI420VideoFrame(size, 0).get())
+ << "Pool should be empty";
+ frame2 = NULL; // Active producer releases frame. Should free a frame.
+ buffer_id2 = 0;
+
+ ASSERT_TRUE(pool->IsAnyBufferHeldForConsumers());
+ frame1 = pool->ReserveI420VideoFrame(size, 0);
+ ASSERT_TRUE(NULL != frame1.get());
+ ASSERT_EQ(NULL, pool->ReserveI420VideoFrame(size, 0).get())
+ << "Pool should be empty";
+ ASSERT_TRUE(pool->IsAnyBufferHeldForConsumers());
+
+ // First consumer finishes.
+ pool->RelinquishConsumerHold(buffer_id3, 1);
+ ASSERT_EQ(NULL, pool->ReserveI420VideoFrame(size, 0).get())
+ << "Pool should be empty";
+ ASSERT_TRUE(pool->IsAnyBufferHeldForConsumers());
+
+ // Second consumer finishes. This should free that frame.
+ pool->RelinquishConsumerHold(buffer_id3, 1);
+ ASSERT_FALSE(pool->IsAnyBufferHeldForConsumers());
+ frame3 = pool->ReserveI420VideoFrame(size, 0);
+ ASSERT_TRUE(NULL != frame3.get());
+ ASSERT_FALSE(pool->IsAnyBufferHeldForConsumers());
+ ASSERT_EQ(NULL, pool->ReserveI420VideoFrame(size, 0).get())
+ << "Pool should be empty";
+
+ // Now deliver & consume frame1, but don't release the VideoFrame.
+ int buffer_id1 =
+ pool->RecognizeReservedBuffer(frame1->shared_memory_handle());
+ ASSERT_LE(0, buffer_id1);
+ pool->HoldForConsumers(buffer_id1, 5);
+ ASSERT_TRUE(pool->IsAnyBufferHeldForConsumers());
+ pool->RelinquishConsumerHold(buffer_id1, 5);
+ ASSERT_FALSE(pool->IsAnyBufferHeldForConsumers());
+
+ // Even though the consumer is done with the buffer at |buffer_id1|, it cannot
+ // be re-allocated to the producer, because |frame1| still references it. But
+ // when |frame1| goes away, we should be able to re-reserve the buffer (and
+ // the ID ought to be the same).
+ ASSERT_EQ(NULL, pool->ReserveI420VideoFrame(size, 0).get())
+ << "Pool should be empty";
+ frame1 = NULL; // Should free the frame.
+ frame2 = pool->ReserveI420VideoFrame(size, 0);
+ ASSERT_TRUE(NULL != frame2.get());
+ ASSERT_EQ(buffer_id1,
+ pool->RecognizeReservedBuffer(frame2->shared_memory_handle()));
+ ASSERT_EQ(NULL, pool->ReserveI420VideoFrame(size, 0).get())
+ << "Pool should be empty";
+
+ // For good measure, do one more cycle of free/realloc without delivery, now
+ // that this buffer has been through the consumer-hold cycle.
+ frame2 = NULL;
+ frame1 = pool->ReserveI420VideoFrame(size, 0);
+ ASSERT_TRUE(NULL != frame1.get());
+ ASSERT_EQ(buffer_id1,
+ pool->RecognizeReservedBuffer(frame1->shared_memory_handle()));
+ ASSERT_EQ(NULL, pool->ReserveI420VideoFrame(size, 0).get())
+ << "Pool should be empty";
+
+ // Tear down the pool, writing into the frames. The VideoFrame should
+ // preserve the lifetime of the underlying memory.
+ frame3 = NULL;
+ pool = NULL;
+
+ // Touch the memory.
+ media::FillYUV(frame1.get(), 0x11, 0x22, 0x33);
+ media::FillYUV(frame4.get(), 0x44, 0x55, 0x66);
+
+ frame1 = NULL;
+
+ media::FillYUV(frame4.get(), 0x44, 0x55, 0x66);
+ frame4 = NULL;
+}
+
+} // namespace content
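
A small worked check of the I420/YV12 size math asserted above (460800 bytes for 640x480): one full-resolution Y plane plus two quarter-resolution chroma planes, i.e. width * height * 3 / 2. The test name is illustrative and the test is a sketch, not part of the patch.

// Hypothetical companion test, not part of the patch above.
#include "testing/gtest/include/gtest/gtest.h"

namespace content {

TEST(VideoCaptureBufferPoolSketch, I420SizeMath) {
  const size_t kWidth = 640;
  const size_t kHeight = 480;
  const size_t y_plane = kWidth * kHeight;         // 307200 bytes.
  const size_t chroma_plane = y_plane / 4;         // 76800 bytes each (U, V).
  EXPECT_EQ(460800u, y_plane + 2 * chroma_plane);  // Matches GetMemorySize().
}

}  // namespace content
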
diff --git a/chromium/content/browser/renderer_host/media/video_capture_controller.cc b/chromium/content/browser/renderer_host/media/video_capture_controller.cc
new file mode 100644
index 00000000000..bac43289a21
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/video_capture_controller.cc
@@ -0,0 +1,732 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "content/browser/renderer_host/media/video_capture_controller.h"
+
+#include <set>
+
+#include "base/bind.h"
+#include "base/memory/scoped_ptr.h"
+#include "base/stl_util.h"
+#include "content/browser/renderer_host/media/media_stream_manager.h"
+#include "content/browser/renderer_host/media/video_capture_manager.h"
+#include "content/public/browser/browser_thread.h"
+#include "media/base/video_frame.h"
+#include "media/base/video_util.h"
+#include "media/base/yuv_convert.h"
+
+#if !defined(OS_IOS) && !defined(OS_ANDROID)
+#include "third_party/libyuv/include/libyuv.h"
+#endif
+
+namespace {
+
+// TODO(wjia): Support stride.
+void RotatePackedYV12Frame(
+ const uint8* src,
+ uint8* dest_yplane,
+ uint8* dest_uplane,
+ uint8* dest_vplane,
+ int width,
+ int height,
+ int rotation,
+ bool flip_vert,
+ bool flip_horiz) {
+ media::RotatePlaneByPixels(
+ src, dest_yplane, width, height, rotation, flip_vert, flip_horiz);
+ int y_size = width * height;
+ src += y_size;
+ media::RotatePlaneByPixels(
+ src, dest_uplane, width/2, height/2, rotation, flip_vert, flip_horiz);
+ src += y_size/4;
+ media::RotatePlaneByPixels(
+ src, dest_vplane, width/2, height/2, rotation, flip_vert, flip_horiz);
+}
+
+} // namespace
+
+namespace content {
+
+// The number of buffers that VideoCaptureBufferPool should allocate.
+static const int kNoOfBuffers = 3;
+
+struct VideoCaptureController::ControllerClient {
+ ControllerClient(
+ const VideoCaptureControllerID& id,
+ VideoCaptureControllerEventHandler* handler,
+ base::ProcessHandle render_process,
+ const media::VideoCaptureParams& params)
+ : controller_id(id),
+ event_handler(handler),
+ render_process_handle(render_process),
+ parameters(params),
+ session_closed(false) {
+ }
+
+ ~ControllerClient() {}
+
+ // ID used for identifying this object.
+ VideoCaptureControllerID controller_id;
+ VideoCaptureControllerEventHandler* event_handler;
+
+ // Handle to the render process that will receive the capture buffers.
+ base::ProcessHandle render_process_handle;
+ media::VideoCaptureParams parameters;
+
+ // Buffers used by this client.
+ std::set<int> buffers;
+
+ // State of capture session, controlled by VideoCaptureManager directly.
+ bool session_closed;
+};
+
+VideoCaptureController::VideoCaptureController(
+ VideoCaptureManager* video_capture_manager)
+ : chopped_width_(0),
+ chopped_height_(0),
+ frame_info_available_(false),
+ video_capture_manager_(video_capture_manager),
+ device_in_use_(false),
+ state_(VIDEO_CAPTURE_STATE_STOPPED) {
+ memset(&current_params_, 0, sizeof(current_params_));
+}
+
+void VideoCaptureController::StartCapture(
+ const VideoCaptureControllerID& id,
+ VideoCaptureControllerEventHandler* event_handler,
+ base::ProcessHandle render_process,
+ const media::VideoCaptureParams& params) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ DVLOG(1) << "VideoCaptureController::StartCapture, id " << id.device_id
+ << ", (" << params.width
+ << ", " << params.height
+ << ", " << params.frame_per_second
+ << ", " << params.session_id
+ << ")";
+
+ // Signal error in case device is already in error state.
+ if (state_ == VIDEO_CAPTURE_STATE_ERROR) {
+ event_handler->OnError(id);
+ return;
+ }
+
+ // Do nothing if this client has called StartCapture before.
+ if (FindClient(id, event_handler, controller_clients_) ||
+ FindClient(id, event_handler, pending_clients_))
+ return;
+
+ ControllerClient* client = new ControllerClient(id, event_handler,
+ render_process, params);
+  // If capture has already been started, check the conditions below.
+ if (state_ == VIDEO_CAPTURE_STATE_STARTED) {
+ // TODO(wjia): Temporarily disable restarting till client supports resampling.
+#if 0
+ // This client has higher resolution than what is currently requested.
+ // Need restart capturing.
+ if (params.width > current_params_.width ||
+ params.height > current_params_.height) {
+ video_capture_manager_->Stop(current_params_.session_id,
+ base::Bind(&VideoCaptureController::OnDeviceStopped, this));
+ frame_info_available_ = false;
+ state_ = VIDEO_CAPTURE_STATE_STOPPING;
+ pending_clients_.push_back(client);
+ return;
+ }
+#endif
+
+ // This client's resolution is no larger than what's currently requested.
+    // When frame_info has been returned by the device, send it to the client.
+ if (frame_info_available_) {
+ SendFrameInfoAndBuffers(client);
+ }
+ controller_clients_.push_back(client);
+ return;
+ }
+
+  // If the device is in the middle of stopping, put the client in the
+  // pending queue.
+ if (state_ == VIDEO_CAPTURE_STATE_STOPPING) {
+ pending_clients_.push_back(client);
+ return;
+ }
+
+ // Fresh start.
+ controller_clients_.push_back(client);
+ current_params_ = params;
+ // Order the manager to start the actual capture.
+ video_capture_manager_->Start(params, this);
+ state_ = VIDEO_CAPTURE_STATE_STARTED;
+ device_in_use_ = true;
+}
+
+void VideoCaptureController::StopCapture(
+ const VideoCaptureControllerID& id,
+ VideoCaptureControllerEventHandler* event_handler) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ DVLOG(1) << "VideoCaptureController::StopCapture, id " << id.device_id;
+
+ ControllerClient* client = FindClient(id, event_handler, pending_clients_);
+ // If the client is still in pending queue, just remove it.
+ if (client) {
+ pending_clients_.remove(client);
+ return;
+ }
+
+ client = FindClient(id, event_handler, controller_clients_);
+ if (!client)
+ return;
+
+ // Take back all buffers held by the |client|.
+ if (buffer_pool_.get()) {
+ for (std::set<int>::iterator buffer_it = client->buffers.begin();
+ buffer_it != client->buffers.end();
+ ++buffer_it) {
+ int buffer_id = *buffer_it;
+ buffer_pool_->RelinquishConsumerHold(buffer_id, 1);
+ }
+ }
+ client->buffers.clear();
+
+ int session_id = client->parameters.session_id;
+  controller_clients_.remove(client);
+  delete client;
+
+ // No more clients. Stop device.
+ if (controller_clients_.empty() &&
+ (state_ == VIDEO_CAPTURE_STATE_STARTED ||
+ state_ == VIDEO_CAPTURE_STATE_ERROR)) {
+ video_capture_manager_->Stop(session_id,
+ base::Bind(&VideoCaptureController::OnDeviceStopped, this));
+ frame_info_available_ = false;
+ state_ = VIDEO_CAPTURE_STATE_STOPPING;
+ }
+}
+
+void VideoCaptureController::StopSession(
+ int session_id) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ DVLOG(1) << "VideoCaptureController::StopSession, id " << session_id;
+
+ ControllerClient* client = FindClient(session_id, pending_clients_);
+ if (!client)
+ client = FindClient(session_id, controller_clients_);
+
+ if (client) {
+ client->session_closed = true;
+ client->event_handler->OnEnded(client->controller_id);
+ }
+}
+
+void VideoCaptureController::ReturnBuffer(
+ const VideoCaptureControllerID& id,
+ VideoCaptureControllerEventHandler* event_handler,
+ int buffer_id) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ ControllerClient* client = FindClient(id, event_handler,
+ controller_clients_);
+
+ // If this buffer is not held by this client, or this client doesn't exist
+  // in the controller, do nothing.
+ if (!client ||
+ client->buffers.find(buffer_id) == client->buffers.end())
+ return;
+
+ client->buffers.erase(buffer_id);
+ buffer_pool_->RelinquishConsumerHold(buffer_id, 1);
+
+  // When all buffers have been returned by clients and the device has been
+  // asked to stop, check whether a restart is needed. This could happen when
+  // capture needs to be restarted due to a resolution change.
+ if (!buffer_pool_->IsAnyBufferHeldForConsumers() &&
+ state_ == VIDEO_CAPTURE_STATE_STOPPING) {
+ PostStopping();
+ }
+}
+
+scoped_refptr<media::VideoFrame> VideoCaptureController::ReserveOutputBuffer() {
+ base::AutoLock lock(buffer_pool_lock_);
+ if (!buffer_pool_.get())
+ return NULL;
+ return buffer_pool_->ReserveI420VideoFrame(gfx::Size(frame_info_.width,
+ frame_info_.height),
+ 0);
+}
+
+// Implements VideoCaptureDevice::EventHandler.
+// OnIncomingCapturedFrame is called on the thread running the capture device,
+// i.e. the DirectShow thread on Windows and the v4l2_thread on Linux.
+void VideoCaptureController::OnIncomingCapturedFrame(
+ const uint8* data,
+ int length,
+ base::Time timestamp,
+ int rotation,
+ bool flip_vert,
+ bool flip_horiz) {
+ DCHECK(frame_info_.color == media::VideoCaptureCapability::kI420 ||
+ frame_info_.color == media::VideoCaptureCapability::kYV12 ||
+ (rotation == 0 && !flip_vert && !flip_horiz));
+
+ scoped_refptr<media::VideoFrame> dst;
+ {
+ base::AutoLock lock(buffer_pool_lock_);
+ if (!buffer_pool_.get())
+ return;
+ dst = buffer_pool_->ReserveI420VideoFrame(gfx::Size(frame_info_.width,
+ frame_info_.height),
+ rotation);
+ }
+
+ if (!dst.get())
+ return;
+
+ uint8* yplane = dst->data(media::VideoFrame::kYPlane);
+ uint8* uplane = dst->data(media::VideoFrame::kUPlane);
+ uint8* vplane = dst->data(media::VideoFrame::kVPlane);
+
+ // Do color conversion from the camera format to I420.
+ switch (frame_info_.color) {
+ case media::VideoCaptureCapability::kColorUnknown: // Color format not set.
+ break;
+ case media::VideoCaptureCapability::kI420:
+ DCHECK(!chopped_width_ && !chopped_height_);
+ RotatePackedYV12Frame(
+ data, yplane, uplane, vplane, frame_info_.width, frame_info_.height,
+ rotation, flip_vert, flip_horiz);
+ break;
+ case media::VideoCaptureCapability::kYV12:
+ DCHECK(!chopped_width_ && !chopped_height_);
+ RotatePackedYV12Frame(
+ data, yplane, vplane, uplane, frame_info_.width, frame_info_.height,
+ rotation, flip_vert, flip_horiz);
+ break;
+ case media::VideoCaptureCapability::kNV21:
+ DCHECK(!chopped_width_ && !chopped_height_);
+ media::ConvertNV21ToYUV(data, yplane, uplane, vplane, frame_info_.width,
+ frame_info_.height);
+ break;
+ case media::VideoCaptureCapability::kYUY2:
+ DCHECK(!chopped_width_ && !chopped_height_);
+ if (frame_info_.width * frame_info_.height * 2 != length) {
+ // If |length| of |data| does not match the expected width and height
+ // we can't convert the frame to I420. YUY2 is 2 bytes per pixel.
+ break;
+ }
+
+ media::ConvertYUY2ToYUV(data, yplane, uplane, vplane, frame_info_.width,
+ frame_info_.height);
+ break;
+ case media::VideoCaptureCapability::kRGB24: {
+ int ystride = frame_info_.width;
+ int uvstride = frame_info_.width / 2;
+#if defined(OS_WIN)  // RGB on Windows starts at the bottom line.
+ int rgb_stride = -3 * (frame_info_.width + chopped_width_);
+ const uint8* rgb_src = data + 3 * (frame_info_.width + chopped_width_) *
+ (frame_info_.height -1 + chopped_height_);
+#else
+ int rgb_stride = 3 * (frame_info_.width + chopped_width_);
+ const uint8* rgb_src = data;
+#endif
+ media::ConvertRGB24ToYUV(rgb_src, yplane, uplane, vplane,
+ frame_info_.width, frame_info_.height,
+ rgb_stride, ystride, uvstride);
+ break;
+ }
+ case media::VideoCaptureCapability::kARGB:
+ media::ConvertRGB32ToYUV(data, yplane, uplane, vplane, frame_info_.width,
+ frame_info_.height,
+ (frame_info_.width + chopped_width_) * 4,
+ frame_info_.width, frame_info_.width / 2);
+ break;
+#if !defined(OS_IOS) && !defined(OS_ANDROID)
+ case media::VideoCaptureCapability::kMJPEG: {
+ int yplane_stride = frame_info_.width;
+ int uv_plane_stride = (frame_info_.width + 1) / 2;
+ int crop_x = 0;
+ int crop_y = 0;
+ libyuv::ConvertToI420(data, length, yplane, yplane_stride, uplane,
+ uv_plane_stride, vplane, uv_plane_stride, crop_x,
+ crop_y, frame_info_.width, frame_info_.height,
+ frame_info_.width, frame_info_.height,
+ libyuv::kRotate0, libyuv::FOURCC_MJPG);
+ break;
+ }
+#endif
+ default:
+ NOTREACHED();
+ }
+
+ BrowserThread::PostTask(BrowserThread::IO,
+ FROM_HERE,
+ base::Bind(&VideoCaptureController::DoIncomingCapturedFrameOnIOThread,
+ this, dst, timestamp));
+}
+
+// OnIncomingCapturedVideoFrame is called on the thread running the capture device.
+void VideoCaptureController::OnIncomingCapturedVideoFrame(
+ const scoped_refptr<media::VideoFrame>& frame,
+ base::Time timestamp) {
+
+ scoped_refptr<media::VideoFrame> target;
+ {
+ base::AutoLock lock(buffer_pool_lock_);
+
+ if (!buffer_pool_.get())
+ return;
+
+ // If this is a frame that belongs to the buffer pool, we can forward it
+ // directly to the IO thread and be done.
+ if (buffer_pool_->RecognizeReservedBuffer(
+ frame->shared_memory_handle()) >= 0) {
+ BrowserThread::PostTask(BrowserThread::IO,
+ FROM_HERE,
+ base::Bind(&VideoCaptureController::DoIncomingCapturedFrameOnIOThread,
+ this, frame, timestamp));
+ return;
+ }
+ // Otherwise, this is a frame that belongs to the caller, and we must copy
+ // it to a frame from the buffer pool.
+ target = buffer_pool_->ReserveI420VideoFrame(gfx::Size(frame_info_.width,
+ frame_info_.height),
+ 0);
+ }
+
+ if (!target.get())
+ return;
+
+ // Validate the inputs.
+ if (frame->coded_size() != target->coded_size())
+ return; // Only exact copies are supported.
+ if (!(frame->format() == media::VideoFrame::I420 ||
+ frame->format() == media::VideoFrame::YV12 ||
+ frame->format() == media::VideoFrame::RGB32)) {
+ NOTREACHED() << "Unsupported format passed to OnIncomingCapturedVideoFrame";
+ return;
+ }
+
+ const int kYPlane = media::VideoFrame::kYPlane;
+ const int kUPlane = media::VideoFrame::kUPlane;
+ const int kVPlane = media::VideoFrame::kVPlane;
+ const int kAPlane = media::VideoFrame::kAPlane;
+ const int kRGBPlane = media::VideoFrame::kRGBPlane;
+
+ // Do color conversion from the camera format to I420.
+ switch (frame->format()) {
+#if defined(GOOGLE_TV)
+ case media::VideoFrame::HOLE:
+ // Fall-through to NOTREACHED() block.
+#endif
+ case media::VideoFrame::INVALID:
+ case media::VideoFrame::YV16:
+ case media::VideoFrame::EMPTY:
+ case media::VideoFrame::NATIVE_TEXTURE: {
+ NOTREACHED();
+ break;
+ }
+ case media::VideoFrame::I420:
+ case media::VideoFrame::YV12: {
+ DCHECK(!chopped_width_ && !chopped_height_);
+ media::CopyYPlane(frame->data(kYPlane),
+ frame->stride(kYPlane),
+ frame->rows(kYPlane),
+ target.get());
+ media::CopyUPlane(frame->data(kUPlane),
+ frame->stride(kUPlane),
+ frame->rows(kUPlane),
+ target.get());
+ media::CopyVPlane(frame->data(kVPlane),
+ frame->stride(kVPlane),
+ frame->rows(kVPlane),
+ target.get());
+ break;
+ }
+ case media::VideoFrame::YV12A: {
+ DCHECK(!chopped_width_ && !chopped_height_);
+ media::CopyYPlane(frame->data(kYPlane),
+ frame->stride(kYPlane),
+ frame->rows(kYPlane),
+ target.get());
+ media::CopyUPlane(frame->data(kUPlane),
+ frame->stride(kUPlane),
+ frame->rows(kUPlane),
+ target.get());
+ media::CopyVPlane(frame->data(kVPlane),
+ frame->stride(kVPlane),
+ frame->rows(kVPlane),
+ target.get());
+ media::CopyAPlane(frame->data(kAPlane),
+ frame->stride(kAPlane),
+ frame->rows(kAPlane),
+ target.get());
+ break;
+ }
+ case media::VideoFrame::RGB32: {
+ media::ConvertRGB32ToYUV(frame->data(kRGBPlane),
+ target->data(kYPlane),
+ target->data(kUPlane),
+ target->data(kVPlane),
+ target->coded_size().width(),
+ target->coded_size().height(),
+ frame->stride(kRGBPlane),
+ target->stride(kYPlane),
+ target->stride(kUPlane));
+ break;
+ }
+ }
+
+ BrowserThread::PostTask(BrowserThread::IO,
+ FROM_HERE,
+ base::Bind(&VideoCaptureController::DoIncomingCapturedFrameOnIOThread,
+ this, target, timestamp));
+}
+
+void VideoCaptureController::OnError() {
+ BrowserThread::PostTask(BrowserThread::IO,
+ FROM_HERE,
+ base::Bind(&VideoCaptureController::DoErrorOnIOThread, this));
+}
+
+void VideoCaptureController::OnFrameInfo(
+ const media::VideoCaptureCapability& info) {
+ frame_info_= info;
+ // Handle cases when |info| has odd numbers for width/height.
+ if (info.width & 1) {
+ --frame_info_.width;
+ chopped_width_ = 1;
+ } else {
+ chopped_width_ = 0;
+ }
+ if (info.height & 1) {
+ --frame_info_.height;
+ chopped_height_ = 1;
+ } else {
+ chopped_height_ = 0;
+ }
+ BrowserThread::PostTask(BrowserThread::IO,
+ FROM_HERE,
+ base::Bind(&VideoCaptureController::DoFrameInfoOnIOThread, this));
+}
+
+void VideoCaptureController::OnFrameInfoChanged(
+ const media::VideoCaptureCapability& info) {
+ BrowserThread::PostTask(BrowserThread::IO,
+ FROM_HERE,
+ base::Bind(&VideoCaptureController::DoFrameInfoChangedOnIOThread,
+ this, info));
+}
+
+VideoCaptureController::~VideoCaptureController() {
+ buffer_pool_ = NULL; // Release all buffers.
+ STLDeleteContainerPointers(controller_clients_.begin(),
+ controller_clients_.end());
+ STLDeleteContainerPointers(pending_clients_.begin(),
+ pending_clients_.end());
+}
+
+// Called by VideoCaptureManager when a device has been stopped.
+void VideoCaptureController::OnDeviceStopped() {
+ BrowserThread::PostTask(BrowserThread::IO,
+ FROM_HERE,
+ base::Bind(&VideoCaptureController::DoDeviceStoppedOnIOThread, this));
+}
+
+void VideoCaptureController::DoIncomingCapturedFrameOnIOThread(
+ const scoped_refptr<media::VideoFrame>& reserved_frame,
+ base::Time timestamp) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ if (!buffer_pool_.get())
+ return;
+
+ int buffer_id = buffer_pool_->RecognizeReservedBuffer(
+ reserved_frame->shared_memory_handle());
+ if (buffer_id < 0) {
+ NOTREACHED();
+ return;
+ }
+
+ int count = 0;
+ if (state_ == VIDEO_CAPTURE_STATE_STARTED) {
+ for (ControllerClients::iterator client_it = controller_clients_.begin();
+ client_it != controller_clients_.end(); ++client_it) {
+ if ((*client_it)->session_closed)
+ continue;
+
+ (*client_it)->event_handler->OnBufferReady((*client_it)->controller_id,
+ buffer_id, timestamp);
+ (*client_it)->buffers.insert(buffer_id);
+ count++;
+ }
+ }
+
+ buffer_pool_->HoldForConsumers(buffer_id, count);
+}
+
+void VideoCaptureController::DoFrameInfoOnIOThread() {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ DCHECK(!buffer_pool_.get())
+ << "Device is restarted without releasing shared memory.";
+
+ // Allocate memory only when device has been started.
+ if (state_ != VIDEO_CAPTURE_STATE_STARTED)
+ return;
+
+ scoped_refptr<VideoCaptureBufferPool> buffer_pool =
+ new VideoCaptureBufferPool(frame_info_.width * frame_info_.height * 3 / 2,
+ kNoOfBuffers);
+
+ // Check whether all buffers were created successfully.
+ if (!buffer_pool->Allocate()) {
+ state_ = VIDEO_CAPTURE_STATE_ERROR;
+ for (ControllerClients::iterator client_it = controller_clients_.begin();
+ client_it != controller_clients_.end(); ++client_it) {
+ (*client_it)->event_handler->OnError((*client_it)->controller_id);
+ }
+ return;
+ }
+
+ {
+ base::AutoLock lock(buffer_pool_lock_);
+ buffer_pool_ = buffer_pool;
+ }
+ frame_info_available_ = true;
+
+ for (ControllerClients::iterator client_it = controller_clients_.begin();
+ client_it != controller_clients_.end(); ++client_it) {
+ SendFrameInfoAndBuffers(*client_it);
+ }
+}
+
+void VideoCaptureController::DoFrameInfoChangedOnIOThread(
+ const media::VideoCaptureCapability& info) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ // TODO(mcasas): Here we should reallocate the VideoCaptureBufferPool, if
+ // needed, to support the new video capture format. See crbug.com/266082.
+ for (ControllerClients::iterator client_it = controller_clients_.begin();
+ client_it != controller_clients_.end(); ++client_it) {
+ (*client_it)->event_handler->OnFrameInfoChanged(
+ (*client_it)->controller_id,
+ info.width,
+ info.height,
+ info.frame_rate);
+ }
+}
+
+void VideoCaptureController::DoErrorOnIOThread() {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ state_ = VIDEO_CAPTURE_STATE_ERROR;
+ ControllerClients::iterator client_it;
+ for (client_it = controller_clients_.begin();
+ client_it != controller_clients_.end(); ++client_it) {
+ (*client_it)->event_handler->OnError((*client_it)->controller_id);
+ }
+ for (client_it = pending_clients_.begin();
+ client_it != pending_clients_.end(); ++client_it) {
+ (*client_it)->event_handler->OnError((*client_it)->controller_id);
+ }
+}
+
+void VideoCaptureController::DoDeviceStoppedOnIOThread() {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ device_in_use_ = false;
+ if (state_ == VIDEO_CAPTURE_STATE_STOPPING) {
+ PostStopping();
+ }
+}
+
+void VideoCaptureController::SendFrameInfoAndBuffers(ControllerClient* client) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ DCHECK(frame_info_available_);
+ client->event_handler->OnFrameInfo(client->controller_id,
+ frame_info_);
+ for (int buffer_id = 0; buffer_id < buffer_pool_->count(); ++buffer_id) {
+ base::SharedMemoryHandle remote_handle =
+ buffer_pool_->ShareToProcess(buffer_id, client->render_process_handle);
+
+ client->event_handler->OnBufferCreated(client->controller_id,
+ remote_handle,
+ buffer_pool_->GetMemorySize(),
+ buffer_id);
+ }
+}
+
+VideoCaptureController::ControllerClient*
+VideoCaptureController::FindClient(
+ const VideoCaptureControllerID& id,
+ VideoCaptureControllerEventHandler* handler,
+ const ControllerClients& clients) {
+ for (ControllerClients::const_iterator client_it = clients.begin();
+ client_it != clients.end(); ++client_it) {
+ if ((*client_it)->controller_id == id &&
+ (*client_it)->event_handler == handler) {
+ return *client_it;
+ }
+ }
+ return NULL;
+}
+
+VideoCaptureController::ControllerClient*
+VideoCaptureController::FindClient(
+ int session_id,
+ const ControllerClients& clients) {
+ for (ControllerClients::const_iterator client_it = clients.begin();
+ client_it != clients.end(); ++client_it) {
+ if ((*client_it)->parameters.session_id == session_id) {
+ return *client_it;
+ }
+ }
+ return NULL;
+}
+
+// This function is called when all buffers have been returned to controller,
+// or when device is stopped. It decides whether the device needs to be
+// restarted.
+void VideoCaptureController::PostStopping() {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ DCHECK_EQ(state_, VIDEO_CAPTURE_STATE_STOPPING);
+
+ // When clients still have some buffers, or device has not been stopped yet,
+ // do nothing.
+ if ((buffer_pool_.get() && buffer_pool_->IsAnyBufferHeldForConsumers()) ||
+ device_in_use_)
+ return;
+
+ {
+ base::AutoLock lock(buffer_pool_lock_);
+ buffer_pool_ = NULL;
+ }
+
+  // No more clients; therefore the controller is stopped.
+ if (controller_clients_.empty() && pending_clients_.empty()) {
+ state_ = VIDEO_CAPTURE_STATE_STOPPED;
+ return;
+ }
+
+ // Restart the device.
+ current_params_.width = 0;
+ current_params_.height = 0;
+ ControllerClients::iterator client_it;
+ for (client_it = controller_clients_.begin();
+ client_it != controller_clients_.end(); ++client_it) {
+ if (current_params_.width < (*client_it)->parameters.width)
+ current_params_.width = (*client_it)->parameters.width;
+ if (current_params_.height < (*client_it)->parameters.height)
+ current_params_.height = (*client_it)->parameters.height;
+ }
+ for (client_it = pending_clients_.begin();
+ client_it != pending_clients_.end(); ) {
+ if (current_params_.width < (*client_it)->parameters.width)
+ current_params_.width = (*client_it)->parameters.width;
+ if (current_params_.height < (*client_it)->parameters.height)
+ current_params_.height = (*client_it)->parameters.height;
+ controller_clients_.push_back((*client_it));
+ pending_clients_.erase(client_it++);
+ }
+ // Request the manager to start the actual capture.
+ video_capture_manager_->Start(current_params_, this);
+ state_ = VIDEO_CAPTURE_STATE_STARTED;
+ device_in_use_ = true;
+}
+
+} // namespace content
diff --git a/chromium/content/browser/renderer_host/media/video_capture_controller.h b/chromium/content/browser/renderer_host/media/video_capture_controller.h
new file mode 100644
index 00000000000..5d33d01163c
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/video_capture_controller.h
@@ -0,0 +1,164 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// VideoCaptureController is the glue between VideoCaptureHost,
+// VideoCaptureManager and VideoCaptureDevice.
+// It provides functions for VideoCaptureHost to start a VideoCaptureDevice and
+// is responsible for keeping track of shared DIBs and filling them with I420
+// video frames for IPC communication between VideoCaptureHost and
+// VideoCaptureMessageFilter.
+// It implements media::VideoCaptureDevice::EventHandler to get video frames
+// from a VideoCaptureDevice object and do color conversion straight into the
+// shared DIBs to avoid a memory copy.
+// It serves multiple VideoCaptureControllerEventHandlers.
+
+#ifndef CONTENT_BROWSER_RENDERER_HOST_MEDIA_VIDEO_CAPTURE_CONTROLLER_H_
+#define CONTENT_BROWSER_RENDERER_HOST_MEDIA_VIDEO_CAPTURE_CONTROLLER_H_
+
+#include <list>
+#include <map>
+
+#include "base/compiler_specific.h"
+#include "base/memory/ref_counted.h"
+#include "base/process/process.h"
+#include "base/synchronization/lock.h"
+#include "content/browser/renderer_host/media/video_capture_buffer_pool.h"
+#include "content/browser/renderer_host/media/video_capture_controller_event_handler.h"
+#include "content/common/content_export.h"
+#include "content/common/media/video_capture.h"
+#include "media/video/capture/video_capture.h"
+#include "media/video/capture/video_capture_device.h"
+#include "media/video/capture/video_capture_types.h"
+
+namespace content {
+class VideoCaptureManager;
+class VideoCaptureBufferPool;
+
+class CONTENT_EXPORT VideoCaptureController
+ : public base::RefCountedThreadSafe<VideoCaptureController>,
+ public media::VideoCaptureDevice::EventHandler {
+ public:
+ VideoCaptureController(VideoCaptureManager* video_capture_manager);
+
+ // Start video capturing and try to use the resolution specified in
+ // |params|.
+  // When capturing has started, |event_handler| receives an OnFrameInfo()
+  // call with the resolution that best matches the requested one among those
+  // the video capture device supports.
+ void StartCapture(const VideoCaptureControllerID& id,
+ VideoCaptureControllerEventHandler* event_handler,
+ base::ProcessHandle render_process,
+ const media::VideoCaptureParams& params);
+
+ // Stop video capture.
+  // This will take back all buffers held by |event_handler|, and
+ // |event_handler| shouldn't use those buffers any more.
+ void StopCapture(const VideoCaptureControllerID& id,
+ VideoCaptureControllerEventHandler* event_handler);
+
+ // API called directly by VideoCaptureManager in case the device is
+ // prematurely closed.
+ void StopSession(int session_id);
+
+ // Return a buffer previously given in
+ // VideoCaptureControllerEventHandler::OnBufferReady.
+ void ReturnBuffer(const VideoCaptureControllerID& id,
+ VideoCaptureControllerEventHandler* event_handler,
+ int buffer_id);
+
+ // Implement media::VideoCaptureDevice::EventHandler.
+ virtual scoped_refptr<media::VideoFrame> ReserveOutputBuffer() OVERRIDE;
+ virtual void OnIncomingCapturedFrame(const uint8* data,
+ int length,
+ base::Time timestamp,
+ int rotation,
+ bool flip_vert,
+ bool flip_horiz) OVERRIDE;
+ virtual void OnIncomingCapturedVideoFrame(
+ const scoped_refptr<media::VideoFrame>& frame,
+ base::Time timestamp) OVERRIDE;
+ virtual void OnError() OVERRIDE;
+ virtual void OnFrameInfo(const media::VideoCaptureCapability& info) OVERRIDE;
+ virtual void OnFrameInfoChanged(
+ const media::VideoCaptureCapability& info) OVERRIDE;
+
+ protected:
+ virtual ~VideoCaptureController();
+
+ private:
+ friend class base::RefCountedThreadSafe<VideoCaptureController>;
+
+ struct ControllerClient;
+ typedef std::list<ControllerClient*> ControllerClients;
+
+ // Callback when manager has stopped device.
+ void OnDeviceStopped();
+
+ // Worker functions on IO thread.
+ void DoIncomingCapturedFrameOnIOThread(
+ const scoped_refptr<media::VideoFrame>& captured_frame,
+ base::Time timestamp);
+ void DoFrameInfoOnIOThread();
+ void DoFrameInfoChangedOnIOThread(const media::VideoCaptureCapability& info);
+ void DoErrorOnIOThread();
+ void DoDeviceStoppedOnIOThread();
+
+ // Send frame info and init buffers to |client|.
+ void SendFrameInfoAndBuffers(ControllerClient* client);
+
+ // Find a client of |id| and |handler| in |clients|.
+ ControllerClient* FindClient(
+ const VideoCaptureControllerID& id,
+ VideoCaptureControllerEventHandler* handler,
+ const ControllerClients& clients);
+
+ // Find a client of |session_id| in |clients|.
+ ControllerClient* FindClient(
+ int session_id,
+ const ControllerClients& clients);
+
+  // Decide what to do after the kStopping state. Depending on events, the
+  // controller can stay in kStopping, go to kStopped, or restart capture.
+ void PostStopping();
+
+ // Protects access to the |buffer_pool_| pointer on non-IO threads. IO thread
+ // must hold this lock when modifying the |buffer_pool_| pointer itself.
+ // TODO(nick): Make it so that this lock isn't required.
+ base::Lock buffer_pool_lock_;
+
+ // The pool of shared-memory buffers used for capturing.
+ scoped_refptr<VideoCaptureBufferPool> buffer_pool_;
+
+ // All clients served by this controller.
+ ControllerClients controller_clients_;
+
+ // All clients waiting for service.
+ ControllerClients pending_clients_;
+
+  // The parameters currently used for capturing.
+ media::VideoCaptureParams current_params_;
+
+ // It's modified on caller thread, assuming there is only one OnFrameInfo()
+ // call per StartCapture().
+ media::VideoCaptureCapability frame_info_;
+
+  // Pixels chopped off the width/height in case the video capture device
+  // reports odd values for width/height.
+ int chopped_width_;
+ int chopped_height_;
+
+ // It's accessed only on IO thread.
+ bool frame_info_available_;
+
+ VideoCaptureManager* video_capture_manager_;
+
+ bool device_in_use_;
+ VideoCaptureState state_;
+
+ DISALLOW_IMPLICIT_CONSTRUCTORS(VideoCaptureController);
+};
+
+} // namespace content
+
+#endif // CONTENT_BROWSER_RENDERER_HOST_MEDIA_VIDEO_CAPTURE_CONTROLLER_H_
diff --git a/chromium/content/browser/renderer_host/media/video_capture_controller_event_handler.cc b/chromium/content/browser/renderer_host/media/video_capture_controller_event_handler.cc
new file mode 100644
index 00000000000..7d2d0be3283
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/video_capture_controller_event_handler.cc
@@ -0,0 +1,23 @@
+// Copyright (c) 2011 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "content/browser/renderer_host/media/video_capture_controller_event_handler.h"
+
+namespace content {
+
+VideoCaptureControllerID::VideoCaptureControllerID(int did)
+ : device_id(did) {
+}
+
+bool VideoCaptureControllerID::operator<(
+ const VideoCaptureControllerID& vc) const {
+ return this->device_id < vc.device_id;
+}
+
+bool VideoCaptureControllerID::operator==(
+ const VideoCaptureControllerID& vc) const {
+ return this->device_id == vc.device_id;
+}
+
+} // namespace content
diff --git a/chromium/content/browser/renderer_host/media/video_capture_controller_event_handler.h b/chromium/content/browser/renderer_host/media/video_capture_controller_event_handler.h
new file mode 100644
index 00000000000..c4844af2f73
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/video_capture_controller_event_handler.h
@@ -0,0 +1,65 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CONTENT_BROWSER_RENDERER_HOST_MEDIA_VIDEO_CAPTURE_CONTROLLER_EVENT_HANDLER_H_
+#define CONTENT_BROWSER_RENDERER_HOST_MEDIA_VIDEO_CAPTURE_CONTROLLER_EVENT_HANDLER_H_
+
+#include "base/memory/shared_memory.h"
+#include "base/time/time.h"
+#include "content/common/content_export.h"
+
+namespace media {
+struct VideoCaptureCapability;
+}
+
+namespace content {
+
+// ID used to identify a VideoCaptureController object.
+struct CONTENT_EXPORT VideoCaptureControllerID {
+ explicit VideoCaptureControllerID(int device_id);
+
+ bool operator<(const VideoCaptureControllerID& vc) const;
+ bool operator==(const VideoCaptureControllerID& vc) const;
+
+ int device_id;
+};
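+
+// Note: operator<() exists so that VideoCaptureControllerID can be used as a
+// std::map key (for example, EntryMap in video_capture_host.h). An
+// illustrative sketch, with Entry standing in for the mapped type:
+//
+//   std::map<VideoCaptureControllerID, Entry*> entries;
+//   entries[VideoCaptureControllerID(device_id)] = entry;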
+
+// VideoCaptureControllerEventHandler is the interface through which
+// VideoCaptureController notifies clients about events such as BufferReady,
+// FrameInfo, Error, etc.
+class CONTENT_EXPORT VideoCaptureControllerEventHandler {
+ public:
+ // An Error has occurred in the VideoCaptureDevice.
+ virtual void OnError(const VideoCaptureControllerID& id) = 0;
+
+ // A buffer has been newly created.
+ virtual void OnBufferCreated(const VideoCaptureControllerID& id,
+ base::SharedMemoryHandle handle,
+ int length, int buffer_id) = 0;
+
+ // A buffer has been filled with I420 video.
+ virtual void OnBufferReady(const VideoCaptureControllerID& id,
+ int buffer_id,
+ base::Time timestamp) = 0;
+
+  // The frame resolution the VideoCaptureDevice captures video in.
+ virtual void OnFrameInfo(const VideoCaptureControllerID& id,
+ const media::VideoCaptureCapability& format) = 0;
+
+  // The frame resolution the VideoCaptureDevice captures video in has changed.
+  virtual void OnFrameInfoChanged(const VideoCaptureControllerID& id,
+                                  int width,
+                                  int height,
+                                  int frame_rate) {}
+
+ // The capture session has ended and no more frames will be sent.
+ virtual void OnEnded(const VideoCaptureControllerID& id) = 0;
+
+ protected:
+ virtual ~VideoCaptureControllerEventHandler() {}
+};
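+
+// For reference, the order in which a handler typically sees these callbacks
+// during one capture session (a sketch inferred from the unit tests, not a
+// strict contract):
+//
+//   OnFrameInfo -> OnBufferCreated (once per buffer) -> OnBufferReady ...
+//   ... OnBufferReady -> OnEnded (session stopped) or OnError (failure).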
+
+} // namespace content
+
+#endif // CONTENT_BROWSER_RENDERER_HOST_MEDIA_VIDEO_CAPTURE_CONTROLLER_EVENT_HANDLER_H_
diff --git a/chromium/content/browser/renderer_host/media/video_capture_controller_unittest.cc b/chromium/content/browser/renderer_host/media/video_capture_controller_unittest.cc
new file mode 100644
index 00000000000..c4b716d2e33
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/video_capture_controller_unittest.cc
@@ -0,0 +1,265 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Unit test for VideoCaptureController.
+
+#include <string>
+
+#include "base/bind.h"
+#include "base/memory/ref_counted.h"
+#include "base/memory/scoped_ptr.h"
+#include "base/message_loop/message_loop.h"
+#include "content/browser/browser_thread_impl.h"
+#include "content/browser/renderer_host/media/media_stream_provider.h"
+#include "content/browser/renderer_host/media/video_capture_controller.h"
+#include "content/browser/renderer_host/media/video_capture_manager.h"
+#include "content/common/media/media_stream_options.h"
+#include "media/video/capture/fake_video_capture_device.h"
+#include "media/video/capture/video_capture_device.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using ::testing::_;
+using ::testing::AnyNumber;
+using ::testing::AtLeast;
+using ::testing::InSequence;
+using ::testing::Return;
+
+namespace content {
+
+enum { kDeviceId = 1 };
+
+ACTION_P4(StopCapture, controller, controller_id, controller_handler,
+ message_loop) {
+ message_loop->PostTask(FROM_HERE,
+ base::Bind(&VideoCaptureController::StopCapture,
+ controller, controller_id, controller_handler));
+ message_loop->PostTask(FROM_HERE, base::MessageLoop::QuitClosure());
+}
+
+ACTION_P3(StopSession, controller, session_id, message_loop) {
+ message_loop->PostTask(FROM_HERE,
+ base::Bind(&VideoCaptureController::StopSession,
+ controller, session_id));
+ message_loop->PostTask(FROM_HERE, base::MessageLoop::QuitClosure());
+}
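+
+// These gmock actions let an expectation trigger StopCapture()/StopSession()
+// asynchronously: posting the call to the message loop avoids re-entering the
+// controller from inside its own callback, and the QuitClosure ends
+// message_loop->Run() in the tests below.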
+
+class MockVideoCaptureControllerEventHandler
+ : public VideoCaptureControllerEventHandler {
+ public:
+ MockVideoCaptureControllerEventHandler(VideoCaptureController* controller,
+ base::MessageLoop* message_loop)
+ : controller_(controller),
+ message_loop_(message_loop),
+ controller_id_(kDeviceId),
+ process_handle_(base::kNullProcessHandle) {
+ }
+ virtual ~MockVideoCaptureControllerEventHandler() {}
+
+ MOCK_METHOD1(DoBufferCreated, void(const VideoCaptureControllerID&));
+ MOCK_METHOD1(DoBufferReady, void(const VideoCaptureControllerID&));
+ MOCK_METHOD1(DoFrameInfo, void(const VideoCaptureControllerID&));
+ MOCK_METHOD1(DoEnded, void(const VideoCaptureControllerID&));
+
+ virtual void OnError(const VideoCaptureControllerID& id) OVERRIDE {}
+ virtual void OnBufferCreated(const VideoCaptureControllerID& id,
+ base::SharedMemoryHandle handle,
+ int length, int buffer_id) OVERRIDE {
+ EXPECT_EQ(id, controller_id_);
+ DoBufferCreated(id);
+ }
+ virtual void OnBufferReady(const VideoCaptureControllerID& id,
+ int buffer_id,
+ base::Time timestamp) OVERRIDE {
+ EXPECT_EQ(id, controller_id_);
+ DoBufferReady(id);
+ message_loop_->PostTask(FROM_HERE,
+ base::Bind(&VideoCaptureController::ReturnBuffer,
+ controller_, controller_id_, this, buffer_id));
+ }
+ virtual void OnFrameInfo(
+ const VideoCaptureControllerID& id,
+ const media::VideoCaptureCapability& format) OVERRIDE {
+ EXPECT_EQ(id, controller_id_);
+ DoFrameInfo(id);
+ }
+ virtual void OnEnded(const VideoCaptureControllerID& id) OVERRIDE {
+ EXPECT_EQ(id, controller_id_);
+ DoEnded(id);
+ }
+
+ scoped_refptr<VideoCaptureController> controller_;
+ base::MessageLoop* message_loop_;
+ VideoCaptureControllerID controller_id_;
+ base::ProcessHandle process_handle_;
+};
+
+class MockVideoCaptureManager : public VideoCaptureManager {
+ public:
+ MockVideoCaptureManager()
+ : video_session_id_(kStartOpenSessionId),
+ device_name_("fake_device_0", "/dev/video0") {}
+
+ void Init() {
+ video_capture_device_.reset(
+ media::FakeVideoCaptureDevice::Create(device_name_));
+ ASSERT_TRUE(video_capture_device_.get() != NULL);
+ }
+
+ MOCK_METHOD3(StartCapture, void(int, int,
+ media::VideoCaptureDevice::EventHandler*));
+ MOCK_METHOD1(StopCapture, void(const media::VideoCaptureSessionId&));
+
+ void Start(const media::VideoCaptureParams& capture_params,
+ media::VideoCaptureDevice::EventHandler* vc_receiver) OVERRIDE {
+ StartCapture(capture_params.width, capture_params.height, vc_receiver);
+ // TODO(mcasas): Add testing for variable resolution video capture devices,
+ // supported by FakeVideoCaptureDevice. See crbug.com/261410, second part.
+ media::VideoCaptureCapability capture_format(
+ capture_params.width,
+ capture_params.height,
+ capture_params.frame_per_second,
+ media::VideoCaptureCapability::kI420,
+ 0,
+ false,
+ media::ConstantResolutionVideoCaptureDevice);
+ video_capture_device_->Allocate(capture_format, vc_receiver);
+ video_capture_device_->Start();
+ }
+
+ void Stop(const media::VideoCaptureSessionId& capture_session_id,
+ base::Closure stopped_cb) OVERRIDE {
+ StopCapture(capture_session_id);
+ video_capture_device_->Stop();
+ video_capture_device_->DeAllocate();
+ }
+
+ int video_session_id_;
+ media::VideoCaptureDevice::Name device_name_;
+ scoped_ptr<media::VideoCaptureDevice> video_capture_device_;
+
+ private:
+ virtual ~MockVideoCaptureManager() {}
+ DISALLOW_COPY_AND_ASSIGN(MockVideoCaptureManager);
+};
+
+// Test class.
+class VideoCaptureControllerTest : public testing::Test {
+ public:
+ VideoCaptureControllerTest() {}
+ virtual ~VideoCaptureControllerTest() {}
+
+ protected:
+ virtual void SetUp() OVERRIDE {
+ message_loop_.reset(new base::MessageLoop(base::MessageLoop::TYPE_IO));
+ file_thread_.reset(new BrowserThreadImpl(BrowserThread::FILE,
+ message_loop_.get()));
+ io_thread_.reset(new BrowserThreadImpl(BrowserThread::IO,
+ message_loop_.get()));
+
+ vcm_ = new MockVideoCaptureManager();
+ vcm_->Init();
+ controller_ = new VideoCaptureController(vcm_.get());
+ controller_handler_.reset(new MockVideoCaptureControllerEventHandler(
+ controller_.get(), message_loop_.get()));
+ }
+
+ virtual void TearDown() OVERRIDE {}
+
+ scoped_ptr<base::MessageLoop> message_loop_;
+ scoped_ptr<BrowserThreadImpl> file_thread_;
+ scoped_ptr<BrowserThreadImpl> io_thread_;
+ scoped_refptr<MockVideoCaptureManager> vcm_;
+ scoped_ptr<MockVideoCaptureControllerEventHandler> controller_handler_;
+ scoped_refptr<VideoCaptureController> controller_;
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(VideoCaptureControllerTest);
+};
+
+// Try to start and stop capture.
+TEST_F(VideoCaptureControllerTest, StartAndStop) {
+ media::VideoCaptureParams capture_params;
+ capture_params.session_id = vcm_->video_session_id_;
+ capture_params.width = 320;
+ capture_params.height = 240;
+ capture_params.frame_per_second = 30;
+
+ InSequence s;
+ EXPECT_CALL(*vcm_.get(),
+ StartCapture(capture_params.width,
+ capture_params.height,
+ controller_.get())).Times(1);
+ EXPECT_CALL(*controller_handler_,
+ DoFrameInfo(controller_handler_->controller_id_))
+ .Times(AtLeast(1));
+ EXPECT_CALL(*controller_handler_,
+ DoBufferCreated(controller_handler_->controller_id_))
+ .Times(AtLeast(1));
+ EXPECT_CALL(*controller_handler_,
+ DoBufferReady(controller_handler_->controller_id_))
+ .Times(AtLeast(1))
+ .WillOnce(StopCapture(controller_.get(),
+ controller_handler_->controller_id_,
+ controller_handler_.get(),
+ message_loop_.get()));
+ EXPECT_CALL(*vcm_.get(), StopCapture(vcm_->video_session_id_)).Times(1);
+
+ controller_->StartCapture(controller_handler_->controller_id_,
+ controller_handler_.get(),
+ controller_handler_->process_handle_,
+ capture_params);
+ message_loop_->Run();
+}
+
+// Try to stop session before stopping capture.
+TEST_F(VideoCaptureControllerTest, StopSession) {
+ media::VideoCaptureParams capture_params;
+ capture_params.session_id = vcm_->video_session_id_;
+ capture_params.width = 320;
+ capture_params.height = 240;
+ capture_params.frame_per_second = 30;
+
+ InSequence s;
+ EXPECT_CALL(*vcm_.get(),
+ StartCapture(capture_params.width,
+ capture_params.height,
+ controller_.get())).Times(1);
+ EXPECT_CALL(*controller_handler_,
+ DoFrameInfo(controller_handler_->controller_id_))
+ .Times(AtLeast(1));
+ EXPECT_CALL(*controller_handler_,
+ DoBufferCreated(controller_handler_->controller_id_))
+ .Times(AtLeast(1));
+ EXPECT_CALL(*controller_handler_,
+ DoBufferReady(controller_handler_->controller_id_))
+ .Times(AtLeast(1))
+ .WillOnce(StopSession(controller_.get(),
+ vcm_->video_session_id_,
+ message_loop_.get()));
+ EXPECT_CALL(*controller_handler_,
+ DoEnded(controller_handler_->controller_id_))
+ .Times(1);
+
+ controller_->StartCapture(controller_handler_->controller_id_,
+ controller_handler_.get(),
+ controller_handler_->process_handle_,
+ capture_params);
+ message_loop_->Run();
+
+  // The session is stopped now. There should be no buffers coming from the
+  // controller.
+ EXPECT_CALL(*controller_handler_,
+ DoBufferReady(controller_handler_->controller_id_))
+ .Times(0);
+ message_loop_->PostDelayedTask(FROM_HERE,
+ base::MessageLoop::QuitClosure(), base::TimeDelta::FromSeconds(1));
+ message_loop_->Run();
+
+ EXPECT_CALL(*vcm_.get(), StopCapture(vcm_->video_session_id_)).Times(1);
+ controller_->StopCapture(controller_handler_->controller_id_,
+ controller_handler_.get());
+}
+
+} // namespace content
diff --git a/chromium/content/browser/renderer_host/media/video_capture_host.cc b/chromium/content/browser/renderer_host/media/video_capture_host.cc
new file mode 100644
index 00000000000..bc7c8c19d7d
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/video_capture_host.cc
@@ -0,0 +1,315 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "content/browser/renderer_host/media/video_capture_host.h"
+
+#include "base/bind.h"
+#include "base/memory/scoped_ptr.h"
+#include "base/stl_util.h"
+#include "content/browser/browser_main_loop.h"
+#include "content/browser/renderer_host/media/media_stream_manager.h"
+#include "content/browser/renderer_host/media/video_capture_manager.h"
+#include "content/common/media/video_capture_messages.h"
+
+namespace content {
+
+struct VideoCaptureHost::Entry {
+ Entry(VideoCaptureController* controller)
+ : controller(controller) {}
+
+ ~Entry() {}
+
+ scoped_refptr<VideoCaptureController> controller;
+};
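+
+// An Entry whose |controller| is still NULL marks a start request that is
+// waiting for VideoCaptureManager::AddController() to call back; see
+// OnStartCapture() and DoControllerAddedOnIOThread() below.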
+
+VideoCaptureHost::VideoCaptureHost(MediaStreamManager* media_stream_manager)
+ : media_stream_manager_(media_stream_manager) {
+}
+
+VideoCaptureHost::~VideoCaptureHost() {}
+
+void VideoCaptureHost::OnChannelClosing() {
+ BrowserMessageFilter::OnChannelClosing();
+
+  // Since the IPC channel is gone, close all requested VideoCaptureDevices.
+ for (EntryMap::iterator it = entries_.begin(); it != entries_.end(); it++) {
+ VideoCaptureController* controller = it->second->controller.get();
+ if (controller) {
+ VideoCaptureControllerID controller_id(it->first);
+ controller->StopCapture(controller_id, this);
+ media_stream_manager_->video_capture_manager()->RemoveController(
+ controller, this);
+ }
+ }
+ STLDeleteValues(&entries_);
+}
+
+void VideoCaptureHost::OnDestruct() const {
+ BrowserThread::DeleteOnIOThread::Destruct(this);
+}
+
+///////////////////////////////////////////////////////////////////////////////
+
+// Implements VideoCaptureControllerEventHandler.
+void VideoCaptureHost::OnError(const VideoCaptureControllerID& controller_id) {
+ DVLOG(1) << "VideoCaptureHost::OnError";
+ BrowserThread::PostTask(
+ BrowserThread::IO, FROM_HERE,
+ base::Bind(&VideoCaptureHost::DoHandleErrorOnIOThread,
+ this, controller_id));
+}
+
+void VideoCaptureHost::OnBufferCreated(
+ const VideoCaptureControllerID& controller_id,
+ base::SharedMemoryHandle handle,
+ int length,
+ int buffer_id) {
+ BrowserThread::PostTask(
+ BrowserThread::IO, FROM_HERE,
+ base::Bind(&VideoCaptureHost::DoSendNewBufferOnIOThread,
+ this, controller_id, handle, length, buffer_id));
+}
+
+void VideoCaptureHost::OnBufferReady(
+ const VideoCaptureControllerID& controller_id,
+ int buffer_id,
+ base::Time timestamp) {
+ BrowserThread::PostTask(
+ BrowserThread::IO, FROM_HERE,
+ base::Bind(&VideoCaptureHost::DoSendFilledBufferOnIOThread,
+ this, controller_id, buffer_id, timestamp));
+}
+
+void VideoCaptureHost::OnFrameInfo(
+ const VideoCaptureControllerID& controller_id,
+ const media::VideoCaptureCapability& format) {
+ BrowserThread::PostTask(
+ BrowserThread::IO,
+ FROM_HERE,
+ base::Bind(&VideoCaptureHost::DoSendFrameInfoOnIOThread,
+ this, controller_id, format));
+}
+
+void VideoCaptureHost::OnFrameInfoChanged(
+ const VideoCaptureControllerID& controller_id,
+ int width,
+ int height,
+ int frame_per_second) {
+ BrowserThread::PostTask(
+ BrowserThread::IO, FROM_HERE,
+ base::Bind(&VideoCaptureHost::DoSendFrameInfoChangedOnIOThread,
+ this, controller_id, width, height, frame_per_second));
+}
+
+void VideoCaptureHost::OnEnded(const VideoCaptureControllerID& controller_id) {
+ DVLOG(1) << "VideoCaptureHost::OnEnded";
+ BrowserThread::PostTask(
+ BrowserThread::IO, FROM_HERE,
+ base::Bind(&VideoCaptureHost::DoEndedOnIOThread, this, controller_id));
+}
+
+void VideoCaptureHost::DoSendNewBufferOnIOThread(
+ const VideoCaptureControllerID& controller_id,
+ base::SharedMemoryHandle handle,
+ int length,
+ int buffer_id) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ if (entries_.find(controller_id) == entries_.end())
+ return;
+
+ Send(new VideoCaptureMsg_NewBuffer(controller_id.device_id, handle,
+ length, buffer_id));
+}
+
+void VideoCaptureHost::DoSendFilledBufferOnIOThread(
+ const VideoCaptureControllerID& controller_id,
+ int buffer_id, base::Time timestamp) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ if (entries_.find(controller_id) == entries_.end())
+ return;
+
+ Send(new VideoCaptureMsg_BufferReady(controller_id.device_id, buffer_id,
+ timestamp));
+}
+
+void VideoCaptureHost::DoHandleErrorOnIOThread(
+ const VideoCaptureControllerID& controller_id) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ if (entries_.find(controller_id) == entries_.end())
+ return;
+
+ Send(new VideoCaptureMsg_StateChanged(controller_id.device_id,
+ VIDEO_CAPTURE_STATE_ERROR));
+ DeleteVideoCaptureControllerOnIOThread(controller_id);
+}
+
+void VideoCaptureHost::DoEndedOnIOThread(
+ const VideoCaptureControllerID& controller_id) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ DVLOG(1) << "VideoCaptureHost::DoEndedOnIOThread";
+ if (entries_.find(controller_id) == entries_.end())
+ return;
+
+ Send(new VideoCaptureMsg_StateChanged(controller_id.device_id,
+ VIDEO_CAPTURE_STATE_ENDED));
+ DeleteVideoCaptureControllerOnIOThread(controller_id);
+}
+
+void VideoCaptureHost::DoSendFrameInfoOnIOThread(
+ const VideoCaptureControllerID& controller_id,
+ const media::VideoCaptureCapability& format) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ if (entries_.find(controller_id) == entries_.end())
+ return;
+
+ media::VideoCaptureParams params;
+ params.width = format.width;
+ params.height = format.height;
+ params.frame_per_second = format.frame_rate;
+ params.frame_size_type = format.frame_size_type;
+ Send(new VideoCaptureMsg_DeviceInfo(controller_id.device_id, params));
+ Send(new VideoCaptureMsg_StateChanged(controller_id.device_id,
+ VIDEO_CAPTURE_STATE_STARTED));
+}
+
+void VideoCaptureHost::DoSendFrameInfoChangedOnIOThread(
+ const VideoCaptureControllerID& controller_id,
+ int width,
+ int height,
+ int frame_per_second) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ if (entries_.find(controller_id) == entries_.end())
+ return;
+
+ media::VideoCaptureParams params;
+ params.width = width;
+ params.height = height;
+ params.frame_per_second = frame_per_second;
+ Send(new VideoCaptureMsg_DeviceInfoChanged(controller_id.device_id, params));
+}
+
+///////////////////////////////////////////////////////////////////////////////
+// IPC Messages handler.
+bool VideoCaptureHost::OnMessageReceived(const IPC::Message& message,
+ bool* message_was_ok) {
+ bool handled = true;
+ IPC_BEGIN_MESSAGE_MAP_EX(VideoCaptureHost, message, *message_was_ok)
+ IPC_MESSAGE_HANDLER(VideoCaptureHostMsg_Start, OnStartCapture)
+ IPC_MESSAGE_HANDLER(VideoCaptureHostMsg_Pause, OnPauseCapture)
+ IPC_MESSAGE_HANDLER(VideoCaptureHostMsg_Stop, OnStopCapture)
+ IPC_MESSAGE_HANDLER(VideoCaptureHostMsg_BufferReady, OnReceiveEmptyBuffer)
+ IPC_MESSAGE_UNHANDLED(handled = false)
+ IPC_END_MESSAGE_MAP_EX()
+
+ return handled;
+}
+
+void VideoCaptureHost::OnStartCapture(int device_id,
+ const media::VideoCaptureParams& params) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ DVLOG(1) << "VideoCaptureHost::OnStartCapture, device_id " << device_id
+ << ", (" << params.width << ", " << params.height << ", "
+ << params.frame_per_second << ", " << params.session_id
+ << ", variable resolution device:"
+ << ((params.frame_size_type ==
+ media::VariableResolutionVideoCaptureDevice) ? "yes" : "no")
+ << ")";
+ VideoCaptureControllerID controller_id(device_id);
+ DCHECK(entries_.find(controller_id) == entries_.end());
+
+ entries_[controller_id] = new Entry(NULL);
+ media_stream_manager_->video_capture_manager()->AddController(
+ params, this, base::Bind(&VideoCaptureHost::OnControllerAdded, this,
+ device_id, params));
+}
+
+void VideoCaptureHost::OnControllerAdded(
+ int device_id, const media::VideoCaptureParams& params,
+ VideoCaptureController* controller) {
+ BrowserThread::PostTask(
+ BrowserThread::IO, FROM_HERE,
+ base::Bind(&VideoCaptureHost::DoControllerAddedOnIOThread,
+ this, device_id, params, make_scoped_refptr(controller)));
+}
+
+void VideoCaptureHost::DoControllerAddedOnIOThread(
+ int device_id, const media::VideoCaptureParams params,
+ VideoCaptureController* controller) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ VideoCaptureControllerID controller_id(device_id);
+ EntryMap::iterator it = entries_.find(controller_id);
+ if (it == entries_.end()) {
+ if (controller) {
+ media_stream_manager_->video_capture_manager()->RemoveController(
+ controller, this);
+ }
+ return;
+ }
+
+ if (controller == NULL) {
+ Send(new VideoCaptureMsg_StateChanged(device_id,
+ VIDEO_CAPTURE_STATE_ERROR));
+ delete it->second;
+ entries_.erase(controller_id);
+ return;
+ }
+
+ it->second->controller = controller;
+ controller->StartCapture(controller_id, this, PeerHandle(), params);
+}
+
+void VideoCaptureHost::OnStopCapture(int device_id) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ DVLOG(1) << "VideoCaptureHost::OnStopCapture, device_id " << device_id;
+
+ VideoCaptureControllerID controller_id(device_id);
+
+ Send(new VideoCaptureMsg_StateChanged(device_id,
+ VIDEO_CAPTURE_STATE_STOPPED));
+ DeleteVideoCaptureControllerOnIOThread(controller_id);
+}
+
+void VideoCaptureHost::OnPauseCapture(int device_id) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ DVLOG(1) << "VideoCaptureHost::OnPauseCapture, device_id " << device_id;
+ // Not used.
+ Send(new VideoCaptureMsg_StateChanged(device_id, VIDEO_CAPTURE_STATE_ERROR));
+}
+
+void VideoCaptureHost::OnReceiveEmptyBuffer(int device_id, int buffer_id) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ VideoCaptureControllerID controller_id(device_id);
+ EntryMap::iterator it = entries_.find(controller_id);
+ if (it != entries_.end()) {
+ scoped_refptr<VideoCaptureController> controller = it->second->controller;
+ if (controller.get())
+ controller->ReturnBuffer(controller_id, this, buffer_id);
+ }
+}
+
+void VideoCaptureHost::DeleteVideoCaptureControllerOnIOThread(
+ const VideoCaptureControllerID& controller_id) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+ EntryMap::iterator it = entries_.find(controller_id);
+ if (it == entries_.end())
+ return;
+
+ VideoCaptureController* controller = it->second->controller.get();
+ if (controller) {
+ controller->StopCapture(controller_id, this);
+ media_stream_manager_->video_capture_manager()->RemoveController(
+ controller, this);
+ }
+ delete it->second;
+ entries_.erase(controller_id);
+}
+
+} // namespace content
diff --git a/chromium/content/browser/renderer_host/media/video_capture_host.h b/chromium/content/browser/renderer_host/media/video_capture_host.h
new file mode 100644
index 00000000000..025b849de94
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/video_capture_host.h
@@ -0,0 +1,161 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// VideoCaptureHost serves video capture related messages from
+// VideoCaptureMessageFilter which lives inside the render process.
+//
+// This class is owned by BrowserRenderProcessHost, and instantiated on UI
+// thread, but all other operations and method calls happen on IO thread.
+//
+// Here's an example of a typical IPC dialog for video capture:
+//
+// Renderer VideoCaptureHost
+// | |
+// | VideoCaptureHostMsg_Start > |
+// | < VideoCaptureMsg_DeviceInfo |
+// | |
+// | < VideoCaptureMsg_StateChanged |
+// | (kStarted) |
+// | < VideoCaptureMsg_BufferReady |
+// | ... |
+// | < VideoCaptureMsg_BufferReady |
+// | ... |
+// | VideoCaptureHostMsg_BufferReady > |
+// | VideoCaptureHostMsg_BufferReady > |
+// | |
+// | ... |
+// | |
+// | < VideoCaptureMsg_BufferReady |
+// | VideoCaptureHostMsg_Stop > |
+// | VideoCaptureHostMsg_BufferReady > |
+// | < VideoCaptureMsg_StateChanged |
+// | (kStopped) |
+// v v
+
+#ifndef CONTENT_BROWSER_RENDERER_HOST_MEDIA_VIDEO_CAPTURE_HOST_H_
+#define CONTENT_BROWSER_RENDERER_HOST_MEDIA_VIDEO_CAPTURE_HOST_H_
+
+#include <map>
+
+#include "base/memory/ref_counted.h"
+#include "base/sequenced_task_runner_helpers.h"
+#include "content/browser/renderer_host/media/video_capture_controller.h"
+#include "content/common/content_export.h"
+#include "content/public/browser/browser_message_filter.h"
+#include "ipc/ipc_message.h"
+
+namespace media {
+struct VideoCaptureCapability;
+}
+
+namespace content {
+class MediaStreamManager;
+
+class CONTENT_EXPORT VideoCaptureHost
+ : public BrowserMessageFilter,
+ public VideoCaptureControllerEventHandler {
+ public:
+ explicit VideoCaptureHost(MediaStreamManager* media_stream_manager);
+
+ // BrowserMessageFilter implementation.
+ virtual void OnChannelClosing() OVERRIDE;
+ virtual void OnDestruct() const OVERRIDE;
+ virtual bool OnMessageReceived(const IPC::Message& message,
+ bool* message_was_ok) OVERRIDE;
+
+ // VideoCaptureControllerEventHandler implementation.
+ virtual void OnError(const VideoCaptureControllerID& id) OVERRIDE;
+ virtual void OnBufferCreated(const VideoCaptureControllerID& id,
+ base::SharedMemoryHandle handle,
+ int length, int buffer_id) OVERRIDE;
+ virtual void OnBufferReady(const VideoCaptureControllerID& id,
+ int buffer_id,
+ base::Time timestamp) OVERRIDE;
+ virtual void OnFrameInfo(
+ const VideoCaptureControllerID& id,
+ const media::VideoCaptureCapability& format) OVERRIDE;
+ virtual void OnFrameInfoChanged(const VideoCaptureControllerID& id,
+ int width,
+ int height,
+ int frame_per_second) OVERRIDE;
+ virtual void OnEnded(const VideoCaptureControllerID& id) OVERRIDE;
+
+ private:
+ friend class BrowserThread;
+ friend class base::DeleteHelper<VideoCaptureHost>;
+ friend class MockVideoCaptureHost;
+ friend class VideoCaptureHostTest;
+
+ virtual ~VideoCaptureHost();
+
+ // IPC message: Start capture on the VideoCaptureDevice referenced by
+ // VideoCaptureParams::session_id. |device_id| is an id created by
+ // VideoCaptureMessageFilter to identify a session
+ // between a VideoCaptureMessageFilter and a VideoCaptureHost.
+ void OnStartCapture(int device_id,
+ const media::VideoCaptureParams& params);
+ void OnControllerAdded(
+ int device_id, const media::VideoCaptureParams& params,
+ VideoCaptureController* controller);
+ void DoControllerAddedOnIOThread(
+ int device_id, const media::VideoCaptureParams params,
+ VideoCaptureController* controller);
+
+ // IPC message: Stop capture on device referenced by |device_id|.
+ void OnStopCapture(int device_id);
+
+ // IPC message: Pause capture on device referenced by |device_id|.
+ void OnPauseCapture(int device_id);
+
+ // IPC message: Receive an empty buffer from renderer. Send it to device
+ // referenced by |device_id|.
+ void OnReceiveEmptyBuffer(int device_id, int buffer_id);
+
+ // Send a newly created buffer to the VideoCaptureMessageFilter.
+ void DoSendNewBufferOnIOThread(
+ const VideoCaptureControllerID& controller_id,
+ base::SharedMemoryHandle handle,
+ int length,
+ int buffer_id);
+
+ // Send a filled buffer to the VideoCaptureMessageFilter.
+ void DoSendFilledBufferOnIOThread(
+ const VideoCaptureControllerID& controller_id,
+ int buffer_id,
+ base::Time timestamp);
+
+ // Send information about the capture parameters (resolution, frame rate etc)
+ // to the VideoCaptureMessageFilter.
+ void DoSendFrameInfoOnIOThread(const VideoCaptureControllerID& controller_id,
+ const media::VideoCaptureCapability& format);
+
+ // Send newly changed information about frame resolution and frame rate
+ // to the VideoCaptureMessageFilter.
+ void DoSendFrameInfoChangedOnIOThread(
+ const VideoCaptureControllerID& controller_id,
+ int width,
+ int height,
+ int frame_per_second);
+
+ // Handle error coming from VideoCaptureDevice.
+ void DoHandleErrorOnIOThread(const VideoCaptureControllerID& controller_id);
+
+ void DoEndedOnIOThread(const VideoCaptureControllerID& controller_id);
+
+ void DeleteVideoCaptureControllerOnIOThread(
+ const VideoCaptureControllerID& controller_id);
+
+ MediaStreamManager* media_stream_manager_;
+
+ struct Entry;
+ typedef std::map<VideoCaptureControllerID, Entry*> EntryMap;
+ // A map of VideoCaptureControllerID to its state and VideoCaptureController.
+ EntryMap entries_;
+
+ DISALLOW_COPY_AND_ASSIGN(VideoCaptureHost);
+};
+
+} // namespace content
+
+#endif // CONTENT_BROWSER_RENDERER_HOST_MEDIA_VIDEO_CAPTURE_HOST_H_
diff --git a/chromium/content/browser/renderer_host/media/video_capture_host_unittest.cc b/chromium/content/browser/renderer_host/media/video_capture_host_unittest.cc
new file mode 100644
index 00000000000..762148c86a2
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/video_capture_host_unittest.cc
@@ -0,0 +1,371 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <map>
+#include <string>
+
+#include "base/bind.h"
+#include "base/file_util.h"
+#include "base/memory/scoped_ptr.h"
+#include "base/message_loop/message_loop.h"
+#include "base/run_loop.h"
+#include "base/stl_util.h"
+#include "base/strings/stringprintf.h"
+#include "content/browser/browser_thread_impl.h"
+#include "content/browser/renderer_host/media/media_stream_manager.h"
+#include "content/browser/renderer_host/media/video_capture_host.h"
+#include "content/browser/renderer_host/media/video_capture_manager.h"
+#include "content/common/media/video_capture_messages.h"
+#include "content/public/test/mock_resource_context.h"
+#include "content/public/test/test_browser_thread_bundle.h"
+#include "media/audio/audio_manager.h"
+#include "media/video/capture/video_capture_types.h"
+#include "net/url_request/url_request_context.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using ::testing::_;
+using ::testing::AtLeast;
+using ::testing::AnyNumber;
+using ::testing::DoAll;
+using ::testing::InSequence;
+using ::testing::Mock;
+using ::testing::Return;
+
+namespace content {
+
+// Id used to identify the capture session between renderer and
+// video_capture_host.
+static const int kDeviceId = 1;
+// Id of a video capture device
+static const media::VideoCaptureSessionId kTestFakeDeviceId =
+ VideoCaptureManager::kStartOpenSessionId;
+
+// Define to enable test where video is dumped to file.
+// #define DUMP_VIDEO
+
+// Define to use a real video capture device.
+// #define TEST_REAL_CAPTURE_DEVICE
+
+// Simple class used for dumping video to a file. This can be used for
+// verifying the output.
+class DumpVideo {
+ public:
+ DumpVideo() : expected_size_(0) {}
+ void StartDump(int width, int height) {
+ base::FilePath file_name = base::FilePath(base::StringPrintf(
+ FILE_PATH_LITERAL("dump_w%d_h%d.yuv"), width, height));
+ file_.reset(file_util::OpenFile(file_name, "wb"));
+ expected_size_ = width * height * 3 / 2;
+ }
+ void NewVideoFrame(const void* buffer) {
+ if (file_.get() != NULL) {
+ fwrite(buffer, expected_size_, 1, file_.get());
+ }
+ }
+
+ private:
+ file_util::ScopedFILE file_;
+ int expected_size_;
+};
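+
+// For an I420 frame, |expected_size_| is width * height bytes for the Y plane
+// plus width * height / 4 bytes for each of the U and V planes, i.e.
+// width * height * 3 / 2. For example, a 320x240 frame takes 115200 bytes.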
+
+class MockVideoCaptureHost : public VideoCaptureHost {
+ public:
+ MockVideoCaptureHost(MediaStreamManager* manager)
+ : VideoCaptureHost(manager),
+ return_buffers_(false),
+ dump_video_(false) {}
+
+ // A list of mock methods.
+ MOCK_METHOD4(OnNewBufferCreated,
+ void(int device_id, base::SharedMemoryHandle handle,
+ int length, int buffer_id));
+ MOCK_METHOD3(OnBufferFilled,
+ void(int device_id, int buffer_id, base::Time timestamp));
+ MOCK_METHOD2(OnStateChanged, void(int device_id, VideoCaptureState state));
+ MOCK_METHOD1(OnDeviceInfo, void(int device_id));
+
+ // Use class DumpVideo to write I420 video to file.
+ void SetDumpVideo(bool enable) {
+ dump_video_ = enable;
+ }
+
+ void SetReturnReceviedDibs(bool enable) {
+ return_buffers_ = enable;
+ }
+
+ // Return Dibs we currently have received.
+ void ReturnReceivedDibs(int device_id) {
+ int handle = GetReceivedDib();
+ while (handle) {
+ this->OnReceiveEmptyBuffer(device_id, handle);
+ handle = GetReceivedDib();
+ }
+ }
+
+ int GetReceivedDib() {
+ if (filled_dib_.empty())
+ return 0;
+ std::map<int, base::SharedMemory*>::iterator it = filled_dib_.begin();
+ int h = it->first;
+ delete it->second;
+ filled_dib_.erase(it);
+
+ return h;
+ }
+
+ private:
+ virtual ~MockVideoCaptureHost() {
+ STLDeleteContainerPairSecondPointers(filled_dib_.begin(),
+ filled_dib_.end());
+ }
+
+ // This method is used to dispatch IPC messages to the renderer. We intercept
+ // these messages here and dispatch to our mock methods to verify the
+ // conversation between this object and the renderer.
+ virtual bool Send(IPC::Message* message) OVERRIDE {
+ CHECK(message);
+
+    // In this method we dispatch the messages to the corresponding handlers
+    // as if we were the renderer.
+ bool handled = true;
+ IPC_BEGIN_MESSAGE_MAP(MockVideoCaptureHost, *message)
+ IPC_MESSAGE_HANDLER(VideoCaptureMsg_NewBuffer, OnNewBufferCreatedDispatch)
+ IPC_MESSAGE_HANDLER(VideoCaptureMsg_BufferReady, OnBufferFilledDispatch)
+ IPC_MESSAGE_HANDLER(VideoCaptureMsg_StateChanged, OnStateChangedDispatch)
+ IPC_MESSAGE_HANDLER(VideoCaptureMsg_DeviceInfo, OnDeviceInfoDispatch)
+ IPC_MESSAGE_UNHANDLED(handled = false)
+ IPC_END_MESSAGE_MAP()
+ EXPECT_TRUE(handled);
+
+ delete message;
+ return true;
+ }
+
+ // These handler methods do minimal things and delegate to the mock methods.
+ void OnNewBufferCreatedDispatch(int device_id,
+ base::SharedMemoryHandle handle,
+ int length, int buffer_id) {
+ OnNewBufferCreated(device_id, handle, length, buffer_id);
+ base::SharedMemory* dib = new base::SharedMemory(handle, false);
+ dib->Map(length);
+ filled_dib_[buffer_id] = dib;
+ }
+
+ void OnBufferFilledDispatch(int device_id, int buffer_id,
+ base::Time timestamp) {
+ if (dump_video_) {
+ base::SharedMemory* dib = filled_dib_[buffer_id];
+ ASSERT_TRUE(dib != NULL);
+ dumper_.NewVideoFrame(dib->memory());
+ }
+
+ OnBufferFilled(device_id, buffer_id, timestamp);
+ if (return_buffers_) {
+ VideoCaptureHost::OnReceiveEmptyBuffer(device_id, buffer_id);
+ }
+ }
+
+ void OnStateChangedDispatch(int device_id, VideoCaptureState state) {
+ OnStateChanged(device_id, state);
+ }
+
+ void OnDeviceInfoDispatch(int device_id,
+ media::VideoCaptureParams params) {
+ if (dump_video_) {
+ dumper_.StartDump(params.width, params.height);
+ }
+ OnDeviceInfo(device_id);
+ }
+
+ std::map<int, base::SharedMemory*> filled_dib_;
+ bool return_buffers_;
+ bool dump_video_;
+ DumpVideo dumper_;
+};
+
+ACTION_P2(ExitMessageLoop, message_loop, quit_closure) {
+ message_loop->PostTask(FROM_HERE, quit_closure);
+}
+
+class VideoCaptureHostTest : public testing::Test {
+ public:
+ VideoCaptureHostTest()
+ : thread_bundle_(content::TestBrowserThreadBundle::IO_MAINLOOP),
+ message_loop_(base::MessageLoopProxy::current()) {
+ // Create our own MediaStreamManager.
+ audio_manager_.reset(media::AudioManager::Create());
+ media_stream_manager_.reset(new MediaStreamManager(audio_manager_.get()));
+#ifndef TEST_REAL_CAPTURE_DEVICE
+ media_stream_manager_->UseFakeDevice();
+#endif
+
+ host_ = new MockVideoCaptureHost(media_stream_manager_.get());
+
+ // Simulate IPC channel connected.
+ host_->OnChannelConnected(base::GetCurrentProcId());
+ }
+
+ virtual ~VideoCaptureHostTest() {
+ // Verifies and removes the expectations on host_ and
+ // returns true iff successful.
+ Mock::VerifyAndClearExpectations(host_.get());
+
+ EXPECT_CALL(*host_.get(),
+ OnStateChanged(kDeviceId, VIDEO_CAPTURE_STATE_STOPPED))
+ .Times(AnyNumber());
+
+ // Simulate closing the IPC channel.
+ host_->OnChannelClosing();
+
+ // Release the reference to the mock object. The object will be destructed
+ // on the current message loop.
+ host_ = NULL;
+
+ media_stream_manager_->WillDestroyCurrentMessageLoop();
+ }
+
+ protected:
+ void StartCapture() {
+ InSequence s;
+ // 1. First - get info about the new resolution
+ EXPECT_CALL(*host_.get(), OnDeviceInfo(kDeviceId));
+
+ // 2. Change state to started
+ EXPECT_CALL(*host_.get(),
+ OnStateChanged(kDeviceId, VIDEO_CAPTURE_STATE_STARTED));
+
+ // 3. Newly created buffers will arrive.
+ EXPECT_CALL(*host_.get(), OnNewBufferCreated(kDeviceId, _, _, _))
+ .Times(AnyNumber()).WillRepeatedly(Return());
+
+ // 4. First filled buffer will arrive.
+ base::RunLoop run_loop;
+ EXPECT_CALL(*host_.get(), OnBufferFilled(kDeviceId, _, _))
+ .Times(AnyNumber()).WillOnce(ExitMessageLoop(
+ message_loop_, run_loop.QuitClosure()));
+
+ media::VideoCaptureParams params;
+ params.width = 352;
+ params.height = 288;
+ params.frame_per_second = 30;
+ params.session_id = kTestFakeDeviceId;
+ host_->OnStartCapture(kDeviceId, params);
+ run_loop.Run();
+ }
+
+#ifdef DUMP_VIDEO
+  void CaptureAndDumpVideo(int width, int height, int frame_rate) {
+ InSequence s;
+ // 1. First - get info about the new resolution
+ EXPECT_CALL(*host_, OnDeviceInfo(kDeviceId));
+
+ // 2. Change state to started
+ EXPECT_CALL(*host_, OnStateChanged(kDeviceId, VIDEO_CAPTURE_STATE_STARTED));
+
+ // 3. First filled buffer will arrive.
+ base::RunLoop run_loop;
+ EXPECT_CALL(*host_, OnBufferFilled(kDeviceId, _, _))
+ .Times(AnyNumber())
+ .WillOnce(ExitMessageLoop(message_loop_, run_loop.QuitClosure()));
+
+ media::VideoCaptureParams params;
+ params.width = width;
+    params.height = height;
+ params.frame_per_second = frame_rate;
+ params.session_id = kTestFakeDeviceId;
+ host_->SetDumpVideo(true);
+ host_->OnStartCapture(kDeviceId, params);
+ run_loop.Run();
+ }
+#endif
+
+ void StopCapture() {
+ base::RunLoop run_loop;
+ EXPECT_CALL(*host_.get(),
+ OnStateChanged(kDeviceId, VIDEO_CAPTURE_STATE_STOPPED))
+ .WillOnce(ExitMessageLoop(message_loop_, run_loop.QuitClosure()));
+
+ host_->OnStopCapture(kDeviceId);
+ host_->SetReturnReceviedDibs(true);
+ host_->ReturnReceivedDibs(kDeviceId);
+
+ run_loop.Run();
+
+ host_->SetReturnReceviedDibs(false);
+    // Expect that the VideoCaptureDevice has been stopped.
+ EXPECT_EQ(0u, host_->entries_.size());
+ }
+
+ void NotifyPacketReady() {
+ base::RunLoop run_loop;
+ EXPECT_CALL(*host_.get(), OnBufferFilled(kDeviceId, _, _))
+ .Times(AnyNumber()).WillOnce(ExitMessageLoop(
+ message_loop_, run_loop.QuitClosure()))
+ .RetiresOnSaturation();
+ run_loop.Run();
+ }
+
+ void ReturnReceivedPackets() {
+ host_->ReturnReceivedDibs(kDeviceId);
+ }
+
+ void SimulateError() {
+ // Expect a change state to error state sent through IPC.
+ EXPECT_CALL(*host_.get(),
+ OnStateChanged(kDeviceId, VIDEO_CAPTURE_STATE_ERROR)).Times(1);
+ VideoCaptureControllerID id(kDeviceId);
+ host_->OnError(id);
+ // Wait for the error callback.
+ base::RunLoop().RunUntilIdle();
+ }
+
+ scoped_refptr<MockVideoCaptureHost> host_;
+
+ private:
+ scoped_ptr<media::AudioManager> audio_manager_;
+ scoped_ptr<MediaStreamManager> media_stream_manager_;
+ content::TestBrowserThreadBundle thread_bundle_;
+ scoped_refptr<base::MessageLoopProxy> message_loop_;
+
+ DISALLOW_COPY_AND_ASSIGN(VideoCaptureHostTest);
+};
+
+TEST_F(VideoCaptureHostTest, StartCapture) {
+ StartCapture();
+}
+
+TEST_F(VideoCaptureHostTest, StartCapturePlayStop) {
+ StartCapture();
+ NotifyPacketReady();
+ NotifyPacketReady();
+ ReturnReceivedPackets();
+ StopCapture();
+}
+
+TEST_F(VideoCaptureHostTest, StartCaptureErrorStop) {
+ StartCapture();
+ SimulateError();
+ StopCapture();
+}
+
+TEST_F(VideoCaptureHostTest, StartCaptureError) {
+ EXPECT_CALL(*host_.get(),
+ OnStateChanged(kDeviceId, VIDEO_CAPTURE_STATE_STOPPED)).Times(0);
+ StartCapture();
+ NotifyPacketReady();
+ SimulateError();
+ base::PlatformThread::Sleep(base::TimeDelta::FromMilliseconds(200));
+}
+
+#ifdef DUMP_VIDEO
+TEST_F(VideoCaptureHostTest, CaptureAndDumpVideoVga) {
+ CaptureAndDumpVideo(640, 480, 30);
+}
+TEST_F(VideoCaptureHostTest, CaptureAndDump720P) {
+ CaptureAndDumpVideo(1280, 720, 30);
+}
+#endif
+
+} // namespace content
diff --git a/chromium/content/browser/renderer_host/media/video_capture_manager.cc b/chromium/content/browser/renderer_host/media/video_capture_manager.cc
new file mode 100644
index 00000000000..79da41260a3
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/video_capture_manager.cc
@@ -0,0 +1,593 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "content/browser/renderer_host/media/video_capture_manager.h"
+
+#include <set>
+
+#include "base/bind.h"
+#include "base/command_line.h"
+#include "base/logging.h"
+#include "base/stl_util.h"
+#include "base/threading/sequenced_worker_pool.h"
+#include "content/browser/renderer_host/media/video_capture_controller.h"
+#include "content/browser/renderer_host/media/video_capture_controller_event_handler.h"
+#include "content/browser/renderer_host/media/web_contents_video_capture_device.h"
+#include "content/public/browser/browser_thread.h"
+#include "content/public/common/content_switches.h"
+#include "content/public/common/desktop_media_id.h"
+#include "content/public/common/media_stream_request.h"
+#include "media/base/scoped_histogram_timer.h"
+#include "media/video/capture/fake_video_capture_device.h"
+#include "media/video/capture/video_capture_device.h"
+
+#if defined(ENABLE_SCREEN_CAPTURE)
+#include "content/browser/renderer_host/media/desktop_capture_device.h"
+#endif
+
+namespace content {
+
+// Starting id for the first capture session.
+// VideoCaptureManager::kStartOpenSessionId is used as the default id for
+// capture that is started without an explicit call to Open().
+enum { kFirstSessionId = VideoCaptureManager::kStartOpenSessionId + 1 };
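+
+// In other words (a sketch based on Open() below): the first explicit Open()
+// call returns kFirstSessionId, the next kFirstSessionId + 1, and so on, while
+// kStartOpenSessionId is reserved for capture started without calling Open().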
+
+struct VideoCaptureManager::Controller {
+ Controller(
+ VideoCaptureController* vc_controller,
+ VideoCaptureControllerEventHandler* handler)
+ : controller(vc_controller),
+ ready_to_delete(false) {
+ handlers.push_front(handler);
+ }
+ ~Controller() {}
+
+ scoped_refptr<VideoCaptureController> controller;
+ bool ready_to_delete;
+ Handlers handlers;
+};
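+
+// A Controller tracks the VideoCaptureController and the event handlers
+// registered for one capture device. |ready_to_delete| is set once the device
+// has been stopped (see OnStop() below); the Controller is deleted when its
+// handler list becomes empty.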
+
+VideoCaptureManager::VideoCaptureManager()
+ : listener_(NULL),
+ new_capture_session_id_(kFirstSessionId),
+ use_fake_device_(false) {
+}
+
+VideoCaptureManager::~VideoCaptureManager() {
+ DCHECK(devices_.empty());
+ DCHECK(controllers_.empty());
+}
+
+void VideoCaptureManager::Register(MediaStreamProviderListener* listener,
+ base::MessageLoopProxy* device_thread_loop) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ DCHECK(!listener_);
+ DCHECK(!device_loop_.get());
+ listener_ = listener;
+ device_loop_ = device_thread_loop;
+}
+
+void VideoCaptureManager::Unregister() {
+ DCHECK(listener_);
+ listener_ = NULL;
+}
+
+void VideoCaptureManager::EnumerateDevices(MediaStreamType stream_type) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ DCHECK(listener_);
+ device_loop_->PostTask(
+ FROM_HERE,
+ base::Bind(&VideoCaptureManager::OnEnumerateDevices, this, stream_type));
+}
+
+int VideoCaptureManager::Open(const StreamDeviceInfo& device) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ DCHECK(listener_);
+
+ // Generate a new id for this device.
+ int video_capture_session_id = new_capture_session_id_++;
+
+ device_loop_->PostTask(
+ FROM_HERE,
+ base::Bind(&VideoCaptureManager::OnOpen, this, video_capture_session_id,
+ device));
+
+ return video_capture_session_id;
+}
+
+void VideoCaptureManager::Close(int capture_session_id) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ DCHECK(listener_);
+ DVLOG(1) << "VideoCaptureManager::Close, id " << capture_session_id;
+ device_loop_->PostTask(
+ FROM_HERE,
+ base::Bind(&VideoCaptureManager::OnClose, this, capture_session_id));
+}
+
+void VideoCaptureManager::Start(
+ const media::VideoCaptureParams& capture_params,
+ media::VideoCaptureDevice::EventHandler* video_capture_receiver) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ device_loop_->PostTask(
+ FROM_HERE,
+ base::Bind(&VideoCaptureManager::OnStart, this, capture_params,
+ video_capture_receiver));
+}
+
+void VideoCaptureManager::Stop(
+ const media::VideoCaptureSessionId& capture_session_id,
+ base::Closure stopped_cb) {
+ DVLOG(1) << "VideoCaptureManager::Stop, id " << capture_session_id;
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ device_loop_->PostTask(
+ FROM_HERE,
+ base::Bind(&VideoCaptureManager::OnStop, this, capture_session_id,
+ stopped_cb));
+}
+
+void VideoCaptureManager::UseFakeDevice() {
+ use_fake_device_ = true;
+}
+
+void VideoCaptureManager::OnEnumerateDevices(MediaStreamType stream_type) {
+ SCOPED_UMA_HISTOGRAM_TIMER(
+ "Media.VideoCaptureManager.OnEnumerateDevicesTime");
+ DCHECK(IsOnDeviceThread());
+
+ media::VideoCaptureDevice::Names device_names;
+ GetAvailableDevices(stream_type, &device_names);
+
+ scoped_ptr<StreamDeviceInfoArray> devices(new StreamDeviceInfoArray());
+ for (media::VideoCaptureDevice::Names::iterator it =
+ device_names.begin(); it != device_names.end(); ++it) {
+ bool opened = DeviceOpened(*it);
+ devices->push_back(StreamDeviceInfo(
+ stream_type, it->GetNameAndModel(), it->id(), opened));
+ }
+
+ PostOnDevicesEnumerated(stream_type, devices.Pass());
+}
+
+void VideoCaptureManager::OnOpen(int capture_session_id,
+ const StreamDeviceInfo& device) {
+ SCOPED_UMA_HISTOGRAM_TIMER("Media.VideoCaptureManager.OnOpenTime");
+ DCHECK(IsOnDeviceThread());
+ DCHECK(devices_.find(capture_session_id) == devices_.end());
+ DVLOG(1) << "VideoCaptureManager::OnOpen, id " << capture_session_id;
+
+ // Check if another session has already opened this device. If so, just
+ // use that opened device.
+ media::VideoCaptureDevice* opened_video_capture_device =
+ GetOpenedDevice(device);
+ if (opened_video_capture_device) {
+ DeviceEntry& new_entry = devices_[capture_session_id];
+ new_entry.stream_type = device.device.type;
+ new_entry.capture_device = opened_video_capture_device;
+ PostOnOpened(device.device.type, capture_session_id);
+ return;
+ }
+
+ scoped_ptr<media::VideoCaptureDevice> video_capture_device;
+
+ // Open the device.
+ switch (device.device.type) {
+ case MEDIA_DEVICE_VIDEO_CAPTURE: {
+ // We look up the device id from the renderer in our local enumeration
+ // since the renderer does not have all the information that might be
+ // held in the browser-side VideoCaptureDevice::Name structure.
+ media::VideoCaptureDevice::Name* found =
+ video_capture_devices_.FindById(device.device.id);
+ if (found) {
+ video_capture_device.reset(use_fake_device_ ?
+ media::FakeVideoCaptureDevice::Create(*found) :
+ media::VideoCaptureDevice::Create(*found));
+ }
+ break;
+ }
+ case MEDIA_TAB_VIDEO_CAPTURE: {
+ video_capture_device.reset(
+ WebContentsVideoCaptureDevice::Create(device.device.id));
+ break;
+ }
+ case MEDIA_DESKTOP_VIDEO_CAPTURE: {
+#if defined(ENABLE_SCREEN_CAPTURE)
+ DesktopMediaID id = DesktopMediaID::Parse(device.device.id);
+ if (id.type != DesktopMediaID::TYPE_NONE) {
+ video_capture_device = DesktopCaptureDevice::Create(id);
+ }
+#endif // defined(ENABLE_SCREEN_CAPTURE)
+ break;
+ }
+ default: {
+ NOTIMPLEMENTED();
+ break;
+ }
+ }
+
+ if (!video_capture_device) {
+ PostOnError(capture_session_id, kDeviceNotAvailable);
+ return;
+ }
+
+ DeviceEntry& new_entry = devices_[capture_session_id];
+ new_entry.stream_type = device.device.type;
+ new_entry.capture_device = video_capture_device.release();
+ PostOnOpened(device.device.type, capture_session_id);
+}
+
+void VideoCaptureManager::OnClose(int capture_session_id) {
+ SCOPED_UMA_HISTOGRAM_TIMER("Media.VideoCaptureManager.OnCloseTime");
+ DCHECK(IsOnDeviceThread());
+ DVLOG(1) << "VideoCaptureManager::OnClose, id " << capture_session_id;
+
+ VideoCaptureDevices::iterator device_it = devices_.find(capture_session_id);
+ if (device_it == devices_.end()) {
+ return;
+ }
+ const DeviceEntry removed_entry = device_it->second;
+ devices_.erase(device_it);
+
+ Controllers::iterator cit = controllers_.find(removed_entry.capture_device);
+ if (cit != controllers_.end()) {
+ BrowserThread::PostTask(
+ BrowserThread::IO, FROM_HERE,
+ base::Bind(&VideoCaptureController::StopSession,
+ cit->second->controller, capture_session_id));
+ }
+
+ if (!DeviceInUse(removed_entry.capture_device)) {
+ // No other users of this device, deallocate (if not done already) and
+ // delete the device. No need to take care of the controller, that is done
+ // by |OnStop|.
+ removed_entry.capture_device->DeAllocate();
+ Controllers::iterator cit = controllers_.find(removed_entry.capture_device);
+ if (cit != controllers_.end()) {
+ delete cit->second;
+ controllers_.erase(cit);
+ }
+ delete removed_entry.capture_device;
+ }
+
+ PostOnClosed(removed_entry.stream_type, capture_session_id);
+}
+
+void VideoCaptureManager::OnStart(
+ const media::VideoCaptureParams capture_params,
+ media::VideoCaptureDevice::EventHandler* video_capture_receiver) {
+ SCOPED_UMA_HISTOGRAM_TIMER("Media.VideoCaptureManager.OnStartTime");
+ DCHECK(IsOnDeviceThread());
+ DCHECK(video_capture_receiver != NULL);
+ DVLOG(1) << "VideoCaptureManager::OnStart, (" << capture_params.width
+ << ", " << capture_params.height
+ << ", " << capture_params.frame_per_second
+ << ", " << capture_params.session_id
+ << ")";
+
+ media::VideoCaptureDevice* video_capture_device =
+ GetDeviceInternal(capture_params.session_id);
+ if (!video_capture_device) {
+ // Invalid session id.
+ video_capture_receiver->OnError();
+ return;
+ }
+  // TODO(mcasas): Variable resolution video capture devices are not yet fully
+  // supported; see crbug.com/261410, second part, and crbug.com/266082.
+ if (capture_params.frame_size_type !=
+ media::ConstantResolutionVideoCaptureDevice) {
+ LOG(DFATAL) << "Only constant Video Capture resolution device supported.";
+ video_capture_receiver->OnError();
+ return;
+ }
+ Controllers::iterator cit = controllers_.find(video_capture_device);
+ if (cit != controllers_.end()) {
+ cit->second->ready_to_delete = false;
+ }
+
+  // Possible errors are signaled to video_capture_receiver by
+  // video_capture_device; it is up to video_capture_receiver to act on them.
+ media::VideoCaptureCapability params_as_capability_copy;
+ params_as_capability_copy.width = capture_params.width;
+ params_as_capability_copy.height = capture_params.height;
+ params_as_capability_copy.frame_rate = capture_params.frame_per_second;
+ params_as_capability_copy.session_id = capture_params.session_id;
+ params_as_capability_copy.frame_size_type = capture_params.frame_size_type;
+ video_capture_device->Allocate(params_as_capability_copy,
+ video_capture_receiver);
+ video_capture_device->Start();
+}
+
+void VideoCaptureManager::OnStop(
+ const media::VideoCaptureSessionId capture_session_id,
+ base::Closure stopped_cb) {
+ SCOPED_UMA_HISTOGRAM_TIMER("Media.VideoCaptureManager.OnStopTime");
+ DCHECK(IsOnDeviceThread());
+ DVLOG(1) << "VideoCaptureManager::OnStop, id " << capture_session_id;
+
+ VideoCaptureDevices::iterator it = devices_.find(capture_session_id);
+ if (it != devices_.end()) {
+ media::VideoCaptureDevice* video_capture_device = it->second.capture_device;
+    // Possible errors are signaled to video_capture_receiver by
+    // video_capture_device; it is up to video_capture_receiver to act on them.
+ video_capture_device->Stop();
+ video_capture_device->DeAllocate();
+ Controllers::iterator cit = controllers_.find(video_capture_device);
+ if (cit != controllers_.end()) {
+ cit->second->ready_to_delete = true;
+ if (cit->second->handlers.empty()) {
+ delete cit->second;
+ controllers_.erase(cit);
+ }
+ }
+ }
+
+ if (!stopped_cb.is_null())
+ stopped_cb.Run();
+
+ if (capture_session_id == kStartOpenSessionId) {
+ // This device was opened from Start(), not Open(). Close it!
+ OnClose(capture_session_id);
+ }
+}
+
+void VideoCaptureManager::OnOpened(MediaStreamType stream_type,
+ int capture_session_id) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ if (!listener_) {
+ // Listener has been removed.
+ return;
+ }
+ listener_->Opened(stream_type, capture_session_id);
+}
+
+void VideoCaptureManager::OnClosed(MediaStreamType stream_type,
+ int capture_session_id) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ if (!listener_) {
+ // Listener has been removed.
+ return;
+ }
+ listener_->Closed(stream_type, capture_session_id);
+}
+
+void VideoCaptureManager::OnDevicesEnumerated(
+ MediaStreamType stream_type,
+ scoped_ptr<StreamDeviceInfoArray> devices) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ if (!listener_) {
+ // Listener has been removed.
+ return;
+ }
+ listener_->DevicesEnumerated(stream_type, *devices);
+}
+
+void VideoCaptureManager::OnError(MediaStreamType stream_type,
+ int capture_session_id,
+ MediaStreamProviderError error) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ if (!listener_) {
+ // Listener has been removed.
+ return;
+ }
+ listener_->Error(stream_type, capture_session_id, error);
+}
+
+void VideoCaptureManager::PostOnOpened(
+ MediaStreamType stream_type, int capture_session_id) {
+ DCHECK(IsOnDeviceThread());
+ BrowserThread::PostTask(BrowserThread::IO,
+ FROM_HERE,
+ base::Bind(&VideoCaptureManager::OnOpened, this,
+ stream_type, capture_session_id));
+}
+
+void VideoCaptureManager::PostOnClosed(
+ MediaStreamType stream_type, int capture_session_id) {
+ DCHECK(IsOnDeviceThread());
+ BrowserThread::PostTask(BrowserThread::IO,
+ FROM_HERE,
+ base::Bind(&VideoCaptureManager::OnClosed, this,
+ stream_type, capture_session_id));
+}
+
+void VideoCaptureManager::PostOnDevicesEnumerated(
+ MediaStreamType stream_type,
+ scoped_ptr<StreamDeviceInfoArray> devices) {
+ DCHECK(IsOnDeviceThread());
+ BrowserThread::PostTask(
+ BrowserThread::IO, FROM_HERE,
+ base::Bind(&VideoCaptureManager::OnDevicesEnumerated,
+ this, stream_type, base::Passed(&devices)));
+}
+
+void VideoCaptureManager::PostOnError(int capture_session_id,
+ MediaStreamProviderError error) {
+ DCHECK(IsOnDeviceThread());
+ MediaStreamType stream_type = MEDIA_DEVICE_VIDEO_CAPTURE;
+ VideoCaptureDevices::const_iterator it = devices_.find(capture_session_id);
+ if (it != devices_.end())
+ stream_type = it->second.stream_type;
+ BrowserThread::PostTask(BrowserThread::IO,
+ FROM_HERE,
+ base::Bind(&VideoCaptureManager::OnError, this,
+ stream_type, capture_session_id, error));
+}
+
+bool VideoCaptureManager::IsOnDeviceThread() const {
+ return device_loop_->BelongsToCurrentThread();
+}
+
+void VideoCaptureManager::GetAvailableDevices(
+ MediaStreamType stream_type,
+ media::VideoCaptureDevice::Names* device_names) {
+ DCHECK(IsOnDeviceThread());
+
+ switch (stream_type) {
+ case MEDIA_DEVICE_VIDEO_CAPTURE:
+ // Cache the latest enumeration of video capture devices.
+ // We'll refer to this list again in OnOpen to avoid having to
+ // enumerate the devices again.
+ video_capture_devices_.clear();
+ if (!use_fake_device_) {
+ media::VideoCaptureDevice::GetDeviceNames(&video_capture_devices_);
+ } else {
+ media::FakeVideoCaptureDevice::GetDeviceNames(&video_capture_devices_);
+ }
+ *device_names = video_capture_devices_;
+ break;
+
+ case MEDIA_DESKTOP_VIDEO_CAPTURE:
+ device_names->clear();
+ break;
+
+ default:
+ NOTREACHED();
+ break;
+ }
+}
+
+bool VideoCaptureManager::DeviceOpened(
+ const media::VideoCaptureDevice::Name& device_name) {
+ DCHECK(IsOnDeviceThread());
+
+ for (VideoCaptureDevices::iterator it = devices_.begin();
+ it != devices_.end(); ++it) {
+ if (device_name.id() == it->second.capture_device->device_name().id()) {
+ // We've found the device!
+ return true;
+ }
+ }
+ return false;
+}
+
+media::VideoCaptureDevice* VideoCaptureManager::GetOpenedDevice(
+ const StreamDeviceInfo& device_info) {
+ DCHECK(IsOnDeviceThread());
+
+ for (VideoCaptureDevices::iterator it = devices_.begin();
+ it != devices_.end(); it++) {
+ if (device_info.device.id ==
+ it->second.capture_device->device_name().id()) {
+ return it->second.capture_device;
+ }
+ }
+ return NULL;
+}
+
+bool VideoCaptureManager::DeviceInUse(
+ const media::VideoCaptureDevice* video_capture_device) {
+ DCHECK(IsOnDeviceThread());
+
+ for (VideoCaptureDevices::iterator it = devices_.begin();
+ it != devices_.end(); ++it) {
+ if (video_capture_device == it->second.capture_device) {
+ // We've found the device!
+ return true;
+ }
+ }
+ return false;
+}
+
+void VideoCaptureManager::AddController(
+ const media::VideoCaptureParams& capture_params,
+ VideoCaptureControllerEventHandler* handler,
+ base::Callback<void(VideoCaptureController*)> added_cb) {
+ DCHECK(handler);
+ device_loop_->PostTask(
+ FROM_HERE,
+ base::Bind(&VideoCaptureManager::DoAddControllerOnDeviceThread,
+ this, capture_params, handler, added_cb));
+}
+
+void VideoCaptureManager::DoAddControllerOnDeviceThread(
+ const media::VideoCaptureParams capture_params,
+ VideoCaptureControllerEventHandler* handler,
+ base::Callback<void(VideoCaptureController*)> added_cb) {
+ DCHECK(IsOnDeviceThread());
+
+ media::VideoCaptureDevice* video_capture_device =
+ GetDeviceInternal(capture_params.session_id);
+ scoped_refptr<VideoCaptureController> controller;
+ if (video_capture_device) {
+ Controllers::iterator cit = controllers_.find(video_capture_device);
+ if (cit == controllers_.end()) {
+ controller = new VideoCaptureController(this);
+ controllers_[video_capture_device] =
+ new Controller(controller.get(), handler);
+ } else {
+ controllers_[video_capture_device]->handlers.push_front(handler);
+ controller = controllers_[video_capture_device]->controller;
+ }
+ }
+ added_cb.Run(controller.get());
+}
+
+void VideoCaptureManager::RemoveController(
+ VideoCaptureController* controller,
+ VideoCaptureControllerEventHandler* handler) {
+ DCHECK(handler);
+ device_loop_->PostTask(
+ FROM_HERE,
+ base::Bind(&VideoCaptureManager::DoRemoveControllerOnDeviceThread, this,
+ make_scoped_refptr(controller), handler));
+}
+
+void VideoCaptureManager::DoRemoveControllerOnDeviceThread(
+ VideoCaptureController* controller,
+ VideoCaptureControllerEventHandler* handler) {
+ DCHECK(IsOnDeviceThread());
+
+ for (Controllers::iterator cit = controllers_.begin();
+ cit != controllers_.end(); ++cit) {
+ if (controller == cit->second->controller.get()) {
+ Handlers& handlers = cit->second->handlers;
+ for (Handlers::iterator hit = handlers.begin();
+ hit != handlers.end(); ++hit) {
+ if ((*hit) == handler) {
+ handlers.erase(hit);
+ break;
+ }
+ }
+ if (handlers.empty() && cit->second->ready_to_delete) {
+ delete cit->second;
+ controllers_.erase(cit);
+ }
+ return;
+ }
+ }
+}
+
+media::VideoCaptureDevice* VideoCaptureManager::GetDeviceInternal(
+ int capture_session_id) {
+ DCHECK(IsOnDeviceThread());
+ VideoCaptureDevices::iterator dit = devices_.find(capture_session_id);
+ if (dit != devices_.end()) {
+ return dit->second.capture_device;
+ }
+
+  // Fallback path for clients that do not use MediaStreamManager; this
+  // session id is never returned by Open().
+ if (capture_session_id == kStartOpenSessionId) {
+ media::VideoCaptureDevice::Names device_names;
+ GetAvailableDevices(MEDIA_DEVICE_VIDEO_CAPTURE, &device_names);
+ if (device_names.empty()) {
+ // No devices available.
+ return NULL;
+ }
+ StreamDeviceInfo device(MEDIA_DEVICE_VIDEO_CAPTURE,
+ device_names.front().GetNameAndModel(),
+ device_names.front().id(),
+ false);
+
+ // Call OnOpen to open using the first device in the list.
+ OnOpen(capture_session_id, device);
+
+ VideoCaptureDevices::iterator dit = devices_.find(capture_session_id);
+ if (dit != devices_.end()) {
+ return dit->second.capture_device;
+ }
+ }
+ return NULL;
+}
+
+} // namespace content
diff --git a/chromium/content/browser/renderer_host/media/video_capture_manager.h b/chromium/content/browser/renderer_host/media/video_capture_manager.h
new file mode 100644
index 00000000000..34d6e626413
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/video_capture_manager.h
@@ -0,0 +1,170 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// VideoCaptureManager is used to open/close, start/stop, enumerate available
+// video capture devices, and manage VideoCaptureControllers.
+// All functions are expected to be called from Browser::IO thread.
+// VideoCaptureManager will open OS dependent instances of VideoCaptureDevice.
+// A device can only be opened once.
+
+#ifndef CONTENT_BROWSER_RENDERER_HOST_MEDIA_VIDEO_CAPTURE_MANAGER_H_
+#define CONTENT_BROWSER_RENDERER_HOST_MEDIA_VIDEO_CAPTURE_MANAGER_H_
+
+#include <list>
+#include <map>
+
+#include "base/memory/ref_counted.h"
+#include "content/browser/renderer_host/media/media_stream_provider.h"
+#include "content/common/content_export.h"
+#include "content/common/media/media_stream_options.h"
+#include "media/video/capture/video_capture_device.h"
+#include "media/video/capture/video_capture_types.h"
+
+namespace content {
+class MockVideoCaptureManager;
+class VideoCaptureController;
+class VideoCaptureControllerEventHandler;
+
+// VideoCaptureManager opens/closes and starts/stops video capture devices.
+class CONTENT_EXPORT VideoCaptureManager : public MediaStreamProvider {
+ public:
+  // Calling |Start| with this id will open the first device, even though Open()
+  // has not been called. This makes it possible to use video capture devices
+  // before MediaStream is implemented in Chrome and WebKit.
+ enum { kStartOpenSessionId = 1 };
+
+ VideoCaptureManager();
+
+ // Implements MediaStreamProvider.
+ virtual void Register(MediaStreamProviderListener* listener,
+ base::MessageLoopProxy* device_thread_loop) OVERRIDE;
+
+ virtual void Unregister() OVERRIDE;
+
+ virtual void EnumerateDevices(MediaStreamType stream_type) OVERRIDE;
+
+ virtual int Open(const StreamDeviceInfo& device) OVERRIDE;
+
+ virtual void Close(int capture_session_id) OVERRIDE;
+
+ // Functions used to start and stop media flow.
+ // Start allocates the device and no other application can use the device
+ // before Stop is called. Captured video frames will be delivered to
+ // video_capture_receiver.
+ virtual void Start(const media::VideoCaptureParams& capture_params,
+ media::VideoCaptureDevice::EventHandler* video_capture_receiver);
+
+ // Stops capture device referenced by |capture_session_id|. No more frames
+ // will be delivered to the frame receiver, and |stopped_cb| will be called.
+ // |stopped_cb| can be NULL.
+ virtual void Stop(const media::VideoCaptureSessionId& capture_session_id,
+ base::Closure stopped_cb);
+
+ // Used by unit test to make sure a fake device is used instead of a real
+ // video capture device. Due to timing requirements, the function must be
+ // called before EnumerateDevices and Open.
+ void UseFakeDevice();
+
+ // Called by VideoCaptureHost to get a controller for |capture_params|.
+ // The controller is returned via calling |added_cb|.
+ void AddController(
+ const media::VideoCaptureParams& capture_params,
+ VideoCaptureControllerEventHandler* handler,
+ base::Callback<void(VideoCaptureController*)> added_cb);
+ // Called by VideoCaptureHost to remove the |controller|.
+ void RemoveController(
+ VideoCaptureController* controller,
+ VideoCaptureControllerEventHandler* handler);
+
+ private:
+ friend class MockVideoCaptureManager;
+
+ virtual ~VideoCaptureManager();
+
+ typedef std::list<VideoCaptureControllerEventHandler*> Handlers;
+ struct Controller;
+
+ // Called by the public functions, executed on device thread.
+ void OnEnumerateDevices(MediaStreamType stream_type);
+ void OnOpen(int capture_session_id, const StreamDeviceInfo& device);
+ void OnClose(int capture_session_id);
+ void OnStart(const media::VideoCaptureParams capture_params,
+ media::VideoCaptureDevice::EventHandler* video_capture_receiver);
+ void OnStop(const media::VideoCaptureSessionId capture_session_id,
+ base::Closure stopped_cb);
+ void DoAddControllerOnDeviceThread(
+ const media::VideoCaptureParams capture_params,
+ VideoCaptureControllerEventHandler* handler,
+ base::Callback<void(VideoCaptureController*)> added_cb);
+ void DoRemoveControllerOnDeviceThread(
+ VideoCaptureController* controller,
+ VideoCaptureControllerEventHandler* handler);
+
+ // Executed on Browser::IO thread to call Listener.
+ void OnOpened(MediaStreamType type, int capture_session_id);
+ void OnClosed(MediaStreamType type, int capture_session_id);
+ void OnDevicesEnumerated(MediaStreamType stream_type,
+ scoped_ptr<StreamDeviceInfoArray> devices);
+ void OnError(MediaStreamType type, int capture_session_id,
+ MediaStreamProviderError error);
+
+ // Executed on device thread to make sure Listener is called from
+ // Browser::IO thread.
+ void PostOnOpened(MediaStreamType type, int capture_session_id);
+ void PostOnClosed(MediaStreamType type, int capture_session_id);
+ void PostOnDevicesEnumerated(MediaStreamType stream_type,
+ scoped_ptr<StreamDeviceInfoArray> devices);
+ void PostOnError(int capture_session_id, MediaStreamProviderError error);
+
+ // Helpers
+ void GetAvailableDevices(MediaStreamType stream_type,
+ media::VideoCaptureDevice::Names* device_names);
+ bool DeviceOpened(const media::VideoCaptureDevice::Name& device_name);
+ bool DeviceInUse(const media::VideoCaptureDevice* video_capture_device);
+ media::VideoCaptureDevice* GetOpenedDevice(
+ const StreamDeviceInfo& device_info);
+ bool IsOnDeviceThread() const;
+ media::VideoCaptureDevice* GetDeviceInternal(int capture_session_id);
+
+ // The message loop of media stream device thread that this object runs on.
+ scoped_refptr<base::MessageLoopProxy> device_loop_;
+
+ // Only accessed on Browser::IO thread.
+ MediaStreamProviderListener* listener_;
+ int new_capture_session_id_;
+
+ // Only accessed from device thread.
+ // VideoCaptureManager owns all VideoCaptureDevices and is responsible for
+ // deleting the instances when they are not used any longer.
+ struct DeviceEntry {
+ MediaStreamType stream_type;
+    media::VideoCaptureDevice* capture_device; // May be shared across sessions.
+ };
+ typedef std::map<int, DeviceEntry> VideoCaptureDevices;
+ VideoCaptureDevices devices_; // Maps capture_session_id to DeviceEntry.
+
+ // Set to true if using fake video capture devices for testing,
+ // false by default. This is only used for the MEDIA_DEVICE_VIDEO_CAPTURE
+ // device type.
+ bool use_fake_device_;
+
+ // Only accessed from device thread.
+  // VideoCaptureManager owns all VideoCaptureControllers and is responsible
+ // for deleting the instances when they are not used any longer.
+ // VideoCaptureDevice is one-to-one mapped to VideoCaptureController.
+ typedef std::map<media::VideoCaptureDevice*, Controller*> Controllers;
+ Controllers controllers_;
+
+ // We cache the enumerated video capture devices in GetAvailableDevices
+ // (e.g. called by OnEnumerateDevices) and then look up the requested ID when
+ // a device is opened (see OnOpen).
+ // Used only on the device thread.
+ media::VideoCaptureDevice::Names video_capture_devices_;
+
+ DISALLOW_COPY_AND_ASSIGN(VideoCaptureManager);
+};
+
+} // namespace content
+
+#endif // CONTENT_BROWSER_RENDERER_HOST_MEDIA_VIDEO_CAPTURE_MANAGER_H_
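[Editor's note] A minimal sketch of driving a capture session against the interface above, assuming the session has already been opened via Open() and that |receiver| implements media::VideoCaptureDevice::EventHandler. The numeric parameters are illustrative only, and this helper is not part of Chromium; per the header comment, it would have to run on the Browser::IO thread.

    #include "base/callback.h"
    #include "content/browser/renderer_host/media/video_capture_manager.h"

    namespace content {

    // Hypothetical helper: starts capture for an opened |session_id|, then later
    // stops and closes it. Frames are delivered to |receiver| in between.
    void RunCaptureSession(VideoCaptureManager* manager,
                           int session_id,
                           media::VideoCaptureDevice::EventHandler* receiver) {
      media::VideoCaptureParams params;
      params.session_id = session_id;
      params.width = 320;               // Illustrative values only; real callers
      params.height = 240;              // derive these from the request.
      params.frame_per_second = 30;
      manager->Start(params, receiver);

      // ... capture runs; frames arrive on the device thread ...

      manager->Stop(session_id, base::Closure());  // Null closure: no completion signal needed.
      manager->Close(session_id);
    }

    }  // namespace content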
diff --git a/chromium/content/browser/renderer_host/media/video_capture_manager_unittest.cc b/chromium/content/browser/renderer_host/media/video_capture_manager_unittest.cc
new file mode 100644
index 00000000000..83c064655ed
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/video_capture_manager_unittest.cc
@@ -0,0 +1,272 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Unit test for VideoCaptureManager.
+
+#include <string>
+
+#include "base/bind.h"
+#include "base/memory/ref_counted.h"
+#include "base/memory/scoped_ptr.h"
+#include "base/message_loop/message_loop.h"
+#include "content/browser/browser_thread_impl.h"
+#include "content/browser/renderer_host/media/media_stream_provider.h"
+#include "content/browser/renderer_host/media/video_capture_manager.h"
+#include "content/common/media/media_stream_options.h"
+#include "media/video/capture/video_capture_device.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using ::testing::_;
+using ::testing::AnyNumber;
+using ::testing::InSequence;
+using testing::SaveArg;
+using ::testing::Return;
+
+namespace content {
+
+// Listener class used to track progress of VideoCaptureManager test.
+class MockMediaStreamProviderListener : public MediaStreamProviderListener {
+ public:
+ MockMediaStreamProviderListener() {}
+ ~MockMediaStreamProviderListener() {}
+
+ MOCK_METHOD2(Opened, void(MediaStreamType, int));
+ MOCK_METHOD2(Closed, void(MediaStreamType, int));
+ MOCK_METHOD2(DevicesEnumerated, void(MediaStreamType,
+ const StreamDeviceInfoArray&));
+ MOCK_METHOD3(Error, void(MediaStreamType, int,
+ MediaStreamProviderError));
+}; // class MockMediaStreamProviderListener
+
+// Needed as an input argument to Start().
+class MockFrameObserver : public media::VideoCaptureDevice::EventHandler {
+ public:
+ virtual scoped_refptr<media::VideoFrame> ReserveOutputBuffer() OVERRIDE {
+ return NULL;
+ }
+ virtual void OnError() OVERRIDE {}
+ virtual void OnFrameInfo(
+ const media::VideoCaptureCapability& info) OVERRIDE {}
+ virtual void OnIncomingCapturedFrame(const uint8* data,
+ int length,
+ base::Time timestamp,
+ int rotation,
+ bool flip_vert,
+ bool flip_horiz) OVERRIDE {}
+ virtual void OnIncomingCapturedVideoFrame(
+ const scoped_refptr<media::VideoFrame>& frame,
+ base::Time timestamp) OVERRIDE {}
+};
+
+// Test class
+class VideoCaptureManagerTest : public testing::Test {
+ public:
+ VideoCaptureManagerTest() {}
+ virtual ~VideoCaptureManagerTest() {}
+
+ protected:
+ virtual void SetUp() OVERRIDE {
+ listener_.reset(new MockMediaStreamProviderListener());
+ message_loop_.reset(new base::MessageLoop(base::MessageLoop::TYPE_IO));
+ io_thread_.reset(new BrowserThreadImpl(BrowserThread::IO,
+ message_loop_.get()));
+ vcm_ = new VideoCaptureManager();
+ vcm_->UseFakeDevice();
+ vcm_->Register(listener_.get(), message_loop_->message_loop_proxy().get());
+ frame_observer_.reset(new MockFrameObserver());
+ }
+
+ virtual void TearDown() OVERRIDE {}
+
+ scoped_refptr<VideoCaptureManager> vcm_;
+ scoped_ptr<MockMediaStreamProviderListener> listener_;
+ scoped_ptr<base::MessageLoop> message_loop_;
+ scoped_ptr<BrowserThreadImpl> io_thread_;
+ scoped_ptr<MockFrameObserver> frame_observer_;
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(VideoCaptureManagerTest);
+};
+
+// Test cases
+
+// Try to open, start, stop and close a device.
+TEST_F(VideoCaptureManagerTest, CreateAndClose) {
+ StreamDeviceInfoArray devices;
+
+ InSequence s;
+ EXPECT_CALL(*listener_, DevicesEnumerated(MEDIA_DEVICE_VIDEO_CAPTURE, _))
+ .Times(1).WillOnce(SaveArg<1>(&devices));
+ EXPECT_CALL(*listener_, Opened(MEDIA_DEVICE_VIDEO_CAPTURE, _)).Times(1);
+ EXPECT_CALL(*listener_, Closed(MEDIA_DEVICE_VIDEO_CAPTURE, _)).Times(1);
+
+ vcm_->EnumerateDevices(MEDIA_DEVICE_VIDEO_CAPTURE);
+
+ // Wait to get device callback.
+ message_loop_->RunUntilIdle();
+
+ int video_session_id = vcm_->Open(devices.front());
+
+ media::VideoCaptureParams capture_params;
+ capture_params.session_id = video_session_id;
+ capture_params.width = 320;
+ capture_params.height = 240;
+ capture_params.frame_per_second = 30;
+ vcm_->Start(capture_params, frame_observer_.get());
+
+ vcm_->Stop(video_session_id, base::Closure());
+ vcm_->Close(video_session_id);
+
+ // Wait to check callbacks before removing the listener.
+ message_loop_->RunUntilIdle();
+ vcm_->Unregister();
+}
+
+// Open the same device twice.
+TEST_F(VideoCaptureManagerTest, OpenTwice) {
+ StreamDeviceInfoArray devices;
+
+ InSequence s;
+ EXPECT_CALL(*listener_, DevicesEnumerated(MEDIA_DEVICE_VIDEO_CAPTURE, _))
+ .Times(1).WillOnce(SaveArg<1>(&devices));
+ EXPECT_CALL(*listener_, Opened(MEDIA_DEVICE_VIDEO_CAPTURE, _)).Times(2);
+ EXPECT_CALL(*listener_, Closed(MEDIA_DEVICE_VIDEO_CAPTURE, _)).Times(2);
+
+ vcm_->EnumerateDevices(MEDIA_DEVICE_VIDEO_CAPTURE);
+
+ // Wait to get device callback.
+ message_loop_->RunUntilIdle();
+
+ int video_session_id_first = vcm_->Open(devices.front());
+
+ // This should trigger an error callback with error code
+ // 'kDeviceAlreadyInUse'.
+ int video_session_id_second = vcm_->Open(devices.front());
+ EXPECT_NE(video_session_id_first, video_session_id_second);
+
+ vcm_->Close(video_session_id_first);
+ vcm_->Close(video_session_id_second);
+
+ // Wait to check callbacks before removing the listener.
+ message_loop_->RunUntilIdle();
+ vcm_->Unregister();
+}
+
+// Open two different devices.
+TEST_F(VideoCaptureManagerTest, OpenTwo) {
+ StreamDeviceInfoArray devices;
+
+ InSequence s;
+ EXPECT_CALL(*listener_, DevicesEnumerated(MEDIA_DEVICE_VIDEO_CAPTURE, _))
+ .Times(1).WillOnce(SaveArg<1>(&devices));
+ EXPECT_CALL(*listener_, Opened(MEDIA_DEVICE_VIDEO_CAPTURE, _)).Times(2);
+ EXPECT_CALL(*listener_, Closed(MEDIA_DEVICE_VIDEO_CAPTURE, _)).Times(2);
+
+ vcm_->EnumerateDevices(MEDIA_DEVICE_VIDEO_CAPTURE);
+
+ // Wait to get device callback.
+ message_loop_->RunUntilIdle();
+
+ StreamDeviceInfoArray::iterator it = devices.begin();
+
+ int video_session_id_first = vcm_->Open(*it);
+ ++it;
+ int video_session_id_second = vcm_->Open(*it);
+
+ vcm_->Close(video_session_id_first);
+ vcm_->Close(video_session_id_second);
+
+ // Wait to check callbacks before removing the listener.
+ message_loop_->RunUntilIdle();
+ vcm_->Unregister();
+}
+
+// Try open a non-existing device.
+TEST_F(VideoCaptureManagerTest, OpenNotExisting) {
+ StreamDeviceInfoArray devices;
+
+ InSequence s;
+ EXPECT_CALL(*listener_, DevicesEnumerated(MEDIA_DEVICE_VIDEO_CAPTURE, _))
+ .Times(1).WillOnce(SaveArg<1>(&devices));
+ EXPECT_CALL(*listener_, Error(MEDIA_DEVICE_VIDEO_CAPTURE,
+ _, kDeviceNotAvailable))
+ .Times(1);
+
+ vcm_->EnumerateDevices(MEDIA_DEVICE_VIDEO_CAPTURE);
+
+ // Wait to get device callback.
+ message_loop_->RunUntilIdle();
+
+ MediaStreamType stream_type = MEDIA_DEVICE_VIDEO_CAPTURE;
+ std::string device_name("device_doesnt_exist");
+ std::string device_id("id_doesnt_exist");
+ StreamDeviceInfo dummy_device(stream_type, device_name, device_id, false);
+
+ // This should fail with error code 'kDeviceNotAvailable'.
+ vcm_->Open(dummy_device);
+
+ // Wait to check callbacks before removing the listener.
+ message_loop_->RunUntilIdle();
+ vcm_->Unregister();
+}
+
+// Start a device using "magic" id, i.e. call Start without calling Open.
+TEST_F(VideoCaptureManagerTest, StartUsingId) {
+ InSequence s;
+ EXPECT_CALL(*listener_, Opened(MEDIA_DEVICE_VIDEO_CAPTURE, _)).Times(1);
+ EXPECT_CALL(*listener_, Closed(MEDIA_DEVICE_VIDEO_CAPTURE, _)).Times(1);
+
+ media::VideoCaptureParams capture_params;
+ capture_params.session_id = VideoCaptureManager::kStartOpenSessionId;
+ capture_params.width = 320;
+ capture_params.height = 240;
+ capture_params.frame_per_second = 30;
+
+ // Start shall trigger the Open callback.
+ vcm_->Start(capture_params, frame_observer_.get());
+
+  // Stop shall trigger the Close callback.
+ vcm_->Stop(VideoCaptureManager::kStartOpenSessionId, base::Closure());
+
+ // Wait to check callbacks before removing the listener.
+ message_loop_->RunUntilIdle();
+ vcm_->Unregister();
+}
+
+// Open and start a device, close it before calling Stop.
+TEST_F(VideoCaptureManagerTest, CloseWithoutStop) {
+ StreamDeviceInfoArray devices;
+
+ InSequence s;
+ EXPECT_CALL(*listener_, DevicesEnumerated(MEDIA_DEVICE_VIDEO_CAPTURE, _))
+ .Times(1).WillOnce(SaveArg<1>(&devices));
+ EXPECT_CALL(*listener_, Opened(MEDIA_DEVICE_VIDEO_CAPTURE, _)).Times(1);
+ EXPECT_CALL(*listener_, Closed(MEDIA_DEVICE_VIDEO_CAPTURE, _)).Times(1);
+
+ vcm_->EnumerateDevices(MEDIA_DEVICE_VIDEO_CAPTURE);
+
+ // Wait to get device callback.
+ message_loop_->RunUntilIdle();
+
+ int video_session_id = vcm_->Open(devices.front());
+
+ media::VideoCaptureParams capture_params;
+ capture_params.session_id = video_session_id;
+ capture_params.width = 320;
+ capture_params.height = 240;
+ capture_params.frame_per_second = 30;
+ vcm_->Start(capture_params, frame_observer_.get());
+
+  // Close will stop the running device; otherwise an assert will be triggered
+  // in the VideoCaptureManager destructor.
+ vcm_->Close(video_session_id);
+ vcm_->Stop(video_session_id, base::Closure());
+
+  // Wait to check callbacks before removing the listener.
+ message_loop_->RunUntilIdle();
+ vcm_->Unregister();
+}
+
+} // namespace content
diff --git a/chromium/content/browser/renderer_host/media/video_capture_oracle.cc b/chromium/content/browser/renderer_host/media/video_capture_oracle.cc
new file mode 100644
index 00000000000..c05ac0f8a37
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/video_capture_oracle.cc
@@ -0,0 +1,165 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "content/browser/renderer_host/media/video_capture_oracle.h"
+
+#include "base/debug/trace_event.h"
+
+namespace content {
+
+namespace {
+
+// This value controls how many redundant, timer-based captures occur when the
+// content is static. Redundantly capturing the same frame allows iterative
+// quality enhancement, and also allows the buffer to fill in "buffered mode".
+//
+// TODO(nick): Controlling this here is a hack and a layering violation, since
+// it's a strategy specific to the WebRTC consumer, and probably just papers
+// over some frame dropping and quality bugs. It should either be controlled at
+// a higher level, or else redundant frame generation should be pushed down
+// further into the WebRTC encoding stack.
+const int kNumRedundantCapturesOfStaticContent = 200;
+
+} // anonymous namespace
+
+VideoCaptureOracle::VideoCaptureOracle(base::TimeDelta capture_period,
+ bool events_are_reliable)
+ : capture_period_(capture_period),
+ frame_number_(0),
+ last_delivered_frame_number_(0),
+ sampler_(capture_period_,
+ events_are_reliable,
+ kNumRedundantCapturesOfStaticContent) {}
+
+bool VideoCaptureOracle::ObserveEventAndDecideCapture(
+ Event event,
+ base::Time event_time) {
+ // Record |event| and decide whether it's a good time to capture.
+ const bool content_is_dirty = (event == kCompositorUpdate ||
+ event == kSoftwarePaint);
+ bool should_sample;
+ if (content_is_dirty) {
+ frame_number_++;
+ should_sample = sampler_.AddEventAndConsiderSampling(event_time);
+ } else {
+ should_sample = sampler_.IsOverdueForSamplingAt(event_time);
+ }
+ return should_sample;
+}
+
+int VideoCaptureOracle::RecordCapture() {
+ sampler_.RecordSample();
+ return frame_number_;
+}
+
+bool VideoCaptureOracle::CompleteCapture(int frame_number,
+ base::Time timestamp) {
+ // Drop frame if previous frame number is higher or we're trying to deliver
+ // a frame with the same timestamp.
+ if (last_delivered_frame_number_ > frame_number ||
+ last_delivered_frame_timestamp_ == timestamp) {
+ LOG(ERROR) << "Frame with same timestamp or out of order delivery. "
+ << "Dropping frame.";
+ return false;
+ }
+
+ if (last_delivered_frame_timestamp_ > timestamp) {
+ // We should not get here unless time was adjusted backwards.
+ LOG(ERROR) << "Frame with past timestamp (" << timestamp.ToInternalValue()
+ << ") was delivered";
+ }
+
+ last_delivered_frame_number_ = frame_number;
+ last_delivered_frame_timestamp_ = timestamp;
+
+ return true;
+}
+
+SmoothEventSampler::SmoothEventSampler(base::TimeDelta capture_period,
+ bool events_are_reliable,
+ int redundant_capture_goal)
+ : events_are_reliable_(events_are_reliable),
+ capture_period_(capture_period),
+ redundant_capture_goal_(redundant_capture_goal),
+ token_bucket_capacity_(capture_period + capture_period / 2),
+ overdue_sample_count_(0),
+ token_bucket_(token_bucket_capacity_) {
+ DCHECK_GT(capture_period_.InMicroseconds(), 0);
+}
+
+bool SmoothEventSampler::AddEventAndConsiderSampling(base::Time event_time) {
+ DCHECK(!event_time.is_null());
+
+ // Add tokens to the bucket based on advancement in time. Then, re-bound the
+ // number of tokens in the bucket. Overflow occurs when there is too much
+ // time between events (a common case), or when RecordSample() is not being
+ // called often enough (a bug). On the other hand, if RecordSample() is being
+ // called too often (e.g., as a reaction to IsOverdueForSamplingAt()), the
+ // bucket will underflow.
+ if (!current_event_.is_null()) {
+ if (current_event_ < event_time) {
+ token_bucket_ += event_time - current_event_;
+ if (token_bucket_ > token_bucket_capacity_)
+ token_bucket_ = token_bucket_capacity_;
+ }
+ // Side note: If the system clock is reset, causing |current_event_| to be
+ // greater than |event_time|, everything here will simply gracefully adjust.
+ if (token_bucket_ < base::TimeDelta())
+ token_bucket_ = base::TimeDelta();
+ TRACE_COUNTER1("mirroring",
+ "MirroringTokenBucketUsec", token_bucket_.InMicroseconds());
+ }
+ current_event_ = event_time;
+
+ // Return true if one capture period's worth of tokens are in the bucket.
+ return token_bucket_ >= capture_period_;
+}
+
+void SmoothEventSampler::RecordSample() {
+ token_bucket_ -= capture_period_;
+ TRACE_COUNTER1("mirroring",
+ "MirroringTokenBucketUsec", token_bucket_.InMicroseconds());
+
+ bool was_paused = overdue_sample_count_ == redundant_capture_goal_;
+ if (HasUnrecordedEvent()) {
+ last_sample_ = current_event_;
+ overdue_sample_count_ = 0;
+ } else {
+ ++overdue_sample_count_;
+ }
+ bool is_paused = overdue_sample_count_ == redundant_capture_goal_;
+
+ LOG_IF(INFO, !was_paused && is_paused)
+ << "Tab content unchanged for " << redundant_capture_goal_
+ << " frames; capture will halt until content changes.";
+ LOG_IF(INFO, was_paused && !is_paused)
+ << "Content changed; capture will resume.";
+}
+
+bool SmoothEventSampler::IsOverdueForSamplingAt(base::Time event_time) const {
+ DCHECK(!event_time.is_null());
+
+ // If we don't get events on compositor updates on this platform, then we
+ // don't reliably know whether we're dirty.
+ if (events_are_reliable_) {
+ if (!HasUnrecordedEvent() &&
+ overdue_sample_count_ >= redundant_capture_goal_) {
+ return false; // Not dirty.
+ }
+ }
+
+ // If we're dirty but not yet old, then we've recently gotten updates, so we
+ // won't request a sample just yet.
+ base::TimeDelta dirty_interval = event_time - last_sample_;
+ if (dirty_interval < capture_period_ * 4)
+ return false;
+ else
+ return true;
+}
+
+bool SmoothEventSampler::HasUnrecordedEvent() const {
+ return !current_event_.is_null() && current_event_ != last_sample_;
+}
+
+} // namespace content
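[Editor's note] To make the token-bucket arithmetic in SmoothEventSampler::AddEventAndConsiderSampling() concrete, here is a self-contained sketch in plain C++ (standard library only; it is not the Chromium class, just the same bookkeeping under the same assumptions): a 30 Hz capture period, a bucket capacity of 1.5 capture periods, and 60 Hz vsync events. In the steady state it samples every other event, matching what the Sample60HertzAt30Hertz test later in this patch asserts against the real sampler.

    #include <algorithm>
    #include <chrono>
    #include <iostream>

    int main() {
      using std::chrono::duration_cast;
      using std::chrono::microseconds;
      using std::chrono::seconds;

      const microseconds capture_period = duration_cast<microseconds>(seconds(1)) / 30;
      const microseconds capacity = capture_period + capture_period / 2;  // As in the constructor.
      const microseconds vsync = duration_cast<microseconds>(seconds(1)) / 60;

      // The bucket starts full, mirroring token_bucket_(token_bucket_capacity_),
      // so the very first event is sampled.
      microseconds bucket = capacity;
      for (int i = 0; i < 8; ++i) {
        // AddEventAndConsiderSampling(): credit the elapsed time, clamp to capacity,
        // and sample only if a full capture period's worth of tokens is available.
        bucket = std::min(bucket + vsync, capacity);
        const bool sample = bucket >= capture_period;
        if (sample)
          bucket -= capture_period;  // RecordSample() drains one capture period.
        std::cout << "event " << i << ": " << (sample ? "sample" : "skip") << "\n";
      }
      return 0;
    }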
diff --git a/chromium/content/browser/renderer_host/media/video_capture_oracle.h b/chromium/content/browser/renderer_host/media/video_capture_oracle.h
new file mode 100644
index 00000000000..739b56971a5
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/video_capture_oracle.h
@@ -0,0 +1,107 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CONTENT_BROWSER_RENDERER_HOST_MEDIA_VIDEO_CAPTURE_ORACLE_H_
+#define CONTENT_BROWSER_RENDERER_HOST_MEDIA_VIDEO_CAPTURE_ORACLE_H_
+
+#include "base/callback_forward.h"
+#include "base/memory/scoped_ptr.h"
+#include "base/time/time.h"
+#include "content/common/content_export.h"
+
+namespace content {
+
+// Filters a sequence of events to achieve a target frequency.
+class CONTENT_EXPORT SmoothEventSampler {
+ public:
+ explicit SmoothEventSampler(base::TimeDelta capture_period,
+ bool events_are_reliable,
+ int redundant_capture_goal);
+
+ // Add a new event to the event history, and return whether it ought to be
+ // sampled based on the desired |capture_period|. The event is not recorded as
+ // a sample until RecordSample() is called.
+ bool AddEventAndConsiderSampling(base::Time event_time);
+
+ // Operates on the last event added by AddEventAndConsiderSampling(), marking
+ // it as sampled. After this point we are current in the stream of events, as
+ // we have sampled the most recent event.
+ void RecordSample();
+
+ // Returns true if, at time |event_time|, sampling should occur because too
+ // much time will have passed relative to the last event and/or sample.
+ bool IsOverdueForSamplingAt(base::Time event_time) const;
+
+ // Returns true if AddEventAndConsiderSampling() has been called since the
+ // last call to RecordSample().
+ bool HasUnrecordedEvent() const;
+
+ private:
+ const bool events_are_reliable_;
+ const base::TimeDelta capture_period_;
+ const int redundant_capture_goal_;
+ const base::TimeDelta token_bucket_capacity_;
+
+ base::Time current_event_;
+ base::Time last_sample_;
+ int overdue_sample_count_;
+ base::TimeDelta token_bucket_;
+
+ DISALLOW_COPY_AND_ASSIGN(SmoothEventSampler);
+};
+
+// VideoCaptureOracle manages the producer-side throttling of captured frames
+// from a video capture device. It is informed of every update by the device;
+// this empowers it to look into the future and decide if a particular frame
+// ought to be captured in order to achieve its target frame rate.
+class CONTENT_EXPORT VideoCaptureOracle {
+ public:
+ enum Event {
+ kTimerPoll,
+ kCompositorUpdate,
+ kSoftwarePaint,
+ };
+
+ VideoCaptureOracle(base::TimeDelta capture_period,
+ bool events_are_reliable);
+ virtual ~VideoCaptureOracle() {}
+
+ // Record an event of type |event|, and decide whether the caller should do a
+ // frame capture immediately. Decisions of the oracle are final: the caller
+ // must do what it is told.
+ bool ObserveEventAndDecideCapture(
+ Event event,
+ base::Time event_time);
+
+ // Record the start of a capture. Returns a frame_number to be used with
+ // CompleteCapture().
+ int RecordCapture();
+
+ // Record the completion of a capture. Returns true iff the captured frame
+ // should be delivered.
+ bool CompleteCapture(int frame_number, base::Time timestamp);
+
+ base::TimeDelta capture_period() const { return capture_period_; }
+
+ private:
+
+ // Time between frames.
+ const base::TimeDelta capture_period_;
+
+ // Incremented every time a paint or update event occurs.
+ int frame_number_;
+
+ // Stores the frame number from the last delivered frame.
+ int last_delivered_frame_number_;
+
+ // Stores the timestamp of the last delivered frame.
+ base::Time last_delivered_frame_timestamp_;
+
+ // Tracks present/paint history.
+ SmoothEventSampler sampler_;
+};
+
+} // namespace content
+
+#endif // CONTENT_BROWSER_RENDERER_HOST_MEDIA_VIDEO_CAPTURE_ORACLE_H_
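[Editor's note] The class comment above describes the intended call order; the following is a hypothetical caller sketched only from this interface (no such helper exists in Chromium). It collapses the capture into a synchronous step for brevity, whereas a real client would call CompleteCapture() only once the asynchronous capture finishes.

    #include "base/time/time.h"
    #include "content/browser/renderer_host/media/video_capture_oracle.h"

    namespace content {

    // Hypothetical helper: returns true if the frame captured for |event| should
    // be delivered downstream, per the oracle's decisions.
    bool CaptureIfOracleApproves(VideoCaptureOracle* oracle,
                                 VideoCaptureOracle::Event event,
                                 base::Time event_time) {
      if (!oracle->ObserveEventAndDecideCapture(event, event_time))
        return false;  // The oracle throttled this event; skip the capture.
      const int frame_number = oracle->RecordCapture();
      // ... perform the capture here; a real client does this asynchronously ...
      const base::Time timestamp = event_time;  // Stand-in for the capture time.
      return oracle->CompleteCapture(frame_number, timestamp);
    }

    }  // namespace content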
diff --git a/chromium/content/browser/renderer_host/media/video_capture_oracle_unittest.cc b/chromium/content/browser/renderer_host/media/video_capture_oracle_unittest.cc
new file mode 100644
index 00000000000..40c1826d957
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/video_capture_oracle_unittest.cc
@@ -0,0 +1,478 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "content/browser/renderer_host/media/video_capture_oracle.h"
+
+#include "base/strings/stringprintf.h"
+#include "base/time/time.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace content {
+namespace {
+
+void SteadyStateSampleAndAdvance(base::TimeDelta vsync,
+ SmoothEventSampler* sampler, base::Time* t) {
+ ASSERT_TRUE(sampler->AddEventAndConsiderSampling(*t));
+ ASSERT_TRUE(sampler->HasUnrecordedEvent());
+ sampler->RecordSample();
+ ASSERT_FALSE(sampler->HasUnrecordedEvent());
+ ASSERT_FALSE(sampler->IsOverdueForSamplingAt(*t));
+ *t += vsync;
+ ASSERT_FALSE(sampler->IsOverdueForSamplingAt(*t));
+}
+
+void SteadyStateNoSampleAndAdvance(base::TimeDelta vsync,
+ SmoothEventSampler* sampler, base::Time* t) {
+ ASSERT_FALSE(sampler->AddEventAndConsiderSampling(*t));
+ ASSERT_TRUE(sampler->HasUnrecordedEvent());
+ ASSERT_FALSE(sampler->IsOverdueForSamplingAt(*t));
+ *t += vsync;
+ ASSERT_FALSE(sampler->IsOverdueForSamplingAt(*t));
+}
+
+void TestRedundantCaptureStrategy(base::TimeDelta capture_period,
+ int redundant_capture_goal,
+ SmoothEventSampler* sampler, base::Time* t) {
+ // Before any events have been considered, we're overdue for sampling.
+ ASSERT_TRUE(sampler->IsOverdueForSamplingAt(*t));
+
+ // Consider the first event. We want to sample that.
+ ASSERT_FALSE(sampler->HasUnrecordedEvent());
+ ASSERT_TRUE(sampler->AddEventAndConsiderSampling(*t));
+ ASSERT_TRUE(sampler->HasUnrecordedEvent());
+ sampler->RecordSample();
+ ASSERT_FALSE(sampler->HasUnrecordedEvent());
+
+ // After more than one capture period has passed without considering an event,
+ // we should repeatedly be overdue for sampling. However, once the redundant
+ // capture goal is achieved, we should no longer be overdue for sampling.
+ *t += capture_period * 4;
+ for (int i = 0; i < redundant_capture_goal; i++) {
+ SCOPED_TRACE(base::StringPrintf("Iteration %d", i));
+ ASSERT_FALSE(sampler->HasUnrecordedEvent());
+ ASSERT_TRUE(sampler->IsOverdueForSamplingAt(*t))
+ << "Should sample until redundant capture goal is hit";
+ sampler->RecordSample();
+ *t += capture_period; // Timer fires once every capture period.
+ }
+ ASSERT_FALSE(sampler->IsOverdueForSamplingAt(*t))
+ << "Should not be overdue once redundant capture goal achieved.";
+}
+
+// 60Hz sampled at 30Hz should produce 30Hz. In addition, this test contains
+// much more comprehensive before/after/edge-case scenarios than the others.
+TEST(SmoothEventSamplerTest, Sample60HertzAt30Hertz) {
+ const base::TimeDelta capture_period = base::TimeDelta::FromSeconds(1) / 30;
+ const int redundant_capture_goal = 200;
+ const base::TimeDelta vsync = base::TimeDelta::FromSeconds(1) / 60;
+
+ SmoothEventSampler sampler(capture_period, true, redundant_capture_goal);
+ base::Time t;
+ ASSERT_TRUE(base::Time::FromString("Sat, 23 Mar 2013 1:21:08 GMT", &t));
+
+ TestRedundantCaptureStrategy(capture_period, redundant_capture_goal,
+ &sampler, &t);
+
+ // Steady state, we should capture every other vsync, indefinitely.
+ for (int i = 0; i < 100; i++) {
+ SCOPED_TRACE(base::StringPrintf("Iteration %d", i));
+ SteadyStateSampleAndAdvance(vsync, &sampler, &t);
+ SteadyStateNoSampleAndAdvance(vsync, &sampler, &t);
+ }
+
+  // Now pretend we're limited by backpressure in the pipeline. In this
+  // scenario we are adding events but not sampling them.
+ for (int i = 0; i < 20; i++) {
+ SCOPED_TRACE(base::StringPrintf("Iteration %d", i));
+ ASSERT_EQ(i >= 7, sampler.IsOverdueForSamplingAt(t));
+ ASSERT_TRUE(sampler.AddEventAndConsiderSampling(t));
+ ASSERT_TRUE(sampler.HasUnrecordedEvent());
+ t += vsync;
+ }
+
+ // Now suppose we can sample again. We should be back in the steady state,
+ // but at a different phase.
+ ASSERT_TRUE(sampler.IsOverdueForSamplingAt(t));
+ for (int i = 0; i < 100; i++) {
+ SCOPED_TRACE(base::StringPrintf("Iteration %d", i));
+ SteadyStateSampleAndAdvance(vsync, &sampler, &t);
+ SteadyStateNoSampleAndAdvance(vsync, &sampler, &t);
+ }
+}
+
+// 50Hz sampled at 30Hz should produce a sequence where some frames are skipped.
+TEST(SmoothEventSamplerTest, Sample50HertzAt30Hertz) {
+ const base::TimeDelta capture_period = base::TimeDelta::FromSeconds(1) / 30;
+ const int redundant_capture_goal = 2;
+ const base::TimeDelta vsync = base::TimeDelta::FromSeconds(1) / 50;
+
+ SmoothEventSampler sampler(capture_period, true, redundant_capture_goal);
+ base::Time t;
+ ASSERT_TRUE(base::Time::FromString("Sat, 23 Mar 2013 1:21:08 GMT", &t));
+
+ TestRedundantCaptureStrategy(capture_period, redundant_capture_goal,
+ &sampler, &t);
+
+ // Steady state, we should capture 1st, 2nd and 4th frames out of every five
+ // frames, indefinitely.
+ for (int i = 0; i < 100; i++) {
+ SCOPED_TRACE(base::StringPrintf("Iteration %d", i));
+ SteadyStateSampleAndAdvance(vsync, &sampler, &t);
+ SteadyStateSampleAndAdvance(vsync, &sampler, &t);
+ SteadyStateNoSampleAndAdvance(vsync, &sampler, &t);
+ SteadyStateSampleAndAdvance(vsync, &sampler, &t);
+ SteadyStateNoSampleAndAdvance(vsync, &sampler, &t);
+ }
+
+  // Now pretend we're limited by backpressure in the pipeline. In this
+  // scenario we are adding events but not sampling them.
+ for (int i = 0; i < 12; i++) {
+ SCOPED_TRACE(base::StringPrintf("Iteration %d", i));
+ ASSERT_EQ(i >= 5, sampler.IsOverdueForSamplingAt(t));
+ ASSERT_TRUE(sampler.AddEventAndConsiderSampling(t));
+ t += vsync;
+ }
+
+ // Now suppose we can sample again. We should be back in the steady state
+ // again.
+ ASSERT_TRUE(sampler.IsOverdueForSamplingAt(t));
+ for (int i = 0; i < 100; i++) {
+ SCOPED_TRACE(base::StringPrintf("Iteration %d", i));
+ SteadyStateSampleAndAdvance(vsync, &sampler, &t);
+ SteadyStateSampleAndAdvance(vsync, &sampler, &t);
+ SteadyStateNoSampleAndAdvance(vsync, &sampler, &t);
+ SteadyStateSampleAndAdvance(vsync, &sampler, &t);
+ SteadyStateNoSampleAndAdvance(vsync, &sampler, &t);
+ }
+}
+
+// 75Hz sampled at 30Hz should produce a sequence where some frames are skipped.
+TEST(SmoothEventSamplerTest, Sample75HertzAt30Hertz) {
+ const base::TimeDelta capture_period = base::TimeDelta::FromSeconds(1) / 30;
+ const int redundant_capture_goal = 32;
+ const base::TimeDelta vsync = base::TimeDelta::FromSeconds(1) / 75;
+
+ SmoothEventSampler sampler(capture_period, true, redundant_capture_goal);
+ base::Time t;
+ ASSERT_TRUE(base::Time::FromString("Sat, 23 Mar 2013 1:21:08 GMT", &t));
+
+ TestRedundantCaptureStrategy(capture_period, redundant_capture_goal,
+ &sampler, &t);
+
+ // Steady state, we should capture 1st and 3rd frames out of every five
+ // frames, indefinitely.
+ SteadyStateSampleAndAdvance(vsync, &sampler, &t);
+ SteadyStateNoSampleAndAdvance(vsync, &sampler, &t);
+ for (int i = 0; i < 100; i++) {
+ SCOPED_TRACE(base::StringPrintf("Iteration %d", i));
+ SteadyStateSampleAndAdvance(vsync, &sampler, &t);
+ SteadyStateNoSampleAndAdvance(vsync, &sampler, &t);
+ SteadyStateSampleAndAdvance(vsync, &sampler, &t);
+ SteadyStateNoSampleAndAdvance(vsync, &sampler, &t);
+ SteadyStateNoSampleAndAdvance(vsync, &sampler, &t);
+ }
+
+  // Now pretend we're limited by backpressure in the pipeline. In this
+  // scenario we are adding events but not sampling them.
+ for (int i = 0; i < 20; i++) {
+ SCOPED_TRACE(base::StringPrintf("Iteration %d", i));
+ ASSERT_EQ(i >= 8, sampler.IsOverdueForSamplingAt(t));
+ ASSERT_TRUE(sampler.AddEventAndConsiderSampling(t));
+ t += vsync;
+ }
+
+ // Now suppose we can sample again. We capture the next frame, and not the one
+ // after that, and then we're back in the steady state again.
+ ASSERT_TRUE(sampler.IsOverdueForSamplingAt(t));
+ SteadyStateSampleAndAdvance(vsync, &sampler, &t);
+ SteadyStateNoSampleAndAdvance(vsync, &sampler, &t);
+ for (int i = 0; i < 100; i++) {
+ SCOPED_TRACE(base::StringPrintf("Iteration %d", i));
+ SteadyStateSampleAndAdvance(vsync, &sampler, &t);
+ SteadyStateNoSampleAndAdvance(vsync, &sampler, &t);
+ SteadyStateSampleAndAdvance(vsync, &sampler, &t);
+ SteadyStateNoSampleAndAdvance(vsync, &sampler, &t);
+ SteadyStateNoSampleAndAdvance(vsync, &sampler, &t);
+ }
+}
+
+// 30Hz sampled at 30Hz should produce 30Hz.
+TEST(SmoothEventSamplerTest, Sample30HertzAt30Hertz) {
+ const base::TimeDelta capture_period = base::TimeDelta::FromSeconds(1) / 30;
+ const int redundant_capture_goal = 1;
+ const base::TimeDelta vsync = base::TimeDelta::FromSeconds(1) / 30;
+
+ SmoothEventSampler sampler(capture_period, true, redundant_capture_goal);
+ base::Time t;
+ ASSERT_TRUE(base::Time::FromString("Sat, 23 Mar 2013 1:21:08 GMT", &t));
+
+ TestRedundantCaptureStrategy(capture_period, redundant_capture_goal,
+ &sampler, &t);
+
+ // Steady state, we should capture every vsync, indefinitely.
+ for (int i = 0; i < 200; i++) {
+ SCOPED_TRACE(base::StringPrintf("Iteration %d", i));
+ SteadyStateSampleAndAdvance(vsync, &sampler, &t);
+ }
+
+  // Now pretend we're limited by backpressure in the pipeline. In this
+  // scenario we are adding events but not sampling them.
+ for (int i = 0; i < 7; i++) {
+ SCOPED_TRACE(base::StringPrintf("Iteration %d", i));
+ ASSERT_EQ(i >= 3, sampler.IsOverdueForSamplingAt(t));
+ ASSERT_TRUE(sampler.AddEventAndConsiderSampling(t));
+ t += vsync;
+ }
+
+ // Now suppose we can sample again. We should be back in the steady state.
+ ASSERT_TRUE(sampler.IsOverdueForSamplingAt(t));
+ for (int i = 0; i < 100; i++) {
+ SCOPED_TRACE(base::StringPrintf("Iteration %d", i));
+ SteadyStateSampleAndAdvance(vsync, &sampler, &t);
+ }
+}
+
+// 24Hz sampled at 30Hz should produce 24Hz.
+TEST(SmoothEventSamplerTest, Sample24HertzAt30Hertz) {
+ const base::TimeDelta capture_period = base::TimeDelta::FromSeconds(1) / 30;
+ const int redundant_capture_goal = 333;
+ const base::TimeDelta vsync = base::TimeDelta::FromSeconds(1) / 24;
+
+ SmoothEventSampler sampler(capture_period, true, redundant_capture_goal);
+ base::Time t;
+ ASSERT_TRUE(base::Time::FromString("Sat, 23 Mar 2013 1:21:08 GMT", &t));
+
+ TestRedundantCaptureStrategy(capture_period, redundant_capture_goal,
+ &sampler, &t);
+
+ // Steady state, we should capture every vsync, indefinitely.
+ for (int i = 0; i < 200; i++) {
+ SCOPED_TRACE(base::StringPrintf("Iteration %d", i));
+ SteadyStateSampleAndAdvance(vsync, &sampler, &t);
+ }
+
+  // Now pretend we're limited by backpressure in the pipeline. In this
+  // scenario we are adding events but not sampling them.
+ for (int i = 0; i < 7; i++) {
+ SCOPED_TRACE(base::StringPrintf("Iteration %d", i));
+ ASSERT_EQ(i >= 3, sampler.IsOverdueForSamplingAt(t));
+ ASSERT_TRUE(sampler.AddEventAndConsiderSampling(t));
+ t += vsync;
+ }
+
+ // Now suppose we can sample again. We should be back in the steady state.
+ ASSERT_TRUE(sampler.IsOverdueForSamplingAt(t));
+ for (int i = 0; i < 100; i++) {
+ SCOPED_TRACE(base::StringPrintf("Iteration %d", i));
+ SteadyStateSampleAndAdvance(vsync, &sampler, &t);
+ }
+}
+
+TEST(SmoothEventSamplerTest, DoubleDrawAtOneTimeStillDirties) {
+ const base::TimeDelta capture_period = base::TimeDelta::FromSeconds(1) / 30;
+ const base::TimeDelta overdue_period = base::TimeDelta::FromSeconds(1);
+
+ SmoothEventSampler sampler(capture_period, true, 1);
+ base::Time t;
+ ASSERT_TRUE(base::Time::FromString("Sat, 23 Mar 2013 1:21:08 GMT", &t));
+
+ ASSERT_TRUE(sampler.AddEventAndConsiderSampling(t));
+ sampler.RecordSample();
+ ASSERT_FALSE(sampler.IsOverdueForSamplingAt(t))
+ << "Sampled last event; should not be dirty.";
+ t += overdue_period;
+
+ // Now simulate 2 events with the same clock value.
+ ASSERT_TRUE(sampler.AddEventAndConsiderSampling(t));
+ sampler.RecordSample();
+ ASSERT_FALSE(sampler.AddEventAndConsiderSampling(t))
+ << "Two events at same time -- expected second not to be sampled.";
+ ASSERT_TRUE(sampler.IsOverdueForSamplingAt(t + overdue_period))
+ << "Second event should dirty the capture state.";
+ sampler.RecordSample();
+ ASSERT_FALSE(sampler.IsOverdueForSamplingAt(t + overdue_period));
+}
+
+TEST(SmoothEventSamplerTest, FallbackToPollingIfUpdatesUnreliable) {
+ const base::TimeDelta timer_interval = base::TimeDelta::FromSeconds(1) / 30;
+
+ SmoothEventSampler should_not_poll(timer_interval, true, 1);
+ SmoothEventSampler should_poll(timer_interval, false, 1);
+ base::Time t;
+ ASSERT_TRUE(base::Time::FromString("Sat, 23 Mar 2013 1:21:08 GMT", &t));
+
+ // Do one round of the "happy case" where an event was received and
+ // RecordSample() was called by the client.
+ ASSERT_TRUE(should_not_poll.AddEventAndConsiderSampling(t));
+ ASSERT_TRUE(should_poll.AddEventAndConsiderSampling(t));
+ should_not_poll.RecordSample();
+ should_poll.RecordSample();
+
+ // One time period ahead, neither sampler says we're overdue.
+ for (int i = 0; i < 3; i++) {
+ t += timer_interval;
+ ASSERT_FALSE(should_not_poll.IsOverdueForSamplingAt(t))
+ << "Sampled last event; should not be dirty.";
+ ASSERT_FALSE(should_poll.IsOverdueForSamplingAt(t))
+ << "Dirty interval has not elapsed yet.";
+ }
+
+ // Next time period ahead, both samplers say we're overdue. The non-polling
+ // sampler is returning true here because it has been configured to allow one
+ // redundant capture.
+ t += timer_interval;
+ ASSERT_TRUE(should_not_poll.IsOverdueForSamplingAt(t))
+ << "Sampled last event; is dirty one time only to meet redundancy goal.";
+ ASSERT_TRUE(should_poll.IsOverdueForSamplingAt(t))
+ << "If updates are unreliable, must fall back to polling when idle.";
+ should_not_poll.RecordSample();
+ should_poll.RecordSample();
+
+ // Forever more, the non-polling sampler returns false while the polling one
+ // returns true.
+ for (int i = 0; i < 100; ++i) {
+ t += timer_interval;
+ ASSERT_FALSE(should_not_poll.IsOverdueForSamplingAt(t))
+ << "Sampled last event; should not be dirty.";
+ ASSERT_TRUE(should_poll.IsOverdueForSamplingAt(t))
+ << "If updates are unreliable, must fall back to polling when idle.";
+ should_poll.RecordSample();
+ }
+ t += timer_interval / 3;
+ ASSERT_FALSE(should_not_poll.IsOverdueForSamplingAt(t))
+ << "Sampled last event; should not be dirty.";
+ ASSERT_TRUE(should_poll.IsOverdueForSamplingAt(t))
+ << "If updates are unreliable, must fall back to polling when idle.";
+ should_poll.RecordSample();
+}
+
+struct DataPoint {
+ bool should_capture;
+ double increment_ms;
+};
+
+void ReplayCheckingSamplerDecisions(const DataPoint* data_points,
+ size_t num_data_points,
+ SmoothEventSampler* sampler) {
+ base::Time t;
+ ASSERT_TRUE(base::Time::FromString("Sat, 23 Mar 2013 1:21:08 GMT", &t));
+ for (size_t i = 0; i < num_data_points; ++i) {
+ t += base::TimeDelta::FromMicroseconds(
+ static_cast<int64>(data_points[i].increment_ms * 1000));
+ ASSERT_EQ(data_points[i].should_capture,
+ sampler->AddEventAndConsiderSampling(t))
+ << "at data_points[" << i << ']';
+ if (data_points[i].should_capture)
+ sampler->RecordSample();
+ }
+}
+
+TEST(SmoothEventSamplerTest, DrawingAt24FpsWith60HzVsyncSampledAt30Hertz) {
+ // Actual capturing of timing data: Initial instability as a 24 FPS video was
+ // started from a still screen, then clearly followed by steady-state.
+ static const DataPoint data_points[] = {
+ { true, 1437.93 }, { true, 150.484 }, { true, 217.362 }, { true, 50.161 },
+ { true, 33.44 }, { false, 0 }, { true, 16.721 }, { true, 66.88 },
+ { true, 50.161 }, { false, 0 }, { false, 0 }, { true, 50.16 },
+ { true, 33.441 }, { true, 16.72 }, { false, 16.72 }, { true, 117.041 },
+ { true, 16.72 }, { false, 16.72 }, { true, 50.161 }, { true, 50.16 },
+ { true, 33.441 }, { true, 33.44 }, { true, 33.44 }, { true, 16.72 },
+ { false, 0 }, { true, 50.161 }, { false, 0 }, { true, 33.44 },
+ { true, 16.72 }, { false, 16.721 }, { true, 66.881 }, { false, 0 },
+ { true, 33.441 }, { true, 16.72 }, { true, 50.16 }, { true, 16.72 },
+ { false, 16.721 }, { true, 50.161 }, { true, 50.16 }, { false, 0 },
+ { true, 33.441 }, { true, 50.337 }, { true, 50.183 }, { true, 16.722 },
+ { true, 50.161 }, { true, 33.441 }, { true, 50.16 }, { true, 33.441 },
+ { true, 50.16 }, { true, 33.441 }, { true, 50.16 }, { true, 33.44 },
+ { true, 50.161 }, { true, 50.16 }, { true, 33.44 }, { true, 33.441 },
+ { true, 50.16 }, { true, 50.161 }, { true, 33.44 }, { true, 33.441 },
+ { true, 50.16 }, { true, 33.44 }, { true, 50.161 }, { true, 33.44 },
+ { true, 50.161 }, { true, 33.44 }, { true, 50.161 }, { true, 33.44 },
+ { true, 83.601 }, { true, 16.72 }, { true, 33.44 }, { false, 0 }
+ };
+
+ SmoothEventSampler sampler(base::TimeDelta::FromSeconds(1) / 30, true, 3);
+ ReplayCheckingSamplerDecisions(data_points, arraysize(data_points), &sampler);
+}
+
+TEST(SmoothEventSamplerTest, DrawingAt30FpsWith60HzVsyncSampledAt30Hertz) {
+ // Actual capturing of timing data: Initial instability as a 30 FPS video was
+ // started from a still screen, then followed by steady-state. Drawing
+ // framerate from the video rendering was a bit volatile, but averaged 30 FPS.
+ static const DataPoint data_points[] = {
+ { true, 2407.69 }, { true, 16.733 }, { true, 217.362 }, { true, 33.441 },
+ { true, 33.44 }, { true, 33.44 }, { true, 33.441 }, { true, 33.44 },
+ { true, 33.44 }, { true, 33.441 }, { true, 33.44 }, { true, 33.44 },
+ { true, 16.721 }, { true, 33.44 }, { false, 0 }, { true, 50.161 },
+ { true, 50.16 }, { false, 0 }, { true, 50.161 }, { true, 33.44 },
+ { true, 16.72 }, { false, 0 }, { false, 16.72 }, { true, 66.881 },
+ { false, 0 }, { true, 33.44 }, { true, 16.72 }, { true, 50.161 },
+ { false, 0 }, { true, 33.538 }, { true, 33.526 }, { true, 33.447 },
+ { true, 33.445 }, { true, 33.441 }, { true, 16.721 }, { true, 33.44 },
+ { true, 33.44 }, { true, 50.161 }, { true, 16.72 }, { true, 33.44 },
+ { true, 33.441 }, { true, 33.44 }, { false, 0 }, { false, 16.72 },
+ { true, 66.881 }, { true, 16.72 }, { false, 16.72 }, { true, 50.16 },
+ { true, 33.441 }, { true, 33.44 }, { true, 33.44 }, { true, 33.44 },
+ { true, 33.441 }, { true, 33.44 }, { true, 50.161 }, { false, 0 },
+ { true, 33.44 }, { true, 33.44 }, { true, 50.161 }, { true, 16.72 },
+ { true, 33.44 }, { true, 33.441 }, { false, 0 }, { true, 66.88 },
+ { true, 33.441 }, { true, 33.44 }, { true, 33.44 }, { false, 0 },
+ { true, 33.441 }, { true, 33.44 }, { true, 33.44 }, { false, 0 },
+ { true, 16.72 }, { true, 50.161 }, { false, 0 }, { true, 50.16 },
+ { false, 0.001 }, { true, 16.721 }, { true, 66.88 }, { true, 33.44 },
+ { true, 33.441 }, { true, 33.44 }, { true, 50.161 }, { true, 16.72 },
+ { false, 0 }, { true, 33.44 }, { false, 16.72 }, { true, 66.881 },
+ { true, 33.44 }, { true, 16.72 }, { true, 33.441 }, { false, 16.72 },
+ { true, 66.88 }, { true, 16.721 }, { true, 50.16 }, { true, 33.44 },
+ { true, 16.72 }, { true, 33.441 }, { true, 33.44 }, { true, 33.44 }
+ };
+
+ SmoothEventSampler sampler(base::TimeDelta::FromSeconds(1) / 30, true, 3);
+ ReplayCheckingSamplerDecisions(data_points, arraysize(data_points), &sampler);
+}
+
+TEST(SmoothEventSamplerTest, DrawingAt60FpsWith60HzVsyncSampledAt30Hertz) {
+  // Actual capturing of timing data: WebGL Aquarium demo
+ // (http://webglsamples.googlecode.com/hg/aquarium/aquarium.html) which ran
+ // between 55-60 FPS in the steady-state.
+ static const DataPoint data_points[] = {
+ { true, 16.72 }, { true, 16.72 }, { true, 4163.29 }, { true, 50.193 },
+ { true, 117.041 }, { true, 50.161 }, { true, 50.16 }, { true, 33.441 },
+ { true, 50.16 }, { true, 33.44 }, { false, 0 }, { false, 0 },
+ { true, 50.161 }, { true, 83.601 }, { true, 50.16 }, { true, 16.72 },
+ { true, 33.441 }, { false, 16.72 }, { true, 50.16 }, { true, 16.72 },
+ { false, 0.001 }, { true, 33.441 }, { false, 16.72 }, { true, 16.72 },
+ { true, 50.16 }, { false, 0 }, { true, 16.72 }, { true, 33.441 },
+ { false, 0 }, { true, 33.44 }, { false, 16.72 }, { true, 16.72 },
+ { true, 50.161 }, { false, 0 }, { true, 16.72 }, { true, 33.44 },
+ { false, 0 }, { true, 33.44 }, { false, 16.721 }, { true, 16.721 },
+ { true, 50.161 }, { false, 0 }, { true, 16.72 }, { true, 33.441 },
+ { false, 0 }, { true, 33.44 }, { false, 16.72 }, { true, 33.44 },
+ { false, 0 }, { true, 16.721 }, { true, 50.161 }, { false, 0 },
+ { true, 33.44 }, { false, 0 }, { true, 16.72 }, { true, 33.441 },
+ { false, 0 }, { true, 33.44 }, { false, 16.72 }, { true, 16.72 },
+ { true, 50.16 }, { false, 0 }, { true, 16.721 }, { true, 33.44 },
+ { false, 0 }, { true, 33.44 }, { false, 16.721 }, { true, 16.721 },
+ { true, 50.161 }, { false, 0 }, { true, 16.72 }, { true, 33.44 },
+ { false, 0 }, { true, 33.441 }, { false, 16.72 }, { true, 16.72 },
+ { true, 50.16 }, { false, 0 }, { true, 16.72 }, { true, 33.441 },
+ { true, 33.44 }, { false, 0 }, { true, 33.44 }, { true, 33.441 },
+ { false, 0 }, { true, 33.44 }, { true, 33.441 }, { false, 0 },
+ { true, 33.44 }, { false, 0 }, { true, 33.44 }, { false, 16.72 },
+ { true, 16.721 }, { true, 50.161 }, { false, 0 }, { true, 16.72 },
+ { true, 33.44 }, { true, 33.441 }, { false, 0 }, { true, 33.44 },
+ { true, 33.44 }, { false, 0 }, { true, 33.441 }, { false, 16.72 },
+ { true, 16.72 }, { true, 50.16 }, { false, 0 }, { true, 16.72 },
+ { true, 33.441 }, { false, 0 }, { true, 33.44 }, { false, 16.72 },
+ { true, 33.44 }, { false, 0 }, { true, 16.721 }, { true, 50.161 },
+ { false, 0 }, { true, 16.72 }, { true, 33.44 }, { false, 0 },
+ { true, 33.441 }, { false, 16.72 }, { true, 16.72 }, { true, 50.16 }
+ };
+
+ SmoothEventSampler sampler(base::TimeDelta::FromSeconds(1) / 30, true, 3);
+ ReplayCheckingSamplerDecisions(data_points, arraysize(data_points), &sampler);
+}
+
+} // namespace
+} // namespace content
diff --git a/chromium/content/browser/renderer_host/media/web_contents_audio_input_stream.cc b/chromium/content/browser/renderer_host/media/web_contents_audio_input_stream.cc
new file mode 100644
index 00000000000..8336eb2c195
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/web_contents_audio_input_stream.cc
@@ -0,0 +1,349 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "content/browser/renderer_host/media/web_contents_audio_input_stream.h"
+
+#include <string>
+
+#include "base/bind.h"
+#include "base/bind_helpers.h"
+#include "base/logging.h"
+#include "base/memory/scoped_ptr.h"
+#include "base/threading/thread_checker.h"
+#include "content/browser/renderer_host/media/audio_mirroring_manager.h"
+#include "content/browser/renderer_host/media/web_contents_capture_util.h"
+#include "content/browser/renderer_host/media/web_contents_tracker.h"
+#include "content/public/browser/browser_thread.h"
+#include "media/audio/virtual_audio_input_stream.h"
+#include "media/audio/virtual_audio_output_stream.h"
+
+namespace content {
+
+class WebContentsAudioInputStream::Impl
+ : public base::RefCountedThreadSafe<WebContentsAudioInputStream::Impl>,
+ public AudioMirroringManager::MirroringDestination {
+ public:
+ // Takes ownership of |mixer_stream|. The rest outlive this instance.
+ Impl(int render_process_id, int render_view_id,
+ AudioMirroringManager* mirroring_manager,
+ const scoped_refptr<WebContentsTracker>& tracker,
+ media::VirtualAudioInputStream* mixer_stream);
+
+ // Open underlying VirtualAudioInputStream and start tracker.
+ bool Open();
+
+ // Start the underlying VirtualAudioInputStream and instruct
+ // AudioMirroringManager to begin a mirroring session.
+ void Start(AudioInputCallback* callback);
+
+ // Stop the underlying VirtualAudioInputStream and instruct
+  // AudioMirroringManager to shut down a mirroring session.
+ void Stop();
+
+ // Close the underlying VirtualAudioInputStream and stop the tracker.
+ void Close();
+
+ // Accessor to underlying VirtualAudioInputStream.
+ media::VirtualAudioInputStream* mixer_stream() const {
+ return mixer_stream_.get();
+ }
+
+ private:
+ friend class base::RefCountedThreadSafe<WebContentsAudioInputStream::Impl>;
+
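+  // Lifecycle states, advanced by the methods below: Open() moves CONSTRUCTED
+  // to OPENED, Start() moves OPENED to MIRRORING (when a target is
+  // available), Stop() moves MIRRORING back to OPENED, and Close() ends in
+  // CLOSED.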
+ enum State {
+ CONSTRUCTED,
+ OPENED,
+ MIRRORING,
+ CLOSED
+ };
+
+ virtual ~Impl();
+
+ // Returns true if the mirroring target has been permanently lost.
+ bool IsTargetLost() const;
+
+ // Notifies the consumer callback that the stream is now dead.
+ void ReportError();
+
+ // Start/Stop mirroring by posting a call to AudioMirroringManager on the IO
+ // BrowserThread.
+ void StartMirroring();
+ void StopMirroring();
+
+ // AudioMirroringManager::MirroringDestination implementation
+ virtual media::AudioOutputStream* AddInput(
+ const media::AudioParameters& params) OVERRIDE;
+
+ // Callback which is run when |stream| is closed. Deletes |stream|.
+ void ReleaseInput(media::VirtualAudioOutputStream* stream);
+
+ // Called by WebContentsTracker when the target of the audio mirroring has
+ // changed.
+ void OnTargetChanged(int render_process_id, int render_view_id);
+
+ // Injected dependencies.
+ AudioMirroringManager* const mirroring_manager_;
+ const scoped_refptr<WebContentsTracker> tracker_;
+ // The AudioInputStream implementation that handles the audio conversion and
+ // mixing details.
+ const scoped_ptr<media::VirtualAudioInputStream> mixer_stream_;
+
+ State state_;
+
+ // Current audio mirroring target.
+ int target_render_process_id_;
+ int target_render_view_id_;
+
+ // Current callback used to consume the resulting mixed audio data.
+ AudioInputCallback* callback_;
+
+ base::ThreadChecker thread_checker_;
+
+ DISALLOW_COPY_AND_ASSIGN(Impl);
+};
+
+WebContentsAudioInputStream::Impl::Impl(
+ int render_process_id, int render_view_id,
+ AudioMirroringManager* mirroring_manager,
+ const scoped_refptr<WebContentsTracker>& tracker,
+ media::VirtualAudioInputStream* mixer_stream)
+ : mirroring_manager_(mirroring_manager),
+ tracker_(tracker), mixer_stream_(mixer_stream), state_(CONSTRUCTED),
+ target_render_process_id_(render_process_id),
+ target_render_view_id_(render_view_id),
+ callback_(NULL) {
+ DCHECK(mirroring_manager_);
+ DCHECK(tracker_.get());
+ DCHECK(mixer_stream_.get());
+
+ // WAIS::Impl can be constructed on any thread, but will DCHECK that all
+ // its methods from here on are called from the same thread.
+ thread_checker_.DetachFromThread();
+}
+
+WebContentsAudioInputStream::Impl::~Impl() {
+ DCHECK(state_ == CONSTRUCTED || state_ == CLOSED);
+}
+
+bool WebContentsAudioInputStream::Impl::Open() {
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ DCHECK_EQ(CONSTRUCTED, state_) << "Illegal to Open more than once.";
+
+ if (!mixer_stream_->Open())
+ return false;
+
+ state_ = OPENED;
+
+ tracker_->Start(
+ target_render_process_id_, target_render_view_id_,
+ base::Bind(&Impl::OnTargetChanged, this));
+
+ return true;
+}
+
+void WebContentsAudioInputStream::Impl::Start(AudioInputCallback* callback) {
+ DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK(callback);
+
+ if (state_ != OPENED)
+ return;
+
+ callback_ = callback;
+ if (IsTargetLost()) {
+ ReportError();
+ callback_ = NULL;
+ return;
+ }
+
+ state_ = MIRRORING;
+ mixer_stream_->Start(callback);
+
+ StartMirroring();
+}
+
+void WebContentsAudioInputStream::Impl::Stop() {
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ if (state_ != MIRRORING)
+ return;
+
+ state_ = OPENED;
+
+ mixer_stream_->Stop();
+ callback_ = NULL;
+
+ if (!IsTargetLost())
+ StopMirroring();
+}
+
+void WebContentsAudioInputStream::Impl::Close() {
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ Stop();
+
+ if (state_ == OPENED) {
+ state_ = CONSTRUCTED;
+ tracker_->Stop();
+ mixer_stream_->Close();
+ }
+
+ DCHECK_EQ(CONSTRUCTED, state_);
+ state_ = CLOSED;
+}
+
+bool WebContentsAudioInputStream::Impl::IsTargetLost() const {
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ return target_render_process_id_ <= 0 || target_render_view_id_ <= 0;
+}
+
+void WebContentsAudioInputStream::Impl::ReportError() {
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ // TODO(miu): Need clean-up of AudioInputCallback interface in a future
+  // change, since its only implementation ignores the first argument entirely.
+ callback_->OnError(NULL);
+}
+
+void WebContentsAudioInputStream::Impl::StartMirroring() {
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ BrowserThread::PostTask(
+ BrowserThread::IO,
+ FROM_HERE,
+ base::Bind(&AudioMirroringManager::StartMirroring,
+ base::Unretained(mirroring_manager_),
+ target_render_process_id_, target_render_view_id_,
+ make_scoped_refptr(this)));
+}
+
+void WebContentsAudioInputStream::Impl::StopMirroring() {
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ BrowserThread::PostTask(
+ BrowserThread::IO,
+ FROM_HERE,
+ base::Bind(&AudioMirroringManager::StopMirroring,
+ base::Unretained(mirroring_manager_),
+ target_render_process_id_, target_render_view_id_,
+ make_scoped_refptr(this)));
+}
+
+media::AudioOutputStream* WebContentsAudioInputStream::Impl::AddInput(
+ const media::AudioParameters& params) {
+ // Note: The closure created here holds a reference to "this," which will
+ // guarantee the VirtualAudioInputStream (mixer_stream_) outlives the
+ // VirtualAudioOutputStream.
+ return new media::VirtualAudioOutputStream(
+ params,
+ mixer_stream_.get(),
+ base::Bind(&Impl::ReleaseInput, this));
+}
+
+void WebContentsAudioInputStream::Impl::ReleaseInput(
+ media::VirtualAudioOutputStream* stream) {
+ delete stream;
+}
+
+void WebContentsAudioInputStream::Impl::OnTargetChanged(int render_process_id,
+ int render_view_id) {
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ if (target_render_process_id_ == render_process_id &&
+ target_render_view_id_ == render_view_id) {
+ return;
+ }
+
+ DVLOG(1) << "Target RenderView has changed from "
+ << target_render_process_id_ << ':' << target_render_view_id_
+ << " to " << render_process_id << ':' << render_view_id;
+
+ if (state_ == MIRRORING)
+ StopMirroring();
+
+ target_render_process_id_ = render_process_id;
+ target_render_view_id_ = render_view_id;
+
+ if (state_ == MIRRORING) {
+ if (IsTargetLost()) {
+ ReportError();
+ Stop();
+ } else {
+ StartMirroring();
+ }
+ }
+}
+
+// static
+WebContentsAudioInputStream* WebContentsAudioInputStream::Create(
+ const std::string& device_id,
+ const media::AudioParameters& params,
+ const scoped_refptr<base::MessageLoopProxy>& worker_loop,
+ AudioMirroringManager* audio_mirroring_manager) {
+ int render_process_id;
+ int render_view_id;
+ if (!WebContentsCaptureUtil::ExtractTabCaptureTarget(
+ device_id, &render_process_id, &render_view_id)) {
+ return NULL;
+ }
+
+ return new WebContentsAudioInputStream(
+ render_process_id, render_view_id,
+ audio_mirroring_manager,
+ new WebContentsTracker(),
+ new media::VirtualAudioInputStream(
+ params, worker_loop,
+ media::VirtualAudioInputStream::AfterCloseCallback()));
+}
+
+WebContentsAudioInputStream::WebContentsAudioInputStream(
+ int render_process_id, int render_view_id,
+ AudioMirroringManager* mirroring_manager,
+ const scoped_refptr<WebContentsTracker>& tracker,
+ media::VirtualAudioInputStream* mixer_stream)
+ : impl_(new Impl(render_process_id, render_view_id,
+ mirroring_manager, tracker, mixer_stream)) {}
+
+WebContentsAudioInputStream::~WebContentsAudioInputStream() {}
+
+bool WebContentsAudioInputStream::Open() {
+ return impl_->Open();
+}
+
+void WebContentsAudioInputStream::Start(AudioInputCallback* callback) {
+ impl_->Start(callback);
+}
+
+void WebContentsAudioInputStream::Stop() {
+ impl_->Stop();
+}
+
+void WebContentsAudioInputStream::Close() {
+ impl_->Close();
+ delete this;
+}
+
+double WebContentsAudioInputStream::GetMaxVolume() {
+ return impl_->mixer_stream()->GetMaxVolume();
+}
+
+void WebContentsAudioInputStream::SetVolume(double volume) {
+ impl_->mixer_stream()->SetVolume(volume);
+}
+
+double WebContentsAudioInputStream::GetVolume() {
+ return impl_->mixer_stream()->GetVolume();
+}
+
+void WebContentsAudioInputStream::SetAutomaticGainControl(bool enabled) {
+ impl_->mixer_stream()->SetAutomaticGainControl(enabled);
+}
+
+bool WebContentsAudioInputStream::GetAutomaticGainControl() {
+ return impl_->mixer_stream()->GetAutomaticGainControl();
+}
+
+} // namespace content
diff --git a/chromium/content/browser/renderer_host/media/web_contents_audio_input_stream.h b/chromium/content/browser/renderer_host/media/web_contents_audio_input_stream.h
new file mode 100644
index 00000000000..486547ecaa2
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/web_contents_audio_input_stream.h
@@ -0,0 +1,92 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// An AudioInputStream which provides a loop-back of all audio output generated
+// by the RenderView associated with a WebContents instance. The single stream
+// of data is produced by format-converting and mixing all audio output from a
+// RenderView. In other words, WebContentsAudioInputStream provides tab-level
+// audio mirroring.
+//
+// The implementation observes a WebContents instance (which represents a
+// browser tab) so that it can track the replacement of RenderViews due to
+// navigation, crash/reload, and similar events, and take appropriate actions
+// to provide a seamless, uninterrupted mirroring experience.
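+//
+// Illustrative usage sketch (identifiers other than this class's methods are
+// placeholders):
+//
+//   WebContentsAudioInputStream* stream = WebContentsAudioInputStream::Create(
+//       device_id, params, audio_message_loop, mirroring_manager);
+//   if (stream && stream->Open()) {
+//     stream->Start(callback);  // |callback| consumes the mixed audio.
+//     // ... mirroring is active ...
+//     stream->Stop();
+//   }
+//   if (stream)
+//     stream->Close();  // Self-deletes; do not touch |stream| afterwards.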
+
+#ifndef CONTENT_BROWSER_RENDERER_HOST_MEDIA_WEB_CONTENTS_AUDIO_INPUT_STREAM_H_
+#define CONTENT_BROWSER_RENDERER_HOST_MEDIA_WEB_CONTENTS_AUDIO_INPUT_STREAM_H_
+
+#include <string>
+
+#include "base/memory/ref_counted.h"
+#include "content/common/content_export.h"
+#include "media/audio/audio_io.h"
+
+namespace base {
+class MessageLoopProxy;
+}
+
+namespace media {
+class AudioParameters;
+class VirtualAudioInputStream;
+}
+
+namespace content {
+
+class AudioMirroringManager;
+class WebContentsTracker;
+
+class CONTENT_EXPORT WebContentsAudioInputStream
+ : NON_EXPORTED_BASE(public media::AudioInputStream) {
+ public:
+ // media::AudioInputStream implementation
+ virtual bool Open() OVERRIDE;
+ virtual void Start(AudioInputCallback* callback) OVERRIDE;
+ virtual void Stop() OVERRIDE;
+ virtual void Close() OVERRIDE;
+ virtual double GetMaxVolume() OVERRIDE;
+ virtual void SetVolume(double volume) OVERRIDE;
+ virtual double GetVolume() OVERRIDE;
+ virtual void SetAutomaticGainControl(bool enabled) OVERRIDE;
+ virtual bool GetAutomaticGainControl() OVERRIDE;
+
+ // Create a new audio mirroring session, or return NULL on error. |device_id|
+ // should be in the format accepted by
+ // WebContentsCaptureUtil::ExtractTabCaptureTarget(). The caller must
+ // guarantee Close() is called on the returned object so that it may
+ // self-destruct.
+ // |worker_loop| is the loop on which AudioInputCallback methods are called
+ // and may or may not be the single thread that invokes the AudioInputStream
+ // methods.
+ static WebContentsAudioInputStream* Create(
+ const std::string& device_id,
+ const media::AudioParameters& params,
+ const scoped_refptr<base::MessageLoopProxy>& worker_loop,
+ AudioMirroringManager* audio_mirroring_manager);
+
+ private:
+ friend class WebContentsAudioInputStreamTest;
+
+ // Maintain most state and functionality in an internal ref-counted
+ // implementation class. This object must outlive a call to Close(), until
+ // the shutdown tasks running on other threads complete: The
+ // AudioMirroringManager on the IO thread, the WebContentsTracker on the UI
+  // thread, and the VirtualAudioOutputStreams on the audio thread.
+ class Impl;
+
+ WebContentsAudioInputStream(
+ int render_process_id, int render_view_id,
+ AudioMirroringManager* mirroring_manager,
+ const scoped_refptr<WebContentsTracker>& tracker,
+ media::VirtualAudioInputStream* mixer_stream);
+
+ virtual ~WebContentsAudioInputStream();
+
+ scoped_refptr<Impl> impl_;
+
+ DISALLOW_COPY_AND_ASSIGN(WebContentsAudioInputStream);
+};
+
+} // namespace content
+
+#endif // CONTENT_BROWSER_RENDERER_HOST_MEDIA_WEB_CONTENTS_AUDIO_INPUT_STREAM_H_
diff --git a/chromium/content/browser/renderer_host/media/web_contents_audio_input_stream_unittest.cc b/chromium/content/browser/renderer_host/media/web_contents_audio_input_stream_unittest.cc
new file mode 100644
index 00000000000..3ce336d39c9
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/web_contents_audio_input_stream_unittest.cc
@@ -0,0 +1,513 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "content/browser/renderer_host/media/web_contents_audio_input_stream.h"
+
+#include <list>
+
+#include "base/bind.h"
+#include "base/bind_helpers.h"
+#include "base/message_loop/message_loop.h"
+#include "base/synchronization/waitable_event.h"
+#include "base/threading/thread.h"
+#include "content/browser/browser_thread_impl.h"
+#include "content/browser/renderer_host/media/audio_mirroring_manager.h"
+#include "content/browser/renderer_host/media/web_contents_tracker.h"
+#include "media/audio/simple_sources.h"
+#include "media/audio/virtual_audio_input_stream.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using ::testing::_;
+using ::testing::Assign;
+using ::testing::DoAll;
+using ::testing::Invoke;
+using ::testing::InvokeWithoutArgs;
+using ::testing::NotNull;
+using ::testing::SaveArg;
+using ::testing::WithArgs;
+
+using media::AudioInputStream;
+using media::AudioOutputStream;
+using media::AudioParameters;
+using media::SineWaveAudioSource;
+using media::VirtualAudioInputStream;
+using media::VirtualAudioOutputStream;
+
+namespace content {
+
+namespace {
+
+const int kRenderProcessId = 123;
+const int kRenderViewId = 456;
+const int kAnotherRenderProcessId = 789;
+const int kAnotherRenderViewId = 1;
+
+const AudioParameters& TestAudioParameters() {
+ static const AudioParameters params(
+ AudioParameters::AUDIO_FAKE,
+ media::CHANNEL_LAYOUT_STEREO,
+ AudioParameters::kAudioCDSampleRate, 16,
+ AudioParameters::kAudioCDSampleRate / 100);
+ return params;
+}
+
+class MockAudioMirroringManager : public AudioMirroringManager {
+ public:
+ MockAudioMirroringManager() : AudioMirroringManager() {}
+ virtual ~MockAudioMirroringManager() {}
+
+ MOCK_METHOD3(StartMirroring,
+ void(int render_process_id, int render_view_id,
+ MirroringDestination* destination));
+ MOCK_METHOD3(StopMirroring,
+ void(int render_process_id, int render_view_id,
+ MirroringDestination* destination));
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(MockAudioMirroringManager);
+};
+
+class MockWebContentsTracker : public WebContentsTracker {
+ public:
+ MockWebContentsTracker() : WebContentsTracker() {}
+
+ MOCK_METHOD3(Start,
+ void(int render_process_id, int render_view_id,
+ const ChangeCallback& callback));
+ MOCK_METHOD0(Stop, void());
+
+ private:
+ virtual ~MockWebContentsTracker() {}
+
+ DISALLOW_COPY_AND_ASSIGN(MockWebContentsTracker);
+};
+
+// A fully-functional VirtualAudioInputStream, but methods are mocked to allow
+// tests to check how/when they are invoked.
+class MockVirtualAudioInputStream : public VirtualAudioInputStream {
+ public:
+ explicit MockVirtualAudioInputStream(
+ const scoped_refptr<base::MessageLoopProxy>& worker_loop)
+ : VirtualAudioInputStream(TestAudioParameters(), worker_loop,
+ VirtualAudioInputStream::AfterCloseCallback()),
+ real_(TestAudioParameters(), worker_loop,
+ base::Bind(&MockVirtualAudioInputStream::OnRealStreamHasClosed,
+ base::Unretained(this))),
+ real_stream_is_closed_(false) {
+ // Set default actions of mocked methods to delegate to the concrete
+ // implementation.
+ ON_CALL(*this, Open())
+ .WillByDefault(Invoke(&real_, &VirtualAudioInputStream::Open));
+ ON_CALL(*this, Start(_))
+ .WillByDefault(Invoke(&real_, &VirtualAudioInputStream::Start));
+ ON_CALL(*this, Stop())
+ .WillByDefault(Invoke(&real_, &VirtualAudioInputStream::Stop));
+ ON_CALL(*this, Close())
+ .WillByDefault(Invoke(&real_, &VirtualAudioInputStream::Close));
+ ON_CALL(*this, GetMaxVolume())
+ .WillByDefault(Invoke(&real_, &VirtualAudioInputStream::GetMaxVolume));
+ ON_CALL(*this, SetVolume(_))
+ .WillByDefault(Invoke(&real_, &VirtualAudioInputStream::SetVolume));
+ ON_CALL(*this, GetVolume())
+ .WillByDefault(Invoke(&real_, &VirtualAudioInputStream::GetVolume));
+ ON_CALL(*this, SetAutomaticGainControl(_))
+ .WillByDefault(
+ Invoke(&real_, &VirtualAudioInputStream::SetAutomaticGainControl));
+ ON_CALL(*this, GetAutomaticGainControl())
+ .WillByDefault(
+ Invoke(&real_, &VirtualAudioInputStream::GetAutomaticGainControl));
+ ON_CALL(*this, AddOutputStream(NotNull(), _))
+ .WillByDefault(
+ Invoke(&real_, &VirtualAudioInputStream::AddOutputStream));
+ ON_CALL(*this, RemoveOutputStream(NotNull(), _))
+ .WillByDefault(
+ Invoke(&real_, &VirtualAudioInputStream::RemoveOutputStream));
+ }
+
+ ~MockVirtualAudioInputStream() {
+ DCHECK(real_stream_is_closed_);
+ }
+
+ MOCK_METHOD0(Open, bool());
+ MOCK_METHOD1(Start, void(AudioInputStream::AudioInputCallback*));
+ MOCK_METHOD0(Stop, void());
+ MOCK_METHOD0(Close, void());
+ MOCK_METHOD0(GetMaxVolume, double());
+ MOCK_METHOD1(SetVolume, void(double));
+ MOCK_METHOD0(GetVolume, double());
+ MOCK_METHOD1(SetAutomaticGainControl, void(bool));
+ MOCK_METHOD0(GetAutomaticGainControl, bool());
+ MOCK_METHOD2(AddOutputStream, void(VirtualAudioOutputStream*,
+ const AudioParameters&));
+ MOCK_METHOD2(RemoveOutputStream, void(VirtualAudioOutputStream*,
+ const AudioParameters&));
+
+ private:
+ void OnRealStreamHasClosed(VirtualAudioInputStream* stream) {
+ DCHECK_EQ(&real_, stream);
+ DCHECK(!real_stream_is_closed_);
+ real_stream_is_closed_ = true;
+ }
+
+ VirtualAudioInputStream real_;
+ bool real_stream_is_closed_;
+
+ DISALLOW_COPY_AND_ASSIGN(MockVirtualAudioInputStream);
+};
+
+class MockAudioInputCallback : public AudioInputStream::AudioInputCallback {
+ public:
+ MockAudioInputCallback() {}
+
+ MOCK_METHOD5(OnData, void(AudioInputStream* stream, const uint8* src,
+ uint32 size, uint32 hardware_delay_bytes,
+ double volume));
+ MOCK_METHOD1(OnClose, void(AudioInputStream* stream));
+ MOCK_METHOD1(OnError, void(AudioInputStream* stream));
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(MockAudioInputCallback);
+};
+
+} // namespace
+
+class WebContentsAudioInputStreamTest : public testing::Test {
+ public:
+ WebContentsAudioInputStreamTest()
+ : audio_thread_("Audio thread"),
+ io_thread_(BrowserThread::IO),
+ mock_mirroring_manager_(new MockAudioMirroringManager()),
+ mock_tracker_(new MockWebContentsTracker()),
+ mock_vais_(NULL),
+ wcais_(NULL),
+ destination_(NULL),
+ current_render_process_id_(kRenderProcessId),
+ current_render_view_id_(kRenderViewId),
+ on_data_event_(false, false) {
+ audio_thread_.Start();
+ io_thread_.Start();
+ }
+
+ virtual ~WebContentsAudioInputStreamTest() {
+ audio_thread_.Stop();
+ io_thread_.Stop();
+
+ DCHECK(!mock_vais_);
+ DCHECK(!wcais_);
+ EXPECT_FALSE(destination_);
+ DCHECK(streams_.empty());
+ DCHECK(sources_.empty());
+ }
+
+ void Open() {
+ mock_vais_ =
+ new MockVirtualAudioInputStream(audio_thread_.message_loop_proxy());
+ EXPECT_CALL(*mock_vais_, Open());
+ EXPECT_CALL(*mock_vais_, Close()); // At Close() time.
+
+ ASSERT_EQ(kRenderProcessId, current_render_process_id_);
+ ASSERT_EQ(kRenderViewId, current_render_view_id_);
+ EXPECT_CALL(*mock_tracker_.get(), Start(kRenderProcessId, kRenderViewId, _))
+ .WillOnce(DoAll(
+ SaveArg<2>(&change_callback_),
+ WithArgs<0, 1>(Invoke(&change_callback_,
+ &WebContentsTracker::ChangeCallback::Run))));
+ EXPECT_CALL(*mock_tracker_.get(), Stop()); // At Close() time.
+
+ wcais_ = new WebContentsAudioInputStream(
+ current_render_process_id_, current_render_view_id_,
+ mock_mirroring_manager_.get(),
+ mock_tracker_, mock_vais_);
+ wcais_->Open();
+ }
+
+ void Start() {
+ EXPECT_CALL(*mock_vais_, Start(&mock_input_callback_));
+ EXPECT_CALL(*mock_vais_, Stop()); // At Stop() time.
+
+ EXPECT_CALL(*mock_mirroring_manager_,
+ StartMirroring(kRenderProcessId, kRenderViewId, NotNull()))
+ .WillOnce(SaveArg<2>(&destination_))
+ .RetiresOnSaturation();
+ // At Stop() time, or when the mirroring target changes:
+ EXPECT_CALL(*mock_mirroring_manager_,
+ StopMirroring(kRenderProcessId, kRenderViewId, NotNull()))
+ .WillOnce(Assign(
+ &destination_,
+ static_cast<AudioMirroringManager::MirroringDestination*>(NULL)))
+ .RetiresOnSaturation();
+
+ EXPECT_CALL(mock_input_callback_, OnData(NotNull(), NotNull(), _, _, _))
+ .WillRepeatedly(
+ InvokeWithoutArgs(&on_data_event_, &base::WaitableEvent::Signal));
+ EXPECT_CALL(mock_input_callback_, OnClose(_)); // At Stop() time.
+
+ wcais_->Start(&mock_input_callback_);
+
+ // Test plumbing of volume controls and automatic gain controls. Calls to
+ // wcais_ methods should delegate directly to mock_vais_.
+ EXPECT_CALL(*mock_vais_, GetVolume());
+ double volume = wcais_->GetVolume();
+ EXPECT_CALL(*mock_vais_, GetMaxVolume());
+ const double max_volume = wcais_->GetMaxVolume();
+ volume *= 2.0;
+ if (volume < max_volume) {
+ volume = max_volume;
+ }
+ EXPECT_CALL(*mock_vais_, SetVolume(volume));
+ wcais_->SetVolume(volume);
+ EXPECT_CALL(*mock_vais_, GetAutomaticGainControl());
+ bool auto_gain = wcais_->GetAutomaticGainControl();
+ auto_gain = !auto_gain;
+ EXPECT_CALL(*mock_vais_, SetAutomaticGainControl(auto_gain));
+ wcais_->SetAutomaticGainControl(auto_gain);
+ }
+
+ void AddAnotherInput() {
+ // Note: WCAIS posts a task to invoke
+ // MockAudioMirroringManager::StartMirroring() on the IO thread, which
+ // causes our mock to set |destination_|. Block until that has happened.
+ base::WaitableEvent done(false, false);
+ BrowserThread::PostTask(
+ BrowserThread::IO, FROM_HERE, base::Bind(
+ &base::WaitableEvent::Signal, base::Unretained(&done)));
+ done.Wait();
+ ASSERT_TRUE(destination_);
+
+ EXPECT_CALL(*mock_vais_, AddOutputStream(NotNull(), _))
+ .RetiresOnSaturation();
+ // Later, when stream is closed:
+ EXPECT_CALL(*mock_vais_, RemoveOutputStream(NotNull(), _))
+ .RetiresOnSaturation();
+
+ const AudioParameters& params = TestAudioParameters();
+ AudioOutputStream* const out = destination_->AddInput(params);
+ ASSERT_TRUE(out);
+ streams_.push_back(out);
+ EXPECT_TRUE(out->Open());
+ SineWaveAudioSource* const source = new SineWaveAudioSource(
+ params.channel_layout(), 200.0, params.sample_rate());
+ sources_.push_back(source);
+ out->Start(source);
+ }
+
+ void RemoveOneInputInFIFOOrder() {
+ ASSERT_FALSE(streams_.empty());
+ AudioOutputStream* const out = streams_.front();
+ streams_.pop_front();
+ out->Stop();
+ out->Close(); // Self-deletes.
+ ASSERT_TRUE(!sources_.empty());
+ delete sources_.front();
+ sources_.pop_front();
+ }
+
+ void ChangeMirroringTarget() {
+ const int next_render_process_id =
+ current_render_process_id_ == kRenderProcessId ?
+ kAnotherRenderProcessId : kRenderProcessId;
+ const int next_render_view_id =
+ current_render_view_id_ == kRenderViewId ?
+ kAnotherRenderViewId : kRenderViewId;
+
+ EXPECT_CALL(*mock_mirroring_manager_,
+ StartMirroring(next_render_process_id, next_render_view_id,
+ NotNull()))
+ .WillOnce(SaveArg<2>(&destination_))
+ .RetiresOnSaturation();
+ // At Stop() time, or when the mirroring target changes:
+ EXPECT_CALL(*mock_mirroring_manager_,
+ StopMirroring(next_render_process_id, next_render_view_id,
+ NotNull()))
+ .WillOnce(Assign(
+ &destination_,
+ static_cast<AudioMirroringManager::MirroringDestination*>(NULL)))
+ .RetiresOnSaturation();
+
+ // Simulate OnTargetChange() callback from WebContentsTracker.
+ EXPECT_FALSE(change_callback_.is_null());
+ change_callback_.Run(next_render_process_id, next_render_view_id);
+
+ current_render_process_id_ = next_render_process_id;
+ current_render_view_id_ = next_render_view_id;
+ }
+
+ void LoseMirroringTarget() {
+ EXPECT_CALL(mock_input_callback_, OnError(_));
+
+ // Simulate OnTargetChange() callback from WebContentsTracker.
+ EXPECT_FALSE(change_callback_.is_null());
+ change_callback_.Run(-1, -1);
+ }
+
+ void Stop() {
+ wcais_->Stop();
+ }
+
+ void Close() {
+ // WebContentsAudioInputStream self-destructs on Close(). Its internal
+ // objects hang around until they are no longer referred to (e.g., as tasks
+ // on other threads shut things down).
+ wcais_->Close();
+ wcais_ = NULL;
+ mock_vais_ = NULL;
+ }
+
+ void RunOnAudioThread(const base::Closure& closure) {
+ audio_thread_.message_loop()->PostTask(FROM_HERE, closure);
+ }
+
+ // Block the calling thread until OnData() callbacks are being made.
+ void WaitForData() {
+    // Note: Arbitrarily chosen, but more iterations cause tests to take
+ // significantly more time.
+ static const int kNumIterations = 3;
+ for (int i = 0; i < kNumIterations; ++i)
+ on_data_event_.Wait();
+ }
+
+ private:
+ base::Thread audio_thread_;
+ BrowserThreadImpl io_thread_;
+
+ scoped_ptr<MockAudioMirroringManager> mock_mirroring_manager_;
+ scoped_refptr<MockWebContentsTracker> mock_tracker_;
+
+ MockVirtualAudioInputStream* mock_vais_; // Owned by wcais_.
+ WebContentsAudioInputStream* wcais_; // Self-destructs on Close().
+
+ // Mock consumer of audio data.
+ MockAudioInputCallback mock_input_callback_;
+
+ // Provided by WebContentsAudioInputStream to the mock WebContentsTracker.
+ // This callback is saved here, and test code will invoke it to simulate
+ // target change events.
+ WebContentsTracker::ChangeCallback change_callback_;
+
+ // Provided by WebContentsAudioInputStream to the mock AudioMirroringManager.
+ // A pointer to the implementation is saved here, and test code will invoke it
+ // to simulate: 1) calls to AddInput(); and 2) diverting audio data.
+ AudioMirroringManager::MirroringDestination* destination_;
+
+  // Current target RenderView. These get flipped in ChangeMirroringTarget().
+ int current_render_process_id_;
+ int current_render_view_id_;
+
+ // Streams provided by calls to WebContentsAudioInputStream::AddInput(). Each
+ // is started with a simulated source of audio data.
+ std::list<AudioOutputStream*> streams_;
+ std::list<SineWaveAudioSource*> sources_; // 1:1 with elements in streams_.
+
+ base::WaitableEvent on_data_event_;
+
+ DISALLOW_COPY_AND_ASSIGN(WebContentsAudioInputStreamTest);
+};
+
+#define RUN_ON_AUDIO_THREAD(method) \
+ RunOnAudioThread(base::Bind(&WebContentsAudioInputStreamTest::method, \
+ base::Unretained(this)))
+
+TEST_F(WebContentsAudioInputStreamTest, OpenedButNeverStarted) {
+ RUN_ON_AUDIO_THREAD(Open);
+ RUN_ON_AUDIO_THREAD(Close);
+}
+
+TEST_F(WebContentsAudioInputStreamTest, MirroringNothing) {
+ RUN_ON_AUDIO_THREAD(Open);
+ RUN_ON_AUDIO_THREAD(Start);
+ WaitForData();
+ RUN_ON_AUDIO_THREAD(Stop);
+ RUN_ON_AUDIO_THREAD(Close);
+}
+
+TEST_F(WebContentsAudioInputStreamTest, MirroringOutputOutlivesSession) {
+ RUN_ON_AUDIO_THREAD(Open);
+ RUN_ON_AUDIO_THREAD(Start);
+ RUN_ON_AUDIO_THREAD(AddAnotherInput);
+ WaitForData();
+ RUN_ON_AUDIO_THREAD(Stop);
+ RUN_ON_AUDIO_THREAD(Close);
+ RUN_ON_AUDIO_THREAD(RemoveOneInputInFIFOOrder);
+}
+
+TEST_F(WebContentsAudioInputStreamTest, MirroringOutputWithinSession) {
+ RUN_ON_AUDIO_THREAD(Open);
+ RUN_ON_AUDIO_THREAD(Start);
+ RUN_ON_AUDIO_THREAD(AddAnotherInput);
+ WaitForData();
+ RUN_ON_AUDIO_THREAD(RemoveOneInputInFIFOOrder);
+ RUN_ON_AUDIO_THREAD(Stop);
+ RUN_ON_AUDIO_THREAD(Close);
+}
+
+TEST_F(WebContentsAudioInputStreamTest, MirroringNothingWithTargetChange) {
+ RUN_ON_AUDIO_THREAD(Open);
+ RUN_ON_AUDIO_THREAD(Start);
+ RUN_ON_AUDIO_THREAD(ChangeMirroringTarget);
+ RUN_ON_AUDIO_THREAD(Stop);
+ RUN_ON_AUDIO_THREAD(Close);
+}
+
+TEST_F(WebContentsAudioInputStreamTest, MirroringOneStreamAfterTargetChange) {
+ RUN_ON_AUDIO_THREAD(Open);
+ RUN_ON_AUDIO_THREAD(Start);
+ RUN_ON_AUDIO_THREAD(ChangeMirroringTarget);
+ RUN_ON_AUDIO_THREAD(AddAnotherInput);
+ WaitForData();
+ RUN_ON_AUDIO_THREAD(Stop);
+ RUN_ON_AUDIO_THREAD(Close);
+ RUN_ON_AUDIO_THREAD(RemoveOneInputInFIFOOrder);
+}
+
+TEST_F(WebContentsAudioInputStreamTest, MirroringOneStreamWithTargetChange) {
+ RUN_ON_AUDIO_THREAD(Open);
+ RUN_ON_AUDIO_THREAD(Start);
+ RUN_ON_AUDIO_THREAD(AddAnotherInput);
+ WaitForData();
+ RUN_ON_AUDIO_THREAD(ChangeMirroringTarget);
+ RUN_ON_AUDIO_THREAD(RemoveOneInputInFIFOOrder);
+ RUN_ON_AUDIO_THREAD(AddAnotherInput);
+ WaitForData();
+ RUN_ON_AUDIO_THREAD(Stop);
+ RUN_ON_AUDIO_THREAD(Close);
+ RUN_ON_AUDIO_THREAD(RemoveOneInputInFIFOOrder);
+}
+
+TEST_F(WebContentsAudioInputStreamTest, MirroringLostTarget) {
+ RUN_ON_AUDIO_THREAD(Open);
+ RUN_ON_AUDIO_THREAD(Start);
+ RUN_ON_AUDIO_THREAD(AddAnotherInput);
+ WaitForData();
+ RUN_ON_AUDIO_THREAD(LoseMirroringTarget);
+ RUN_ON_AUDIO_THREAD(RemoveOneInputInFIFOOrder);
+ RUN_ON_AUDIO_THREAD(Stop);
+ RUN_ON_AUDIO_THREAD(Close);
+}
+
+TEST_F(WebContentsAudioInputStreamTest, MirroringMultipleStreamsAndTargets) {
+ RUN_ON_AUDIO_THREAD(Open);
+ RUN_ON_AUDIO_THREAD(Start);
+ RUN_ON_AUDIO_THREAD(AddAnotherInput);
+ WaitForData();
+ RUN_ON_AUDIO_THREAD(AddAnotherInput);
+ RUN_ON_AUDIO_THREAD(AddAnotherInput);
+ RUN_ON_AUDIO_THREAD(AddAnotherInput);
+ WaitForData();
+ RUN_ON_AUDIO_THREAD(ChangeMirroringTarget);
+ RUN_ON_AUDIO_THREAD(RemoveOneInputInFIFOOrder);
+ WaitForData();
+ RUN_ON_AUDIO_THREAD(RemoveOneInputInFIFOOrder);
+ RUN_ON_AUDIO_THREAD(RemoveOneInputInFIFOOrder);
+ RUN_ON_AUDIO_THREAD(AddAnotherInput);
+ WaitForData();
+ RUN_ON_AUDIO_THREAD(RemoveOneInputInFIFOOrder);
+ WaitForData();
+ RUN_ON_AUDIO_THREAD(ChangeMirroringTarget);
+ RUN_ON_AUDIO_THREAD(RemoveOneInputInFIFOOrder);
+ RUN_ON_AUDIO_THREAD(Stop);
+ RUN_ON_AUDIO_THREAD(Close);
+}
+
+} // namespace content
diff --git a/chromium/content/browser/renderer_host/media/web_contents_capture_util.cc b/chromium/content/browser/renderer_host/media/web_contents_capture_util.cc
new file mode 100644
index 00000000000..28b0fa6c8dc
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/web_contents_capture_util.cc
@@ -0,0 +1,59 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "content/browser/renderer_host/media/web_contents_capture_util.h"
+
+#include "base/basictypes.h"
+#include "base/strings/string_number_conversions.h"
+#include "base/strings/string_piece.h"
+#include "base/strings/string_util.h"
+
+namespace {
+
+const char kVirtualDeviceScheme[] = "virtual-media-stream://";
+
+} // namespace
+
+namespace content {
+
+std::string WebContentsCaptureUtil::AppendWebContentsDeviceScheme(
+ const std::string& device_id) {
+ return kVirtualDeviceScheme + device_id;
+}
+
+std::string WebContentsCaptureUtil::StripWebContentsDeviceScheme(
+ const std::string& device_id) {
+ return (IsWebContentsDeviceId(device_id) ?
+ device_id.substr(arraysize(kVirtualDeviceScheme) - 1) :
+ device_id);
+}
+
+bool WebContentsCaptureUtil::IsWebContentsDeviceId(
+ const std::string& device_id) {
+ return StartsWithASCII(device_id, kVirtualDeviceScheme, true);
+}
+
+bool WebContentsCaptureUtil::ExtractTabCaptureTarget(
+ const std::string& device_id_param,
+ int* render_process_id,
+ int* render_view_id) {
+ if (!IsWebContentsDeviceId(device_id_param))
+ return false;
+
+ const std::string device_id = device_id_param.substr(
+ arraysize(kVirtualDeviceScheme) - 1);
+
+ const size_t sep_pos = device_id.find(':');
+ if (sep_pos == std::string::npos)
+ return false;
+
+ const base::StringPiece component1(device_id.data(), sep_pos);
+ const base::StringPiece component2(device_id.data() + sep_pos + 1,
+ device_id.length() - sep_pos - 1);
+
+ return (base::StringToInt(component1, render_process_id) &&
+ base::StringToInt(component2, render_view_id));
+}
+
+} // namespace content
diff --git a/chromium/content/browser/renderer_host/media/web_contents_capture_util.h b/chromium/content/browser/renderer_host/media/web_contents_capture_util.h
new file mode 100644
index 00000000000..8f376afdeec
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/web_contents_capture_util.h
@@ -0,0 +1,35 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CONTENT_BROWSER_RENDERER_HOST_MEDIA_WEB_CONTENTS_CAPTURE_UTIL_H_
+#define CONTENT_BROWSER_RENDERER_HOST_MEDIA_WEB_CONTENTS_CAPTURE_UTIL_H_
+
+#include <string>
+
+#include "content/common/content_export.h"
+
+namespace content {
+
+class CONTENT_EXPORT WebContentsCaptureUtil {
+ public:
+  // Returns a new id with the device id scheme for virtual streams prefixed
+  // to |device_id_param|.
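+  // For example, an id "<id>" becomes "virtual-media-stream://<id>";
+  // StripWebContentsDeviceScheme() undoes this.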
+ static std::string AppendWebContentsDeviceScheme(
+ const std::string& device_id_param);
+
+ static std::string StripWebContentsDeviceScheme(
+ const std::string& device_id_param);
+
+ // Check whether the device id indicates that this is a web contents stream.
+ static bool IsWebContentsDeviceId(const std::string& device_id);
+
+  // Extracts the target renderer IDs (render process id and render view id)
+  // from a tab media stream request's device id.
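+  // For example (illustrative values), "virtual-media-stream://123:456"
+  // yields *render_process_id == 123 and *render_view_id == 456.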
+ static bool ExtractTabCaptureTarget(const std::string& device_id,
+ int* render_process_id,
+ int* render_view_id);
+};
+
+} // namespace content
+
+#endif // CONTENT_BROWSER_RENDERER_HOST_MEDIA_WEB_CONTENTS_CAPTURE_UTIL_H_
diff --git a/chromium/content/browser/renderer_host/media/web_contents_tracker.cc b/chromium/content/browser/renderer_host/media/web_contents_tracker.cc
new file mode 100644
index 00000000000..3a75080cb46
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/web_contents_tracker.cc
@@ -0,0 +1,102 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "content/browser/renderer_host/media/web_contents_tracker.h"
+
+#include "base/message_loop/message_loop_proxy.h"
+#include "content/public/browser/browser_thread.h"
+#include "content/public/browser/render_process_host.h"
+#include "content/public/browser/render_view_host.h"
+#include "content/public/browser/web_contents.h"
+
+namespace content {
+
+WebContentsTracker::WebContentsTracker() {}
+
+WebContentsTracker::~WebContentsTracker() {
+  DCHECK(!web_contents()) << "BUG: Still observing!";
+}
+
+void WebContentsTracker::Start(int render_process_id, int render_view_id,
+ const ChangeCallback& callback) {
+ DCHECK(!message_loop_.get() || message_loop_->BelongsToCurrentThread());
+
+ message_loop_ = base::MessageLoopProxy::current();
+ DCHECK(message_loop_.get());
+ callback_ = callback;
+
+ BrowserThread::PostTask(
+ BrowserThread::UI, FROM_HERE,
+ base::Bind(&WebContentsTracker::LookUpAndObserveWebContents, this,
+ render_process_id, render_view_id));
+}
+
+void WebContentsTracker::Stop() {
+ DCHECK(message_loop_->BelongsToCurrentThread());
+
+ callback_.Reset();
+
+ BrowserThread::PostTask(
+ BrowserThread::UI, FROM_HERE,
+ base::Bind(&WebContentsTracker::Observe, this,
+ static_cast<WebContents*>(NULL)));
+}
+
+void WebContentsTracker::OnWebContentsChangeEvent() {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
+
+ WebContents* const wc = web_contents();
+ RenderViewHost* const rvh = wc ? wc->GetRenderViewHost() : NULL;
+ RenderProcessHost* const rph = rvh ? rvh->GetProcess() : NULL;
+
+ const int render_process_id = rph ? rph->GetID() : MSG_ROUTING_NONE;
+ const int render_view_id = rvh ? rvh->GetRoutingID() : MSG_ROUTING_NONE;
+
+ message_loop_->PostTask(FROM_HERE,
+ base::Bind(&WebContentsTracker::MaybeDoCallback, this,
+ render_process_id, render_view_id));
+}
+
+void WebContentsTracker::MaybeDoCallback(int render_process_id,
+ int render_view_id) {
+ DCHECK(message_loop_->BelongsToCurrentThread());
+
+ if (!callback_.is_null())
+ callback_.Run(render_process_id, render_view_id);
+}
+
+void WebContentsTracker::LookUpAndObserveWebContents(int render_process_id,
+ int render_view_id) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
+
+ RenderViewHost* const rvh =
+ RenderViewHost::FromID(render_process_id, render_view_id);
+ DVLOG_IF(1, !rvh) << "RenderViewHost::FromID("
+ << render_process_id << ", " << render_view_id
+ << ") returned NULL.";
+ Observe(rvh ? WebContents::FromRenderViewHost(rvh) : NULL);
+ DVLOG_IF(1, !web_contents())
+ << "WebContents::FromRenderViewHost(" << rvh << ") returned NULL.";
+
+ OnWebContentsChangeEvent();
+}
+
+void WebContentsTracker::RenderViewReady() {
+ OnWebContentsChangeEvent();
+}
+
+void WebContentsTracker::AboutToNavigateRenderView(RenderViewHost* rvh) {
+ OnWebContentsChangeEvent();
+}
+
+void WebContentsTracker::DidNavigateMainFrame(
+ const LoadCommittedDetails& details, const FrameNavigateParams& params) {
+ OnWebContentsChangeEvent();
+}
+
+void WebContentsTracker::WebContentsDestroyed(WebContents* web_contents) {
+ OnWebContentsChangeEvent();
+}
+
+} // namespace content
diff --git a/chromium/content/browser/renderer_host/media/web_contents_tracker.h b/chromium/content/browser/renderer_host/media/web_contents_tracker.h
new file mode 100644
index 00000000000..632ced28be7
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/web_contents_tracker.h
@@ -0,0 +1,86 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// Given a starting render_process_id and render_view_id, the WebContentsTracker
+// tracks RenderViewHost instance swapping during the lifetime of a WebContents
+// instance. This is used when mirroring tab video and audio so that user
+// navigations, crashes, etc., during a tab's lifetime allow the capturing code
+// to remain active on the current/latest RenderView.
+//
+// Threading issues: Start(), Stop() and the ChangeCallback are invoked on the
+// same thread. This can be any thread, and the decision is locked-in by
+// WebContentsTracker when Start() is called.
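+//
+// Illustrative usage sketch (OnTargetChanged is a placeholder function taking
+// the two ids):
+//
+//   scoped_refptr<WebContentsTracker> tracker = new WebContentsTracker();
+//   tracker->Start(render_process_id, render_view_id,
+//                  base::Bind(&OnTargetChanged));
+//   // OnTargetChanged() runs on this thread whenever the target swaps.
+//   tracker->Stop();  // No callback will be invoked after this returns.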
+
+#ifndef CONTENT_BROWSER_RENDERER_HOST_MEDIA_WEB_CONTENTS_TRACKER_H_
+#define CONTENT_BROWSER_RENDERER_HOST_MEDIA_WEB_CONTENTS_TRACKER_H_
+
+#include "base/callback.h"
+#include "base/memory/ref_counted.h"
+#include "content/common/content_export.h"
+#include "content/public/browser/web_contents_observer.h"
+
+namespace base {
+class MessageLoopProxy;
+}
+
+namespace content {
+
+class CONTENT_EXPORT WebContentsTracker
+ : public base::RefCountedThreadSafe<WebContentsTracker>,
+ public WebContentsObserver {
+ public:
+ WebContentsTracker();
+
+ // Callback for whenever the target is swapped. The callback is also invoked
+ // with both arguments set to MSG_ROUTING_NONE to indicate tracking will not
+ // continue (i.e., the WebContents instance was not found or has been
+ // destroyed).
+ typedef base::Callback<void(int render_process_id, int render_view_id)>
+ ChangeCallback;
+
+ // Start tracking. The last-known |render_process_id| and |render_view_id|
+ // are provided, and the given callback is invoked asynchronously one or more
+ // times. The callback will be invoked on the same thread calling Start().
+ virtual void Start(int render_process_id, int render_view_id,
+ const ChangeCallback& callback);
+
+ // Stop tracking. Once this method returns, the callback is guaranteed not to
+ // be invoked again.
+ virtual void Stop();
+
+ protected:
+ friend class base::RefCountedThreadSafe<WebContentsTracker>;
+ virtual ~WebContentsTracker();
+
+ private:
+ // Reads the render_process_id/render_view_id from the current WebContents
+ // instance and then invokes the callback.
+ void OnWebContentsChangeEvent();
+
+  // Called on the thread that Start()/Stop() are called on; checks whether
+  // the callback is still valid and, if so, invokes it.
+ void MaybeDoCallback(int render_process_id, int render_view_id);
+
+  // Looks up the current WebContents instance associated with the given
+  // |render_process_id| and |render_view_id| and begins observing it.
+ void LookUpAndObserveWebContents(int render_process_id,
+ int render_view_id);
+
+ // WebContentsObserver overrides to react to events of interest.
+ virtual void RenderViewReady() OVERRIDE;
+ virtual void AboutToNavigateRenderView(RenderViewHost* render_view_host)
+ OVERRIDE;
+ virtual void DidNavigateMainFrame(const LoadCommittedDetails& details,
+ const FrameNavigateParams& params) OVERRIDE;
+ virtual void WebContentsDestroyed(WebContents* web_contents) OVERRIDE;
+
+ scoped_refptr<base::MessageLoopProxy> message_loop_;
+ ChangeCallback callback_;
+
+ DISALLOW_COPY_AND_ASSIGN(WebContentsTracker);
+};
+
+} // namespace content
+
+#endif // CONTENT_BROWSER_RENDERER_HOST_MEDIA_WEB_CONTENTS_TRACKER_H_
diff --git a/chromium/content/browser/renderer_host/media/web_contents_video_capture_device.cc b/chromium/content/browser/renderer_host/media/web_contents_video_capture_device.cc
new file mode 100644
index 00000000000..e27b703081e
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/web_contents_video_capture_device.cc
@@ -0,0 +1,1278 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// Implementation notes: This needs to work on a variety of hardware
+// configurations where the speed of the CPU and GPU greatly affect overall
+// performance. Spanning several threads, the process of capturing has been
+// split up into four conceptual stages:
+//
+// 1. Reserve Buffer: Before a frame can be captured, a slot in the consumer's
+// shared-memory IPC buffer is reserved. There are only a few of these;
+// when they run out, it indicates that the downstream consumer -- likely a
+// video encoder -- is the performance bottleneck, and that the rate of
+// frame capture should be throttled back.
+//
+// 2. Capture: A bitmap is snapshotted/copied from the RenderView's backing
+// store. This is initiated on the UI BrowserThread, and often occurs
+// asynchronously. Where supported, the GPU scales and color converts
+// frames to our desired size, and the readback happens directly into the
+// shared-memory buffer. But this is not always possible, particularly when
+// accelerated compositing is disabled.
+//
+// 3. Render (if needed): If the web contents cannot be captured directly into
+// our target size and color format, scaling and colorspace conversion must
+// be done on the CPU. A dedicated thread is used for this operation, to
+// avoid blocking the UI thread. The Render stage always reads from a
+// bitmap returned by Capture, and writes into the reserved slot in the
+// shared-memory buffer.
+//
+// 4. Deliver: The rendered video frame is returned to the consumer (which
+// implements the VideoCaptureDevice::EventHandler interface). Because
+// all paths have written the frame into the IPC buffer, this step should
+// never need to do an additional copy of the pixel data.
+//
+// In the best-performing case, the Render step is bypassed: Capture produces
+// ready-to-Deliver frames. But when accelerated readback is not possible, the
+// system is designed so that Capture and Render may run concurrently. A timing
+// diagram helps illustrate this point (@30 FPS):
+//
+// Time: 0ms 33ms 66ms 99ms
+// thread1: |-Capture-f1------v |-Capture-f2------v |-Capture-f3----v |-Capt
+// thread2: |-Render-f1-----v |-Render-f2-----v |-Render-f3
+//
+// In the above example, both capturing and rendering *each* take almost the
+// full 33 ms available between frames, yet we see that the required throughput
+// is obtained.
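+// (Concretely: if Capture and Render each took 30 ms, running them serially
+// would need 60 ms per frame, i.e. only ~16 FPS; overlapping them on the two
+// threads keeps each stage within its own 33 ms budget, sustaining 30 FPS.)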
+//
+// Turning on verbose logging will cause the effective frame rate to be logged
+// at 5-second intervals.
+
+#include "content/browser/renderer_host/media/web_contents_video_capture_device.h"
+
+#include <algorithm>
+#include <list>
+#include <string>
+
+#include "base/basictypes.h"
+#include "base/bind.h"
+#include "base/bind_helpers.h"
+#include "base/callback_forward.h"
+#include "base/debug/trace_event.h"
+#include "base/logging.h"
+#include "base/memory/scoped_ptr.h"
+#include "base/memory/weak_ptr.h"
+#include "base/message_loop/message_loop_proxy.h"
+#include "base/metrics/histogram.h"
+#include "base/sequenced_task_runner.h"
+#include "base/strings/stringprintf.h"
+#include "base/synchronization/lock.h"
+#include "base/threading/thread.h"
+#include "base/threading/thread_checker.h"
+#include "base/time/time.h"
+#include "content/browser/renderer_host/media/video_capture_oracle.h"
+#include "content/browser/renderer_host/media/web_contents_capture_util.h"
+#include "content/browser/renderer_host/render_widget_host_impl.h"
+#include "content/browser/web_contents/web_contents_impl.h"
+#include "content/port/browser/render_widget_host_view_frame_subscriber.h"
+#include "content/port/browser/render_widget_host_view_port.h"
+#include "content/public/browser/browser_thread.h"
+#include "content/public/browser/notification_source.h"
+#include "content/public/browser/notification_types.h"
+#include "content/public/browser/render_process_host.h"
+#include "content/public/browser/render_view_host.h"
+#include "content/public/browser/render_widget_host_view.h"
+#include "content/public/browser/web_contents.h"
+#include "content/public/browser/web_contents_observer.h"
+#include "media/base/bind_to_loop.h"
+#include "media/base/video_frame.h"
+#include "media/base/video_util.h"
+#include "media/base/yuv_convert.h"
+#include "media/video/capture/video_capture_types.h"
+#include "skia/ext/image_operations.h"
+#include "third_party/skia/include/core/SkBitmap.h"
+#include "third_party/skia/include/core/SkColor.h"
+#include "ui/gfx/rect.h"
+#include "ui/gfx/skia_util.h"
+
+namespace content {
+
+namespace {
+
+const int kMinFrameWidth = 2;
+const int kMinFrameHeight = 2;
+const int kMaxFramesInFlight = 2;
+const int kMaxSnapshotsInFlight = 1;
+
+// TODO(nick): Remove this once frame subscription is supported on Aura and
+// Linux.
+#if (defined(OS_WIN) || defined(OS_MACOSX)) || defined(USE_AURA)
+const bool kAcceleratedSubscriberIsSupported = true;
+#else
+const bool kAcceleratedSubscriberIsSupported = false;
+#endif
+
+// Returns the nearest even integer less than or equal to |x| (clears the
+// lowest bit).
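+// For example, MakeEven(5) == 4 and MakeEven(4) == 4.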
+template<typename IntType>
+IntType MakeEven(IntType x) {
+ return x & static_cast<IntType>(-2);
+}
+
+// Compute a letterbox region, aligned to even coordinates.
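+// For example (illustrative sizes, assuming the usual centering behavior of
+// media::ComputeLetterboxRegion): 16:9 content in a 320x240 frame maps to the
+// region at (0, 30) with size 320x180.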
+gfx::Rect ComputeYV12LetterboxRegion(const gfx::Size& frame_size,
+ const gfx::Size& content_size) {
+
+ gfx::Rect result = media::ComputeLetterboxRegion(gfx::Rect(frame_size),
+ content_size);
+
+ result.set_x(MakeEven(result.x()));
+ result.set_y(MakeEven(result.y()));
+ result.set_width(std::max(kMinFrameWidth, MakeEven(result.width())));
+ result.set_height(std::max(kMinFrameHeight, MakeEven(result.height())));
+
+ return result;
+}
+
+// Thread-safe, refcounted proxy to the VideoCaptureOracle. This proxy wraps
+// the VideoCaptureOracle, which decides which frames to capture, and a
+// VideoCaptureDevice::EventHandler, which allocates and receives the captured
+// frames, in a lock to synchronize state between the two.
+class ThreadSafeCaptureOracle
+ : public base::RefCountedThreadSafe<ThreadSafeCaptureOracle> {
+ public:
+ ThreadSafeCaptureOracle(media::VideoCaptureDevice::EventHandler* consumer,
+ scoped_ptr<VideoCaptureOracle> oracle);
+
+ bool ObserveEventAndDecideCapture(
+ VideoCaptureOracle::Event event,
+ base::Time event_time,
+ scoped_refptr<media::VideoFrame>* storage,
+ RenderWidgetHostViewFrameSubscriber::DeliverFrameCallback* callback);
+
+ base::TimeDelta capture_period() const {
+ return oracle_->capture_period();
+ }
+
+ // Allow new captures to start occurring.
+ void Start();
+
+ // Stop new captures from happening (but doesn't forget the consumer).
+ void Stop();
+
+ // Signal an error to the consumer.
+ void ReportError();
+
+ // Permanently stop capturing. Immediately cease all activity on the
+ // VCD::EventHandler.
+ void InvalidateConsumer();
+
+ private:
+ friend class base::RefCountedThreadSafe<ThreadSafeCaptureOracle>;
+ virtual ~ThreadSafeCaptureOracle() {}
+
+ // Callback invoked on completion of all captures.
+ void DidCaptureFrame(const scoped_refptr<media::VideoFrame>& frame,
+ int frame_number,
+ base::Time timestamp,
+ bool success);
+
+ // Protects everything below it.
+ base::Lock lock_;
+
+ // Recipient of our capture activity. Becomes null after it is invalidated.
+ media::VideoCaptureDevice::EventHandler* consumer_;
+
+ // Makes the decision to capture a frame.
+ const scoped_ptr<VideoCaptureOracle> oracle_;
+
+ // Whether capturing is currently allowed. Can toggle back and forth.
+ bool is_started_;
+};
+
+// FrameSubscriber is a proxy to the ThreadSafeCaptureOracle that's compatible
+// with RenderWidgetHostViewFrameSubscriber. We create one per event type.
+class FrameSubscriber : public RenderWidgetHostViewFrameSubscriber {
+ public:
+ FrameSubscriber(VideoCaptureOracle::Event event_type,
+ const scoped_refptr<ThreadSafeCaptureOracle>& oracle)
+ : event_type_(event_type),
+ oracle_proxy_(oracle) {}
+
+ virtual bool ShouldCaptureFrame(
+ base::Time present_time,
+ scoped_refptr<media::VideoFrame>* storage,
+ RenderWidgetHostViewFrameSubscriber::DeliverFrameCallback*
+ deliver_frame_cb) OVERRIDE;
+
+ private:
+ const VideoCaptureOracle::Event event_type_;
+ scoped_refptr<ThreadSafeCaptureOracle> oracle_proxy_;
+};
+
+// ContentCaptureSubscription is the relationship between a RenderWidgetHost
+// whose content is updating, a subscriber that is deciding which of these
+// updates to capture (and where to deliver them to), and a callback that
+// knows how to do the capture and prepare the result for delivery.
+//
+// In practice, this means (a) installing a RenderWidgetHostFrameSubscriber in
+// the RenderWidgetHostView, to process updates that occur via accelerated
+// compositing, (b) installing itself as an observer of updates to the
+// RenderWidgetHost's backing store, to hook updates that occur via software
+// rendering, and (c) running a timer to possibly initiate non-event-driven
+// captures that the subscriber might request.
+//
+// All of this happens on the UI thread, although the
+// RenderWidgetHostViewFrameSubscriber we install may be dispatching updates
+// autonomously on some other thread.
+class ContentCaptureSubscription : public content::NotificationObserver {
+ public:
+ typedef base::Callback<void(
+ const base::Time&,
+ const scoped_refptr<media::VideoFrame>&,
+ const RenderWidgetHostViewFrameSubscriber::DeliverFrameCallback&)>
+ CaptureCallback;
+
+ // Create a subscription. Whenever a manual capture is required, the
+ // subscription will invoke |capture_callback| on the UI thread to do the
+ // work.
+ ContentCaptureSubscription(
+ const RenderWidgetHost& source,
+ const scoped_refptr<ThreadSafeCaptureOracle>& oracle_proxy,
+ const CaptureCallback& capture_callback);
+ virtual ~ContentCaptureSubscription();
+
+ // content::NotificationObserver implementation.
+ virtual void Observe(int type,
+ const content::NotificationSource& source,
+ const content::NotificationDetails& details) OVERRIDE;
+
+ private:
+ void OnTimer();
+
+ const int render_process_id_;
+ const int render_view_id_;
+
+ FrameSubscriber paint_subscriber_;
+ FrameSubscriber timer_subscriber_;
+ content::NotificationRegistrar registrar_;
+ CaptureCallback capture_callback_;
+ base::Timer timer_;
+
+ DISALLOW_COPY_AND_ASSIGN(ContentCaptureSubscription);
+};
+
+// Render the SkBitmap |input| into the given VideoFrame buffer |output|, then
+// invoke |done_cb| to indicate success or failure. |input| is expected to be
+// ARGB. |output| must be YV12 or I420. Colorspace conversion is always done.
+// Scaling and letterboxing will be done as needed.
+//
+// This software implementation should be used only when GPU acceleration of
+// these activities is not possible. This operation may be expensive (tens to
+// hundreds of milliseconds), so the caller should ensure that it runs on a
+// thread where such a pause would not cause UI jank.
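+//
+// For example (illustrative sizes, assuming the letterbox region is computed
+// as in ComputeYV12LetterboxRegion() above): rendering a 1024x768 ARGB |input|
+// into a 640x360 YV12 |output| scales the bitmap to 480x360 and centers it,
+// leaving 80-pixel bars on the left and right.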
+void RenderVideoFrame(const SkBitmap& input,
+ const scoped_refptr<media::VideoFrame>& output,
+ const base::Callback<void(bool)>& done_cb);
+
+// Keeps track of the RenderView to be sourced, and executes copying of the
+// backing store on the UI BrowserThread.
+//
+// TODO(nick): It would be nice to merge this with WebContentsTracker, but its
+// implementation is currently asynchronous -- in our case, the "rvh changed"
+// notification would get posted back to the UI thread and processed later, and
+// this seems disadvantageous.
+class CaptureMachine : public WebContentsObserver,
+ public base::SupportsWeakPtr<CaptureMachine> {
+ public:
+ virtual ~CaptureMachine();
+
+ // Creates a CaptureMachine. Must be run on the UI BrowserThread. Returns
+ // NULL if the indicated render view cannot be found.
+ static scoped_ptr<CaptureMachine> Create(
+ int render_process_id,
+ int render_view_id,
+ const scoped_refptr<base::SequencedTaskRunner>& render_task_runner,
+ const scoped_refptr<ThreadSafeCaptureOracle>& oracle_proxy);
+
+ // Starts a copy from the backing store or the composited surface. Must be run
+ // on the UI BrowserThread. |deliver_frame_cb| will be run when the operation
+ // completes. The copy will occur to |target|.
+ //
+ // This may be used as a ContentCaptureSubscription::CaptureCallback.
+ void Capture(
+ const base::Time& start_time,
+ const scoped_refptr<media::VideoFrame>& target,
+ const RenderWidgetHostViewFrameSubscriber::DeliverFrameCallback&
+ deliver_frame_cb);
+
+ // content::WebContentsObserver implementation.
+ virtual void DidShowFullscreenWidget(int routing_id) OVERRIDE {
+ fullscreen_widget_id_ = routing_id;
+ RenewFrameSubscription();
+ }
+
+ virtual void DidDestroyFullscreenWidget(int routing_id) OVERRIDE {
+ DCHECK_EQ(fullscreen_widget_id_, routing_id);
+ fullscreen_widget_id_ = MSG_ROUTING_NONE;
+ RenewFrameSubscription();
+ }
+
+ virtual void RenderViewReady() OVERRIDE {
+ RenewFrameSubscription();
+ }
+
+ virtual void AboutToNavigateRenderView(RenderViewHost* rvh) OVERRIDE {
+ RenewFrameSubscription();
+ }
+
+ virtual void DidNavigateMainFrame(
+ const LoadCommittedDetails& details,
+ const FrameNavigateParams& params) OVERRIDE {
+ RenewFrameSubscription();
+ }
+
+ virtual void WebContentsDestroyed(WebContents* web_contents) OVERRIDE;
+
+ private:
+ CaptureMachine(
+ const scoped_refptr<base::SequencedTaskRunner>& render_task_runner,
+ const scoped_refptr<ThreadSafeCaptureOracle>& oracle_proxy);
+
+ // Starts observing the web contents, returning false if lookup fails.
+ bool StartObservingWebContents(int initial_render_process_id,
+ int initial_render_view_id);
+
+ // Helper function to determine the view that we are currently tracking.
+ RenderWidgetHost* GetTarget();
+
+ // Response callback for RenderWidgetHost::CopyFromBackingStore().
+ void DidCopyFromBackingStore(
+ const base::Time& start_time,
+ const scoped_refptr<media::VideoFrame>& target,
+ const RenderWidgetHostViewFrameSubscriber::DeliverFrameCallback&
+ deliver_frame_cb,
+ bool success,
+ const SkBitmap& bitmap);
+
+ // Response callback for RWHVP::CopyFromCompositingSurfaceToVideoFrame().
+ void DidCopyFromCompositingSurfaceToVideoFrame(
+ const base::Time& start_time,
+ const RenderWidgetHostViewFrameSubscriber::DeliverFrameCallback&
+ deliver_frame_cb,
+ bool success);
+
+ // Remove the old subscription, and start a new one. This should be called
+ // after any change to the WebContents that affects the RenderWidgetHost or
+ // attached views.
+ void RenewFrameSubscription();
+
+ // The task runner of the thread on which SkBitmap->VideoFrame conversion will
+ // occur. Only used when this activity cannot be done on the GPU.
+ const scoped_refptr<base::SequencedTaskRunner> render_task_runner_;
+
+ // Makes all the decisions about which frames to copy, and how.
+ const scoped_refptr<ThreadSafeCaptureOracle> oracle_proxy_;
+
+ // Routing ID of any active fullscreen render widget or MSG_ROUTING_NONE
+ // otherwise.
+ int fullscreen_widget_id_;
+
+ // Last known RenderView size.
+ gfx::Size last_view_size_;
+
+ // Responsible for forwarding events from the active RenderWidgetHost to the
+ // oracle, and initiating captures accordingly.
+ scoped_ptr<ContentCaptureSubscription> subscription_;
+
+ DISALLOW_COPY_AND_ASSIGN(CaptureMachine);
+};
+
+// Responsible for logging the effective frame rate.
+// TODO(nick): Make this compatible with the push model and hook it back up.
+class VideoFrameDeliveryLog {
+ public:
+ VideoFrameDeliveryLog();
+
+ // Treat |frame_number| as having been delivered, and update the
+ // frame rate statistics accordingly.
+ void ChronicleFrameDelivery(int frame_number);
+
+ private:
+ // The following keep track of and log the effective frame rate whenever
+ // verbose logging is turned on.
+ base::Time last_frame_rate_log_time_;
+ int count_frames_rendered_;
+ int last_frame_number_;
+
+ DISALLOW_COPY_AND_ASSIGN(VideoFrameDeliveryLog);
+};
+
+ThreadSafeCaptureOracle::ThreadSafeCaptureOracle(
+ media::VideoCaptureDevice::EventHandler* consumer,
+ scoped_ptr<VideoCaptureOracle> oracle)
+ : consumer_(consumer),
+ oracle_(oracle.Pass()),
+ is_started_(false) {
+}
+
+bool ThreadSafeCaptureOracle::ObserveEventAndDecideCapture(
+ VideoCaptureOracle::Event event,
+ base::Time event_time,
+ scoped_refptr<media::VideoFrame>* storage,
+ RenderWidgetHostViewFrameSubscriber::DeliverFrameCallback* callback) {
+ base::AutoLock guard(lock_);
+
+ if (!consumer_ || !is_started_)
+ return false; // Capture is stopped.
+
+ scoped_refptr<media::VideoFrame> output_buffer =
+ consumer_->ReserveOutputBuffer();
+ const bool should_capture =
+ oracle_->ObserveEventAndDecideCapture(event, event_time);
+ const bool content_is_dirty =
+ (event == VideoCaptureOracle::kCompositorUpdate ||
+ event == VideoCaptureOracle::kSoftwarePaint);
+ const char* event_name =
+ (event == VideoCaptureOracle::kTimerPoll ? "poll" :
+ (event == VideoCaptureOracle::kCompositorUpdate ? "gpu" :
+ "paint"));
+
+ // Consider the various reasons not to initiate a capture.
+ if (should_capture && !output_buffer.get()) {
+ TRACE_EVENT_INSTANT1("mirroring",
+ "EncodeLimited",
+ TRACE_EVENT_SCOPE_THREAD,
+ "trigger",
+ event_name);
+ return false;
+ } else if (!should_capture && output_buffer.get()) {
+ if (content_is_dirty) {
+ // This is a normal and acceptable way to drop a frame. We've hit our
+ // capture rate limit: for example, the content is animating at 60fps but
+ // we're capturing at 30fps.
+ TRACE_EVENT_INSTANT1("mirroring", "FpsRateLimited",
+ TRACE_EVENT_SCOPE_THREAD,
+ "trigger", event_name);
+ }
+ return false;
+ } else if (!should_capture && !output_buffer.get()) {
+ // We decided not to capture; even if we had wanted to, no output buffer
+ // was available anyway.
+ TRACE_EVENT_INSTANT1("mirroring", "NearlyEncodeLimited",
+ TRACE_EVENT_SCOPE_THREAD,
+ "trigger", event_name);
+ return false;
+ }
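+ // Both a capture decision and an output buffer are available: record the
+ // capture with the oracle and hand the buffer plus a completion callback
+ // back to the caller.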
+ int frame_number = oracle_->RecordCapture();
+ TRACE_EVENT_ASYNC_BEGIN2("mirroring", "Capture", output_buffer.get(),
+ "frame_number", frame_number,
+ "trigger", event_name);
+ *storage = output_buffer;
+ *callback = base::Bind(&ThreadSafeCaptureOracle::DidCaptureFrame,
+ this, output_buffer, frame_number);
+ return true;
+}
+
+void ThreadSafeCaptureOracle::Start() {
+ base::AutoLock guard(lock_);
+ is_started_ = true;
+}
+
+void ThreadSafeCaptureOracle::Stop() {
+ base::AutoLock guard(lock_);
+ is_started_ = false;
+}
+
+void ThreadSafeCaptureOracle::ReportError() {
+ base::AutoLock guard(lock_);
+ if (consumer_)
+ consumer_->OnError();
+}
+
+void ThreadSafeCaptureOracle::InvalidateConsumer() {
+ base::AutoLock guard(lock_);
+
+ TRACE_EVENT_INSTANT0("mirroring", "InvalidateConsumer",
+ TRACE_EVENT_SCOPE_THREAD);
+
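+ // Once these are cleared, DidCaptureFrame() and ReportError() become
+ // no-ops with respect to the consumer.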
+ is_started_ = false;
+ consumer_ = NULL;
+}
+
+void ThreadSafeCaptureOracle::DidCaptureFrame(
+ const scoped_refptr<media::VideoFrame>& frame,
+ int frame_number,
+ base::Time timestamp,
+ bool success) {
+ base::AutoLock guard(lock_);
+
+ TRACE_EVENT_ASYNC_END2("mirroring", "Capture", frame.get(),
+ "success", success,
+ "timestamp", timestamp.ToInternalValue());
+
+ if (!consumer_ || !is_started_)
+ return; // Capture is stopped.
+
+ if (success && oracle_->CompleteCapture(frame_number, timestamp))
+ consumer_->OnIncomingCapturedVideoFrame(frame, timestamp);
+}
+
+bool FrameSubscriber::ShouldCaptureFrame(
+ base::Time present_time,
+ scoped_refptr<media::VideoFrame>* storage,
+ DeliverFrameCallback* deliver_frame_cb) {
+ TRACE_EVENT1("mirroring", "FrameSubscriber::ShouldCaptureFrame",
+ "instance", this);
+
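+ // The oracle proxy makes the actual capture decision and, when capture
+ // should proceed, fills in |storage| and |deliver_frame_cb|.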
+ return oracle_proxy_->ObserveEventAndDecideCapture(event_type_, present_time,
+ storage, deliver_frame_cb);
+}
+
+ContentCaptureSubscription::ContentCaptureSubscription(
+ const RenderWidgetHost& source,
+ const scoped_refptr<ThreadSafeCaptureOracle>& oracle_proxy,
+ const CaptureCallback& capture_callback)
+ : render_process_id_(source.GetProcess()->GetID()),
+ render_view_id_(source.GetRoutingID()),
+ paint_subscriber_(VideoCaptureOracle::kSoftwarePaint, oracle_proxy),
+ timer_subscriber_(VideoCaptureOracle::kTimerPoll, oracle_proxy),
+ capture_callback_(capture_callback),
+ timer_(true, true) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
+
+ RenderWidgetHostViewPort* view =
+ RenderWidgetHostViewPort::FromRWHV(source.GetView());
+
+ // Subscribe to accelerated presents. These will be serviced directly by the
+ // oracle.
+ if (view && kAcceleratedSubscriberIsSupported) {
+ scoped_ptr<RenderWidgetHostViewFrameSubscriber> subscriber(
+ new FrameSubscriber(VideoCaptureOracle::kCompositorUpdate,
+ oracle_proxy));
+ view->BeginFrameSubscription(subscriber.Pass());
+ }
+
+ // Subscribe to software paint events. This instance will service these by
+ // reflecting them back to the CaptureMachine via |capture_callback|.
+ registrar_.Add(
+ this, content::NOTIFICATION_RENDER_WIDGET_HOST_DID_UPDATE_BACKING_STORE,
+ Source<RenderWidgetHost>(&source));
+
+ // Subscribe to timer events. This instance will service these as well.
+ timer_.Start(FROM_HERE, oracle_proxy->capture_period(),
+ base::Bind(&ContentCaptureSubscription::OnTimer,
+ base::Unretained(this)));
+}
+
+ContentCaptureSubscription::~ContentCaptureSubscription() {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
+ if (kAcceleratedSubscriberIsSupported) {
+ RenderViewHost* source = RenderViewHost::FromID(render_process_id_,
+ render_view_id_);
+ if (source) {
+ RenderWidgetHostViewPort* view =
+ RenderWidgetHostViewPort::FromRWHV(source->GetView());
+ if (view)
+ view->EndFrameSubscription();
+ }
+ }
+}
+
+void ContentCaptureSubscription::Observe(
+ int type,
+ const content::NotificationSource& source,
+ const content::NotificationDetails& details) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
+ DCHECK_EQ(NOTIFICATION_RENDER_WIDGET_HOST_DID_UPDATE_BACKING_STORE, type);
+
+ RenderWidgetHostImpl* rwh =
+ RenderWidgetHostImpl::From(Source<RenderWidgetHost>(source).ptr());
+
+ // This message occurs on window resizes and visibility changes even when
+ // accelerated compositing is active, so we need to filter out these cases.
+ if (!rwh || !rwh->GetView() || (rwh->is_accelerated_compositing_active() &&
+ rwh->GetView()->IsSurfaceAvailableForCopy()))
+ return;
+
+ TRACE_EVENT1("mirroring", "ContentCaptureSubscription::Observe",
+ "instance", this);
+
+ base::Closure copy_done_callback;
+ scoped_refptr<media::VideoFrame> frame;
+ RenderWidgetHostViewFrameSubscriber::DeliverFrameCallback deliver_frame_cb;
+ const base::Time start_time = base::Time::Now();
+ if (paint_subscriber_.ShouldCaptureFrame(start_time,
+ &frame,
+ &deliver_frame_cb)) {
+ // This message happens just before paint. If we post a task to do the copy,
+ // it should run soon after the paint.
+ BrowserThread::PostTask(
+ BrowserThread::UI, FROM_HERE,
+ base::Bind(capture_callback_, start_time, frame, deliver_frame_cb));
+ }
+}
+
+void ContentCaptureSubscription::OnTimer() {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
+ TRACE_EVENT0("mirroring", "ContentCaptureSubscription::OnTimer");
+
+ scoped_refptr<media::VideoFrame> frame;
+ RenderWidgetHostViewFrameSubscriber::DeliverFrameCallback deliver_frame_cb;
+
+ const base::Time start_time = base::Time::Now();
+ if (timer_subscriber_.ShouldCaptureFrame(start_time,
+ &frame,
+ &deliver_frame_cb)) {
+ capture_callback_.Run(start_time, frame, deliver_frame_cb);
+ }
+}
+
+void RenderVideoFrame(const SkBitmap& input,
+ const scoped_refptr<media::VideoFrame>& output,
+ const base::Callback<void(bool)>& done_cb) {
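+ // Invokes done_cb(false) if any of the early returns below is taken; it is
+ // released once the conversion has succeeded.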
+ base::ScopedClosureRunner failure_handler(base::Bind(done_cb, false));
+
+ SkAutoLockPixels locker(input);
+
+ // Sanity-check the captured bitmap.
+ if (input.empty() ||
+ !input.readyToDraw() ||
+ input.config() != SkBitmap::kARGB_8888_Config ||
+ input.width() < 2 || input.height() < 2) {
+ DVLOG(1) << "input unacceptable (size="
+ << input.getSize()
+ << ", ready=" << input.readyToDraw()
+ << ", config=" << input.config() << ')';
+ return;
+ }
+
+ // Sanity-check the output buffer.
+ if (output->format() != media::VideoFrame::I420) {
+ NOTREACHED();
+ return;
+ }
+
+ // Calculate the width and height of the content region in the |output|, based
+ // on the aspect ratio of |input|.
+ gfx::Rect region_in_frame = ComputeYV12LetterboxRegion(
+ output->coded_size(), gfx::Size(input.width(), input.height()));
+
+ // Scale the bitmap to the required size, if necessary.
+ SkBitmap scaled_bitmap;
+ if (input.width() != region_in_frame.width() ||
+ input.height() != region_in_frame.height()) {
+
+ skia::ImageOperations::ResizeMethod method;
+ if (input.width() < region_in_frame.width() ||
+ input.height() < region_in_frame.height()) {
+ // Avoid box filtering when magnifying, because for upscaling it is
+ // effectively nearest-neighbor.
+ method = skia::ImageOperations::RESIZE_HAMMING1;
+ } else {
+ method = skia::ImageOperations::RESIZE_BOX;
+ }
+
+ TRACE_EVENT_ASYNC_STEP0("mirroring", "Capture", output.get(), "Scale");
+ scaled_bitmap = skia::ImageOperations::Resize(input, method,
+ region_in_frame.width(),
+ region_in_frame.height());
+ } else {
+ scaled_bitmap = input;
+ }
+
+ TRACE_EVENT_ASYNC_STEP0("mirroring", "Capture", output.get(), "YUV");
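+ // Convert the (possibly scaled) ARGB pixels into the I420 planes of
+ // |output|, writing only within the letterboxed content region.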
+ {
+ SkAutoLockPixels scaled_bitmap_locker(scaled_bitmap);
+
+ media::CopyRGBToVideoFrame(
+ reinterpret_cast<uint8*>(scaled_bitmap.getPixels()),
+ scaled_bitmap.rowBytes(),
+ region_in_frame,
+ output.get());
+ }
+
+ // The result is now ready.
+ failure_handler.Release();
+ done_cb.Run(true);
+}
+
+VideoFrameDeliveryLog::VideoFrameDeliveryLog()
+ : last_frame_rate_log_time_(),
+ count_frames_rendered_(0),
+ last_frame_number_(0) {
+}
+
+void VideoFrameDeliveryLog::ChronicleFrameDelivery(int frame_number) {
+ // Log frame rate, if verbose logging is turned on.
+ static const base::TimeDelta kFrameRateLogInterval =
+ base::TimeDelta::FromSeconds(10);
+ const base::Time now = base::Time::Now();
+ if (last_frame_rate_log_time_.is_null()) {
+ last_frame_rate_log_time_ = now;
+ count_frames_rendered_ = 0;
+ last_frame_number_ = frame_number;
+ } else {
+ ++count_frames_rendered_;
+ const base::TimeDelta elapsed = now - last_frame_rate_log_time_;
+ if (elapsed >= kFrameRateLogInterval) {
+ const double measured_fps =
+ count_frames_rendered_ / elapsed.InSecondsF();
+ const int frames_elapsed = frame_number - last_frame_number_;
+ const int count_frames_dropped = frames_elapsed - count_frames_rendered_;
+ DCHECK_LE(0, count_frames_dropped);
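+ // Adding frames_elapsed / 2 before dividing rounds the reported drop
+ // percentage to the nearest whole percent.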
+ UMA_HISTOGRAM_PERCENTAGE(
+ "TabCapture.FrameDropPercentage",
+ (count_frames_dropped * 100 + frames_elapsed / 2) / frames_elapsed);
+ UMA_HISTOGRAM_COUNTS(
+ "TabCapture.FrameRate",
+ static_cast<int>(measured_fps));
+ VLOG(1) << "Current measured frame rate for "
+ << "WebContentsVideoCaptureDevice is " << measured_fps << " FPS.";
+ last_frame_rate_log_time_ = now;
+ count_frames_rendered_ = 0;
+ last_frame_number_ = frame_number;
+ }
+ }
+}
+
+// static
+scoped_ptr<CaptureMachine> CaptureMachine::Create(
+ int render_process_id,
+ int render_view_id,
+ const scoped_refptr<base::SequencedTaskRunner>& render_task_runner,
+ const scoped_refptr<ThreadSafeCaptureOracle>& oracle_proxy) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
+ DCHECK(render_task_runner.get());
+ DCHECK(oracle_proxy.get());
+ scoped_ptr<CaptureMachine> machine(
+ new CaptureMachine(render_task_runner, oracle_proxy));
+
+ if (!machine->StartObservingWebContents(render_process_id, render_view_id))
+ machine.reset();
+
+ return machine.Pass();
+}
+
+CaptureMachine::CaptureMachine(
+ const scoped_refptr<base::SequencedTaskRunner>& render_task_runner,
+ const scoped_refptr<ThreadSafeCaptureOracle>& oracle_proxy)
+ : render_task_runner_(render_task_runner),
+ oracle_proxy_(oracle_proxy),
+ fullscreen_widget_id_(MSG_ROUTING_NONE) {}
+
+CaptureMachine::~CaptureMachine() {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI) ||
+ !BrowserThread::IsMessageLoopValid(BrowserThread::UI));
+
+ // Stop observing the web contents.
+ subscription_.reset();
+ if (web_contents()) {
+ web_contents()->DecrementCapturerCount();
+ Observe(NULL);
+ }
+}
+
+void CaptureMachine::Capture(
+ const base::Time& start_time,
+ const scoped_refptr<media::VideoFrame>& target,
+ const RenderWidgetHostViewFrameSubscriber::DeliverFrameCallback&
+ deliver_frame_cb) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
+
+ RenderWidgetHost* rwh = GetTarget();
+ RenderWidgetHostViewPort* view =
+ rwh ? RenderWidgetHostViewPort::FromRWHV(rwh->GetView()) : NULL;
+ if (!view || !rwh) {
+ deliver_frame_cb.Run(base::Time(), false);
+ return;
+ }
+
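+ // Determine the letterboxed content size within the video frame; the
+ // backing-store copy path below uses it as the requested output size.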
+ gfx::Size video_size = target->coded_size();
+ gfx::Size view_size = view->GetViewBounds().size();
+ gfx::Size fitted_size;
+ if (!view_size.IsEmpty()) {
+ fitted_size = ComputeYV12LetterboxRegion(video_size, view_size).size();
+ }
+ if (view_size != last_view_size_) {
+ last_view_size_ = view_size;
+
+ // Measure the number of kilopixels.
+ UMA_HISTOGRAM_COUNTS_10000(
+ "TabCapture.ViewChangeKiloPixels",
+ view_size.width() * view_size.height() / 1024);
+ }
+
+ if (!view->IsSurfaceAvailableForCopy()) {
+ // Fall back to the more expensive renderer-side copy if the surface and
+ // backing store are not accessible.
+ rwh->GetSnapshotFromRenderer(
+ gfx::Rect(),
+ base::Bind(&CaptureMachine::DidCopyFromBackingStore, this->AsWeakPtr(),
+ start_time, target, deliver_frame_cb));
+ } else if (view->CanCopyToVideoFrame()) {
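+ // Accelerated path: read back the compositing surface directly into the
+ // I420 |target| frame.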
+ view->CopyFromCompositingSurfaceToVideoFrame(
+ gfx::Rect(view_size),
+ target,
+ base::Bind(&CaptureMachine::DidCopyFromCompositingSurfaceToVideoFrame,
+ this->AsWeakPtr(), start_time, deliver_frame_cb));
+ } else {
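+ // Software path: copy the backing store into an SkBitmap, then convert it
+ // to a video frame on |render_task_runner_|.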
+ rwh->CopyFromBackingStore(
+ gfx::Rect(),
+ fitted_size, // The size is a request that may not be honored.
+ base::Bind(&CaptureMachine::DidCopyFromBackingStore, this->AsWeakPtr(),
+ start_time, target, deliver_frame_cb));
+ }
+}
+
+bool CaptureMachine::StartObservingWebContents(int initial_render_process_id,
+ int initial_render_view_id) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
+
+ // Look up the RenderViewHost and, from that, the WebContents that wraps it.
+ // If successful, begin observing the WebContents instance.
+ //
+ // Why this can be unsuccessful: The request for mirroring originates in a
+ // render process, and this request is based on the current RenderView
+ // associated with a tab. However, by the time we get up-and-running here,
+ // there have been multiple back-and-forth IPCs between processes, as well as
+ // a bit of indirection across threads. It's entirely possible that, in the
+ // meantime, the original RenderView has gone away.
+ RenderViewHost* const rvh =
+ RenderViewHost::FromID(initial_render_process_id,
+ initial_render_view_id);
+ DVLOG_IF(1, !rvh) << "RenderViewHost::FromID("
+ << initial_render_process_id << ", "
+ << initial_render_view_id << ") returned NULL.";
+ Observe(rvh ? WebContents::FromRenderViewHost(rvh) : NULL);
+
+ WebContentsImpl* contents = static_cast<WebContentsImpl*>(web_contents());
+ if (contents) {
+ contents->IncrementCapturerCount();
+ fullscreen_widget_id_ = contents->GetFullscreenWidgetRoutingID();
+ RenewFrameSubscription();
+ return true;
+ }
+
+ DVLOG(1) << "WebContents::FromRenderViewHost(" << rvh << ") returned NULL.";
+ return false;
+}
+
+void CaptureMachine::WebContentsDestroyed(WebContents* web_contents) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
+
+ subscription_.reset();
+ web_contents->DecrementCapturerCount();
+ oracle_proxy_->ReportError();
+}
+
+RenderWidgetHost* CaptureMachine::GetTarget() {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
+ if (!web_contents())
+ return NULL;
+
+ RenderWidgetHost* rwh = NULL;
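+ // An active fullscreen widget takes precedence over the regular render view.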
+ if (fullscreen_widget_id_ != MSG_ROUTING_NONE) {
+ RenderProcessHost* process = web_contents()->GetRenderProcessHost();
+ if (process)
+ rwh = RenderWidgetHost::FromID(process->GetID(), fullscreen_widget_id_);
+ } else {
+ rwh = web_contents()->GetRenderViewHost();
+ }
+
+ return rwh;
+}
+
+void CaptureMachine::DidCopyFromBackingStore(
+ const base::Time& start_time,
+ const scoped_refptr<media::VideoFrame>& target,
+ const RenderWidgetHostViewFrameSubscriber::DeliverFrameCallback&
+ deliver_frame_cb,
+ bool success,
+ const SkBitmap& bitmap) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
+
+ base::Time now = base::Time::Now();
+ if (success) {
+ UMA_HISTOGRAM_TIMES("TabCapture.CopyTimeBitmap", now - start_time);
+ TRACE_EVENT_ASYNC_STEP0("mirroring", "Capture", target.get(), "Render");
+ render_task_runner_->PostTask(FROM_HERE, base::Bind(
+ &RenderVideoFrame, bitmap, target,
+ base::Bind(deliver_frame_cb, start_time)));
+ } else {
+ // Capture can fail due to transient issues, so just skip this frame.
+ DVLOG(1) << "CopyFromBackingStore failed; skipping frame.";
+ deliver_frame_cb.Run(start_time, false);
+ }
+}
+
+void CaptureMachine::DidCopyFromCompositingSurfaceToVideoFrame(
+ const base::Time& start_time,
+ const RenderWidgetHostViewFrameSubscriber::DeliverFrameCallback&
+ deliver_frame_cb,
+ bool success) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
+ base::Time now = base::Time::Now();
+
+ if (success) {
+ UMA_HISTOGRAM_TIMES("TabCapture.CopyTimeVideoFrame", now - start_time);
+ } else {
+ // Capture can fail due to transient issues, so just skip this frame.
+ DVLOG(1) << "CopyFromCompositingSurface failed; skipping frame.";
+ }
+ deliver_frame_cb.Run(start_time, success);
+}
+
+void CaptureMachine::RenewFrameSubscription() {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
+
+ // Always destroy the old subscription before creating a new one.
+ subscription_.reset();
+
+ RenderWidgetHost* rwh = GetTarget();
+ if (!rwh || !rwh->GetView())
+ return;
+
+ subscription_.reset(new ContentCaptureSubscription(*rwh, oracle_proxy_,
+ base::Bind(&CaptureMachine::Capture, this->AsWeakPtr())));
+}
+
+void DeleteCaptureMachineOnUIThread(
+ scoped_ptr<CaptureMachine> capture_machine) {
+ DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
+ capture_machine.reset();
+}
+
+} // namespace
+
+// The "meat" of the video capture implementation, which is a ref-counted class.
+// Separating this from the "shell class" WebContentsVideoCaptureDevice allows
+// safe destruction without needing to block any threads (e.g., the IO
+// BrowserThread).
+//
+// WebContentsVideoCaptureDevice::Impl manages a simple state machine and the
+// pipeline (see notes at top of this file). It times the start of successive
+// captures and facilitates the processing of each through the stages of the
+// pipeline.
+class WebContentsVideoCaptureDevice::Impl : public base::SupportsWeakPtr<Impl> {
+ public:
+ Impl(int render_process_id, int render_view_id);
+ virtual ~Impl();
+
+ // Asynchronous requests to change WebContentsVideoCaptureDevice::Impl state.
+ void Allocate(int width,
+ int height,
+ int frame_rate,
+ media::VideoCaptureDevice::EventHandler* consumer);
+ void Start();
+ void Stop();
+ void DeAllocate();
+
+ private:
+ // Flag indicating current state.
+ enum State {
+ kIdle,
+ kAllocated,
+ kCapturing,
+ kError
+ };
+
+ void TransitionStateTo(State next_state);
+
+ // Stops capturing and notifies consumer_ of an error state.
+ void Error();
+
+ // Called with the result of CaptureMachine::Create(), which runs on the UI
+ // thread. Assigns the capture machine to the Impl instance if it still
+ // exists; otherwise, posts a task to delete the CaptureMachine on the UI
+ // thread.
+ static void AssignCaptureMachine(
+ base::WeakPtr<WebContentsVideoCaptureDevice::Impl> impl,
+ scoped_ptr<CaptureMachine> capture_machine);
+
+ // Tracks that all activity occurs on the media stream manager's thread.
+ base::ThreadChecker thread_checker_;
+
+ // These values identify the starting view that will be captured. After
+ // capture starts, the target view IDs will change as navigation occurs, and
+ // so these values are not relevant after the initial bootstrapping.
+ const int initial_render_process_id_;
+ const int initial_render_view_id_;
+
+ // Our event handler, which gobbles the frames we capture.
+ VideoCaptureDevice::EventHandler* consumer_;
+
+ // Current lifecycle state.
+ State state_;
+
+ // A dedicated worker thread for doing image operations. Started/joined here,
+ // but used by the CaptureMachine.
+ base::Thread render_thread_;
+
+ // Tracks the CaptureMachine that's doing work on our behalf on the UI thread.
+ // This value should never be dereferenced by this class, other than to
+ // create and destroy it on the UI thread.
+ scoped_ptr<CaptureMachine> capture_machine_;
+
+ // Our thread-safe capture oracle which serves as the gateway to the video
+ // capture pipeline. Besides the WCVCD itself, it is the only component of the
+ // system with direct access to |consumer_|.
+ scoped_refptr<ThreadSafeCaptureOracle> oracle_proxy_;
+
+ DISALLOW_COPY_AND_ASSIGN(Impl);
+};
+
+WebContentsVideoCaptureDevice::Impl::Impl(int render_process_id,
+ int render_view_id)
+ : initial_render_process_id_(render_process_id),
+ initial_render_view_id_(render_view_id),
+ consumer_(NULL),
+ state_(kIdle),
+ render_thread_("WebContentsVideo_RenderThread") {
+}
+
+void WebContentsVideoCaptureDevice::Impl::Allocate(
+ int width,
+ int height,
+ int frame_rate,
+ VideoCaptureDevice::EventHandler* consumer) {
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ if (state_ != kIdle) {
+ DVLOG(1) << "Allocate() invoked when not in state Idle.";
+ return;
+ }
+
+ if (frame_rate <= 0) {
+ DVLOG(1) << "invalid frame_rate: " << frame_rate;
+ consumer->OnError();
+ return;
+ }
+
+ if (!render_thread_.Start()) {
+ DVLOG(1) << "Failed to spawn render thread.";
+ consumer->OnError();
+ return;
+ }
+
+ // Frame dimensions must each be a positive, even integer, since the consumer
+ // wants (or will convert to) YUV420.
+ width = MakeEven(width);
+ height = MakeEven(height);
+ if (width < kMinFrameWidth || height < kMinFrameHeight) {
+ DVLOG(1) << "invalid width (" << width << ") and/or height ("
+ << height << ")";
+ consumer->OnError();
+ return;
+ }
+
+ // Initialize capture settings which will be consistent for the
+ // duration of the capture.
+ media::VideoCaptureCapability settings;
+
+ settings.width = width;
+ settings.height = height;
+ settings.frame_rate = frame_rate;
+ // Note: the value of |settings.color| doesn't matter if we use only the
+ // VideoFrame based methods on |consumer|.
+ settings.color = media::VideoCaptureCapability::kI420;
+ settings.expected_capture_delay = 0;
+ settings.interlaced = false;
+
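+ // Derive the capture period from the requested frame rate, rounding to the
+ // nearest microsecond.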
+ base::TimeDelta capture_period = base::TimeDelta::FromMicroseconds(
+ 1000000.0 / settings.frame_rate + 0.5);
+
+ consumer_ = consumer;
+ consumer_->OnFrameInfo(settings);
+ scoped_ptr<VideoCaptureOracle> oracle(
+ new VideoCaptureOracle(capture_period,
+ kAcceleratedSubscriberIsSupported));
+ oracle_proxy_ = new ThreadSafeCaptureOracle(
+ consumer_,
+ oracle.Pass());
+
+ // Allocates the CaptureMachine. The CaptureMachine will be tracking render
+ // view swapping over its lifetime, and we don't want to lose our reference to
+ // the current render view by starting over with the stale
+ // |initial_render_view_id_|.
+ DCHECK(!capture_machine_.get());
+ BrowserThread::PostTaskAndReplyWithResult(
+ BrowserThread::UI, FROM_HERE,
+ base::Bind(&CaptureMachine::Create,
+ initial_render_process_id_,
+ initial_render_view_id_,
+ render_thread_.message_loop_proxy(), oracle_proxy_),
+ base::Bind(&Impl::AssignCaptureMachine, AsWeakPtr()));
+
+ TransitionStateTo(kAllocated);
+}
+
+void WebContentsVideoCaptureDevice::Impl::Start() {
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ if (state_ != kAllocated) {
+ return;
+ }
+
+ TransitionStateTo(kCapturing);
+
+ oracle_proxy_->Start();
+}
+
+// static
+void WebContentsVideoCaptureDevice::Impl::AssignCaptureMachine(
+ base::WeakPtr<WebContentsVideoCaptureDevice::Impl> impl,
+ scoped_ptr<CaptureMachine> capture_machine) {
+ DCHECK(!impl.get() || impl->thread_checker_.CalledOnValidThread());
+
+ if (!impl.get()) {
+ // If WCVD::Impl was destroyed before we got back onto its thread and
+ // |capture_machine| is not NULL, then we need to return to the UI thread to
+ // safely clean up the CaptureMachine.
+ if (capture_machine) {
+ BrowserThread::PostTask(
+ BrowserThread::UI, FROM_HERE, base::Bind(
+ &DeleteCaptureMachineOnUIThread, base::Passed(&capture_machine)));
+ return;
+ }
+ } else if (!capture_machine) {
+ impl->Error();
+ } else {
+ impl->capture_machine_ = capture_machine.Pass();
+ }
+}
+
+void WebContentsVideoCaptureDevice::Impl::Stop() {
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ if (state_ != kCapturing) {
+ return;
+ }
+ oracle_proxy_->Stop();
+
+ TransitionStateTo(kAllocated);
+}
+
+void WebContentsVideoCaptureDevice::Impl::DeAllocate() {
+ DCHECK(thread_checker_.CalledOnValidThread());
+ if (state_ == kCapturing) {
+ Stop();
+ }
+ if (state_ == kAllocated) {
+ // |consumer_| is about to be deleted, so we mustn't use it anymore.
+ oracle_proxy_->InvalidateConsumer();
+ consumer_ = NULL;
+ oracle_proxy_ = NULL;
+ render_thread_.Stop();
+
+ TransitionStateTo(kIdle);
+ }
+}
+
+WebContentsVideoCaptureDevice::Impl::~Impl() {
+ // There is still a capture pipeline running that is checking in with the
+ // oracle, and processing captures that are already started in flight. That
+ // pipeline must be shut down asynchronously, on the UI thread.
+ if (capture_machine_) {
+ // The task that is posted to the UI thread might not run if we are shutting
+ // down, so we transfer ownership of CaptureMachine to the closure so that
+ // it is still cleaned up when the closure is deleted.
+ BrowserThread::PostTask(
+ BrowserThread::UI, FROM_HERE, base::Bind(
+ &DeleteCaptureMachineOnUIThread, base::Passed(&capture_machine_)));
+ }
+
+ DCHECK(!capture_machine_) << "Cleanup on UI thread did not happen.";
+ DCHECK(!consumer_) << "Device not DeAllocated -- possible data race.";
+ DVLOG(1) << "WebContentsVideoCaptureDevice::Impl@" << this << " destroying.";
+}
+
+void WebContentsVideoCaptureDevice::Impl::TransitionStateTo(State next_state) {
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+#ifndef NDEBUG
+ static const char* kStateNames[] = {
+ "Idle", "Allocated", "Capturing", "Error"
+ };
+ DVLOG(1) << "State change: " << kStateNames[state_]
+ << " --> " << kStateNames[next_state];
+#endif
+
+ state_ = next_state;
+}
+
+void WebContentsVideoCaptureDevice::Impl::Error() {
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ if (state_ == kIdle)
+ return;
+
+ if (consumer_)
+ consumer_->OnError();
+
+ DeAllocate();
+ TransitionStateTo(kError);
+}
+
+WebContentsVideoCaptureDevice::WebContentsVideoCaptureDevice(
+ const media::VideoCaptureDevice::Name& name,
+ int render_process_id,
+ int render_view_id)
+ : device_name_(name),
+ impl_(new WebContentsVideoCaptureDevice::Impl(render_process_id,
+ render_view_id)) {}
+
+WebContentsVideoCaptureDevice::~WebContentsVideoCaptureDevice() {
+ DVLOG(2) << "WebContentsVideoCaptureDevice@" << this << " destroying.";
+}
+
+// static
+media::VideoCaptureDevice* WebContentsVideoCaptureDevice::Create(
+ const std::string& device_id) {
+ // Parse device_id into render_process_id and render_view_id.
+ int render_process_id = -1;
+ int render_view_id = -1;
+ if (!WebContentsCaptureUtil::ExtractTabCaptureTarget(device_id,
+ &render_process_id,
+ &render_view_id))
+ return NULL;
+
+ std::string device_name;
+ base::SStringPrintf(&device_name,
+ "WebContents[%.*s]",
+ static_cast<int>(device_id.size()), device_id.data());
+ return new WebContentsVideoCaptureDevice(
+ media::VideoCaptureDevice::Name(device_name, device_id),
+ render_process_id, render_view_id);
+}
+
+void WebContentsVideoCaptureDevice::Allocate(
+ const media::VideoCaptureCapability& capture_format,
+ VideoCaptureDevice::EventHandler* observer) {
+ DVLOG(1) << "Allocating " << capture_format.width << "x"
+ << capture_format.height;
+ impl_->Allocate(capture_format.width,
+ capture_format.height,
+ capture_format.frame_rate,
+ observer);
+}
+
+void WebContentsVideoCaptureDevice::Start() {
+ impl_->Start();
+}
+
+void WebContentsVideoCaptureDevice::Stop() {
+ impl_->Stop();
+}
+
+void WebContentsVideoCaptureDevice::DeAllocate() {
+ impl_->DeAllocate();
+}
+
+const media::VideoCaptureDevice::Name&
+WebContentsVideoCaptureDevice::device_name() {
+ return device_name_;
+}
+
+} // namespace content
diff --git a/chromium/content/browser/renderer_host/media/web_contents_video_capture_device.h b/chromium/content/browser/renderer_host/media/web_contents_video_capture_device.h
new file mode 100644
index 00000000000..94ac0680f6f
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/web_contents_video_capture_device.h
@@ -0,0 +1,73 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CONTENT_BROWSER_RENDERER_HOST_MEDIA_WEB_CONTENTS_VIDEO_CAPTURE_DEVICE_H_
+#define CONTENT_BROWSER_RENDERER_HOST_MEDIA_WEB_CONTENTS_VIDEO_CAPTURE_DEVICE_H_
+
+#include <string>
+
+#include "base/memory/scoped_ptr.h"
+#include "content/common/content_export.h"
+#include "media/video/capture/video_capture_device.h"
+
+namespace content {
+
+class RenderWidgetHost;
+
+// A virtualized VideoCaptureDevice that mirrors the displayed contents of a
+// tab (accessed via its associated WebContents instance), producing a stream of
+// video frames.
+//
+// An instance is created by providing a device_id. The device_id contains the
+// routing ID for a RenderViewHost, and from the RenderViewHost instance, a
+// reference to its associated WebContents instance is acquired. From then on,
+// WebContentsVideoCaptureDevice will capture from whatever render view is
+// currently associated with that WebContents instance. This allows the
+// underlying render view to be swapped out (e.g., due to navigation or
+// crashes/reloads), without any interruption in capturing.
+class CONTENT_EXPORT WebContentsVideoCaptureDevice
+ : public media::VideoCaptureDevice {
+ public:
+ // Construct from a |device_id| string of the form:
+ // "virtual-media-stream://render_process_id:render_view_id", where
+ // |render_process_id| and |render_view_id| are decimal integers.
+ // |destroy_cb| is invoked on an outside thread once all outstanding objects
+ // are completely destroyed -- this will be some time after the
+ // WebContentsVideoCaptureDevice is itself deleted.
+ // TODO(miu): Passing a destroy callback suggests needing to revisit the
+ // design philosophy of an asynchronous DeAllocate(). http://crbug.com/158641
+ static media::VideoCaptureDevice* Create(const std::string& device_id);
+
+ virtual ~WebContentsVideoCaptureDevice();
+
+ // VideoCaptureDevice implementation.
+ virtual void Allocate(const media::VideoCaptureCapability& capture_format,
+ VideoCaptureDevice::EventHandler* observer) OVERRIDE;
+ virtual void Start() OVERRIDE;
+ virtual void Stop() OVERRIDE;
+ virtual void DeAllocate() OVERRIDE;
+
+ // Note: The following is just a pass-through of the device_id provided to the
+ // constructor. It does not change when the content of the page changes
+ // (e.g., due to navigation), or when the underlying RenderView is
+ // swapped out.
+ virtual const Name& device_name() OVERRIDE;
+
+ private:
+ class Impl;
+
+ WebContentsVideoCaptureDevice(const Name& name,
+ int render_process_id,
+ int render_view_id);
+
+ Name device_name_;
+ const scoped_ptr<Impl> impl_;
+
+ DISALLOW_COPY_AND_ASSIGN(WebContentsVideoCaptureDevice);
+};
+
+} // namespace content
+
+#endif // CONTENT_BROWSER_RENDERER_HOST_MEDIA_WEB_CONTENTS_VIDEO_CAPTURE_DEVICE_H_
diff --git a/chromium/content/browser/renderer_host/media/web_contents_video_capture_device_unittest.cc b/chromium/content/browser/renderer_host/media/web_contents_video_capture_device_unittest.cc
new file mode 100644
index 00000000000..d88d553fa73
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/web_contents_video_capture_device_unittest.cc
@@ -0,0 +1,797 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "content/browser/renderer_host/media/web_contents_video_capture_device.h"
+
+#include "base/bind_helpers.h"
+#include "base/debug/debugger.h"
+#include "base/run_loop.h"
+#include "base/test/test_timeouts.h"
+#include "base/time/time.h"
+#include "base/timer/timer.h"
+#include "content/browser/browser_thread_impl.h"
+#include "content/browser/renderer_host/media/video_capture_buffer_pool.h"
+#include "content/browser/renderer_host/media/video_capture_oracle.h"
+#include "content/browser/renderer_host/media/web_contents_capture_util.h"
+#include "content/browser/renderer_host/render_view_host_factory.h"
+#include "content/browser/renderer_host/render_widget_host_impl.h"
+#include "content/browser/renderer_host/test_render_view_host.h"
+#include "content/port/browser/render_widget_host_view_frame_subscriber.h"
+#include "content/public/browser/notification_service.h"
+#include "content/public/browser/notification_types.h"
+#include "content/public/test/mock_render_process_host.h"
+#include "content/public/test/test_browser_context.h"
+#include "content/public/test/test_browser_thread_bundle.h"
+#include "content/public/test/test_utils.h"
+#include "content/test/test_web_contents.h"
+#include "media/base/video_util.h"
+#include "media/base/yuv_convert.h"
+#include "media/video/capture/video_capture_types.h"
+#include "skia/ext/platform_canvas.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "third_party/skia/include/core/SkColor.h"
+
+namespace content {
+namespace {
+
+const int kTestWidth = 320;
+const int kTestHeight = 240;
+const int kTestFramesPerSecond = 20;
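+// Sentinel "colors": the consumer posts |kNothingYet| when an error occurs,
+// and |kNotInterested| is used while a test waits for an error rather than a
+// specific color.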
+const SkColor kNothingYet = 0xdeadbeef;
+const SkColor kNotInterested = ~kNothingYet;
+
+void DeadlineExceeded(base::Closure quit_closure) {
+ if (!base::debug::BeingDebugged()) {
+ quit_closure.Run();
+ FAIL() << "Deadline exceeded while waiting, quitting";
+ } else {
+ LOG(WARNING) << "Deadline exceeded; test would fail if debugger weren't "
+ << "attached.";
+ }
+}
+
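+// Runs the current message loop until it is quit, failing the test if
+// TestTimeouts::action_max_timeout() elapses first (unless a debugger is
+// attached).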
+void RunCurrentLoopWithDeadline() {
+ base::Timer deadline(false, false);
+ deadline.Start(FROM_HERE, TestTimeouts::action_max_timeout(), base::Bind(
+ &DeadlineExceeded, base::MessageLoop::current()->QuitClosure()));
+ base::MessageLoop::current()->Run();
+ deadline.Stop();
+}
+
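+// Converts a single RGB pixel to YUV so tests can compare captured I420
+// output against a source color.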
+SkColor ConvertRgbToYuv(SkColor rgb) {
+ uint8 yuv[3];
+ media::ConvertRGB32ToYUV(reinterpret_cast<uint8*>(&rgb),
+ yuv, yuv + 1, yuv + 2, 1, 1, 1, 1, 1);
+ return SkColorSetRGB(yuv[0], yuv[1], yuv[2]);
+}
+
+// Thread-safe class that controls the source pattern to be captured by the
+// system under test. The lifetime of this class is greater than the lifetime
+// of all objects that reference it, so it does not need to be reference
+// counted.
+class CaptureTestSourceController {
+ public:
+
+ CaptureTestSourceController()
+ : color_(SK_ColorMAGENTA),
+ copy_result_size_(kTestWidth, kTestHeight),
+ can_copy_to_video_frame_(false),
+ use_frame_subscriber_(false) {}
+
+ void SetSolidColor(SkColor color) {
+ base::AutoLock guard(lock_);
+ color_ = color;
+ }
+
+ SkColor GetSolidColor() {
+ base::AutoLock guard(lock_);
+ return color_;
+ }
+
+ void SetCopyResultSize(int width, int height) {
+ base::AutoLock guard(lock_);
+ copy_result_size_ = gfx::Size(width, height);
+ }
+
+ gfx::Size GetCopyResultSize() {
+ base::AutoLock guard(lock_);
+ return copy_result_size_;
+ }
+
+ void SignalCopy() {
+ // TODO(nick): This actually should always be happening on the UI thread.
+ base::AutoLock guard(lock_);
+ if (!copy_done_.is_null()) {
+ BrowserThread::PostTask(BrowserThread::UI, FROM_HERE, copy_done_);
+ copy_done_.Reset();
+ }
+ }
+
+ void SetCanCopyToVideoFrame(bool value) {
+ base::AutoLock guard(lock_);
+ can_copy_to_video_frame_ = value;
+ }
+
+ bool CanCopyToVideoFrame() {
+ base::AutoLock guard(lock_);
+ return can_copy_to_video_frame_;
+ }
+
+ void SetUseFrameSubscriber(bool value) {
+ base::AutoLock guard(lock_);
+ use_frame_subscriber_ = value;
+ }
+
+ bool CanUseFrameSubscriber() {
+ base::AutoLock guard(lock_);
+ return use_frame_subscriber_;
+ }
+
+ void WaitForNextCopy() {
+ {
+ base::AutoLock guard(lock_);
+ copy_done_ = base::MessageLoop::current()->QuitClosure();
+ }
+
+ RunCurrentLoopWithDeadline();
+ }
+
+ private:
+ base::Lock lock_; // Guards changes to all members.
+ SkColor color_;
+ gfx::Size copy_result_size_;
+ bool can_copy_to_video_frame_;
+ bool use_frame_subscriber_;
+ base::Closure copy_done_;
+
+ DISALLOW_COPY_AND_ASSIGN(CaptureTestSourceController);
+};
+
+// A stub implementation which returns solid-color bitmaps in calls to
+// CopyFromCompositingSurfaceToVideoFrame(), and which allows the video-frame
+// readback path to be switched on and off. The behavior is controlled by a
+// CaptureTestSourceController.
+class CaptureTestView : public TestRenderWidgetHostView {
+ public:
+ explicit CaptureTestView(RenderWidgetHostImpl* rwh,
+ CaptureTestSourceController* controller)
+ : TestRenderWidgetHostView(rwh),
+ controller_(controller) {}
+
+ virtual ~CaptureTestView() {}
+
+ // TestRenderWidgetHostView overrides.
+ virtual gfx::Rect GetViewBounds() const OVERRIDE {
+ return gfx::Rect(100, 100, 100 + kTestWidth, 100 + kTestHeight);
+ }
+
+ virtual bool CanCopyToVideoFrame() const OVERRIDE {
+ return controller_->CanCopyToVideoFrame();
+ }
+
+ virtual void CopyFromCompositingSurfaceToVideoFrame(
+ const gfx::Rect& src_subrect,
+ const scoped_refptr<media::VideoFrame>& target,
+ const base::Callback<void(bool)>& callback) OVERRIDE {
+ SkColor c = ConvertRgbToYuv(controller_->GetSolidColor());
+ media::FillYUV(
+ target.get(), SkColorGetR(c), SkColorGetG(c), SkColorGetB(c));
+ callback.Run(true);
+ controller_->SignalCopy();
+ }
+
+ virtual void BeginFrameSubscription(
+ scoped_ptr<RenderWidgetHostViewFrameSubscriber> subscriber) OVERRIDE {
+ subscriber_.reset(subscriber.release());
+ }
+
+ virtual void EndFrameSubscription() OVERRIDE {
+ subscriber_.reset();
+ }
+
+ // Simulate a compositor paint event for our subscriber.
+ void SimulateUpdate() {
+ const base::Time present_time = base::Time::Now();
+ RenderWidgetHostViewFrameSubscriber::DeliverFrameCallback callback;
+ scoped_refptr<media::VideoFrame> target;
+ if (subscriber_ && subscriber_->ShouldCaptureFrame(present_time,
+ &target, &callback)) {
+ SkColor c = ConvertRgbToYuv(controller_->GetSolidColor());
+ media::FillYUV(
+ target.get(), SkColorGetR(c), SkColorGetG(c), SkColorGetB(c));
+ BrowserThread::PostTask(BrowserThread::UI,
+ FROM_HERE,
+ base::Bind(callback, present_time, true));
+ controller_->SignalCopy();
+ }
+ }
+
+ private:
+ scoped_ptr<RenderWidgetHostViewFrameSubscriber> subscriber_;
+ CaptureTestSourceController* const controller_;
+
+ DISALLOW_IMPLICIT_CONSTRUCTORS(CaptureTestView);
+};
+
+#if defined(COMPILER_MSVC)
+// MSVC warns on diamond inheritance. See comment for same warning on
+// RenderViewHostImpl.
+#pragma warning(push)
+#pragma warning(disable: 4250)
+#endif
+
+// A stub implementation which returns solid-color bitmaps in calls to
+// CopyFromBackingStore(). The behavior is controlled by a
+// CaptureTestSourceController.
+class CaptureTestRenderViewHost : public TestRenderViewHost {
+ public:
+ CaptureTestRenderViewHost(SiteInstance* instance,
+ RenderViewHostDelegate* delegate,
+ RenderWidgetHostDelegate* widget_delegate,
+ int routing_id,
+ int main_frame_routing_id,
+ bool swapped_out,
+ CaptureTestSourceController* controller)
+ : TestRenderViewHost(instance, delegate, widget_delegate, routing_id,
+ main_frame_routing_id, swapped_out),
+ controller_(controller) {
+ // Override the default view installed by TestRenderViewHost; we need
+ // our special subclass which has mocked-out tab capture support.
+ RenderWidgetHostView* old_view = GetView();
+ SetView(new CaptureTestView(this, controller));
+ delete old_view;
+ }
+
+ // TestRenderViewHost overrides.
+ virtual void CopyFromBackingStore(
+ const gfx::Rect& src_rect,
+ const gfx::Size& accelerated_dst_size,
+ const base::Callback<void(bool, const SkBitmap&)>& callback) OVERRIDE {
+ gfx::Size size = controller_->GetCopyResultSize();
+ SkColor color = controller_->GetSolidColor();
+
+ // Although it's not necessary, use a PlatformBitmap here (instead of a
+ // regular SkBitmap) to exercise possible threading issues.
+ skia::PlatformBitmap output;
+ EXPECT_TRUE(output.Allocate(size.width(), size.height(), false));
+ {
+ SkAutoLockPixels locker(output.GetBitmap());
+ output.GetBitmap().eraseColor(color);
+ }
+ callback.Run(true, output.GetBitmap());
+ controller_->SignalCopy();
+ }
+
+ private:
+ CaptureTestSourceController* controller_;
+
+ DISALLOW_IMPLICIT_CONSTRUCTORS(CaptureTestRenderViewHost);
+};
+
+#if defined(COMPILER_MSVC)
+// Re-enable warning 4250
+#pragma warning(pop)
+#endif
+
+class CaptureTestRenderViewHostFactory : public RenderViewHostFactory {
+ public:
+ explicit CaptureTestRenderViewHostFactory(
+ CaptureTestSourceController* controller) : controller_(controller) {
+ RegisterFactory(this);
+ }
+
+ virtual ~CaptureTestRenderViewHostFactory() {
+ UnregisterFactory();
+ }
+
+ // RenderViewHostFactory implementation.
+ virtual RenderViewHost* CreateRenderViewHost(
+ SiteInstance* instance,
+ RenderViewHostDelegate* delegate,
+ RenderWidgetHostDelegate* widget_delegate,
+ int routing_id,
+ int main_frame_routing_id,
+ bool swapped_out) OVERRIDE {
+ return new CaptureTestRenderViewHost(instance, delegate, widget_delegate,
+ routing_id, main_frame_routing_id,
+ swapped_out, controller_);
+ }
+ private:
+ CaptureTestSourceController* controller_;
+
+ DISALLOW_IMPLICIT_CONSTRUCTORS(CaptureTestRenderViewHostFactory);
+};
+
+// A stub consumer of captured video frames, which checks the output of
+// WebContentsVideoCaptureDevice.
+class StubConsumer : public media::VideoCaptureDevice::EventHandler {
+ public:
+ StubConsumer()
+ : error_encountered_(false),
+ wait_color_yuv_(0xcafe1950) {
+ buffer_pool_ =
+ new VideoCaptureBufferPool(kTestWidth * kTestHeight * 3 / 2, 2);
+ EXPECT_TRUE(buffer_pool_->Allocate());
+ }
+ virtual ~StubConsumer() {}
+
+ void QuitIfConditionMet(SkColor color) {
+ base::AutoLock guard(lock_);
+
+ if (wait_color_yuv_ == color || error_encountered_)
+ base::MessageLoop::current()->Quit();
+ }
+
+ void WaitForNextColor(SkColor expected_color) {
+ {
+ base::AutoLock guard(lock_);
+ wait_color_yuv_ = ConvertRgbToYuv(expected_color);
+ error_encountered_ = false;
+ }
+ RunCurrentLoopWithDeadline();
+ {
+ base::AutoLock guard(lock_);
+ ASSERT_FALSE(error_encountered_);
+ }
+ }
+
+ void WaitForError() {
+ {
+ base::AutoLock guard(lock_);
+ wait_color_yuv_ = kNotInterested;
+ error_encountered_ = false;
+ }
+ RunCurrentLoopWithDeadline();
+ {
+ base::AutoLock guard(lock_);
+ ASSERT_TRUE(error_encountered_);
+ }
+ }
+
+ bool HasError() {
+ base::AutoLock guard(lock_);
+ return error_encountered_;
+ }
+
+ virtual scoped_refptr<media::VideoFrame> ReserveOutputBuffer() OVERRIDE {
+ return buffer_pool_->ReserveI420VideoFrame(gfx::Size(kTestWidth,
+ kTestHeight),
+ 0);
+ }
+
+ virtual void OnIncomingCapturedFrame(
+ const uint8* data,
+ int length,
+ base::Time timestamp,
+ int rotation,
+ bool flip_vert,
+ bool flip_horiz) OVERRIDE {
+ FAIL();
+ }
+
+ virtual void OnIncomingCapturedVideoFrame(
+ const scoped_refptr<media::VideoFrame>& frame,
+ base::Time timestamp) OVERRIDE {
+ EXPECT_EQ(gfx::Size(kTestWidth, kTestHeight), frame->coded_size());
+ EXPECT_EQ(media::VideoFrame::I420, frame->format());
+ EXPECT_LE(
+ 0,
+ buffer_pool_->RecognizeReservedBuffer(frame->shared_memory_handle()));
+ uint8 yuv[3];
+ for (int plane = 0; plane < 3; ++plane) {
+ yuv[plane] = frame->data(plane)[0];
+ }
+ // TODO(nick): We just look at the first pixel presently, because if
+ // the analysis is too slow, the backlog of frames will grow without bound
+ // and trouble erupts. http://crbug.com/174519
+ PostColorOrError(SkColorSetRGB(yuv[0], yuv[1], yuv[2]));
+ }
+
+ void PostColorOrError(SkColor new_color) {
+ BrowserThread::PostTask(BrowserThread::UI, FROM_HERE, base::Bind(
+ &StubConsumer::QuitIfConditionMet, base::Unretained(this), new_color));
+ }
+
+ virtual void OnError() OVERRIDE {
+ {
+ base::AutoLock guard(lock_);
+ error_encountered_ = true;
+ }
+ PostColorOrError(kNothingYet);
+ }
+
+ virtual void OnFrameInfo(const media::VideoCaptureCapability& info) OVERRIDE {
+ EXPECT_EQ(kTestWidth, info.width);
+ EXPECT_EQ(kTestHeight, info.height);
+ EXPECT_EQ(kTestFramesPerSecond, info.frame_rate);
+ EXPECT_EQ(media::VideoCaptureCapability::kI420, info.color);
+ }
+
+ private:
+ base::Lock lock_;
+ bool error_encountered_;
+ SkColor wait_color_yuv_;
+ scoped_refptr<VideoCaptureBufferPool> buffer_pool_;
+
+ DISALLOW_COPY_AND_ASSIGN(StubConsumer);
+};
+
+// Test harness that sets up a minimal environment with necessary stubs.
+class WebContentsVideoCaptureDeviceTest : public testing::Test {
+ public:
+ // This is public because C++ method pointer scoping rules are silly and make
+ // this hard to use with Bind().
+ void ResetWebContents() {
+ web_contents_.reset();
+ }
+
+ protected:
+ virtual void SetUp() {
+ // TODO(nick): Sadness and woe! Much "mock-the-world" boilerplate could be
+ // eliminated here, if only we could use RenderViewHostTestHarness. The
+ // catch is that we need our TestRenderViewHost to support a
+ // CopyFromBackingStore operation that we control. To accomplish that,
+ // either RenderViewHostTestHarness would have to support installing a
+ // custom RenderViewHostFactory, or else we implant some kind of delegated
+ // CopyFromBackingStore functionality into TestRenderViewHost itself.
+
+ render_process_host_factory_.reset(new MockRenderProcessHostFactory());
+ // Create our (self-registering) RVH factory, so that when we create a
+ // WebContents, it in turn creates CaptureTestRenderViewHosts.
+ render_view_host_factory_.reset(
+ new CaptureTestRenderViewHostFactory(&controller_));
+
+ browser_context_.reset(new TestBrowserContext());
+
+ scoped_refptr<SiteInstance> site_instance =
+ SiteInstance::Create(browser_context_.get());
+ SiteInstanceImpl::set_render_process_host_factory(
+ render_process_host_factory_.get());
+ web_contents_.reset(
+ TestWebContents::Create(browser_context_.get(), site_instance.get()));
+
+ // This is actually a CaptureTestRenderViewHost.
+ RenderWidgetHostImpl* rwh =
+ RenderWidgetHostImpl::From(web_contents_->GetRenderViewHost());
+
+ std::string device_id =
+ WebContentsCaptureUtil::AppendWebContentsDeviceScheme(
+ base::StringPrintf("%d:%d", rwh->GetProcess()->GetID(),
+ rwh->GetRoutingID()));
+
+ device_.reset(WebContentsVideoCaptureDevice::Create(device_id));
+
+ base::RunLoop().RunUntilIdle();
+ }
+
+ virtual void TearDown() {
+ // Tear down in opposite order of set-up.
+
+ // The device is destroyed asynchronously, and will notify the
+ // CaptureTestSourceController when it finishes destruction.
+ // Trigger this, and wait.
+ if (device_) {
+ device_->DeAllocate();
+ device_.reset();
+ }
+
+ base::RunLoop().RunUntilIdle();
+
+ // Destroy the browser objects.
+ web_contents_.reset();
+ browser_context_.reset();
+
+ base::RunLoop().RunUntilIdle();
+
+ SiteInstanceImpl::set_render_process_host_factory(NULL);
+ render_view_host_factory_.reset();
+ render_process_host_factory_.reset();
+ }
+
+ // Accessors.
+ CaptureTestSourceController* source() { return &controller_; }
+ media::VideoCaptureDevice* device() { return device_.get(); }
+ StubConsumer* consumer() { return &consumer_; }
+
+ void SimulateDrawEvent() {
+ if (source()->CanUseFrameSubscriber()) {
+ // Drive the accelerated frame-subscriber path.
+ CaptureTestView* test_view = static_cast<CaptureTestView*>(
+ web_contents_->GetRenderViewHost()->GetView());
+ test_view->SimulateUpdate();
+ } else {
+ // Simulate a non-accelerated paint.
+ NotificationService::current()->Notify(
+ NOTIFICATION_RENDER_WIDGET_HOST_DID_UPDATE_BACKING_STORE,
+ Source<RenderWidgetHost>(web_contents_->GetRenderViewHost()),
+ NotificationService::NoDetails());
+ }
+ }
+
+ void DestroyVideoCaptureDevice() { device_.reset(); }
+
+ private:
+ // The consumer is the ultimate recipient of captured pixel data.
+ StubConsumer consumer_;
+
+ // The controller controls which pixel patterns to produce.
+ CaptureTestSourceController controller_;
+
+ // Self-registering RenderProcessHostFactory.
+ scoped_ptr<MockRenderProcessHostFactory> render_process_host_factory_;
+
+ // Creates capture-capable RenderViewHosts whose pixel content production is
+ // under the control of |controller_|.
+ scoped_ptr<CaptureTestRenderViewHostFactory> render_view_host_factory_;
+
+ // A mocked-out browser and tab.
+ scoped_ptr<TestBrowserContext> browser_context_;
+ scoped_ptr<WebContents> web_contents_;
+
+ // Finally, the WebContentsVideoCaptureDevice under test.
+ scoped_ptr<media::VideoCaptureDevice> device_;
+
+ TestBrowserThreadBundle thread_bundle_;
+};
+
+TEST_F(WebContentsVideoCaptureDeviceTest, InvalidInitialWebContentsError) {
+ // Before the device installs itself on the UI thread to start capturing,
+ // we'll delete the web contents. This should trigger an error, which can
+ // happen in practice; we should be able to recover gracefully.
+ ResetWebContents();
+
+ media::VideoCaptureCapability capture_format(
+ kTestWidth,
+ kTestHeight,
+ kTestFramesPerSecond,
+ media::VideoCaptureCapability::kI420,
+ 0,
+ false,
+ media::ConstantResolutionVideoCaptureDevice);
+ device()->Allocate(capture_format, consumer());
+ device()->Start();
+ ASSERT_NO_FATAL_FAILURE(consumer()->WaitForError());
+ device()->DeAllocate();
+}
+
+TEST_F(WebContentsVideoCaptureDeviceTest, WebContentsDestroyed) {
+ // We'll simulate the tab being closed after the capture pipeline is up and
+ // running.
+ media::VideoCaptureCapability capture_format(
+ kTestWidth,
+ kTestHeight,
+ kTestFramesPerSecond,
+ media::VideoCaptureCapability::kI420,
+ 0,
+ false,
+ media::ConstantResolutionVideoCaptureDevice);
+ device()->Allocate(capture_format, consumer());
+ device()->Start();
+
+ // Do one capture to prove the pipeline is up and delivering frames.
+ source()->SetSolidColor(SK_ColorRED);
+ SimulateDrawEvent();
+ ASSERT_NO_FATAL_FAILURE(consumer()->WaitForNextColor(SK_ColorRED));
+
+ base::RunLoop().RunUntilIdle();
+
+ // Post a task to close the tab. We should see an error reported to the
+ // consumer.
+ BrowserThread::PostTask(BrowserThread::UI, FROM_HERE,
+ base::Bind(&WebContentsVideoCaptureDeviceTest::ResetWebContents,
+ base::Unretained(this)));
+ ASSERT_NO_FATAL_FAILURE(consumer()->WaitForError());
+ device()->DeAllocate();
+}
+
+TEST_F(WebContentsVideoCaptureDeviceTest,
+ StopDeviceBeforeCaptureMachineCreation) {
+ media::VideoCaptureCapability capture_format(
+ kTestWidth,
+ kTestHeight,
+ kTestFramesPerSecond,
+ media::VideoCaptureCapability::kI420,
+ 0,
+ false,
+ media::ConstantResolutionVideoCaptureDevice);
+ device()->Allocate(capture_format, consumer());
+ device()->Start();
+ // Make a point of not running the UI message loop here.
+ device()->Stop();
+ device()->DeAllocate();
+ DestroyVideoCaptureDevice();
+
+ // Currently, there should be a CaptureMachine::Create() task and a
+ // DeleteCaptureMachineOnUIThread() task pending on the current (UI) message
+ // loop. These should both succeed without crashing, and the machine should
+ // wind up in the idle state.
+ base::RunLoop().RunUntilIdle();
+}
+
+TEST_F(WebContentsVideoCaptureDeviceTest, StopWithRendererWorkToDo) {
+ // Set up the test to use RGB copies and the normal (non-accelerated) path.
+ source()->SetCanCopyToVideoFrame(false);
+ source()->SetUseFrameSubscriber(false);
+ media::VideoCaptureCapability capture_format(
+ kTestWidth,
+ kTestHeight,
+ kTestFramesPerSecond,
+ media::VideoCaptureCapability::kI420,
+ 0,
+ false,
+ media::ConstantResolutionVideoCaptureDevice);
+ device()->Allocate(capture_format, consumer());
+
+ device()->Start();
+ // Make a point of not running the UI message loop here.
+ // TODO(ajwong): Why do we care?
+ base::RunLoop().RunUntilIdle();
+
+ for (int i = 0; i < 10; ++i)
+ SimulateDrawEvent();
+
+ device()->Stop();
+ device()->DeAllocate();
+ // Currently, there should be a CaptureMachine::Create() task and a
+ // DeleteCaptureMachineOnUIThread() task pending on the current message
+ // loop. These should both succeed without crashing, and the machine should
+ // wind up in the idle state.
+ ASSERT_FALSE(consumer()->HasError());
+ base::RunLoop().RunUntilIdle();
+ ASSERT_FALSE(consumer()->HasError());
+}
+
+TEST_F(WebContentsVideoCaptureDeviceTest, DeviceRestart) {
+ media::VideoCaptureCapability capture_format(
+ kTestWidth,
+ kTestHeight,
+ kTestFramesPerSecond,
+ media::VideoCaptureCapability::kI420,
+ 0,
+ false,
+ media::ConstantResolutionVideoCaptureDevice);
+ device()->Allocate(capture_format, consumer());
+ device()->Start();
+ base::RunLoop().RunUntilIdle();
+ source()->SetSolidColor(SK_ColorRED);
+ SimulateDrawEvent();
+ SimulateDrawEvent();
+ ASSERT_NO_FATAL_FAILURE(consumer()->WaitForNextColor(SK_ColorRED));
+ SimulateDrawEvent();
+ SimulateDrawEvent();
+ source()->SetSolidColor(SK_ColorGREEN);
+ SimulateDrawEvent();
+ ASSERT_NO_FATAL_FAILURE(consumer()->WaitForNextColor(SK_ColorGREEN));
+ device()->Stop();
+
+ // Device is stopped, but content can still be animating.
+ SimulateDrawEvent();
+ SimulateDrawEvent();
+ base::RunLoop().RunUntilIdle();
+
+ device()->Start();
+ source()->SetSolidColor(SK_ColorBLUE);
+ SimulateDrawEvent();
+ ASSERT_NO_FATAL_FAILURE(consumer()->WaitForNextColor(SK_ColorBLUE));
+ source()->SetSolidColor(SK_ColorYELLOW);
+ SimulateDrawEvent();
+ ASSERT_NO_FATAL_FAILURE(consumer()->WaitForNextColor(SK_ColorYELLOW));
+ device()->DeAllocate();
+}
+
+// The "happy case" test. No scaling is needed, so we should be able to change
+// the picture emitted from the source and expect to see each delivered to the
+// consumer. The test will alternate between the three capture paths, simulating
+// falling in and out of accelerated compositing.
+TEST_F(WebContentsVideoCaptureDeviceTest, GoesThroughAllTheMotions) {
+ media::VideoCaptureCapability capture_format(
+ kTestWidth,
+ kTestHeight,
+ kTestFramesPerSecond,
+ media::VideoCaptureCapability::kI420,
+ 0,
+ false,
+ media::ConstantResolutionVideoCaptureDevice);
+ device()->Allocate(capture_format, consumer());
+
+ device()->Start();
+
+ for (int i = 0; i < 6; i++) {
+ const char* name = NULL;
+ switch (i % 3) {
+ case 0:
+ source()->SetCanCopyToVideoFrame(true);
+ source()->SetUseFrameSubscriber(false);
+ name = "VideoFrame";
+ break;
+ case 1:
+ source()->SetCanCopyToVideoFrame(false);
+ source()->SetUseFrameSubscriber(true);
+ name = "Subscriber";
+ break;
+ case 2:
+ source()->SetCanCopyToVideoFrame(false);
+ source()->SetUseFrameSubscriber(false);
+ name = "SkBitmap";
+ break;
+ default:
+ FAIL();
+ }
+
+ SCOPED_TRACE(base::StringPrintf("Using %s path, iteration #%d", name, i));
+
+ source()->SetSolidColor(SK_ColorRED);
+ SimulateDrawEvent();
+ ASSERT_NO_FATAL_FAILURE(consumer()->WaitForNextColor(SK_ColorRED));
+
+ source()->SetSolidColor(SK_ColorGREEN);
+ SimulateDrawEvent();
+ ASSERT_NO_FATAL_FAILURE(consumer()->WaitForNextColor(SK_ColorGREEN));
+
+ source()->SetSolidColor(SK_ColorBLUE);
+ SimulateDrawEvent();
+ ASSERT_NO_FATAL_FAILURE(consumer()->WaitForNextColor(SK_ColorBLUE));
+
+ source()->SetSolidColor(SK_ColorBLACK);
+ SimulateDrawEvent();
+ ASSERT_NO_FATAL_FAILURE(consumer()->WaitForNextColor(SK_ColorBLACK));
+ }
+ device()->DeAllocate();
+}
+
+TEST_F(WebContentsVideoCaptureDeviceTest, RejectsInvalidAllocateParams) {
+ media::VideoCaptureCapability capture_format(
+ 1280,
+ 720,
+ -2,
+ media::VideoCaptureCapability::kI420,
+ 0,
+ false,
+ media::ConstantResolutionVideoCaptureDevice);
+ BrowserThread::PostTask(BrowserThread::UI,
+ FROM_HERE,
+ base::Bind(&media::VideoCaptureDevice::Allocate,
+ base::Unretained(device()),
+ capture_format,
+ consumer()));
+ ASSERT_NO_FATAL_FAILURE(consumer()->WaitForError());
+}
+
+TEST_F(WebContentsVideoCaptureDeviceTest, BadFramesGoodFrames) {
+ media::VideoCaptureCapability capture_format(
+ kTestWidth,
+ kTestHeight,
+ kTestFramesPerSecond,
+ media::VideoCaptureCapability::kI420,
+ 0,
+ false,
+ media::ConstantResolutionVideoCaptureDevice);
+ device()->Allocate(capture_format, consumer());
+
+ // 1x1 is too small to process; we intend for this to result in an error.
+ source()->SetCopyResultSize(1, 1);
+ source()->SetSolidColor(SK_ColorRED);
+ device()->Start();
+
+  // These frames ought to be dropped during the Render stage. Let
+  // several captures happen.
+ ASSERT_NO_FATAL_FAILURE(source()->WaitForNextCopy());
+ ASSERT_NO_FATAL_FAILURE(source()->WaitForNextCopy());
+ ASSERT_NO_FATAL_FAILURE(source()->WaitForNextCopy());
+ ASSERT_NO_FATAL_FAILURE(source()->WaitForNextCopy());
+ ASSERT_NO_FATAL_FAILURE(source()->WaitForNextCopy());
+
+ // Now push some good frames through; they should be processed normally.
+ source()->SetCopyResultSize(kTestWidth, kTestHeight);
+ source()->SetSolidColor(SK_ColorGREEN);
+ ASSERT_NO_FATAL_FAILURE(consumer()->WaitForNextColor(SK_ColorGREEN));
+ source()->SetSolidColor(SK_ColorRED);
+ ASSERT_NO_FATAL_FAILURE(consumer()->WaitForNextColor(SK_ColorRED));
+
+ device()->Stop();
+ device()->DeAllocate();
+}
+
+} // namespace
+} // namespace content
diff --git a/chromium/content/browser/renderer_host/media/webrtc_identity_service_host.cc b/chromium/content/browser/renderer_host/media/webrtc_identity_service_host.cc
new file mode 100644
index 00000000000..0230b26eeaa
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/webrtc_identity_service_host.cc
@@ -0,0 +1,87 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "content/browser/renderer_host/media/webrtc_identity_service_host.h"
+
+#include "base/bind.h"
+#include "base/callback_helpers.h"
+#include "content/browser/child_process_security_policy_impl.h"
+#include "content/browser/media/webrtc_identity_store.h"
+#include "content/common/media/webrtc_identity_messages.h"
+#include "net/base/net_errors.h"
+
+namespace content {
+
+WebRTCIdentityServiceHost::WebRTCIdentityServiceHost(
+ int renderer_process_id,
+ WebRTCIdentityStore* identity_store)
+ : renderer_process_id_(renderer_process_id),
+ identity_store_(identity_store) {}
+
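+// Cancels any pending request on destruction so that the store does not
+// invoke OnComplete(), which is bound with base::Unretained(this), on a
+// deleted object.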
+WebRTCIdentityServiceHost::~WebRTCIdentityServiceHost() {
+ if (!cancel_callback_.is_null())
+ cancel_callback_.Run();
+}
+
+bool WebRTCIdentityServiceHost::OnMessageReceived(const IPC::Message& message,
+ bool* message_was_ok) {
+ bool handled = true;
+ IPC_BEGIN_MESSAGE_MAP_EX(WebRTCIdentityServiceHost, message, *message_was_ok)
+ IPC_MESSAGE_HANDLER(WebRTCIdentityMsg_RequestIdentity, OnRequestIdentity)
+ IPC_MESSAGE_HANDLER(WebRTCIdentityMsg_CancelRequest, OnCancelRequest)
+ IPC_MESSAGE_UNHANDLED(handled = false)
+ IPC_END_MESSAGE_MAP_EX()
+ return handled;
+}
+
+void WebRTCIdentityServiceHost::OnRequestIdentity(
+ const GURL& origin,
+ const std::string& identity_name,
+ const std::string& common_name) {
+ if (!cancel_callback_.is_null()) {
+ DLOG(WARNING)
+ << "Request rejected because the previous request has not finished.";
+ SendErrorMessage(net::ERR_INSUFFICIENT_RESOURCES);
+ return;
+ }
+
+ ChildProcessSecurityPolicyImpl* policy =
+ ChildProcessSecurityPolicyImpl::GetInstance();
+ if (!policy->CanAccessCookiesForOrigin(renderer_process_id_, origin)) {
+ DLOG(WARNING) << "Request rejected because origin access is denied.";
+ SendErrorMessage(net::ERR_ACCESS_DENIED);
+ return;
+ }
+
+ cancel_callback_ = identity_store_->RequestIdentity(
+ origin,
+ identity_name,
+ common_name,
+ base::Bind(&WebRTCIdentityServiceHost::OnComplete,
+ base::Unretained(this)));
+ if (cancel_callback_.is_null()) {
+ SendErrorMessage(net::ERR_UNEXPECTED);
+ }
+}
+
+void WebRTCIdentityServiceHost::OnCancelRequest() {
+ base::ResetAndReturn(&cancel_callback_).Run();
+}
+
+void WebRTCIdentityServiceHost::OnComplete(int status,
+ const std::string& certificate,
+ const std::string& private_key) {
+ cancel_callback_.Reset();
+ if (status == net::OK) {
+ Send(new WebRTCIdentityHostMsg_IdentityReady(certificate, private_key));
+ } else {
+ SendErrorMessage(status);
+ }
+}
+
+void WebRTCIdentityServiceHost::SendErrorMessage(int error) {
+ Send(new WebRTCIdentityHostMsg_RequestFailed(error));
+}
+
+} // namespace content
diff --git a/chromium/content/browser/renderer_host/media/webrtc_identity_service_host.h b/chromium/content/browser/renderer_host/media/webrtc_identity_service_host.h
new file mode 100644
index 00000000000..3676223fe86
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/webrtc_identity_service_host.h
@@ -0,0 +1,63 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CONTENT_BROWSER_RENDERER_HOST_MEDIA_WEBRTC_IDENTITY_SERVICE_HOST_H_
+#define CONTENT_BROWSER_RENDERER_HOST_MEDIA_WEBRTC_IDENTITY_SERVICE_HOST_H_
+
+#include <string>
+
+#include "base/basictypes.h"
+#include "content/common/content_export.h"
+#include "content/public/browser/browser_message_filter.h"
+
+class GURL;
+
+namespace content {
+
+class WebRTCIdentityStore;
+
+// This class is the host for WebRTCIdentityService in the browser process.
+// It converts the IPC messages for requesting a WebRTC DTLS identity and
+// cancelling a pending request into calls on WebRTCIdentityStore, and sends
+// the result of a request back to the renderer through IPC.
+// Only one outstanding request is allowed per renderer at a time. If a second
+// request is made before the first one completes, an IPC carrying
+// ERR_INSUFFICIENT_RESOURCES is sent back to the renderer.
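+//
+// A sketch of the message flow handled by this class:
+//   renderer -> browser:  WebRTCIdentityMsg_RequestIdentity(origin,
+//                         identity_name, common_name)
+//   browser -> renderer:  WebRTCIdentityHostMsg_IdentityReady(certificate,
+//                         private_key) on success, or
+//                         WebRTCIdentityHostMsg_RequestFailed(error) on error.
+//   renderer -> browser:  WebRTCIdentityMsg_CancelRequest() cancels the
+//                         outstanding request, if any.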
+class CONTENT_EXPORT WebRTCIdentityServiceHost : public BrowserMessageFilter {
+ public:
+ explicit WebRTCIdentityServiceHost(int renderer_process_id,
+ WebRTCIdentityStore* identity_store);
+
+ protected:
+ virtual ~WebRTCIdentityServiceHost();
+
+ // content::BrowserMessageFilter override.
+ virtual bool OnMessageReceived(const IPC::Message& message,
+ bool* message_was_ok) OVERRIDE;
+
+ private:
+ // See WebRTCIdentityStore for the meaning of the parameters.
+ void OnComplete(int status,
+ const std::string& certificate,
+ const std::string& private_key);
+
+ // See WebRTCIdentityStore for the meaning of the parameters.
+ void OnRequestIdentity(const GURL& origin,
+ const std::string& identity_name,
+ const std::string& common_name);
+
+ void OnCancelRequest();
+
+ void SendErrorMessage(int error);
+
+ int renderer_process_id_;
+ base::Closure cancel_callback_;
+ WebRTCIdentityStore* identity_store_;
+
+ DISALLOW_COPY_AND_ASSIGN(WebRTCIdentityServiceHost);
+};
+
+} // namespace content
+
+#endif // CONTENT_BROWSER_RENDERER_HOST_MEDIA_WEBRTC_IDENTITY_SERVICE_HOST_H_
diff --git a/chromium/content/browser/renderer_host/media/webrtc_identity_service_host_unittest.cc b/chromium/content/browser/renderer_host/media/webrtc_identity_service_host_unittest.cc
new file mode 100644
index 00000000000..341378d3c7b
--- /dev/null
+++ b/chromium/content/browser/renderer_host/media/webrtc_identity_service_host_unittest.cc
@@ -0,0 +1,187 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <deque>
+
+#include "content/browser/child_process_security_policy_impl.h"
+#include "content/browser/media/webrtc_identity_store.h"
+#include "content/browser/renderer_host/media/webrtc_identity_service_host.h"
+#include "content/common/media/webrtc_identity_messages.h"
+#include "content/public/test/test_browser_thread_bundle.h"
+#include "ipc/ipc_message.h"
+#include "net/base/net_errors.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace content {
+
+namespace {
+
+static const char FAKE_ORIGIN[] = "http://fake.com";
+static const char FAKE_IDENTITY_NAME[] = "fake identity";
+static const char FAKE_COMMON_NAME[] = "fake common name";
+static const char FAKE_CERTIFICATE[] = "fake cert";
+static const char FAKE_PRIVATE_KEY[] = "fake private key";
+static const int FAKE_ERROR = 100;
+static const int FAKE_RENDERER_ID = 10;
+
+class MockWebRTCIdentityStore : public WebRTCIdentityStore {
+ public:
+ MockWebRTCIdentityStore() : WebRTCIdentityStore(base::FilePath(), NULL) {}
+
+ virtual base::Closure RequestIdentity(
+ const GURL& origin,
+ const std::string& identity_name,
+ const std::string& common_name,
+ const CompletionCallback& callback) OVERRIDE {
+ EXPECT_TRUE(callback_.is_null());
+
+ callback_ = callback;
+ return base::Bind(&MockWebRTCIdentityStore::OnCancel,
+ base::Unretained(this));
+ }
+
+ bool HasPendingRequest() const { return !callback_.is_null(); }
+
+ void RunCompletionCallback(int error,
+ const std::string& cert,
+ const std::string& key) {
+ callback_.Run(error, cert, key);
+ callback_.Reset();
+ }
+
+ private:
+ virtual ~MockWebRTCIdentityStore() {}
+
+ void OnCancel() { callback_.Reset(); }
+
+ CompletionCallback callback_;
+};
+
+class WebRTCIdentityServiceHostForTest : public WebRTCIdentityServiceHost {
+ public:
+ explicit WebRTCIdentityServiceHostForTest(WebRTCIdentityStore* identity_store)
+ : WebRTCIdentityServiceHost(FAKE_RENDERER_ID, identity_store) {
+ ChildProcessSecurityPolicyImpl* policy =
+ ChildProcessSecurityPolicyImpl::GetInstance();
+ policy->Add(FAKE_RENDERER_ID);
+ }
+
+ virtual bool Send(IPC::Message* message) OVERRIDE {
+ messages_.push_back(*message);
+ delete message;
+ return true;
+ }
+
+ virtual bool OnMessageReceived(const IPC::Message& message,
+ bool* message_was_ok) OVERRIDE {
+ return WebRTCIdentityServiceHost::OnMessageReceived(message,
+ message_was_ok);
+ }
+
+ IPC::Message GetLastMessage() { return messages_.back(); }
+
+ int GetNumberOfMessages() { return messages_.size(); }
+
+ void ClearMessages() { messages_.clear(); }
+
+ private:
+ virtual ~WebRTCIdentityServiceHostForTest() {
+ ChildProcessSecurityPolicyImpl* policy =
+ ChildProcessSecurityPolicyImpl::GetInstance();
+ policy->Remove(FAKE_RENDERER_ID);
+ }
+
+ std::deque<IPC::Message> messages_;
+};
+
+class WebRTCIdentityServiceHostTest : public ::testing::Test {
+ public:
+ WebRTCIdentityServiceHostTest()
+ : browser_thread_bundle_(TestBrowserThreadBundle::IO_MAINLOOP),
+ store_(new MockWebRTCIdentityStore()),
+ host_(new WebRTCIdentityServiceHostForTest(store_.get())) {}
+
+ void SendRequestToHost() {
+ bool ok;
+ host_->OnMessageReceived(
+ WebRTCIdentityMsg_RequestIdentity(
+ GURL(FAKE_ORIGIN), FAKE_IDENTITY_NAME, FAKE_COMMON_NAME),
+ &ok);
+ ASSERT_TRUE(ok);
+ }
+
+ void SendCancelRequestToHost() {
+ bool ok;
+ host_->OnMessageReceived(WebRTCIdentityMsg_CancelRequest(), &ok);
+ ASSERT_TRUE(ok);
+ }
+
+ void VerifyRequestFailedMessage(int error) {
+ EXPECT_EQ(1, host_->GetNumberOfMessages());
+ IPC::Message ipc = host_->GetLastMessage();
+ EXPECT_EQ(ipc.type(), WebRTCIdentityHostMsg_RequestFailed::ID);
+
+ Tuple1<int> error_in_message;
+ WebRTCIdentityHostMsg_RequestFailed::Read(&ipc, &error_in_message);
+ EXPECT_EQ(error, error_in_message.a);
+ }
+
+ void VerifyIdentityReadyMessage(const std::string& cert,
+ const std::string& key) {
+ EXPECT_EQ(1, host_->GetNumberOfMessages());
+ IPC::Message ipc = host_->GetLastMessage();
+ EXPECT_EQ(ipc.type(), WebRTCIdentityHostMsg_IdentityReady::ID);
+
+ Tuple2<std::string, std::string> identity_in_message;
+ WebRTCIdentityHostMsg_IdentityReady::Read(&ipc, &identity_in_message);
+ EXPECT_EQ(cert, identity_in_message.a);
+ EXPECT_EQ(key, identity_in_message.b);
+ }
+
+ protected:
+ TestBrowserThreadBundle browser_thread_bundle_;
+ scoped_refptr<MockWebRTCIdentityStore> store_;
+ scoped_refptr<WebRTCIdentityServiceHostForTest> host_;
+};
+
+} // namespace
+
+TEST_F(WebRTCIdentityServiceHostTest, TestSendAndCancelRequest) {
+ SendRequestToHost();
+ EXPECT_TRUE(store_->HasPendingRequest());
+ SendCancelRequestToHost();
+ EXPECT_FALSE(store_->HasPendingRequest());
+}
+
+TEST_F(WebRTCIdentityServiceHostTest, TestOnlyOneRequestAllowed) {
+ SendRequestToHost();
+ EXPECT_TRUE(store_->HasPendingRequest());
+ EXPECT_EQ(0, host_->GetNumberOfMessages());
+ SendRequestToHost();
+
+ VerifyRequestFailedMessage(net::ERR_INSUFFICIENT_RESOURCES);
+}
+
+TEST_F(WebRTCIdentityServiceHostTest, TestOnIdentityReady) {
+ SendRequestToHost();
+ store_->RunCompletionCallback(net::OK, FAKE_CERTIFICATE, FAKE_PRIVATE_KEY);
+ VerifyIdentityReadyMessage(FAKE_CERTIFICATE, FAKE_PRIVATE_KEY);
+}
+
+TEST_F(WebRTCIdentityServiceHostTest, TestOnRequestFailed) {
+ SendRequestToHost();
+ store_->RunCompletionCallback(net::ERR_KEY_GENERATION_FAILED, "", "");
+ VerifyRequestFailedMessage(net::ERR_KEY_GENERATION_FAILED);
+}
+
+TEST_F(WebRTCIdentityServiceHostTest, TestOriginAccessDenied) {
+ ChildProcessSecurityPolicyImpl* policy =
+ ChildProcessSecurityPolicyImpl::GetInstance();
+ policy->Remove(FAKE_RENDERER_ID);
+
+ SendRequestToHost();
+ VerifyRequestFailedMessage(net::ERR_ACCESS_DENIED);
+}
+
+} // namespace content