summaryrefslogtreecommitdiff
path: root/chromium/media/renderers
diff options
context:
space:
mode:
Diffstat (limited to 'chromium/media/renderers')
-rw-r--r--chromium/media/renderers/BUILD.gn1
-rw-r--r--chromium/media/renderers/audio_renderer_impl.cc47
-rw-r--r--chromium/media/renderers/audio_renderer_impl.h17
-rw-r--r--chromium/media/renderers/audio_renderer_impl_unittest.cc22
-rw-r--r--chromium/media/renderers/decrypting_renderer.cc5
-rw-r--r--chromium/media/renderers/decrypting_renderer.h1
-rw-r--r--chromium/media/renderers/decrypting_renderer_unittest.cc2
-rw-r--r--chromium/media/renderers/default_renderer_factory.cc19
-rw-r--r--chromium/media/renderers/default_renderer_factory.h2
-rw-r--r--chromium/media/renderers/paint_canvas_video_renderer.cc183
-rw-r--r--chromium/media/renderers/paint_canvas_video_renderer.h19
-rw-r--r--chromium/media/renderers/paint_canvas_video_renderer_unittest.cc4
-rw-r--r--chromium/media/renderers/renderer_impl.cc20
-rw-r--r--chromium/media/renderers/renderer_impl.h1
-rw-r--r--chromium/media/renderers/video_overlay_factory.cc1
-rw-r--r--chromium/media/renderers/video_renderer_impl.cc26
-rw-r--r--chromium/media/renderers/video_resource_updater.cc52
-rw-r--r--chromium/media/renderers/video_resource_updater_unittest.cc6
-rw-r--r--chromium/media/renderers/win/media_foundation_protection_manager.h2
-rw-r--r--chromium/media/renderers/win/media_foundation_renderer.cc7
-rw-r--r--chromium/media/renderers/win/media_foundation_renderer.h2
-rw-r--r--chromium/media/renderers/win/media_foundation_source_wrapper.h2
-rw-r--r--chromium/media/renderers/win/mf_cdm_proxy.h67
-rw-r--r--chromium/media/renderers/yuv_util.cc245
-rw-r--r--chromium/media/renderers/yuv_util.h7
25 files changed, 391 insertions, 369 deletions
diff --git a/chromium/media/renderers/BUILD.gn b/chromium/media/renderers/BUILD.gn
index 684f41f71fe..3c9967bcce8 100644
--- a/chromium/media/renderers/BUILD.gn
+++ b/chromium/media/renderers/BUILD.gn
@@ -129,7 +129,6 @@ if (is_win) {
"win/media_foundation_stream_wrapper.h",
"win/media_foundation_video_stream.cc",
"win/media_foundation_video_stream.h",
- "win/mf_cdm_proxy.h",
]
deps = [
"//media",
diff --git a/chromium/media/renderers/audio_renderer_impl.cc b/chromium/media/renderers/audio_renderer_impl.cc
index 95308f4a746..0c1e6b03bd9 100644
--- a/chromium/media/renderers/audio_renderer_impl.cc
+++ b/chromium/media/renderers/audio_renderer_impl.cc
@@ -48,7 +48,7 @@ AudioRendererImpl::AudioRendererImpl(
media::AudioRendererSink* sink,
const CreateAudioDecodersCB& create_audio_decoders_cb,
MediaLog* media_log,
- const TranscribeAudioCallback& transcribe_audio_callback)
+ SpeechRecognitionClient* speech_recognition_client)
: task_runner_(task_runner),
expecting_config_changes_(false),
sink_(sink),
@@ -70,8 +70,12 @@ AudioRendererImpl::AudioRendererImpl(
received_end_of_stream_(false),
rendered_end_of_stream_(false),
is_suspending_(false),
+#if defined(OS_ANDROID)
+ is_passthrough_(false) {
+#else
is_passthrough_(false),
- transcribe_audio_callback_(transcribe_audio_callback) {
+ speech_recognition_client_(speech_recognition_client) {
+#endif
DCHECK(create_audio_decoders_cb_);
// PowerObserver's must be added and removed from the same thread, but we
@@ -369,6 +373,14 @@ void AudioRendererImpl::Initialize(DemuxerStream* stream,
sink_->GetOutputDeviceInfoAsync(
base::BindOnce(&AudioRendererImpl::OnDeviceInfoReceived,
weak_factory_.GetWeakPtr(), demuxer_stream_, cdm_context));
+
+#if !defined(OS_ANDROID)
+ if (speech_recognition_client_) {
+ speech_recognition_client_->SetOnReadyCallback(
+ base::BindOnce(&AudioRendererImpl::EnableSpeechRecognition,
+ weak_factory_.GetWeakPtr()));
+ }
+#endif
}
void AudioRendererImpl::OnDeviceInfoReceived(
@@ -619,6 +631,8 @@ void AudioRendererImpl::OnAudioDecoderStreamInitialized(bool success) {
algorithm_->Initialize(audio_parameters_, is_encrypted_);
if (latency_hint_)
algorithm_->SetLatencyHint(latency_hint_);
+
+ algorithm_->SetPreservesPitch(preserves_pitch_);
ConfigureChannelMask();
ChangeState_Locked(kFlushed);
@@ -708,6 +722,15 @@ void AudioRendererImpl::SetLatencyHint(
}
}
+void AudioRendererImpl::SetPreservesPitch(bool preserves_pitch) {
+ base::AutoLock auto_lock(lock_);
+
+ preserves_pitch_ = preserves_pitch;
+
+ if (algorithm_)
+ algorithm_->SetPreservesPitch(preserves_pitch);
+}
+
void AudioRendererImpl::OnSuspend() {
base::AutoLock auto_lock(lock_);
is_suspending_ = true;
@@ -871,8 +894,10 @@ bool AudioRendererImpl::HandleDecodedBuffer_Locked(
if (first_packet_timestamp_ == kNoTimestamp)
first_packet_timestamp_ = buffer->timestamp();
- if (!transcribe_audio_callback_.is_null())
+#if !defined(OS_ANDROID)
+ if (transcribe_audio_callback_)
transcribe_audio_callback_.Run(buffer);
+#endif
if (state_ != kUninitialized)
algorithm_->EnqueueBuffer(std::move(buffer));
@@ -1281,4 +1306,20 @@ void AudioRendererImpl::ConfigureChannelMask() {
algorithm_->SetChannelMask(std::move(channel_mask));
}
+void AudioRendererImpl::EnableSpeechRecognition() {
+#if !defined(OS_ANDROID)
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ transcribe_audio_callback_ = base::BindRepeating(
+ &AudioRendererImpl::TranscribeAudio, weak_factory_.GetWeakPtr());
+#endif
+}
+
+void AudioRendererImpl::TranscribeAudio(
+ scoped_refptr<media::AudioBuffer> buffer) {
+#if !defined(OS_ANDROID)
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ if (speech_recognition_client_)
+ speech_recognition_client_->AddAudio(std::move(buffer));
+#endif
+}
} // namespace media
diff --git a/chromium/media/renderers/audio_renderer_impl.h b/chromium/media/renderers/audio_renderer_impl.h
index 33ef7e22a69..7755d20ce15 100644
--- a/chromium/media/renderers/audio_renderer_impl.h
+++ b/chromium/media/renderers/audio_renderer_impl.h
@@ -49,6 +49,7 @@ namespace media {
class AudioBufferConverter;
class AudioBus;
class AudioClock;
+class SpeechRecognitionClient;
class MEDIA_EXPORT AudioRendererImpl
: public AudioRenderer,
@@ -62,6 +63,9 @@ class MEDIA_EXPORT AudioRendererImpl
using TranscribeAudioCallback =
base::RepeatingCallback<void(scoped_refptr<media::AudioBuffer>)>;
+ using EnableSpeechRecognitionCallback =
+ base::OnceCallback<void(TranscribeAudioCallback)>;
+
// |task_runner| is the thread on which AudioRendererImpl will execute.
//
// |sink| is used as the destination for the rendered audio.
@@ -72,7 +76,7 @@ class MEDIA_EXPORT AudioRendererImpl
AudioRendererSink* sink,
const CreateAudioDecodersCB& create_audio_decoders_cb,
MediaLog* media_log,
- const TranscribeAudioCallback& transcribe_audio_callback);
+ SpeechRecognitionClient* speech_recognition_client = nullptr);
~AudioRendererImpl() override;
// TimeSource implementation.
@@ -95,6 +99,7 @@ class MEDIA_EXPORT AudioRendererImpl
void StartPlaying() override;
void SetVolume(float volume) override;
void SetLatencyHint(base::Optional<base::TimeDelta> latency_hint) override;
+ void SetPreservesPitch(bool preserves_pitch) override;
// base::PowerObserver implementation.
void OnSuspend() override;
@@ -224,6 +229,9 @@ class MEDIA_EXPORT AudioRendererImpl
// changes. Expect the layout in |last_decoded_channel_layout_|.
void ConfigureChannelMask();
+ void EnableSpeechRecognition();
+ void TranscribeAudio(scoped_refptr<media::AudioBuffer> buffer);
+
scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
std::unique_ptr<AudioBufferConverter> buffer_converter_;
@@ -293,6 +301,10 @@ class MEDIA_EXPORT AudioRendererImpl
// during Initialize().
base::Optional<base::TimeDelta> latency_hint_;
+ // Passed to |algorithm_|. Indicates whether |algorithm_| should or should not
+ // make pitch adjustments at playbacks other than 1.0.
+ bool preserves_pitch_ = true;
+
// Simple state tracking variable.
State state_;
@@ -350,7 +362,10 @@ class MEDIA_EXPORT AudioRendererImpl
// End variables which must be accessed under |lock_|. ----------------------
+#if !defined(OS_ANDROID)
+ SpeechRecognitionClient* speech_recognition_client_;
TranscribeAudioCallback transcribe_audio_callback_;
+#endif
// NOTE: Weak pointers must be invalidated before all other member variables.
base::WeakPtrFactory<AudioRendererImpl> weak_factory_{this};
diff --git a/chromium/media/renderers/audio_renderer_impl_unittest.cc b/chromium/media/renderers/audio_renderer_impl_unittest.cc
index dc1dcee3520..39df7808435 100644
--- a/chromium/media/renderers/audio_renderer_impl_unittest.cc
+++ b/chromium/media/renderers/audio_renderer_impl_unittest.cc
@@ -133,9 +133,7 @@ class AudioRendererImplTest : public ::testing::Test, public RendererClient {
main_thread_task_runner_, sink_.get(),
base::BindRepeating(&AudioRendererImplTest::CreateAudioDecoderForTest,
base::Unretained(this)),
- &media_log_,
- base::BindRepeating(&AudioRendererImplTest::TranscribeAudioCallback,
- base::Unretained(this))));
+ &media_log_, nullptr));
renderer_->tick_clock_ = &tick_clock_;
tick_clock_.Advance(base::TimeDelta::FromSeconds(1));
}
@@ -162,9 +160,7 @@ class AudioRendererImplTest : public ::testing::Test, public RendererClient {
main_thread_task_runner_, sink_.get(),
base::BindRepeating(&AudioRendererImplTest::CreateAudioDecoderForTest,
base::Unretained(this)),
- &media_log_,
- base::BindRepeating(&AudioRendererImplTest::TranscribeAudioCallback,
- base::Unretained(this))));
+ &media_log_, nullptr));
testing::Mock::VerifyAndClearExpectations(&demuxer_stream_);
ConfigureDemuxerStream(false);
}
@@ -178,9 +174,7 @@ class AudioRendererImplTest : public ::testing::Test, public RendererClient {
main_thread_task_runner_, sink_.get(),
base::BindRepeating(&AudioRendererImplTest::CreateAudioDecoderForTest,
base::Unretained(this)),
- &media_log_,
- base::BindRepeating(&AudioRendererImplTest::TranscribeAudioCallback,
- base::Unretained(this))));
+ &media_log_, nullptr));
testing::Mock::VerifyAndClearExpectations(&demuxer_stream_);
ConfigureDemuxerStream(true);
}
@@ -191,9 +185,7 @@ class AudioRendererImplTest : public ::testing::Test, public RendererClient {
main_thread_task_runner_, mock_sink_.get(),
base::BindRepeating(&AudioRendererImplTest::CreateAudioDecoderForTest,
base::Unretained(this)),
- &media_log_,
- base::BindRepeating(&AudioRendererImplTest::TranscribeAudioCallback,
- base::Unretained(this))));
+ &media_log_, nullptr));
testing::Mock::VerifyAndClearExpectations(&demuxer_stream_);
ConfigureDemuxerStream(true);
}
@@ -255,9 +247,7 @@ class AudioRendererImplTest : public ::testing::Test, public RendererClient {
main_thread_task_runner_, sink_.get(),
base::BindRepeating(&AudioRendererImplTest::CreateAudioDecoderForTest,
base::Unretained(this)),
- &media_log_,
- base::BindRepeating(&AudioRendererImplTest::TranscribeAudioCallback,
- base::Unretained(this))));
+ &media_log_, nullptr));
Initialize();
}
@@ -676,7 +666,7 @@ TEST_F(AudioRendererImplTest, StartTicking) {
TEST_F(AudioRendererImplTest, TranscribeAudioCallback) {
Initialize();
- EXPECT_CALL(*this, TranscribeAudioCallback(_)).Times(testing::AtLeast(1));
+ EXPECT_CALL(*this, TranscribeAudioCallback(_)).Times(0);
Preroll();
StartTicking();
diff --git a/chromium/media/renderers/decrypting_renderer.cc b/chromium/media/renderers/decrypting_renderer.cc
index f2df8566fbc..b231ec7a40a 100644
--- a/chromium/media/renderers/decrypting_renderer.cc
+++ b/chromium/media/renderers/decrypting_renderer.cc
@@ -6,6 +6,7 @@
#include "base/bind.h"
#include "base/bind_helpers.h"
+#include "media/base/cdm_context.h"
#include "media/base/demuxer_stream.h"
#include "media/base/media_log.h"
#include "media/base/media_resource.h"
@@ -110,6 +111,10 @@ void DecryptingRenderer::SetLatencyHint(
renderer_->SetLatencyHint(latency_hint);
}
+void DecryptingRenderer::SetPreservesPitch(bool preserves_pitch) {
+ renderer_->SetPreservesPitch(preserves_pitch);
+}
+
void DecryptingRenderer::Flush(base::OnceClosure flush_cb) {
renderer_->Flush(std::move(flush_cb));
}
diff --git a/chromium/media/renderers/decrypting_renderer.h b/chromium/media/renderers/decrypting_renderer.h
index 6f443b4655e..84b9747b805 100644
--- a/chromium/media/renderers/decrypting_renderer.h
+++ b/chromium/media/renderers/decrypting_renderer.h
@@ -46,6 +46,7 @@ class MEDIA_EXPORT DecryptingRenderer : public Renderer {
PipelineStatusCallback init_cb) override;
void SetCdm(CdmContext* cdm_context, CdmAttachedCB cdm_attached_cb) override;
void SetLatencyHint(base::Optional<base::TimeDelta> latency_hint) override;
+ void SetPreservesPitch(bool preserves_pitch) override;
void Flush(base::OnceClosure flush_cb) override;
void StartPlayingFrom(base::TimeDelta time) override;
diff --git a/chromium/media/renderers/decrypting_renderer_unittest.cc b/chromium/media/renderers/decrypting_renderer_unittest.cc
index d2b5d61ea7d..8d55c714dd9 100644
--- a/chromium/media/renderers/decrypting_renderer_unittest.cc
+++ b/chromium/media/renderers/decrypting_renderer_unittest.cc
@@ -87,7 +87,7 @@ class DecryptingRendererTest : public testing::Test {
bool use_aes_decryptor_ = false;
base::test::TaskEnvironment task_environment_;
- base::MockCallback<CdmAttachedCB> set_cdm_cb_;
+ base::MockCallback<Renderer::CdmAttachedCB> set_cdm_cb_;
base::MockOnceCallback<void(PipelineStatus)> renderer_init_cb_;
NullMediaLog null_media_log_;
StrictMock<MockCdmContext> cdm_context_;
diff --git a/chromium/media/renderers/default_renderer_factory.cc b/chromium/media/renderers/default_renderer_factory.cc
index 9d5c2ac959f..8596f9f58fc 100644
--- a/chromium/media/renderers/default_renderer_factory.cc
+++ b/chromium/media/renderers/default_renderer_factory.cc
@@ -10,7 +10,6 @@
#include "base/bind.h"
#include "build/build_config.h"
#include "media/base/audio_buffer.h"
-#include "media/base/bind_to_current_loop.h"
#include "media/base/decoder_factory.h"
#include "media/renderers/audio_renderer_impl.h"
#include "media/renderers/renderer_impl.h"
@@ -92,9 +91,11 @@ std::unique_ptr<Renderer> DefaultRendererFactory::CreateRenderer(
// finishes.
base::BindRepeating(&DefaultRendererFactory::CreateAudioDecoders,
base::Unretained(this), media_task_runner),
- media_log_,
- BindToCurrentLoop(base::BindRepeating(
- &DefaultRendererFactory::TranscribeAudio, base::Unretained(this)))));
+#if defined(OS_ANDROID)
+ media_log_));
+#else
+ media_log_, speech_recognition_client_.get()));
+#endif
GpuVideoAcceleratorFactories* gpu_factories = nullptr;
if (get_gpu_factories_cb_)
@@ -126,14 +127,4 @@ std::unique_ptr<Renderer> DefaultRendererFactory::CreateRenderer(
media_task_runner, std::move(audio_renderer), std::move(video_renderer));
}
-void DefaultRendererFactory::TranscribeAudio(
- scoped_refptr<media::AudioBuffer> buffer) {
-#if !defined(OS_ANDROID)
- if (speech_recognition_client_ &&
- speech_recognition_client_->IsSpeechRecognitionAvailable()) {
- speech_recognition_client_->AddAudio(std::move(buffer));
- }
-#endif
-}
-
} // namespace media
diff --git a/chromium/media/renderers/default_renderer_factory.h b/chromium/media/renderers/default_renderer_factory.h
index 09de8928651..455ce1bb782 100644
--- a/chromium/media/renderers/default_renderer_factory.h
+++ b/chromium/media/renderers/default_renderer_factory.h
@@ -61,8 +61,6 @@ class MEDIA_EXPORT DefaultRendererFactory : public RendererFactory {
RequestOverlayInfoCB request_overlay_info_cb,
const gfx::ColorSpace& target_color_space) final;
- void TranscribeAudio(scoped_refptr<media::AudioBuffer> buffer);
-
private:
std::vector<std::unique_ptr<AudioDecoder>> CreateAudioDecoders(
const scoped_refptr<base::SingleThreadTaskRunner>& media_task_runner);
diff --git a/chromium/media/renderers/paint_canvas_video_renderer.cc b/chromium/media/renderers/paint_canvas_video_renderer.cc
index 5eaa9529549..248f46e23d0 100644
--- a/chromium/media/renderers/paint_canvas_video_renderer.cc
+++ b/chromium/media/renderers/paint_canvas_video_renderer.cc
@@ -15,7 +15,7 @@
#include "base/numerics/checked_math.h"
#include "base/synchronization/waitable_event.h"
#include "base/system/sys_info.h"
-#include "base/task/post_task.h"
+#include "base/task/thread_pool.h"
#include "base/threading/thread_restrictions.h"
#include "cc/paint/paint_canvas.h"
#include "cc/paint/paint_flags.h"
@@ -212,16 +212,6 @@ GLuint SynchronizeAndImportMailbox(gpu::gles2::GLES2Interface* gl,
: gl->CreateAndConsumeTextureCHROMIUM(mailbox.name);
}
-// TODO(crbug.com/1023270): Remove this ctor once we're no longer relying on
-// texture ids for Mailbox access as that is only supported on
-// RasterImplementationGLES.
-GLuint SynchronizeAndImportMailbox(gpu::raster::RasterInterface* ri,
- const gpu::SyncToken& sync_token,
- const gpu::Mailbox& mailbox) {
- ri->WaitSyncTokenCHROMIUM(sync_token.GetConstData());
- return ri->CreateAndConsumeForGpuRaster(mailbox);
-}
-
const gpu::MailboxHolder& GetVideoFrameMailboxHolder(VideoFrame* video_frame) {
DCHECK(video_frame->HasTextures());
DCHECK_EQ(video_frame->NumTextures(), 1u);
@@ -340,8 +330,7 @@ void SynchronizeVideoFrameRead(scoped_refptr<VideoFrame> video_frame,
SyncTokenClientImpl client(ri);
video_frame->UpdateReleaseSyncToken(&client);
- if (video_frame->metadata()->IsTrue(
- VideoFrameMetadata::READ_LOCK_FENCES_ENABLED)) {
+ if (video_frame->metadata()->read_lock_fences_enabled) {
// |video_frame| must be kept alive during read operations.
DCHECK(context_support);
unsigned query_id = 0;
@@ -366,6 +355,7 @@ size_t LCM(size_t a, size_t b) {
void ConvertVideoFrameToRGBPixelsTask(const VideoFrame* video_frame,
void* rgb_pixels,
size_t row_bytes,
+ bool premultiply_alpha,
size_t task_index,
size_t n_tasks,
base::RepeatingClosure* done) {
@@ -419,7 +409,7 @@ void ConvertVideoFrameToRGBPixelsTask(const VideoFrame* video_frame,
if (!video_frame->data(VideoFrame::kUPlane) &&
!video_frame->data(VideoFrame::kVPlane)) {
- DCHECK_EQ(video_frame->format(), PIXEL_FORMAT_I420);
+ DCHECK_EQ(format, PIXEL_FORMAT_I420);
auto func = (color_space == kJPEG_SkYUVColorSpace) ? LIBYUV_J400_TO_ARGB
: LIBYUV_I400_TO_ARGB;
func(plane_meta[VideoFrame::kYPlane].data,
@@ -450,7 +440,7 @@ void ConvertVideoFrameToRGBPixelsTask(const VideoFrame* video_frame,
rows);
};
- switch (video_frame->format()) {
+ switch (format) {
case PIXEL_FORMAT_YV12:
case PIXEL_FORMAT_I420:
switch (color_space) {
@@ -490,16 +480,15 @@ void ConvertVideoFrameToRGBPixelsTask(const VideoFrame* video_frame,
break;
case PIXEL_FORMAT_I420A:
- LIBYUV_I420ALPHA_TO_ARGB(
- plane_meta[VideoFrame::kYPlane].data,
- plane_meta[VideoFrame::kYPlane].stride,
- plane_meta[VideoFrame::kUPlane].data,
- plane_meta[VideoFrame::kUPlane].stride,
- plane_meta[VideoFrame::kVPlane].data,
- plane_meta[VideoFrame::kVPlane].stride,
- plane_meta[VideoFrame::kAPlane].data,
- plane_meta[VideoFrame::kAPlane].stride, pixels, row_bytes, width,
- rows, 1); // 1 = enable RGB premultiplication by Alpha.
+ LIBYUV_I420ALPHA_TO_ARGB(plane_meta[VideoFrame::kYPlane].data,
+ plane_meta[VideoFrame::kYPlane].stride,
+ plane_meta[VideoFrame::kUPlane].data,
+ plane_meta[VideoFrame::kUPlane].stride,
+ plane_meta[VideoFrame::kVPlane].data,
+ plane_meta[VideoFrame::kVPlane].stride,
+ plane_meta[VideoFrame::kAPlane].data,
+ plane_meta[VideoFrame::kAPlane].stride, pixels,
+ row_bytes, width, rows, premultiply_alpha);
break;
case PIXEL_FORMAT_I444:
@@ -555,7 +544,7 @@ void ConvertVideoFrameToRGBPixelsTask(const VideoFrame* video_frame,
NOTREACHED();
}
break;
- case PIXEL_FORMAT_UYVY:
+
case PIXEL_FORMAT_YUV420P9:
case PIXEL_FORMAT_YUV422P9:
case PIXEL_FORMAT_YUV444P9:
@@ -564,7 +553,8 @@ void ConvertVideoFrameToRGBPixelsTask(const VideoFrame* video_frame,
case PIXEL_FORMAT_YUV422P12:
case PIXEL_FORMAT_YUV444P12:
case PIXEL_FORMAT_Y16:
- NOTREACHED() << "These cases should be handled above";
+ NOTREACHED()
+ << "These cases should be handled in ConvertVideoFrameToRGBPixels";
break;
case PIXEL_FORMAT_NV12:
@@ -575,6 +565,7 @@ void ConvertVideoFrameToRGBPixelsTask(const VideoFrame* video_frame,
row_bytes, width, rows);
break;
+ case PIXEL_FORMAT_UYVY:
case PIXEL_FORMAT_NV21:
case PIXEL_FORMAT_YUY2:
case PIXEL_FORMAT_ARGB:
@@ -589,7 +580,7 @@ void ConvertVideoFrameToRGBPixelsTask(const VideoFrame* video_frame,
case PIXEL_FORMAT_XB30:
case PIXEL_FORMAT_UNKNOWN:
NOTREACHED() << "Only YUV formats and Y16 are supported, got: "
- << media::VideoPixelFormatToString(video_frame->format());
+ << media::VideoPixelFormatToString(format);
}
done->Run();
}
@@ -1110,7 +1101,8 @@ void TextureSubImageUsingIntermediate(unsigned target,
void PaintCanvasVideoRenderer::ConvertVideoFrameToRGBPixels(
const VideoFrame* video_frame,
void* rgb_pixels,
- size_t row_bytes) {
+ size_t row_bytes,
+ bool premultiply_alpha) {
if (!video_frame->IsMappable()) {
NOTREACHED() << "Cannot extract pixels from non-CPU frame formats.";
return;
@@ -1163,13 +1155,14 @@ void PaintCanvasVideoRenderer::ConvertVideoFrameToRGBPixels(
base::BindOnce(&base::WaitableEvent::Signal, base::Unretained(&event)));
for (size_t i = 1; i < n_tasks; ++i) {
- base::PostTask(FROM_HERE,
- base::BindOnce(ConvertVideoFrameToRGBPixelsTask,
- base::Unretained(video_frame), rgb_pixels,
- row_bytes, i, n_tasks, &barrier));
+ base::ThreadPool::PostTask(
+ FROM_HERE,
+ base::BindOnce(ConvertVideoFrameToRGBPixelsTask,
+ base::Unretained(video_frame), rgb_pixels, row_bytes,
+ premultiply_alpha, i, n_tasks, &barrier));
}
- ConvertVideoFrameToRGBPixelsTask(video_frame, rgb_pixels, row_bytes, 0,
- n_tasks, &barrier);
+ ConvertVideoFrameToRGBPixelsTask(video_frame, rgb_pixels, row_bytes,
+ premultiply_alpha, 0, n_tasks, &barrier);
{
base::ScopedAllowBaseSyncPrimitivesOutsideBlockingScope allow_wait;
event.Wait();
@@ -1223,8 +1216,7 @@ bool PaintCanvasVideoRenderer::CopyVideoFrameTexturesToGLTexture(
DCHECK(video_frame);
DCHECK(video_frame->HasTextures());
if (video_frame->NumTextures() > 1 ||
- video_frame->metadata()->IsTrue(
- VideoFrameMetadata::READ_LOCK_FENCES_ENABLED)) {
+ video_frame->metadata()->read_lock_fences_enabled) {
if (!raster_context_provider)
return false;
GrContext* gr_context = raster_context_provider->GrContext();
@@ -1361,11 +1353,6 @@ bool PaintCanvasVideoRenderer::CopyVideoFrameYUVDataToGLTexture(
bool premultiply_alpha,
bool flip_y) {
DCHECK(raster_context_provider);
- GrContext* gr_context = raster_context_provider->GrContext();
- if (!gr_context) {
- return false;
- }
-
if (!video_frame.IsMappable()) {
return false;
}
@@ -1373,47 +1360,7 @@ bool PaintCanvasVideoRenderer::CopyVideoFrameYUVDataToGLTexture(
if (video_frame.format() != media::PIXEL_FORMAT_I420) {
return false;
}
- // Could handle NV12 here as well. See NewSkImageFromVideoFrameYUVTextures.
-
- static constexpr size_t kNumPlanes = 3;
- DCHECK_EQ(video_frame.NumPlanes(video_frame.format()), kNumPlanes);
- // Y,U,V GPU-side SkImages. (These must outlive the yuv_textures).
- sk_sp<SkImage> yuv_images[kNumPlanes]{};
- // Y,U,V GPU textures from those SkImages.
- // (A GrBackendTexture is a non-owned reference to the SkImage's texture.)
- GrBackendTexture yuv_textures[kNumPlanes]{};
-
- // Upload the whole coded image area (not visible rect).
- gfx::Size y_tex_size = video_frame.coded_size();
- gfx::Size uv_tex_size((y_tex_size.width() + 1) / 2,
- (y_tex_size.height() + 1) / 2);
-
- for (size_t plane = 0; plane < kNumPlanes; ++plane) {
- const uint8_t* data = video_frame.data(plane);
- int plane_stride = video_frame.stride(plane);
-
- bool is_y_plane = plane == media::VideoFrame::kYPlane;
- gfx::Size tex_size = is_y_plane ? y_tex_size : uv_tex_size;
- int data_size = plane_stride * (tex_size.height() - 1) + tex_size.width();
-
- // Create a CPU-side SkImage from the channel.
- sk_sp<SkData> sk_data = SkData::MakeWithoutCopy(data, data_size);
- DCHECK(sk_data);
- SkImageInfo image_info =
- SkImageInfo::Make(tex_size.width(), tex_size.height(),
- kGray_8_SkColorType, kUnknown_SkAlphaType);
- sk_sp<SkImage> plane_image_cpu =
- SkImage::MakeRasterData(image_info, sk_data, plane_stride);
- DCHECK(plane_image_cpu);
-
- // Upload the CPU-side SkImage into a GPU-side SkImage.
- // (Note the original video_frame data is no longer used after this point.)
- yuv_images[plane] = plane_image_cpu->makeTextureImage(gr_context);
- DCHECK(yuv_images[plane]);
-
- // Extract the backend texture from the GPU-side image.
- yuv_textures[plane] = yuv_images[plane]->getBackendTexture(false);
- }
+ // Could handle NV12 here as well. See NewSkImageFromVideoFrameYUV.
auto* sii = raster_context_provider->SharedImageInterface();
gpu::raster::RasterInterface* source_ri =
@@ -1421,52 +1368,39 @@ bool PaintCanvasVideoRenderer::CopyVideoFrameYUVDataToGLTexture(
// We need a shared image to receive the intermediate RGB result. Try to reuse
// one if compatible, otherwise create a new one.
- if (yuv_cache_.texture && yuv_cache_.size == video_frame.coded_size() &&
+ gpu::SyncToken token;
+ if (!yuv_cache_.mailbox.IsZero() &&
+ yuv_cache_.size == video_frame.coded_size() &&
yuv_cache_.raster_context_provider == raster_context_provider) {
- source_ri->WaitSyncTokenCHROMIUM(yuv_cache_.sync_token.GetConstData());
+ token = yuv_cache_.sync_token;
} else {
yuv_cache_.Reset();
yuv_cache_.raster_context_provider = raster_context_provider;
yuv_cache_.size = video_frame.coded_size();
- yuv_cache_.mailbox = sii->CreateSharedImage(
- viz::ResourceFormat::RGBA_8888, video_frame.coded_size(),
- gfx::ColorSpace(), gpu::SHARED_IMAGE_USAGE_GLES2);
- yuv_cache_.texture = SynchronizeAndImportMailbox(
- source_ri, sii->GenUnverifiedSyncToken(), yuv_cache_.mailbox);
- }
- // On the source GL context, do the YUV->RGB conversion using Skia.
- gpu::SyncToken post_conversion_sync_token;
- {
- source_ri->BeginSharedImageAccessDirectCHROMIUM(
- yuv_cache_.texture, GL_SHARED_IMAGE_ACCESS_MODE_READWRITE_CHROMIUM);
-
- GrGLTextureInfo backend_texture = {};
- backend_texture.fTarget = GL_TEXTURE_2D;
- backend_texture.fID = yuv_cache_.texture;
- backend_texture.fFormat = GL_RGBA8;
- GrBackendTexture result_texture(video_frame.coded_size().width(),
- video_frame.coded_size().height(),
- GrMipMapped::kNo, backend_texture);
-
- sk_sp<SkImage> yuv_image = YUVGrBackendTexturesToSkImage(
- gr_context, video_frame.ColorSpace(), video_frame.format(),
- yuv_textures, result_texture);
-
- gr_context->flush();
- source_ri->EndSharedImageAccessDirectCHROMIUM(yuv_cache_.texture);
-
- source_ri->GenUnverifiedSyncTokenCHROMIUM(
- post_conversion_sync_token.GetData());
-
- if (!yuv_image) {
- // Conversion failed. Note the last use sync token for destruction.
- yuv_cache_.sync_token = post_conversion_sync_token;
- yuv_cache_.Reset();
- return false;
+ uint32_t usage = gpu::SHARED_IMAGE_USAGE_GLES2;
+ if (raster_context_provider->ContextCapabilities().supports_oop_raster) {
+ usage |= gpu::SHARED_IMAGE_USAGE_RASTER |
+ gpu::SHARED_IMAGE_USAGE_OOP_RASTERIZATION;
}
+
+ yuv_cache_.mailbox = sii->CreateSharedImage(viz::ResourceFormat::RGBA_8888,
+ video_frame.coded_size(),
+ gfx::ColorSpace(), usage);
+ token = sii->GenUnverifiedSyncToken();
}
+ // On the source Raster context, do the YUV->RGB conversion.
+ gpu::MailboxHolder dest_holder;
+ dest_holder.mailbox = yuv_cache_.mailbox;
+ dest_holder.texture_target = GL_TEXTURE_2D;
+ dest_holder.sync_token = token;
+ ConvertFromVideoFrameYUV(&video_frame, raster_context_provider, dest_holder);
+
+ gpu::SyncToken post_conversion_sync_token;
+ source_ri->GenUnverifiedSyncTokenCHROMIUM(
+ post_conversion_sync_token.GetData());
+
// On the destination GL context, do a copy (with cropping) into the
// destination texture.
GLuint intermediate_texture = SynchronizeAndImportMailbox(
@@ -1601,7 +1535,7 @@ bool PaintCanvasVideoRenderer::Cache::Recycle() {
return false;
// Flush any pending GPU work using this texture.
- sk_image->flush(raster_context_provider->GrContext());
+ sk_image->flushAndSubmit(raster_context_provider->GrContext());
// We need a new texture ID because skia will destroy the previous one with
// the SkImage.
@@ -1681,7 +1615,7 @@ bool PaintCanvasVideoRenderer::UpdateLastImage(
ConvertFromVideoFrameYUV(video_frame.get(), raster_context_provider,
dest_holder);
}
- raster_context_provider->GrContext()->flush();
+ raster_context_provider->GrContext()->flushAndSubmit();
}
// TODO(jochin): Don't always generate SkImage here.
@@ -1779,18 +1713,17 @@ PaintCanvasVideoRenderer::YUVTextureCache::YUVTextureCache() = default;
PaintCanvasVideoRenderer::YUVTextureCache::~YUVTextureCache() = default;
void PaintCanvasVideoRenderer::YUVTextureCache::Reset() {
- if (!texture)
+ if (mailbox.IsZero())
return;
DCHECK(raster_context_provider);
gpu::raster::RasterInterface* ri = raster_context_provider->RasterInterface();
ri->WaitSyncTokenCHROMIUM(sync_token.GetConstData());
- ri->DeleteGpuRasterTexture(texture);
- texture = 0;
ri->OrderingBarrierCHROMIUM();
auto* sii = raster_context_provider->SharedImageInterface();
sii->DestroySharedImage(sync_token, mailbox);
+ mailbox.SetZero();
// Kick off the GL work up to the OrderingBarrierCHROMIUM above as well as the
// SharedImageInterface work, to ensure the shared image memory is released in
diff --git a/chromium/media/renderers/paint_canvas_video_renderer.h b/chromium/media/renderers/paint_canvas_video_renderer.h
index d5b1a141670..ffe7925bf85 100644
--- a/chromium/media/renderers/paint_canvas_video_renderer.h
+++ b/chromium/media/renderers/paint_canvas_video_renderer.h
@@ -61,7 +61,9 @@ class MEDIA_EXPORT PaintCanvasVideoRenderer {
VideoTransformation video_transformation,
viz::RasterContextProvider* raster_context_provider);
- // Paints |video_frame| scaled to its visible size on |canvas|.
+ // Paints |video_frame|, scaled to its |video_frame->visible_rect().size()|
+ // on |canvas|. Note that the origin of |video_frame->visible_rect()| is
+ // ignored -- the copy is done to the origin of |canvas|.
//
// If the format of |video_frame| is PIXEL_FORMAT_NATIVE_TEXTURE, |context_3d|
// and |context_support| must be provided.
@@ -71,10 +73,18 @@ class MEDIA_EXPORT PaintCanvasVideoRenderer {
// Convert the contents of |video_frame| to raw RGB pixels. |rgb_pixels|
// should point into a buffer large enough to hold as many 32 bit RGBA pixels
- // as are in the visible_rect() area of the frame.
+ // as are in the visible_rect() area of the frame. |premultiply_alpha|
+ // indicates whether the R, G, B samples in |rgb_pixels| should be multiplied
+ // by alpha.
+ //
+ // NOTE: If |video_frame| doesn't have an alpha plane, all the A samples in
+ // |rgb_pixels| will be 255 (equivalent to an alpha of 1.0) and therefore the
+ // value of |premultiply_alpha| has no effect on the R, G, B samples in
+ // |rgb_pixels|.
static void ConvertVideoFrameToRGBPixels(const media::VideoFrame* video_frame,
void* rgb_pixels,
- size_t row_bytes);
+ size_t row_bytes,
+ bool premultiply_alpha = true);
// Copy the visible rect size contents of texture of |video_frame| to
// texture |texture|. |level|, |internal_format|, |type| specify target
@@ -271,9 +281,6 @@ class MEDIA_EXPORT PaintCanvasVideoRenderer {
// The shared image backing the texture.
gpu::Mailbox mailbox;
- // The GL texture.
- uint32_t texture = 0;
-
// A SyncToken after last usage, used for reusing or destroying texture and
// shared image.
gpu::SyncToken sync_token;
diff --git a/chromium/media/renderers/paint_canvas_video_renderer_unittest.cc b/chromium/media/renderers/paint_canvas_video_renderer_unittest.cc
index 33b64084b98..5c1ab11ebda 100644
--- a/chromium/media/renderers/paint_canvas_video_renderer_unittest.cc
+++ b/chromium/media/renderers/paint_canvas_video_renderer_unittest.cc
@@ -6,6 +6,7 @@
#include <stdint.h>
#include "base/bind.h"
+#include "base/logging.h"
#include "base/macros.h"
#include "base/memory/aligned_memory.h"
#include "base/sys_byteorder.h"
@@ -1389,8 +1390,7 @@ TEST_F(PaintCanvasVideoRendererWithGLTest,
CopyVideoFrameTexturesToGLTextureRGBA_ReadLockFence) {
base::RunLoop run_loop;
scoped_refptr<VideoFrame> frame = CreateTestRGBAFrame(run_loop.QuitClosure());
- frame->metadata()->SetBoolean(VideoFrameMetadata::READ_LOCK_FENCES_ENABLED,
- true);
+ frame->metadata()->read_lock_fences_enabled = true;
CopyVideoFrameTexturesAndCheckPixels(frame, &CheckRGBAFramePixels);
diff --git a/chromium/media/renderers/renderer_impl.cc b/chromium/media/renderers/renderer_impl.cc
index 98b8c805dd6..aa1c01263c5 100644
--- a/chromium/media/renderers/renderer_impl.cc
+++ b/chromium/media/renderers/renderer_impl.cc
@@ -93,7 +93,7 @@ RendererImpl::RendererImpl(
video_renderer_(std::move(video_renderer)),
current_audio_stream_(nullptr),
current_video_stream_(nullptr),
- time_source_(NULL),
+ time_source_(nullptr),
time_ticking_(false),
playback_rate_(0.0),
audio_buffering_state_(BUFFERING_HAVE_NOTHING),
@@ -206,6 +206,14 @@ void RendererImpl::SetLatencyHint(
audio_renderer_->SetLatencyHint(latency_hint);
}
+void RendererImpl::SetPreservesPitch(bool preserves_pitch) {
+ DVLOG(1) << __func__;
+ DCHECK(task_runner_->BelongsToCurrentThread());
+
+ if (audio_renderer_)
+ audio_renderer_->SetPreservesPitch(preserves_pitch);
+}
+
void RendererImpl::Flush(base::OnceClosure flush_cb) {
DVLOG(1) << __func__;
DCHECK(task_runner_->BelongsToCurrentThread());
@@ -853,19 +861,15 @@ void RendererImpl::OnRendererEnded(DemuxerStream::Type type) {
DCHECK((type == DemuxerStream::AUDIO) || (type == DemuxerStream::VIDEO));
TRACE_EVENT1("media", "RendererImpl::OnRendererEnded", "type", type_string);
- if (state_ != STATE_PLAYING)
+ // If all streams are ended, do not propagate a redundant ended event.
+ if (state_ != STATE_PLAYING || PlaybackHasEnded())
return;
if (type == DemuxerStream::AUDIO) {
- // If all streams are ended, do not propagate a redundant ended event.
- if (audio_ended_ && PlaybackHasEnded())
- return;
+ DCHECK(audio_renderer_);
audio_ended_ = true;
} else {
DCHECK(video_renderer_);
- // If all streams are ended, do not propagate a redundant ended event.
- if (audio_ended_ && PlaybackHasEnded())
- return;
video_ended_ = true;
video_renderer_->OnTimeStopped();
}
diff --git a/chromium/media/renderers/renderer_impl.h b/chromium/media/renderers/renderer_impl.h
index 847186215b7..f6603d3d084 100644
--- a/chromium/media/renderers/renderer_impl.h
+++ b/chromium/media/renderers/renderer_impl.h
@@ -58,6 +58,7 @@ class MEDIA_EXPORT RendererImpl : public Renderer {
PipelineStatusCallback init_cb) final;
void SetCdm(CdmContext* cdm_context, CdmAttachedCB cdm_attached_cb) final;
void SetLatencyHint(base::Optional<base::TimeDelta> latency_hint) final;
+ void SetPreservesPitch(bool preserves_pitch) final;
void Flush(base::OnceClosure flush_cb) final;
void StartPlayingFrom(base::TimeDelta time) final;
void SetPlaybackRate(double playback_rate) final;
diff --git a/chromium/media/renderers/video_overlay_factory.cc b/chromium/media/renderers/video_overlay_factory.cc
index be395346f37..d73990b1174 100644
--- a/chromium/media/renderers/video_overlay_factory.cc
+++ b/chromium/media/renderers/video_overlay_factory.cc
@@ -4,6 +4,7 @@
#include "media/renderers/video_overlay_factory.h"
+#include "base/logging.h"
#include "base/time/time.h"
#include "media/base/video_frame.h"
#include "ui/gfx/geometry/size.h"
diff --git a/chromium/media/renderers/video_renderer_impl.cc b/chromium/media/renderers/video_renderer_impl.cc
index af3e6488466..99d67e45c5c 100644
--- a/chromium/media/renderers/video_renderer_impl.cc
+++ b/chromium/media/renderers/video_renderer_impl.cc
@@ -566,8 +566,7 @@ void VideoRendererImpl::FrameReady(VideoDecoderStream::ReadStatus status,
last_frame_ready_time_ = tick_clock_->NowTicks();
- const bool is_eos =
- frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM);
+ const bool is_eos = frame->metadata()->end_of_stream;
const bool is_before_start_time = !is_eos && IsBeforeStartTime(*frame);
const bool cant_read = !video_decoder_stream_->CanReadWithoutStalling();
@@ -599,9 +598,9 @@ void VideoRendererImpl::FrameReady(VideoDecoderStream::ReadStatus status,
// RemoveFramesForUnderflowOrBackgroundRendering() below to actually expire
// this frame if it's too far behind the current media time. Without this,
// we may resume too soon after a track change in the low delay case.
- if (!frame->metadata()->HasKey(VideoFrameMetadata::FRAME_DURATION)) {
- frame->metadata()->SetTimeDelta(VideoFrameMetadata::FRAME_DURATION,
- video_decoder_stream_->AverageDuration());
+ if (!frame->metadata()->frame_duration.has_value()) {
+ frame->metadata()->frame_duration =
+ video_decoder_stream_->AverageDuration();
}
AddReadyFrame_Locked(std::move(frame));
@@ -731,16 +730,12 @@ void VideoRendererImpl::TransitionToHaveNothing_Locked() {
void VideoRendererImpl::AddReadyFrame_Locked(scoped_refptr<VideoFrame> frame) {
DCHECK(task_runner_->BelongsToCurrentThread());
lock_.AssertAcquired();
- DCHECK(!frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM));
+ DCHECK(!frame->metadata()->end_of_stream);
++stats_.video_frames_decoded;
- bool power_efficient = false;
- if (frame->metadata()->GetBoolean(VideoFrameMetadata::POWER_EFFICIENT,
- &power_efficient) &&
- power_efficient) {
+ if (frame->metadata()->power_efficient)
++stats_.video_frames_decoded_power_efficient;
- }
algorithm_->EnqueueFrame(std::move(frame));
}
@@ -929,12 +924,13 @@ base::TimeTicks VideoRendererImpl::GetCurrentMediaTimeAsWallClockTime() {
bool VideoRendererImpl::IsBeforeStartTime(const VideoFrame& frame) {
// Prefer the actual frame duration over the average if available.
- base::TimeDelta metadata_frame_duration;
- if (frame.metadata()->GetTimeDelta(VideoFrameMetadata::FRAME_DURATION,
- &metadata_frame_duration)) {
- return frame.timestamp() + metadata_frame_duration < start_timestamp_;
+ if (frame.metadata()->frame_duration.has_value()) {
+ return frame.timestamp() + *frame.metadata()->frame_duration <
+ start_timestamp_;
}
+ // TODO(tguilbert): video_decoder_stream_->AverageDuration() can be accessed
+ // from the wrong thread.
return frame.timestamp() + video_decoder_stream_->AverageDuration() <
start_timestamp_;
}
diff --git a/chromium/media/renderers/video_resource_updater.cc b/chromium/media/renderers/video_resource_updater.cc
index 27678f2b65f..5667c75131c 100644
--- a/chromium/media/renderers/video_resource_updater.cc
+++ b/chromium/media/renderers/video_resource_updater.cc
@@ -13,6 +13,7 @@
#include "base/atomic_sequence_num.h"
#include "base/bind.h"
#include "base/bit_cast.h"
+#include "base/logging.h"
#include "base/memory/shared_memory_mapping.h"
#include "base/memory/unsafe_shared_memory_region.h"
#include "base/strings/stringprintf.h"
@@ -121,6 +122,12 @@ VideoFrameResourceType ExternalResourceTypeForHardwarePlanes(
buffer_formats[1] = gfx::BufferFormat::RG_88;
return VideoFrameResourceType::YUV;
+ case PIXEL_FORMAT_P016LE:
+ DCHECK_EQ(num_textures, 1);
+ // TODO(mcasas): Support other formats such as e.g. P012.
+ buffer_formats[0] = gfx::BufferFormat::P010;
+ return VideoFrameResourceType::RGB;
+
case PIXEL_FORMAT_UYVY:
NOTREACHED();
FALLTHROUGH;
@@ -143,7 +150,6 @@ VideoFrameResourceType ExternalResourceTypeForHardwarePlanes(
case PIXEL_FORMAT_YUV444P12:
case PIXEL_FORMAT_Y16:
case PIXEL_FORMAT_XBGR:
- case PIXEL_FORMAT_P016LE:
case PIXEL_FORMAT_UNKNOWN:
break;
}
@@ -465,9 +471,9 @@ VideoResourceUpdater::~VideoResourceUpdater() {
void VideoResourceUpdater::ObtainFrameResources(
scoped_refptr<VideoFrame> video_frame) {
- if (video_frame->metadata()->GetUnguessableToken(
- VideoFrameMetadata::OVERLAY_PLANE_ID, &overlay_plane_id_)) {
+ if (video_frame->metadata()->overlay_plane_id.has_value()) {
// This is a hole punching VideoFrame, there is nothing to display.
+ overlay_plane_id_ = *video_frame->metadata()->overlay_plane_id;
frame_resource_type_ = VideoFrameResourceType::VIDEO_HOLE;
return;
}
@@ -584,8 +590,8 @@ void VideoResourceUpdater::AppendQuads(viz::RenderPass* render_pass,
frame_resources_.size() > 3 ? frame_resources_[3].id : 0,
frame->ColorSpace(), frame_resource_offset_,
frame_resource_multiplier_, frame_bits_per_channel_);
- if (frame->metadata()->IsTrue(VideoFrameMetadata::PROTECTED_VIDEO)) {
- if (frame->metadata()->IsTrue(VideoFrameMetadata::HW_PROTECTED)) {
+ if (frame->metadata()->protected_video) {
+ if (frame->metadata()->hw_protected) {
yuv_video_quad->protected_video_type =
gfx::ProtectedVideoType::kHardwareProtected;
} else {
@@ -613,8 +619,8 @@ void VideoResourceUpdater::AppendQuads(viz::RenderPass* render_pass,
bool nearest_neighbor = false;
gfx::ProtectedVideoType protected_video_type =
gfx::ProtectedVideoType::kClear;
- if (frame->metadata()->IsTrue(VideoFrameMetadata::PROTECTED_VIDEO)) {
- if (frame->metadata()->IsTrue(VideoFrameMetadata::HW_PROTECTED))
+ if (frame->metadata()->protected_video) {
+ if (frame->metadata()->hw_protected)
protected_video_type = gfx::ProtectedVideoType::kHardwareProtected;
else
protected_video_type = gfx::ProtectedVideoType::kSoftwareProtected;
@@ -814,8 +820,7 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForHardwarePlanes(
VideoFrameExternalResources external_resources;
gfx::ColorSpace resource_color_space = video_frame->ColorSpace();
- bool copy_required =
- video_frame->metadata()->IsTrue(VideoFrameMetadata::COPY_REQUIRED);
+ bool copy_required = video_frame->metadata()->copy_required;
GLuint target = video_frame->mailbox_holder(0).texture_target;
// If |copy_required| then we will copy into a GL_TEXTURE_2D target.
@@ -857,19 +862,18 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForHardwarePlanes(
auto transfer_resource = viz::TransferableResource::MakeGL(
mailbox_holder.mailbox, GL_LINEAR, mailbox_holder.texture_target,
mailbox_holder.sync_token, plane_size,
- video_frame->metadata()->IsTrue(VideoFrameMetadata::ALLOW_OVERLAY));
+ video_frame->metadata()->allow_overlay);
transfer_resource.color_space = resource_color_space;
transfer_resource.read_lock_fences_enabled =
- video_frame->metadata()->IsTrue(
- VideoFrameMetadata::READ_LOCK_FENCES_ENABLED);
+ video_frame->metadata()->read_lock_fences_enabled;
transfer_resource.format = viz::GetResourceFormat(buffer_formats[i]);
transfer_resource.ycbcr_info = video_frame->ycbcr_info();
#if defined(OS_ANDROID)
transfer_resource.is_backed_by_surface_texture =
- video_frame->metadata()->IsTrue(VideoFrameMetadata::TEXTURE_OWNER);
- transfer_resource.wants_promotion_hint = video_frame->metadata()->IsTrue(
- VideoFrameMetadata::WANTS_PROMOTION_HINT);
+ video_frame->metadata()->texture_owner;
+ transfer_resource.wants_promotion_hint =
+ video_frame->metadata()->wants_promotion_hint;
#endif
external_resources.resources.push_back(std::move(transfer_resource));
external_resources.release_callbacks.push_back(
@@ -990,11 +994,19 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
SkBitmap sk_bitmap;
sk_bitmap.installPixels(info, software_resource->pixels(),
info.minRowBytes());
+ // This is software path, so |canvas| and |video_frame| are always
+ // backed by software.
cc::SkiaPaintCanvas canvas(sk_bitmap);
-
- // This is software path, so canvas and video_frame are always backed
- // by software.
- video_renderer_->Copy(video_frame, &canvas, nullptr);
+ cc::PaintFlags flags;
+ flags.setBlendMode(SkBlendMode::kSrc);
+ flags.setFilterQuality(kLow_SkFilterQuality);
+
+ // Note that PaintCanvasVideoRenderer::Copy would copy to the origin,
+ // not |video_frame->visible_rect|, so call Paint instead.
+ // https://crbug.com/1090435
+ video_renderer_->Paint(video_frame, &canvas,
+ gfx::RectF(video_frame->visible_rect()), flags,
+ media::kNoTransformation, nullptr);
} else {
HardwarePlaneResource* hardware_resource = plane_resource->AsHardware();
size_t bytes_per_row = viz::ResourceSizes::CheckedWidthInBytes<size_t>(
@@ -1242,7 +1254,7 @@ void VideoResourceUpdater::RecycleResource(uint32_t plane_resource_id,
if (resource_it == all_resources_.end())
return;
- if (context_provider_ && sync_token.HasData()) {
+ if ((raster_context_provider_ || context_provider_) && sync_token.HasData()) {
auto* gl = raster_context_provider_ ? raster_context_provider_->ContextGL()
: context_provider_->ContextGL();
gl->WaitSyncTokenCHROMIUM(sync_token.GetConstData());
diff --git a/chromium/media/renderers/video_resource_updater_unittest.cc b/chromium/media/renderers/video_resource_updater_unittest.cc
index 79577a39f81..6e515ba2bae 100644
--- a/chromium/media/renderers/video_resource_updater_unittest.cc
+++ b/chromium/media/renderers/video_resource_updater_unittest.cc
@@ -204,8 +204,7 @@ class VideoResourceUpdaterTest : public testing::Test {
bool needs_copy) {
scoped_refptr<media::VideoFrame> video_frame = CreateTestHardwareVideoFrame(
media::PIXEL_FORMAT_ARGB, GL_TEXTURE_EXTERNAL_OES);
- video_frame->metadata()->SetBoolean(
- media::VideoFrameMetadata::COPY_REQUIRED, needs_copy);
+ video_frame->metadata()->copy_required = needs_copy;
return video_frame;
}
@@ -531,8 +530,7 @@ TEST_F(VideoResourceUpdaterTest, CreateForHardwarePlanes) {
video_frame = CreateTestYuvHardwareVideoFrame(media::PIXEL_FORMAT_I420, 3,
GL_TEXTURE_RECTANGLE_ARB);
- video_frame->metadata()->SetBoolean(
- media::VideoFrameMetadata::READ_LOCK_FENCES_ENABLED, true);
+ video_frame->metadata()->read_lock_fences_enabled = true;
resources = updater->CreateExternalResourcesFromVideoFrame(video_frame);
EXPECT_TRUE(resources.resources[0].read_lock_fences_enabled);
diff --git a/chromium/media/renderers/win/media_foundation_protection_manager.h b/chromium/media/renderers/win/media_foundation_protection_manager.h
index 9e428b589ba..3ccb8b39ae9 100644
--- a/chromium/media/renderers/win/media_foundation_protection_manager.h
+++ b/chromium/media/renderers/win/media_foundation_protection_manager.h
@@ -10,7 +10,7 @@
#include <windows.media.protection.h>
#include <wrl.h>
-#include "media/renderers/win/mf_cdm_proxy.h"
+#include "media/base/win/mf_cdm_proxy.h"
namespace media {
diff --git a/chromium/media/renderers/win/media_foundation_renderer.cc b/chromium/media/renderers/win/media_foundation_renderer.cc
index b8cfde9f7b7..99d1dc99b61 100644
--- a/chromium/media/renderers/win/media_foundation_renderer.cc
+++ b/chromium/media/renderers/win/media_foundation_renderer.cc
@@ -21,6 +21,7 @@
#include "base/win/windows_version.h"
#include "base/win/wrapped_window_proc.h"
#include "media/base/bind_to_current_loop.h"
+#include "media/base/cdm_context.h"
#include "media/base/timestamp_constants.h"
#include "media/base/win/mf_helpers.h"
@@ -330,8 +331,8 @@ void MediaFoundationRenderer::SetLatencyHint(
NOTIMPLEMENTED() << "We do not use the latency hint today";
}
-// TODO(frankli): Use ComPtr<> for |cdm|.
-void MediaFoundationRenderer::OnCdmProxyReceived(IMFCdmProxy* cdm) {
+void MediaFoundationRenderer::OnCdmProxyReceived(
+ ComPtr<IMFCdmProxy> cdm_proxy) {
DVLOG_FUNC(1);
if (!waiting_for_mf_cdm_ || !content_protection_manager_) {
@@ -342,8 +343,6 @@ void MediaFoundationRenderer::OnCdmProxyReceived(IMFCdmProxy* cdm) {
waiting_for_mf_cdm_ = false;
- ComPtr<IMFCdmProxy> cdm_proxy;
- cdm_proxy.Attach(cdm);
content_protection_manager_->SetCdmProxy(cdm_proxy.Get());
mf_source_->SetCdmProxy(cdm_proxy.Get());
HRESULT hr = SetSourceOnMediaEngine();
diff --git a/chromium/media/renderers/win/media_foundation_renderer.h b/chromium/media/renderers/win/media_foundation_renderer.h
index 99c5193550f..d71cc5d7200 100644
--- a/chromium/media/renderers/win/media_foundation_renderer.h
+++ b/chromium/media/renderers/win/media_foundation_renderer.h
@@ -88,7 +88,7 @@ class MediaFoundationRenderer : public Renderer,
void OnVideoNaturalSizeChanged();
void OnTimeUpdate();
- void OnCdmProxyReceived(IMFCdmProxy* cdm);
+ void OnCdmProxyReceived(Microsoft::WRL::ComPtr<IMFCdmProxy> cdm_proxy);
HRESULT SetDCompModeInternal(bool enabled);
HRESULT GetDCompSurfaceInternal(HANDLE* surface_handle);
diff --git a/chromium/media/renderers/win/media_foundation_source_wrapper.h b/chromium/media/renderers/win/media_foundation_source_wrapper.h
index 76e1d2fc34c..4ab2ac0810a 100644
--- a/chromium/media/renderers/win/media_foundation_source_wrapper.h
+++ b/chromium/media/renderers/win/media_foundation_source_wrapper.h
@@ -14,8 +14,8 @@
#include "base/sequenced_task_runner.h"
#include "media/base/media_resource.h"
+#include "media/base/win/mf_cdm_proxy.h"
#include "media/renderers/win/media_foundation_stream_wrapper.h"
-#include "media/renderers/win/mf_cdm_proxy.h"
namespace media {
diff --git a/chromium/media/renderers/win/mf_cdm_proxy.h b/chromium/media/renderers/win/mf_cdm_proxy.h
deleted file mode 100644
index a04dbab3ef7..00000000000
--- a/chromium/media/renderers/win/mf_cdm_proxy.h
+++ /dev/null
@@ -1,67 +0,0 @@
-// Copyright 2019 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_RENDERERS_WIN_MF_CDM_PROXY_H_
-#define MEDIA_RENDERERS_WIN_MF_CDM_PROXY_H_
-
-#include <stdint.h>
-#include <unknwn.h>
-
-// Interface for clients to get information from MediaFoundationCdm to
-// implements COM interfaces.
-// COM interface is used because we are working with Media Foundation which uses
-// COM extensively for object lifetime management.
-MIDL_INTERFACE("565ab5c2-9923-44e0-997a-f93ccba5dcbf")
-IMFCdmProxy : public IUnknown {
- public:
- // Used by MediaFoundationProtectionManager to get
- // ABI::Windows::Media::Protection::IMediaProtectionPMPServer to implement
- // ABI::Windows::Media::Protection::IMediaProtectionManager::get_Properties as
- // in
- // https://docs.microsoft.com/en-us/uwp/api/windows.media.protection.mediaprotectionmanager
- virtual HRESULT STDMETHODCALLTYPE GetPMPServer(
- /* [in] */ __RPC__in REFIID riid,
- /* [iid_is][out] */ __RPC__deref_out_opt LPVOID * object_result) = 0;
-
- // Used by MediaFoundationSourceWrapper to implement
- // IMFTrustedInput::GetInputTrustAuthority as in
- // https://docs.microsoft.com/en-us/windows/win32/api/mfidl/nn-mfidl-imftrustedinput
- //
- // |content_init_data| is optional initialization data as in
- // https://www.w3.org/TR/encrypted-media/#initialization-data
- virtual HRESULT STDMETHODCALLTYPE GetInputTrustAuthority(
- _In_ uint64_t playback_element_id, _In_ uint32_t stream_id,
- _In_ uint32_t stream_count,
- _In_reads_bytes_opt_(content_init_data_size)
- const uint8_t* content_init_data,
- _In_ uint32_t content_init_data_size, _In_ REFIID riid,
- _COM_Outptr_ IUnknown** object_out) = 0;
-
- // MediaFoundationSourceWrapper provides its last set of key ids
- // associated with a playback element id using SetLastKeyIds when it is
- // destructed.
- // Another instance of MediaFoundationSourceWrapper could then invoke
- // RefreshTrustedInput to let implementation to reuse those key ids
- // information when it happens to have the same playback element id.
- //
- // |playback_element_id| is an ID corresponding to a particular instance of
- // video playback element.
- virtual HRESULT STDMETHODCALLTYPE RefreshTrustedInput(
- _In_ uint64_t playback_element_id) = 0;
-
- virtual HRESULT STDMETHODCALLTYPE SetLastKeyIds(
- _In_ uint64_t playback_element_id, GUID * key_ids,
- uint32_t key_ids_count) = 0;
-
- // Used by MediaFoundationProtectionManager to implement
- // IMFContentProtectionManager::BeginEnableContent as in
- // https://msdn.microsoft.com/en-us/windows/ms694217(v=vs.71)
- //
- // |result| is used to obtain the result of an asynchronous operation as in
- // https://docs.microsoft.com/en-us/windows/win32/api/mfobjects/nn-mfobjects-imfasyncresult
- virtual HRESULT STDMETHODCALLTYPE ProcessContentEnabler(
- _In_ IUnknown * request, _In_ IMFAsyncResult * result) = 0;
-};
-
-#endif // MEDIA_RENDERERS_WIN_MF_CDM_PROXY_H_
diff --git a/chromium/media/renderers/yuv_util.cc b/chromium/media/renderers/yuv_util.cc
index 3a66d63bd94..91ede20c4ee 100644
--- a/chromium/media/renderers/yuv_util.cc
+++ b/chromium/media/renderers/yuv_util.cc
@@ -9,7 +9,9 @@
#include "components/viz/common/gpu/raster_context_provider.h"
#include "gpu/GLES2/gl2extchromium.h"
#include "gpu/command_buffer/client/raster_interface.h"
+#include "gpu/command_buffer/client/shared_image_interface.h"
#include "gpu/command_buffer/common/mailbox_holder.h"
+#include "gpu/command_buffer/common/shared_image_usage.h"
#include "media/base/video_frame.h"
#include "third_party/skia/include/core/SkImage.h"
#include "third_party/skia/include/gpu/GrContext.h"
@@ -18,72 +20,165 @@ namespace media {
namespace {
-static constexpr size_t kNumNV12Planes = 2;
-static constexpr size_t kNumYUVPlanes = 3;
-using YUVMailboxes = std::array<gpu::MailboxHolder, kNumYUVPlanes>;
-
-YUVMailboxes GetYUVMailboxes(const VideoFrame* video_frame,
- gpu::raster::RasterInterface* ri) {
- YUVMailboxes mailboxes;
-
- for (size_t i = 0; i < video_frame->NumTextures(); ++i) {
- mailboxes[i] = video_frame->mailbox_holder(i);
- DCHECK(mailboxes[i].texture_target == GL_TEXTURE_2D ||
- mailboxes[i].texture_target == GL_TEXTURE_EXTERNAL_OES ||
- mailboxes[i].texture_target == GL_TEXTURE_RECTANGLE_ARB)
- << "Unsupported texture target " << std::hex << std::showbase
- << mailboxes[i].texture_target;
- ri->WaitSyncTokenCHROMIUM(mailboxes[i].sync_token.GetConstData());
- }
-
- return mailboxes;
-}
+enum YUVIndex : size_t {
+ kYIndex = 0,
+ kUIndex = 1,
+ kVIndex = 2,
+};
+static constexpr size_t kNumNV12Planes = kUIndex + 1;
+static constexpr size_t kNumYUVPlanes = kVIndex + 1;
+using YUVMailboxes = std::array<gpu::MailboxHolder, kNumYUVPlanes>;
struct YUVPlaneTextureInfo {
GrGLTextureInfo texture = {0, 0};
bool is_shared_image = false;
};
using YUVTexturesInfo = std::array<YUVPlaneTextureInfo, kNumYUVPlanes>;
-YUVTexturesInfo GetYUVTexturesInfo(
- const VideoFrame* video_frame,
- viz::RasterContextProvider* raster_context_provider) {
- gpu::raster::RasterInterface* ri = raster_context_provider->RasterInterface();
- DCHECK(ri);
- YUVMailboxes mailboxes = GetYUVMailboxes(video_frame, ri);
- YUVTexturesInfo yuv_textures_info;
-
- GrGLenum skia_texture_format =
- video_frame->format() == PIXEL_FORMAT_NV12 ? GL_RGB8 : GL_R8_EXT;
- for (size_t i = 0; i < video_frame->NumTextures(); ++i) {
- yuv_textures_info[i].texture.fID =
- ri->CreateAndConsumeForGpuRaster(mailboxes[i].mailbox);
- if (mailboxes[i].mailbox.IsSharedImage()) {
- yuv_textures_info[i].is_shared_image = true;
- ri->BeginSharedImageAccessDirectCHROMIUM(
- yuv_textures_info[i].texture.fID,
- GL_SHARED_IMAGE_ACCESS_MODE_READ_CHROMIUM);
+class VideoFrameYUVMailboxesHolder {
+ public:
+ VideoFrameYUVMailboxesHolder(const VideoFrame* video_frame,
+ viz::RasterContextProvider* provider,
+ bool import_textures)
+ : provider_(provider) {
+ DCHECK(video_frame);
+ DCHECK(video_frame->HasTextures() || video_frame->IsMappable());
+ DCHECK(video_frame->format() == PIXEL_FORMAT_I420 ||
+ video_frame->format() == PIXEL_FORMAT_NV12)
+ << "VideoFrame has an unsupported YUV format " << video_frame->format();
+ is_nv12_ = video_frame->format() == PIXEL_FORMAT_NV12;
+
+ DCHECK(provider_);
+ auto* ri = provider_->RasterInterface();
+ DCHECK(ri);
+
+ if (video_frame->HasTextures()) {
+ video_frame_owns_holders_ = true;
+ for (size_t plane = 0; plane < video_frame->NumTextures(); ++plane) {
+ holders_[plane] = video_frame->mailbox_holder(plane);
+ DCHECK(holders_[plane].texture_target == GL_TEXTURE_2D ||
+ holders_[plane].texture_target == GL_TEXTURE_EXTERNAL_OES ||
+ holders_[plane].texture_target == GL_TEXTURE_RECTANGLE_ARB)
+ << "Unsupported texture target " << std::hex << std::showbase
+ << holders_[plane].texture_target;
+ ri->WaitSyncTokenCHROMIUM(holders_[plane].sync_token.GetConstData());
+ }
+ } else {
+ DCHECK(!is_nv12_) << "NV12 CPU backed VideoFrames aren't supported.";
+ video_frame_owns_holders_ = false;
+ gfx::Size y_size = video_frame->coded_size();
+ gfx::Size uv_size = gfx::Size(y_size.width() / 2, y_size.height() / 2);
+
+ auto* sii = provider_->SharedImageInterface();
+ DCHECK(sii);
+ uint32_t mailbox_usage;
+ if (provider_->ContextCapabilities().supports_oop_raster) {
+ mailbox_usage = gpu::SHARED_IMAGE_USAGE_RASTER |
+ gpu::SHARED_IMAGE_USAGE_OOP_RASTERIZATION;
+ } else {
+ mailbox_usage = gpu::SHARED_IMAGE_USAGE_GLES2;
+ }
+ for (size_t plane = 0; plane < kNumYUVPlanes; ++plane) {
+ gfx::Size tex_size = plane == kYIndex ? y_size : uv_size;
+ holders_[plane].mailbox =
+ sii->CreateSharedImage(viz::ResourceFormat::LUMINANCE_8, tex_size,
+ video_frame->ColorSpace(), mailbox_usage);
+ holders_[plane].texture_target = GL_TEXTURE_2D;
+ }
+
+ // Split up shared image creation from upload so we only have to wait on
+ // one sync token.
+ ri->WaitSyncTokenCHROMIUM(sii->GenUnverifiedSyncToken().GetConstData());
+ for (size_t plane = 0; plane < kNumYUVPlanes; ++plane) {
+ gfx::Size tex_size = plane == kYIndex ? y_size : uv_size;
+ SkImageInfo info =
+ SkImageInfo::Make(tex_size.width(), tex_size.height(),
+ kGray_8_SkColorType, kUnknown_SkAlphaType);
+ ri->WritePixels(holders_[plane].mailbox, 0, 0, GL_TEXTURE_2D,
+ video_frame->stride(plane), info,
+ video_frame->data(plane));
+ }
}
- yuv_textures_info[i].texture.fTarget = mailboxes[i].texture_target;
- yuv_textures_info[i].texture.fFormat = skia_texture_format;
+ if (import_textures) {
+ ImportTextures();
+ }
}
- return yuv_textures_info;
-}
+ ~VideoFrameYUVMailboxesHolder() {
+ auto* ri = provider_->RasterInterface();
+ DCHECK(ri);
+ if (imported_textures_) {
+ for (auto& tex_info : textures_) {
+ if (!tex_info.texture.fID)
+ continue;
+
+ if (tex_info.is_shared_image)
+ ri->EndSharedImageAccessDirectCHROMIUM(tex_info.texture.fID);
+ ri->DeleteGpuRasterTexture(tex_info.texture.fID);
+ }
+ }
-void DeleteYUVTextures(const VideoFrame* video_frame,
- viz::RasterContextProvider* raster_context_provider,
- const YUVTexturesInfo& yuv_textures_info) {
- gpu::raster::RasterInterface* ri = raster_context_provider->RasterInterface();
- DCHECK(ri);
+ // Don't destroy shared images we don't own.
+ if (video_frame_owns_holders_)
+ return;
- for (size_t i = 0; i < video_frame->NumTextures(); ++i) {
- if (yuv_textures_info[i].is_shared_image)
- ri->EndSharedImageAccessDirectCHROMIUM(yuv_textures_info[i].texture.fID);
- ri->DeleteGpuRasterTexture(yuv_textures_info[i].texture.fID);
+ gpu::SyncToken token;
+ ri->GenUnverifiedSyncTokenCHROMIUM(token.GetData());
+
+ auto* sii = provider_->SharedImageInterface();
+ DCHECK(sii);
+ for (auto& mailbox_holder : holders_) {
+ if (!mailbox_holder.mailbox.IsZero())
+ sii->DestroySharedImage(token, mailbox_holder.mailbox);
+ mailbox_holder.mailbox.SetZero();
+ }
}
-}
+
+ bool is_nv12() { return is_nv12_; }
+
+ const gpu::Mailbox& mailbox(size_t plane) {
+ DCHECK_LT(plane, is_nv12_ ? kNumNV12Planes : kNumYUVPlanes);
+ return holders_[plane].mailbox;
+ }
+
+ const GrGLTextureInfo& texture(size_t plane) {
+ DCHECK_LT(plane, is_nv12_ ? kNumNV12Planes : kNumYUVPlanes);
+ DCHECK(imported_textures_);
+ return textures_[plane].texture;
+ }
+
+ private:
+ void ImportTextures() {
+ auto* ri = provider_->RasterInterface();
+ GrGLenum skia_texture_format = is_nv12_ ? GL_RGB8 : GL_LUMINANCE8_EXT;
+ for (size_t plane = 0; plane < NumPlanes(); ++plane) {
+ textures_[plane].texture.fID =
+ ri->CreateAndConsumeForGpuRaster(holders_[plane].mailbox);
+ if (holders_[plane].mailbox.IsSharedImage()) {
+ textures_[plane].is_shared_image = true;
+ ri->BeginSharedImageAccessDirectCHROMIUM(
+ textures_[plane].texture.fID,
+ GL_SHARED_IMAGE_ACCESS_MODE_READ_CHROMIUM);
+ }
+
+ textures_[plane].texture.fTarget = holders_[plane].texture_target;
+ textures_[plane].texture.fFormat = skia_texture_format;
+ }
+
+ imported_textures_ = true;
+ }
+
+ size_t NumPlanes() { return is_nv12_ ? kNumNV12Planes : kNumYUVPlanes; }
+
+ viz::RasterContextProvider* provider_ = nullptr;
+ bool imported_textures_ = false;
+ bool video_frame_owns_holders_ = false;
+ bool is_nv12_ = false;
+
+ YUVMailboxes holders_;
+ YUVTexturesInfo textures_;
+};
void ConvertFromVideoFrameYUVWithGrContext(
const VideoFrame* video_frame,
@@ -99,9 +194,8 @@ void ConvertFromVideoFrameYUVWithGrContext(
dest_tex_id, GL_SHARED_IMAGE_ACCESS_MODE_READWRITE_CHROMIUM);
}
// Let the SkImage fall out of scope and track the result using dest_tex_id
- NewSkImageFromVideoFrameYUVTexturesWithExternalBackend(
- video_frame, raster_context_provider, dest_mailbox_holder.texture_target,
- dest_tex_id);
+ NewSkImageFromVideoFrameYUV(video_frame, raster_context_provider,
+ dest_mailbox_holder.texture_target, dest_tex_id);
if (dest_mailbox_holder.mailbox.IsSharedImage())
ri->EndSharedImageAccessDirectCHROMIUM(dest_tex_id);
ri->DeleteGpuRasterTexture(dest_tex_id);
@@ -132,28 +226,33 @@ void ConvertFromVideoFrameYUV(
auto* ri = raster_context_provider->RasterInterface();
DCHECK(ri);
ri->WaitSyncTokenCHROMIUM(dest_mailbox_holder.sync_token.GetConstData());
- YUVMailboxes mailboxes = GetYUVMailboxes(video_frame, ri);
SkYUVColorSpace color_space =
ColorSpaceToSkYUVColorSpace(video_frame->ColorSpace());
- if (video_frame->format() == PIXEL_FORMAT_I420) {
+
+ VideoFrameYUVMailboxesHolder yuv_mailboxes(video_frame,
+ raster_context_provider, false);
+
+ if (yuv_mailboxes.is_nv12()) {
+ ri->ConvertNV12MailboxesToRGB(dest_mailbox_holder.mailbox, color_space,
+ yuv_mailboxes.mailbox(kYIndex),
+ yuv_mailboxes.mailbox(kUIndex));
+ } else {
DCHECK_EQ(video_frame->NumTextures(), kNumYUVPlanes);
ri->ConvertYUVMailboxesToRGB(dest_mailbox_holder.mailbox, color_space,
- mailboxes[0].mailbox, mailboxes[1].mailbox,
- mailboxes[2].mailbox);
- } else {
- DCHECK_EQ(video_frame->format(), PIXEL_FORMAT_NV12);
- DCHECK_EQ(video_frame->NumTextures(), kNumNV12Planes);
- ri->ConvertNV12MailboxesToRGB(dest_mailbox_holder.mailbox, color_space,
- mailboxes[0].mailbox, mailboxes[1].mailbox);
+ yuv_mailboxes.mailbox(kYIndex),
+ yuv_mailboxes.mailbox(kUIndex),
+ yuv_mailboxes.mailbox(kVIndex));
}
}
-sk_sp<SkImage> NewSkImageFromVideoFrameYUVTexturesWithExternalBackend(
+sk_sp<SkImage> NewSkImageFromVideoFrameYUV(
const VideoFrame* video_frame,
viz::RasterContextProvider* raster_context_provider,
unsigned int texture_target,
unsigned int texture_id) {
- DCHECK(video_frame->HasTextures());
+ DCHECK(video_frame->HasTextures() ||
+ (video_frame->IsMappable() &&
+ video_frame->format() == PIXEL_FORMAT_I420));
GrContext* gr_context = raster_context_provider->GrContext();
DCHECK(gr_context);
// TODO: We should compare the DCHECK vs when UpdateLastImage calls this
@@ -167,16 +266,16 @@ sk_sp<SkImage> NewSkImageFromVideoFrameYUVTexturesWithExternalBackend(
GrGLTextureInfo backend_texture{};
- YUVTexturesInfo yuv_textures_info =
- GetYUVTexturesInfo(video_frame, raster_context_provider);
+ VideoFrameYUVMailboxesHolder yuv_textures_info(video_frame,
+ raster_context_provider, true);
GrBackendTexture yuv_textures[3] = {
GrBackendTexture(ya_tex_size.width(), ya_tex_size.height(),
- GrMipMapped::kNo, yuv_textures_info[0].texture),
+ GrMipMapped::kNo, yuv_textures_info.texture(kYIndex)),
GrBackendTexture(uv_tex_size.width(), uv_tex_size.height(),
- GrMipMapped::kNo, yuv_textures_info[1].texture),
+ GrMipMapped::kNo, yuv_textures_info.texture(kUIndex)),
GrBackendTexture(uv_tex_size.width(), uv_tex_size.height(),
- GrMipMapped::kNo, yuv_textures_info[2].texture),
+ GrMipMapped::kNo, yuv_textures_info.texture(kVIndex)),
};
backend_texture.fID = texture_id;
backend_texture.fTarget = texture_target;
@@ -188,9 +287,7 @@ sk_sp<SkImage> NewSkImageFromVideoFrameYUVTexturesWithExternalBackend(
sk_sp<SkImage> img = YUVGrBackendTexturesToSkImage(
gr_context, video_frame->ColorSpace(), video_frame->format(),
yuv_textures, result_texture);
- gr_context->flush();
-
- DeleteYUVTextures(video_frame, raster_context_provider, yuv_textures_info);
+ gr_context->flushAndSubmit();
return img;
}
diff --git a/chromium/media/renderers/yuv_util.h b/chromium/media/renderers/yuv_util.h
index e8fe451ab07..cd17d4d07b4 100644
--- a/chromium/media/renderers/yuv_util.h
+++ b/chromium/media/renderers/yuv_util.h
@@ -29,14 +29,15 @@ class VideoFrame;
// Converts a YUV video frame to RGB format and stores the results in the
// provided mailbox. The caller of this function maintains ownership of the
-// mailbox.
+// mailbox. Automatically handles upload of CPU memory backed VideoFrames in
+// I420 format. VideoFrames that wrap external textures can be I420 or NV12
+// format.
MEDIA_EXPORT void ConvertFromVideoFrameYUV(
const VideoFrame* video_frame,
viz::RasterContextProvider* raster_context_provider,
const gpu::MailboxHolder& dest_mailbox_holder);
-MEDIA_EXPORT sk_sp<SkImage>
-NewSkImageFromVideoFrameYUVTexturesWithExternalBackend(
+MEDIA_EXPORT sk_sp<SkImage> NewSkImageFromVideoFrameYUV(
const VideoFrame* video_frame,
viz::RasterContextProvider* raster_context_provider,
unsigned int texture_target,