Diffstat (limited to 'chromium/media/gpu/vaapi')
21 files changed, 1296 insertions, 349 deletions
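The core of this change wires the VA-API VP9 encoder to a software rate controller: EncodeJob gains AddPostExecuteCallback(), VaapiVideoEncodeAccelerator::NotifyEncodedChunkSize() queries the coded buffer size from the driver after each job executes, and the result is fed back through AcceleratedVideoEncoder::BitrateControlUpdate() when BitrateControl::kConstantQuantizationParameter is configured. The sketch below is a simplified, hypothetical illustration of that feedback loop using stand-in types and std::function instead of Chromium's base::OnceClosure; it is not the Chromium code itself, which appears in accelerated_video_encoder.{h,cc} and vaapi_video_encode_accelerator.cc further down in this diff.

// Illustrative sketch of the CQP feedback loop added in this patch.
// All names here are stand-ins, not the real Chromium API.
#include <cstdint>
#include <functional>
#include <queue>
#include <utility>

class EncodeJobSketch {
 public:
  // Mirrors EncodeJob::AddPostExecuteCallback(): callbacks run right after
  // the job executes, in the order they were added.
  void AddPostExecuteCallback(std::function<void()> cb) {
    post_execute_callbacks_.push(std::move(cb));
  }
  void Execute() {
    // ...submit parameter buffers and kick the driver here...
    while (!post_execute_callbacks_.empty()) {
      post_execute_callbacks_.front()();  // e.g. NotifyEncodedChunkSize().
      post_execute_callbacks_.pop();
    }
  }

 private:
  std::queue<std::function<void()>> post_execute_callbacks_;
};

class Vp9EncoderSketch {
 public:
  // Mirrors AcceleratedVideoEncoder::BitrateControlUpdate(): receives the
  // encoded chunk size so the next frame's quantization parameter can be
  // chosen. In the real VP9Encoder this feeds the libvpx rate controller
  // pulled in by the new vp9_rate_control.{h,cc} and //third_party/libvpx
  // dependencies in BUILD.gn.
  void BitrateControlUpdate(uint64_t encoded_chunk_size_bytes) {
    last_chunk_size_bytes_ = encoded_chunk_size_bytes;
  }

 private:
  uint64_t last_chunk_size_bytes_ = 0;
};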
diff --git a/chromium/media/gpu/vaapi/BUILD.gn b/chromium/media/gpu/vaapi/BUILD.gn index 2524a1c31b5..98dbf6acad4 100644 --- a/chromium/media/gpu/vaapi/BUILD.gn +++ b/chromium/media/gpu/vaapi/BUILD.gn @@ -66,11 +66,16 @@ source_set("vaapi") { "vp8_vaapi_video_decoder_delegate.h", "vp9_encoder.cc", "vp9_encoder.h", + "vp9_rate_control.cc", + "vp9_rate_control.h", "vp9_vaapi_video_decoder_delegate.cc", "vp9_vaapi_video_decoder_delegate.h", ] - configs += [ "//build/config/linux/libva" ] + configs += [ + "//build/config/linux/libva", + "//third_party/libvpx:libvpx_config", + ] deps = [ ":common", @@ -83,6 +88,7 @@ source_set("vaapi") { "//media/gpu/chromeos:common", "//media/parsers", "//mojo/public/cpp/bindings", + "//third_party/libvpx:libvp9rc", "//third_party/libyuv", "//ui/gfx", "//ui/gfx/geometry", @@ -116,12 +122,12 @@ source_set("vaapi") { ] } - if (ozone_platform_gbm || use_egl) { + if (use_ozone || use_egl) { sources += [ "vaapi_picture_native_pixmap.cc", "vaapi_picture_native_pixmap.h", ] - if (ozone_platform_gbm) { + if (use_ozone) { sources += [ "vaapi_picture_native_pixmap_ozone.cc", "vaapi_picture_native_pixmap_ozone.h", @@ -194,7 +200,9 @@ source_set("unit_test") { "vaapi_image_decode_accelerator_worker_unittest.cc", "vaapi_video_decode_accelerator_unittest.cc", "vaapi_video_encode_accelerator_unittest.cc", + "vp9_encoder_unittest.cc", ] + configs += [ "//third_party/libvpx:libvpx_config" ] deps = [ ":common", ":vaapi", @@ -206,6 +214,7 @@ source_set("unit_test") { "//mojo/core/embedder", "//testing/gmock", "//testing/gtest", + "//third_party/libvpx:libvp9rc", "//ui/gfx:test_support", "//ui/gfx/geometry", ] diff --git a/chromium/media/gpu/vaapi/accelerated_video_encoder.cc b/chromium/media/gpu/vaapi/accelerated_video_encoder.cc index 71acd16c8aa..4bfdb0dc06c 100644 --- a/chromium/media/gpu/vaapi/accelerated_video_encoder.cc +++ b/chromium/media/gpu/vaapi/accelerated_video_encoder.cc @@ -40,6 +40,12 @@ void AcceleratedVideoEncoder::EncodeJob::AddSetupCallback( setup_callbacks_.push(std::move(cb)); } +void AcceleratedVideoEncoder::EncodeJob::AddPostExecuteCallback( + base::OnceClosure cb) { + DCHECK(!cb.is_null()); + post_execute_callbacks_.push(std::move(cb)); +} + void AcceleratedVideoEncoder::EncodeJob::AddReferencePicture( scoped_refptr<CodecPicture> ref_pic) { DCHECK(ref_pic); @@ -53,10 +59,21 @@ void AcceleratedVideoEncoder::EncodeJob::Execute() { } std::move(execute_callback_).Run(); + + while (!post_execute_callbacks_.empty()) { + std::move(post_execute_callbacks_.front()).Run(); + post_execute_callbacks_.pop(); + } } size_t AcceleratedVideoEncoder::GetBitstreamBufferSize() const { return GetEncodeBitstreamBufferSize(GetCodedSize()); } +void AcceleratedVideoEncoder::BitrateControlUpdate( + uint64_t encoded_chunk_size_bytes) { + NOTREACHED() << __func__ << "() is called to on an" + << "AcceleratedVideoEncoder that doesn't support BitrateControl" + << "::kConstantQuantizationParameter"; +} } // namespace media diff --git a/chromium/media/gpu/vaapi/accelerated_video_encoder.h b/chromium/media/gpu/vaapi/accelerated_video_encoder.h index f214831dd62..e5a51bd96ac 100644 --- a/chromium/media/gpu/vaapi/accelerated_video_encoder.h +++ b/chromium/media/gpu/vaapi/accelerated_video_encoder.h @@ -40,12 +40,23 @@ class AcceleratedVideoEncoder { AcceleratedVideoEncoder() = default; virtual ~AcceleratedVideoEncoder() = default; + enum class BitrateControl { + kConstantBitrate, // Constant Bitrate mode. This class relies on other + // parts (e.g. driver) to achieve the specified bitrate. 
+ kConstantQuantizationParameter // Constant Quantization Parameter mode. + // This class needs to compute a proper + // quantization parameter and give other + // parts (e.g. the driver) the value. + }; + struct Config { // Maxium number of reference frames. // For H.264 encoding, the value represents the maximum number of reference // frames for both the reference picture list 0 (bottom 16 bits) and the // reference picture list 1 (top 16 bits). size_t max_num_ref_frames; + + BitrateControl bitrate_control = BitrateControl::kConstantBitrate; }; // An abstraction of an encode job for one frame. Parameters required for an @@ -71,6 +82,12 @@ class AcceleratedVideoEncoder { // is executed. void AddSetupCallback(base::OnceClosure cb); + // Schedules a callback to be run immediately after this job is executed. + // Can be called multiple times to schedule multiple callbacks, and all + // of them will be run, in order added. Callbacks can be used to e.g. get + // the encoded buffer linear size. + void AddPostExecuteCallback(base::OnceClosure cb); + // Adds |ref_pic| to the list of pictures to be used as reference pictures // for this frame, to ensure they remain valid until the job is executed // (or discarded). @@ -114,6 +131,10 @@ class AcceleratedVideoEncoder { // calls) to set up the job. base::queue<base::OnceClosure> setup_callbacks_; + // Callbacks to be run (in the same order as the order of + // AddPostExecuteCallback() calls) to do post processing after execute. + base::queue<base::OnceClosure> post_execute_callbacks_; + // Callback to be run to execute this job. base::OnceClosure execute_callback_; @@ -153,6 +174,12 @@ class AcceleratedVideoEncoder { // Prepares a new |encode_job| to be executed in Accelerator and returns true // on success. The caller may then call Execute() on the job to run it. virtual bool PrepareEncodeJob(EncodeJob* encode_job) = 0; + + // Notifies the encoded chunk size in bytes to update a bitrate controller in + // AcceleratedVideoEncoder. This should be called only if + // AcceleratedVideoEncoder is configured with + // BitrateControl::kConstantQuantizationParameter. + virtual void BitrateControlUpdate(uint64_t encoded_chunk_size_bytes); }; } // namespace media diff --git a/chromium/media/gpu/vaapi/test_utils.cc b/chromium/media/gpu/vaapi/test_utils.cc index b534b297bd5..f578bae1071 100644 --- a/chromium/media/gpu/vaapi/test_utils.cc +++ b/chromium/media/gpu/vaapi/test_utils.cc @@ -54,9 +54,9 @@ bool CompareImages(const DecodedImage& reference_image, // Uses the reference image's size as the ground truth. 
const gfx::Size image_size = reference_image.size; if (image_size != hw_decoded_image.size) { - DLOG(ERROR) << "Wrong expected software decoded image size, " - << image_size.ToString() << " versus VaAPI provided " - << hw_decoded_image.size.ToString(); + LOG(ERROR) << "Wrong expected software decoded image size, " + << image_size.ToString() << " versus VaAPI provided " + << hw_decoded_image.size.ToString(); return false; } @@ -100,7 +100,7 @@ bool CompareImages(const DecodedImage& reference_image, image_size.width(), image_size.height()); } if (conversion_result != 0) { - DLOG(ERROR) << "libyuv conversion error"; + LOG(ERROR) << "libyuv conversion error"; return false; } @@ -112,12 +112,12 @@ bool CompareImages(const DecodedImage& reference_image, temp_v.get(), half_image_size.width(), image_size.width(), image_size.height()); } else { - DLOG(ERROR) << "HW FourCC not supported: " << FourccToString(hw_fourcc); + LOG(ERROR) << "HW FourCC not supported: " << FourccToString(hw_fourcc); return false; } if (ssim < min_ssim) { - DLOG(ERROR) << "SSIM too low: " << ssim << " < " << min_ssim; + LOG(ERROR) << "SSIM too low: " << ssim << " < " << min_ssim; return false; } diff --git a/chromium/media/gpu/vaapi/va.sigs b/chromium/media/gpu/vaapi/va.sigs index f333cb33a7b..c24aad2c3e5 100644 --- a/chromium/media/gpu/vaapi/va.sigs +++ b/chromium/media/gpu/vaapi/va.sigs @@ -19,6 +19,7 @@ VAStatus vaDestroyImage(VADisplay dpy, VAImageID image); VAStatus vaDestroySurfaces(VADisplay dpy, VASurfaceID *surfaces, int num_surfaces); int vaDisplayIsValid(VADisplay dpy); VAStatus vaEndPicture(VADisplay dpy, VAContextID context); +const char *vaEntrypointStr(VAEntrypoint entrypoint); const char *vaErrorStr(VAStatus error_status); VAStatus vaExportSurfaceHandle(VADisplay dpy, VASurfaceID surface_id, uint32_t mem_type, uint32_t flags, void *descriptor); VAStatus vaGetConfigAttributes(VADisplay dpy, VAProfile profile, VAEntrypoint entrypoint, VAConfigAttrib *attrib_list, int num_attribs); @@ -29,6 +30,7 @@ int vaMaxNumConfigAttributes(VADisplay dpy); int vaMaxNumEntrypoints(VADisplay dpy); int vaMaxNumImageFormats(VADisplay dpy); int vaMaxNumProfiles(VADisplay dpy); +const char *vaProfileStr(VAProfile profile); VAStatus vaPutImage (VADisplay dpy, VASurfaceID surface, VAImageID image, int src_x, int src_y, unsigned int src_width, unsigned int src_height, int dest_x, int dest_y, unsigned int dest_width, unsigned int dest_height); VAStatus vaQueryConfigAttributes(VADisplay dpy, VAConfigID config_id, VAProfile *profile, VAEntrypoint *entrypoint, VAConfigAttrib *attrib_list, int *num_attribs); VAStatus vaQueryConfigEntrypoints(VADisplay dpy, VAProfile profile, VAEntrypoint *entrypoint_list, int *num_entrypoints); @@ -37,7 +39,6 @@ VAStatus vaQueryImageFormats(VADisplay dpy, VAImageFormat *format_list, int *num VAStatus vaQuerySurfaceAttributes(VADisplay dpy, VAConfigID config, VASurfaceAttrib *attrib_list, unsigned int *num_attribs); const char* vaQueryVendorString(VADisplay dpy); VAStatus vaRenderPicture(VADisplay dpy, VAContextID context, VABufferID *buffers, int num_buffers); -VAStatus vaSetDisplayAttributes(VADisplay dpy, VADisplayAttribute *attr_list, int num_attributes); VAStatus vaSyncSurface(VADisplay dpy, VASurfaceID render_target); VAStatus vaTerminate(VADisplay dpy); VAStatus vaUnmapBuffer(VADisplay dpy, VABufferID buf_id); diff --git a/chromium/media/gpu/vaapi/vaapi_image_processor_backend.cc b/chromium/media/gpu/vaapi/vaapi_image_processor_backend.cc index 4cb6bceda56..3c72d13786b 100644 --- 
a/chromium/media/gpu/vaapi/vaapi_image_processor_backend.cc +++ b/chromium/media/gpu/vaapi/vaapi_image_processor_backend.cc @@ -75,6 +75,7 @@ std::unique_ptr<ImageProcessorBackend> VaapiImageProcessorBackend::Create( const PortConfig& input_config, const PortConfig& output_config, const std::vector<OutputMode>& preferred_output_modes, + VideoRotation relative_rotation, ErrorCB error_cb, scoped_refptr<base::SequencedTaskRunner> backend_task_runner) { // VaapiImageProcessorBackend supports ChromeOS only. @@ -136,6 +137,13 @@ std::unique_ptr<ImageProcessorBackend> VaapiImageProcessorBackend::Create( return nullptr; } + // Checks if VA-API driver supports rotation. + if (relative_rotation != VIDEO_ROTATION_0 && + !vaapi_wrapper->IsRotationSupported()) { + VLOGF(1) << "VaapiIP doesn't support rotation"; + return nullptr; + } + // We should restrict the acceptable PortConfig for input and output both to // the one returned by GetPlatformVideoFrameLayout(). However, // ImageProcessorFactory interface doesn't provide information about what @@ -146,7 +154,7 @@ std::unique_ptr<ImageProcessorBackend> VaapiImageProcessorBackend::Create( // scenario. return base::WrapUnique<ImageProcessorBackend>(new VaapiImageProcessorBackend( std::move(vaapi_wrapper), input_config, output_config, OutputMode::IMPORT, - std::move(error_cb), std::move(backend_task_runner))); + relative_rotation, std::move(error_cb), std::move(backend_task_runner))); #endif } @@ -155,11 +163,13 @@ VaapiImageProcessorBackend::VaapiImageProcessorBackend( const PortConfig& input_config, const PortConfig& output_config, OutputMode output_mode, + VideoRotation relative_rotation, ErrorCB error_cb, scoped_refptr<base::SequencedTaskRunner> backend_task_runner) : ImageProcessorBackend(input_config, output_config, output_mode, + relative_rotation, std::move(error_cb), std::move(backend_task_runner)), vaapi_wrapper_(std::move(vaapi_wrapper)) {} @@ -206,9 +216,9 @@ void VaapiImageProcessorBackend::Process(scoped_refptr<VideoFrame> input_frame, return; // VA-API performs pixel format conversion and scaling without any filters. - if (!vaapi_wrapper_->BlitSurface(*src_va_surface, *dst_va_surface, - input_frame->visible_rect(), - output_frame->visible_rect())) { + if (!vaapi_wrapper_->BlitSurface( + *src_va_surface, *dst_va_surface, input_frame->visible_rect(), + output_frame->visible_rect(), relative_rotation_)) { // Failed to execute BlitSurface(). Since VaapiWrapper has invoked // ReportToUMA(), calling error_cb_ here is not needed. 
return; diff --git a/chromium/media/gpu/vaapi/vaapi_image_processor_backend.h b/chromium/media/gpu/vaapi/vaapi_image_processor_backend.h index 8abbb323dd8..8d5da751214 100644 --- a/chromium/media/gpu/vaapi/vaapi_image_processor_backend.h +++ b/chromium/media/gpu/vaapi/vaapi_image_processor_backend.h @@ -28,6 +28,7 @@ class VaapiImageProcessorBackend : public ImageProcessorBackend { const PortConfig& input_config, const PortConfig& output_config, const std::vector<OutputMode>& preferred_output_modes, + VideoRotation relative_rotation, ErrorCB error_cb, scoped_refptr<base::SequencedTaskRunner> backend_task_runner); @@ -42,6 +43,7 @@ class VaapiImageProcessorBackend : public ImageProcessorBackend { const PortConfig& input_config, const PortConfig& output_config, OutputMode output_mode, + VideoRotation relative_rotation, ErrorCB error_cb, scoped_refptr<base::SequencedTaskRunner> backend_task_runner); ~VaapiImageProcessorBackend() override; diff --git a/chromium/media/gpu/vaapi/vaapi_unittest.cc b/chromium/media/gpu/vaapi/vaapi_unittest.cc index d3d459fadf8..abb662d777a 100644 --- a/chromium/media/gpu/vaapi/vaapi_unittest.cc +++ b/chromium/media/gpu/vaapi/vaapi_unittest.cc @@ -11,9 +11,11 @@ #include <vector> #include <va/va.h> +#include <va/va_str.h> #include "base/files/file.h" #include "base/files/scoped_file.h" +#include "base/logging.h" #include "base/optional.h" #include "base/process/launch.h" #include "base/stl_util.h" @@ -34,10 +36,8 @@ base::Optional<VAProfile> ConvertToVAProfile(VideoCodecProfile profile) { {VP8PROFILE_ANY, VAProfileVP8Version0_3}, {VP9PROFILE_PROFILE0, VAProfileVP9Profile0}, {VP9PROFILE_PROFILE1, VAProfileVP9Profile1}, - // TODO(crbug.com/1011454, crbug.com/1011469): Reenable - // VP9PROFILE_PROFILE2 and _PROFILE3 when P010 is completely supported. - //{VP9PROFILE_PROFILE2, VAProfileVP9Profile2}, - //{VP9PROFILE_PROFILE3, VAProfileVP9Profile3}, + {VP9PROFILE_PROFILE2, VAProfileVP9Profile2}, + {VP9PROFILE_PROFILE3, VAProfileVP9Profile3}, }; auto it = kProfileMap.find(profile); return it != kProfileMap.end() ? base::make_optional<VAProfile>(it->second) @@ -56,10 +56,8 @@ base::Optional<VAProfile> StringToVAProfile(const std::string& va_profile) { {"VAProfileVP8Version0_3", VAProfileVP8Version0_3}, {"VAProfileVP9Profile0", VAProfileVP9Profile0}, {"VAProfileVP9Profile1", VAProfileVP9Profile1}, - // TODO(crbug.com/1011454, crbug.com/1011469): Reenable - // VP9PROFILE_PROFILE2 and _PROFILE3 when P010 is completely supported. 
- // {"VAProfileVP9Profile2", VAProfileVP9Profile2}, - // {"VAProfileVP9Profile3", VAProfileVP9Profile3}, + {"VAProfileVP9Profile2", VAProfileVP9Profile2}, + {"VAProfileVP9Profile3", VAProfileVP9Profile3}, }; auto it = kStringToVAProfile.find(va_profile); @@ -165,7 +163,8 @@ TEST_F(VaapiTest, VaapiProfiles) { va_info[VAProfileH264ConstrainedBaseline], VAEntrypointVLD); } - EXPECT_TRUE(is_profile_supported) << " profile: " << profile.profile; + EXPECT_TRUE(is_profile_supported) + << " profile: " << GetProfileName(profile.profile); } for (const auto& profile : VaapiWrapper::GetSupportedEncodeProfiles()) { @@ -184,7 +183,8 @@ TEST_F(VaapiTest, VaapiProfiles) { VAEntrypointEncSliceLP); } - EXPECT_TRUE(is_profile_supported) << " profile: " << profile.profile; + EXPECT_TRUE(is_profile_supported) + << " profile: " << GetProfileName(profile.profile); } EXPECT_EQ(VaapiWrapper::IsDecodeSupported(VAProfileJPEGBaseline), @@ -194,21 +194,24 @@ TEST_F(VaapiTest, VaapiProfiles) { base::Contains(va_info[VAProfileJPEGBaseline], VAEntrypointEncPicture)); } +// Verifies that the default VAEntrypoint as per VaapiWrapper is indeed among +// the supported ones. TEST_F(VaapiTest, DefaultEntrypointIsSupported) { for (size_t i = 0; i < VaapiWrapper::kCodecModeMax; ++i) { - const VaapiWrapper::CodecMode mode = - static_cast<VaapiWrapper::CodecMode>(i); + const auto wrapper_mode = static_cast<VaapiWrapper::CodecMode>(i); std::map<VAProfile, std::vector<VAEntrypoint>> configurations = - VaapiWrapper::GetSupportedConfigurationsForCodecModeForTesting(mode); + VaapiWrapper::GetSupportedConfigurationsForCodecModeForTesting( + wrapper_mode); for (const auto& profile_and_entrypoints : configurations) { const VAEntrypoint default_entrypoint = - VaapiWrapper::GetDefaultVaEntryPoint(mode, + VaapiWrapper::GetDefaultVaEntryPoint(wrapper_mode, profile_and_entrypoints.first); const auto& supported_entrypoints = profile_and_entrypoints.second; EXPECT_TRUE(base::Contains(supported_entrypoints, default_entrypoint)) - << "Default VAEntrypoint " << default_entrypoint - << " (mode = " << mode << ") is not supported for VAProfile = " - << profile_and_entrypoints.first; + << "Default VAEntrypoint " << vaEntrypointStr(default_entrypoint) + << " (VaapiWrapper mode = " << wrapper_mode + << ") is not supported for " + << vaProfileStr(profile_and_entrypoints.first); } } } diff --git a/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.h b/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.h index ed053f16ec5..62b90c85858 100644 --- a/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.h +++ b/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.h @@ -19,7 +19,6 @@ #include "base/containers/queue.h" #include "base/containers/small_map.h" -#include "base/logging.h" #include "base/macros.h" #include "base/memory/weak_ptr.h" #include "base/single_thread_task_runner.h" diff --git a/chromium/media/gpu/vaapi/vaapi_video_decoder.cc b/chromium/media/gpu/vaapi/vaapi_video_decoder.cc index c97f1a06cd9..48b9092156b 100644 --- a/chromium/media/gpu/vaapi/vaapi_video_decoder.cc +++ b/chromium/media/gpu/vaapi/vaapi_video_decoder.cc @@ -33,17 +33,15 @@ constexpr size_t kTimestampCacheSize = 128; // Returns the preferred VA_RT_FORMAT for the given |profile|. 
unsigned int GetVaFormatForVideoCodecProfile(VideoCodecProfile profile) { - switch (profile) { - case VP9PROFILE_PROFILE2: - case VP9PROFILE_PROFILE3: - return VA_RT_FORMAT_YUV420_10BPP; - default: - return VA_RT_FORMAT_YUV420; - } + if (profile == VP9PROFILE_PROFILE2 || profile == VP9PROFILE_PROFILE3) + return VA_RT_FORMAT_YUV420_10BPP; + return VA_RT_FORMAT_YUV420; } -gfx::BufferFormat GetBufferFormat() { +gfx::BufferFormat GetBufferFormat(VideoCodecProfile profile) { #if defined(USE_OZONE) + if (profile == VP9PROFILE_PROFILE2 || profile == VP9PROFILE_PROFILE3) + return gfx::BufferFormat::P010; return gfx::BufferFormat::YUV_420_BIPLANAR; #else return gfx::BufferFormat::RGBX_8888; @@ -341,7 +339,7 @@ scoped_refptr<VASurface> VaapiVideoDecoder::CreateSurface() { void VaapiVideoDecoder::SurfaceReady(scoped_refptr<VASurface> va_surface, int32_t buffer_id, const gfx::Rect& visible_rect, - const VideoColorSpace& /*color_space*/) { + const VideoColorSpace& color_space) { DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_); DCHECK_EQ(state_, State::kDecoding); DVLOGF(3); @@ -360,16 +358,7 @@ void VaapiVideoDecoder::SurfaceReady(scoped_refptr<VASurface> va_surface, // Find the frame associated with the surface. We won't erase it from // |output_frames_| yet, as the decoder might still be using it for reference. DCHECK_EQ(output_frames_.count(va_surface->id()), 1u); - OutputFrameTask(output_frames_[va_surface->id()], visible_rect, timestamp); -} - -void VaapiVideoDecoder::OutputFrameTask(scoped_refptr<VideoFrame> video_frame, - const gfx::Rect& visible_rect, - base::TimeDelta timestamp) { - DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_); - DCHECK_EQ(state_, State::kDecoding); - DCHECK(video_frame); - DVLOGF(4); + scoped_refptr<VideoFrame> video_frame = output_frames_[va_surface->id()]; // Set the timestamp at which the decode operation started on the // |video_frame|. If the frame has been outputted before (e.g. because of VP9 @@ -389,6 +378,10 @@ void VaapiVideoDecoder::OutputFrameTask(scoped_refptr<VideoFrame> video_frame, video_frame = std::move(wrapped_frame); } + const auto gfx_color_space = color_space.ToGfxColorSpace(); + if (gfx_color_space.IsValid()) + video_frame->set_color_space(gfx_color_space); + output_cb_.Run(std::move(video_frame)); } @@ -403,12 +396,18 @@ void VaapiVideoDecoder::ApplyResolutionChange() { gfx::Size natural_size = GetNaturalSize(visible_rect, pixel_aspect_ratio_); pic_size_ = decoder_->GetPicSize(); const base::Optional<VideoPixelFormat> format = - GfxBufferFormatToVideoPixelFormat(GetBufferFormat()); + GfxBufferFormatToVideoPixelFormat( + GetBufferFormat(decoder_->GetProfile())); CHECK(format); auto format_fourcc = Fourcc::FromVideoPixelFormat(*format); CHECK(format_fourcc); - frame_pool_->Initialize(*format_fourcc, pic_size_, visible_rect, natural_size, - decoder_->GetRequiredNumOfPictures()); + if (!frame_pool_->Initialize(*format_fourcc, pic_size_, visible_rect, + natural_size, + decoder_->GetRequiredNumOfPictures())) { + DLOG(WARNING) << "Failed Initialize()ing the frame pool."; + SetState(State::kError); + return; + } // All pending decode operations will be completed before triggering a // resolution change, so we can safely destroy the context here. 
diff --git a/chromium/media/gpu/vaapi/vaapi_video_decoder.h b/chromium/media/gpu/vaapi/vaapi_video_decoder.h index db186f14734..d7a4d3e18c0 100644 --- a/chromium/media/gpu/vaapi/vaapi_video_decoder.h +++ b/chromium/media/gpu/vaapi/vaapi_video_decoder.h @@ -102,10 +102,6 @@ class VaapiVideoDecoder : public DecoderInterface, // resetting or destroying the decoder, or encountering an error. void ClearDecodeTaskQueue(DecodeStatus status); - // Output a single |video_frame| on the decoder thread. - void OutputFrameTask(scoped_refptr<VideoFrame> video_frame, - const gfx::Rect& visible_rect, - base::TimeDelta timestamp); // Release the video frame associated with the specified |surface_id| on the // decoder thread. This is called when the last reference to the associated // VASurface has been released, which happens when the decoder outputted the diff --git a/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.cc b/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.cc index c7ae04b8be9..528c424a1a8 100644 --- a/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.cc +++ b/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.cc @@ -18,6 +18,7 @@ #include "base/bind_helpers.h" #include "base/bits.h" #include "base/callback.h" +#include "base/callback_helpers.h" #include "base/macros.h" #include "base/memory/ptr_util.h" #include "base/metrics/histogram_macros.h" @@ -77,12 +78,6 @@ static void ReportToUMA(VAVEAEncoderFailure failure) { // requirements. gfx::Size GetInputFrameSize(VideoPixelFormat format, const gfx::Size& visible_size) { - if (format == PIXEL_FORMAT_I420) { - // Since we don't have gfx::BufferFormat for I420, replace I420 with YV12. - // Remove this workaround once crrev.com/c/1573718 is landed. - format = PIXEL_FORMAT_YV12; - } - std::unique_ptr<::gpu::GpuMemoryBufferFactory> gpu_memory_buffer_factory = ::gpu::GpuMemoryBufferFactory::CreateNativeType(nullptr); // Get a VideoFrameLayout of a graphic buffer with the same gfx::BufferUsage @@ -168,6 +163,8 @@ struct VaapiVideoEncodeAccelerator::BitstreamBufferRef { VideoEncodeAccelerator::SupportedProfiles VaapiVideoEncodeAccelerator::GetSupportedProfiles() { + if (IsConfiguredForTesting()) + return supported_profiles_for_testing_; return VaapiWrapper::GetSupportedEncodeProfiles(); } @@ -341,13 +338,18 @@ bool VaapiVideoEncodeAccelerator::Initialize(const Config& config, return false; } - vaapi_wrapper_ = VaapiWrapper::CreateForVideoCodec( - VaapiWrapper::kEncode, config.output_profile, - base::Bind(&ReportToUMA, VAAPI_ERROR)); - if (!vaapi_wrapper_) { - VLOGF(1) << "Failed initializing VAAPI for profile " - << GetProfileName(config.output_profile); - return false; + DCHECK_EQ(IsConfiguredForTesting(), !!vaapi_wrapper_); + if (!IsConfiguredForTesting()) { + VaapiWrapper::CodecMode mode = + codec == kCodecVP9 ? VaapiWrapper::kEncodeConstantQuantizationParameter + : VaapiWrapper::kEncode; + vaapi_wrapper_ = VaapiWrapper::CreateForVideoCodec( + mode, config.output_profile, base::Bind(&ReportToUMA, VAAPI_ERROR)); + if (!vaapi_wrapper_) { + VLOGF(1) << "Failed initializing VAAPI for profile " + << GetProfileName(config.output_profile); + return false; + } } // Finish remaining initialization on the encoder thread. 
@@ -363,33 +365,46 @@ void VaapiVideoEncodeAccelerator::InitializeTask(const Config& config) { VLOGF(2); VideoCodec codec = VideoCodecProfileToVideoCodec(config.output_profile); + AcceleratedVideoEncoder::Config ave_config{}; + DCHECK_EQ(IsConfiguredForTesting(), !!encoder_); switch (codec) { case kCodecH264: - encoder_ = std::make_unique<H264Encoder>( - std::make_unique<H264Accelerator>(this)); + if (!IsConfiguredForTesting()) { + encoder_ = std::make_unique<H264Encoder>( + std::make_unique<H264Accelerator>(this)); + } + DCHECK_EQ(ave_config.bitrate_control, + AcceleratedVideoEncoder::BitrateControl::kConstantBitrate); break; - case kCodecVP8: - encoder_ = - std::make_unique<VP8Encoder>(std::make_unique<VP8Accelerator>(this)); + if (!IsConfiguredForTesting()) { + encoder_ = std::make_unique<VP8Encoder>( + std::make_unique<VP8Accelerator>(this)); + } + DCHECK_EQ(ave_config.bitrate_control, + AcceleratedVideoEncoder::BitrateControl::kConstantBitrate); break; - case kCodecVP9: - encoder_ = - std::make_unique<VP9Encoder>(std::make_unique<VP9Accelerator>(this)); + if (!IsConfiguredForTesting()) { + encoder_ = std::make_unique<VP9Encoder>( + std::make_unique<VP9Accelerator>(this)); + } + ave_config.bitrate_control = AcceleratedVideoEncoder::BitrateControl:: + kConstantQuantizationParameter; break; - default: NOTREACHED() << "Unsupported codec type " << GetCodecName(codec); return; } - AcceleratedVideoEncoder::Config ave_config; if (!vaapi_wrapper_->GetVAEncMaxNumOfRefFrames( - config.output_profile, &ave_config.max_num_ref_frames)) + config.output_profile, &ave_config.max_num_ref_frames)) { + NOTIFY_ERROR(kPlatformFailureError, + "Failed getting max number of reference frames" + "supported by the driver"); return; + } DCHECK_GT(ave_config.max_num_ref_frames, 0u); - if (!encoder_->Initialize(config, ave_config)) { NOTIFY_ERROR(kInvalidArgumentError, "Failed initializing encoder"); return; @@ -409,13 +424,17 @@ void VaapiVideoEncodeAccelerator::InitializeTask(const Config& config) { expected_input_coded_size_.width() <= encoder_->GetCodedSize().width() && expected_input_coded_size_.height() <= encoder_->GetCodedSize().height()); - // The aligned surface size must be the same as a size of a native graphic - // buffer. - aligned_va_surface_size_ = - GetInputFrameSize(config.input_format, config.input_visible_size); - if (aligned_va_surface_size_.IsEmpty()) { - NOTIFY_ERROR(kPlatformFailureError, "Failed to get frame size"); - return; + DCHECK_EQ(IsConfiguredForTesting(), !aligned_va_surface_size_.IsEmpty()); + if (!IsConfiguredForTesting()) { + // The aligned VA surface size must be the same as a size of a native + // graphics buffer. Since the VA surface's format is NV12, we specify NV12 + // to query the size of the native graphics buffer. 
+ aligned_va_surface_size_ = + GetInputFrameSize(PIXEL_FORMAT_NV12, config.input_visible_size); + if (aligned_va_surface_size_.IsEmpty()) { + NOTIFY_ERROR(kPlatformFailureError, "Failed to get frame size"); + return; + } } va_surfaces_per_video_frame_ = @@ -536,6 +555,19 @@ void VaapiVideoEncodeAccelerator::SubmitH264BitstreamBuffer( } } +void VaapiVideoEncodeAccelerator::NotifyEncodedChunkSize( + VABufferID buffer_id, + VASurfaceID sync_surface_id) { + DCHECK_CALLED_ON_VALID_SEQUENCE(encoder_sequence_checker_); + const uint64_t encoded_chunk_size = + vaapi_wrapper_->GetEncodedChunkSize(buffer_id, sync_surface_id); + if (encoded_chunk_size == 0) + NOTIFY_ERROR(kPlatformFailureError, "Failed getting an encoded chunksize"); + + DCHECK(encoder_); + encoder_->BitrateControlUpdate(encoded_chunk_size); +} + void VaapiVideoEncodeAccelerator::TryToReturnBitstreamBuffer() { DCHECK_CALLED_ON_VALID_SEQUENCE(encoder_sequence_checker_); @@ -567,26 +599,27 @@ void VaapiVideoEncodeAccelerator::ReturnBitstreamBuffer( std::unique_ptr<VaapiEncodeJob> encode_job, std::unique_ptr<BitstreamBufferRef> buffer) { DCHECK_CALLED_ON_VALID_SEQUENCE(encoder_sequence_checker_); + const VABufferID coded_buffer_id = encode_job->coded_buffer_id(); + base::ScopedClosureRunner scoped_buffer( + base::BindOnce(&VaapiWrapper::DestroyVABuffer, + base::Unretained(vaapi_wrapper_.get()), coded_buffer_id)); uint8_t* target_data = static_cast<uint8_t*>(buffer->shm->memory()); size_t data_size = 0; - if (!vaapi_wrapper_->DownloadFromVABuffer( encode_job->coded_buffer_id(), encode_job->input_surface()->id(), target_data, buffer->shm->size(), &data_size)) { NOTIFY_ERROR(kPlatformFailureError, "Failed downloading coded buffer"); return; } - DVLOGF(4) << "Returning bitstream buffer " << (encode_job->IsKeyframeRequested() ? 
"(keyframe)" : "") << " id: " << buffer->id << " size: " << data_size; + scoped_buffer.RunAndReset(); child_task_runner_->PostTask( FROM_HERE, base::BindOnce(&Client::BitstreamBufferReady, client_, buffer->id, encode_job->Metadata(data_size))); - - vaapi_wrapper_->DestroyVABuffer(encode_job->coded_buffer_id()); } void VaapiVideoEncodeAccelerator::Encode(scoped_refptr<VideoFrame> frame, @@ -1476,6 +1509,25 @@ bool VaapiVideoEncodeAccelerator::VP9Accelerator::SubmitFrameParameters( pic_param.log2_tile_rows = frame_header->tile_rows_log2; pic_param.log2_tile_columns = frame_header->tile_cols_log2; + job->AddSetupCallback( + base::BindOnce(&VaapiVideoEncodeAccelerator::SubmitBuffer, + base::Unretained(vea_), VAEncSequenceParameterBufferType, + MakeRefCountedBytes(&seq_param, sizeof(seq_param)))); + + job->AddSetupCallback( + base::BindOnce(&VaapiVideoEncodeAccelerator::SubmitBuffer, + base::Unretained(vea_), VAEncPictureParameterBufferType, + MakeRefCountedBytes(&pic_param, sizeof(pic_param)))); + + if (bitrate_control_ == + AcceleratedVideoEncoder::BitrateControl::kConstantQuantizationParameter) { + job->AddPostExecuteCallback(base::BindOnce( + &VaapiVideoEncodeAccelerator::NotifyEncodedChunkSize, + base::Unretained(vea_), job->AsVaapiEncodeJob()->coded_buffer_id(), + job->AsVaapiEncodeJob()->input_surface()->id())); + return true; + } + VAEncMiscParameterRateControl rate_control_param = {}; rate_control_param.bits_per_second = encode_params.bitrate_allocation.GetSumBps(); @@ -1493,16 +1545,6 @@ bool VaapiVideoEncodeAccelerator::VP9Accelerator::SubmitFrameParameters( hrd_param.buffer_size = encode_params.cpb_size_bits; hrd_param.initial_buffer_fullness = hrd_param.buffer_size / 2; - job->AddSetupCallback( - base::BindOnce(&VaapiVideoEncodeAccelerator::SubmitBuffer, - base::Unretained(vea_), VAEncSequenceParameterBufferType, - MakeRefCountedBytes(&seq_param, sizeof(seq_param)))); - - job->AddSetupCallback( - base::BindOnce(&VaapiVideoEncodeAccelerator::SubmitBuffer, - base::Unretained(vea_), VAEncPictureParameterBufferType, - MakeRefCountedBytes(&pic_param, sizeof(pic_param)))); - job->AddSetupCallback(base::BindOnce( &VaapiVideoEncodeAccelerator::SubmitVAEncMiscParamBuffer, base::Unretained(vea_), VAEncMiscParameterTypeRateControl, diff --git a/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.h b/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.h index cdb90fd455d..ec0f1ca5860 100644 --- a/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.h +++ b/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.h @@ -35,7 +35,7 @@ class MEDIA_GPU_EXPORT VaapiVideoEncodeAccelerator ~VaapiVideoEncodeAccelerator() override; // VideoEncodeAccelerator implementation. - VideoEncodeAccelerator::SupportedProfiles GetSupportedProfiles() override; + SupportedProfiles GetSupportedProfiles() override; bool Initialize(const Config& config, Client* client) override; void Encode(scoped_refptr<VideoFrame> frame, bool force_keyframe) override; void UseOutputBitstreamBuffer(BitstreamBuffer buffer) override; @@ -49,6 +49,7 @@ class MEDIA_GPU_EXPORT VaapiVideoEncodeAccelerator bool IsFlushSupported() override; private: + friend class VaapiVideoEncodeAcceleratorTest; class H264Accelerator; class VP8Accelerator; class VP9Accelerator; @@ -148,6 +149,15 @@ class MEDIA_GPU_EXPORT VaapiVideoEncodeAccelerator // Submits a H264BitstreamBuffer |buffer| to the driver. 
void SubmitH264BitstreamBuffer(scoped_refptr<H264BitstreamBuffer> buffer); + // Gets the encoded chunk size whose id is |buffer_id| and notifies |encoder_| + // the size. + void NotifyEncodedChunkSize(VABufferID buffer_id, + VASurfaceID sync_surface_id); + + bool IsConfiguredForTesting() const { + return !supported_profiles_for_testing_.empty(); + } + // The unchanged values are filled upon the construction. The varied values // (e.g. ScalingSettings) are filled properly during encoding. VideoEncoderInfo encoder_info_; @@ -240,6 +250,9 @@ class MEDIA_GPU_EXPORT VaapiVideoEncodeAccelerator // The completion callback of the Flush() function. FlushCallback flush_callback_; + // Supported profiles that are filled if and only if in a unit test. + SupportedProfiles supported_profiles_for_testing_; + // WeakPtr of this, bound to |child_task_runner_|. base::WeakPtr<VaapiVideoEncodeAccelerator> child_weak_this_; // WeakPtr of this, bound to |encoder_task_runner_|. diff --git a/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator_unittest.cc b/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator_unittest.cc index 01bfbb3a6e0..896a7251dbc 100644 --- a/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator_unittest.cc +++ b/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator_unittest.cc @@ -4,22 +4,48 @@ #include "media/gpu/vaapi/vaapi_video_encode_accelerator.h" +#include <memory> +#include <numeric> +#include <vector> + +#include "base/run_loop.h" +#include "base/test/gmock_callback_support.h" #include "base/test/task_environment.h" #include "media/video/video_encode_accelerator.h" #include "testing/gmock/include/gmock/gmock.h" #include "testing/gtest/include/gtest/gtest.h" +using base::test::RunClosure; +using ::testing::_; +using ::testing::Return; +using ::testing::WithArgs; + namespace media { namespace { constexpr gfx::Size kDefaultEncodeSize(1280, 720); constexpr uint32_t kDefaultBitrateBps = 4 * 1000 * 1000; constexpr uint32_t kDefaultFramerate = 30; -const VideoEncodeAccelerator::Config kDefaultVEAConfig(PIXEL_FORMAT_I420, - kDefaultEncodeSize, - VP8PROFILE_ANY, - kDefaultBitrateBps, - kDefaultFramerate); +constexpr size_t kMaxNumOfRefFrames = 3u; +const VideoEncodeAccelerator::Config kDefaultVideoEncodeAcceleratorConfig( + PIXEL_FORMAT_I420, + kDefaultEncodeSize, + VP9PROFILE_PROFILE0, + kDefaultBitrateBps, + kDefaultFramerate); + +MATCHER_P2(MatchesAcceleratedVideoEncoderConfig, + max_ref_frames, + bitrate_control, + "") { + return arg.max_num_ref_frames == max_ref_frames && + arg.bitrate_control == bitrate_control; +} + +MATCHER_P2(MatchesBitstreamBufferMetadata, payload_size_bytes, key_frame, "") { + return arg.payload_size_bytes == payload_size_bytes && + arg.key_frame == key_frame; +} class MockVideoEncodeAcceleratorClient : public VideoEncodeAccelerator::Client { public: @@ -27,35 +53,239 @@ class MockVideoEncodeAcceleratorClient : public VideoEncodeAccelerator::Client { virtual ~MockVideoEncodeAcceleratorClient() = default; MOCK_METHOD3(RequireBitstreamBuffers, - void(unsigned int, const gfx::Size&, size_t output_buffer_size)); + void(unsigned int, const gfx::Size&, size_t)); MOCK_METHOD2(BitstreamBufferReady, void(int32_t, const BitstreamBufferMetadata&)); MOCK_METHOD1(NotifyError, void(VideoEncodeAccelerator::Error)); - MOCK_METHOD1(NotifyEncoderInfoChange, void(const VideoEncoderInfo& info)); + MOCK_METHOD1(NotifyEncoderInfoChange, void(const VideoEncoderInfo&)); }; -struct VaapiVEAInitializeTestParam { - uint8_t num_of_temporal_layers = 0; - uint8_t 
num_of_spatial_layers = 0; - bool expected_result; +class MockVaapiWrapper : public VaapiWrapper { + public: + MockVaapiWrapper(CodecMode mode) : VaapiWrapper(mode) {} + MOCK_METHOD2(GetVAEncMaxNumOfRefFrames, bool(VideoCodecProfile, size_t*)); + MOCK_METHOD5(CreateContextAndSurfaces, + bool(unsigned int, + const gfx::Size&, + SurfaceUsageHint, + size_t, + std::vector<VASurfaceID>*)); + MOCK_METHOD2(CreateVABuffer, bool(size_t, VABufferID*)); + MOCK_METHOD2(GetEncodedChunkSize, uint64_t(VABufferID, VASurfaceID)); + MOCK_METHOD5(DownloadFromVABuffer, + bool(VABufferID, VASurfaceID, uint8_t*, size_t, size_t*)); + MOCK_METHOD3(UploadVideoFrameToSurface, + bool(const VideoFrame&, VASurfaceID, const gfx::Size&)); + MOCK_METHOD1(ExecuteAndDestroyPendingBuffers, bool(VASurfaceID)); + MOCK_METHOD1(DestroyVABuffer, void(VABufferID)); + MOCK_METHOD0(DestroyContext, void()); + MOCK_METHOD1(DestroySurfaces, void(std::vector<VASurfaceID> va_surface_ids)); + + private: + ~MockVaapiWrapper() override = default; +}; + +class MockAcceleratedVideoEncoder : public AcceleratedVideoEncoder { + public: + MOCK_METHOD2(Initialize, + bool(const VideoEncodeAccelerator::Config&, + const AcceleratedVideoEncoder::Config&)); + MOCK_CONST_METHOD0(GetCodedSize, gfx::Size()); + MOCK_CONST_METHOD0(GetBitstreamBufferSize, size_t()); + MOCK_CONST_METHOD0(GetMaxNumOfRefFrames, size_t()); + MOCK_METHOD1(PrepareEncodeJob, bool(EncodeJob*)); + MOCK_METHOD1(BitrateControlUpdate, void(uint64_t)); + bool UpdateRates(const VideoBitrateAllocation&, uint32_t) override { + return false; + } + ScalingSettings GetScalingSettings() const override { + return ScalingSettings(); + } }; +} // namespace + +struct VaapiVideoEncodeAcceleratorTestParam; -class VaapiVEAInitializeTest - : public ::testing::TestWithParam<VaapiVEAInitializeTestParam> { +class VaapiVideoEncodeAcceleratorTest + : public ::testing::TestWithParam<VaapiVideoEncodeAcceleratorTestParam> { protected: - VaapiVEAInitializeTest() = default; - ~VaapiVEAInitializeTest() override = default; + VaapiVideoEncodeAcceleratorTest() = default; + ~VaapiVideoEncodeAcceleratorTest() override = default; + + void SetUp() override { + mock_vaapi_wrapper_ = + base::MakeRefCounted<MockVaapiWrapper>(VaapiWrapper::kEncode); + encoder_.reset(new VaapiVideoEncodeAccelerator); + auto* vaapi_encoder = + reinterpret_cast<VaapiVideoEncodeAccelerator*>(encoder_.get()); + vaapi_encoder->vaapi_wrapper_ = mock_vaapi_wrapper_; + vaapi_encoder->encoder_ = std::make_unique<MockAcceleratedVideoEncoder>(); + mock_encoder_ = reinterpret_cast<MockAcceleratedVideoEncoder*>( + vaapi_encoder->encoder_.get()); + } + + void SetDefaultMocksBehavior(const VideoEncodeAccelerator::Config& config) { + ASSERT_TRUE(mock_vaapi_wrapper_); + ASSERT_TRUE(mock_encoder_); + + ON_CALL(*mock_vaapi_wrapper_, GetVAEncMaxNumOfRefFrames) + .WillByDefault(WithArgs<1>([](size_t* max_ref_frames) { + *max_ref_frames = kMaxNumOfRefFrames; + return true; + })); + + ON_CALL(*mock_encoder_, GetBitstreamBufferSize) + .WillByDefault(Return(config.input_visible_size.GetArea())); + ON_CALL(*mock_encoder_, GetCodedSize()) + .WillByDefault(Return(config.input_visible_size)); + ON_CALL(*mock_encoder_, GetMaxNumOfRefFrames()) + .WillByDefault(Return(kMaxNumOfRefFrames)); + } + + bool InitializeVideoEncodeAccelerator( + const VideoEncodeAccelerator::Config& config) { + VideoEncodeAccelerator::SupportedProfile profile(config.output_profile, + config.input_visible_size); + auto* vaapi_encoder = + 
reinterpret_cast<VaapiVideoEncodeAccelerator*>(encoder_.get()); + vaapi_encoder->supported_profiles_for_testing_.push_back(profile); + vaapi_encoder->aligned_va_surface_size_ = config.input_visible_size; + if (config.input_visible_size.IsEmpty()) + return false; + return encoder_->Initialize(config, &client_); + } + + void InitializeSequenceForVP9(const VideoEncodeAccelerator::Config& config) { + base::RunLoop run_loop; + base::Closure quit_closure = run_loop.QuitClosure(); + ::testing::InSequence s; + constexpr auto kBitrateControl = + AcceleratedVideoEncoder::BitrateControl::kConstantQuantizationParameter; + EXPECT_CALL(*mock_encoder_, + Initialize(_, MatchesAcceleratedVideoEncoderConfig( + kMaxNumOfRefFrames, kBitrateControl))) + .WillOnce(Return(true)); + EXPECT_CALL(*mock_vaapi_wrapper_, + CreateContextAndSurfaces( + _, kDefaultEncodeSize, + VaapiWrapper::SurfaceUsageHint::kVideoEncoder, _, _)) + .WillOnce(WithArgs<3, 4>( + [&surfaces = this->va_surfaces_]( + size_t num_surfaces, std::vector<VASurfaceID>* va_surface_ids) { + surfaces.resize(num_surfaces); + std::iota(surfaces.begin(), surfaces.end(), 0); + *va_surface_ids = surfaces; + return true; + })); + EXPECT_CALL(client_, RequireBitstreamBuffers(_, kDefaultEncodeSize, _)) + .WillOnce(WithArgs<2>([this, &quit_closure](size_t output_buffer_size) { + this->output_buffer_size_ = output_buffer_size; + quit_closure.Run(); + })); + ASSERT_TRUE(InitializeVideoEncodeAccelerator(config)); + run_loop.Run(); + } + + void EncodeSequenceForVP9() { + base::RunLoop run_loop; + base::Closure quit_closure = run_loop.QuitClosure(); + ::testing::InSequence s; + + constexpr VABufferID kCodedBufferId = 123; + EXPECT_CALL(*mock_vaapi_wrapper_, CreateVABuffer(output_buffer_size_, _)) + .WillOnce(WithArgs<1>([](VABufferID* va_buffer_id) { + *va_buffer_id = kCodedBufferId; + return true; + })); + + ASSERT_FALSE(va_surfaces_.empty()); + const VASurfaceID kInputSurfaceId = va_surfaces_.back(); + EXPECT_CALL(*mock_encoder_, PrepareEncodeJob(_)) + .WillOnce(WithArgs<0>( + [encoder = encoder_.get(), kCodedBufferId, + kInputSurfaceId](AcceleratedVideoEncoder::EncodeJob* job) { + job->AddPostExecuteCallback(base::BindOnce( + &VaapiVideoEncodeAccelerator::NotifyEncodedChunkSize, + base::Unretained( + reinterpret_cast<VaapiVideoEncodeAccelerator*>(encoder)), + kCodedBufferId, kInputSurfaceId)); + return true; + })); + EXPECT_CALL( + *mock_vaapi_wrapper_, + UploadVideoFrameToSurface(_, kInputSurfaceId, kDefaultEncodeSize)) + .WillOnce(Return(true)); + EXPECT_CALL(*mock_vaapi_wrapper_, + ExecuteAndDestroyPendingBuffers(kInputSurfaceId)) + .WillOnce(Return(true)); + + constexpr uint64_t kEncodedChunkSize = 1234; + ASSERT_LE(kEncodedChunkSize, output_buffer_size_); + EXPECT_CALL(*mock_vaapi_wrapper_, + GetEncodedChunkSize(kCodedBufferId, kInputSurfaceId)) + .WillOnce(Return(kEncodedChunkSize)); + EXPECT_CALL(*mock_encoder_, BitrateControlUpdate(kEncodedChunkSize)) + .WillOnce(Return()); + EXPECT_CALL(*mock_vaapi_wrapper_, + DownloadFromVABuffer(kCodedBufferId, kInputSurfaceId, _, + output_buffer_size_, _)) + .WillOnce(WithArgs<4>([](size_t* coded_data_size) { + *coded_data_size = kEncodedChunkSize; + return true; + })); + EXPECT_CALL(*mock_vaapi_wrapper_, DestroyVABuffer(kCodedBufferId)) + .WillOnce(Return()); + + constexpr int32_t kBitstreamId = 12; + EXPECT_CALL(client_, BitstreamBufferReady(kBitstreamId, + MatchesBitstreamBufferMetadata( + kEncodedChunkSize, false))) + .WillOnce(RunClosure(quit_closure)); + + auto region = 
base::UnsafeSharedMemoryRegion::Create(output_buffer_size_); + ASSERT_TRUE(region.IsValid()); + encoder_->UseOutputBitstreamBuffer( + BitstreamBuffer(kBitstreamId, std::move(region), output_buffer_size_)); + + auto frame = VideoFrame::CreateFrame(PIXEL_FORMAT_I420, kDefaultEncodeSize, + gfx::Rect(kDefaultEncodeSize), + kDefaultEncodeSize, base::TimeDelta()); + ASSERT_TRUE(frame); + encoder_->Encode(std::move(frame), false /* force_keyframe */); + run_loop.Run(); + } + + size_t output_buffer_size_ = 0; + std::vector<VASurfaceID> va_surfaces_; base::test::TaskEnvironment task_environment_; + MockVideoEncodeAcceleratorClient client_; + std::unique_ptr<VideoEncodeAccelerator> encoder_; + scoped_refptr<MockVaapiWrapper> mock_vaapi_wrapper_; + MockAcceleratedVideoEncoder* mock_encoder_ = nullptr; }; -TEST_P(VaapiVEAInitializeTest, SpatialLayerAndTemporalLayerEncoding) { - VideoEncodeAccelerator::Config config = kDefaultVEAConfig; - const uint8_t num_of_temporal_layers = GetParam().num_of_temporal_layers; +struct VaapiVideoEncodeAcceleratorTestParam { + uint8_t num_of_spatial_layers = 0; + uint8_t num_of_temporal_layers = 0; +} kTestCases[]{ + {1u, 1u}, // Single spatial layer, single temporal layer. + {1u, 3u}, // Single spatial layer, multiple temporal layers. + {3u, 1u}, // Multiple spatial layers, single temporal layer. + {3u, 3u}, // Multiple spatial layers, multiple temporal layers. +}; + +TEST_P(VaapiVideoEncodeAcceleratorTest, + InitializeVP9WithMultipleSpatialLayers) { const uint8_t num_of_spatial_layers = GetParam().num_of_spatial_layers; + if (num_of_spatial_layers <= 1) + GTEST_SKIP() << "Test only meant for multiple spatial layers configuration"; + + VideoEncodeAccelerator::Config config = kDefaultVideoEncodeAcceleratorConfig; + const uint8_t num_of_temporal_layers = GetParam().num_of_temporal_layers; constexpr int kDenom[] = {4, 2, 1}; for (uint8_t i = 0; i < num_of_spatial_layers; ++i) { VideoEncodeAccelerator::Config::SpatialLayer spatial_layer; - int denom = kDenom[i]; + const int denom = kDenom[i]; spatial_layer.width = kDefaultEncodeSize.width() / denom; spatial_layer.height = kDefaultEncodeSize.height() / denom; spatial_layer.bitrate_bps = kDefaultBitrateBps / denom; @@ -65,18 +295,29 @@ TEST_P(VaapiVEAInitializeTest, SpatialLayerAndTemporalLayerEncoding) { config.spatial_layers.push_back(spatial_layer); } - VaapiVideoEncodeAccelerator vea; - MockVideoEncodeAcceleratorClient client; - EXPECT_EQ(vea.Initialize(config, &client), GetParam().expected_result); + EXPECT_FALSE(InitializeVideoEncodeAccelerator(config)); } -constexpr VaapiVEAInitializeTestParam kTestCases[] = { - {1u, 3u, false}, // Spatial Layer only. - {3u, 3u, false}, // Temporal + Spatial Layer. 
-}; +TEST_P(VaapiVideoEncodeAcceleratorTest, EncodeVP9WithSingleSpatialLayer) { + if (GetParam().num_of_spatial_layers > 1u) + GTEST_SKIP() << "Test only meant for single spatial layer"; + + VideoEncodeAccelerator::Config config = kDefaultVideoEncodeAcceleratorConfig; + VideoEncodeAccelerator::Config::SpatialLayer spatial_layer; + spatial_layer.width = kDefaultEncodeSize.width(); + spatial_layer.height = kDefaultEncodeSize.height(); + spatial_layer.bitrate_bps = kDefaultBitrateBps; + spatial_layer.framerate = kDefaultFramerate; + spatial_layer.max_qp = 30; + spatial_layer.num_of_temporal_layers = GetParam().num_of_temporal_layers; + config.spatial_layers.push_back(spatial_layer); + SetDefaultMocksBehavior(config); -INSTANTIATE_TEST_SUITE_P(SpatialLayerAndTemporalLayerEncoding, - VaapiVEAInitializeTest, + InitializeSequenceForVP9(config); + EncodeSequenceForVP9(); +} + +INSTANTIATE_TEST_SUITE_P(, + VaapiVideoEncodeAcceleratorTest, ::testing::ValuesIn(kTestCases)); -} // namespace } // namespace media diff --git a/chromium/media/gpu/vaapi/vaapi_wrapper.cc b/chromium/media/gpu/vaapi/vaapi_wrapper.cc index f238e6f0851..ad898555fe7 100644 --- a/chromium/media/gpu/vaapi/vaapi_wrapper.cc +++ b/chromium/media/gpu/vaapi/vaapi_wrapper.cc @@ -10,6 +10,7 @@ #include <va/va.h> #include <va/va_drm.h> #include <va/va_drmcommon.h> +#include <va/va_str.h> #include <va/va_version.h> #include <algorithm> @@ -115,8 +116,10 @@ uint32_t BufferFormatToVAFourCC(gfx::BufferFormat fmt) { return VA_FOURCC_YV12; case gfx::BufferFormat::YUV_420_BIPLANAR: return VA_FOURCC_NV12; + case gfx::BufferFormat::P010: + return VA_FOURCC_P010; default: - NOTREACHED(); + NOTREACHED() << gfx::BufferFormatToString(fmt); return 0; } } @@ -142,6 +145,21 @@ namespace media { namespace { +// Returns true if the SoC has a Gen9 GPU. CPU model ID's are referenced from +// the following file in the kernel source: arch/x86/include/asm/intel-family.h. +bool IsGen9Gpu() { + constexpr int kPentiumAndLaterFamily = 0x06; + constexpr int kSkyLakeModelId = 0x5E; + constexpr int kSkyLake_LModelId = 0x4E; + constexpr int kApolloLakeModelId = 0x5c; + static base::NoDestructor<base::CPU> cpuid; + static const bool is_gen9_gpu = cpuid->family() == kPentiumAndLaterFamily && + (cpuid->model() == kSkyLakeModelId || + cpuid->model() == kSkyLake_LModelId || + cpuid->model() == kApolloLakeModelId); + return is_gen9_gpu; +} + // Returns true if the SoC has a 9.5 GPU. CPU model IDs are referenced from the // following file in the kernel source: arch/x86/include/asm/intel-family.h. bool IsGen95Gpu() { @@ -246,111 +264,18 @@ static const struct { {H264PROFILE_HIGH, VAProfileH264High}, {VP8PROFILE_ANY, VAProfileVP8Version0_3}, {VP9PROFILE_PROFILE0, VAProfileVP9Profile0}, - // VP9 hw encode/decode on profile 1 is not enabled on chromium-vaapi. + // Chrome does not support VP9 Profile 1, see b/153680337. // {VP9PROFILE_PROFILE1, VAProfileVP9Profile1}, - // TODO(crbug.com/1011454, crbug.com/1011469): Reenable VP9PROFILE_PROFILE2 - // and _PROFILE3 when P010 is completely supported. - //{VP9PROFILE_PROFILE2, VAProfileVP9Profile2}, - //{VP9PROFILE_PROFILE3, VAProfileVP9Profile3}, + {VP9PROFILE_PROFILE2, VAProfileVP9Profile2}, + {VP9PROFILE_PROFILE3, VAProfileVP9Profile3}, }; -// Converts the given |va_profile| to the corresponding string. 
-// See: http://go/gh/intel/libva/blob/master/va/va.h#L359 -std::string VAProfileToString(VAProfile va_profile) { - switch (va_profile) { - case VAProfileNone: - return "VAProfileNone"; - case VAProfileMPEG2Simple: - return "VAProfileMPEG2Simple"; - case VAProfileMPEG2Main: - return "VAProfileMPEG2Main"; - case VAProfileMPEG4Simple: - return "VAProfileMPEG4Simple"; - case VAProfileMPEG4AdvancedSimple: - return "VAProfileMPEG4AdvancedSimple"; - case VAProfileMPEG4Main: - return "VAProfileMPEG4Main"; - case VAProfileH264Baseline: - return "VAProfileH264Baseline"; - case VAProfileH264Main: - return "VAProfileH264Main"; - case VAProfileH264High: - return "VAProfileH264High"; - case VAProfileVC1Simple: - return "VAProfileVC1Simple"; - case VAProfileVC1Main: - return "VAProfileVC1Main"; - case VAProfileVC1Advanced: - return "VAProfileVC1Advanced"; - case VAProfileH263Baseline: - return "VAProfileH263Baseline"; - case VAProfileJPEGBaseline: - return "VAProfileJPEGBaseline"; - case VAProfileH264ConstrainedBaseline: - return "VAProfileH264ConstrainedBaseline"; - case VAProfileVP8Version0_3: - return "VAProfileVP8Version0_3"; - case VAProfileH264MultiviewHigh: - return "VAProfileH264MultiviewHigh"; - case VAProfileH264StereoHigh: - return "VAProfileH264StereoHigh"; - case VAProfileHEVCMain: - return "VAProfileHEVCMain"; - case VAProfileHEVCMain10: - return "VAProfileHEVCMain10"; - case VAProfileVP9Profile0: - return "VAProfileVP9Profile0"; - case VAProfileVP9Profile1: - return "VAProfileVP9Profile1"; - case VAProfileVP9Profile2: - return "VAProfileVP9Profile2"; - case VAProfileVP9Profile3: - return "VAProfileVP9Profile3"; -#if VA_MAJOR_VERSION >= 2 || (VA_MAJOR_VERSION == 1 && VA_MINOR_VERSION >= 2) - case VAProfileHEVCMain12: - return "VAProfileHEVCMain12"; - case VAProfileHEVCMain422_10: - return "VAProfileHEVCMain422_10"; - case VAProfileHEVCMain422_12: - return "VAProfileHEVCMain422_12"; - case VAProfileHEVCMain444: - return "VAProfileHEVCMain444"; - case VAProfileHEVCMain444_10: - return "VAProfileHEVCMain444_10"; - case VAProfileHEVCMain444_12: - return "VAProfileHEVCMain444_12"; - case VAProfileHEVCSccMain: - return "VAProfileHEVCSccMain"; - case VAProfileHEVCSccMain10: - return "VAProfileHEVCSccMain10"; - case VAProfileHEVCSccMain444: - return "VAProfileHEVCSccMain444"; -#endif - default: - NOTREACHED(); - return ""; - } -} - bool IsBlackListedDriver(const std::string& va_vendor_string, VaapiWrapper::CodecMode mode, VAProfile va_profile) { if (!IsModeEncoding(mode)) return false; - // TODO(crbug.com/828482): Remove once H264 encoder on AMD is enabled by - // default. - if (VendorStringToImplementationType(va_vendor_string) == - VAImplementation::kMesaGallium && - base::Contains(va_vendor_string, "AMD STONEY") && - !base::FeatureList::IsEnabled(kVaapiH264AMDEncoder)) { - constexpr VAProfile kH264Profiles[] = {VAProfileH264Baseline, - VAProfileH264Main, VAProfileH264High, - VAProfileH264ConstrainedBaseline}; - if (base::Contains(kH264Profiles, va_profile)) - return true; - } - // TODO(posciak): Remove once VP8 encoding is to be enabled by default. 
if (va_profile == VAProfileVP8Version0_3 && !base::FeatureList::IsEnabled(kVaapiVP8Encoder)) { @@ -639,8 +564,8 @@ static bool GetRequiredAttribs(const base::Lock* va_lock, VAStatus va_res = vaGetConfigAttributes(va_display, profile, entrypoint, &attrib, 1); if (va_res != VA_STATUS_SUCCESS) { - LOG(ERROR) << "GetConfigAttributes failed for va_profile " - << VAProfileToString(profile); + LOG(ERROR) << "vaGetConfigAttributes failed for " + << vaProfileStr(profile); return false; } @@ -761,7 +686,7 @@ VASupportedProfiles::VASupportedProfiles() static_assert(std::extent<decltype(supported_profiles_)>() == VaapiWrapper::kCodecModeMax, - "The array size of supported profile is incorrect."); + "|supported_profiles_| size is incorrect."); if (!display_state->Initialize()) return; @@ -793,6 +718,18 @@ VASupportedProfiles::GetSupportedProfileInfosForCodecModeInternal( VaapiWrapper::CodecMode mode) const { std::vector<ProfileInfo> supported_profile_infos; std::vector<VAProfile> va_profiles; + // VAProfiles supported by VaapiWrapper. + constexpr VAProfile kSupportedVaProfiles[] = { + VAProfileH264ConstrainedBaseline, + VAProfileH264Main, + VAProfileH264High, + VAProfileJPEGBaseline, + VAProfileVP8Version0_3, + VAProfileVP9Profile0, + // Chrome does not support VP9 Profile 1, see b/153680337. + // VAProfileVP9Profile1, + VAProfileVP9Profile2, + VAProfileVP9Profile3}; if (!GetSupportedVAProfiles(&va_profiles)) return supported_profile_infos; @@ -802,6 +739,10 @@ VASupportedProfiles::GetSupportedProfileInfosForCodecModeInternal( VADisplayState::Get()->va_vendor_string(); for (const auto& va_profile : va_profiles) { + if ((mode != VaapiWrapper::CodecMode::kVideoProcess) && + !base::Contains(kSupportedVaProfiles, va_profile)) { + continue; + } const std::vector<VAEntrypoint> supported_entrypoints = GetEntryPointsForProfile(va_lock_, va_display_, mode, va_profile); if (supported_entrypoints.empty()) @@ -824,8 +765,8 @@ VASupportedProfiles::GetSupportedProfileInfosForCodecModeInternal( if (!FillProfileInfo_Locked(va_profile, entrypoint, required_attribs, &profile_info)) { LOG(ERROR) << "FillProfileInfo_Locked failed for va_profile " - << VAProfileToString(va_profile) << " and entrypoint " - << entrypoint; + << vaProfileStr(va_profile) << " and entrypoint " + << vaEntrypointStr(entrypoint); continue; } supported_profile_infos.push_back(profile_info); @@ -1195,6 +1136,37 @@ bool VASupportedImageFormats::InitSupportedImageFormats_Locked() { return true; } +bool IsLowPowerEncSupported(VAProfile va_profile) { + constexpr VAProfile kSupportedLowPowerEncodeProfiles[] = { + VAProfileH264ConstrainedBaseline, + VAProfileH264Main, + VAProfileH264High, + VAProfileVP9Profile0, + VAProfileVP9Profile1, + VAProfileVP9Profile2, + VAProfileVP9Profile3}; + if (!base::Contains(kSupportedLowPowerEncodeProfiles, va_profile)) + return false; + + if ((IsGen95Gpu() || IsGen9Gpu()) && + !base::FeatureList::IsEnabled(kVaapiLowPowerEncoderGen9x)) { + return false; + } + + const std::vector<VASupportedProfiles::ProfileInfo>& encode_profile_infos = + VASupportedProfiles::Get().GetSupportedProfileInfosForCodecMode( + VaapiWrapper::kEncode); + + for (const auto& profile_info : encode_profile_infos) { + if (profile_info.va_profile == va_profile && + profile_info.va_entrypoint == VAEntrypointEncSliceLP) { + return true; + } + } + + return false; +} + } // namespace NativePixmapAndSizeInfo::NativePixmapAndSizeInfo() = default; @@ -1212,7 +1184,7 @@ scoped_refptr<VaapiWrapper> VaapiWrapper::Create( VAProfile va_profile, const base::Closure& 
report_error_to_uma_cb) { if (!VASupportedProfiles::Get().IsProfileSupported(mode, va_profile)) { - DVLOG(1) << "Unsupported va_profile: " << va_profile; + DVLOG(1) << "Unsupported va_profile: " << vaProfileStr(va_profile); return nullptr; } @@ -1222,7 +1194,7 @@ scoped_refptr<VaapiWrapper> VaapiWrapper::Create( return vaapi_wrapper; } LOG(ERROR) << "Failed to create VaapiWrapper for va_profile: " - << VAProfileToString(va_profile); + << vaProfileStr(va_profile); return nullptr; } @@ -1494,8 +1466,10 @@ VAEntrypoint VaapiWrapper::GetDefaultVaEntryPoint(CodecMode mode, case VaapiWrapper::kEncodeConstantQuantizationParameter: if (profile == VAProfileJPEGBaseline) return VAEntrypointEncPicture; - else - return VAEntrypointEncSlice; + DCHECK(IsModeEncoding(mode)); + if (IsLowPowerEncSupported(profile)) + return VAEntrypointEncSliceLP; + return VAEntrypointEncSlice; case VaapiWrapper::kVideoProcess: return VAEntrypointVideoProc; case VaapiWrapper::kCodecModeMax: @@ -1515,8 +1489,10 @@ uint32_t VaapiWrapper::BufferFormatToVARTFormat(gfx::BufferFormat fmt) { case gfx::BufferFormat::YVU_420: case gfx::BufferFormat::YUV_420_BIPLANAR: return VA_RT_FORMAT_YUV420; + case gfx::BufferFormat::P010: + return VA_RT_FORMAT_YUV420_10BPP; default: - NOTREACHED(); + NOTREACHED() << gfx::BufferFormatToString(fmt); return 0; } } @@ -2028,6 +2004,28 @@ bool VaapiWrapper::CreateVABuffer(size_t size, VABufferID* buffer_id) { return true; } +uint64_t VaapiWrapper::GetEncodedChunkSize(VABufferID buffer_id, + VASurfaceID sync_surface_id) { + TRACE_EVENT0("media,gpu", "VaapiWrapper::GetEncodedChunkSize"); + base::AutoLock auto_lock(*va_lock_); + TRACE_EVENT0("media,gpu", "VaapiWrapper::GetEncodedChunkSizeLocked"); + VAStatus va_res = vaSyncSurface(va_display_, sync_surface_id); + VA_SUCCESS_OR_RETURN(va_res, "vaSyncSurface", 0u); + + ScopedVABufferMapping mapping(va_lock_, va_display_, buffer_id); + if (!mapping.IsValid()) + return 0u; + + uint64_t coded_data_size = 0; + for (auto* buffer_segment = + reinterpret_cast<VACodedBufferSegment*>(mapping.data()); + buffer_segment; buffer_segment = reinterpret_cast<VACodedBufferSegment*>( + buffer_segment->next)) { + coded_data_size += buffer_segment->size; + } + return coded_data_size; +} + bool VaapiWrapper::DownloadFromVABuffer(VABufferID buffer_id, VASurfaceID sync_surface_id, uint8_t* target_ptr, @@ -2062,13 +2060,11 @@ bool VaapiWrapper::DownloadFromVABuffer(VABufferID buffer_id, << ", the buffer segment size: " << buffer_segment->size; break; } - memcpy(target_ptr, buffer_segment->buf, buffer_segment->size); target_ptr += buffer_segment->size; - *coded_data_size += buffer_segment->size; target_size -= buffer_segment->size; - + *coded_data_size += buffer_segment->size; buffer_segment = reinterpret_cast<VACodedBufferSegment*>(buffer_segment->next); } @@ -2113,10 +2109,28 @@ void VaapiWrapper::DestroyVABuffers() { va_buffers_.clear(); } +bool VaapiWrapper::IsRotationSupported() { + base::AutoLock auto_lock(*va_lock_); + VAProcPipelineCaps pipeline_caps; + memset(&pipeline_caps, 0, sizeof(pipeline_caps)); + VAStatus va_res = vaQueryVideoProcPipelineCaps(va_display_, va_context_id_, + nullptr, 0, &pipeline_caps); + if (va_res != VA_STATUS_SUCCESS) { + LOG_VA_ERROR_AND_REPORT(va_res, "vaQueryVideoProcPipelineCaps failed"); + return false; + } + if (!pipeline_caps.rotation_flags) { + DVLOG(2) << "VA-API driver doesn't support any rotation"; + return false; + } + return true; +} + bool VaapiWrapper::BlitSurface(const VASurface& va_surface_src, const VASurface& va_surface_dest, 
base::Optional<gfx::Rect> src_rect, - base::Optional<gfx::Rect> dest_rect) { + base::Optional<gfx::Rect> dest_rect, + VideoRotation rotation) { base::AutoLock auto_lock(*va_lock_); if (va_buffers_.empty()) { @@ -2165,6 +2179,21 @@ bool VaapiWrapper::BlitSurface(const VASurface& va_surface_src, pipeline_param->output_color_standard = VAProcColorStandardNone; pipeline_param->filter_flags = VA_FILTER_SCALING_DEFAULT; + switch (rotation) { + case VIDEO_ROTATION_0: + pipeline_param->rotation_state = VA_ROTATION_NONE; + break; + case VIDEO_ROTATION_90: + pipeline_param->rotation_state = VA_ROTATION_90; + break; + case VIDEO_ROTATION_180: + pipeline_param->rotation_state = VA_ROTATION_180; + break; + case VIDEO_ROTATION_270: + pipeline_param->rotation_state = VA_ROTATION_270; + break; + } + VA_SUCCESS_OR_RETURN(mapping.Unmap(), "Vpp Buffer unmapping", false); } @@ -2241,15 +2270,7 @@ bool VaapiWrapper::Initialize(CodecMode mode, VAProfile va_profile) { } #endif // DCHECK_IS_ON() - if (mode != kVideoProcess) - TryToSetVADisplayAttributeToLocalGPU(); - - VAEntrypoint entrypoint = GetDefaultVaEntryPoint(mode, va_profile); - if (IsModeEncoding(mode) && IsLowPowerEncSupported(va_profile, mode) && - base::FeatureList::IsEnabled(kVaapiLowPowerEncoder)) { - entrypoint = VAEntrypointEncSliceLP; - DVLOG(2) << "Enable VA-API Low-Power Encode Entrypoint"; - } + const VAEntrypoint entrypoint = GetDefaultVaEntryPoint(mode, va_profile); base::AutoLock auto_lock(*va_lock_); std::vector<VAConfigAttrib> required_attribs; @@ -2258,7 +2279,7 @@ bool VaapiWrapper::Initialize(CodecMode mode, VAProfile va_profile) { return false; } - VAStatus va_res = + const VAStatus va_res = vaCreateConfig(va_display_, va_profile, entrypoint, required_attribs.empty() ? nullptr : &required_attribs[0], required_attribs.size(), &va_config_id_); @@ -2456,54 +2477,4 @@ bool VaapiWrapper::Execute_Locked(VASurfaceID va_surface_id) { return true; } -void VaapiWrapper::TryToSetVADisplayAttributeToLocalGPU() { - base::AutoLock auto_lock(*va_lock_); - VADisplayAttribute item = {VADisplayAttribRenderMode, - 1, // At least support '_LOCAL_OVERLAY'. - -1, // The maximum possible support 'ALL'. - VA_RENDER_MODE_LOCAL_GPU, - VA_DISPLAY_ATTRIB_SETTABLE}; - - VAStatus va_res = vaSetDisplayAttributes(va_display_, &item, 1); - if (va_res != VA_STATUS_SUCCESS) - DVLOG(2) << "vaSetDisplayAttributes unsupported, ignoring by default."; -} - -// Check the support for low-power encode -bool VaapiWrapper::IsLowPowerEncSupported(VAProfile va_profile, - CodecMode mode) const { - // Enabled only for H264/AVC & VP9 Encoders - if (va_profile != VAProfileH264ConstrainedBaseline && - va_profile != VAProfileH264Main && va_profile != VAProfileH264High && - va_profile != VAProfileVP9Profile0 && va_profile != VAProfileVP9Profile1) - return false; - - constexpr VAEntrypoint kLowPowerEncEntryPoint = VAEntrypointEncSliceLP; - std::vector<VAConfigAttrib> required_attribs; - - base::AutoLock auto_lock(*va_lock_); - GetRequiredAttribs(va_lock_, va_display_, mode, va_profile, - kLowPowerEncEntryPoint, &required_attribs); - // Query the driver for required attributes. 
- std::vector<VAConfigAttrib> attribs = required_attribs; - for (size_t i = 0; i < required_attribs.size(); ++i) - attribs[i].value = 0; - - VAStatus va_res = - vaGetConfigAttributes(va_display_, va_profile, kLowPowerEncEntryPoint, - &attribs[0], attribs.size()); - VA_SUCCESS_OR_RETURN(va_res, "vaGetConfigAttributes", false); - - for (size_t i = 0; i < required_attribs.size(); ++i) { - if (attribs[i].type != required_attribs[i].type || - (attribs[i].value & required_attribs[i].value) != - required_attribs[i].value) { - DVLOG(1) << "Unsupported value " << required_attribs[i].value - << " for attribute type " << required_attribs[i].type; - return false; - } - } - return true; -} - } // namespace media diff --git a/chromium/media/gpu/vaapi/vaapi_wrapper.h b/chromium/media/gpu/vaapi/vaapi_wrapper.h index 7f087039c58..c4d005ba456 100644 --- a/chromium/media/gpu/vaapi/vaapi_wrapper.h +++ b/chromium/media/gpu/vaapi/vaapi_wrapper.h @@ -322,9 +322,9 @@ class MEDIA_GPU_EXPORT VaapiWrapper // Useful when a pending job is to be cancelled (on reset or error). void DestroyPendingBuffers(); - // Execute job in hardware on target |va_surface_id| and destroy pending - // buffers. Return false if Execute() fails. - bool ExecuteAndDestroyPendingBuffers(VASurfaceID va_surface_id); + // Executes job in hardware on target |va_surface_id| and destroys pending + // buffers. Returns false if Execute() fails. + virtual bool ExecuteAndDestroyPendingBuffers(VASurfaceID va_surface_id); #if defined(USE_X11) // Put data from |va_surface_id| into |x_pixmap| of size @@ -343,28 +343,37 @@ class MEDIA_GPU_EXPORT VaapiWrapper VAImageFormat* format, const gfx::Size& size); - // Upload contents of |frame| into |va_surface_id| for encode. - bool UploadVideoFrameToSurface(const VideoFrame& frame, - VASurfaceID va_surface_id, - const gfx::Size& va_surface_size); + // Uploads contents of |frame| into |va_surface_id| for encode. + virtual bool UploadVideoFrameToSurface(const VideoFrame& frame, + VASurfaceID va_surface_id, + const gfx::Size& va_surface_size); - // Create a buffer of |size| bytes to be used as encode output. - bool CreateVABuffer(size_t size, VABufferID* buffer_id); + // Creates a buffer of |size| bytes to be used as encode output. + virtual bool CreateVABuffer(size_t size, VABufferID* buffer_id); - // Download the contents of the buffer with given |buffer_id| into a buffer of - // size |target_size|, pointed to by |target_ptr|. The number of bytes + // Gets the encoded frame linear size of the buffer with given |buffer_id|. + // |sync_surface_id| will be used as a sync point, i.e. it will have to become + // idle before starting the acquirement. |sync_surface_id| should be the + // source surface passed to the encode job. Returns 0 if it fails for any + // reason. + virtual uint64_t GetEncodedChunkSize(VABufferID buffer_id, + VASurfaceID sync_surface_id); + + // Downloads the contents of the buffer with given |buffer_id| into a buffer + // of size |target_size|, pointed to by |target_ptr|. The number of bytes // downloaded will be returned in |coded_data_size|. |sync_surface_id| will // be used as a sync point, i.e. it will have to become idle before starting // the download. |sync_surface_id| should be the source surface passed - // to the encode job. - bool DownloadFromVABuffer(VABufferID buffer_id, - VASurfaceID sync_surface_id, - uint8_t* target_ptr, - size_t target_size, - size_t* coded_data_size); + // to the encode job. Returns false if it fails for any reason. 
For example, + // the linear size of the resulted encoded frame is larger than |target_size|. + virtual bool DownloadFromVABuffer(VABufferID buffer_id, + VASurfaceID sync_surface_id, + uint8_t* target_ptr, + size_t target_size, + size_t* coded_data_size); // Deletes the VA buffer identified by |buffer_id|. - void DestroyVABuffer(VABufferID buffer_id); + virtual void DestroyVABuffer(VABufferID buffer_id); // Destroy all previously-allocated (and not yet destroyed) buffers. void DestroyVABuffers(); @@ -374,23 +383,27 @@ class MEDIA_GPU_EXPORT VaapiWrapper // For H.264 encoding, the value represents the maximum number of reference // frames for both the reference picture list 0 (bottom 16 bits) and the // reference picture list 1 (top 16 bits). - bool GetVAEncMaxNumOfRefFrames(VideoCodecProfile profile, - size_t* max_ref_frames); + virtual bool GetVAEncMaxNumOfRefFrames(VideoCodecProfile profile, + size_t* max_ref_frames); + + // Checks if the driver supports frame rotation. + bool IsRotationSupported(); // Blits a VASurface |va_surface_src| into another VASurface - // |va_surface_dest| applying pixel format conversion, cropping and scaling - // if needed. |src_rect| and |dest_rect| are optional. They can be used to - // specify the area used in the blit. + // |va_surface_dest| applying pixel format conversion, rotation, cropping + // and scaling if needed. |src_rect| and |dest_rect| are optional. They can + // be used to specify the area used in the blit. bool BlitSurface(const VASurface& va_surface_src, const VASurface& va_surface_dest, base::Optional<gfx::Rect> src_rect = base::nullopt, - base::Optional<gfx::Rect> dest_rect = base::nullopt); + base::Optional<gfx::Rect> dest_rect = base::nullopt, + VideoRotation rotation = VIDEO_ROTATION_0); // Initialize static data before sandbox is enabled. static void PreSandboxInitialization(); // vaDestroySurfaces() a vector or a single VASurfaceID. - void DestroySurfaces(std::vector<VASurfaceID> va_surfaces); + virtual void DestroySurfaces(std::vector<VASurfaceID> va_surfaces); virtual void DestroySurface(VASurfaceID va_surface_id); protected: @@ -425,12 +438,6 @@ class MEDIA_GPU_EXPORT VaapiWrapper void DestroyPendingBuffers_Locked() EXCLUSIVE_LOCKS_REQUIRED(va_lock_); - // Attempt to set render mode to "render to texture.". Failure is non-fatal. - void TryToSetVADisplayAttributeToLocalGPU(); - - // Check low-power encode support for |profile| and |mode|. - bool IsLowPowerEncSupported(VAProfile va_profile, CodecMode mode) const; - const CodecMode mode_; // Pointer to VADisplayState's member |va_lock_|. Guaranteed to be valid for diff --git a/chromium/media/gpu/vaapi/vp9_encoder.cc b/chromium/media/gpu/vaapi/vp9_encoder.cc index 140ac37af4c..0c125f02b36 100644 --- a/chromium/media/gpu/vaapi/vp9_encoder.cc +++ b/chromium/media/gpu/vaapi/vp9_encoder.cc @@ -4,8 +4,12 @@ #include "media/gpu/vaapi/vp9_encoder.h" +#include <algorithm> + #include "base/bits.h" #include "media/gpu/macros.h" +#include "media/gpu/vaapi/vp9_rate_control.h" +#include "third_party/libvpx/source/libvpx/vp9/ratectrl_rtc.h" namespace media { @@ -19,6 +23,9 @@ constexpr int kCPBWindowSizeMs = 500; // Quantization parameter. They are vp9 ac/dc indices and their ranges are // 0-255. Based on WebRTC's defaults. constexpr int kMinQP = 4; +// TODO(crbug.com/1060775): Relax this max quantization parameter upper bound +// so that our encoder and bitrate controller can select a higher value in the +// case a requested bitrate is small. 
constexpr int kMaxQP = 112; // This stands for 31 as a real ac value (see rfc 8.6.1 table // ac_qlookup[3][256]). Note: This needs to be revisited once we have 10&12 bit @@ -29,6 +36,84 @@ constexpr int kDefaultQP = 24; // we set a constant value (== 10) which is what other VA-API // implementations like libyami and gstreamer-vaapi are using. constexpr uint8_t kDefaultLfLevel = 10; + +// Convert Qindex, whose range is 0-255, to the quantizer parameter used in +// libvpx vp9 rate control, whose range is 0-63. +// Cited from //third_party/libvpx/source/libvpx/vp9/encoder/vp9_quantize.cc. +int QindexToQuantizer(int q_index) { + constexpr int kQuantizerToQindex[] = { + 0, 4, 8, 12, 16, 20, 24, 28, 32, 36, 40, 44, 48, + 52, 56, 60, 64, 68, 72, 76, 80, 84, 88, 92, 96, 100, + 104, 108, 112, 116, 120, 124, 128, 132, 136, 140, 144, 148, 152, + 156, 160, 164, 168, 172, 176, 180, 184, 188, 192, 196, 200, 204, + 208, 212, 216, 220, 224, 228, 232, 236, 240, 244, 249, 255, + }; + + for (size_t q = 0; q < base::size(kQuantizerToQindex); ++q) { + if (kQuantizerToQindex[q] >= q_index) + return q; + } + return base::size(kQuantizerToQindex) - 1; +} + +// The return value is expressed as a percentage of the average. For example, +// to allocate no more than 4.5 frames worth of bitrate to a keyframe, the +// return value is 450. +uint32_t MaxSizeOfKeyframeAsPercentage(uint32_t optimal_buffer_size, + uint32_t max_framerate) { + // Set max to the optimal buffer level (normalized by target BR), + // and scaled by a scale_par. + // Max target size = scale_par * optimal_buffer_size * targetBR[Kbps]. + // This value is presented in percentage of perFrameBw: + // perFrameBw = targetBR[Kbps] * 1000 / framerate. + // The target in % is as follows: + const double target_size_byte_per_frame = optimal_buffer_size * 0.5; + const uint32_t target_size_kbyte = + target_size_byte_per_frame * max_framerate / 1000; + const uint32_t target_size_kbyte_as_percent = target_size_kbyte * 100; + + // Don't go below 3 times the per frame bandwidth. + constexpr uint32_t kMinIntraSizePercentage = 300u; + return std::max(kMinIntraSizePercentage, target_size_kbyte_as_percent); +} + +libvpx::VP9RateControlRtcConfig CreateRCConfig( + const gfx::Size& encode_size, + const VP9Encoder::EncodeParams& encode_params) { + libvpx::VP9RateControlRtcConfig rc_cfg{}; + rc_cfg.width = encode_size.width(); + rc_cfg.height = encode_size.height(); + rc_cfg.max_quantizer = + QindexToQuantizer(encode_params.scaling_settings.max_qp); + rc_cfg.min_quantizer = + QindexToQuantizer(encode_params.scaling_settings.min_qp); + // libvpx::VP9RateControlRtcConfig is kbps. + rc_cfg.target_bandwidth = + encode_params.bitrate_allocation.GetSumBps() / 1000.0; + // These default values come from + // //third_party/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc. + rc_cfg.buf_initial_sz = 500; + rc_cfg.buf_optimal_sz = 600; + rc_cfg.buf_sz = 1000; + rc_cfg.undershoot_pct = 50; + rc_cfg.overshoot_pct = 50; + rc_cfg.max_intra_bitrate_pct = MaxSizeOfKeyframeAsPercentage( + rc_cfg.buf_optimal_sz, encode_params.framerate); + rc_cfg.framerate = encode_params.framerate; + + // Spatial layer variables. + rc_cfg.ss_number_layers = 1; + rc_cfg.max_quantizers[0] = rc_cfg.max_quantizer; + rc_cfg.min_quantizers[0] = rc_cfg.min_quantizer; + // TODO(crbug.com/1030199): Fill multiple temporal layers variables. + // Temporal layer variables. 
+ rc_cfg.ts_number_layers = 1; + rc_cfg.scaling_factor_num[0] = 1; + rc_cfg.scaling_factor_den[0] = 1; + rc_cfg.layer_target_bitrate[0] = rc_cfg.target_bandwidth; + rc_cfg.ts_rate_decimator[0] = 1; + return rc_cfg; +} } // namespace VP9Encoder::EncodeParams::EncodeParams() @@ -40,6 +125,11 @@ VP9Encoder::EncodeParams::EncodeParams() scaling_settings(kMinQP, kMaxQP), error_resilient_mode(false) {} +void VP9Encoder::set_rate_ctrl_for_testing( + std::unique_ptr<VP9RateControl> rate_ctrl) { + rate_ctrl_ = std::move(rate_ctrl); +} + void VP9Encoder::Reset() { current_params_ = EncodeParams(); reference_frames_.Clear(); @@ -66,20 +156,27 @@ bool VP9Encoder::Initialize(const VideoEncodeAccelerator::Config& config, DVLOGF(1) << "Input visible size could not be empty"; return false; } - // 4:2:0 format has to be 2-aligned. - if ((config.input_visible_size.width() % 2 != 0) || - (config.input_visible_size.height() % 2 != 0)) { - DVLOGF(1) << "The pixel sizes are not even: " - << config.input_visible_size.ToString(); - return false; - } + accelerator_->set_bitrate_control(ave_config.bitrate_control); visible_size_ = config.input_visible_size; coded_size_ = gfx::Size(base::bits::Align(visible_size_.width(), 16), base::bits::Align(visible_size_.height(), 16)); - Reset(); + if (ave_config.bitrate_control == + BitrateControl::kConstantQuantizationParameter) { + // |rate_ctrl_| might be injected for tests. + if (!rate_ctrl_) { + rate_ctrl_ = VP9RateControl::Create( + CreateRCConfig(visible_size_, current_params_)); + } + if (!rate_ctrl_) + return false; + } else { + DCHECK(!rate_ctrl_) << "|rate_ctrl_| should only be configured when in " + "kConstantQuantizationParameter"; + } + VideoBitrateAllocation initial_bitrate_allocation; initial_bitrate_allocation.SetBitrate(0, 0, config.initial_bitrate); return UpdateRates(initial_bitrate_allocation, @@ -121,13 +218,14 @@ bool VP9Encoder::PrepareEncodeJob(EncodeJob* encode_job) { scoped_refptr<VP9Picture> picture = accelerator_->GetPicture(encode_job); DCHECK(picture); - UpdateFrameHeader(encode_job->IsKeyframeRequested()); + const bool keyframe = encode_job->IsKeyframeRequested(); + UpdateFrameHeader(keyframe); *picture->frame_hdr = current_frame_hdr_; // Use last, golden and altref for references. 
- constexpr std::array<bool, kVp9NumRefsPerFrame> ref_frames_used = {true, true, - true}; + const std::array<bool, kVp9NumRefsPerFrame> ref_frames_used = { + !keyframe, !keyframe, !keyframe}; if (!accelerator_->SubmitFrameParameters(encode_job, current_params_, picture, reference_frames_, ref_frames_used)) { @@ -139,6 +237,18 @@ bool VP9Encoder::PrepareEncodeJob(EncodeJob* encode_job) { return true; } +void VP9Encoder::BitrateControlUpdate(uint64_t encoded_chunk_size_bytes) { + if (accelerator_->bitrate_control() != + BitrateControl::kConstantQuantizationParameter || + !rate_ctrl_) { + DLOG(ERROR) << __func__ << "() is called when no bitrate controller exists"; + return; + } + + DVLOGF(4) << "|encoded_chunk_size_bytes|=" << encoded_chunk_size_bytes; + rate_ctrl_->PostEncodeUpdate(encoded_chunk_size_bytes); +} + bool VP9Encoder::UpdateRates(const VideoBitrateAllocation& bitrate_allocation, uint32_t framerate) { DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_); @@ -160,6 +270,10 @@ bool VP9Encoder::UpdateRates(const VideoBitrateAllocation& bitrate_allocation, current_params_.bitrate_allocation.GetSumBps() * current_params_.cpb_window_size_ms / 1000; + if (!rate_ctrl_) + return true; + + rate_ctrl_->UpdateRateControl(CreateRCConfig(visible_size_, current_params_)); return true; } @@ -181,8 +295,6 @@ void VP9Encoder::UpdateFrameHeader(bool keyframe) { current_frame_hdr_.refresh_frame_flags = 0xff; ref_frame_index_ = 0; } else { - // TODO(crbug.com/811912): Add temporal layer support when there is a driver - // support. Use the last three frames for reference. current_frame_hdr_.frame_type = Vp9FrameHeader::INTERFRAME; current_frame_hdr_.ref_frame_idx[0] = ref_frame_index_; current_frame_hdr_.ref_frame_idx[1] = @@ -192,6 +304,19 @@ void VP9Encoder::UpdateFrameHeader(bool keyframe) { ref_frame_index_ = (ref_frame_index_ + 1) % kVp9NumRefFrames; current_frame_hdr_.refresh_frame_flags = 1 << ref_frame_index_; } + + if (!rate_ctrl_) + return; + + libvpx::VP9FrameParamsQpRTC frame_params{}; + frame_params.frame_type = + keyframe ? FRAME_TYPE::KEY_FRAME : FRAME_TYPE::INTER_FRAME; + rate_ctrl_->ComputeQP(frame_params); + // TODO(crbug.com/1030199): Fill temporal layer id. 
+ current_frame_hdr_.quant_params.base_q_idx = rate_ctrl_->GetQP(); + current_frame_hdr_.loop_filter.level = rate_ctrl_->GetLoopfilterLevel(); + DVLOGF(4) << "|qp|=" << rate_ctrl_->GetQP() + << ", |filter_level|=" << rate_ctrl_->GetLoopfilterLevel(); } void VP9Encoder::UpdateReferenceFrames(scoped_refptr<VP9Picture> picture) { diff --git a/chromium/media/gpu/vaapi/vp9_encoder.h b/chromium/media/gpu/vaapi/vp9_encoder.h index 2f3eda4b440..9c0ad1cb9f3 100644 --- a/chromium/media/gpu/vaapi/vp9_encoder.h +++ b/chromium/media/gpu/vaapi/vp9_encoder.h @@ -19,6 +19,7 @@ #include "media/gpu/vp9_reference_frame_vector.h" namespace media { +class VP9RateControl; class VP9Encoder : public AcceleratedVideoEncoder { public: @@ -71,6 +72,12 @@ class VP9Encoder : public AcceleratedVideoEncoder { const Vp9ReferenceFrameVector& ref_frames, const std::array<bool, kVp9NumRefsPerFrame>& ref_frames_used) = 0; + void set_bitrate_control(BitrateControl bc) { bitrate_control_ = bc; } + BitrateControl bitrate_control() { return bitrate_control_; } + + protected: + BitrateControl bitrate_control_ = BitrateControl::kConstantBitrate; + DISALLOW_COPY_AND_ASSIGN(Accelerator); }; @@ -86,8 +93,13 @@ class VP9Encoder : public AcceleratedVideoEncoder { size_t GetMaxNumOfRefFrames() const override; ScalingSettings GetScalingSettings() const override; bool PrepareEncodeJob(EncodeJob* encode_job) override; + void BitrateControlUpdate(uint64_t encoded_chunk_size_bytes) override; private: + friend class VP9EncoderTest; + + void set_rate_ctrl_for_testing(std::unique_ptr<VP9RateControl> rate_ctrl); + void InitializeFrameHeader(); void UpdateFrameHeader(bool keyframe); void UpdateReferenceFrames(scoped_refptr<VP9Picture> picture); @@ -105,6 +117,7 @@ class VP9Encoder : public AcceleratedVideoEncoder { Vp9FrameHeader current_frame_hdr_; Vp9ReferenceFrameVector reference_frames_; + std::unique_ptr<VP9RateControl> rate_ctrl_; const std::unique_ptr<Accelerator> accelerator_; SEQUENCE_CHECKER(sequence_checker_); diff --git a/chromium/media/gpu/vaapi/vp9_encoder_unittest.cc b/chromium/media/gpu/vaapi/vp9_encoder_unittest.cc new file mode 100644 index 00000000000..fa0f8b53d3c --- /dev/null +++ b/chromium/media/gpu/vaapi/vp9_encoder_unittest.cc @@ -0,0 +1,381 @@ +// Copyright 2020 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +#include "media/gpu/vaapi/vp9_encoder.h" + +#include <memory> +#include <numeric> + +#include "base/bind_helpers.h" +#include "base/callback.h" +#include "base/logging.h" +#include "base/optional.h" +#include "media/filters/vp9_parser.h" +#include "media/gpu/vaapi/vp9_rate_control.h" +#include "testing/gmock/include/gmock/gmock.h" +#include "testing/gtest/include/gtest/gtest.h" +#include "third_party/libvpx/source/libvpx/vp9/common/vp9_blockd.h" +#include "third_party/libvpx/source/libvpx/vp9/ratectrl_rtc.h" + +using ::testing::_; +using ::testing::InSequence; +using ::testing::Invoke; +using ::testing::Return; + +namespace media { +namespace { + +constexpr size_t kDefaultMaxNumRefFrames = kVp9NumRefsPerFrame; + +AcceleratedVideoEncoder::Config kDefaultAcceleratedVideoEncoderConfig{ + kDefaultMaxNumRefFrames, + AcceleratedVideoEncoder::BitrateControl::kConstantBitrate}; + +VideoEncodeAccelerator::Config kDefaultVideoEncodeAcceleratorConfig( + PIXEL_FORMAT_I420, + gfx::Size(1280, 720), + VP9PROFILE_PROFILE0, + 14000000 /* = maximum bitrate in bits per second for level 3.1 */, + VideoEncodeAccelerator::kDefaultFramerate, + base::nullopt /* gop_length */, + base::nullopt /* h264 output level*/, + VideoEncodeAccelerator::Config::StorageType::kShmem); + +const std::vector<bool> kRefFramesUsedForKeyFrame = {false, false, false}; +const std::vector<bool> kRefFramesUsedForInterFrame = {true, true, true}; + +MATCHER_P4(MatchRtcConfigWithRates, + size, + bitrate_allocation, + framerate, + num_temporal_layers, + "") { + if (arg.target_bandwidth != + static_cast<int64_t>(bitrate_allocation.GetSumBps() / 1000.0)) { + return false; + } + + if (arg.framerate != static_cast<double>(framerate)) + return false; + + for (size_t i = 0; i < num_temporal_layers; i++) { + if (arg.layer_target_bitrate[i] != + static_cast<int>(bitrate_allocation.GetBitrateBps(0, i) / 1000.0)) { + return false; + } + if (arg.ts_rate_decimator[i] != (1 << i)) + return false; + } + + return arg.width == size.width() && arg.height == size.height() && + base::checked_cast<size_t>(arg.ts_number_layers) == + num_temporal_layers && + arg.ss_number_layers == 1 && arg.scaling_factor_num[0] == 1 && + arg.scaling_factor_den[0] == 1; +} + +MATCHER_P2(MatchFrameParam, frame_type, temporal_idx, "") { + return arg.frame_type == frame_type && + (!temporal_idx || arg.temporal_layer_id == *temporal_idx); +} + +class MockVP9Accelerator : public VP9Encoder::Accelerator { + public: + MockVP9Accelerator() = default; + ~MockVP9Accelerator() override = default; + MOCK_METHOD1(GetPicture, + scoped_refptr<VP9Picture>(AcceleratedVideoEncoder::EncodeJob*)); + + MOCK_METHOD5(SubmitFrameParameters, + bool(AcceleratedVideoEncoder::EncodeJob*, + const VP9Encoder::EncodeParams&, + scoped_refptr<VP9Picture>, + const Vp9ReferenceFrameVector&, + const std::array<bool, kVp9NumRefsPerFrame>&)); +}; + +class MockVP9RateControl : public VP9RateControl { + public: + MockVP9RateControl() = default; + ~MockVP9RateControl() override = default; + + MOCK_METHOD1(UpdateRateControl, void(const libvpx::VP9RateControlRtcConfig&)); + MOCK_CONST_METHOD0(GetQP, int()); + MOCK_CONST_METHOD0(GetLoopfilterLevel, int()); + MOCK_METHOD1(ComputeQP, void(const libvpx::VP9FrameParamsQpRTC&)); + MOCK_METHOD1(PostEncodeUpdate, void(uint64_t)); +}; +} // namespace + +struct VP9EncoderTestParam; + +class VP9EncoderTest : public ::testing::TestWithParam<VP9EncoderTestParam> { + public: + using BitrateControl = AcceleratedVideoEncoder::BitrateControl; + + VP9EncoderTest() = default; + 
~VP9EncoderTest() override = default; + + void SetUp() override; + + protected: + using FrameType = Vp9FrameHeader::FrameType; + + void InitializeVP9Encoder(BitrateControl bitrate_control); + void EncodeSequence(FrameType frame_type); + void EncodeConstantQuantizationParameterSequence( + FrameType frame_type, + const std::vector<bool>& expected_ref_frames_used, + base::Optional<uint8_t> expected_temporal_idx = base::nullopt); + void UpdateRatesTest(BitrateControl bitrate_control, + size_t num_temporal_layers); + + private: + std::unique_ptr<AcceleratedVideoEncoder::EncodeJob> CreateEncodeJob( + bool keyframe); + void UpdateRatesSequence(const VideoBitrateAllocation& bitrate_allocation, + uint32_t framerate, + BitrateControl bitrate_control); + + std::unique_ptr<VP9Encoder> encoder_; + MockVP9Accelerator* mock_accelerator_ = nullptr; + MockVP9RateControl* mock_rate_ctrl_ = nullptr; +}; + +void VP9EncoderTest::SetUp() { + auto mock_accelerator = std::make_unique<MockVP9Accelerator>(); + mock_accelerator_ = mock_accelerator.get(); + auto rate_ctrl = std::make_unique<MockVP9RateControl>(); + mock_rate_ctrl_ = rate_ctrl.get(); + + encoder_ = std::make_unique<VP9Encoder>(std::move(mock_accelerator)); + encoder_->set_rate_ctrl_for_testing(std::move(rate_ctrl)); +} + +std::unique_ptr<AcceleratedVideoEncoder::EncodeJob> +VP9EncoderTest::CreateEncodeJob(bool keyframe) { + auto input_frame = VideoFrame::CreateFrame( + kDefaultVideoEncodeAcceleratorConfig.input_format, + kDefaultVideoEncodeAcceleratorConfig.input_visible_size, + gfx::Rect(kDefaultVideoEncodeAcceleratorConfig.input_visible_size), + kDefaultVideoEncodeAcceleratorConfig.input_visible_size, + base::TimeDelta()); + LOG_ASSERT(input_frame) << " Failed to create VideoFrame"; + return std::make_unique<AcceleratedVideoEncoder::EncodeJob>( + input_frame, keyframe, base::DoNothing()); +} + +void VP9EncoderTest::InitializeVP9Encoder(BitrateControl bitrate_control) { + auto ave_config = kDefaultAcceleratedVideoEncoderConfig; + ave_config.bitrate_control = bitrate_control; + if (bitrate_control == BitrateControl::kConstantQuantizationParameter) { + constexpr size_t kNumTemporalLayers = 1u; + VideoBitrateAllocation initial_bitrate_allocation; + initial_bitrate_allocation.SetBitrate( + 0, 0, kDefaultVideoEncodeAcceleratorConfig.initial_bitrate); + + EXPECT_CALL( + *mock_rate_ctrl_, + UpdateRateControl(MatchRtcConfigWithRates( + kDefaultVideoEncodeAcceleratorConfig.input_visible_size, + initial_bitrate_allocation, + VideoEncodeAccelerator::kDefaultFramerate, kNumTemporalLayers))) + .Times(1) + .WillOnce(Return()); + } + EXPECT_TRUE( + encoder_->Initialize(kDefaultVideoEncodeAcceleratorConfig, ave_config)); +} + +void VP9EncoderTest::EncodeSequence(FrameType frame_type) { + InSequence seq; + const bool keyframe = frame_type == FrameType::KEYFRAME; + auto encode_job = CreateEncodeJob(keyframe); + scoped_refptr<VP9Picture> picture(new VP9Picture); + EXPECT_CALL(*mock_accelerator_, GetPicture(encode_job.get())) + .WillOnce(Invoke( + [picture](AcceleratedVideoEncoder::EncodeJob*) { return picture; })); + const auto& expected_ref_frames_used = + keyframe ? kRefFramesUsedForKeyFrame : kRefFramesUsedForInterFrame; + EXPECT_CALL(*mock_accelerator_, + SubmitFrameParameters( + encode_job.get(), _, _, _, + ::testing::ElementsAreArray(expected_ref_frames_used))) + .WillOnce(Return(true)); + EXPECT_TRUE(encoder_->PrepareEncodeJob(encode_job.get())); + // TODO(hiroh): Test for encoder_->reference_frames_. 
+} + +void VP9EncoderTest::EncodeConstantQuantizationParameterSequence( + FrameType frame_type, + const std::vector<bool>& expected_ref_frames_used, + base::Optional<uint8_t> expected_temporal_idx) { + const bool keyframe = frame_type == FrameType::KEYFRAME; + InSequence seq; + auto encode_job = CreateEncodeJob(keyframe); + scoped_refptr<VP9Picture> picture(new VP9Picture); + EXPECT_CALL(*mock_accelerator_, GetPicture(encode_job.get())) + .WillOnce(Invoke( + [picture](AcceleratedVideoEncoder::EncodeJob*) { return picture; })); + + FRAME_TYPE libvpx_frame_type = + keyframe ? FRAME_TYPE::KEY_FRAME : FRAME_TYPE::INTER_FRAME; + EXPECT_CALL(*mock_rate_ctrl_, ComputeQP(MatchFrameParam( + libvpx_frame_type, expected_temporal_idx))) + .WillOnce(Return()); + constexpr int kDefaultQP = 34; + constexpr int kDefaultLoopFilterLevel = 8; + EXPECT_CALL(*mock_rate_ctrl_, GetQP()).WillOnce(Return(kDefaultQP)); + EXPECT_CALL(*mock_rate_ctrl_, GetLoopfilterLevel()) + .WillOnce(Return(kDefaultLoopFilterLevel)); + if (!expected_ref_frames_used.empty()) { + EXPECT_CALL(*mock_accelerator_, + SubmitFrameParameters( + encode_job.get(), _, _, _, + ::testing::ElementsAreArray(expected_ref_frames_used))) + .WillOnce(Return(true)); + } else { + EXPECT_CALL(*mock_accelerator_, + SubmitFrameParameters(encode_job.get(), _, _, _, _)) + .WillOnce(Return(true)); + } + EXPECT_TRUE(encoder_->PrepareEncodeJob(encode_job.get())); + + // TODO(hiroh): Test for encoder_->reference_frames_. + + constexpr size_t kDefaultEncodedFrameSize = 123456; + // For BitrateControlUpdate sequence. + EXPECT_CALL(*mock_rate_ctrl_, PostEncodeUpdate(kDefaultEncodedFrameSize)) + .WillOnce(Return()); + encoder_->BitrateControlUpdate(kDefaultEncodedFrameSize); +} + +void VP9EncoderTest::UpdateRatesSequence( + const VideoBitrateAllocation& bitrate_allocation, + uint32_t framerate, + BitrateControl bitrate_control) { + ASSERT_TRUE(encoder_->current_params_.bitrate_allocation != + bitrate_allocation || + encoder_->current_params_.framerate != framerate); + + if (bitrate_control == BitrateControl::kConstantQuantizationParameter) { + constexpr size_t kNumTemporalLayers = 1u; + EXPECT_CALL(*mock_rate_ctrl_, + UpdateRateControl(MatchRtcConfigWithRates( + encoder_->visible_size_, bitrate_allocation, framerate, + kNumTemporalLayers))) + .Times(1) + .WillOnce(Return()); + } + + EXPECT_TRUE(encoder_->UpdateRates(bitrate_allocation, framerate)); + EXPECT_EQ(encoder_->current_params_.bitrate_allocation, bitrate_allocation); + EXPECT_EQ(encoder_->current_params_.framerate, framerate); +} + +void VP9EncoderTest::UpdateRatesTest(BitrateControl bitrate_control, + size_t num_temporal_layers) { + ASSERT_LE(num_temporal_layers, 3u); + auto create_allocation = + [num_temporal_layers](uint32_t bitrate) -> VideoBitrateAllocation { + VideoBitrateAllocation bitrate_allocation; + constexpr int kTemporalLayerBitrateScaleFactor[] = {1, 2, 4}; + const int kScaleFactors = + std::accumulate(std::cbegin(kTemporalLayerBitrateScaleFactor), + std::cend(kTemporalLayerBitrateScaleFactor), 0); + for (size_t ti = 0; ti < num_temporal_layers; ti++) { + bitrate_allocation.SetBitrate( + 0, ti, + bitrate * kTemporalLayerBitrateScaleFactor[ti] / kScaleFactors); + } + return bitrate_allocation; + }; + + const auto update_rates_and_encode = + [this, bitrate_control](FrameType frame_type, + const VideoBitrateAllocation& bitrate_allocation, + uint32_t framerate) { + UpdateRatesSequence(bitrate_allocation, framerate, bitrate_control); + if (bitrate_control == 
BitrateControl::kConstantQuantizationParameter) { + EncodeConstantQuantizationParameterSequence(frame_type, {}, + base::nullopt); + } else { + EncodeSequence(frame_type); + } + }; + + const uint32_t kBitrate = + kDefaultVideoEncodeAcceleratorConfig.initial_bitrate; + const uint32_t kFramerate = + *kDefaultVideoEncodeAcceleratorConfig.initial_framerate; + // Call UpdateRates before Encode. + update_rates_and_encode(FrameType::KEYFRAME, create_allocation(kBitrate / 2), + kFramerate); + // Bitrate change only. + update_rates_and_encode(FrameType::INTERFRAME, create_allocation(kBitrate), + kFramerate); + // Framerate change only. + update_rates_and_encode(FrameType::INTERFRAME, create_allocation(kBitrate), + kFramerate + 2); + // Bitrate + Frame changes. + update_rates_and_encode(FrameType::INTERFRAME, + create_allocation(kBitrate * 3 / 4), kFramerate - 5); +} + +struct VP9EncoderTestParam { + VP9EncoderTest::BitrateControl bitrate_control; +} kTestCasesForVP9EncoderTest[] = { + {VP9EncoderTest::BitrateControl::kConstantBitrate}, + {VP9EncoderTest::BitrateControl::kConstantQuantizationParameter}, +}; + +TEST_P(VP9EncoderTest, Initialize) { + InitializeVP9Encoder(GetParam().bitrate_control); +} + +TEST_P(VP9EncoderTest, Encode) { + const auto& bitrate_control = GetParam().bitrate_control; + InitializeVP9Encoder(bitrate_control); + if (bitrate_control == BitrateControl::kConstantBitrate) { + EncodeSequence(FrameType::KEYFRAME); + EncodeSequence(FrameType::INTERFRAME); + } else { + EncodeConstantQuantizationParameterSequence(FrameType::KEYFRAME, + kRefFramesUsedForKeyFrame); + EncodeConstantQuantizationParameterSequence(FrameType::INTERFRAME, + kRefFramesUsedForInterFrame); + } +} + +TEST_P(VP9EncoderTest, UpdateRates) { + const auto& bitrate_control = GetParam().bitrate_control; + InitializeVP9Encoder(bitrate_control); + constexpr size_t kNumTemporalLayers = 1u; + UpdateRatesTest(bitrate_control, kNumTemporalLayers); +} + +TEST_P(VP9EncoderTest, ForceKeyFrame) { + const auto& bitrate_control = GetParam().bitrate_control; + InitializeVP9Encoder(GetParam().bitrate_control); + if (bitrate_control == BitrateControl::kConstantBitrate) { + EncodeSequence(FrameType::KEYFRAME); + EncodeSequence(FrameType::INTERFRAME); + EncodeSequence(FrameType::KEYFRAME); + EncodeSequence(FrameType::INTERFRAME); + } else { + EncodeConstantQuantizationParameterSequence(FrameType::KEYFRAME, + kRefFramesUsedForKeyFrame); + EncodeConstantQuantizationParameterSequence(FrameType::INTERFRAME, + kRefFramesUsedForInterFrame); + EncodeConstantQuantizationParameterSequence(FrameType::KEYFRAME, + kRefFramesUsedForKeyFrame); + EncodeConstantQuantizationParameterSequence(FrameType::INTERFRAME, + kRefFramesUsedForInterFrame); + } +} + +INSTANTIATE_TEST_SUITE_P(, + VP9EncoderTest, + ::testing::ValuesIn(kTestCasesForVP9EncoderTest)); +} // namespace media diff --git a/chromium/media/gpu/vaapi/vp9_rate_control.cc b/chromium/media/gpu/vaapi/vp9_rate_control.cc new file mode 100644 index 00000000000..f4d6beb6129 --- /dev/null +++ b/chromium/media/gpu/vaapi/vp9_rate_control.cc @@ -0,0 +1,53 @@ +// Copyright 2020 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +#include "media/gpu/vaapi/vp9_rate_control.h" + +#include "base/logging.h" +#include "base/memory/ptr_util.h" +#include "third_party/libvpx/source/libvpx/vp9/ratectrl_rtc.h" + +namespace media { +namespace { +class LibvpxVP9RateControl : public VP9RateControl { + public: + explicit LibvpxVP9RateControl(std::unique_ptr<libvpx::VP9RateControlRTC> impl) + : impl_(std::move(impl)) {} + + ~LibvpxVP9RateControl() override = default; + LibvpxVP9RateControl(const LibvpxVP9RateControl&) = delete; + LibvpxVP9RateControl& operator=(const LibvpxVP9RateControl&) = delete; + + void UpdateRateControl( + const libvpx::VP9RateControlRtcConfig& rate_control_config) override { + impl_->UpdateRateControl(rate_control_config); + } + int GetQP() const override { return impl_->GetQP(); } + int GetLoopfilterLevel() const override { + return impl_->GetLoopfilterLevel(); + } + void ComputeQP(const libvpx::VP9FrameParamsQpRTC& frame_params) override { + impl_->ComputeQP(frame_params); + } + void PostEncodeUpdate(uint64_t encoded_frame_size) override { + impl_->PostEncodeUpdate(encoded_frame_size); + } + + private: + const std::unique_ptr<libvpx::VP9RateControlRTC> impl_; +}; + +} // namespace + +// static +std::unique_ptr<VP9RateControl> VP9RateControl::Create( + const libvpx::VP9RateControlRtcConfig& config) { + auto impl = libvpx::VP9RateControlRTC::Create(config); + if (!impl) { + DLOG(ERROR) << "Failed creating libvpx::VP9RateControlRTC"; + return nullptr; + } + return std::make_unique<LibvpxVP9RateControl>(std::move(impl)); +} +} // namespace media diff --git a/chromium/media/gpu/vaapi/vp9_rate_control.h b/chromium/media/gpu/vaapi/vp9_rate_control.h new file mode 100644 index 00000000000..116f47f5895 --- /dev/null +++ b/chromium/media/gpu/vaapi/vp9_rate_control.h @@ -0,0 +1,38 @@ +// Copyright 2020 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +#ifndef MEDIA_GPU_VAAPI_VP9_RATE_CONTROL_H_ +#define MEDIA_GPU_VAAPI_VP9_RATE_CONTROL_H_ + +#include <memory> + +#include "base/callback.h" +#include "base/optional.h" + +namespace libvpx { +struct VP9FrameParamsQpRTC; +struct VP9RateControlRtcConfig; +} // namespace libvpx + +namespace media { +// VP9RateControl is an interface to compute proper quantization +// parameter and loop filter level for vp9. +class VP9RateControl { + public: + // Creates VP9RateControl using libvpx implementation. + static std::unique_ptr<VP9RateControl> Create( + const libvpx::VP9RateControlRtcConfig& config); + + virtual ~VP9RateControl() = default; + + virtual void UpdateRateControl( + const libvpx::VP9RateControlRtcConfig& rate_control_config) = 0; + // libvpx::VP9FrameParamsQpRTC takes 0-63 quantization parameter. + virtual void ComputeQP(const libvpx::VP9FrameParamsQpRTC& frame_params) = 0; + // GetQP() returns vp9 ac/dc table index. The range is 0-255. + virtual int GetQP() const = 0; + virtual int GetLoopfilterLevel() const = 0; + virtual void PostEncodeUpdate(uint64_t encoded_frame_size) = 0; +}; +} // namespace media +#endif // MEDIA_GPU_VAAPI_VP9_RATE_CONTROL_H_ |