summaryrefslogtreecommitdiff
path: root/chromium/third_party/webrtc
diff options
context:
space:
mode:
Diffstat (limited to 'chromium/third_party/webrtc')
-rw-r--r--chromium/third_party/webrtc/AUTHORS3
-rw-r--r--chromium/third_party/webrtc/BUILD.gn12
-rw-r--r--chromium/third_party/webrtc/DEPS162
-rwxr-xr-xchromium/third_party/webrtc/PRESUBMIT.py48
-rw-r--r--chromium/third_party/webrtc/abseil-in-webrtc.md13
-rw-r--r--chromium/third_party/webrtc/api/BUILD.gn128
-rw-r--r--chromium/third_party/webrtc/api/DEPS5
-rw-r--r--chromium/third_party/webrtc/api/adaptation/BUILD.gn23
-rw-r--r--chromium/third_party/webrtc/api/adaptation/DEPS7
-rw-r--r--chromium/third_party/webrtc/api/adaptation/resource.cc30
-rw-r--r--chromium/third_party/webrtc/api/adaptation/resource.h67
-rw-r--r--chromium/third_party/webrtc/api/array_view_unittest.cc4
-rw-r--r--chromium/third_party/webrtc/api/audio/BUILD.gn2
-rw-r--r--chromium/third_party/webrtc/api/audio/audio_frame.cc24
-rw-r--r--chromium/third_party/webrtc/api/audio/audio_frame.h4
-rw-r--r--chromium/third_party/webrtc/api/audio/test/audio_frame_unittest.cc50
-rw-r--r--chromium/third_party/webrtc/api/audio_codecs/BUILD.gn2
-rw-r--r--chromium/third_party/webrtc/api/audio_codecs/L16/BUILD.gn4
-rw-r--r--chromium/third_party/webrtc/api/audio_codecs/g711/BUILD.gn4
-rw-r--r--chromium/third_party/webrtc/api/audio_codecs/g722/BUILD.gn4
-rw-r--r--chromium/third_party/webrtc/api/audio_codecs/ilbc/BUILD.gn4
-rw-r--r--chromium/third_party/webrtc/api/audio_codecs/isac/BUILD.gn8
-rw-r--r--chromium/third_party/webrtc/api/audio_codecs/opus/BUILD.gn10
-rw-r--r--chromium/third_party/webrtc/api/audio_options.h2
-rw-r--r--chromium/third_party/webrtc/api/data_channel_interface.h5
-rw-r--r--chromium/third_party/webrtc/api/frame_transformer_interface.h3
-rw-r--r--chromium/third_party/webrtc/api/neteq/BUILD.gn4
-rw-r--r--chromium/third_party/webrtc/api/neteq/neteq.cc3
-rw-r--r--chromium/third_party/webrtc/api/neteq/neteq.h4
-rw-r--r--chromium/third_party/webrtc/api/peer_connection_interface.h41
-rw-r--r--chromium/third_party/webrtc/api/peer_connection_proxy.h1
-rw-r--r--chromium/third_party/webrtc/api/priority.h26
-rw-r--r--chromium/third_party/webrtc/api/proxy.cc25
-rw-r--r--chromium/third_party/webrtc/api/proxy.h64
-rw-r--r--chromium/third_party/webrtc/api/rtc_event_log_output_file_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/api/rtp_headers.cc4
-rw-r--r--chromium/third_party/webrtc/api/rtp_headers.h6
-rw-r--r--chromium/third_party/webrtc/api/rtp_parameters.cc16
-rw-r--r--chromium/third_party/webrtc/api/rtp_parameters.h25
-rw-r--r--chromium/third_party/webrtc/api/rtp_transceiver_interface.cc6
-rw-r--r--chromium/third_party/webrtc/api/rtp_transceiver_interface.h7
-rw-r--r--chromium/third_party/webrtc/api/stats/rtc_stats.h8
-rw-r--r--chromium/third_party/webrtc/api/stats/rtcstats_objects.h2
-rw-r--r--chromium/third_party/webrtc/api/task_queue/BUILD.gn6
-rw-r--r--chromium/third_party/webrtc/api/task_queue/task_queue_test.cc11
-rw-r--r--chromium/third_party/webrtc/api/test/audioproc_float.cc6
-rw-r--r--chromium/third_party/webrtc/api/test/audioproc_float.h16
-rw-r--r--chromium/third_party/webrtc/api/test/compile_all_headers.cc4
-rw-r--r--chromium/third_party/webrtc/api/test/create_network_emulation_manager.h2
-rw-r--r--chromium/third_party/webrtc/api/test/create_peerconnection_quality_test_fixture.h1
-rw-r--r--chromium/third_party/webrtc/api/test/create_time_controller.cc8
-rw-r--r--chromium/third_party/webrtc/api/test/fake_datagram_transport.h121
-rw-r--r--chromium/third_party/webrtc/api/test/fake_media_transport.h74
-rw-r--r--chromium/third_party/webrtc/api/test/loopback_media_transport.cc373
-rw-r--r--chromium/third_party/webrtc/api/test/loopback_media_transport.h269
-rw-r--r--chromium/third_party/webrtc/api/test/loopback_media_transport_unittest.cc201
-rw-r--r--chromium/third_party/webrtc/api/test/mock_audio_mixer.h9
-rw-r--r--chromium/third_party/webrtc/api/test/mock_peer_connection_factory_interface.h75
-rw-r--r--chromium/third_party/webrtc/api/test/mock_transformable_video_frame.h38
-rw-r--r--chromium/third_party/webrtc/api/test/mock_video_bitrate_allocator_factory.h2
-rw-r--r--chromium/third_party/webrtc/api/test/mock_video_decoder_factory.h4
-rw-r--r--chromium/third_party/webrtc/api/test/mock_video_encoder.h54
-rw-r--r--chromium/third_party/webrtc/api/test/mock_video_encoder_factory.h6
-rw-r--r--chromium/third_party/webrtc/api/test/network_emulation/BUILD.gn2
-rw-r--r--chromium/third_party/webrtc/api/test/peerconnection_quality_test_fixture.h13
-rw-r--r--chromium/third_party/webrtc/api/test/stats_observer_interface.h10
-rw-r--r--chromium/third_party/webrtc/api/test/test_dependency_factory.cc16
-rw-r--r--chromium/third_party/webrtc/api/test/video_quality_analyzer_interface.h49
-rw-r--r--chromium/third_party/webrtc/api/transport/BUILD.gn28
-rw-r--r--chromium/third_party/webrtc/api/transport/congestion_control_interface.h75
-rw-r--r--chromium/third_party/webrtc/api/transport/data_channel_transport_interface.h4
-rw-r--r--chromium/third_party/webrtc/api/transport/datagram_transport_interface.h151
-rw-r--r--chromium/third_party/webrtc/api/transport/media/BUILD.gn52
-rw-r--r--chromium/third_party/webrtc/api/transport/media/audio_transport.cc54
-rw-r--r--chromium/third_party/webrtc/api/transport/media/audio_transport.h120
-rw-r--r--chromium/third_party/webrtc/api/transport/media/media_transport_config.cc29
-rw-r--r--chromium/third_party/webrtc/api/transport/media/media_transport_config.h38
-rw-r--r--chromium/third_party/webrtc/api/transport/media/media_transport_interface.cc108
-rw-r--r--chromium/third_party/webrtc/api/transport/media/media_transport_interface.h320
-rw-r--r--chromium/third_party/webrtc/api/transport/media/video_transport.cc56
-rw-r--r--chromium/third_party/webrtc/api/transport/media/video_transport.h101
-rw-r--r--chromium/third_party/webrtc/api/transport/rtp/BUILD.gn11
-rw-r--r--chromium/third_party/webrtc/api/transport/rtp/dependency_descriptor.cc54
-rw-r--r--chromium/third_party/webrtc/api/transport/rtp/dependency_descriptor.h45
-rw-r--r--chromium/third_party/webrtc/api/transport/test/mock_network_control.h15
-rw-r--r--chromium/third_party/webrtc/api/units/data_rate_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/api/video/BUILD.gn44
-rw-r--r--chromium/third_party/webrtc/api/video/OWNERS1
-rw-r--r--chromium/third_party/webrtc/api/video/test/BUILD.gn2
-rw-r--r--chromium/third_party/webrtc/api/video/test/mock_recordable_encoded_frame.h19
-rw-r--r--chromium/third_party/webrtc/api/video/video_adaptation_counters.cc9
-rw-r--r--chromium/third_party/webrtc/api/video/video_adaptation_counters.h4
-rw-r--r--chromium/third_party/webrtc/api/video/video_frame_marking.h29
-rw-r--r--chromium/third_party/webrtc/api/video/video_frame_metadata.cc28
-rw-r--r--chromium/third_party/webrtc/api/video/video_frame_metadata.h59
-rw-r--r--chromium/third_party/webrtc/api/video/video_frame_metadata_unittest.cc120
-rw-r--r--chromium/third_party/webrtc/api/video/video_stream_encoder_interface.h11
-rw-r--r--chromium/third_party/webrtc/api/video_codecs/BUILD.gn6
-rw-r--r--chromium/third_party/webrtc/api/video_codecs/test/BUILD.gn1
-rw-r--r--chromium/third_party/webrtc/api/video_codecs/video_codec.h2
-rw-r--r--chromium/third_party/webrtc/api/voip/BUILD.gn2
-rw-r--r--chromium/third_party/webrtc/audio/BUILD.gn3
-rw-r--r--chromium/third_party/webrtc/audio/audio_send_stream.cc19
-rw-r--r--chromium/third_party/webrtc/audio/audio_send_stream.h5
-rw-r--r--chromium/third_party/webrtc/audio/audio_send_stream_unittest.cc64
-rw-r--r--chromium/third_party/webrtc/audio/audio_state_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/audio/channel_receive.cc43
-rw-r--r--chromium/third_party/webrtc/audio/channel_send.cc92
-rw-r--r--chromium/third_party/webrtc/audio/channel_send.h5
-rw-r--r--chromium/third_party/webrtc/audio/mock_voe_channel_proxy.h224
-rw-r--r--chromium/third_party/webrtc/audio/null_audio_poller.cc3
-rw-r--r--chromium/third_party/webrtc/audio/test/low_bandwidth_audio_test.cc1
-rw-r--r--chromium/third_party/webrtc/audio/test/pc_low_bandwidth_audio_test.cc1
-rw-r--r--chromium/third_party/webrtc/audio/utility/audio_frame_operations_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/audio/voip/BUILD.gn2
-rw-r--r--chromium/third_party/webrtc/audio/voip/audio_channel.cc5
-rw-r--r--chromium/third_party/webrtc/audio/voip/audio_channel.h4
-rw-r--r--chromium/third_party/webrtc/audio/voip/audio_egress.cc2
-rw-r--r--chromium/third_party/webrtc/audio/voip/audio_egress.h6
-rw-r--r--chromium/third_party/webrtc/audio/voip/audio_ingress.cc2
-rw-r--r--chromium/third_party/webrtc/audio/voip/audio_ingress.h8
-rw-r--r--chromium/third_party/webrtc/audio/voip/test/BUILD.gn3
-rw-r--r--chromium/third_party/webrtc/audio/voip/test/audio_egress_unittest.cc13
-rw-r--r--chromium/third_party/webrtc/audio/voip/test/audio_ingress_unittest.cc7
-rw-r--r--chromium/third_party/webrtc/build_overrides/build.gni6
-rw-r--r--chromium/third_party/webrtc/call/BUILD.gn35
-rw-r--r--chromium/third_party/webrtc/call/adaptation/BUILD.gn29
-rw-r--r--chromium/third_party/webrtc/call/adaptation/adaptation_constraint.cc17
-rw-r--r--chromium/third_party/webrtc/call/adaptation/adaptation_constraint.h43
-rw-r--r--chromium/third_party/webrtc/call/adaptation/adaptation_listener.cc17
-rw-r--r--chromium/third_party/webrtc/call/adaptation/adaptation_listener.h41
-rw-r--r--chromium/third_party/webrtc/call/adaptation/broadcast_resource_listener.cc120
-rw-r--r--chromium/third_party/webrtc/call/adaptation/broadcast_resource_listener.h75
-rw-r--r--chromium/third_party/webrtc/call/adaptation/broadcast_resource_listener_unittest.cc121
-rw-r--r--chromium/third_party/webrtc/call/adaptation/resource.cc93
-rw-r--r--chromium/third_party/webrtc/call/adaptation/resource.h92
-rw-r--r--chromium/third_party/webrtc/call/adaptation/resource_adaptation_processor.cc520
-rw-r--r--chromium/third_party/webrtc/call/adaptation/resource_adaptation_processor.h148
-rw-r--r--chromium/third_party/webrtc/call/adaptation/resource_adaptation_processor_interface.cc5
-rw-r--r--chromium/third_party/webrtc/call/adaptation/resource_adaptation_processor_interface.h42
-rw-r--r--chromium/third_party/webrtc/call/adaptation/resource_adaptation_processor_unittest.cc888
-rw-r--r--chromium/third_party/webrtc/call/adaptation/resource_unittest.cc60
-rw-r--r--chromium/third_party/webrtc/call/adaptation/test/fake_adaptation_constraint.cc39
-rw-r--r--chromium/third_party/webrtc/call/adaptation/test/fake_adaptation_constraint.h42
-rw-r--r--chromium/third_party/webrtc/call/adaptation/test/fake_adaptation_listener.cc32
-rw-r--r--chromium/third_party/webrtc/call/adaptation/test/fake_adaptation_listener.h38
-rw-r--r--chromium/third_party/webrtc/call/adaptation/test/fake_frame_rate_provider.h58
-rw-r--r--chromium/third_party/webrtc/call/adaptation/test/fake_resource.cc43
-rw-r--r--chromium/third_party/webrtc/call/adaptation/test/fake_resource.h30
-rw-r--r--chromium/third_party/webrtc/call/adaptation/test/mock_resource_listener.h31
-rw-r--r--chromium/third_party/webrtc/call/adaptation/video_source_restrictions.cc14
-rw-r--r--chromium/third_party/webrtc/call/adaptation/video_source_restrictions.h3
-rw-r--r--chromium/third_party/webrtc/call/adaptation/video_stream_adapter.cc74
-rw-r--r--chromium/third_party/webrtc/call/adaptation/video_stream_adapter.h26
-rw-r--r--chromium/third_party/webrtc/call/adaptation/video_stream_adapter_unittest.cc19
-rw-r--r--chromium/third_party/webrtc/call/audio_send_stream.cc2
-rw-r--r--chromium/third_party/webrtc/call/audio_send_stream.h1
-rw-r--r--chromium/third_party/webrtc/call/bitrate_allocator_unittest.cc5
-rw-r--r--chromium/third_party/webrtc/call/call.cc665
-rw-r--r--chromium/third_party/webrtc/call/call.h47
-rw-r--r--chromium/third_party/webrtc/call/call_factory.cc14
-rw-r--r--chromium/third_party/webrtc/call/call_factory.h9
-rw-r--r--chromium/third_party/webrtc/call/call_unittest.cc202
-rw-r--r--chromium/third_party/webrtc/call/degraded_call.cc5
-rw-r--r--chromium/third_party/webrtc/call/degraded_call.h2
-rw-r--r--chromium/third_party/webrtc/call/fake_network_pipe_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/call/flexfec_receive_stream_impl.cc7
-rw-r--r--chromium/third_party/webrtc/call/flexfec_receive_stream_impl.h3
-rw-r--r--chromium/third_party/webrtc/call/rampup_tests.cc8
-rw-r--r--chromium/third_party/webrtc/call/rtcp_demuxer_unittest.cc28
-rw-r--r--chromium/third_party/webrtc/call/rtp_demuxer.cc57
-rw-r--r--chromium/third_party/webrtc/call/rtp_demuxer.h3
-rw-r--r--chromium/third_party/webrtc/call/rtp_demuxer_unittest.cc41
-rw-r--r--chromium/third_party/webrtc/call/rtp_payload_params.cc34
-rw-r--r--chromium/third_party/webrtc/call/rtp_payload_params.h3
-rw-r--r--chromium/third_party/webrtc/call/rtp_payload_params_unittest.cc67
-rw-r--r--chromium/third_party/webrtc/call/rtp_transport_controller_send.cc17
-rw-r--r--chromium/third_party/webrtc/call/rtp_video_sender.cc15
-rw-r--r--chromium/third_party/webrtc/call/rtp_video_sender.h8
-rw-r--r--chromium/third_party/webrtc/call/rtp_video_sender_unittest.cc24
-rw-r--r--chromium/third_party/webrtc/call/test/mock_audio_send_stream.h31
-rw-r--r--chromium/third_party/webrtc/call/test/mock_bitrate_allocator.h13
-rw-r--r--chromium/third_party/webrtc/call/test/mock_rtp_packet_sink_interface.h2
-rw-r--r--chromium/third_party/webrtc/call/test/mock_rtp_transport_controller_send.h106
-rw-r--r--chromium/third_party/webrtc/call/video_send_stream.h11
-rw-r--r--chromium/third_party/webrtc/common_audio/BUILD.gn2
-rw-r--r--chromium/third_party/webrtc/common_audio/OWNERS1
-rw-r--r--chromium/third_party/webrtc/common_audio/channel_buffer_unittest.cc4
-rw-r--r--chromium/third_party/webrtc/common_audio/mocks/mock_smoothing_filter.h6
-rw-r--r--chromium/third_party/webrtc/common_audio/resampler/push_resampler_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/common_audio/resampler/sinc_resampler_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/common_audio/third_party/ooura/fft_size_128/ooura_fft.cc8
-rw-r--r--chromium/third_party/webrtc/common_audio/third_party/ooura/fft_size_128/ooura_fft.h4
-rw-r--r--chromium/third_party/webrtc/common_audio/vad/mock/mock_vad.h14
-rw-r--r--chromium/third_party/webrtc/common_video/BUILD.gn3
-rw-r--r--chromium/third_party/webrtc/common_video/generic_frame_descriptor/BUILD.gn2
-rw-r--r--chromium/third_party/webrtc/common_video/generic_frame_descriptor/generic_frame_info.cc37
-rw-r--r--chromium/third_party/webrtc/common_video/generic_frame_descriptor/generic_frame_info.h9
-rw-r--r--chromium/third_party/webrtc/docs/faq.md4
-rw-r--r--chromium/third_party/webrtc/examples/BUILD.gn18
-rw-r--r--chromium/third_party/webrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCAudioManager.java4
-rw-r--r--chromium/third_party/webrtc/examples/androidnativeapi/BUILD.gn1
-rw-r--r--chromium/third_party/webrtc/examples/peerconnection/client/peer_connection_client.cc12
-rw-r--r--chromium/third_party/webrtc/logging/BUILD.gn22
-rw-r--r--chromium/third_party/webrtc/logging/rtc_event_log/encoder/blob_encoding.h2
-rw-r--r--chromium/third_party/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_unittest.cc11
-rw-r--r--chromium/third_party/webrtc/logging/rtc_event_log/mock/mock_rtc_event_log.h18
-rw-r--r--chromium/third_party/webrtc/logging/rtc_event_log/rtc_event_log_parser.cc33
-rw-r--r--chromium/third_party/webrtc/logging/rtc_event_log/rtc_event_log_parser.h22
-rw-r--r--chromium/third_party/webrtc/logging/rtc_event_log/rtc_event_log_unittest.cc5
-rw-r--r--chromium/third_party/webrtc/media/BUILD.gn28
-rw-r--r--chromium/third_party/webrtc/media/base/fake_network_interface.h33
-rw-r--r--chromium/third_party/webrtc/media/base/media_channel.cc5
-rw-r--r--chromium/third_party/webrtc/media/base/media_channel.h49
-rw-r--r--chromium/third_party/webrtc/media/base/media_constants.cc4
-rw-r--r--chromium/third_party/webrtc/media/base/media_constants.h7
-rw-r--r--chromium/third_party/webrtc/media/base/media_engine_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/media/base/rtp_data_engine_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/media/base/rtp_utils.cc57
-rw-r--r--chromium/third_party/webrtc/media/base/rtp_utils_unittest.cc107
-rw-r--r--chromium/third_party/webrtc/media/base/video_adapter.cc2
-rw-r--r--chromium/third_party/webrtc/media/base/video_adapter.h35
-rw-r--r--chromium/third_party/webrtc/media/base/vp9_profile.cc4
-rw-r--r--chromium/third_party/webrtc/media/base/vp9_profile.h1
-rw-r--r--chromium/third_party/webrtc/media/engine/fake_webrtc_call.cc11
-rw-r--r--chromium/third_party/webrtc/media/engine/fake_webrtc_call.h7
-rw-r--r--chromium/third_party/webrtc/media/engine/internal_decoder_factory.cc2
-rw-r--r--chromium/third_party/webrtc/media/engine/internal_decoder_factory_unittest.cc21
-rw-r--r--chromium/third_party/webrtc/media/engine/payload_type_mapper.cc1
-rw-r--r--chromium/third_party/webrtc/media/engine/payload_type_mapper_unittest.cc1
-rw-r--r--chromium/third_party/webrtc/media/engine/simulcast_encoder_adapter_unittest.cc20
-rw-r--r--chromium/third_party/webrtc/media/engine/webrtc_video_engine.cc15
-rw-r--r--chromium/third_party/webrtc/media/engine/webrtc_video_engine.h4
-rw-r--r--chromium/third_party/webrtc/media/engine/webrtc_video_engine_unittest.cc32
-rw-r--r--chromium/third_party/webrtc/media/engine/webrtc_voice_engine.cc113
-rw-r--r--chromium/third_party/webrtc/media/engine/webrtc_voice_engine_unittest.cc108
-rw-r--r--chromium/third_party/webrtc/media/sctp/sctp_transport.cc8
-rw-r--r--chromium/third_party/webrtc/media/sctp/sctp_transport_unittest.cc9
-rw-r--r--chromium/third_party/webrtc/modules/BUILD.gn6
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/BUILD.gn59
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/acm2/audio_coding_module_unittest.cc5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/audio_coding.gni3
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/config.proto19
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/controller_manager.cc15
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/controller_manager_unittest.cc16
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/frame_length_controller_v2.cc73
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/frame_length_controller_v2.h44
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/frame_length_controller_v2_unittest.cc121
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/mock/mock_audio_network_adaptor.h35
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/mock/mock_controller.h15
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/mock/mock_controller_manager.h13
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/mock/mock_debug_dump_writer.h28
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/util/threshold_curve_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h25
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h61
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/isac_webrtc_api_test.cc28
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.cc94
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h1
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus_unittest.cc147
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_interface.cc103
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_interface.h16
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_unittest.cc26
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc73
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc49
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_unittest.cc18
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h38
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_dtmf_buffer.h17
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_dtmf_tone_generator.h15
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_expand.h30
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_histogram.h4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_neteq_controller.h79
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_packet_buffer.h74
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_red_payload_splitter.h10
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_statistics_calculator.h9
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.cc77
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.h16
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_network_stats_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_unittest.cc181
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer_unittest.cc11
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_stats_plotter.cc3
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_stats_plotter.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_test.cc3
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_test.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/BUILD.gn4
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/android/audio_track_jni.cc32
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/android/audio_track_jni.h1
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java19
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/include/mock_audio_device.h185
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/include/mock_audio_transport.h66
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/mac/audio_device_mac.cc10
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/mac/audio_device_mac.h35
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/mac/audio_mixer_manager_mac.cc18
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/mac/audio_mixer_manager_mac.h20
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/mock_audio_device_buffer.h14
-rw-r--r--chromium/third_party/webrtc/modules/audio_mixer/audio_mixer_impl_unittest.cc12
-rw-r--r--chromium/third_party/webrtc/modules/audio_mixer/frame_combiner_unittest.cc4
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/BUILD.gn22
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec3/BUILD.gn4
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec3/adaptive_fir_filter_unittest.cc4
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec3/aec3_fft.cc11
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec3/aec3_fft.h3
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec3/aec3_fft_unittest.cc16
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec3/alignment_mixer_unittest.cc4
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec3/block_framer_unittest.cc28
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec3/block_processor_unittest.cc10
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec3/clockdrift_detector.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec3/coarse_filter_update_gain_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec3/decimator_unittest.cc8
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec3/echo_canceller3_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec3/echo_path_delay_estimator_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec3/echo_remover_metrics_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec3/echo_remover_unittest.cc8
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec3/fft_data_unittest.cc4
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec3/frame_blocker_unittest.cc27
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec3/matched_filter_lag_aggregator_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec3/matched_filter_unittest.cc8
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec3/mock/mock_block_processor.h31
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec3/mock/mock_echo_remover.h27
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec3/mock/mock_render_delay_buffer.h35
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec3/mock/mock_render_delay_controller.h17
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec3/refined_filter_update_gain_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec3/render_buffer_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec3/render_delay_buffer_unittest.cc8
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec3/render_delay_controller_unittest.cc4
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec3/render_signal_analyzer_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec3/subtractor_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec3/suppression_filter.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec3/suppression_filter_unittest.cc4
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec3/suppression_gain_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec_dump/mock_aec_dump.h70
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/agc/BUILD.gn2
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/agc/agc_manager_direct_unittest.cc33
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/agc/mock_agc.h16
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/agc2/BUILD.gn1
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/agc2/rnn_vad/BUILD.gn2
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/agc2/signal_classifier.cc12
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/audio_buffer_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/audio_processing_unittest.cc24
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/include/mock_audio_processing.h214
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/ns/BUILD.gn4
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/test/aec_dump_based_simulator.cc10
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/test/aec_dump_based_simulator.h1
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/test/audio_processing_simulator.cc86
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/test/audio_processing_simulator.h9
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/test/audioproc_float_impl.cc68
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/test/audioproc_float_impl.h15
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/test/conversational_speech/BUILD.gn2
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/test/conversational_speech/mock_wavreader.h10
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/test/conversational_speech/mock_wavreader_factory.h6
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/test/echo_control_mock.h26
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/test/wav_based_simulator.cc22
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/test/wav_based_simulator.h1
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/utility/cascaded_biquad_filter_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/utility/pffft_wrapper_unittest.cc11
-rw-r--r--chromium/third_party/webrtc/modules/congestion_controller/goog_cc/BUILD.gn18
-rw-r--r--chromium/third_party/webrtc/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_unittest.cc10
-rw-r--r--chromium/third_party/webrtc/modules/congestion_controller/pcc/BUILD.gn4
-rw-r--r--chromium/third_party/webrtc/modules/congestion_controller/pcc/bitrate_controller_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/modules/congestion_controller/receive_side_congestion_controller_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/modules/congestion_controller/rtp/BUILD.gn4
-rw-r--r--chromium/third_party/webrtc/modules/congestion_controller/rtp/transport_feedback_adapter_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/modules/congestion_controller/rtp/transport_feedback_demuxer_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/BUILD.gn8
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mock_desktop_capturer_callback.h7
-rw-r--r--chromium/third_party/webrtc/modules/pacing/BUILD.gn3
-rw-r--r--chromium/third_party/webrtc/modules/pacing/paced_sender.cc15
-rw-r--r--chromium/third_party/webrtc/modules/pacing/paced_sender_unittest.cc15
-rw-r--r--chromium/third_party/webrtc/modules/pacing/pacing_controller.cc4
-rw-r--r--chromium/third_party/webrtc/modules/pacing/pacing_controller.h2
-rw-r--r--chromium/third_party/webrtc/modules/pacing/pacing_controller_unittest.cc30
-rw-r--r--chromium/third_party/webrtc/modules/pacing/packet_router.cc76
-rw-r--r--chromium/third_party/webrtc/modules/pacing/packet_router.h58
-rw-r--r--chromium/third_party/webrtc/modules/pacing/packet_router_unittest.cc91
-rw-r--r--chromium/third_party/webrtc/modules/pacing/task_queue_paced_sender.cc26
-rw-r--r--chromium/third_party/webrtc/modules/pacing/task_queue_paced_sender.h23
-rw-r--r--chromium/third_party/webrtc/modules/pacing/task_queue_paced_sender_unittest.cc572
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/BUILD.gn4
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc7
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/BUILD.gn24
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_header_extension_map.h4
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_rtcp.h448
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h2
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/mocks/mock_rtcp_rtt_stats.h2
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h98
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/active_decode_targets_helper.cc127
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/active_decode_targets_helper.h60
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/active_decode_targets_helper_unittest.cc295
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc472
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h149
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/nack_rtx_unittest.cc14
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc5
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h5
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc14
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc177
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.h118
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender_unittest.cc61
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_config.h4
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_impl_unittest.cc12
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_unittest.cc5
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.cc10
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h15
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension_unittest.cc119
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_dependency_descriptor_reader.cc20
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.cc44
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.h3
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension_map.cc1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc80
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h18
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet.cc1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_received.cc2
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc44
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h18
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc769
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.h316
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2_unittest.cc630
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_interface.h424
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.cc5
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.h126
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio_unittest.cc7
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_egress.cc6
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_egress.h4
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc63
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc21
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h2
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc12
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h4
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video_unittest.cc159
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.cc14
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_video_header.h5
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/process_thread_impl_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/BUILD.gn4
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/linux/device_info_linux.cc21
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/BUILD.gn95
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/chain_diff_calculator.cc62
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/chain_diff_calculator.h46
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/chain_diff_calculator_unittest.cc126
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/av1/BUILD.gn91
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/av1/libaom_av1_decoder.cc8
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.cc346
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.h3
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder_unittest.cc59
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/av1/libaom_av1_unittest.cc233
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l1t2.cc123
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l1t2.h48
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l1t3.cc109
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l1t3.h53
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t1.cc100
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t1.h43
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t1_key.cc105
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t1_key.h43
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2.cc126
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2.h53
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2_key.cc128
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2_key.h53
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2_key_shift.cc128
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2_key_shift.h53
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l3t1.cc108
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l3t1.h45
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l3t3.cc220
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l3t3.h49
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_s2t1.cc91
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_s2t1.h41
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_unittest.cc319
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalable_video_controller.h137
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalable_video_controller_no_layering.cc67
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalable_video_controller_no_layering.h38
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/test/encoded_video_frame_producer.cc78
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/test/encoded_video_frame_producer.h74
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/test/videocodec_test_fixture_impl.cc3
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/test/videocodec_test_libaom.cc97
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.cc70
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.h2
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc131
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h5
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp8/screenshare_layers.cc24
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp8/screenshare_layers.h2
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp8/test/mock_libvpx_interface.h148
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp9/include/vp9.h4
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp9/test/vp9_impl_unittest.cc287
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9.cc16
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc79
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9_impl.h2
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/deprecated/BUILD.gn33
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/deprecated/nack_module.cc (renamed from chromium/third_party/webrtc/modules/video_coding/nack_module.cc)63
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/deprecated/nack_module.h (renamed from chromium/third_party/webrtc/modules/video_coding/nack_module.h)17
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/encoded_frame.cc14
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/frame_buffer2.cc19
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/frame_buffer2.h4
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/frame_buffer2_unittest.cc35
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/frame_object.cc4
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/frame_object.h1
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/h264_sps_pps_tracker.cc5
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/jitter_estimator.cc34
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/jitter_estimator.h1
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/jitter_estimator_tests.cc16
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/nack_module2.cc343
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/nack_module2.h140
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/nack_module2_unittest.cc411
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/nack_module_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/packet_buffer.cc51
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/packet_buffer.h50
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/rtp_frame_reference_finder.cc126
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/rtp_frame_reference_finder_unittest.cc373
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/session_info.cc6
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/utility/simulcast_rate_allocator_unittest.cc18
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/utility/vp9_uncompressed_header_parser.cc135
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/utility/vp9_uncompressed_header_parser.h60
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/video_receiver_unittest.cc17
-rw-r--r--chromium/third_party/webrtc/p2p/BUILD.gn14
-rw-r--r--chromium/third_party/webrtc/p2p/base/basic_async_resolver_factory_unittest.cc1
-rw-r--r--chromium/third_party/webrtc/p2p/base/connection.h2
-rw-r--r--chromium/third_party/webrtc/p2p/base/ice_controller_interface.h19
-rw-r--r--chromium/third_party/webrtc/p2p/base/mock_async_resolver.h13
-rw-r--r--chromium/third_party/webrtc/p2p/base/mock_ice_transport.h23
-rw-r--r--chromium/third_party/webrtc/p2p/base/p2p_transport_channel.cc36
-rw-r--r--chromium/third_party/webrtc/p2p/base/p2p_transport_channel.h12
-rw-r--r--chromium/third_party/webrtc/p2p/base/p2p_transport_channel_unittest.cc262
-rw-r--r--chromium/third_party/webrtc/p2p/base/port.h2
-rw-r--r--chromium/third_party/webrtc/p2p/base/pseudo_tcp_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/p2p/base/stun_port_unittest.cc41
-rw-r--r--chromium/third_party/webrtc/p2p/base/stun_request.h2
-rw-r--r--chromium/third_party/webrtc/p2p/base/transport_description.cc4
-rw-r--r--chromium/third_party/webrtc/p2p/base/transport_description.h23
-rw-r--r--chromium/third_party/webrtc/p2p/base/transport_description_factory.cc9
-rw-r--r--chromium/third_party/webrtc/p2p/base/transport_description_factory.h3
-rw-r--r--chromium/third_party/webrtc/p2p/base/transport_description_factory_unittest.cc68
-rw-r--r--chromium/third_party/webrtc/p2p/base/turn_port.cc17
-rw-r--r--chromium/third_party/webrtc/p2p/base/turn_port_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/p2p/base/turn_server.cc4
-rw-r--r--chromium/third_party/webrtc/p2p/base/turn_server.h2
-rw-r--r--chromium/third_party/webrtc/p2p/client/basic_port_allocator.cc11
-rw-r--r--chromium/third_party/webrtc/p2p/client/basic_port_allocator.h8
-rw-r--r--chromium/third_party/webrtc/pc/BUILD.gn24
-rw-r--r--chromium/third_party/webrtc/pc/channel.cc234
-rw-r--r--chromium/third_party/webrtc/pc/channel.h22
-rw-r--r--chromium/third_party/webrtc/pc/channel_manager.cc21
-rw-r--r--chromium/third_party/webrtc/pc/channel_manager.h3
-rw-r--r--chromium/third_party/webrtc/pc/channel_manager_unittest.cc22
-rw-r--r--chromium/third_party/webrtc/pc/channel_unittest.cc7
-rw-r--r--chromium/third_party/webrtc/pc/data_channel.cc167
-rw-r--r--chromium/third_party/webrtc/pc/data_channel.h164
-rw-r--r--chromium/third_party/webrtc/pc/data_channel_controller.cc155
-rw-r--r--chromium/third_party/webrtc/pc/data_channel_controller.h40
-rw-r--r--chromium/third_party/webrtc/pc/data_channel_unittest.cc27
-rw-r--r--chromium/third_party/webrtc/pc/datagram_rtp_transport.cc380
-rw-r--r--chromium/third_party/webrtc/pc/datagram_rtp_transport.h173
-rw-r--r--chromium/third_party/webrtc/pc/jsep_transport.cc140
-rw-r--r--chromium/third_party/webrtc/pc/jsep_transport.h97
-rw-r--r--chromium/third_party/webrtc/pc/jsep_transport_controller.cc330
-rw-r--r--chromium/third_party/webrtc/pc/jsep_transport_controller.h78
-rw-r--r--chromium/third_party/webrtc/pc/jsep_transport_controller_unittest.cc526
-rw-r--r--chromium/third_party/webrtc/pc/jsep_transport_unittest.cc4
-rw-r--r--chromium/third_party/webrtc/pc/media_session.cc209
-rw-r--r--chromium/third_party/webrtc/pc/media_session.h27
-rw-r--r--chromium/third_party/webrtc/pc/media_session_unittest.cc523
-rw-r--r--chromium/third_party/webrtc/pc/media_stream_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/pc/peer_connection.cc297
-rw-r--r--chromium/third_party/webrtc/pc/peer_connection.h78
-rw-r--r--chromium/third_party/webrtc/pc/peer_connection_adaptation_integrationtest.cc161
-rw-r--r--chromium/third_party/webrtc/pc/peer_connection_factory.cc2
-rw-r--r--chromium/third_party/webrtc/pc/peer_connection_factory.h6
-rw-r--r--chromium/third_party/webrtc/pc/peer_connection_header_extension_unittest.cc97
-rw-r--r--chromium/third_party/webrtc/pc/peer_connection_integrationtest.cc1020
-rw-r--r--chromium/third_party/webrtc/pc/peer_connection_interface_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/pc/peer_connection_internal.h7
-rw-r--r--chromium/third_party/webrtc/pc/peer_connection_media_unittest.cc17
-rw-r--r--chromium/third_party/webrtc/pc/proxy_unittest.cc16
-rw-r--r--chromium/third_party/webrtc/pc/remote_audio_source.cc3
-rw-r--r--chromium/third_party/webrtc/pc/rtc_stats_collector.cc61
-rw-r--r--chromium/third_party/webrtc/pc/rtc_stats_collector.h1
-rw-r--r--chromium/third_party/webrtc/pc/rtc_stats_collector_unittest.cc27
-rw-r--r--chromium/third_party/webrtc/pc/rtc_stats_integrationtest.cc51
-rw-r--r--chromium/third_party/webrtc/pc/rtp_sender_receiver_unittest.cc13
-rw-r--r--chromium/third_party/webrtc/pc/rtp_transceiver.cc48
-rw-r--r--chromium/third_party/webrtc/pc/rtp_transceiver.h8
-rw-r--r--chromium/third_party/webrtc/pc/rtp_transceiver_unittest.cc111
-rw-r--r--chromium/third_party/webrtc/pc/sctp_utils.cc40
-rw-r--r--chromium/third_party/webrtc/pc/sctp_utils_unittest.cc28
-rw-r--r--chromium/third_party/webrtc/pc/session_description.h9
-rw-r--r--chromium/third_party/webrtc/pc/stats_collector.cc15
-rw-r--r--chromium/third_party/webrtc/pc/test/fake_audio_capture_module.cc72
-rw-r--r--chromium/third_party/webrtc/pc/test/fake_audio_capture_module.h55
-rw-r--r--chromium/third_party/webrtc/pc/test/fake_peer_connection_base.h5
-rw-r--r--chromium/third_party/webrtc/pc/test/fake_peer_connection_for_stats.h15
-rw-r--r--chromium/third_party/webrtc/pc/test/fake_periodic_video_source.h12
-rw-r--r--chromium/third_party/webrtc/pc/test/fake_periodic_video_track_source.h4
-rw-r--r--chromium/third_party/webrtc/pc/test/fake_rtc_certificate_generator.h2
-rw-r--r--chromium/third_party/webrtc/pc/test/mock_channel_interface.h53
-rw-r--r--chromium/third_party/webrtc/pc/test/mock_data_channel.h41
-rw-r--r--chromium/third_party/webrtc/pc/test/mock_delayable.h11
-rw-r--r--chromium/third_party/webrtc/pc/test/mock_rtp_receiver_internal.h75
-rw-r--r--chromium/third_party/webrtc/pc/test/mock_rtp_sender_internal.h86
-rw-r--r--chromium/third_party/webrtc/pc/test/peer_connection_test_wrapper.cc16
-rw-r--r--chromium/third_party/webrtc/pc/test/peer_connection_test_wrapper.h9
-rw-r--r--chromium/third_party/webrtc/pc/track_media_info_map_unittest.cc25
-rw-r--r--chromium/third_party/webrtc/pc/video_rtp_receiver_unittest.cc16
-rw-r--r--chromium/third_party/webrtc/pc/video_rtp_track_source_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/pc/webrtc_sdp.cc73
-rw-r--r--chromium/third_party/webrtc/pc/webrtc_sdp_unittest.cc100
-rw-r--r--chromium/third_party/webrtc/pc/webrtc_session_description_factory.cc3
-rw-r--r--chromium/third_party/webrtc/rtc_base/BUILD.gn58
-rw-r--r--chromium/third_party/webrtc/rtc_base/DEPS2
-rw-r--r--chromium/third_party/webrtc/rtc_base/async_invoker.h2
-rw-r--r--chromium/third_party/webrtc/rtc_base/bit_buffer_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/rtc_base/buffer.h4
-rw-r--r--chromium/third_party/webrtc/rtc_base/buffer_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/rtc_base/checks.h2
-rw-r--r--chromium/third_party/webrtc/rtc_base/checks_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/rtc_base/critical_section.cc16
-rw-r--r--chromium/third_party/webrtc/rtc_base/critical_section_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/rtc_base/deprecated/signal_thread.cc (renamed from chromium/third_party/webrtc/rtc_base/signal_thread.cc)46
-rw-r--r--chromium/third_party/webrtc/rtc_base/deprecated/signal_thread.h166
-rw-r--r--chromium/third_party/webrtc/rtc_base/deprecated/signal_thread_unittest.cc (renamed from chromium/third_party/webrtc/rtc_base/signal_thread_unittest.cc)8
-rw-r--r--chromium/third_party/webrtc/rtc_base/experiments/BUILD.gn30
-rw-r--r--chromium/third_party/webrtc/rtc_base/experiments/quality_rampup_experiment.cc4
-rw-r--r--chromium/third_party/webrtc/rtc_base/experiments/quality_rampup_experiment.h2
-rw-r--r--chromium/third_party/webrtc/rtc_base/fake_network.h3
-rw-r--r--chromium/third_party/webrtc/rtc_base/logging.cc21
-rw-r--r--chromium/third_party/webrtc/rtc_base/message_handler.cc13
-rw-r--r--chromium/third_party/webrtc/rtc_base/message_handler.h28
-rw-r--r--chromium/third_party/webrtc/rtc_base/net_helpers.cc62
-rw-r--r--chromium/third_party/webrtc/rtc_base/net_helpers.h36
-rw-r--r--chromium/third_party/webrtc/rtc_base/network.h2
-rw-r--r--chromium/third_party/webrtc/rtc_base/network/BUILD.gn6
-rw-r--r--chromium/third_party/webrtc/rtc_base/network_monitor.cc7
-rw-r--r--chromium/third_party/webrtc/rtc_base/null_socket_server_unittest.cc3
-rw-r--r--chromium/third_party/webrtc/rtc_base/openssl_adapter.h3
-rw-r--r--chromium/third_party/webrtc/rtc_base/openssl_adapter_unittest.cc40
-rw-r--r--chromium/third_party/webrtc/rtc_base/operations_chain_unittest.cc5
-rw-r--r--chromium/third_party/webrtc/rtc_base/physical_socket_server.cc22
-rw-r--r--chromium/third_party/webrtc/rtc_base/physical_socket_server.h23
-rw-r--r--chromium/third_party/webrtc/rtc_base/platform_thread_types.cc28
-rw-r--r--chromium/third_party/webrtc/rtc_base/rate_statistics.cc109
-rw-r--r--chromium/third_party/webrtc/rtc_base/rate_statistics.h26
-rw-r--r--chromium/third_party/webrtc/rtc_base/rtc_certificate_generator.cc2
-rw-r--r--chromium/third_party/webrtc/rtc_base/signal_thread.h148
-rw-r--r--chromium/third_party/webrtc/rtc_base/socket_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/rtc_base/ssl_adapter_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/rtc_base/stream.h2
-rw-r--r--chromium/third_party/webrtc/rtc_base/strings/string_builder_unittest.cc12
-rw-r--r--chromium/third_party/webrtc/rtc_base/swap_queue_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/rtc_base/synchronization/BUILD.gn58
-rw-r--r--chromium/third_party/webrtc/rtc_base/synchronization/DEPS11
-rw-r--r--chromium/third_party/webrtc/rtc_base/synchronization/mutex.cc39
-rw-r--r--chromium/third_party/webrtc/rtc_base/synchronization/mutex.h145
-rw-r--r--chromium/third_party/webrtc/rtc_base/synchronization/mutex_abseil.h37
-rw-r--r--chromium/third_party/webrtc/rtc_base/synchronization/mutex_benchmark.cc95
-rw-r--r--chromium/third_party/webrtc/rtc_base/synchronization/mutex_critical_section.h54
-rw-r--r--chromium/third_party/webrtc/rtc_base/synchronization/mutex_pthread.h53
-rw-r--r--chromium/third_party/webrtc/rtc_base/synchronization/mutex_unittest.cc206
-rw-r--r--chromium/third_party/webrtc/rtc_base/synchronization/sequence_checker.cc47
-rw-r--r--chromium/third_party/webrtc/rtc_base/synchronization/sequence_checker.h20
-rw-r--r--chromium/third_party/webrtc/rtc_base/synchronization/sequence_checker_unittest.cc9
-rw-r--r--chromium/third_party/webrtc/rtc_base/synchronization/yield.cc36
-rw-r--r--chromium/third_party/webrtc/rtc_base/synchronization/yield.h20
-rw-r--r--chromium/third_party/webrtc/rtc_base/synchronization/yield_policy_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/rtc_base/system/BUILD.gn6
-rw-r--r--chromium/third_party/webrtc/rtc_base/task_utils/BUILD.gn5
-rw-r--r--chromium/third_party/webrtc/rtc_base/task_utils/repeating_task.cc15
-rw-r--r--chromium/third_party/webrtc/rtc_base/task_utils/repeating_task.h36
-rw-r--r--chromium/third_party/webrtc/rtc_base/task_utils/repeating_task_unittest.cc52
-rw-r--r--chromium/third_party/webrtc/rtc_base/thread.cc169
-rw-r--r--chromium/third_party/webrtc/rtc_base/thread.h30
-rw-r--r--chromium/third_party/webrtc/rtc_base/thread_unittest.cc21
-rw-r--r--chromium/third_party/webrtc/rtc_base/virtual_socket_server.h2
-rw-r--r--chromium/third_party/webrtc/rtc_base/virtual_socket_unittest.cc5
-rw-r--r--chromium/third_party/webrtc/rtc_tools/BUILD.gn20
-rw-r--r--chromium/third_party/webrtc/rtc_tools/network_tester/BUILD.gn8
-rw-r--r--chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/alerts.cc227
-rw-r--r--chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/alerts.h86
-rw-r--r--chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyze_audio.cc503
-rw-r--r--chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyze_audio.h75
-rw-r--r--chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyzer.cc938
-rw-r--r--chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyzer.h201
-rw-r--r--chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyzer_common.cc83
-rw-r--r--chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyzer_common.h182
-rw-r--r--chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/main.cc115
-rw-r--r--chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_base.cc229
-rw-r--r--chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_base.h30
-rw-r--r--chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_protobuf.cc55
-rw-r--r--chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_protobuf.h5
-rw-r--r--chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_python.cc163
-rw-r--r--chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_python.h3
-rw-r--r--chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/triage_notifications.h158
-rw-r--r--chromium/third_party/webrtc/sdk/BUILD.gn10
-rw-r--r--chromium/third_party/webrtc/sdk/android/BUILD.gn37
-rw-r--r--chromium/third_party/webrtc/sdk/android/api/org/webrtc/MediaTransportFactoryFactory.java22
-rw-r--r--chromium/third_party/webrtc/sdk/android/api/org/webrtc/PeerConnection.java24
-rw-r--r--chromium/third_party/webrtc/sdk/android/api/org/webrtc/PeerConnectionFactory.java13
-rw-r--r--chromium/third_party/webrtc/sdk/android/api/org/webrtc/SurfaceTextureHelper.java11
-rw-r--r--chromium/third_party/webrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java20
-rw-r--r--chromium/third_party/webrtc/sdk/android/src/jni/audio_device/audio_track_jni.cc28
-rw-r--r--chromium/third_party/webrtc/sdk/android/src/jni/pc/peer_connection.cc5
-rw-r--r--chromium/third_party/webrtc/sdk/android/src/jni/pc/peer_connection_factory.cc11
-rw-r--r--chromium/third_party/webrtc/sdk/objc/api/peerconnection/RTCConfiguration.h12
-rw-r--r--chromium/third_party/webrtc/sdk/objc/api/peerconnection/RTCConfiguration.mm9
-rw-r--r--chromium/third_party/webrtc/sdk/objc/api/peerconnection/RTCPeerConnection.mm1
-rw-r--r--chromium/third_party/webrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h23
-rw-r--r--chromium/third_party/webrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm48
-rw-r--r--chromium/third_party/webrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm5
-rw-r--r--chromium/third_party/webrtc/sdk/objc/native/src/audio/audio_device_ios.mm4
-rw-r--r--chromium/third_party/webrtc/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm7
-rw-r--r--chromium/third_party/webrtc/stats/rtcstats_objects.cc6
-rw-r--r--chromium/third_party/webrtc/style-guide.md4
-rw-r--r--chromium/third_party/webrtc/system_wrappers/BUILD.gn4
-rw-r--r--chromium/third_party/webrtc/system_wrappers/source/field_trial_unittest.cc4
-rw-r--r--chromium/third_party/webrtc/system_wrappers/source/metrics_unittest.cc3
-rw-r--r--chromium/third_party/webrtc/test/BUILD.gn85
-rw-r--r--chromium/third_party/webrtc/test/DEPS3
-rw-r--r--chromium/third_party/webrtc/test/benchmark_main.cc17
-rw-r--r--chromium/third_party/webrtc/test/explicit_key_value_config.cc57
-rw-r--r--chromium/third_party/webrtc/test/explicit_key_value_config.h35
-rw-r--r--chromium/third_party/webrtc/test/fake_encoder.cc6
-rw-r--r--chromium/third_party/webrtc/test/fake_encoder.h25
-rw-r--r--chromium/third_party/webrtc/test/frame_forwarder.cc10
-rw-r--r--chromium/third_party/webrtc/test/frame_forwarder.h18
-rw-r--r--chromium/third_party/webrtc/test/frame_generator_capturer_unittest.cc4
-rw-r--r--chromium/third_party/webrtc/test/fuzzers/BUILD.gn35
-rw-r--r--chromium/third_party/webrtc/test/logging/BUILD.gn2
-rw-r--r--chromium/third_party/webrtc/test/mock_audio_decoder.h22
-rw-r--r--chromium/third_party/webrtc/test/mock_audio_decoder_factory.h18
-rw-r--r--chromium/third_party/webrtc/test/mock_audio_encoder.h65
-rw-r--r--chromium/third_party/webrtc/test/mock_audio_encoder_factory.h24
-rw-r--r--chromium/third_party/webrtc/test/mock_transport.h2
-rw-r--r--chromium/third_party/webrtc/test/network/BUILD.gn6
-rw-r--r--chromium/third_party/webrtc/test/network/cross_traffic.cc13
-rw-r--r--chromium/third_party/webrtc/test/network/network_emulation_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/test/pc/e2e/BUILD.gn1069
-rw-r--r--chromium/third_party/webrtc/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.cc126
-rw-r--r--chromium/third_party/webrtc/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h22
-rw-r--r--chromium/third_party/webrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc675
-rw-r--r--chromium/third_party/webrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h308
-rw-r--r--chromium/third_party/webrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_test.cc388
-rw-r--r--chromium/third_party/webrtc/test/pc/e2e/analyzer/video/example_video_quality_analyzer.cc18
-rw-r--r--chromium/third_party/webrtc/test/pc/e2e/analyzer/video/example_video_quality_analyzer.h34
-rw-r--r--chromium/third_party/webrtc/test/pc/e2e/analyzer/video/multi_head_queue.h99
-rw-r--r--chromium/third_party/webrtc/test/pc/e2e/analyzer/video/multi_head_queue_test.cc103
-rw-r--r--chromium/third_party/webrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.cc19
-rw-r--r--chromium/third_party/webrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.h9
-rw-r--r--chromium/third_party/webrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc21
-rw-r--r--chromium/third_party/webrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h9
-rw-r--r--chromium/third_party/webrtc/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.cc81
-rw-r--r--chromium/third_party/webrtc/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.h7
-rw-r--r--chromium/third_party/webrtc/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector_unittest.cc74
-rw-r--r--chromium/third_party/webrtc/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.cc48
-rw-r--r--chromium/third_party/webrtc/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h32
-rw-r--r--chromium/third_party/webrtc/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.cc112
-rw-r--r--chromium/third_party/webrtc/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.h23
-rw-r--r--chromium/third_party/webrtc/test/pc/e2e/echo/echo_emulation.cc10
-rw-r--r--chromium/third_party/webrtc/test/pc/e2e/media/media_helper.cc2
-rw-r--r--chromium/third_party/webrtc/test/pc/e2e/network_quality_metrics_reporter.cc52
-rw-r--r--chromium/third_party/webrtc/test/pc/e2e/network_quality_metrics_reporter.h10
-rw-r--r--chromium/third_party/webrtc/test/pc/e2e/peer_configurer.h17
-rw-r--r--chromium/third_party/webrtc/test/pc/e2e/peer_connection_e2e_smoke_test.cc129
-rw-r--r--chromium/third_party/webrtc/test/pc/e2e/peer_connection_quality_test.cc28
-rw-r--r--chromium/third_party/webrtc/test/pc/e2e/peer_connection_quality_test.h4
-rw-r--r--chromium/third_party/webrtc/test/pc/e2e/peer_connection_quality_test_params.h4
-rw-r--r--chromium/third_party/webrtc/test/pc/e2e/stats_poller.cc9
-rw-r--r--chromium/third_party/webrtc/test/pc/e2e/stats_poller.h6
-rw-r--r--chromium/third_party/webrtc/test/pc/e2e/test_peer_factory.cc18
-rw-r--r--chromium/third_party/webrtc/test/peer_scenario/BUILD.gn2
-rw-r--r--chromium/third_party/webrtc/test/run_loop_unittest.cc1
-rw-r--r--chromium/third_party/webrtc/test/scenario/BUILD.gn2
-rw-r--r--chromium/third_party/webrtc/test/scenario/call_client.cc13
-rw-r--r--chromium/third_party/webrtc/test/scenario/call_client.h2
-rw-r--r--chromium/third_party/webrtc/test/scenario/scenario_unittest.cc3
-rw-r--r--chromium/third_party/webrtc/test/test_main.cc7
-rw-r--r--chromium/third_party/webrtc/test/test_main_lib.cc66
-rw-r--r--chromium/third_party/webrtc/test/testsupport/mock/mock_frame_reader.h10
-rw-r--r--chromium/third_party/webrtc/test/testsupport/perf_test.h1
-rw-r--r--chromium/third_party/webrtc/test/testsupport/resources_dir_flag.cc2
-rw-r--r--chromium/third_party/webrtc/test/testsupport/resources_dir_flag.h2
-rw-r--r--chromium/third_party/webrtc/test/testsupport/test_artifacts_unittest.cc1
-rw-r--r--chromium/third_party/webrtc/test/time_controller/BUILD.gn2
-rw-r--r--chromium/third_party/webrtc/test/time_controller/simulated_time_controller.cc8
-rw-r--r--chromium/third_party/webrtc/test/time_controller/simulated_time_controller.h22
-rwxr-xr-xchromium/third_party/webrtc/tools_webrtc/get_landmines.py1
-rwxr-xr-xchromium/third_party/webrtc/tools_webrtc/libs/generate_licenses.py1
-rw-r--r--chromium/third_party/webrtc/tools_webrtc/mb/mb_config.pyl13
-rw-r--r--chromium/third_party/webrtc/tools_webrtc/sancov/README9
-rw-r--r--chromium/third_party/webrtc/tools_webrtc/sancov/blacklist.txt21
-rw-r--r--chromium/third_party/webrtc/video/BUILD.gn27
-rw-r--r--chromium/third_party/webrtc/video/adaptation/BUILD.gn12
-rw-r--r--chromium/third_party/webrtc/video/adaptation/encode_usage_resource.cc16
-rw-r--r--chromium/third_party/webrtc/video/adaptation/encode_usage_resource.h11
-rw-r--r--chromium/third_party/webrtc/video/adaptation/overuse_frame_detector_unittest.cc4
-rw-r--r--chromium/third_party/webrtc/video/adaptation/quality_rampup_experiment_helper.cc80
-rw-r--r--chromium/third_party/webrtc/video/adaptation/quality_rampup_experiment_helper.h68
-rw-r--r--chromium/third_party/webrtc/video/adaptation/quality_scaler_resource.cc44
-rw-r--r--chromium/third_party/webrtc/video/adaptation/quality_scaler_resource.h25
-rw-r--r--chromium/third_party/webrtc/video/adaptation/quality_scaler_resource_unittest.cc7
-rw-r--r--chromium/third_party/webrtc/video/adaptation/video_stream_encoder_resource.cc85
-rw-r--r--chromium/third_party/webrtc/video/adaptation/video_stream_encoder_resource.h80
-rw-r--r--chromium/third_party/webrtc/video/adaptation/video_stream_encoder_resource_manager.cc485
-rw-r--r--chromium/third_party/webrtc/video/adaptation/video_stream_encoder_resource_manager.h149
-rw-r--r--chromium/third_party/webrtc/video/adaptation/video_stream_encoder_resource_manager_unittest.cc98
-rw-r--r--chromium/third_party/webrtc/video/call_stats2_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/video/call_stats_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/video/end_to_end_tests/bandwidth_tests.cc13
-rw-r--r--chromium/third_party/webrtc/video/end_to_end_tests/codec_tests.cc30
-rw-r--r--chromium/third_party/webrtc/video/full_stack_tests.cc109
-rw-r--r--chromium/third_party/webrtc/video/pc_full_stack_tests.cc102
-rw-r--r--chromium/third_party/webrtc/video/receive_statistics_proxy2.cc11
-rw-r--r--chromium/third_party/webrtc/video/rtp_video_stream_receiver.cc62
-rw-r--r--chromium/third_party/webrtc/video/rtp_video_stream_receiver.h60
-rw-r--r--chromium/third_party/webrtc/video/rtp_video_stream_receiver2.cc1154
-rw-r--r--chromium/third_party/webrtc/video/rtp_video_stream_receiver2.h367
-rw-r--r--chromium/third_party/webrtc/video/rtp_video_stream_receiver2_unittest.cc1221
-rw-r--r--chromium/third_party/webrtc/video/rtp_video_stream_receiver_frame_transformer_delegate.cc10
-rw-r--r--chromium/third_party/webrtc/video/rtp_video_stream_receiver_frame_transformer_delegate.h17
-rw-r--r--chromium/third_party/webrtc/video/rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc130
-rw-r--r--chromium/third_party/webrtc/video/rtp_video_stream_receiver_unittest.cc53
-rw-r--r--chromium/third_party/webrtc/video/send_statistics_proxy.cc44
-rw-r--r--chromium/third_party/webrtc/video/send_statistics_proxy.h5
-rw-r--r--chromium/third_party/webrtc/video/test/mock_video_stream_encoder.h54
-rw-r--r--chromium/third_party/webrtc/video/video_quality_test.cc5
-rw-r--r--chromium/third_party/webrtc/video/video_receive_stream.h1
-rw-r--r--chromium/third_party/webrtc/video/video_receive_stream2.cc188
-rw-r--r--chromium/third_party/webrtc/video/video_receive_stream2.h53
-rw-r--r--chromium/third_party/webrtc/video/video_receive_stream2_unittest.cc571
-rw-r--r--chromium/third_party/webrtc/video/video_receive_stream_unittest.cc34
-rw-r--r--chromium/third_party/webrtc/video/video_send_stream.cc12
-rw-r--r--chromium/third_party/webrtc/video/video_send_stream.h3
-rw-r--r--chromium/third_party/webrtc/video/video_send_stream_impl_unittest.cc66
-rw-r--r--chromium/third_party/webrtc/video/video_send_stream_tests.cc46
-rw-r--r--chromium/third_party/webrtc/video/video_source_sink_controller.cc22
-rw-r--r--chromium/third_party/webrtc/video/video_source_sink_controller.h2
-rw-r--r--chromium/third_party/webrtc/video/video_source_sink_controller_unittest.cc17
-rw-r--r--chromium/third_party/webrtc/video/video_stream_decoder_impl_unittest.cc36
-rw-r--r--chromium/third_party/webrtc/video/video_stream_encoder.cc199
-rw-r--r--chromium/third_party/webrtc/video/video_stream_encoder.h37
-rw-r--r--chromium/third_party/webrtc/video/video_stream_encoder_unittest.cc680
-rw-r--r--chromium/third_party/webrtc/webrtc.gni52
-rw-r--r--chromium/third_party/webrtc/whitespace.txt2
837 files changed, 30971 insertions, 17222 deletions
diff --git a/chromium/third_party/webrtc/AUTHORS b/chromium/third_party/webrtc/AUTHORS
index 499c3406390..188503e7f46 100644
--- a/chromium/third_party/webrtc/AUTHORS
+++ b/chromium/third_party/webrtc/AUTHORS
@@ -92,6 +92,8 @@ Raman Budny <budnyjj@gmail.com>
Stephan Hartmann <stha09@googlemail.com>
&yet LLC <*@andyet.com>
+8x8 Inc. <*@sip-communicator.org>
+8x8 Inc. <*@8x8.com>
Agora IO <*@agora.io>
ARM Holdings <*@arm.com>
BroadSoft Inc. <*@broadsoft.com>
@@ -108,6 +110,7 @@ Opera Software ASA <*@opera.com>
Optical Tone Ltd <*@opticaltone.com>
Pengutronix e.K. <*@pengutronix.de>
RingCentral, Inc. <*@ringcentral.com>
+Signal Messenger, LLC <*@signal.org>
Sinch AB <*@sinch.com>
struktur AG <*@struktur.de>
Telenor Digital AS <*@telenor.com>
diff --git a/chromium/third_party/webrtc/BUILD.gn b/chromium/third_party/webrtc/BUILD.gn
index f7d15f47a9f..adb78df4246 100644
--- a/chromium/third_party/webrtc/BUILD.gn
+++ b/chromium/third_party/webrtc/BUILD.gn
@@ -265,6 +265,10 @@ config("common_config") {
defines += [ "WEBRTC_USE_H264" ]
}
+ if (rtc_use_absl_mutex) {
+ defines += [ "WEBRTC_ABSL_MUTEX" ]
+ }
+
if (rtc_disable_logging) {
defines += [ "RTC_DISABLE_LOGGING" ]
}
@@ -580,6 +584,14 @@ if (rtc_include_tests) {
}
}
+ rtc_test("benchmarks") {
+ testonly = true
+ deps = [
+ "rtc_base/synchronization:mutex_benchmark",
+ "test:benchmark_main",
+ ]
+ }
+
# This runs tests that must run in real time and therefore can take some
# time to execute. They are in a separate executable to avoid making the
# regular unittest suite too slow to run frequently.
diff --git a/chromium/third_party/webrtc/DEPS b/chromium/third_party/webrtc/DEPS
index 406f042aaba..717f9571b87 100644
--- a/chromium/third_party/webrtc/DEPS
+++ b/chromium/third_party/webrtc/DEPS
@@ -8,37 +8,37 @@ vars = {
# chromium waterfalls. More info at: crbug.com/570091.
'checkout_configuration': 'default',
'checkout_instrumented_libraries': 'checkout_linux and checkout_configuration == "default"',
- 'chromium_revision': '8ffd72401d4e9b12d3b8979c8ef9549d32741e8c',
+ 'chromium_revision': '4d95e6c77b6c37d8ea56bb81f14cb9c12a1cc1a3',
}
deps = {
# TODO(kjellander): Move this to be Android-only once the libevent dependency
# in base/third_party/libevent is solved.
'src/base':
- 'https://chromium.googlesource.com/chromium/src/base@e6c0c5b9adccfbc6d1f810cf15b300c3ce19107b',
+ 'https://chromium.googlesource.com/chromium/src/base@2df7267880bf7d4086d55c0e56cd72c394bfda36',
'src/build':
- 'https://chromium.googlesource.com/chromium/src/build@f70e3b9685e03176b131ef03d185ba367e981c1d',
+ 'https://chromium.googlesource.com/chromium/src/build@a03951acb996e9cea78b4ab575896bf1bfcd9668',
'src/buildtools':
- 'https://chromium.googlesource.com/chromium/src/buildtools@204a35a2a64f7179f8b76d7a0385653690839e21',
+ 'https://chromium.googlesource.com/chromium/src/buildtools@1b066f021638735d72aa799ae6bc37e0b8963c67',
# Gradle 4.3-rc4. Used for testing Android Studio project generation for WebRTC.
'src/examples/androidtests/third_party/gradle': {
'url': 'https://chromium.googlesource.com/external/github.com/gradle/gradle.git@89af43c4d0506f69980f00dde78c97b2f81437f8',
'condition': 'checkout_android',
},
'src/ios': {
- 'url': 'https://chromium.googlesource.com/chromium/src/ios@7b694bd9367de782f0ae61fa0c713d2a0745c412',
+ 'url': 'https://chromium.googlesource.com/chromium/src/ios@9200aad36b240166dcf8d771b95102f8193e737f',
'condition': 'checkout_ios',
},
'src/testing':
- 'https://chromium.googlesource.com/chromium/src/testing@5a5fb44e80d7fa2a1bb7c86467d7217335e6bae8',
+ 'https://chromium.googlesource.com/chromium/src/testing@502600d41a00af23dd09e02ea358061e3c951634',
'src/third_party':
- 'https://chromium.googlesource.com/chromium/src/third_party@57686d64cb3c949799993d7732c981c64d9d47f4',
+ 'https://chromium.googlesource.com/chromium/src/third_party@e0df6e10adc084f88dda51c0cbab84645db6c135',
'src/buildtools/linux64': {
'packages': [
{
'package': 'gn/gn/linux-amd64',
- 'version': 'git_revision:5ed3c9cc67b090d5e311e4bd2aba072173e82db9',
+ 'version': 'git_revision:d0a6f072070988e7b038496c4e7d6c562b649732',
}
],
'dep_type': 'cipd',
@@ -48,7 +48,7 @@ deps = {
'packages': [
{
'package': 'gn/gn/mac-amd64',
- 'version': 'git_revision:5ed3c9cc67b090d5e311e4bd2aba072173e82db9',
+ 'version': 'git_revision:d0a6f072070988e7b038496c4e7d6c562b649732',
}
],
'dep_type': 'cipd',
@@ -58,7 +58,7 @@ deps = {
'packages': [
{
'package': 'gn/gn/windows-amd64',
- 'version': 'git_revision:5ed3c9cc67b090d5e311e4bd2aba072173e82db9',
+ 'version': 'git_revision:d0a6f072070988e7b038496c4e7d6c562b649732',
}
],
'dep_type': 'cipd',
@@ -119,20 +119,20 @@ deps = {
},
'src/third_party/boringssl/src':
- 'https://boringssl.googlesource.com/boringssl.git@a810d82575ecbde26406fa583371f807f8721ed7',
+ 'https://boringssl.googlesource.com/boringssl.git@88024df12147e56b6abd66b743ff441a0aaa09a8',
'src/third_party/breakpad/breakpad':
- 'https://chromium.googlesource.com/breakpad/breakpad.git@2ffe116322aa4373d408a72b665fa7fe7a504d4a',
+ 'https://chromium.googlesource.com/breakpad/breakpad.git@2757a2c9c819fcae3784576aef0c8400c7ad06d7',
'src/third_party/catapult':
- 'https://chromium.googlesource.com/catapult.git@087cffcba472d70f3d0b1115d0b9100c365073d1',
+ 'https://chromium.googlesource.com/catapult.git@2ad47493f833c5191f56c74d3f1aac10e7c105e8',
'src/third_party/ced/src': {
'url': 'https://chromium.googlesource.com/external/github.com/google/compact_enc_det.git@ba412eaaacd3186085babcd901679a48863c7dd5',
},
'src/third_party/colorama/src':
'https://chromium.googlesource.com/external/colorama.git@799604a1041e9b3bc5d2789ecbd7e8db2e18e6b8',
'src/third_party/depot_tools':
- 'https://chromium.googlesource.com/chromium/tools/depot_tools.git@30ef5cb43761b8536b071a26ca59fca17e6a7de6',
+ 'https://chromium.googlesource.com/chromium/tools/depot_tools.git@37e562110fa58a913b13ed2258f18449f90c6ad7',
'src/third_party/ffmpeg':
- 'https://chromium.googlesource.com/chromium/third_party/ffmpeg.git@587a3f48499df05d3c65f1529fd08b0783217b39',
+ 'https://chromium.googlesource.com/chromium/third_party/ffmpeg.git@be66dc5fd0e3c53646107b2dc5d7594a869ebdc6',
'src/third_party/findbugs': {
'url': 'https://chromium.googlesource.com/chromium/deps/findbugs.git@4275d9ac8610db6b1bc9a5e887f97e41b33fac67',
'condition': 'checkout_android',
@@ -143,9 +143,12 @@ deps = {
'condition': 'checkout_linux',
},
'src/third_party/freetype/src':
- 'https://chromium.googlesource.com/chromium/src/third_party/freetype2.git@3f70e6d20c82b28174096adcd0657b3c998b007b',
+ 'https://chromium.googlesource.com/chromium/src/third_party/freetype2.git@62fea391fa9993f8c1d206a50080d690178ce518',
'src/third_party/harfbuzz-ng/src':
- 'https://chromium.googlesource.com/external/github.com/harfbuzz/harfbuzz.git@100d40c827eb8336b2b671856f151275d47e71ad',
+ 'https://chromium.googlesource.com/external/github.com/harfbuzz/harfbuzz.git@e3af529e511ca492284cdd9f4584666b88a9e00f',
+ 'src/third_party/google_benchmark/src': {
+ 'url': 'https://chromium.googlesource.com/external/github.com/google/benchmark.git@367119482ff4abc3d73e4a109b410090fc281337',
+ },
# WebRTC-only dependency (not present in Chromium).
'src/third_party/gtest-parallel':
'https://chromium.googlesource.com/external/github.com/google/gtest-parallel@df0b4e476f98516cea7d593e5dbb0fca44f6ee7f',
@@ -160,9 +163,9 @@ deps = {
'dep_type': 'cipd',
},
'src/third_party/googletest/src':
- 'https://chromium.googlesource.com/external/github.com/google/googletest.git@a09ea700d32bab83325aff9ff34d0582e50e3997',
+ 'https://chromium.googlesource.com/external/github.com/google/googletest.git@4fe018038f87675c083d0cfb6a6b57c274fb1753',
'src/third_party/icu': {
- 'url': 'https://chromium.googlesource.com/chromium/deps/icu.git@f2223961702f00a8833874b0560d615a2cc42738',
+ 'url': 'https://chromium.googlesource.com/chromium/deps/icu.git@630b884f84d1d5e92aeda3463dca99fe2befd30e',
},
'src/third_party/jdk': {
'packages': [
@@ -192,13 +195,15 @@ deps = {
'src/third_party/libsrtp':
'https://chromium.googlesource.com/chromium/deps/libsrtp.git@650611720ecc23e0e6b32b0e3100f8b4df91696c',
'src/third_party/libaom/source/libaom':
- 'https://aomedia.googlesource.com/aom.git@c810066815b80dd1ac8ade15170ce962d6646368',
+ 'https://aomedia.googlesource.com/aom.git@2aa13c436e4dc6d78e05d13e6be73a23b3810bd3',
'src/third_party/libunwindstack': {
- 'url': 'https://chromium.googlesource.com/chromium/src/third_party/libunwindstack.git@acf93761dc00ac67bd7534c4040699abed4f8d94',
+ 'url': 'https://chromium.googlesource.com/chromium/src/third_party/libunwindstack.git@046920fc491aba67c6f6a750b4be7b835cff4e5b',
'condition': 'checkout_android',
},
+ 'src/third_party/perfetto':
+ 'https://android.googlesource.com/platform/external/perfetto.git@60cf022c0223b4c28424509dca35e347872c4832',
'src/third_party/libvpx/source/libvpx':
- 'https://chromium.googlesource.com/webm/libvpx.git@77960f37b3d328cf7552f6cd69a083f4005aed7b',
+ 'https://chromium.googlesource.com/webm/libvpx.git@c1765573149e2c0fe2acabc224c0f9085b9e7f2b',
'src/third_party/libyuv':
'https://chromium.googlesource.com/libyuv/libyuv.git@6afd9becdf58822b1da6770598d8597c583ccfad',
'src/third_party/lss': {
@@ -221,7 +226,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/r8',
- 'version': 'UAycWqc5QfELtJhhnoU4jQHjsyxPjRNyZ0EfvlojaY4C',
+ 'version': 'gobCh01BNwJNyLHHNFUmLWSMaAbe4x3izuzBFzxQpDoC',
},
],
'condition': 'checkout_android',
@@ -241,10 +246,6 @@ deps = {
'url': 'https://chromium.googlesource.com/external/github.com/kennethreitz/requests.git@f172b30356d821d180fa4ecfa3e71c7274a32de4',
'condition': 'checkout_android',
},
- 'src/third_party/robolectric/robolectric': {
- 'url': 'https://chromium.googlesource.com/external/robolectric.git@f2df0efb033bb402399ebfb9bf58aefee5cced05',
- 'condition': 'checkout_android',
- },
'src/third_party/ub-uiautomator/lib': {
'url': 'https://chromium.googlesource.com/chromium/third_party/ub-uiautomator.git@00270549ce3161ae72ceb24712618ea28b4f9434',
'condition': 'checkout_android',
@@ -257,9 +258,9 @@ deps = {
'condition': 'checkout_win',
},
'src/tools':
- 'https://chromium.googlesource.com/chromium/src/tools@b64de32dc94866c1446065a6ce8703c856dd518b',
+ 'https://chromium.googlesource.com/chromium/src/tools@050a4a5e267e98c79fe632d84bbc2fbaa4d22fd4',
'src/tools/swarming_client':
- 'https://chromium.googlesource.com/infra/luci/client-py.git@160b445a44e0daacf6f3f8570ca2707ec451f374',
+ 'https://chromium.googlesource.com/infra/luci/client-py.git@4c095d04179dc725a300085ae21fe3b79900d072',
'src/third_party/accessibility_test_framework': {
'packages': [
@@ -617,6 +618,17 @@ deps = {
'dep_type': 'cipd',
},
+ 'src/third_party/android_deps/libs/androidx_annotation_annotation_experimental': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/androidx_annotation_annotation_experimental',
+ 'version': 'version:1.0.0-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
'src/third_party/android_deps/libs/androidx_appcompat_appcompat': {
'packages': [
{
@@ -709,7 +721,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/androidx_coordinatorlayout_coordinatorlayout',
- 'version': 'version:1.0.0-cr0',
+ 'version': 'version:1.1.0-cr0',
},
],
'condition': 'checkout_android',
@@ -1039,7 +1051,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/androidx_recyclerview_recyclerview',
- 'version': 'version:1.0.0-cr0',
+ 'version': 'version:1.1.0-cr0',
},
],
'condition': 'checkout_android',
@@ -1090,11 +1102,22 @@ deps = {
'dep_type': 'cipd',
},
+ 'src/third_party/android_deps/libs/androidx_test_espresso_espresso_contrib': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/androidx_test_espresso_espresso_contrib',
+ 'version': 'version:3.2.0-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
'src/third_party/android_deps/libs/androidx_test_espresso_espresso_core': {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/androidx_test_espresso_espresso_core',
- 'version': 'version:3.1.0-cr0',
+ 'version': 'version:3.2.0-cr0',
},
],
'condition': 'checkout_android',
@@ -1105,7 +1128,29 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/androidx_test_espresso_espresso_idling_resource',
- 'version': 'version:3.1.0-cr0',
+ 'version': 'version:3.2.0-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/androidx_test_espresso_espresso_intents': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/androidx_test_espresso_espresso_intents',
+ 'version': 'version:3.2.0-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/androidx_test_espresso_espresso_web': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/androidx_test_espresso_espresso_web',
+ 'version': 'version:3.2.0-cr0',
},
],
'condition': 'checkout_android',
@@ -1171,7 +1216,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/androidx_transition_transition',
- 'version': 'version:1.0.0-rc02-cr0',
+ 'version': 'version:1.2.0-cr0',
},
],
'condition': 'checkout_android',
@@ -1222,6 +1267,17 @@ deps = {
'dep_type': 'cipd',
},
+ 'src/third_party/android_deps/libs/androidx_viewpager2_viewpager2': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/androidx_viewpager2_viewpager2',
+ 'version': 'version:1.0.0-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
'src/third_party/android_deps/libs/androidx_viewpager_viewpager': {
'packages': [
{
@@ -1952,7 +2008,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/com_google_android_material_material',
- 'version': 'version:1.0.0-rc02-cr0',
+ 'version': 'version:1.2.0-alpha06-cr0',
},
],
'condition': 'checkout_android',
@@ -2216,7 +2272,7 @@ deps = {
'packages': [
{
'package': 'chromium/third_party/android_deps/libs/com_google_protobuf_protobuf_javalite',
- 'version': 'version:3.11.4-cr0',
+ 'version': 'version:3.12.2-cr0',
},
],
'condition': 'checkout_android',
@@ -2520,6 +2576,17 @@ deps = {
'dep_type': 'cipd',
},
+ 'src/third_party/android_deps/libs/org_ccil_cowan_tagsoup_tagsoup': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/org_ccil_cowan_tagsoup_tagsoup',
+ 'version': 'version:1.2.1-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
'src/third_party/android_deps/libs/org_checkerframework_checker_compat_qual': {
'packages': [
{
@@ -2861,6 +2928,28 @@ deps = {
'dep_type': 'cipd',
},
+ 'src/third_party/android_deps/libs/org_robolectric_shadows_multidex': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/org_robolectric_shadows_multidex',
+ 'version': 'version:4.3.1-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
+ 'src/third_party/android_deps/libs/org_robolectric_shadows_playservices': {
+ 'packages': [
+ {
+ 'package': 'chromium/third_party/android_deps/libs/org_robolectric_shadows_playservices',
+ 'version': 'version:4.3.1-cr0',
+ },
+ ],
+ 'condition': 'checkout_android',
+ 'dep_type': 'cipd',
+ },
+
'src/third_party/android_deps/libs/org_robolectric_utils': {
'packages': [
{
@@ -3140,6 +3229,7 @@ include_rules = [
"+absl/algorithm/container.h",
"+absl/base/attributes.h",
"+absl/base/config.h",
+ "+absl/base/const_init.h",
"+absl/base/macros.h",
"+absl/container/inlined_vector.h",
"+absl/memory/memory.h",
diff --git a/chromium/third_party/webrtc/PRESUBMIT.py b/chromium/third_party/webrtc/PRESUBMIT.py
index 247b78eaa0f..e1c9b8e92e2 100755
--- a/chromium/third_party/webrtc/PRESUBMIT.py
+++ b/chromium/third_party/webrtc/PRESUBMIT.py
@@ -47,10 +47,13 @@ CPPLINT_BLACKLIST = [
# - build/c++11 : Rvalue ref checks are unreliable (false positives),
# include file and feature blacklists are
# google3-specific.
+# - runtime/references : Mutable references are not banned by the Google
+# C++ style guide anymore (starting from May 2020).
# - whitespace/operators: Same as above (doesn't seem sufficient to eliminate
# all move-related errors).
BLACKLIST_LINT_FILTERS = [
'-build/c++11',
+ '-runtime/references',
'-whitespace/operators',
]
@@ -94,15 +97,20 @@ LEGACY_API_DIRS = (
API_DIRS = NATIVE_API_DIRS[:] + LEGACY_API_DIRS[:]
# TARGET_RE matches a GN target, and extracts the target name and the contents.
-TARGET_RE = re.compile(r'(?P<indent>\s*)\w+\("(?P<target_name>\w+)"\) {'
- r'(?P<target_contents>.*?)'
- r'(?P=indent)}',
- re.MULTILINE | re.DOTALL)
+TARGET_RE = re.compile(
+ r'(?P<indent>\s*)(?P<target_type>\w+)\("(?P<target_name>\w+)"\) {'
+ r'(?P<target_contents>.*?)'
+ r'(?P=indent)}',
+ re.MULTILINE | re.DOTALL)
# SOURCES_RE matches a block of sources inside a GN target.
SOURCES_RE = re.compile(r'sources \+?= \[(?P<sources>.*?)\]',
re.MULTILINE | re.DOTALL)
+# DEPS_RE matches a block of sources inside a GN target.
+DEPS_RE = re.compile(r'\bdeps \+?= \[(?P<deps>.*?)\]',
+ re.MULTILINE | re.DOTALL)
+
# FILE_PATH_RE matchies a file path.
FILE_PATH_RE = re.compile(r'"(?P<file_path>(\w|\/)+)(?P<extension>\.\w+)"')
@@ -338,6 +346,37 @@ def CheckNoSourcesAbove(input_api, gn_files, output_api):
return []
+def CheckAbseilDependencies(input_api, gn_files, output_api):
+ """Checks that Abseil dependencies are declared in `absl_deps`."""
+ absl_re = re.compile(r'third_party/abseil-cpp', re.MULTILINE | re.DOTALL)
+ target_types_to_check = [
+ 'rtc_library',
+ 'rtc_source_set',
+ 'rtc_static_library',
+ 'webrtc_fuzzer_test',
+ ]
+ error_msg = ('Abseil dependencies in target "%s" (file: %s) '
+ 'should be moved to the "absl_deps" parameter.')
+ errors = []
+
+ for gn_file in gn_files:
+ gn_file_content = input_api.ReadFile(gn_file)
+ for target_match in TARGET_RE.finditer(gn_file_content):
+ target_type = target_match.group('target_type')
+ target_name = target_match.group('target_name')
+ target_contents = target_match.group('target_contents')
+ if target_type in target_types_to_check:
+ for deps_match in DEPS_RE.finditer(target_contents):
+ deps = deps_match.group('deps').splitlines()
+ for dep in deps:
+ if re.search(absl_re, dep):
+ errors.append(
+ output_api.PresubmitError(error_msg % (target_name,
+ gn_file.LocalPath())))
+ break # no need to warn more than once per target
+ return errors
+
+
def CheckNoMixingSources(input_api, gn_files, output_api):
"""Disallow mixing C, C++ and Obj-C/Obj-C++ in the same target.
@@ -577,6 +616,7 @@ def CheckGnChanges(input_api, output_api):
if gn_files:
result.extend(CheckNoSourcesAbove(input_api, gn_files, output_api))
result.extend(CheckNoMixingSources(input_api, gn_files, output_api))
+ result.extend(CheckAbseilDependencies(input_api, gn_files, output_api))
result.extend(CheckNoPackageBoundaryViolations(input_api, gn_files,
output_api))
result.extend(CheckPublicDepsIsNotUsed(gn_files, input_api, output_api))
diff --git a/chromium/third_party/webrtc/abseil-in-webrtc.md b/chromium/third_party/webrtc/abseil-in-webrtc.md
index 0541d3c7a5a..da03af07b1a 100644
--- a/chromium/third_party/webrtc/abseil-in-webrtc.md
+++ b/chromium/third_party/webrtc/abseil-in-webrtc.md
@@ -23,9 +23,11 @@ adds the first use.
* `absl::variant` and related stuff from `absl/types/variant.h`.
* The functions in `absl/algorithm/algorithm.h` and
`absl/algorithm/container.h`.
+* `absl/base/const_init.h` for mutex initialization.
* The macros in `absl/base/attributes.h`, `absl/base/config.h` and
`absl/base/macros.h`.
+
## **Disallowed**
### `absl::make_unique`
@@ -34,7 +36,7 @@ adds the first use.
### `absl::Mutex`
-*Use `rtc::CriticalSection` instead.*
+*Use `webrtc::Mutex` instead.*
Chromium has a ban on new static initializers, and `absl::Mutex` uses
one. To make `absl::Mutex` available, we would need to nicely ask the
@@ -61,3 +63,12 @@ has decided if they will change `absl::Span` to match.
These are optimized for speed, not binary size. Even `StrCat` calls
with a modest number of arguments can easily add several hundred bytes
to the binary.
+
+## How to depend on Abseil
+
+For build targets `rtc_library`, `rtc_source_set` and `rtc_static_library`,
+dependencies on Abseil need to be listed in `absl_deps` instead of `deps`.
+
+This is needed in order to support the Abseil component build in Chromium. In
+such build mode, WebRTC will depend on a unique Abseil build target what will
+generate a shared library.
diff --git a/chromium/third_party/webrtc/api/BUILD.gn b/chromium/third_party/webrtc/api/BUILD.gn
index c5629c0fcda..571b89aacbc 100644
--- a/chromium/third_party/webrtc/api/BUILD.gn
+++ b/chromium/third_party/webrtc/api/BUILD.gn
@@ -71,8 +71,8 @@ rtc_library("rtp_headers") {
"..:webrtc_common",
"units:timestamp",
"video:video_rtp_headers",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("rtp_packet_info") {
@@ -90,8 +90,8 @@ rtc_library("rtp_packet_info") {
"..:webrtc_common",
"../rtc_base:rtc_base_approved",
"../rtc_base/system:rtc_export",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("media_stream_interface") {
@@ -111,8 +111,8 @@ rtc_library("media_stream_interface") {
"../rtc_base/system:rtc_export",
"video:recordable_encoded_frame",
"video:video_frame",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("libjingle_peerconnection_api") {
@@ -166,12 +166,14 @@ rtc_library("libjingle_peerconnection_api") {
":media_stream_interface",
":network_state_predictor_api",
":packet_socket_factory",
+ ":priority",
":rtc_error",
":rtc_stats_api",
":rtp_packet_info",
":rtp_parameters",
":rtp_transceiver_direction",
":scoped_refptr",
+ "adaptation:resource_adaptation_api",
"audio:audio_mixer_api",
"audio_codecs:audio_codecs_api",
"crypto:frame_decryptor_interface",
@@ -181,23 +183,15 @@ rtc_library("libjingle_peerconnection_api") {
"rtc_event_log",
"task_queue",
"transport:bitrate_settings",
- "transport:datagram_transport_interface",
"transport:enums",
"transport:network_control",
"transport:webrtc_key_value_config",
- "transport/media:audio_interfaces",
- "transport/media:media_transport_interface",
- "transport/media:video_interfaces",
"transport/rtp:rtp_source",
"units:data_rate",
"units:timestamp",
"video:encoded_image",
"video:video_frame",
"video:video_rtp_headers",
- "//third_party/abseil-cpp/absl/algorithm:container",
- "//third_party/abseil-cpp/absl/memory",
- "//third_party/abseil-cpp/absl/strings",
- "//third_party/abseil-cpp/absl/types:optional",
# Basically, don't add stuff here. You might break sensitive downstream
# targets like pnacl. API should not depend on anything outside of this
@@ -212,6 +206,12 @@ rtc_library("libjingle_peerconnection_api") {
"../rtc_base:rtc_base_approved",
"../rtc_base/system:rtc_export",
]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/memory",
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
}
rtc_source_set("frame_transformer_interface") {
@@ -221,6 +221,7 @@ rtc_source_set("frame_transformer_interface") {
":scoped_refptr",
"../rtc_base:refcount",
"video:encoded_frame",
+ "video:video_frame_metadata",
]
}
@@ -235,8 +236,8 @@ rtc_library("rtc_error") {
"../rtc_base:logging",
"../rtc_base:macromagic",
"../rtc_base/system:rtc_export",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_source_set("packet_socket_factory") {
@@ -272,7 +273,6 @@ rtc_source_set("video_quality_test_fixture_api") {
"../test:video_test_common",
"transport:bitrate_settings",
"transport:network_control",
- "transport/media:media_transport_interface",
"video_codecs:video_codecs_api",
]
}
@@ -283,11 +283,15 @@ rtc_source_set("video_quality_analyzer_api") {
sources = [ "test/video_quality_analyzer_interface.h" ]
deps = [
+ ":array_view",
":stats_observer_interface",
"video:encoded_image",
"video:video_frame",
"video:video_rtp_headers",
"video_codecs:video_codecs_api",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
}
@@ -302,6 +306,10 @@ rtc_source_set("rtp_transceiver_direction") {
sources = [ "rtp_transceiver_direction.h" ]
}
+rtc_source_set("priority") {
+ sources = [ "priority.h" ]
+}
+
rtc_library("rtp_parameters") {
visibility = [ "*" ]
sources = [
@@ -312,18 +320,21 @@ rtc_library("rtp_parameters") {
]
deps = [
":array_view",
+ ":priority",
":rtp_transceiver_direction",
"../rtc_base:checks",
"../rtc_base:stringutils",
"../rtc_base/system:rtc_export",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
}
if (is_android) {
- java_cpp_enum("rtp_parameters_enums") {
- sources = [ "rtp_parameters.h" ]
+ java_cpp_enum("priority_enums") {
+ sources = [ "priority.h" ]
}
}
@@ -343,11 +354,9 @@ rtc_source_set("stats_observer_interface") {
testonly = true
sources = [ "test/stats_observer_interface.h" ]
- deps = [
- # For api/stats_types.h
- ":libjingle_peerconnection_api",
- ":rtp_parameters",
- ]
+ deps = [ ":rtc_stats_api" ]
+
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
rtc_source_set("peer_connection_quality_test_fixture_api") {
@@ -370,14 +379,16 @@ rtc_source_set("peer_connection_quality_test_fixture_api") {
":stats_observer_interface",
":video_quality_analyzer_api",
"../media:rtc_media_base",
+ "../rtc_base:deprecation",
"../rtc_base:rtc_base",
"rtc_event_log",
"task_queue",
"transport:network_control",
- "transport/media:media_transport_interface",
"units:time_delta",
"video:video_frame",
"video_codecs:video_codecs_api",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
@@ -392,8 +403,8 @@ rtc_source_set("frame_generator_api") {
deps = [
":scoped_refptr",
"video:video_frame",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("test_dependency_factory") {
@@ -406,7 +417,7 @@ rtc_library("test_dependency_factory") {
deps = [
":video_quality_test_fixture_api",
"../rtc_base:checks",
- "../rtc_base:thread_checker",
+ "../rtc_base:platform_thread_types",
]
}
@@ -470,8 +481,8 @@ rtc_library("create_frame_generator") {
"../rtc_base:checks",
"../system_wrappers",
"../test:frame_generator_impl",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("create_peer_connection_quality_test_frame_generator") {
@@ -487,8 +498,8 @@ rtc_library("create_peer_connection_quality_test_frame_generator") {
":peer_connection_quality_test_fixture_api",
"../rtc_base:checks",
"../test:fileutils",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_source_set("libjingle_logging_api") {
@@ -541,8 +552,8 @@ rtc_library("audio_options_api") {
":array_view",
"../rtc_base:stringutils",
"../rtc_base/system:rtc_export",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("transport_api") {
@@ -569,8 +580,8 @@ rtc_source_set("simulated_network_api") {
deps = [
"../rtc_base",
"../rtc_base:criticalsection",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
# TODO(srte): Move to network_emulation sub directory.
@@ -703,6 +714,8 @@ if (rtc_include_tests) {
"../modules/audio_coding:neteq_test_factory",
"../rtc_base:checks",
"neteq:neteq_api",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/flags:flag",
"//third_party/abseil-cpp/absl/flags:parse",
"//third_party/abseil-cpp/absl/strings",
@@ -853,6 +866,7 @@ if (rtc_include_tests) {
}
rtc_source_set("mock_peerconnectioninterface") {
+ visibility = [ "*" ]
testonly = true
sources = [ "test/mock_peerconnectioninterface.h" ]
@@ -862,6 +876,17 @@ if (rtc_include_tests) {
]
}
+ rtc_source_set("mock_peer_connection_factory_interface") {
+ visibility = [ "*" ]
+ testonly = true
+ sources = [ "test/mock_peer_connection_factory_interface.h" ]
+
+ deps = [
+ ":libjingle_peerconnection_api",
+ "../test:test_support",
+ ]
+ }
+
rtc_source_set("mock_rtp") {
testonly = true
sources = [
@@ -875,6 +900,16 @@ if (rtc_include_tests) {
]
}
+ rtc_source_set("mock_transformable_video_frame") {
+ testonly = true
+ sources = [ "test/mock_transformable_video_frame.h" ]
+
+ deps = [
+ ":frame_transformer_interface",
+ "../test:test_support",
+ ]
+ }
+
rtc_source_set("mock_video_bitrate_allocator") {
testonly = true
sources = [ "test/mock_video_bitrate_allocator.h" ]
@@ -932,39 +967,6 @@ if (rtc_include_tests) {
]
}
- rtc_source_set("fake_media_transport") {
- testonly = true
-
- sources = [
- "test/fake_datagram_transport.h",
- "test/fake_media_transport.h",
- ]
-
- deps = [
- "../rtc_base:checks",
- "transport:datagram_transport_interface",
- "transport/media:media_transport_interface",
- "//third_party/abseil-cpp/absl/algorithm:container",
- ]
- }
-
- rtc_library("loopback_media_transport") {
- testonly = true
-
- sources = [
- "test/loopback_media_transport.cc",
- "test/loopback_media_transport.h",
- ]
-
- deps = [
- "../rtc_base",
- "../rtc_base:checks",
- "transport:datagram_transport_interface",
- "transport/media:media_transport_interface",
- "//third_party/abseil-cpp/absl/algorithm:container",
- ]
- }
-
rtc_library("create_time_controller") {
visibility = [ "*" ]
testonly = true
@@ -995,7 +997,6 @@ if (rtc_include_tests) {
"rtp_parameters_unittest.cc",
"scoped_refptr_unittest.cc",
"test/create_time_controller_unittest.cc",
- "test/loopback_media_transport_unittest.cc",
]
deps = [
@@ -1003,7 +1004,6 @@ if (rtc_include_tests) {
":create_time_controller",
":function_view",
":libjingle_peerconnection_api",
- ":loopback_media_transport",
":rtc_error",
":rtc_event_log_output_file",
":rtp_packet_info",
@@ -1034,13 +1034,13 @@ if (rtc_include_tests) {
":dummy_peer_connection",
":fake_frame_decryptor",
":fake_frame_encryptor",
- ":fake_media_transport",
- ":loopback_media_transport",
":mock_audio_mixer",
":mock_frame_decryptor",
":mock_frame_encryptor",
+ ":mock_peer_connection_factory_interface",
":mock_peerconnectioninterface",
":mock_rtp",
+ ":mock_transformable_video_frame",
":mock_video_bitrate_allocator",
":mock_video_bitrate_allocator_factory",
":mock_video_codec_factory",
diff --git a/chromium/third_party/webrtc/api/DEPS b/chromium/third_party/webrtc/api/DEPS
index 1212b43be8a..995664e93ec 100644
--- a/chromium/third_party/webrtc/api/DEPS
+++ b/chromium/third_party/webrtc/api/DEPS
@@ -115,11 +115,6 @@ specific_include_rules = {
"+rtc_base/ref_count.h",
],
- "media_transport_interface\.h": [
- "+rtc_base/copy_on_write_buffer.h", # As used by datachannelinterface.h
- "+rtc_base/network_route.h",
- ],
-
"packet_socket_factory\.h": [
"+rtc_base/proxy_info.h",
"+rtc_base/async_packet_socket.h",
diff --git a/chromium/third_party/webrtc/api/adaptation/BUILD.gn b/chromium/third_party/webrtc/api/adaptation/BUILD.gn
new file mode 100644
index 00000000000..dc4c73711ec
--- /dev/null
+++ b/chromium/third_party/webrtc/api/adaptation/BUILD.gn
@@ -0,0 +1,23 @@
+# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("../../webrtc.gni")
+
+rtc_source_set("resource_adaptation_api") {
+ visibility = [ "*" ]
+ sources = [
+ "resource.cc",
+ "resource.h",
+ ]
+ deps = [
+ "../../api:scoped_refptr",
+ "../../rtc_base:refcount",
+ "../../rtc_base:rtc_base_approved",
+ "../../rtc_base/system:rtc_export",
+ ]
+}
diff --git a/chromium/third_party/webrtc/api/adaptation/DEPS b/chromium/third_party/webrtc/api/adaptation/DEPS
new file mode 100644
index 00000000000..cab7fb8e140
--- /dev/null
+++ b/chromium/third_party/webrtc/api/adaptation/DEPS
@@ -0,0 +1,7 @@
+specific_include_rules = {
+ "resource\.h": [
+ # ref_count.h is a public_deps of rtc_base_approved. Necessary because of
+ # rtc::RefCountInterface.
+ "+rtc_base/ref_count.h",
+ ],
+} \ No newline at end of file
diff --git a/chromium/third_party/webrtc/api/adaptation/resource.cc b/chromium/third_party/webrtc/api/adaptation/resource.cc
new file mode 100644
index 00000000000..0a9c83a3112
--- /dev/null
+++ b/chromium/third_party/webrtc/api/adaptation/resource.cc
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2019 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/adaptation/resource.h"
+
+namespace webrtc {
+
+const char* ResourceUsageStateToString(ResourceUsageState usage_state) {
+ switch (usage_state) {
+ case ResourceUsageState::kOveruse:
+ return "kOveruse";
+ case ResourceUsageState::kUnderuse:
+ return "kUnderuse";
+ }
+}
+
+ResourceListener::~ResourceListener() {}
+
+Resource::Resource() {}
+
+Resource::~Resource() {}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/api/adaptation/resource.h b/chromium/third_party/webrtc/api/adaptation/resource.h
new file mode 100644
index 00000000000..9b3968055f6
--- /dev/null
+++ b/chromium/third_party/webrtc/api/adaptation/resource.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright 2019 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_ADAPTATION_RESOURCE_H_
+#define API_ADAPTATION_RESOURCE_H_
+
+#include <string>
+
+#include "api/scoped_refptr.h"
+#include "rtc_base/ref_count.h"
+#include "rtc_base/system/rtc_export.h"
+
+namespace webrtc {
+
+class Resource;
+
+enum class ResourceUsageState {
+  // Action is needed to minimize the load on this resource.
+ kOveruse,
+ // Increasing the load on this resource is desired, if possible.
+ kUnderuse,
+};
+
+RTC_EXPORT const char* ResourceUsageStateToString(
+ ResourceUsageState usage_state);
+
+class RTC_EXPORT ResourceListener {
+ public:
+ virtual ~ResourceListener();
+
+ virtual void OnResourceUsageStateMeasured(
+ rtc::scoped_refptr<Resource> resource,
+ ResourceUsageState usage_state) = 0;
+};
+
+// A Resource monitors an implementation-specific resource. It may report
+// kOveruse or kUnderuse when resource usage is high or low enough that we
+// should perform some sort of mitigation to fulfil the resource's constraints.
+//
+// The methods on this interface are invoked on the adaptation task queue.
+// Resource usage measurements may be performed on any task queue.
+//
+// The Resource is reference counted to prevent use-after-free when posting
+// between task queues. As such, the implementation MUST NOT make any
+// assumptions about which task queue Resource is destructed on.
+class RTC_EXPORT Resource : public rtc::RefCountInterface {
+ public:
+ Resource();
+ // Destruction may happen on any task queue.
+ ~Resource() override;
+
+ virtual std::string Name() const = 0;
+ // The |listener| may be informed of resource usage measurements on any task
+ // queue, but not after this method is invoked with the null argument.
+ virtual void SetResourceListener(ResourceListener* listener) = 0;
+};
+
+} // namespace webrtc
+
+#endif // API_ADAPTATION_RESOURCE_H_
diff --git a/chromium/third_party/webrtc/api/array_view_unittest.cc b/chromium/third_party/webrtc/api/array_view_unittest.cc
index 8aa858805f0..0357f68aa2e 100644
--- a/chromium/third_party/webrtc/api/array_view_unittest.cc
+++ b/chromium/third_party/webrtc/api/array_view_unittest.cc
@@ -38,7 +38,7 @@ void CallFixed(ArrayView<T, N> av) {}
} // namespace
-TEST(ArrayViewTest, TestConstructFromPtrAndArray) {
+TEST(ArrayViewDeathTest, TestConstructFromPtrAndArray) {
char arr[] = "Arrr!";
const char carr[] = "Carrr!";
EXPECT_EQ(6u, Call<const char>(arr));
@@ -409,7 +409,7 @@ TEST(FixArrayViewTest, TestSwapFixed) {
// swap(x, w); // Compile error, because different sizes.
}
-TEST(ArrayViewTest, TestIndexing) {
+TEST(ArrayViewDeathTest, TestIndexing) {
char arr[] = "abcdefg";
ArrayView<char> x(arr);
const ArrayView<char> y(arr);
diff --git a/chromium/third_party/webrtc/api/audio/BUILD.gn b/chromium/third_party/webrtc/api/audio/BUILD.gn
index 4c8004ed2db..117e5cc0abe 100644
--- a/chromium/third_party/webrtc/api/audio/BUILD.gn
+++ b/chromium/third_party/webrtc/api/audio/BUILD.gn
@@ -61,8 +61,8 @@ rtc_library("aec3_config_json") {
"../../rtc_base:rtc_base_approved",
"../../rtc_base:rtc_json",
"../../rtc_base/system:rtc_export",
- "//third_party/abseil-cpp/absl/strings",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
rtc_library("aec3_factory") {
diff --git a/chromium/third_party/webrtc/api/audio/audio_frame.cc b/chromium/third_party/webrtc/api/audio/audio_frame.cc
index 47459ac333b..c6e5cf4dd61 100644
--- a/chromium/third_party/webrtc/api/audio/audio_frame.cc
+++ b/chromium/third_party/webrtc/api/audio/audio_frame.cc
@@ -11,6 +11,8 @@
#include "api/audio/audio_frame.h"
#include <string.h>
+#include <algorithm>
+#include <utility>
#include "rtc_base/checks.h"
#include "rtc_base/time_utils.h"
@@ -22,6 +24,28 @@ AudioFrame::AudioFrame() {
static_assert(sizeof(data_) == kMaxDataSizeBytes, "kMaxDataSizeBytes");
}
+void swap(AudioFrame& a, AudioFrame& b) {
+ using std::swap;
+ swap(a.timestamp_, b.timestamp_);
+ swap(a.elapsed_time_ms_, b.elapsed_time_ms_);
+ swap(a.ntp_time_ms_, b.ntp_time_ms_);
+ swap(a.samples_per_channel_, b.samples_per_channel_);
+ swap(a.sample_rate_hz_, b.sample_rate_hz_);
+ swap(a.num_channels_, b.num_channels_);
+ swap(a.channel_layout_, b.channel_layout_);
+ swap(a.speech_type_, b.speech_type_);
+ swap(a.vad_activity_, b.vad_activity_);
+ swap(a.profile_timestamp_ms_, b.profile_timestamp_ms_);
+ swap(a.packet_infos_, b.packet_infos_);
+ const size_t length_a = a.samples_per_channel_ * a.num_channels_;
+ const size_t length_b = b.samples_per_channel_ * b.num_channels_;
+ RTC_DCHECK_LE(length_a, AudioFrame::kMaxDataSizeSamples);
+ RTC_DCHECK_LE(length_b, AudioFrame::kMaxDataSizeSamples);
+ std::swap_ranges(a.data_, a.data_ + std::max(length_a, length_b), b.data_);
+ swap(a.muted_, b.muted_);
+ swap(a.absolute_capture_timestamp_ms_, b.absolute_capture_timestamp_ms_);
+}
+
void AudioFrame::Reset() {
ResetWithoutMuting();
muted_ = true;
diff --git a/chromium/third_party/webrtc/api/audio/audio_frame.h b/chromium/third_party/webrtc/api/audio/audio_frame.h
index 06b0b28b38f..78539f57eb5 100644
--- a/chromium/third_party/webrtc/api/audio/audio_frame.h
+++ b/chromium/third_party/webrtc/api/audio/audio_frame.h
@@ -14,6 +14,8 @@
#include <stddef.h>
#include <stdint.h>
+#include <utility>
+
#include "api/audio/channel_layout.h"
#include "api/rtp_packet_infos.h"
#include "rtc_base/constructor_magic.h"
@@ -58,6 +60,8 @@ class AudioFrame {
AudioFrame();
+ friend void swap(AudioFrame& a, AudioFrame& b);
+
// Resets all members to their default state.
void Reset();
// Same as Reset(), but leaves mute state unchanged. Muting a frame requires
diff --git a/chromium/third_party/webrtc/api/audio/test/audio_frame_unittest.cc b/chromium/third_party/webrtc/api/audio/test/audio_frame_unittest.cc
index dbf45ceabc8..f8d33182749 100644
--- a/chromium/third_party/webrtc/api/audio/test/audio_frame_unittest.cc
+++ b/chromium/third_party/webrtc/api/audio/test/audio_frame_unittest.cc
@@ -133,4 +133,54 @@ TEST(AudioFrameTest, CopyFrom) {
EXPECT_EQ(0, memcmp(frame2.data(), frame1.data(), sizeof(samples)));
}
+TEST(AudioFrameTest, SwapFrames) {
+ AudioFrame frame1, frame2;
+ int16_t samples1[kNumChannelsMono * kSamplesPerChannel];
+ for (size_t i = 0; i < kNumChannelsMono * kSamplesPerChannel; ++i) {
+ samples1[i] = i;
+ }
+ frame1.UpdateFrame(kTimestamp, samples1, kSamplesPerChannel, kSampleRateHz,
+ AudioFrame::kPLC, AudioFrame::kVadActive,
+ kNumChannelsMono);
+ frame1.set_absolute_capture_timestamp_ms(12345678);
+ const auto frame1_channel_layout = frame1.channel_layout();
+
+ int16_t samples2[(kNumChannelsMono + 1) * (kSamplesPerChannel + 1)];
+ for (size_t i = 0; i < (kNumChannelsMono + 1) * (kSamplesPerChannel + 1);
+ ++i) {
+ samples2[i] = 1000 + i;
+ }
+ frame2.UpdateFrame(kTimestamp + 1, samples2, kSamplesPerChannel + 1,
+ kSampleRateHz + 1, AudioFrame::kNormalSpeech,
+ AudioFrame::kVadPassive, kNumChannelsMono + 1);
+ const auto frame2_channel_layout = frame2.channel_layout();
+
+ swap(frame1, frame2);
+
+ EXPECT_EQ(kTimestamp + 1, frame1.timestamp_);
+ ASSERT_EQ(kSamplesPerChannel + 1, frame1.samples_per_channel_);
+ EXPECT_EQ(kSampleRateHz + 1, frame1.sample_rate_hz_);
+ EXPECT_EQ(AudioFrame::kNormalSpeech, frame1.speech_type_);
+ EXPECT_EQ(AudioFrame::kVadPassive, frame1.vad_activity_);
+ ASSERT_EQ(kNumChannelsMono + 1, frame1.num_channels_);
+ for (size_t i = 0; i < (kNumChannelsMono + 1) * (kSamplesPerChannel + 1);
+ ++i) {
+ EXPECT_EQ(samples2[i], frame1.data()[i]);
+ }
+ EXPECT_FALSE(frame1.absolute_capture_timestamp_ms());
+ EXPECT_EQ(frame2_channel_layout, frame1.channel_layout());
+
+ EXPECT_EQ(kTimestamp, frame2.timestamp_);
+ ASSERT_EQ(kSamplesPerChannel, frame2.samples_per_channel_);
+ EXPECT_EQ(kSampleRateHz, frame2.sample_rate_hz_);
+ EXPECT_EQ(AudioFrame::kPLC, frame2.speech_type_);
+ EXPECT_EQ(AudioFrame::kVadActive, frame2.vad_activity_);
+ ASSERT_EQ(kNumChannelsMono, frame2.num_channels_);
+ for (size_t i = 0; i < kNumChannelsMono * kSamplesPerChannel; ++i) {
+ EXPECT_EQ(samples1[i], frame2.data()[i]);
+ }
+ EXPECT_EQ(12345678, frame2.absolute_capture_timestamp_ms());
+ EXPECT_EQ(frame1_channel_layout, frame2.channel_layout());
+}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/api/audio_codecs/BUILD.gn b/chromium/third_party/webrtc/api/audio_codecs/BUILD.gn
index 987e20f178b..b6292de570d 100644
--- a/chromium/third_party/webrtc/api/audio_codecs/BUILD.gn
+++ b/chromium/third_party/webrtc/api/audio_codecs/BUILD.gn
@@ -38,6 +38,8 @@ rtc_library("audio_codecs_api") {
"../../rtc_base:sanitizer",
"../../rtc_base/system:rtc_export",
"../units:time_delta",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
diff --git a/chromium/third_party/webrtc/api/audio_codecs/L16/BUILD.gn b/chromium/third_party/webrtc/api/audio_codecs/L16/BUILD.gn
index bef671237e2..1f7a1e5a0b6 100644
--- a/chromium/third_party/webrtc/api/audio_codecs/L16/BUILD.gn
+++ b/chromium/third_party/webrtc/api/audio_codecs/L16/BUILD.gn
@@ -25,6 +25,8 @@ rtc_library("audio_encoder_L16") {
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base:safe_minmax",
"../../../rtc_base/system:rtc_export",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -42,6 +44,8 @@ rtc_library("audio_decoder_L16") {
"../../../modules/audio_coding:pcm16b",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
diff --git a/chromium/third_party/webrtc/api/audio_codecs/g711/BUILD.gn b/chromium/third_party/webrtc/api/audio_codecs/g711/BUILD.gn
index ba0586b9010..92d77bed9f8 100644
--- a/chromium/third_party/webrtc/api/audio_codecs/g711/BUILD.gn
+++ b/chromium/third_party/webrtc/api/audio_codecs/g711/BUILD.gn
@@ -25,6 +25,8 @@ rtc_library("audio_encoder_g711") {
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base:safe_minmax",
"../../../rtc_base/system:rtc_export",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -42,6 +44,8 @@ rtc_library("audio_decoder_g711") {
"../../../modules/audio_coding:g711",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
diff --git a/chromium/third_party/webrtc/api/audio_codecs/g722/BUILD.gn b/chromium/third_party/webrtc/api/audio_codecs/g722/BUILD.gn
index 8738ef889aa..a186eabbb7a 100644
--- a/chromium/third_party/webrtc/api/audio_codecs/g722/BUILD.gn
+++ b/chromium/third_party/webrtc/api/audio_codecs/g722/BUILD.gn
@@ -31,6 +31,8 @@ rtc_library("audio_encoder_g722") {
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base:safe_minmax",
"../../../rtc_base/system:rtc_export",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -48,6 +50,8 @@ rtc_library("audio_decoder_g722") {
"../../../modules/audio_coding:g722",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
diff --git a/chromium/third_party/webrtc/api/audio_codecs/ilbc/BUILD.gn b/chromium/third_party/webrtc/api/audio_codecs/ilbc/BUILD.gn
index 066a73cff22..b6a5045eaf9 100644
--- a/chromium/third_party/webrtc/api/audio_codecs/ilbc/BUILD.gn
+++ b/chromium/third_party/webrtc/api/audio_codecs/ilbc/BUILD.gn
@@ -30,6 +30,8 @@ rtc_library("audio_encoder_ilbc") {
"../../../modules/audio_coding:ilbc",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base:safe_minmax",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -46,6 +48,8 @@ rtc_library("audio_decoder_ilbc") {
"..:audio_codecs_api",
"../../../modules/audio_coding:ilbc",
"../../../rtc_base:rtc_base_approved",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
diff --git a/chromium/third_party/webrtc/api/audio_codecs/isac/BUILD.gn b/chromium/third_party/webrtc/api/audio_codecs/isac/BUILD.gn
index 9eb32147e14..6ff6e5f0923 100644
--- a/chromium/third_party/webrtc/api/audio_codecs/isac/BUILD.gn
+++ b/chromium/third_party/webrtc/api/audio_codecs/isac/BUILD.gn
@@ -68,6 +68,8 @@ rtc_library("audio_encoder_isac_fix") {
"../../../modules/audio_coding:isac_fix",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -85,6 +87,8 @@ rtc_library("audio_decoder_isac_fix") {
"../../../modules/audio_coding:isac_fix",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -102,6 +106,8 @@ rtc_library("audio_encoder_isac_float") {
"../../../modules/audio_coding:isac",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -119,6 +125,8 @@ rtc_library("audio_decoder_isac_float") {
"../../../modules/audio_coding:isac",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
diff --git a/chromium/third_party/webrtc/api/audio_codecs/opus/BUILD.gn b/chromium/third_party/webrtc/api/audio_codecs/opus/BUILD.gn
index 5fb626d9904..586e9b3dd88 100644
--- a/chromium/third_party/webrtc/api/audio_codecs/opus/BUILD.gn
+++ b/chromium/third_party/webrtc/api/audio_codecs/opus/BUILD.gn
@@ -23,8 +23,8 @@ rtc_library("audio_encoder_opus_config") {
deps = [
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
defines = []
if (rtc_opus_variable_complexity) {
defines += [ "WEBRTC_OPUS_VARIABLE_COMPLEXITY=1" ]
@@ -49,6 +49,8 @@ rtc_library("audio_encoder_opus") {
"../../../modules/audio_coding:webrtc_opus",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -66,6 +68,8 @@ rtc_library("audio_decoder_opus") {
"../../../modules/audio_coding:webrtc_opus",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -82,8 +86,8 @@ rtc_library("audio_encoder_multiopus") {
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
"../opus:audio_encoder_opus_config",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("audio_decoder_multiopus") {
@@ -99,6 +103,8 @@ rtc_library("audio_decoder_multiopus") {
"../../../modules/audio_coding:webrtc_multiopus",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
diff --git a/chromium/third_party/webrtc/api/audio_options.h b/chromium/third_party/webrtc/api/audio_options.h
index b714998c6b7..1b0d1ad0bd1 100644
--- a/chromium/third_party/webrtc/api/audio_options.h
+++ b/chromium/third_party/webrtc/api/audio_options.h
@@ -75,6 +75,8 @@ struct RTC_EXPORT AudioOptions {
// and check if any other AudioOptions members are unused.
absl::optional<bool> combined_audio_video_bwe;
// Enable audio network adaptor.
+ // TODO(webrtc:11717): Remove this API in favor of adaptivePtime in
+ // RtpEncodingParameters.
absl::optional<bool> audio_network_adaptor;
// Config string for audio network adaptor.
absl::optional<std::string> audio_network_adaptor_config;
diff --git a/chromium/third_party/webrtc/api/data_channel_interface.h b/chromium/third_party/webrtc/api/data_channel_interface.h
index e08830feaf3..5b2b1263ab7 100644
--- a/chromium/third_party/webrtc/api/data_channel_interface.h
+++ b/chromium/third_party/webrtc/api/data_channel_interface.h
@@ -20,6 +20,7 @@
#include <string>
#include "absl/types/optional.h"
+#include "api/priority.h"
#include "api/rtc_error.h"
#include "rtc_base/checks.h"
#include "rtc_base/copy_on_write_buffer.h"
@@ -61,6 +62,9 @@ struct DataChannelInit {
// The stream id, or SID, for SCTP data channels. -1 if unset (see above).
int id = -1;
+
+ // https://w3c.github.io/webrtc-priority/#new-rtcdatachannelinit-member
+ absl::optional<Priority> priority;
};
// At the JavaScript level, data can be passed in as a string or a blob, so
@@ -154,6 +158,7 @@ class RTC_EXPORT DataChannelInterface : public rtc::RefCountInterface {
// If negotiated in-band, this ID will be populated once the DTLS role is
// determined, and until then this will return -1.
virtual int id() const = 0;
+ virtual Priority priority() const { return Priority::kLow; }
virtual DataState state() const = 0;
// When state is kClosed, and the DataChannel was not closed using
// the closing procedure, returns the error information about the closing.
diff --git a/chromium/third_party/webrtc/api/frame_transformer_interface.h b/chromium/third_party/webrtc/api/frame_transformer_interface.h
index e712b3c190a..2cfe6edb884 100644
--- a/chromium/third_party/webrtc/api/frame_transformer_interface.h
+++ b/chromium/third_party/webrtc/api/frame_transformer_interface.h
@@ -16,6 +16,7 @@
#include "api/scoped_refptr.h"
#include "api/video/encoded_frame.h"
+#include "api/video/video_frame_metadata.h"
#include "rtc_base/ref_count.h"
namespace webrtc {
@@ -48,6 +49,8 @@ class TransformableVideoFrameInterface : public TransformableFrameInterface {
// TODO(bugs.webrtc.org/11380) remove from interface once
// webrtc::RtpDescriptorAuthentication is exposed in api/.
virtual std::vector<uint8_t> GetAdditionalData() const = 0;
+
+ virtual const VideoFrameMetadata& GetMetadata() const = 0;
};
// Extends the TransformableFrameInterface to expose audio-specific information.
diff --git a/chromium/third_party/webrtc/api/neteq/BUILD.gn b/chromium/third_party/webrtc/api/neteq/BUILD.gn
index 1ab02ec92b1..4e85c4d268c 100644
--- a/chromium/third_party/webrtc/api/neteq/BUILD.gn
+++ b/chromium/third_party/webrtc/api/neteq/BUILD.gn
@@ -23,8 +23,8 @@ rtc_source_set("neteq_api") {
"../../rtc_base:rtc_base_approved",
"../../system_wrappers:system_wrappers",
"../audio_codecs:audio_codecs_api",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_source_set("custom_neteq_factory") {
@@ -56,8 +56,8 @@ rtc_source_set("neteq_controller_api") {
":tick_timer",
"../../rtc_base:rtc_base_approved",
"../../system_wrappers:system_wrappers",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_source_set("default_neteq_controller_factory") {
diff --git a/chromium/third_party/webrtc/api/neteq/neteq.cc b/chromium/third_party/webrtc/api/neteq/neteq.cc
index 155ddf2cf3d..e8ef4dbd39b 100644
--- a/chromium/third_party/webrtc/api/neteq/neteq.cc
+++ b/chromium/third_party/webrtc/api/neteq/neteq.cc
@@ -30,7 +30,8 @@ std::string NetEq::Config::ToString() const {
<< ", min_delay_ms=" << min_delay_ms << ", enable_fast_accelerate="
<< (enable_fast_accelerate ? "true" : "false")
<< ", enable_muted_state=" << (enable_muted_state ? "true" : "false")
- << ", enable_rtx_handling=" << (enable_rtx_handling ? "true" : "false");
+ << ", enable_rtx_handling=" << (enable_rtx_handling ? "true" : "false")
+ << ", extra_output_delay_ms=" << extra_output_delay_ms;
return ss.str();
}
diff --git a/chromium/third_party/webrtc/api/neteq/neteq.h b/chromium/third_party/webrtc/api/neteq/neteq.h
index f62d3795f05..15ad3aac0ec 100644
--- a/chromium/third_party/webrtc/api/neteq/neteq.h
+++ b/chromium/third_party/webrtc/api/neteq/neteq.h
@@ -138,6 +138,10 @@ class NetEq {
bool enable_rtx_handling = false;
absl::optional<AudioCodecPairId> codec_pair_id;
bool for_test_no_time_stretching = false; // Use only for testing.
+ // Adds extra delay to the output of NetEq, without affecting jitter or
+ // loss behavior. This is mainly for testing. Value must be a non-negative
+ // multiple of 10 ms.
+ int extra_output_delay_ms = 0;
};
enum ReturnCodes { kOK = 0, kFail = -1 };
diff --git a/chromium/third_party/webrtc/api/peer_connection_interface.h b/chromium/third_party/webrtc/api/peer_connection_interface.h
index 1d81de74d83..2664af4e5e6 100644
--- a/chromium/third_party/webrtc/api/peer_connection_interface.h
+++ b/chromium/third_party/webrtc/api/peer_connection_interface.h
@@ -73,6 +73,7 @@
#include <string>
#include <vector>
+#include "api/adaptation/resource.h"
#include "api/async_resolver_factory.h"
#include "api/audio/audio_mixer.h"
#include "api/audio_codecs/audio_decoder_factory.h"
@@ -102,7 +103,6 @@
#include "api/task_queue/task_queue_factory.h"
#include "api/transport/bitrate_settings.h"
#include "api/transport/enums.h"
-#include "api/transport/media/media_transport_interface.h"
#include "api/transport/network_control.h"
#include "api/transport/webrtc_key_value_config.h"
#include "api/turn_customizer.h"
@@ -613,34 +613,6 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface {
// correctly. This flag will be deprecated soon. Do not rely on it.
bool active_reset_srtp_params = false;
- // DEPRECATED. Do not use. This option is ignored by peer connection.
- // TODO(webrtc:9719): Delete this option.
- bool use_media_transport = false;
-
- // DEPRECATED. Do not use. This option is ignored by peer connection.
- // TODO(webrtc:9719): Delete this option.
- bool use_media_transport_for_data_channels = false;
-
- // If MediaTransportFactory is provided in PeerConnectionFactory, this flag
- // informs PeerConnection that it should use the DatagramTransportInterface
- // for packets instead DTLS. It's invalid to set it to |true| if the
- // MediaTransportFactory wasn't provided.
- absl::optional<bool> use_datagram_transport;
-
- // If MediaTransportFactory is provided in PeerConnectionFactory, this flag
- // informs PeerConnection that it should use the DatagramTransport's
- // implementation of DataChannelTransportInterface for data channels instead
- // of SCTP-DTLS.
- absl::optional<bool> use_datagram_transport_for_data_channels;
-
- // If true, this PeerConnection will only use datagram transport for data
- // channels when receiving an incoming offer that includes datagram
- // transport parameters. It will not request use of a datagram transport
- // when it creates the initial, outgoing offer.
- // This setting only applies when |use_datagram_transport_for_data_channels|
- // is true.
- absl::optional<bool> use_datagram_transport_for_data_channels_receive_only;
-
// Defines advanced optional cryptographic settings related to SRTP and
// frame encryption for native WebRTC. Setting this will overwrite any
// settings set in PeerConnectionFactory (which is deprecated).
@@ -666,8 +638,6 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface {
// Whether network condition based codec switching is allowed.
absl::optional<bool> allow_codec_switching;
- bool enable_simulcast_stats = true;
-
//
// Don't forget to update operator== if adding something.
//
@@ -1118,6 +1088,14 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface {
return absl::nullopt;
}
+ // When a resource is overused, the PeerConnection will try to reduce the load
+  // on the system, for example by reducing the resolution or frame rate of
+ // encoded streams. The Resource API allows injecting platform-specific usage
+ // measurements. The conditions to trigger kOveruse or kUnderuse are up to the
+ // implementation.
+ // TODO(hbos): Make pure virtual when implemented by downstream projects.
+ virtual void AddAdaptationResource(rtc::scoped_refptr<Resource> resource) {}
+
// Start RtcEventLog using an existing output-sink. Takes ownership of
// |output| and passes it on to Call, which will take the ownership. If the
// operation fails the output will be closed and deallocated. The event log
@@ -1332,7 +1310,6 @@ struct RTC_EXPORT PeerConnectionFactoryDependencies final {
std::unique_ptr<NetworkStatePredictorFactoryInterface>
network_state_predictor_factory;
std::unique_ptr<NetworkControllerFactoryInterface> network_controller_factory;
- std::unique_ptr<MediaTransportFactory> media_transport_factory;
std::unique_ptr<NetEqFactory> neteq_factory;
std::unique_ptr<WebRtcKeyValueConfig> trials;
};
diff --git a/chromium/third_party/webrtc/api/peer_connection_proxy.h b/chromium/third_party/webrtc/api/peer_connection_proxy.h
index c278308ccbe..23887e53dae 100644
--- a/chromium/third_party/webrtc/api/peer_connection_proxy.h
+++ b/chromium/third_party/webrtc/api/peer_connection_proxy.h
@@ -132,6 +132,7 @@ PROXY_METHOD0(IceConnectionState, standardized_ice_connection_state)
PROXY_METHOD0(PeerConnectionState, peer_connection_state)
PROXY_METHOD0(IceGatheringState, ice_gathering_state)
PROXY_METHOD0(absl::optional<bool>, can_trickle_ice_candidates)
+PROXY_METHOD1(void, AddAdaptationResource, rtc::scoped_refptr<Resource>)
PROXY_METHOD2(bool,
StartRtcEventLog,
std::unique_ptr<RtcEventLogOutput>,
diff --git a/chromium/third_party/webrtc/api/priority.h b/chromium/third_party/webrtc/api/priority.h
new file mode 100644
index 00000000000..4953e453a32
--- /dev/null
+++ b/chromium/third_party/webrtc/api/priority.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_PRIORITY_H_
+#define API_PRIORITY_H_
+
+namespace webrtc {
+
+// GENERATED_JAVA_ENUM_PACKAGE: org.webrtc
+enum class Priority {
+ kVeryLow,
+ kLow,
+ kMedium,
+ kHigh,
+};
+
+} // namespace webrtc
+
+#endif // API_PRIORITY_H_
diff --git a/chromium/third_party/webrtc/api/proxy.cc b/chromium/third_party/webrtc/api/proxy.cc
index e668285ba29..67318e7dab0 100644
--- a/chromium/third_party/webrtc/api/proxy.cc
+++ b/chromium/third_party/webrtc/api/proxy.cc
@@ -10,28 +10,3 @@
#include "api/proxy.h"
-namespace webrtc {
-namespace internal {
-
-SynchronousMethodCall::SynchronousMethodCall(rtc::MessageHandler* proxy)
- : proxy_(proxy) {}
-
-SynchronousMethodCall::~SynchronousMethodCall() = default;
-
-void SynchronousMethodCall::Invoke(const rtc::Location& posted_from,
- rtc::Thread* t) {
- if (t->IsCurrent()) {
- proxy_->OnMessage(nullptr);
- } else {
- t->Post(posted_from, this, 0);
- e_.Wait(rtc::Event::kForever);
- }
-}
-
-void SynchronousMethodCall::OnMessage(rtc::Message*) {
- proxy_->OnMessage(nullptr);
- e_.Set();
-}
-
-} // namespace internal
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/api/proxy.h b/chromium/third_party/webrtc/api/proxy.h
index 385992e6594..0253ba3dc47 100644
--- a/chromium/third_party/webrtc/api/proxy.h
+++ b/chromium/third_party/webrtc/api/proxy.h
@@ -55,9 +55,12 @@
#include <memory>
#include <string>
#include <tuple>
+#include <type_traits>
#include <utility>
#include "api/scoped_refptr.h"
+#include "api/task_queue/queued_task.h"
+#include "api/task_queue/task_queue_base.h"
#include "rtc_base/event.h"
#include "rtc_base/message_handler.h"
#include "rtc_base/ref_counted_object.h"
@@ -95,27 +98,8 @@ class ReturnType<void> {
void moved_result() {}
};
-namespace internal {
-
-class RTC_EXPORT SynchronousMethodCall : public rtc::MessageData,
- public rtc::MessageHandler {
- public:
- explicit SynchronousMethodCall(rtc::MessageHandler* proxy);
- ~SynchronousMethodCall() override;
-
- void Invoke(const rtc::Location& posted_from, rtc::Thread* t);
-
- private:
- void OnMessage(rtc::Message*) override;
-
- rtc::Event e_;
- rtc::MessageHandler* proxy_;
-};
-
-} // namespace internal
-
template <typename C, typename R, typename... Args>
-class MethodCall : public rtc::Message, public rtc::MessageHandler {
+class MethodCall : public QueuedTask {
public:
typedef R (C::*Method)(Args...);
MethodCall(C* c, Method m, Args&&... args)
@@ -124,12 +108,21 @@ class MethodCall : public rtc::Message, public rtc::MessageHandler {
args_(std::forward_as_tuple(std::forward<Args>(args)...)) {}
R Marshal(const rtc::Location& posted_from, rtc::Thread* t) {
- internal::SynchronousMethodCall(this).Invoke(posted_from, t);
+ if (t->IsCurrent()) {
+ Invoke(std::index_sequence_for<Args...>());
+ } else {
+ t->PostTask(std::unique_ptr<QueuedTask>(this));
+ event_.Wait(rtc::Event::kForever);
+ }
return r_.moved_result();
}
private:
- void OnMessage(rtc::Message*) { Invoke(std::index_sequence_for<Args...>()); }
+ bool Run() override {
+ Invoke(std::index_sequence_for<Args...>());
+ event_.Set();
+ return false;
+ }
template <size_t... Is>
void Invoke(std::index_sequence<Is...>) {
@@ -140,10 +133,11 @@ class MethodCall : public rtc::Message, public rtc::MessageHandler {
Method m_;
ReturnType<R> r_;
std::tuple<Args&&...> args_;
+ rtc::Event event_;
};
template <typename C, typename R, typename... Args>
-class ConstMethodCall : public rtc::Message, public rtc::MessageHandler {
+class ConstMethodCall : public QueuedTask {
public:
typedef R (C::*Method)(Args...) const;
ConstMethodCall(const C* c, Method m, Args&&... args)
@@ -152,12 +146,21 @@ class ConstMethodCall : public rtc::Message, public rtc::MessageHandler {
args_(std::forward_as_tuple(std::forward<Args>(args)...)) {}
R Marshal(const rtc::Location& posted_from, rtc::Thread* t) {
- internal::SynchronousMethodCall(this).Invoke(posted_from, t);
+ if (t->IsCurrent()) {
+ Invoke(std::index_sequence_for<Args...>());
+ } else {
+ t->PostTask(std::unique_ptr<QueuedTask>(this));
+ event_.Wait(rtc::Event::kForever);
+ }
return r_.moved_result();
}
private:
- void OnMessage(rtc::Message*) { Invoke(std::index_sequence_for<Args...>()); }
+ bool Run() override {
+ Invoke(std::index_sequence_for<Args...>());
+ event_.Set();
+ return false;
+ }
template <size_t... Is>
void Invoke(std::index_sequence<Is...>) {
@@ -168,6 +171,7 @@ class ConstMethodCall : public rtc::Message, public rtc::MessageHandler {
Method m_;
ReturnType<R> r_;
std::tuple<Args&&...> args_;
+ rtc::Event event_;
};
// Helper macros to reduce code duplication.
@@ -396,6 +400,16 @@ class ConstMethodCall : public rtc::Message, public rtc::MessageHandler {
return call.Marshal(RTC_FROM_HERE, worker_thread_); \
}
+// For use when returning purely const state (set during construction).
+// Use with caution. This method should only be used when the return value will
+// always be the same.
+#define BYPASS_PROXY_CONSTMETHOD0(r, method) \
+ r method() const override { \
+ static_assert(!std::is_pointer<r>::value, "Type is a pointer"); \
+ static_assert(!std::is_reference<r>::value, "Type is a reference"); \
+ return c_->method(); \
+ }
+
} // namespace webrtc
#endif // API_PROXY_H_
diff --git a/chromium/third_party/webrtc/api/rtc_event_log_output_file_unittest.cc b/chromium/third_party/webrtc/api/rtc_event_log_output_file_unittest.cc
index 071909b2c54..4274215491f 100644
--- a/chromium/third_party/webrtc/api/rtc_event_log_output_file_unittest.cc
+++ b/chromium/third_party/webrtc/api/rtc_event_log_output_file_unittest.cc
@@ -141,14 +141,16 @@ TEST_F(RtcEventLogOutputFileTest, AllowReasonableFileSizeLimits) {
}
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST_F(RtcEventLogOutputFileTest, WritingToInactiveFileForbidden) {
+class RtcEventLogOutputFileDeathTest : public RtcEventLogOutputFileTest {};
+
+TEST_F(RtcEventLogOutputFileDeathTest, WritingToInactiveFileForbidden) {
RtcEventLogOutputFile output_file(output_file_name_, 2);
ASSERT_FALSE(output_file.Write("abc"));
ASSERT_FALSE(output_file.IsActive());
EXPECT_DEATH(output_file.Write("abc"), "");
}
-TEST_F(RtcEventLogOutputFileTest, DisallowUnreasonableFileSizeLimits) {
+TEST_F(RtcEventLogOutputFileDeathTest, DisallowUnreasonableFileSizeLimits) {
// Keeping in a temporary unique_ptr to make it clearer that the death is
// triggered by construction, not destruction.
std::unique_ptr<RtcEventLogOutputFile> output_file;
diff --git a/chromium/third_party/webrtc/api/rtp_headers.cc b/chromium/third_party/webrtc/api/rtp_headers.cc
index bf973b6fe57..e0ad9eb26ef 100644
--- a/chromium/third_party/webrtc/api/rtp_headers.cc
+++ b/chromium/third_party/webrtc/api/rtp_headers.cc
@@ -26,9 +26,7 @@ RTPHeaderExtension::RTPHeaderExtension()
videoRotation(kVideoRotation_0),
hasVideoContentType(false),
videoContentType(VideoContentType::UNSPECIFIED),
- has_video_timing(false),
- has_frame_marking(false),
- frame_marking({false, false, false, false, false, 0xFF, 0, 0}) {}
+ has_video_timing(false) {}
RTPHeaderExtension::RTPHeaderExtension(const RTPHeaderExtension& other) =
default;
diff --git a/chromium/third_party/webrtc/api/rtp_headers.h b/chromium/third_party/webrtc/api/rtp_headers.h
index 163347f6758..454149ca6e7 100644
--- a/chromium/third_party/webrtc/api/rtp_headers.h
+++ b/chromium/third_party/webrtc/api/rtp_headers.h
@@ -21,10 +21,9 @@
#include "api/units/timestamp.h"
#include "api/video/color_space.h"
#include "api/video/video_content_type.h"
-#include "api/video/video_frame_marking.h"
#include "api/video/video_rotation.h"
#include "api/video/video_timing.h"
-#include "common_types.h" // NOLINT(build/include)
+#include "common_types.h" // NOLINT(build/include)
namespace webrtc {
@@ -143,9 +142,6 @@ struct RTPHeaderExtension {
bool has_video_timing;
VideoSendTiming video_timing;
- bool has_frame_marking;
- FrameMarking frame_marking;
-
PlayoutDelay playout_delay = {-1, -1};
// For identification of a stream when ssrc is not signaled. See
diff --git a/chromium/third_party/webrtc/api/rtp_parameters.cc b/chromium/third_party/webrtc/api/rtp_parameters.cc
index a05b2bfa7bb..28acb68be6a 100644
--- a/chromium/third_party/webrtc/api/rtp_parameters.cc
+++ b/chromium/third_party/webrtc/api/rtp_parameters.cc
@@ -18,6 +18,20 @@
namespace webrtc {
+const char* DegradationPreferenceToString(
+ DegradationPreference degradation_preference) {
+ switch (degradation_preference) {
+ case DegradationPreference::DISABLED:
+ return "disabled";
+ case DegradationPreference::MAINTAIN_FRAMERATE:
+ return "maintain-framerate";
+ case DegradationPreference::MAINTAIN_RESOLUTION:
+ return "maintain-resolution";
+ case DegradationPreference::BALANCED:
+ return "balanced";
+ }
+}
+
const double kDefaultBitratePriority = 1.0;
RtcpFeedback::RtcpFeedback() = default;
@@ -105,7 +119,6 @@ constexpr char RtpExtension::kAbsoluteCaptureTimeUri[];
constexpr char RtpExtension::kVideoRotationUri[];
constexpr char RtpExtension::kVideoContentTypeUri[];
constexpr char RtpExtension::kVideoTimingUri[];
-constexpr char RtpExtension::kFrameMarkingUri[];
constexpr char RtpExtension::kGenericFrameDescriptorUri00[];
constexpr char RtpExtension::kDependencyDescriptorUri[];
constexpr char RtpExtension::kTransportSequenceNumberUri[];
@@ -144,7 +157,6 @@ bool RtpExtension::IsSupportedForVideo(absl::string_view uri) {
uri == webrtc::RtpExtension::kVideoContentTypeUri ||
uri == webrtc::RtpExtension::kVideoTimingUri ||
uri == webrtc::RtpExtension::kMidUri ||
- uri == webrtc::RtpExtension::kFrameMarkingUri ||
uri == webrtc::RtpExtension::kGenericFrameDescriptorUri00 ||
uri == webrtc::RtpExtension::kDependencyDescriptorUri ||
uri == webrtc::RtpExtension::kColorSpaceUri ||
diff --git a/chromium/third_party/webrtc/api/rtp_parameters.h b/chromium/third_party/webrtc/api/rtp_parameters.h
index 49c1e0c8852..b667bf812ca 100644
--- a/chromium/third_party/webrtc/api/rtp_parameters.h
+++ b/chromium/third_party/webrtc/api/rtp_parameters.h
@@ -20,6 +20,7 @@
#include "absl/strings/string_view.h"
#include "absl/types/optional.h"
#include "api/media_types.h"
+#include "api/priority.h"
#include "api/rtp_transceiver_direction.h"
#include "rtc_base/system/rtc_export.h"
@@ -91,15 +92,10 @@ enum class DegradationPreference {
BALANCED,
};
-RTC_EXPORT extern const double kDefaultBitratePriority;
+RTC_EXPORT const char* DegradationPreferenceToString(
+ DegradationPreference degradation_preference);
-// GENERATED_JAVA_ENUM_PACKAGE: org.webrtc
-enum class Priority {
- kVeryLow,
- kLow,
- kMedium,
- kHigh,
-};
+RTC_EXPORT extern const double kDefaultBitratePriority;
struct RTC_EXPORT RtcpFeedback {
RtcpFeedbackType type = RtcpFeedbackType::CCM;
@@ -226,7 +222,7 @@ struct RTC_EXPORT RtpHeaderExtensionCapability {
bool preferred_encrypt = false;
// The direction of the extension. The kStopped value is only used with
- // RtpTransceiverInterface::header_extensions_offered() and
+ // RtpTransceiverInterface::HeaderExtensionsToOffer() and
// SetOfferedRtpHeaderExtensions().
RtpTransceiverDirection direction = RtpTransceiverDirection::kSendRecv;
@@ -314,10 +310,6 @@ struct RTC_EXPORT RtpExtension {
static constexpr char kVideoTimingUri[] =
"http://www.webrtc.org/experiments/rtp-hdrext/video-timing";
- // Header extension for video frame marking.
- static constexpr char kFrameMarkingUri[] =
- "http://tools.ietf.org/html/draft-ietf-avtext-framemarking-07";
-
// Experimental codec agnostic frame descriptor.
static constexpr char kGenericFrameDescriptorUri00[] =
"http://www.webrtc.org/experiments/rtp-hdrext/"
@@ -481,6 +473,10 @@ struct RTC_EXPORT RtpEncodingParameters {
// Called "encodingId" in ORTC.
std::string rid;
+ // Allow dynamic frame length changes for audio:
+ // https://w3c.github.io/webrtc-extensions/#dom-rtcrtpencodingparameters-adaptiveptime
+ bool adaptive_ptime = false;
+
bool operator==(const RtpEncodingParameters& o) const {
return ssrc == o.ssrc && bitrate_priority == o.bitrate_priority &&
network_priority == o.network_priority &&
@@ -489,7 +485,8 @@ struct RTC_EXPORT RtpEncodingParameters {
max_framerate == o.max_framerate &&
num_temporal_layers == o.num_temporal_layers &&
scale_resolution_down_by == o.scale_resolution_down_by &&
- active == o.active && rid == o.rid;
+ active == o.active && rid == o.rid &&
+ adaptive_ptime == o.adaptive_ptime;
}
bool operator!=(const RtpEncodingParameters& o) const {
return !(*this == o);
diff --git a/chromium/third_party/webrtc/api/rtp_transceiver_interface.cc b/chromium/third_party/webrtc/api/rtp_transceiver_interface.cc
index d4e2b26e333..e795e51dfb1 100644
--- a/chromium/third_party/webrtc/api/rtp_transceiver_interface.cc
+++ b/chromium/third_party/webrtc/api/rtp_transceiver_interface.cc
@@ -41,4 +41,10 @@ RtpTransceiverInterface::HeaderExtensionsToOffer() const {
return {};
}
+webrtc::RTCError RtpTransceiverInterface::SetOfferedRtpHeaderExtensions(
+ rtc::ArrayView<const RtpHeaderExtensionCapability>
+ header_extensions_to_offer) {
+ return webrtc::RTCError(webrtc::RTCErrorType::UNSUPPORTED_OPERATION);
+}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/api/rtp_transceiver_interface.h b/chromium/third_party/webrtc/api/rtp_transceiver_interface.h
index 9dbafd46ecd..13277d9a502 100644
--- a/chromium/third_party/webrtc/api/rtp_transceiver_interface.h
+++ b/chromium/third_party/webrtc/api/rtp_transceiver_interface.h
@@ -133,6 +133,13 @@ class RTC_EXPORT RtpTransceiverInterface : public rtc::RefCountInterface {
virtual std::vector<RtpHeaderExtensionCapability> HeaderExtensionsToOffer()
const;
+ // The SetOfferedRtpHeaderExtensions method modifies the next SDP negotiation
+ // so that it negotiates use of header extensions which are not kStopped.
+ // https://w3c.github.io/webrtc-extensions/#rtcrtptransceiver-interface
+ virtual webrtc::RTCError SetOfferedRtpHeaderExtensions(
+ rtc::ArrayView<const RtpHeaderExtensionCapability>
+ header_extensions_to_offer);
+
protected:
~RtpTransceiverInterface() override = default;
};
diff --git a/chromium/third_party/webrtc/api/stats/rtc_stats.h b/chromium/third_party/webrtc/api/stats/rtc_stats.h
index d45902e0a5c..5de5b7fbb04 100644
--- a/chromium/third_party/webrtc/api/stats/rtc_stats.h
+++ b/chromium/third_party/webrtc/api/stats/rtc_stats.h
@@ -319,6 +319,14 @@ class RTCStatsMember : public RTCStatsMemberInterface {
std::string ValueToString() const override;
std::string ValueToJson() const override;
+ template <typename U>
+ inline T ValueOrDefault(U default_value) const {
+ if (is_defined()) {
+ return *(*this);
+ }
+ return default_value;
+ }
+
// Assignment operators.
T& operator=(const T& value) {
value_ = value;
diff --git a/chromium/third_party/webrtc/api/stats/rtcstats_objects.h b/chromium/third_party/webrtc/api/stats/rtcstats_objects.h
index 28d841db09b..3458d6fef7c 100644
--- a/chromium/third_party/webrtc/api/stats/rtcstats_objects.h
+++ b/chromium/third_party/webrtc/api/stats/rtcstats_objects.h
@@ -134,7 +134,7 @@ class RTC_EXPORT RTCDataChannelStats final : public RTCStats {
RTCStatsMember<std::string> label;
RTCStatsMember<std::string> protocol;
- RTCStatsMember<int32_t> datachannelid;
+ RTCStatsMember<int32_t> data_channel_identifier;
// TODO(hbos): Support enum types? "RTCStatsMember<RTCDataChannelState>"?
RTCStatsMember<std::string> state;
RTCStatsMember<uint32_t> messages_sent;
diff --git a/chromium/third_party/webrtc/api/task_queue/BUILD.gn b/chromium/third_party/webrtc/api/task_queue/BUILD.gn
index 4c9f591ec1d..1072057e3fc 100644
--- a/chromium/third_party/webrtc/api/task_queue/BUILD.gn
+++ b/chromium/third_party/webrtc/api/task_queue/BUILD.gn
@@ -21,6 +21,8 @@ rtc_library("task_queue") {
"../../rtc_base:checks",
"../../rtc_base:macromagic",
"../../rtc_base/system:rtc_export",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/base:config",
"//third_party/abseil-cpp/absl/base:core_headers",
"//third_party/abseil-cpp/absl/strings",
@@ -51,6 +53,8 @@ rtc_library("task_queue_test") {
deps = [
"../../../webrtc_overrides:webrtc_component",
"../../test:test_support",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings",
]
@@ -62,6 +66,8 @@ rtc_library("task_queue_test") {
"../../rtc_base:timeutils",
"../../rtc_base/task_utils:to_queued_task",
"../../test:test_support",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings",
]
diff --git a/chromium/third_party/webrtc/api/task_queue/task_queue_test.cc b/chromium/third_party/webrtc/api/task_queue/task_queue_test.cc
index a8a799f11b7..0d411d2d9cb 100644
--- a/chromium/third_party/webrtc/api/task_queue/task_queue_test.cc
+++ b/chromium/third_party/webrtc/api/task_queue/task_queue_test.cc
@@ -37,9 +37,11 @@ TEST_P(TaskQueueTest, PostAndCheckCurrent) {
rtc::Event event;
auto queue = CreateTaskQueue(factory, "PostAndCheckCurrent");
- // We're not running a task, so there shouldn't be a current queue.
+ // We're not running a task, so |queue| shouldn't be current.
+ // Note that because rtc::Thread also supports the TQ interface and
+ // TestMainImpl::Init wraps the main test thread (bugs.webrtc.org/9714), that
+ // means that TaskQueueBase::Current() will still return a valid value.
EXPECT_FALSE(queue->IsCurrent());
- EXPECT_FALSE(TaskQueueBase::Current());
queue->PostTask(ToQueuedTask([&event, &queue] {
EXPECT_TRUE(queue->IsCurrent());
@@ -269,5 +271,10 @@ TEST_P(TaskQueueTest, PostTwoWithSharedUnprotectedState) {
EXPECT_TRUE(done.Wait(1000));
}
+// TaskQueueTest is a set of tests for any implementation of the TaskQueueBase.
+// Tests are instantiated next to the concrete implementation(s).
+// https://github.com/google/googletest/blob/master/googletest/docs/advanced.md#creating-value-parameterized-abstract-tests
+GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(TaskQueueTest);
+
} // namespace
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/api/test/audioproc_float.cc b/chromium/third_party/webrtc/api/test/audioproc_float.cc
index bba9c622a12..c8d7ff71939 100644
--- a/chromium/third_party/webrtc/api/test/audioproc_float.cc
+++ b/chromium/third_party/webrtc/api/test/audioproc_float.cc
@@ -17,6 +17,12 @@
namespace webrtc {
namespace test {
+int AudioprocFloat(rtc::scoped_refptr<AudioProcessing> audio_processing,
+ int argc,
+ char* argv[]) {
+ return AudioprocFloatImpl(std::move(audio_processing), argc, argv);
+}
+
int AudioprocFloat(std::unique_ptr<AudioProcessingBuilder> ap_builder,
int argc,
char* argv[]) {
diff --git a/chromium/third_party/webrtc/api/test/audioproc_float.h b/chromium/third_party/webrtc/api/test/audioproc_float.h
index 2625e6ad9ac..fec2ad11fa1 100644
--- a/chromium/third_party/webrtc/api/test/audioproc_float.h
+++ b/chromium/third_party/webrtc/api/test/audioproc_float.h
@@ -22,6 +22,22 @@ namespace test {
// This is an interface for the audio processing simulation utility. This
// utility can be used to simulate the audioprocessing module using a recording
// (either an AEC dump or wav files), and generate the output as a wav file.
+// Any audio_processing object specified in the input is used for the
+// simulation. The optional |audio_processing| object provides the
+// AudioProcessing instance that is used during the simulation. Note that when
+// the audio_processing object is specified all functionality that relies on
+// using the AudioProcessingBuilder is deactivated, since the AudioProcessing
+// object is already created and the builder is not used in the simulation. It
+// is needed to pass the command line flags as |argc| and |argv|, so these can
+// be interpreted properly by the utility. To see a list of all supported
+// command line flags, run the executable with the '--help' flag.
+int AudioprocFloat(rtc::scoped_refptr<AudioProcessing> audio_processing,
+ int argc,
+ char* argv[]);
+
+// This is an interface for the audio processing simulation utility. This
+// utility can be used to simulate the audioprocessing module using a recording
+// (either an AEC dump or wav files), and generate the output as a wav file.
// The |ap_builder| object will be used to create the AudioProcessing instance
// that is used during the simulation. The |ap_builder| supports setting of
// injectable components, which will be passed on to the created AudioProcessing
diff --git a/chromium/third_party/webrtc/api/test/compile_all_headers.cc b/chromium/third_party/webrtc/api/test/compile_all_headers.cc
index 47c5c6ec841..44b9b1fe687 100644
--- a/chromium/third_party/webrtc/api/test/compile_all_headers.cc
+++ b/chromium/third_party/webrtc/api/test/compile_all_headers.cc
@@ -29,14 +29,14 @@
#include "api/test/fake_frame_decryptor.h"
#include "api/test/fake_frame_encryptor.h"
-#include "api/test/fake_media_transport.h"
-#include "api/test/loopback_media_transport.h"
#include "api/test/mock_audio_mixer.h"
#include "api/test/mock_frame_decryptor.h"
#include "api/test/mock_frame_encryptor.h"
+#include "api/test/mock_peer_connection_factory_interface.h"
#include "api/test/mock_peerconnectioninterface.h"
#include "api/test/mock_rtpreceiver.h"
#include "api/test/mock_rtpsender.h"
+#include "api/test/mock_transformable_video_frame.h"
#include "api/test/mock_video_bitrate_allocator.h"
#include "api/test/mock_video_bitrate_allocator_factory.h"
#include "api/test/mock_video_decoder.h"
diff --git a/chromium/third_party/webrtc/api/test/create_network_emulation_manager.h b/chromium/third_party/webrtc/api/test/create_network_emulation_manager.h
index c57c34874c3..f4447437865 100644
--- a/chromium/third_party/webrtc/api/test/create_network_emulation_manager.h
+++ b/chromium/third_party/webrtc/api/test/create_network_emulation_manager.h
@@ -1,4 +1,3 @@
-
/*
* Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
*
@@ -18,6 +17,7 @@
namespace webrtc {
+// Returns a non-null NetworkEmulationManager instance.
std::unique_ptr<NetworkEmulationManager> CreateNetworkEmulationManager(
TimeMode mode = TimeMode::kRealTime);
diff --git a/chromium/third_party/webrtc/api/test/create_peerconnection_quality_test_fixture.h b/chromium/third_party/webrtc/api/test/create_peerconnection_quality_test_fixture.h
index 330d86de02a..9d9d0ef5e4a 100644
--- a/chromium/third_party/webrtc/api/test/create_peerconnection_quality_test_fixture.h
+++ b/chromium/third_party/webrtc/api/test/create_peerconnection_quality_test_fixture.h
@@ -25,6 +25,7 @@ namespace webrtc_pc_e2e {
// During the test Alice will be caller and Bob will answer the call.
// |test_case_name| is a name of test case, that will be used for all metrics
// reporting.
+// Returns a non-null PeerConnectionE2EQualityTestFixture instance.
std::unique_ptr<PeerConnectionE2EQualityTestFixture>
CreatePeerConnectionE2EQualityTestFixture(
std::string test_case_name,
diff --git a/chromium/third_party/webrtc/api/test/create_time_controller.cc b/chromium/third_party/webrtc/api/test/create_time_controller.cc
index d3b046bd61e..1a49020aa43 100644
--- a/chromium/third_party/webrtc/api/test/create_time_controller.cc
+++ b/chromium/third_party/webrtc/api/test/create_time_controller.cc
@@ -35,13 +35,17 @@ std::unique_ptr<CallFactoryInterface> CreateTimeControllerBasedCallFactory(
explicit TimeControllerBasedCallFactory(TimeController* time_controller)
: time_controller_(time_controller) {}
Call* CreateCall(const Call::Config& config) override {
- return Call::Create(config, time_controller_->GetClock(),
- time_controller_->CreateProcessThread("CallModules"),
+ if (!module_thread_) {
+ module_thread_ = SharedModuleThread::Create(
+ "CallModules", [this]() { module_thread_ = nullptr; });
+ }
+ return Call::Create(config, time_controller_->GetClock(), module_thread_,
time_controller_->CreateProcessThread("Pacer"));
}
private:
TimeController* time_controller_;
+ rtc::scoped_refptr<SharedModuleThread> module_thread_;
};
return std::make_unique<TimeControllerBasedCallFactory>(time_controller);
}
diff --git a/chromium/third_party/webrtc/api/test/fake_datagram_transport.h b/chromium/third_party/webrtc/api/test/fake_datagram_transport.h
deleted file mode 100644
index 847b4d842ab..00000000000
--- a/chromium/third_party/webrtc/api/test/fake_datagram_transport.h
+++ /dev/null
@@ -1,121 +0,0 @@
-/*
- * Copyright 2019 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef API_TEST_FAKE_DATAGRAM_TRANSPORT_H_
-#define API_TEST_FAKE_DATAGRAM_TRANSPORT_H_
-
-#include <cstddef>
-#include <string>
-
-#include "api/transport/datagram_transport_interface.h"
-#include "api/transport/media/media_transport_interface.h"
-
-namespace webrtc {
-
-// Maxmum size of datagrams sent by |FakeDatagramTransport|.
-constexpr size_t kMaxFakeDatagramSize = 1000;
-
-// Fake datagram transport. Does not support making an actual connection
-// or sending data. Only used for tests that need to stub out a transport.
-class FakeDatagramTransport : public DatagramTransportInterface {
- public:
- FakeDatagramTransport(
- const MediaTransportSettings& settings,
- std::string transport_parameters,
- const std::function<bool(absl::string_view, absl::string_view)>&
- are_parameters_compatible)
- : settings_(settings),
- transport_parameters_(transport_parameters),
- are_parameters_compatible_(are_parameters_compatible) {}
-
- ~FakeDatagramTransport() override { RTC_DCHECK(!state_callback_); }
-
- void Connect(rtc::PacketTransportInternal* packet_transport) override {
- packet_transport_ = packet_transport;
- }
-
- CongestionControlInterface* congestion_control() override {
- return nullptr; // Datagram interface doesn't provide this yet.
- }
-
- void SetTransportStateCallback(
- MediaTransportStateCallback* callback) override {
- state_callback_ = callback;
- }
-
- RTCError SendDatagram(rtc::ArrayView<const uint8_t> data,
- DatagramId datagram_id) override {
- return RTCError::OK();
- }
-
- size_t GetLargestDatagramSize() const override {
- return kMaxFakeDatagramSize;
- }
-
- void SetDatagramSink(DatagramSinkInterface* sink) override {}
-
- std::string GetTransportParameters() const override {
- if (settings_.remote_transport_parameters) {
- return *settings_.remote_transport_parameters;
- }
- return transport_parameters_;
- }
-
- RTCError SetRemoteTransportParameters(
- absl::string_view remote_parameters) override {
- if (are_parameters_compatible_(GetTransportParameters(),
- remote_parameters)) {
- return RTCError::OK();
- }
- return RTCError(RTCErrorType::UNSUPPORTED_PARAMETER,
- "Incompatible remote transport parameters");
- }
-
- RTCError OpenChannel(int channel_id) override {
- return RTCError(RTCErrorType::UNSUPPORTED_OPERATION);
- }
-
- RTCError SendData(int channel_id,
- const SendDataParams& params,
- const rtc::CopyOnWriteBuffer& buffer) override {
- return RTCError(RTCErrorType::UNSUPPORTED_OPERATION);
- }
-
- RTCError CloseChannel(int channel_id) override {
- return RTCError(RTCErrorType::UNSUPPORTED_OPERATION);
- }
-
- void SetDataSink(DataChannelSink* /*sink*/) override {}
-
- bool IsReadyToSend() const override { return false; }
-
- rtc::PacketTransportInternal* packet_transport() { return packet_transport_; }
-
- void set_state(webrtc::MediaTransportState state) {
- if (state_callback_) {
- state_callback_->OnStateChanged(state);
- }
- }
-
- const MediaTransportSettings& settings() { return settings_; }
-
- private:
- const MediaTransportSettings settings_;
- const std::string transport_parameters_;
- const std::function<bool(absl::string_view, absl::string_view)>
- are_parameters_compatible_;
-
- rtc::PacketTransportInternal* packet_transport_ = nullptr;
- MediaTransportStateCallback* state_callback_ = nullptr;
-};
-
-} // namespace webrtc
-
-#endif // API_TEST_FAKE_DATAGRAM_TRANSPORT_H_
diff --git a/chromium/third_party/webrtc/api/test/fake_media_transport.h b/chromium/third_party/webrtc/api/test/fake_media_transport.h
deleted file mode 100644
index 530394710ab..00000000000
--- a/chromium/third_party/webrtc/api/test/fake_media_transport.h
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Copyright 2018 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef API_TEST_FAKE_MEDIA_TRANSPORT_H_
-#define API_TEST_FAKE_MEDIA_TRANSPORT_H_
-
-#include <memory>
-#include <string>
-#include <utility>
-#include <vector>
-
-#include "absl/algorithm/container.h"
-#include "api/test/fake_datagram_transport.h"
-#include "api/transport/media/media_transport_interface.h"
-
-namespace webrtc {
-
-// Fake media transport factory creates fake media transport.
-// Also creates fake datagram transport, since both media and datagram
-// transports are created by |MediaTransportFactory|.
-class FakeMediaTransportFactory : public MediaTransportFactory {
- public:
- explicit FakeMediaTransportFactory(
- const absl::optional<std::string>& transport_offer = "")
- : transport_offer_(transport_offer) {}
- ~FakeMediaTransportFactory() = default;
-
- std::string GetTransportName() const override { return "fake"; }
-
- RTCErrorOr<std::unique_ptr<MediaTransportInterface>> CreateMediaTransport(
- rtc::PacketTransportInternal* packet_transport,
- rtc::Thread* network_thread,
- const MediaTransportSettings& settings) override {
- return RTCError(RTCErrorType::UNSUPPORTED_OPERATION);
- }
-
- RTCErrorOr<std::unique_ptr<MediaTransportInterface>> CreateMediaTransport(
- rtc::Thread* network_thread,
- const MediaTransportSettings& settings) override {
- return RTCError(RTCErrorType::UNSUPPORTED_OPERATION);
- }
-
- RTCErrorOr<std::unique_ptr<DatagramTransportInterface>>
- CreateDatagramTransport(rtc::Thread* network_thread,
- const MediaTransportSettings& settings) override {
- return std::unique_ptr<DatagramTransportInterface>(
- new FakeDatagramTransport(settings, transport_offer_.value_or(""),
- transport_parameters_comparison_));
- }
-
- void set_transport_parameters_comparison(
- std::function<bool(absl::string_view, absl::string_view)> comparison) {
- transport_parameters_comparison_ = std::move(comparison);
- }
-
- private:
- const absl::optional<std::string> transport_offer_;
- std::function<bool(absl::string_view, absl::string_view)>
- transport_parameters_comparison_ =
- [](absl::string_view local, absl::string_view remote) {
- return local == remote;
- };
-};
-
-} // namespace webrtc
-
-#endif // API_TEST_FAKE_MEDIA_TRANSPORT_H_
diff --git a/chromium/third_party/webrtc/api/test/loopback_media_transport.cc b/chromium/third_party/webrtc/api/test/loopback_media_transport.cc
deleted file mode 100644
index 18ce93cd7ec..00000000000
--- a/chromium/third_party/webrtc/api/test/loopback_media_transport.cc
+++ /dev/null
@@ -1,373 +0,0 @@
-/*
- * Copyright 2018 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "api/test/loopback_media_transport.h"
-
-#include <memory>
-
-#include "absl/algorithm/container.h"
-#include "rtc_base/time_utils.h"
-
-namespace webrtc {
-
-namespace {
-
-constexpr size_t kLoopbackMaxDatagramSize = 1200;
-
-class WrapperDatagramTransport : public DatagramTransportInterface {
- public:
- explicit WrapperDatagramTransport(DatagramTransportInterface* wrapped)
- : wrapped_(wrapped) {}
-
- // Datagram transport overrides.
- void Connect(rtc::PacketTransportInternal* packet_transport) override {
- return wrapped_->Connect(packet_transport);
- }
-
- CongestionControlInterface* congestion_control() override {
- return wrapped_->congestion_control();
- }
-
- void SetTransportStateCallback(
- MediaTransportStateCallback* callback) override {
- return wrapped_->SetTransportStateCallback(callback);
- }
-
- RTCError SendDatagram(rtc::ArrayView<const uint8_t> data,
- DatagramId datagram_id) override {
- return wrapped_->SendDatagram(data, datagram_id);
- }
-
- size_t GetLargestDatagramSize() const override {
- return wrapped_->GetLargestDatagramSize();
- }
-
- void SetDatagramSink(DatagramSinkInterface* sink) override {
- return wrapped_->SetDatagramSink(sink);
- }
-
- std::string GetTransportParameters() const override {
- return wrapped_->GetTransportParameters();
- }
-
- RTCError SetRemoteTransportParameters(absl::string_view parameters) override {
- return wrapped_->SetRemoteTransportParameters(parameters);
- }
-
- // Data channel overrides.
- RTCError OpenChannel(int channel_id) override {
- return wrapped_->OpenChannel(channel_id);
- }
-
- RTCError SendData(int channel_id,
- const SendDataParams& params,
- const rtc::CopyOnWriteBuffer& buffer) override {
- return wrapped_->SendData(channel_id, params, buffer);
- }
-
- RTCError CloseChannel(int channel_id) override {
- return wrapped_->CloseChannel(channel_id);
- }
-
- void SetDataSink(DataChannelSink* sink) override {
- wrapped_->SetDataSink(sink);
- }
-
- bool IsReadyToSend() const override { return wrapped_->IsReadyToSend(); }
-
- private:
- DatagramTransportInterface* wrapped_;
-};
-
-} // namespace
-
-WrapperMediaTransportFactory::WrapperMediaTransportFactory(
- DatagramTransportInterface* wrapped_datagram_transport)
- : wrapped_datagram_transport_(wrapped_datagram_transport) {}
-
-WrapperMediaTransportFactory::WrapperMediaTransportFactory(
- MediaTransportFactory* wrapped)
- : wrapped_factory_(wrapped) {}
-
-RTCErrorOr<std::unique_ptr<MediaTransportInterface>>
-WrapperMediaTransportFactory::CreateMediaTransport(
- rtc::PacketTransportInternal* packet_transport,
- rtc::Thread* network_thread,
- const MediaTransportSettings& settings) {
- return RTCError(RTCErrorType::UNSUPPORTED_OPERATION);
-}
-
-RTCErrorOr<std::unique_ptr<DatagramTransportInterface>>
-WrapperMediaTransportFactory::CreateDatagramTransport(
- rtc::Thread* network_thread,
- const MediaTransportSettings& settings) {
- created_transport_count_++;
- if (wrapped_factory_) {
- return wrapped_factory_->CreateDatagramTransport(network_thread, settings);
- }
- return {
- std::make_unique<WrapperDatagramTransport>(wrapped_datagram_transport_)};
-}
-
-std::string WrapperMediaTransportFactory::GetTransportName() const {
- if (wrapped_factory_) {
- return wrapped_factory_->GetTransportName();
- }
- return "wrapped-transport";
-}
-
-int WrapperMediaTransportFactory::created_transport_count() const {
- return created_transport_count_;
-}
-
-RTCErrorOr<std::unique_ptr<MediaTransportInterface>>
-WrapperMediaTransportFactory::CreateMediaTransport(
- rtc::Thread* network_thread,
- const MediaTransportSettings& settings) {
- return RTCError(RTCErrorType::UNSUPPORTED_OPERATION);
-}
-
-MediaTransportPair::MediaTransportPair(rtc::Thread* thread)
- : first_datagram_transport_(thread),
- second_datagram_transport_(thread),
- first_factory_(&first_datagram_transport_),
- second_factory_(&second_datagram_transport_) {
- first_datagram_transport_.Connect(&second_datagram_transport_);
- second_datagram_transport_.Connect(&first_datagram_transport_);
-}
-
-MediaTransportPair::~MediaTransportPair() = default;
-
-MediaTransportPair::LoopbackDataChannelTransport::LoopbackDataChannelTransport(
- rtc::Thread* thread)
- : thread_(thread) {}
-
-MediaTransportPair::LoopbackDataChannelTransport::
- ~LoopbackDataChannelTransport() {
- RTC_CHECK(data_sink_ == nullptr);
-}
-
-void MediaTransportPair::LoopbackDataChannelTransport::Connect(
- LoopbackDataChannelTransport* other) {
- other_ = other;
-}
-
-RTCError MediaTransportPair::LoopbackDataChannelTransport::OpenChannel(
- int channel_id) {
- // No-op. No need to open channels for the loopback.
- return RTCError::OK();
-}
-
-RTCError MediaTransportPair::LoopbackDataChannelTransport::SendData(
- int channel_id,
- const SendDataParams& params,
- const rtc::CopyOnWriteBuffer& buffer) {
- invoker_.AsyncInvoke<void>(RTC_FROM_HERE, thread_,
- [this, channel_id, params, buffer] {
- other_->OnData(channel_id, params.type, buffer);
- });
- return RTCError::OK();
-}
-
-RTCError MediaTransportPair::LoopbackDataChannelTransport::CloseChannel(
- int channel_id) {
- invoker_.AsyncInvoke<void>(RTC_FROM_HERE, thread_, [this, channel_id] {
- other_->OnRemoteCloseChannel(channel_id);
- rtc::CritScope lock(&sink_lock_);
- if (data_sink_) {
- data_sink_->OnChannelClosed(channel_id);
- }
- });
- return RTCError::OK();
-}
-
-void MediaTransportPair::LoopbackDataChannelTransport::SetDataSink(
- DataChannelSink* sink) {
- rtc::CritScope lock(&sink_lock_);
- data_sink_ = sink;
- if (data_sink_ && ready_to_send_) {
- data_sink_->OnReadyToSend();
- }
-}
-
-bool MediaTransportPair::LoopbackDataChannelTransport::IsReadyToSend() const {
- rtc::CritScope lock(&sink_lock_);
- return ready_to_send_;
-}
-
-void MediaTransportPair::LoopbackDataChannelTransport::FlushAsyncInvokes() {
- invoker_.Flush(thread_);
-}
-
-void MediaTransportPair::LoopbackDataChannelTransport::OnData(
- int channel_id,
- DataMessageType type,
- const rtc::CopyOnWriteBuffer& buffer) {
- rtc::CritScope lock(&sink_lock_);
- if (data_sink_) {
- data_sink_->OnDataReceived(channel_id, type, buffer);
- }
-}
-
-void MediaTransportPair::LoopbackDataChannelTransport::OnRemoteCloseChannel(
- int channel_id) {
- rtc::CritScope lock(&sink_lock_);
- if (data_sink_) {
- data_sink_->OnChannelClosing(channel_id);
- data_sink_->OnChannelClosed(channel_id);
- }
-}
-
-void MediaTransportPair::LoopbackDataChannelTransport::OnReadyToSend(
- bool ready_to_send) {
- invoker_.AsyncInvoke<void>(RTC_FROM_HERE, thread_, [this, ready_to_send] {
- rtc::CritScope lock(&sink_lock_);
- ready_to_send_ = ready_to_send;
- // Propagate state to data channel sink, if present.
- if (data_sink_ && ready_to_send_) {
- data_sink_->OnReadyToSend();
- }
- });
-}
-
-MediaTransportPair::LoopbackDatagramTransport::LoopbackDatagramTransport(
- rtc::Thread* thread)
- : thread_(thread), dc_transport_(thread) {}
-
-void MediaTransportPair::LoopbackDatagramTransport::Connect(
- LoopbackDatagramTransport* other) {
- other_ = other;
- dc_transport_.Connect(&other->dc_transport_);
-}
-
-void MediaTransportPair::LoopbackDatagramTransport::Connect(
- rtc::PacketTransportInternal* packet_transport) {
- if (state_after_connect_) {
- SetState(*state_after_connect_);
- }
-}
-
-CongestionControlInterface*
-MediaTransportPair::LoopbackDatagramTransport::congestion_control() {
- return nullptr;
-}
-
-void MediaTransportPair::LoopbackDatagramTransport::SetTransportStateCallback(
- MediaTransportStateCallback* callback) {
- RTC_DCHECK_RUN_ON(thread_);
- state_callback_ = callback;
- if (state_callback_) {
- state_callback_->OnStateChanged(state_);
- }
-}
-
-RTCError MediaTransportPair::LoopbackDatagramTransport::SendDatagram(
- rtc::ArrayView<const uint8_t> data,
- DatagramId datagram_id) {
- rtc::CopyOnWriteBuffer buffer;
- buffer.SetData(data.data(), data.size());
- invoker_.AsyncInvoke<void>(
- RTC_FROM_HERE, thread_, [this, datagram_id, buffer = std::move(buffer)] {
- RTC_DCHECK_RUN_ON(thread_);
- other_->DeliverDatagram(std::move(buffer));
- if (sink_) {
- DatagramAck ack;
- ack.datagram_id = datagram_id;
- ack.receive_timestamp = Timestamp::Micros(rtc::TimeMicros());
- sink_->OnDatagramAcked(ack);
- }
- });
- return RTCError::OK();
-}
-
-size_t MediaTransportPair::LoopbackDatagramTransport::GetLargestDatagramSize()
- const {
- return kLoopbackMaxDatagramSize;
-}
-
-void MediaTransportPair::LoopbackDatagramTransport::SetDatagramSink(
- DatagramSinkInterface* sink) {
- RTC_DCHECK_RUN_ON(thread_);
- sink_ = sink;
-}
-
-std::string
-MediaTransportPair::LoopbackDatagramTransport::GetTransportParameters() const {
- return transport_parameters_;
-}
-
-RTCError
-MediaTransportPair::LoopbackDatagramTransport::SetRemoteTransportParameters(
- absl::string_view remote_parameters) {
- RTC_DCHECK_RUN_ON(thread_);
- if (transport_parameters_comparison_(GetTransportParameters(),
- remote_parameters)) {
- return RTCError::OK();
- }
- return RTCError(RTCErrorType::UNSUPPORTED_PARAMETER,
- "Incompatible remote transport parameters");
-}
-
-RTCError MediaTransportPair::LoopbackDatagramTransport::OpenChannel(
- int channel_id) {
- return dc_transport_.OpenChannel(channel_id);
-}
-
-RTCError MediaTransportPair::LoopbackDatagramTransport::SendData(
- int channel_id,
- const SendDataParams& params,
- const rtc::CopyOnWriteBuffer& buffer) {
- return dc_transport_.SendData(channel_id, params, buffer);
-}
-
-RTCError MediaTransportPair::LoopbackDatagramTransport::CloseChannel(
- int channel_id) {
- return dc_transport_.CloseChannel(channel_id);
-}
-
-void MediaTransportPair::LoopbackDatagramTransport::SetDataSink(
- DataChannelSink* sink) {
- dc_transport_.SetDataSink(sink);
-}
-
-bool MediaTransportPair::LoopbackDatagramTransport::IsReadyToSend() const {
- return dc_transport_.IsReadyToSend();
-}
-
-void MediaTransportPair::LoopbackDatagramTransport::SetState(
- MediaTransportState state) {
- invoker_.AsyncInvoke<void>(RTC_FROM_HERE, thread_, [this, state] {
- RTC_DCHECK_RUN_ON(thread_);
- state_ = state;
- if (state_callback_) {
- state_callback_->OnStateChanged(state_);
- }
- });
- dc_transport_.OnReadyToSend(state == MediaTransportState::kWritable);
-}
-
-void MediaTransportPair::LoopbackDatagramTransport::SetStateAfterConnect(
- MediaTransportState state) {
- state_after_connect_ = state;
-}
-
-void MediaTransportPair::LoopbackDatagramTransport::FlushAsyncInvokes() {
- dc_transport_.FlushAsyncInvokes();
-}
-
-void MediaTransportPair::LoopbackDatagramTransport::DeliverDatagram(
- rtc::CopyOnWriteBuffer buffer) {
- RTC_DCHECK_RUN_ON(thread_);
- if (sink_) {
- sink_->OnDatagramReceived(buffer);
- }
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/api/test/loopback_media_transport.h b/chromium/third_party/webrtc/api/test/loopback_media_transport.h
deleted file mode 100644
index 468965ba311..00000000000
--- a/chromium/third_party/webrtc/api/test/loopback_media_transport.h
+++ /dev/null
@@ -1,269 +0,0 @@
-/*
- * Copyright 2018 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef API_TEST_LOOPBACK_MEDIA_TRANSPORT_H_
-#define API_TEST_LOOPBACK_MEDIA_TRANSPORT_H_
-
-#include <memory>
-#include <string>
-#include <utility>
-#include <vector>
-
-#include "api/transport/datagram_transport_interface.h"
-#include "api/transport/media/media_transport_interface.h"
-#include "rtc_base/async_invoker.h"
-#include "rtc_base/critical_section.h"
-#include "rtc_base/thread.h"
-#include "rtc_base/thread_checker.h"
-
-namespace webrtc {
-
-// Wrapper used to hand out unique_ptrs to loopback media
-// transport without ownership changes to the underlying
-// transport.
-// It works in two modes:
-// It can either wrap a factory, or it can wrap an existing interface.
-// In the former mode, it delegates the work to the wrapped factory.
-// In the latter mode, it always returns static instance of the transport
-// interface.
-//
-// Example use:
-// Factory wrap_static_interface = Wrapper(media_transport_interface);
-// Factory wrap_factory = Wrapper(wrap_static_interface);
-// The second factory may be created multiple times, and ownership may be passed
-// to the client. The first factory counts the number of invocations of
-// CreateMediaTransport();
-class WrapperMediaTransportFactory : public MediaTransportFactory {
- public:
- explicit WrapperMediaTransportFactory(
- DatagramTransportInterface* wrapped_datagram_transport);
- explicit WrapperMediaTransportFactory(MediaTransportFactory* wrapped);
-
- RTCErrorOr<std::unique_ptr<MediaTransportInterface>> CreateMediaTransport(
- rtc::PacketTransportInternal* packet_transport,
- rtc::Thread* network_thread,
- const MediaTransportSettings& settings) override;
-
- RTCErrorOr<std::unique_ptr<MediaTransportInterface>> CreateMediaTransport(
- rtc::Thread* network_thread,
- const MediaTransportSettings& settings) override;
-
- RTCErrorOr<std::unique_ptr<DatagramTransportInterface>>
- CreateDatagramTransport(rtc::Thread* network_thread,
- const MediaTransportSettings& settings) override;
-
- std::string GetTransportName() const override;
-
- int created_transport_count() const;
-
- private:
- DatagramTransportInterface* wrapped_datagram_transport_ = nullptr;
- MediaTransportFactory* wrapped_factory_ = nullptr;
- int created_transport_count_ = 0;
-};
-
-// Contains two MediaTransportsInterfaces that are connected to each other.
-// Currently supports audio only.
-class MediaTransportPair {
- public:
- struct Stats {
- int sent_audio_frames = 0;
- int received_audio_frames = 0;
- int sent_video_frames = 0;
- int received_video_frames = 0;
- };
-
- explicit MediaTransportPair(rtc::Thread* thread);
- ~MediaTransportPair();
-
- DatagramTransportInterface* first_datagram_transport() {
- return &first_datagram_transport_;
- }
- DatagramTransportInterface* second_datagram_transport() {
- return &second_datagram_transport_;
- }
-
- std::unique_ptr<MediaTransportFactory> first_factory() {
- return std::make_unique<WrapperMediaTransportFactory>(&first_factory_);
- }
-
- std::unique_ptr<MediaTransportFactory> second_factory() {
- return std::make_unique<WrapperMediaTransportFactory>(&second_factory_);
- }
-
- void SetState(MediaTransportState state) {
- first_datagram_transport_.SetState(state);
- second_datagram_transport_.SetState(state);
- }
-
- void SetFirstState(MediaTransportState state) {
- first_datagram_transport_.SetState(state);
- }
-
- void SetSecondStateAfterConnect(MediaTransportState state) {
- second_datagram_transport_.SetState(state);
- }
-
- void SetFirstDatagramTransportParameters(const std::string& params) {
- first_datagram_transport_.set_transport_parameters(params);
- }
-
- void SetSecondDatagramTransportParameters(const std::string& params) {
- second_datagram_transport_.set_transport_parameters(params);
- }
-
- void SetFirstDatagramTransportParametersComparison(
- std::function<bool(absl::string_view, absl::string_view)> comparison) {
- first_datagram_transport_.set_transport_parameters_comparison(
- std::move(comparison));
- }
-
- void SetSecondDatagramTransportParametersComparison(
- std::function<bool(absl::string_view, absl::string_view)> comparison) {
- second_datagram_transport_.set_transport_parameters_comparison(
- std::move(comparison));
- }
-
- void FlushAsyncInvokes() {
- first_datagram_transport_.FlushAsyncInvokes();
- second_datagram_transport_.FlushAsyncInvokes();
- }
-
- int first_factory_transport_count() const {
- return first_factory_.created_transport_count();
- }
-
- int second_factory_transport_count() const {
- return second_factory_.created_transport_count();
- }
-
- private:
- class LoopbackDataChannelTransport : public DataChannelTransportInterface {
- public:
- explicit LoopbackDataChannelTransport(rtc::Thread* thread);
- ~LoopbackDataChannelTransport() override;
-
- void Connect(LoopbackDataChannelTransport* other);
-
- RTCError OpenChannel(int channel_id) override;
-
- RTCError SendData(int channel_id,
- const SendDataParams& params,
- const rtc::CopyOnWriteBuffer& buffer) override;
-
- RTCError CloseChannel(int channel_id) override;
-
- bool IsReadyToSend() const override;
-
- void SetDataSink(DataChannelSink* sink) override;
-
- void OnReadyToSend(bool ready_to_send);
-
- void FlushAsyncInvokes();
-
- private:
- void OnData(int channel_id,
- DataMessageType type,
- const rtc::CopyOnWriteBuffer& buffer);
-
- void OnRemoteCloseChannel(int channel_id);
-
- rtc::Thread* const thread_;
- rtc::CriticalSection sink_lock_;
- DataChannelSink* data_sink_ RTC_GUARDED_BY(sink_lock_) = nullptr;
-
- bool ready_to_send_ RTC_GUARDED_BY(sink_lock_) = false;
-
- LoopbackDataChannelTransport* other_;
-
- rtc::AsyncInvoker invoker_;
- };
-
- class LoopbackDatagramTransport : public DatagramTransportInterface {
- public:
- explicit LoopbackDatagramTransport(rtc::Thread* thread);
-
- void Connect(LoopbackDatagramTransport* other);
-
- // Datagram transport overrides.
- void Connect(rtc::PacketTransportInternal* packet_transport) override;
- CongestionControlInterface* congestion_control() override;
- void SetTransportStateCallback(
- MediaTransportStateCallback* callback) override;
- RTCError SendDatagram(rtc::ArrayView<const uint8_t> data,
- DatagramId datagram_id) override;
- size_t GetLargestDatagramSize() const override;
- void SetDatagramSink(DatagramSinkInterface* sink) override;
- std::string GetTransportParameters() const override;
- RTCError SetRemoteTransportParameters(
- absl::string_view remote_parameters) override;
-
- // Data channel overrides.
- RTCError OpenChannel(int channel_id) override;
- RTCError SendData(int channel_id,
- const SendDataParams& params,
- const rtc::CopyOnWriteBuffer& buffer) override;
- RTCError CloseChannel(int channel_id) override;
- void SetDataSink(DataChannelSink* sink) override;
- bool IsReadyToSend() const override;
-
- // Loopback-specific functionality.
- void SetState(MediaTransportState state);
-
- // When Connect() is called, the datagram transport will enter this state.
- // This is useful for mimicking zero-RTT connectivity, for example.
- void SetStateAfterConnect(MediaTransportState state);
- void FlushAsyncInvokes();
-
- void set_transport_parameters(const std::string& value) {
- transport_parameters_ = value;
- }
-
- void set_transport_parameters_comparison(
- std::function<bool(absl::string_view, absl::string_view)> comparison) {
- thread_->Invoke<void>(
- RTC_FROM_HERE, [this, comparison = std::move(comparison)] {
- RTC_DCHECK_RUN_ON(thread_);
- transport_parameters_comparison_ = std::move(comparison);
- });
- }
-
- private:
- void DeliverDatagram(rtc::CopyOnWriteBuffer buffer);
-
- rtc::Thread* thread_;
- LoopbackDataChannelTransport dc_transport_;
-
- MediaTransportState state_ RTC_GUARDED_BY(thread_) =
- MediaTransportState::kPending;
- DatagramSinkInterface* sink_ RTC_GUARDED_BY(thread_) = nullptr;
- MediaTransportStateCallback* state_callback_ RTC_GUARDED_BY(thread_) =
- nullptr;
- LoopbackDatagramTransport* other_;
-
- std::string transport_parameters_;
- std::function<bool(absl::string_view, absl::string_view)>
- transport_parameters_comparison_ RTC_GUARDED_BY(thread_) =
- [](absl::string_view a, absl::string_view b) { return a == b; };
-
- absl::optional<MediaTransportState> state_after_connect_;
-
- rtc::AsyncInvoker invoker_;
- };
-
- LoopbackDatagramTransport first_datagram_transport_;
- LoopbackDatagramTransport second_datagram_transport_;
- WrapperMediaTransportFactory first_factory_;
- WrapperMediaTransportFactory second_factory_;
-};
-
-} // namespace webrtc
-
-#endif // API_TEST_LOOPBACK_MEDIA_TRANSPORT_H_
diff --git a/chromium/third_party/webrtc/api/test/loopback_media_transport_unittest.cc b/chromium/third_party/webrtc/api/test/loopback_media_transport_unittest.cc
deleted file mode 100644
index f036de3eae0..00000000000
--- a/chromium/third_party/webrtc/api/test/loopback_media_transport_unittest.cc
+++ /dev/null
@@ -1,201 +0,0 @@
-/*
- * Copyright 2018 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "api/test/loopback_media_transport.h"
-
-#include <algorithm>
-#include <memory>
-#include <vector>
-
-#include "test/gmock.h"
-
-namespace webrtc {
-
-namespace {
-
-class MockMediaTransportAudioSinkInterface
- : public MediaTransportAudioSinkInterface {
- public:
- MOCK_METHOD2(OnData, void(uint64_t, MediaTransportEncodedAudioFrame));
-};
-
-class MockMediaTransportVideoSinkInterface
- : public MediaTransportVideoSinkInterface {
- public:
- MOCK_METHOD2(OnData, void(uint64_t, MediaTransportEncodedVideoFrame));
-};
-
-class MockMediaTransportKeyFrameRequestCallback
- : public MediaTransportKeyFrameRequestCallback {
- public:
- MOCK_METHOD1(OnKeyFrameRequested, void(uint64_t));
-};
-
-class MockDataChannelSink : public DataChannelSink {
- public:
- MOCK_METHOD3(OnDataReceived,
- void(int, DataMessageType, const rtc::CopyOnWriteBuffer&));
- MOCK_METHOD1(OnChannelClosing, void(int));
- MOCK_METHOD1(OnChannelClosed, void(int));
- MOCK_METHOD0(OnReadyToSend, void());
-};
-
-class MockStateCallback : public MediaTransportStateCallback {
- public:
- MOCK_METHOD1(OnStateChanged, void(MediaTransportState));
-};
-
-} // namespace
-
-TEST(LoopbackMediaTransport, DataDeliveredToSink) {
- std::unique_ptr<rtc::Thread> thread = rtc::Thread::Create();
- thread->Start();
- MediaTransportPair transport_pair(thread.get());
-
- MockDataChannelSink sink;
- transport_pair.first_datagram_transport()->SetDataSink(&sink);
-
- const int channel_id = 1;
- EXPECT_CALL(
- sink, OnDataReceived(
- channel_id, DataMessageType::kText,
- ::testing::Property<rtc::CopyOnWriteBuffer, const char*>(
- &rtc::CopyOnWriteBuffer::cdata, ::testing::StrEq("foo"))));
-
- SendDataParams params;
- params.type = DataMessageType::kText;
- rtc::CopyOnWriteBuffer buffer("foo");
- transport_pair.second_datagram_transport()->SendData(channel_id, params,
- buffer);
-
- transport_pair.FlushAsyncInvokes();
- transport_pair.first_datagram_transport()->SetDataSink(nullptr);
-}
-
-TEST(LoopbackMediaTransport, CloseDeliveredToSink) {
- std::unique_ptr<rtc::Thread> thread = rtc::Thread::Create();
- thread->Start();
- MediaTransportPair transport_pair(thread.get());
-
- MockDataChannelSink first_sink;
- transport_pair.first_datagram_transport()->SetDataSink(&first_sink);
-
- MockDataChannelSink second_sink;
- transport_pair.second_datagram_transport()->SetDataSink(&second_sink);
-
- const int channel_id = 1;
- {
- ::testing::InSequence s;
- EXPECT_CALL(second_sink, OnChannelClosing(channel_id));
- EXPECT_CALL(second_sink, OnChannelClosed(channel_id));
- EXPECT_CALL(first_sink, OnChannelClosed(channel_id));
- }
-
- transport_pair.first_datagram_transport()->CloseChannel(channel_id);
-
- transport_pair.FlushAsyncInvokes();
- transport_pair.first_datagram_transport()->SetDataSink(nullptr);
- transport_pair.second_datagram_transport()->SetDataSink(nullptr);
-}
-
-TEST(LoopbackMediaTransport, InitialStateDeliveredWhenCallbackSet) {
- std::unique_ptr<rtc::Thread> thread = rtc::Thread::Create();
- thread->Start();
- MediaTransportPair transport_pair(thread.get());
-
- MockStateCallback state_callback;
- EXPECT_CALL(state_callback, OnStateChanged(MediaTransportState::kPending));
-
- thread->Invoke<void>(RTC_FROM_HERE, [&transport_pair, &state_callback] {
- transport_pair.first_datagram_transport()->SetTransportStateCallback(
- &state_callback);
- });
- transport_pair.FlushAsyncInvokes();
-}
-
-TEST(LoopbackMediaTransport, ChangedStateDeliveredWhenCallbackSet) {
- std::unique_ptr<rtc::Thread> thread = rtc::Thread::Create();
- thread->Start();
- MediaTransportPair transport_pair(thread.get());
-
- transport_pair.SetState(MediaTransportState::kWritable);
- transport_pair.FlushAsyncInvokes();
-
- MockStateCallback state_callback;
-
- EXPECT_CALL(state_callback, OnStateChanged(MediaTransportState::kWritable));
- thread->Invoke<void>(RTC_FROM_HERE, [&transport_pair, &state_callback] {
- transport_pair.first_datagram_transport()->SetTransportStateCallback(
- &state_callback);
- });
- transport_pair.FlushAsyncInvokes();
-}
-
-TEST(LoopbackMediaTransport, StateChangeDeliveredToCallback) {
- std::unique_ptr<rtc::Thread> thread = rtc::Thread::Create();
- thread->Start();
- MediaTransportPair transport_pair(thread.get());
-
- MockStateCallback state_callback;
-
- EXPECT_CALL(state_callback, OnStateChanged(MediaTransportState::kPending));
- EXPECT_CALL(state_callback, OnStateChanged(MediaTransportState::kWritable));
- thread->Invoke<void>(RTC_FROM_HERE, [&transport_pair, &state_callback] {
- transport_pair.first_datagram_transport()->SetTransportStateCallback(
- &state_callback);
- });
- transport_pair.SetState(MediaTransportState::kWritable);
- transport_pair.FlushAsyncInvokes();
-}
-
-TEST(LoopbackMediaTransport, NotReadyToSendWhenDataSinkSet) {
- std::unique_ptr<rtc::Thread> thread = rtc::Thread::Create();
- thread->Start();
- MediaTransportPair transport_pair(thread.get());
-
- MockDataChannelSink data_channel_sink;
- EXPECT_CALL(data_channel_sink, OnReadyToSend()).Times(0);
-
- transport_pair.first_datagram_transport()->SetDataSink(&data_channel_sink);
- transport_pair.FlushAsyncInvokes();
- transport_pair.first_datagram_transport()->SetDataSink(nullptr);
-}
-
-TEST(LoopbackMediaTransport, ReadyToSendWhenDataSinkSet) {
- std::unique_ptr<rtc::Thread> thread = rtc::Thread::Create();
- thread->Start();
- MediaTransportPair transport_pair(thread.get());
-
- transport_pair.SetState(MediaTransportState::kWritable);
- transport_pair.FlushAsyncInvokes();
-
- MockDataChannelSink data_channel_sink;
- EXPECT_CALL(data_channel_sink, OnReadyToSend());
-
- transport_pair.first_datagram_transport()->SetDataSink(&data_channel_sink);
- transport_pair.FlushAsyncInvokes();
- transport_pair.first_datagram_transport()->SetDataSink(nullptr);
-}
-
-TEST(LoopbackMediaTransport, StateChangeDeliveredToDataSink) {
- std::unique_ptr<rtc::Thread> thread = rtc::Thread::Create();
- thread->Start();
- MediaTransportPair transport_pair(thread.get());
-
- MockDataChannelSink data_channel_sink;
- EXPECT_CALL(data_channel_sink, OnReadyToSend());
-
- transport_pair.first_datagram_transport()->SetDataSink(&data_channel_sink);
- transport_pair.SetState(MediaTransportState::kWritable);
- transport_pair.FlushAsyncInvokes();
- transport_pair.first_datagram_transport()->SetDataSink(nullptr);
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/api/test/mock_audio_mixer.h b/chromium/third_party/webrtc/api/test/mock_audio_mixer.h
index aee717bebf4..88dc108ca3c 100644
--- a/chromium/third_party/webrtc/api/test/mock_audio_mixer.h
+++ b/chromium/third_party/webrtc/api/test/mock_audio_mixer.h
@@ -19,12 +19,9 @@ namespace test {
class MockAudioMixer : public AudioMixer {
public:
- MOCK_METHOD(bool, AddSource, (Source * audio_source), (override));
- MOCK_METHOD(void, RemoveSource, (Source * audio_source), (override));
- MOCK_METHOD(void,
- Mix,
- (size_t number_of_channels, AudioFrame* audio_frame_for_mixing),
- (override));
+ MOCK_METHOD(bool, AddSource, (Source*), (override));
+ MOCK_METHOD(void, RemoveSource, (Source*), (override));
+ MOCK_METHOD(void, Mix, (size_t number_of_channels, AudioFrame*), (override));
};
} // namespace test
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/api/test/mock_peer_connection_factory_interface.h b/chromium/third_party/webrtc/api/test/mock_peer_connection_factory_interface.h
new file mode 100644
index 00000000000..19c3f4063e9
--- /dev/null
+++ b/chromium/third_party/webrtc/api/test/mock_peer_connection_factory_interface.h
@@ -0,0 +1,75 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_TEST_MOCK_PEER_CONNECTION_FACTORY_INTERFACE_H_
+#define API_TEST_MOCK_PEER_CONNECTION_FACTORY_INTERFACE_H_
+
+#include <memory>
+#include <string>
+
+#include "api/peer_connection_interface.h"
+#include "test/gmock.h"
+
+namespace webrtc {
+
+class MockPeerConnectionFactoryInterface final
+ : public rtc::RefCountedObject<webrtc::PeerConnectionFactoryInterface> {
+ public:
+ rtc::scoped_refptr<MockPeerConnectionFactoryInterface> Create() {
+ return new MockPeerConnectionFactoryInterface();
+ }
+
+ MOCK_METHOD(void, SetOptions, (const Options&), (override));
+ MOCK_METHOD(rtc::scoped_refptr<PeerConnectionInterface>,
+ CreatePeerConnection,
+ (const PeerConnectionInterface::RTCConfiguration&,
+ PeerConnectionDependencies),
+ (override));
+ MOCK_METHOD(rtc::scoped_refptr<PeerConnectionInterface>,
+ CreatePeerConnection,
+ (const PeerConnectionInterface::RTCConfiguration&,
+ std::unique_ptr<cricket::PortAllocator>,
+ std::unique_ptr<rtc::RTCCertificateGeneratorInterface>,
+ PeerConnectionObserver*),
+ (override));
+ MOCK_METHOD(RtpCapabilities,
+ GetRtpSenderCapabilities,
+ (cricket::MediaType),
+ (const override));
+ MOCK_METHOD(RtpCapabilities,
+ GetRtpReceiverCapabilities,
+ (cricket::MediaType),
+ (const override));
+ MOCK_METHOD(rtc::scoped_refptr<MediaStreamInterface>,
+ CreateLocalMediaStream,
+ (const std::string&),
+ (override));
+ MOCK_METHOD(rtc::scoped_refptr<AudioSourceInterface>,
+ CreateAudioSource,
+ (const cricket::AudioOptions&),
+ (override));
+ MOCK_METHOD(rtc::scoped_refptr<VideoTrackInterface>,
+ CreateVideoTrack,
+ (const std::string&, VideoTrackSourceInterface*),
+ (override));
+ MOCK_METHOD(rtc::scoped_refptr<AudioTrackInterface>,
+ CreateAudioTrack,
+ (const std::string&, AudioSourceInterface*),
+ (override));
+ MOCK_METHOD(bool, StartAecDump, (FILE*, int64_t), (override));
+ MOCK_METHOD(void, StopAecDump, (), (override));
+
+ protected:
+ MockPeerConnectionFactoryInterface() = default;
+};
+
+} // namespace webrtc
+
+#endif // API_TEST_MOCK_PEER_CONNECTION_FACTORY_INTERFACE_H_
diff --git a/chromium/third_party/webrtc/api/test/mock_transformable_video_frame.h b/chromium/third_party/webrtc/api/test/mock_transformable_video_frame.h
new file mode 100644
index 00000000000..36798b5d730
--- /dev/null
+++ b/chromium/third_party/webrtc/api/test/mock_transformable_video_frame.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_TEST_MOCK_TRANSFORMABLE_VIDEO_FRAME_H_
+#define API_TEST_MOCK_TRANSFORMABLE_VIDEO_FRAME_H_
+
+#include <vector>
+
+#include "api/frame_transformer_interface.h"
+#include "test/gmock.h"
+
+namespace webrtc {
+
+class MockTransformableVideoFrame
+ : public webrtc::TransformableVideoFrameInterface {
+ public:
+ MOCK_METHOD(rtc::ArrayView<const uint8_t>, GetData, (), (const override));
+ MOCK_METHOD(void, SetData, (rtc::ArrayView<const uint8_t> data), (override));
+ MOCK_METHOD(uint32_t, GetTimestamp, (), (const override));
+ MOCK_METHOD(uint32_t, GetSsrc, (), (const, override));
+ MOCK_METHOD(bool, IsKeyFrame, (), (const, override));
+ MOCK_METHOD(std::vector<uint8_t>, GetAdditionalData, (), (const, override));
+ MOCK_METHOD(const webrtc::VideoFrameMetadata&,
+ GetMetadata,
+ (),
+ (const, override));
+};
+
+} // namespace webrtc
+
+#endif // API_TEST_MOCK_TRANSFORMABLE_VIDEO_FRAME_H_
diff --git a/chromium/third_party/webrtc/api/test/mock_video_bitrate_allocator_factory.h b/chromium/third_party/webrtc/api/test/mock_video_bitrate_allocator_factory.h
index c7d883a5b9c..16af191970e 100644
--- a/chromium/third_party/webrtc/api/test/mock_video_bitrate_allocator_factory.h
+++ b/chromium/third_party/webrtc/api/test/mock_video_bitrate_allocator_factory.h
@@ -26,7 +26,7 @@ class MockVideoBitrateAllocatorFactory
CreateVideoBitrateAllocator,
(const VideoCodec&),
(override));
- MOCK_METHOD(void, Die, (), ());
+ MOCK_METHOD(void, Die, ());
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/api/test/mock_video_decoder_factory.h b/chromium/third_party/webrtc/api/test/mock_video_decoder_factory.h
index 1f832a156bd..98a5d40eb6f 100644
--- a/chromium/third_party/webrtc/api/test/mock_video_decoder_factory.h
+++ b/chromium/third_party/webrtc/api/test/mock_video_decoder_factory.h
@@ -27,12 +27,12 @@ class MockVideoDecoderFactory : public webrtc::VideoDecoderFactory {
MOCK_METHOD(std::vector<webrtc::SdpVideoFormat>,
GetSupportedFormats,
(),
- (const override));
+ (const, override));
MOCK_METHOD(std::unique_ptr<webrtc::VideoDecoder>,
CreateVideoDecoder,
(const webrtc::SdpVideoFormat&),
(override));
- MOCK_METHOD(void, Die, (), ());
+ MOCK_METHOD(void, Die, ());
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/api/test/mock_video_encoder.h b/chromium/third_party/webrtc/api/test/mock_video_encoder.h
index c4b6b3e50c1..26d758fd6a3 100644
--- a/chromium/third_party/webrtc/api/test/mock_video_encoder.h
+++ b/chromium/third_party/webrtc/api/test/mock_video_encoder.h
@@ -23,8 +23,8 @@ class MockEncodedImageCallback : public EncodedImageCallback {
MOCK_METHOD(Result,
OnEncodedImage,
(const EncodedImage& encodedImage,
- const CodecSpecificInfo* codecSpecificInfo,
- const RTPFragmentationHeader* fragmentation),
+ const CodecSpecificInfo*,
+ const RTPFragmentationHeader*),
(override));
MOCK_METHOD(void, OnDroppedFrame, (DropReason reason), (override));
};
@@ -33,31 +33,41 @@ class MockVideoEncoder : public VideoEncoder {
public:
MOCK_METHOD(void,
SetFecControllerOverride,
- (FecControllerOverride * fec_controller_override),
+ (FecControllerOverride*),
(override));
MOCK_METHOD(int32_t,
InitEncode,
- (const VideoCodec* codecSettings,
- int32_t numberOfCores,
- size_t maxPayloadSize),
+ (const VideoCodec*, int32_t numberOfCores, size_t maxPayloadSize),
+ (override));
+ MOCK_METHOD(int32_t,
+ InitEncode,
+ (const VideoCodec*, const VideoEncoder::Settings& settings),
(override));
- MOCK_METHOD2(InitEncode,
- int32_t(const VideoCodec* codecSettings,
- const VideoEncoder::Settings& settings));
- MOCK_METHOD2(Encode,
- int32_t(const VideoFrame& inputImage,
- const std::vector<VideoFrameType>* frame_types));
- MOCK_METHOD1(RegisterEncodeCompleteCallback,
- int32_t(EncodedImageCallback* callback));
- MOCK_METHOD0(Release, int32_t());
- MOCK_METHOD0(Reset, int32_t());
- MOCK_METHOD1(SetRates, void(const RateControlParameters& parameters));
- MOCK_METHOD1(OnPacketLossRateUpdate, void(float packet_loss_rate));
- MOCK_METHOD1(OnRttUpdate, void(int64_t rtt_ms));
- MOCK_METHOD1(OnLossNotification,
- void(const LossNotification& loss_notification));
- MOCK_CONST_METHOD0(GetEncoderInfo, EncoderInfo(void));
+ MOCK_METHOD(int32_t,
+ Encode,
+ (const VideoFrame& inputImage,
+ const std::vector<VideoFrameType>*),
+ (override));
+ MOCK_METHOD(int32_t,
+ RegisterEncodeCompleteCallback,
+ (EncodedImageCallback*),
+ (override));
+ MOCK_METHOD(int32_t, Release, (), (override));
+ MOCK_METHOD(void,
+ SetRates,
+ (const RateControlParameters& parameters),
+ (override));
+ MOCK_METHOD(void,
+ OnPacketLossRateUpdate,
+ (float packet_loss_rate),
+ (override));
+ MOCK_METHOD(void, OnRttUpdate, (int64_t rtt_ms), (override));
+ MOCK_METHOD(void,
+ OnLossNotification,
+ (const LossNotification& loss_notification),
+ (override));
+ MOCK_METHOD(EncoderInfo, GetEncoderInfo, (), (const, override));
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/api/test/mock_video_encoder_factory.h b/chromium/third_party/webrtc/api/test/mock_video_encoder_factory.h
index fa08dbd6bc2..1aa14631bed 100644
--- a/chromium/third_party/webrtc/api/test/mock_video_encoder_factory.h
+++ b/chromium/third_party/webrtc/api/test/mock_video_encoder_factory.h
@@ -27,17 +27,17 @@ class MockVideoEncoderFactory : public webrtc::VideoEncoderFactory {
MOCK_METHOD(std::vector<SdpVideoFormat>,
GetSupportedFormats,
(),
- (const override));
+ (const, override));
MOCK_METHOD(CodecInfo,
QueryVideoEncoder,
(const SdpVideoFormat&),
- (const override));
+ (const, override));
MOCK_METHOD(std::unique_ptr<VideoEncoder>,
CreateVideoEncoder,
(const SdpVideoFormat&),
(override));
- MOCK_METHOD(void, Die, (), ());
+ MOCK_METHOD(void, Die, ());
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/api/test/network_emulation/BUILD.gn b/chromium/third_party/webrtc/api/test/network_emulation/BUILD.gn
index 5fda1e288a1..a3dd961c811 100644
--- a/chromium/third_party/webrtc/api/test/network_emulation/BUILD.gn
+++ b/chromium/third_party/webrtc/api/test/network_emulation/BUILD.gn
@@ -23,6 +23,6 @@ rtc_library("network_emulation") {
"../../units:data_rate",
"../../units:data_size",
"../../units:timestamp",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
diff --git a/chromium/third_party/webrtc/api/test/peerconnection_quality_test_fixture.h b/chromium/third_party/webrtc/api/test/peerconnection_quality_test_fixture.h
index 8165443d36e..35736876cce 100644
--- a/chromium/third_party/webrtc/api/test/peerconnection_quality_test_fixture.h
+++ b/chromium/third_party/webrtc/api/test/peerconnection_quality_test_fixture.h
@@ -33,13 +33,13 @@
#include "api/test/simulated_network.h"
#include "api/test/stats_observer_interface.h"
#include "api/test/video_quality_analyzer_interface.h"
-#include "api/transport/media/media_transport_interface.h"
#include "api/transport/network_control.h"
#include "api/units/time_delta.h"
#include "api/video_codecs/video_decoder_factory.h"
#include "api/video_codecs/video_encoder.h"
#include "api/video_codecs/video_encoder_factory.h"
#include "media/base/media_constants.h"
+#include "rtc_base/deprecation.h"
#include "rtc_base/network.h"
#include "rtc_base/rtc_certificate_generator.h"
#include "rtc_base/ssl_certificate.h"
@@ -287,8 +287,6 @@ class PeerConnectionE2EQualityTestFixture {
virtual PeerConfigurer* SetNetworkControllerFactory(
std::unique_ptr<NetworkControllerFactoryInterface>
network_controller_factory) = 0;
- virtual PeerConfigurer* SetMediaTransportFactory(
- std::unique_ptr<MediaTransportFactory> media_transport_factory) = 0;
virtual PeerConfigurer* SetVideoEncoderFactory(
std::unique_ptr<VideoEncoderFactory> video_encoder_factory) = 0;
virtual PeerConfigurer* SetVideoDecoderFactory(
@@ -337,6 +335,9 @@ class PeerConnectionE2EQualityTestFixture {
PeerConnectionInterface::RTCConfiguration configuration) = 0;
// Set bitrate parameters on PeerConnection. This constraints will be
// applied to all summed RTP streams for this peer.
+ virtual PeerConfigurer* SetBitrateSettings(
+ BitrateSettings bitrate_settings) = 0;
+ RTC_DEPRECATED
virtual PeerConfigurer* SetBitrateParameters(
PeerConnectionInterface::BitrateParameters bitrate_params) = 0;
};
@@ -448,6 +449,12 @@ class PeerConnectionE2EQualityTestFixture {
virtual void AddPeer(rtc::Thread* network_thread,
rtc::NetworkManager* network_manager,
rtc::FunctionView<void(PeerConfigurer*)> configurer) = 0;
+ // Runs the media quality test, which includes setting up the call with
+ // configured participants, running it according to provided |run_params| and
+ // terminating it properly at the end. During call duration media quality
+ // metrics are gathered, which are then reported to stdout and (if configured)
+ // to the json/protobuf output file through the WebRTC perf test results
+ // reporting system.
virtual void Run(RunParams run_params) = 0;
// Returns real test duration - the time of test execution measured during
diff --git a/chromium/third_party/webrtc/api/test/stats_observer_interface.h b/chromium/third_party/webrtc/api/test/stats_observer_interface.h
index 98c8dd937f2..ea4d6c23db9 100644
--- a/chromium/third_party/webrtc/api/test/stats_observer_interface.h
+++ b/chromium/third_party/webrtc/api/test/stats_observer_interface.h
@@ -11,9 +11,8 @@
#ifndef API_TEST_STATS_OBSERVER_INTERFACE_H_
#define API_TEST_STATS_OBSERVER_INTERFACE_H_
-#include <string>
-
-#include "api/stats_types.h"
+#include "absl/strings/string_view.h"
+#include "api/stats/rtc_stats_report.h"
namespace webrtc {
namespace webrtc_pc_e2e {
@@ -25,8 +24,9 @@ class StatsObserverInterface {
// Method called when stats reports are available for the PeerConnection
// identified by |pc_label|.
- virtual void OnStatsReports(const std::string& pc_label,
- const StatsReports& reports) = 0;
+ virtual void OnStatsReports(
+ absl::string_view pc_label,
+ const rtc::scoped_refptr<const RTCStatsReport>& report) = 0;
};
} // namespace webrtc_pc_e2e
diff --git a/chromium/third_party/webrtc/api/test/test_dependency_factory.cc b/chromium/third_party/webrtc/api/test/test_dependency_factory.cc
index e72f55aab57..41ad70cc3f7 100644
--- a/chromium/third_party/webrtc/api/test/test_dependency_factory.cc
+++ b/chromium/third_party/webrtc/api/test/test_dependency_factory.cc
@@ -14,22 +14,24 @@
#include <utility>
#include "rtc_base/checks.h"
-#include "rtc_base/thread_checker.h"
+#include "rtc_base/platform_thread_types.h"
namespace webrtc {
+namespace {
// This checks everything in this file gets called on the same thread. It's
// static because it needs to look at the static methods too.
-rtc::ThreadChecker* GetThreadChecker() {
- static rtc::ThreadChecker checker;
- return &checker;
+bool IsValidTestDependencyFactoryThread() {
+  static const rtc::PlatformThreadRef main_thread = rtc::CurrentThreadRef();
+ return rtc::IsThreadRefEqual(main_thread, rtc::CurrentThreadRef());
}
+} // namespace
std::unique_ptr<TestDependencyFactory> TestDependencyFactory::instance_ =
nullptr;
const TestDependencyFactory& TestDependencyFactory::GetInstance() {
- RTC_DCHECK(GetThreadChecker()->IsCurrent());
+ RTC_DCHECK(IsValidTestDependencyFactoryThread());
if (instance_ == nullptr) {
instance_ = std::make_unique<TestDependencyFactory>();
}
@@ -38,14 +40,14 @@ const TestDependencyFactory& TestDependencyFactory::GetInstance() {
void TestDependencyFactory::SetInstance(
std::unique_ptr<TestDependencyFactory> instance) {
- RTC_DCHECK(GetThreadChecker()->IsCurrent());
+ RTC_DCHECK(IsValidTestDependencyFactoryThread());
RTC_CHECK(instance_ == nullptr);
instance_ = std::move(instance);
}
std::unique_ptr<VideoQualityTestFixtureInterface::InjectionComponents>
TestDependencyFactory::CreateComponents() const {
- RTC_DCHECK(GetThreadChecker()->IsCurrent());
+ RTC_DCHECK(IsValidTestDependencyFactoryThread());
return nullptr;
}
diff --git a/chromium/third_party/webrtc/api/test/video_quality_analyzer_interface.h b/chromium/third_party/webrtc/api/test/video_quality_analyzer_interface.h
index 0d3f4415348..c5370a7089e 100644
--- a/chromium/third_party/webrtc/api/test/video_quality_analyzer_interface.h
+++ b/chromium/third_party/webrtc/api/test/video_quality_analyzer_interface.h
@@ -14,7 +14,9 @@
#include <memory>
#include <string>
+#include "absl/strings/string_view.h"
#include "absl/types/optional.h"
+#include "api/array_view.h"
#include "api/test/stats_observer_interface.h"
#include "api/video/encoded_image.h"
#include "api/video/video_frame.h"
@@ -76,42 +78,65 @@ class VideoQualityAnalyzerInterface : public StatsObserverInterface {
// calculations. Analyzer can perform simple calculations on the calling
// thread in each method, but should remember, that it is the same thread,
// that is used in video pipeline.
- virtual void Start(std::string test_case_name, int max_threads_count) {}
+ virtual void Start(std::string test_case_name,
+ rtc::ArrayView<const std::string> peer_names,
+ int max_threads_count) {}
// Will be called when frame was generated from the input stream.
+ // |peer_name| is name of the peer on which side frame was captured.
// Returns frame id, that will be set by framework to the frame.
- virtual uint16_t OnFrameCaptured(const std::string& stream_label,
+ virtual uint16_t OnFrameCaptured(absl::string_view peer_name,
+ const std::string& stream_label,
const VideoFrame& frame) = 0;
// Will be called before calling the encoder.
- virtual void OnFramePreEncode(const VideoFrame& frame) {}
+ // |peer_name| is name of the peer on which side frame came to encoder.
+ virtual void OnFramePreEncode(absl::string_view peer_name,
+ const VideoFrame& frame) {}
// Will be called for each EncodedImage received from encoder. Single
// VideoFrame can produce multiple EncodedImages. Each encoded image will
// have id from VideoFrame.
- virtual void OnFrameEncoded(uint16_t frame_id,
+ // |peer_name| is name of the peer on which side frame was encoded.
+ virtual void OnFrameEncoded(absl::string_view peer_name,
+ uint16_t frame_id,
const EncodedImage& encoded_image,
const EncoderStats& stats) {}
// Will be called for each frame dropped by encoder.
- virtual void OnFrameDropped(EncodedImageCallback::DropReason reason) {}
+ // |peer_name| is name of the peer on which side frame drop was detected.
+ virtual void OnFrameDropped(absl::string_view peer_name,
+ EncodedImageCallback::DropReason reason) {}
// Will be called before calling the decoder.
- virtual void OnFramePreDecode(uint16_t frame_id,
+ // |peer_name| is name of the peer on which side frame was received.
+ virtual void OnFramePreDecode(absl::string_view peer_name,
+ uint16_t frame_id,
const EncodedImage& encoded_image) {}
// Will be called after decoding the frame.
- virtual void OnFrameDecoded(const VideoFrame& frame,
+ // |peer_name| is name of the peer on which side frame was decoded.
+ virtual void OnFrameDecoded(absl::string_view peer_name,
+ const VideoFrame& frame,
const DecoderStats& stats) {}
// Will be called when frame will be obtained from PeerConnection stack.
- virtual void OnFrameRendered(const VideoFrame& frame) {}
+ // |peer_name| is name of the peer on which side frame was rendered.
+ virtual void OnFrameRendered(absl::string_view peer_name,
+ const VideoFrame& frame) {}
// Will be called if encoder return not WEBRTC_VIDEO_CODEC_OK.
// All available codes are listed in
// modules/video_coding/include/video_error_codes.h
- virtual void OnEncoderError(const VideoFrame& frame, int32_t error_code) {}
+  // |peer_name| is the name of the peer on whose side the error occurred.
+ virtual void OnEncoderError(absl::string_view peer_name,
+ const VideoFrame& frame,
+ int32_t error_code) {}
// Will be called if decoder return not WEBRTC_VIDEO_CODEC_OK.
// All available codes are listed in
// modules/video_coding/include/video_error_codes.h
- virtual void OnDecoderError(uint16_t frame_id, int32_t error_code) {}
+  // |peer_name| is the name of the peer on whose side the error occurred.
+ virtual void OnDecoderError(absl::string_view peer_name,
+ uint16_t frame_id,
+ int32_t error_code) {}
// Will be called every time new stats reports are available for the
// Peer Connection identified by |pc_label|.
- void OnStatsReports(const std::string& pc_label,
- const StatsReports& stats_reports) override {}
+ void OnStatsReports(
+ absl::string_view pc_label,
+ const rtc::scoped_refptr<const RTCStatsReport>& report) override {}
// Tells analyzer that analysis complete and it should calculate final
// statistics.
diff --git a/chromium/third_party/webrtc/api/transport/BUILD.gn b/chromium/third_party/webrtc/api/transport/BUILD.gn
index 0f07301fe41..a4ada07108b 100644
--- a/chromium/third_party/webrtc/api/transport/BUILD.gn
+++ b/chromium/third_party/webrtc/api/transport/BUILD.gn
@@ -14,10 +14,8 @@ rtc_library("bitrate_settings") {
"bitrate_settings.cc",
"bitrate_settings.h",
]
- deps = [
- "../../rtc_base/system:rtc_export",
- "//third_party/abseil-cpp/absl/types:optional",
- ]
+ deps = [ "../../rtc_base/system:rtc_export" ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_source_set("enums") {
@@ -41,6 +39,8 @@ rtc_library("network_control") {
"../units:data_size",
"../units:time_delta",
"../units:timestamp",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/base:core_headers",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -49,10 +49,8 @@ rtc_library("network_control") {
rtc_source_set("webrtc_key_value_config") {
visibility = [ "*" ]
sources = [ "webrtc_key_value_config.h" ]
- deps = [
- "../../rtc_base/system:rtc_export",
- "//third_party/abseil-cpp/absl/strings",
- ]
+ deps = [ "../../rtc_base/system:rtc_export" ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
rtc_library("field_trial_based_config") {
@@ -64,26 +62,20 @@ rtc_library("field_trial_based_config") {
deps = [
":webrtc_key_value_config",
"../../system_wrappers:field_trial",
- "//third_party/abseil-cpp/absl/strings",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
+# TODO(nisse): Rename?
rtc_source_set("datagram_transport_interface") {
visibility = [ "*" ]
- sources = [
- "congestion_control_interface.h",
- "data_channel_transport_interface.h",
- "datagram_transport_interface.h",
- ]
+ sources = [ "data_channel_transport_interface.h" ]
deps = [
- ":network_control",
"..:array_view",
"..:rtc_error",
"../../rtc_base:rtc_base_approved",
- "../units:data_rate",
- "../units:timestamp",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("goog_cc") {
diff --git a/chromium/third_party/webrtc/api/transport/congestion_control_interface.h b/chromium/third_party/webrtc/api/transport/congestion_control_interface.h
deleted file mode 100644
index 40552cb4ffd..00000000000
--- a/chromium/third_party/webrtc/api/transport/congestion_control_interface.h
+++ /dev/null
@@ -1,75 +0,0 @@
-/* Copyright 2018 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// This is EXPERIMENTAL interface for media and datagram transports.
-
-#ifndef API_TRANSPORT_CONGESTION_CONTROL_INTERFACE_H_
-#define API_TRANSPORT_CONGESTION_CONTROL_INTERFACE_H_
-
-#include <memory>
-#include <string>
-#include <utility>
-
-#include "api/transport/network_control.h"
-#include "api/units/data_rate.h"
-
-namespace webrtc {
-
-// TODO(nisse): Defined together with MediaTransportInterface. But we should use
-// types that aren't tied to media, so that MediaTransportInterface can depend
-// on CongestionControlInterface, but not the other way around.
-// api/transport/network_control.h may be a reasonable place.
-class MediaTransportRttObserver;
-struct MediaTransportAllocatedBitrateLimits;
-struct MediaTransportTargetRateConstraints;
-
-// Defines congestion control feedback interface for media and datagram
-// transports.
-class CongestionControlInterface {
- public:
- virtual ~CongestionControlInterface() = default;
-
- // Updates allocation limits.
- virtual void SetAllocatedBitrateLimits(
- const MediaTransportAllocatedBitrateLimits& limits) = 0;
-
- // Sets starting rate.
- virtual void SetTargetBitrateLimits(
- const MediaTransportTargetRateConstraints& target_rate_constraints) = 0;
-
- // Intended for receive side. AddRttObserver registers an observer to be
- // called for each RTT measurement, typically once per ACK. Before media
- // transport is destructed the observer must be unregistered.
- //
- // TODO(sukhanov): Looks like AddRttObserver and RemoveRttObserver were
- // never implemented for media transport, so keeping noop implementation.
- virtual void AddRttObserver(MediaTransportRttObserver* observer) {}
- virtual void RemoveRttObserver(MediaTransportRttObserver* observer) {}
-
- // Adds a target bitrate observer. Before media transport is destructed
- // the observer must be unregistered (by calling
- // RemoveTargetTransferRateObserver).
- // A newly registered observer will be called back with the latest recorded
- // target rate, if available.
- virtual void AddTargetTransferRateObserver(
- TargetTransferRateObserver* observer) = 0;
-
- // Removes an existing |observer| from observers. If observer was never
- // registered, an error is logged and method does nothing.
- virtual void RemoveTargetTransferRateObserver(
- TargetTransferRateObserver* observer) = 0;
-
- // Returns the last known target transfer rate as reported to the above
- // observers.
- virtual absl::optional<TargetTransferRate> GetLatestTargetTransferRate() = 0;
-};
-
-} // namespace webrtc
-
-#endif // API_TRANSPORT_CONGESTION_CONTROL_INTERFACE_H_
diff --git a/chromium/third_party/webrtc/api/transport/data_channel_transport_interface.h b/chromium/third_party/webrtc/api/transport/data_channel_transport_interface.h
index 671deffc6ee..7b8c653c390 100644
--- a/chromium/third_party/webrtc/api/transport/data_channel_transport_interface.h
+++ b/chromium/third_party/webrtc/api/transport/data_channel_transport_interface.h
@@ -35,8 +35,8 @@ enum class DataMessageType {
// sent reliably and in-order, even if the data channel is configured for
// unreliable delivery.
struct SendDataParams {
- SendDataParams();
- SendDataParams(const SendDataParams&);
+ SendDataParams() = default;
+ SendDataParams(const SendDataParams&) = default;
DataMessageType type = DataMessageType::kText;
diff --git a/chromium/third_party/webrtc/api/transport/datagram_transport_interface.h b/chromium/third_party/webrtc/api/transport/datagram_transport_interface.h
deleted file mode 100644
index 01736b978d0..00000000000
--- a/chromium/third_party/webrtc/api/transport/datagram_transport_interface.h
+++ /dev/null
@@ -1,151 +0,0 @@
-/* Copyright 2018 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// This is EXPERIMENTAL interface for media and datagram transports.
-
-#ifndef API_TRANSPORT_DATAGRAM_TRANSPORT_INTERFACE_H_
-#define API_TRANSPORT_DATAGRAM_TRANSPORT_INTERFACE_H_
-
-#include <memory>
-#include <string>
-#include <utility>
-
-#include "absl/types/optional.h"
-#include "api/array_view.h"
-#include "api/rtc_error.h"
-#include "api/transport/congestion_control_interface.h"
-#include "api/transport/data_channel_transport_interface.h"
-#include "api/units/data_rate.h"
-#include "api/units/timestamp.h"
-
-namespace rtc {
-class PacketTransportInternal;
-} // namespace rtc
-
-namespace webrtc {
-
-class MediaTransportStateCallback;
-
-typedef int64_t DatagramId;
-
-struct DatagramAck {
- // |datagram_id| is same as passed in
- // DatagramTransportInterface::SendDatagram.
- DatagramId datagram_id;
-
- // The timestamp at which the remote peer received the identified datagram,
- // according to that peer's clock.
- Timestamp receive_timestamp = Timestamp::MinusInfinity();
-};
-
-// All sink methods are called on network thread.
-class DatagramSinkInterface {
- public:
- virtual ~DatagramSinkInterface() {}
-
- // Called when new packet is received.
- virtual void OnDatagramReceived(rtc::ArrayView<const uint8_t> data) = 0;
-
- // Called when datagram is actually sent (datragram can be delayed due
- // to congestion control or fusing). |datagram_id| is same as passed in
- // DatagramTransportInterface::SendDatagram.
- virtual void OnDatagramSent(DatagramId datagram_id) = 0;
-
- // Called when datagram is ACKed.
- virtual void OnDatagramAcked(const DatagramAck& datagram_ack) = 0;
-
- // Called when a datagram is lost.
- virtual void OnDatagramLost(DatagramId datagram_id) = 0;
-};
-
-// Datagram transport allows to send and receive unreliable packets (datagrams)
-// and receive feedback from congestion control (via
-// CongestionControlInterface). The idea is to send RTP packets as datagrams and
-// have underlying implementation of datagram transport to use QUIC datagram
-// protocol.
-class DatagramTransportInterface : public DataChannelTransportInterface {
- public:
- virtual ~DatagramTransportInterface() = default;
-
- // Connect the datagram transport to the ICE transport.
- // The implementation must be able to ignore incoming packets that don't
- // belong to it.
- virtual void Connect(rtc::PacketTransportInternal* packet_transport) = 0;
-
- // Returns congestion control feedback interface or nullptr if datagram
- // transport does not implement congestion control.
- //
- // Note that right now datagram transport is used without congestion control,
- // but we plan to use it in the future.
- virtual CongestionControlInterface* congestion_control() = 0;
-
- // Sets a state observer callback. Before datagram transport is destroyed, the
- // callback must be unregistered by setting it to nullptr.
- // A newly registered callback will be called with the current state.
- // Datagram transport does not invoke this callback concurrently.
- virtual void SetTransportStateCallback(
- MediaTransportStateCallback* callback) = 0;
-
- // Start asynchronous send of datagram. The status returned by this method
- // only pertains to the synchronous operations (e.g. serialization /
- // packetization), not to the asynchronous operation.
- //
- // Datagrams larger than GetLargestDatagramSize() will fail and return error.
- //
- // Datagrams are sent in FIFO order.
- //
- // |datagram_id| is only used in ACK/LOST notifications in
- // DatagramSinkInterface and does not need to be unique.
- virtual RTCError SendDatagram(rtc::ArrayView<const uint8_t> data,
- DatagramId datagram_id) = 0;
-
- // Returns maximum size of datagram message, does not change.
- // TODO(sukhanov): Because value may be undefined before connection setup
- // is complete, consider returning error when called before connection is
- // established. Currently returns hardcoded const, because integration
- // prototype may call before connection is established.
- virtual size_t GetLargestDatagramSize() const = 0;
-
- // Sets packet sink. Sink must be unset by calling
- // SetDataTransportSink(nullptr) before the data transport is destroyed or
- // before new sink is set.
- virtual void SetDatagramSink(DatagramSinkInterface* sink) = 0;
-
- // Retrieves transport parameters for this datagram transport. May be called
- // on either client- or server-perspective transports.
- //
- // For servers, the parameters represent what kind of connections and data the
- // server is prepared to accept. This is generally a superset of acceptable
- // parameters.
- //
- // For clients, the parameters echo the server configuration used to create
- // the client, possibly removing any fields or parameters which the client
- // does not understand.
- virtual std::string GetTransportParameters() const = 0;
-
- // Sets remote transport parameters. |remote_params| is a serialized string
- // of opaque parameters, understood by the datagram transport implementation.
- // Returns an error if |remote_params| are not compatible with this transport.
- //
- // TODO(mellem): Make pure virtual. The default implementation maintains
- // original negotiation behavior (negotiation falls back to RTP if the
- // remote datagram transport fails to echo exactly the local parameters).
- virtual RTCError SetRemoteTransportParameters(
- absl::string_view remote_params) {
- if (remote_params == GetTransportParameters()) {
- return RTCError::OK();
- }
- return RTCError(RTCErrorType::UNSUPPORTED_PARAMETER,
- "Local and remote transport parameters do not match");
- }
-};
-
-} // namespace webrtc
-
-#endif // API_TRANSPORT_DATAGRAM_TRANSPORT_INTERFACE_H_
diff --git a/chromium/third_party/webrtc/api/transport/media/BUILD.gn b/chromium/third_party/webrtc/api/transport/media/BUILD.gn
deleted file mode 100644
index 24a364c2e5d..00000000000
--- a/chromium/third_party/webrtc/api/transport/media/BUILD.gn
+++ /dev/null
@@ -1,52 +0,0 @@
-# Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS. All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-
-import("../../../webrtc.gni")
-
-rtc_library("media_transport_interface") {
- visibility = [ "*" ]
- sources = [
- "media_transport_config.cc",
- "media_transport_config.h",
- "media_transport_interface.cc",
- "media_transport_interface.h",
- ]
- deps = [
- ":audio_interfaces",
- ":video_interfaces",
- "..:datagram_transport_interface",
- "..:network_control",
- "../..:array_view",
- "../..:rtc_error",
- "../../..:webrtc_common",
- "../../../rtc_base",
- "../../../rtc_base:checks",
- "../../../rtc_base:rtc_base_approved",
- "../../../rtc_base:stringutils",
- "../../units:data_rate",
- "//third_party/abseil-cpp/absl/types:optional",
- ]
-}
-
-rtc_library("audio_interfaces") {
- visibility = [ "*" ]
- sources = [
- "audio_transport.cc",
- "audio_transport.h",
- ]
- deps = [ "../..:array_view" ]
-}
-
-rtc_library("video_interfaces") {
- visibility = [ "*" ]
- sources = [
- "video_transport.cc",
- "video_transport.h",
- ]
- deps = [ "../../video:encoded_image" ]
-}
diff --git a/chromium/third_party/webrtc/api/transport/media/audio_transport.cc b/chromium/third_party/webrtc/api/transport/media/audio_transport.cc
deleted file mode 100644
index 0f5fe8bcf2e..00000000000
--- a/chromium/third_party/webrtc/api/transport/media/audio_transport.cc
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright 2019 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// This is EXPERIMENTAL interface for media transport.
-//
-// The goal is to refactor WebRTC code so that audio and video frames
-// are sent / received through the media transport interface. This will
-// enable different media transport implementations, including QUIC-based
-// media transport.
-
-#include "api/transport/media/audio_transport.h"
-
-#include <utility>
-
-namespace webrtc {
-
-MediaTransportEncodedAudioFrame::~MediaTransportEncodedAudioFrame() {}
-
-MediaTransportEncodedAudioFrame::MediaTransportEncodedAudioFrame(
- int sampling_rate_hz,
- int starting_sample_index,
- int samples_per_channel,
- int sequence_number,
- FrameType frame_type,
- int payload_type,
- std::vector<uint8_t> encoded_data)
- : sampling_rate_hz_(sampling_rate_hz),
- starting_sample_index_(starting_sample_index),
- samples_per_channel_(samples_per_channel),
- sequence_number_(sequence_number),
- frame_type_(frame_type),
- payload_type_(payload_type),
- encoded_data_(std::move(encoded_data)) {}
-
-MediaTransportEncodedAudioFrame& MediaTransportEncodedAudioFrame::operator=(
- const MediaTransportEncodedAudioFrame&) = default;
-
-MediaTransportEncodedAudioFrame& MediaTransportEncodedAudioFrame::operator=(
- MediaTransportEncodedAudioFrame&&) = default;
-
-MediaTransportEncodedAudioFrame::MediaTransportEncodedAudioFrame(
- const MediaTransportEncodedAudioFrame&) = default;
-
-MediaTransportEncodedAudioFrame::MediaTransportEncodedAudioFrame(
- MediaTransportEncodedAudioFrame&&) = default;
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/api/transport/media/audio_transport.h b/chromium/third_party/webrtc/api/transport/media/audio_transport.h
deleted file mode 100644
index dcbdcd7afe2..00000000000
--- a/chromium/third_party/webrtc/api/transport/media/audio_transport.h
+++ /dev/null
@@ -1,120 +0,0 @@
-/* Copyright 2019 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// This is EXPERIMENTAL interface for media transport.
-//
-// The goal is to refactor WebRTC code so that audio and video frames
-// are sent / received through the media transport interface. This will
-// enable different media transport implementations, including QUIC-based
-// media transport.
-
-#ifndef API_TRANSPORT_MEDIA_AUDIO_TRANSPORT_H_
-#define API_TRANSPORT_MEDIA_AUDIO_TRANSPORT_H_
-
-#include <vector>
-
-#include "api/array_view.h"
-
-namespace webrtc {
-
-// Represents encoded audio frame in any encoding (type of encoding is opaque).
-// To avoid copying of encoded data use move semantics when passing by value.
-class MediaTransportEncodedAudioFrame final {
- public:
- enum class FrameType {
- // Normal audio frame (equivalent to webrtc::kAudioFrameSpeech).
- kSpeech,
-
- // DTX frame (equivalent to webrtc::kAudioFrameCN).
- kDiscontinuousTransmission,
- // TODO(nisse): Mis-spelled version, update users, then delete.
- kDiscountinuousTransmission = kDiscontinuousTransmission,
- };
-
- MediaTransportEncodedAudioFrame(
- // Audio sampling rate, for example 48000.
- int sampling_rate_hz,
-
- // Starting sample index of the frame, i.e. how many audio samples were
- // before this frame since the beginning of the call or beginning of time
- // in one channel (the starting point should not matter for NetEq). In
- // WebRTC it is used as a timestamp of the frame.
- // TODO(sukhanov): Starting_sample_index is currently adjusted on the
- // receiver side in RTP path. Non-RTP implementations should preserve it.
- // For NetEq initial offset should not matter so we should consider fixing
- // RTP path.
- int starting_sample_index,
-
- // Number of audio samples in audio frame in 1 channel.
- int samples_per_channel,
-
- // Sequence number of the frame in the order sent, it is currently
- // required by NetEq, but we can fix NetEq, because starting_sample_index
- // should be enough.
- int sequence_number,
-
- // If audio frame is a speech or discontinued transmission.
- FrameType frame_type,
-
- // Opaque payload type. In RTP codepath payload type is stored in RTP
- // header. In other implementations it should be simply passed through the
- // wire -- it's needed for decoder.
- int payload_type,
-
- // Vector with opaque encoded data.
- std::vector<uint8_t> encoded_data);
-
- ~MediaTransportEncodedAudioFrame();
- MediaTransportEncodedAudioFrame(const MediaTransportEncodedAudioFrame&);
- MediaTransportEncodedAudioFrame& operator=(
- const MediaTransportEncodedAudioFrame& other);
- MediaTransportEncodedAudioFrame& operator=(
- MediaTransportEncodedAudioFrame&& other);
- MediaTransportEncodedAudioFrame(MediaTransportEncodedAudioFrame&&);
-
- // Getters.
- int sampling_rate_hz() const { return sampling_rate_hz_; }
- int starting_sample_index() const { return starting_sample_index_; }
- int samples_per_channel() const { return samples_per_channel_; }
- int sequence_number() const { return sequence_number_; }
-
- int payload_type() const { return payload_type_; }
- FrameType frame_type() const { return frame_type_; }
-
- rtc::ArrayView<const uint8_t> encoded_data() const { return encoded_data_; }
-
- private:
- int sampling_rate_hz_;
- int starting_sample_index_;
- int samples_per_channel_;
-
- // TODO(sukhanov): Refactor NetEq so we don't need sequence number.
- // Having sample_index and samples_per_channel should be enough.
- int sequence_number_;
-
- FrameType frame_type_;
-
- int payload_type_;
-
- std::vector<uint8_t> encoded_data_;
-};
-
-// Interface for receiving encoded audio frames from MediaTransportInterface
-// implementations.
-class MediaTransportAudioSinkInterface {
- public:
- virtual ~MediaTransportAudioSinkInterface() = default;
-
- // Called when new encoded audio frame is received.
- virtual void OnData(uint64_t channel_id,
- MediaTransportEncodedAudioFrame frame) = 0;
-};
-
-} // namespace webrtc
-#endif // API_TRANSPORT_MEDIA_AUDIO_TRANSPORT_H_
diff --git a/chromium/third_party/webrtc/api/transport/media/media_transport_config.cc b/chromium/third_party/webrtc/api/transport/media/media_transport_config.cc
deleted file mode 100644
index b9b19cb6f0f..00000000000
--- a/chromium/third_party/webrtc/api/transport/media/media_transport_config.cc
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Copyright 2018 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "api/transport/media/media_transport_config.h"
-
-#include "rtc_base/checks.h"
-#include "rtc_base/strings/string_builder.h"
-
-namespace webrtc {
-
-MediaTransportConfig::MediaTransportConfig(size_t rtp_max_packet_size)
- : rtp_max_packet_size(rtp_max_packet_size) {
- RTC_DCHECK_GT(rtp_max_packet_size, 0);
-}
-
-std::string MediaTransportConfig::DebugString() const {
- rtc::StringBuilder result;
- result << "{rtp_max_packet_size: " << rtp_max_packet_size.value_or(0) << "}";
- return result.Release();
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/api/transport/media/media_transport_config.h b/chromium/third_party/webrtc/api/transport/media/media_transport_config.h
deleted file mode 100644
index 7ef65453ae2..00000000000
--- a/chromium/third_party/webrtc/api/transport/media/media_transport_config.h
+++ /dev/null
@@ -1,38 +0,0 @@
-/* Copyright 2018 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-#ifndef API_TRANSPORT_MEDIA_MEDIA_TRANSPORT_CONFIG_H_
-#define API_TRANSPORT_MEDIA_MEDIA_TRANSPORT_CONFIG_H_
-
-#include <memory>
-#include <string>
-#include <utility>
-
-#include "absl/types/optional.h"
-
-namespace webrtc {
-
-// Media transport config is made available to both transport and audio / video
-// layers, but access to individual interfaces should not be open without
-// necessity.
-struct MediaTransportConfig {
- // Default constructor for no-media transport scenarios.
- MediaTransportConfig() = default;
-
- // Constructor for datagram transport scenarios.
- explicit MediaTransportConfig(size_t rtp_max_packet_size);
-
- std::string DebugString() const;
-
- // If provided, limits RTP packet size (excludes ICE, IP or network overhead).
- absl::optional<size_t> rtp_max_packet_size;
-};
-
-} // namespace webrtc
-
-#endif // API_TRANSPORT_MEDIA_MEDIA_TRANSPORT_CONFIG_H_
diff --git a/chromium/third_party/webrtc/api/transport/media/media_transport_interface.cc b/chromium/third_party/webrtc/api/transport/media/media_transport_interface.cc
deleted file mode 100644
index 323ddca6895..00000000000
--- a/chromium/third_party/webrtc/api/transport/media/media_transport_interface.cc
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- * Copyright 2018 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// This is EXPERIMENTAL interface for media transport.
-//
-// The goal is to refactor WebRTC code so that audio and video frames
-// are sent / received through the media transport interface. This will
-// enable different media transport implementations, including QUIC-based
-// media transport.
-
-#include "api/transport/media/media_transport_interface.h"
-
-#include <cstdint>
-#include <utility>
-
-#include "api/transport/datagram_transport_interface.h"
-
-namespace webrtc {
-
-MediaTransportSettings::MediaTransportSettings() = default;
-MediaTransportSettings::MediaTransportSettings(const MediaTransportSettings&) =
- default;
-MediaTransportSettings& MediaTransportSettings::operator=(
- const MediaTransportSettings&) = default;
-MediaTransportSettings::~MediaTransportSettings() = default;
-
-SendDataParams::SendDataParams() = default;
-SendDataParams::SendDataParams(const SendDataParams&) = default;
-
-RTCErrorOr<std::unique_ptr<MediaTransportInterface>>
-MediaTransportFactory::CreateMediaTransport(
- rtc::PacketTransportInternal* packet_transport,
- rtc::Thread* network_thread,
- const MediaTransportSettings& settings) {
- return std::unique_ptr<MediaTransportInterface>(nullptr);
-}
-
-RTCErrorOr<std::unique_ptr<MediaTransportInterface>>
-MediaTransportFactory::CreateMediaTransport(
- rtc::Thread* network_thread,
- const MediaTransportSettings& settings) {
- return std::unique_ptr<MediaTransportInterface>(nullptr);
-}
-
-RTCErrorOr<std::unique_ptr<DatagramTransportInterface>>
-MediaTransportFactory::CreateDatagramTransport(
- rtc::Thread* network_thread,
- const MediaTransportSettings& settings) {
- return std::unique_ptr<DatagramTransportInterface>(nullptr);
-}
-
-std::string MediaTransportFactory::GetTransportName() const {
- return "";
-}
-
-MediaTransportInterface::MediaTransportInterface() = default;
-MediaTransportInterface::~MediaTransportInterface() = default;
-
-absl::optional<std::string>
-MediaTransportInterface::GetTransportParametersOffer() const {
- return absl::nullopt;
-}
-
-void MediaTransportInterface::Connect(
- rtc::PacketTransportInternal* packet_transport) {}
-
-void MediaTransportInterface::SetKeyFrameRequestCallback(
- MediaTransportKeyFrameRequestCallback* callback) {}
-
-absl::optional<TargetTransferRate>
-MediaTransportInterface::GetLatestTargetTransferRate() {
- return absl::nullopt;
-}
-
-void MediaTransportInterface::AddNetworkChangeCallback(
- MediaTransportNetworkChangeCallback* callback) {}
-
-void MediaTransportInterface::RemoveNetworkChangeCallback(
- MediaTransportNetworkChangeCallback* callback) {}
-
-void MediaTransportInterface::SetFirstAudioPacketReceivedObserver(
- AudioPacketReceivedObserver* observer) {}
-
-void MediaTransportInterface::AddTargetTransferRateObserver(
- TargetTransferRateObserver* observer) {}
-void MediaTransportInterface::RemoveTargetTransferRateObserver(
- TargetTransferRateObserver* observer) {}
-
-void MediaTransportInterface::AddRttObserver(
- MediaTransportRttObserver* observer) {}
-void MediaTransportInterface::RemoveRttObserver(
- MediaTransportRttObserver* observer) {}
-
-size_t MediaTransportInterface::GetAudioPacketOverhead() const {
- return 0;
-}
-
-void MediaTransportInterface::SetAllocatedBitrateLimits(
- const MediaTransportAllocatedBitrateLimits& limits) {}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/api/transport/media/media_transport_interface.h b/chromium/third_party/webrtc/api/transport/media/media_transport_interface.h
deleted file mode 100644
index dbe68d344b6..00000000000
--- a/chromium/third_party/webrtc/api/transport/media/media_transport_interface.h
+++ /dev/null
@@ -1,320 +0,0 @@
-/* Copyright 2018 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// This is EXPERIMENTAL interface for media transport.
-//
-// The goal is to refactor WebRTC code so that audio and video frames
-// are sent / received through the media transport interface. This will
-// enable different media transport implementations, including QUIC-based
-// media transport.
-
-#ifndef API_TRANSPORT_MEDIA_MEDIA_TRANSPORT_INTERFACE_H_
-#define API_TRANSPORT_MEDIA_MEDIA_TRANSPORT_INTERFACE_H_
-
-#include <memory>
-#include <string>
-#include <utility>
-
-#include "absl/types/optional.h"
-#include "api/array_view.h"
-#include "api/rtc_error.h"
-#include "api/transport/data_channel_transport_interface.h"
-#include "api/transport/media/audio_transport.h"
-#include "api/transport/media/video_transport.h"
-#include "api/transport/network_control.h"
-#include "api/units/data_rate.h"
-#include "rtc_base/copy_on_write_buffer.h"
-#include "rtc_base/network_route.h"
-
-namespace rtc {
-class PacketTransportInternal;
-class Thread;
-} // namespace rtc
-
-namespace webrtc {
-
-class DatagramTransportInterface;
-class RtcEventLog;
-
-class AudioPacketReceivedObserver {
- public:
- virtual ~AudioPacketReceivedObserver() = default;
-
- // Invoked for the first received audio packet on a given channel id.
- // It will be invoked once for each channel id.
- virtual void OnFirstAudioPacketReceived(int64_t channel_id) = 0;
-};
-
-// Used to configure stream allocations.
-struct MediaTransportAllocatedBitrateLimits {
- DataRate min_pacing_rate = DataRate::Zero();
- DataRate max_padding_bitrate = DataRate::Zero();
- DataRate max_total_allocated_bitrate = DataRate::Zero();
-};
-
-// Used to configure target bitrate constraints.
-// If the value is provided, the constraint is updated.
-// If the value is omitted, the value is left unchanged.
-struct MediaTransportTargetRateConstraints {
- absl::optional<DataRate> min_bitrate;
- absl::optional<DataRate> max_bitrate;
- absl::optional<DataRate> starting_bitrate;
-};
-
-// A collection of settings for creation of media transport.
-struct MediaTransportSettings final {
- MediaTransportSettings();
- MediaTransportSettings(const MediaTransportSettings&);
- MediaTransportSettings& operator=(const MediaTransportSettings&);
- ~MediaTransportSettings();
-
- // Group calls are not currently supported, in 1:1 call one side must set
- // is_caller = true and another is_caller = false.
- bool is_caller;
-
- // Must be set if a pre-shared key is used for the call.
- // TODO(bugs.webrtc.org/9944): This should become zero buffer in the distant
- // future.
- absl::optional<std::string> pre_shared_key;
-
- // If present, this is a config passed from the caller to the answerer in the
- // offer. Each media transport knows how to understand its own parameters.
- absl::optional<std::string> remote_transport_parameters;
-
- // If present, provides the event log that media transport should use.
- // Media transport does not own it. The lifetime of |event_log| will exceed
- // the lifetime of the instance of MediaTransportInterface instance.
- RtcEventLog* event_log = nullptr;
-};
-
-// Callback to notify about network route changes.
-class MediaTransportNetworkChangeCallback {
- public:
- virtual ~MediaTransportNetworkChangeCallback() = default;
-
- // Called when the network route is changed, with the new network route.
- virtual void OnNetworkRouteChanged(
- const rtc::NetworkRoute& new_network_route) = 0;
-};
-
-// State of the media transport. Media transport begins in the pending state.
-// It transitions to writable when it is ready to send media. It may transition
-// back to pending if the connection is blocked. It may transition to closed at
-// any time. Closed is terminal: a transport will never re-open once closed.
-enum class MediaTransportState {
- kPending,
- kWritable,
- kClosed,
-};
-
-// Callback invoked whenever the state of the media transport changes.
-class MediaTransportStateCallback {
- public:
- virtual ~MediaTransportStateCallback() = default;
-
- // Invoked whenever the state of the media transport changes.
- virtual void OnStateChanged(MediaTransportState state) = 0;
-};
-
-// Callback for RTT measurements on the receive side.
-// TODO(nisse): Related interfaces: CallStatsObserver and RtcpRttStats. It's
-// somewhat unclear what type of measurement is needed. It's used to configure
-// NACK generation and playout buffer. Either raw measurement values or recent
-// maximum would make sense for this use. Need consolidation of RTT signalling.
-class MediaTransportRttObserver {
- public:
- virtual ~MediaTransportRttObserver() = default;
-
- // Invoked when a new RTT measurement is available, typically once per ACK.
- virtual void OnRttUpdated(int64_t rtt_ms) = 0;
-};
-
-// Media transport interface for sending / receiving encoded audio/video frames
-// and receiving bandwidth estimate update from congestion control.
-class MediaTransportInterface : public DataChannelTransportInterface {
- public:
- MediaTransportInterface();
- virtual ~MediaTransportInterface();
-
- // Retrieves callers config (i.e. media transport offer) that should be passed
- // to the callee, before the call is connected. Such config is opaque to SDP
- // (sdp just passes it through). The config is a binary blob, so SDP may
- // choose to use base64 to serialize it (or any other approach that guarantees
- // that the binary blob goes through). This should only be called for the
- // caller's perspective.
- //
- // This may return an unset optional, which means that the given media
- // transport is not supported / disabled and shouldn't be reported in SDP.
- //
- // It may also return an empty string, in which case the media transport is
- // supported, but without any extra settings.
- // TODO(psla): Make abstract.
- virtual absl::optional<std::string> GetTransportParametersOffer() const;
-
- // Connect the media transport to the ICE transport.
- // The implementation must be able to ignore incoming packets that don't
- // belong to it.
- // TODO(psla): Make abstract.
- virtual void Connect(rtc::PacketTransportInternal* packet_transport);
-
- // Start asynchronous send of audio frame. The status returned by this method
- // only pertains to the synchronous operations (e.g.
- // serialization/packetization), not to the asynchronous operation.
-
- virtual RTCError SendAudioFrame(uint64_t channel_id,
- MediaTransportEncodedAudioFrame frame) = 0;
-
- // Start asynchronous send of video frame. The status returned by this method
- // only pertains to the synchronous operations (e.g.
- // serialization/packetization), not to the asynchronous operation.
- virtual RTCError SendVideoFrame(
- uint64_t channel_id,
- const MediaTransportEncodedVideoFrame& frame) = 0;
-
- // Used by video sender to be notified on key frame requests.
- virtual void SetKeyFrameRequestCallback(
- MediaTransportKeyFrameRequestCallback* callback);
-
- // Requests a keyframe for the particular channel (stream). The caller should
- // check that the keyframe is not present in a jitter buffer already (i.e.
- // don't request a keyframe if there is one that you will get from the jitter
- // buffer in a moment).
- virtual RTCError RequestKeyFrame(uint64_t channel_id) = 0;
-
- // Sets audio sink. Sink must be unset by calling SetReceiveAudioSink(nullptr)
- // before the media transport is destroyed or before new sink is set.
- virtual void SetReceiveAudioSink(MediaTransportAudioSinkInterface* sink) = 0;
-
- // Registers a video sink. Before destruction of media transport, you must
- // pass a nullptr.
- virtual void SetReceiveVideoSink(MediaTransportVideoSinkInterface* sink) = 0;
-
- // Adds a target bitrate observer. Before media transport is destructed
- // the observer must be unregistered (by calling
- // RemoveTargetTransferRateObserver).
- // A newly registered observer will be called back with the latest recorded
- // target rate, if available.
- virtual void AddTargetTransferRateObserver(
- TargetTransferRateObserver* observer);
-
- // Removes an existing |observer| from observers. If observer was never
- // registered, an error is logged and method does nothing.
- virtual void RemoveTargetTransferRateObserver(
- TargetTransferRateObserver* observer);
-
- // Sets audio packets observer, which gets informed about incoming audio
- // packets. Before destruction, the observer must be unregistered by setting
- // nullptr.
- //
- // This method may be temporary, when the multiplexer is implemented (or
- // multiplexer may use it to demultiplex channel ids).
- virtual void SetFirstAudioPacketReceivedObserver(
- AudioPacketReceivedObserver* observer);
-
- // Intended for receive side. AddRttObserver registers an observer to be
- // called for each RTT measurement, typically once per ACK. Before media
- // transport is destructed the observer must be unregistered.
- virtual void AddRttObserver(MediaTransportRttObserver* observer);
- virtual void RemoveRttObserver(MediaTransportRttObserver* observer);
-
- // Returns the last known target transfer rate as reported to the above
- // observers.
- virtual absl::optional<TargetTransferRate> GetLatestTargetTransferRate();
-
- // Gets the audio packet overhead in bytes. Returned overhead does not include
- // transport overhead (ipv4/6, turn channeldata, tcp/udp, etc.).
- // If the transport is capable of fusing packets together, this overhead
- // might not be a very accurate number.
- // TODO(nisse): Deprecated.
- virtual size_t GetAudioPacketOverhead() const;
-
- // Corresponding observers for audio and video overhead. Before destruction,
- // the observers must be unregistered by setting nullptr.
-
- // Registers an observer for network change events. If the network route is
- // already established when the callback is added, |callback| will be called
- // immediately with the current network route. Before media transport is
- // destroyed, the callback must be removed.
- virtual void AddNetworkChangeCallback(
- MediaTransportNetworkChangeCallback* callback);
- virtual void RemoveNetworkChangeCallback(
- MediaTransportNetworkChangeCallback* callback);
-
- // Sets a state observer callback. Before media transport is destroyed, the
- // callback must be unregistered by setting it to nullptr.
- // A newly registered callback will be called with the current state.
- // Media transport does not invoke this callback concurrently.
- virtual void SetMediaTransportStateCallback(
- MediaTransportStateCallback* callback) = 0;
-
- // Updates allocation limits.
- // TODO(psla): Make abstract when downstream implementation implement it.
- virtual void SetAllocatedBitrateLimits(
- const MediaTransportAllocatedBitrateLimits& limits);
-
- // Sets starting rate.
- // TODO(psla): Make abstract when downstream implementation implement it.
- virtual void SetTargetBitrateLimits(
- const MediaTransportTargetRateConstraints& target_rate_constraints) {}
-
- // TODO(sukhanov): RtcEventLogs.
-};
-
-// If media transport factory is set in peer connection factory, it will be
-// used to create media transport for sending/receiving encoded frames and
-// this transport will be used instead of default RTP/SRTP transport.
-//
-// Currently Media Transport negotiation is not supported in SDP.
-// If application is using media transport, it must negotiate it before
-// setting media transport factory in peer connection.
-class MediaTransportFactory {
- public:
- virtual ~MediaTransportFactory() = default;
-
- // Creates media transport.
- // - Does not take ownership of packet_transport or network_thread.
- // - Does not support group calls, in 1:1 call one side must set
- // is_caller = true and another is_caller = false.
- virtual RTCErrorOr<std::unique_ptr<MediaTransportInterface>>
- CreateMediaTransport(rtc::PacketTransportInternal* packet_transport,
- rtc::Thread* network_thread,
- const MediaTransportSettings& settings);
-
- // Creates a new Media Transport in a disconnected state. If the media
- // transport for the caller is created, one can then call
- // MediaTransportInterface::GetTransportParametersOffer on that new instance.
- // TODO(psla): Make abstract.
- virtual RTCErrorOr<std::unique_ptr<webrtc::MediaTransportInterface>>
- CreateMediaTransport(rtc::Thread* network_thread,
- const MediaTransportSettings& settings);
-
- // Creates a new Datagram Transport in a disconnected state. If the datagram
- // transport for the caller is created, one can then call
- // DatagramTransportInterface::GetTransportParametersOffer on that new
- // instance.
- //
- // TODO(sukhanov): Consider separating media and datagram transport factories.
- // TODO(sukhanov): Move factory to a separate .h file.
- virtual RTCErrorOr<std::unique_ptr<DatagramTransportInterface>>
- CreateDatagramTransport(rtc::Thread* network_thread,
- const MediaTransportSettings& settings);
-
- // Gets a transport name which is supported by the implementation.
- // Different factories should return different transport names, and at runtime
- // it will be checked that different names were used.
- // For example, "rtp" or "generic" may be returned by two different
- // implementations.
- // The value returned by this method must never change in the lifetime of the
- // factory.
- // TODO(psla): Make abstract.
- virtual std::string GetTransportName() const;
-};
-
-} // namespace webrtc
-#endif // API_TRANSPORT_MEDIA_MEDIA_TRANSPORT_INTERFACE_H_
diff --git a/chromium/third_party/webrtc/api/transport/media/video_transport.cc b/chromium/third_party/webrtc/api/transport/media/video_transport.cc
deleted file mode 100644
index a6f53040487..00000000000
--- a/chromium/third_party/webrtc/api/transport/media/video_transport.cc
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Copyright 2019 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// This is EXPERIMENTAL interface for media transport.
-//
-// The goal is to refactor WebRTC code so that audio and video frames
-// are sent / received through the media transport interface. This will
-// enable different media transport implementations, including QUIC-based
-// media transport.
-
-#include "api/transport/media/video_transport.h"
-
-#include <utility>
-
-namespace webrtc {
-
-MediaTransportEncodedVideoFrame::MediaTransportEncodedVideoFrame() = default;
-
-MediaTransportEncodedVideoFrame::~MediaTransportEncodedVideoFrame() = default;
-
-MediaTransportEncodedVideoFrame::MediaTransportEncodedVideoFrame(
- int64_t frame_id,
- std::vector<int64_t> referenced_frame_ids,
- int payload_type,
- const webrtc::EncodedImage& encoded_image)
- : payload_type_(payload_type),
- encoded_image_(encoded_image),
- frame_id_(frame_id),
- referenced_frame_ids_(std::move(referenced_frame_ids)) {}
-
-MediaTransportEncodedVideoFrame& MediaTransportEncodedVideoFrame::operator=(
- const MediaTransportEncodedVideoFrame&) = default;
-
-MediaTransportEncodedVideoFrame& MediaTransportEncodedVideoFrame::operator=(
- MediaTransportEncodedVideoFrame&&) = default;
-
-MediaTransportEncodedVideoFrame::MediaTransportEncodedVideoFrame(
- const MediaTransportEncodedVideoFrame& o)
- : MediaTransportEncodedVideoFrame() {
- *this = o;
-}
-
-MediaTransportEncodedVideoFrame::MediaTransportEncodedVideoFrame(
- MediaTransportEncodedVideoFrame&& o)
- : MediaTransportEncodedVideoFrame() {
- *this = std::move(o);
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/api/transport/media/video_transport.h b/chromium/third_party/webrtc/api/transport/media/video_transport.h
deleted file mode 100644
index affd2e0d382..00000000000
--- a/chromium/third_party/webrtc/api/transport/media/video_transport.h
+++ /dev/null
@@ -1,101 +0,0 @@
-/* Copyright 2019 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// This is EXPERIMENTAL interface for media transport.
-//
-// The goal is to refactor WebRTC code so that audio and video frames
-// are sent / received through the media transport interface. This will
-// enable different media transport implementations, including QUIC-based
-// media transport.
-
-#ifndef API_TRANSPORT_MEDIA_VIDEO_TRANSPORT_H_
-#define API_TRANSPORT_MEDIA_VIDEO_TRANSPORT_H_
-
-#include <vector>
-
-#include "api/video/encoded_image.h"
-
-namespace webrtc {
-
-// Represents encoded video frame, along with the codec information.
-class MediaTransportEncodedVideoFrame final {
- public:
- MediaTransportEncodedVideoFrame(int64_t frame_id,
- std::vector<int64_t> referenced_frame_ids,
- int payload_type,
- const webrtc::EncodedImage& encoded_image);
- ~MediaTransportEncodedVideoFrame();
- MediaTransportEncodedVideoFrame(const MediaTransportEncodedVideoFrame&);
- MediaTransportEncodedVideoFrame& operator=(
- const MediaTransportEncodedVideoFrame& other);
- MediaTransportEncodedVideoFrame& operator=(
- MediaTransportEncodedVideoFrame&& other);
- MediaTransportEncodedVideoFrame(MediaTransportEncodedVideoFrame&&);
-
- int payload_type() const { return payload_type_; }
- const webrtc::EncodedImage& encoded_image() const { return encoded_image_; }
-
- int64_t frame_id() const { return frame_id_; }
- const std::vector<int64_t>& referenced_frame_ids() const {
- return referenced_frame_ids_;
- }
-
- // Hack to workaround lack of ownership of the EncodedImage buffer. If we
- // don't already own the underlying data, make a copy.
- void Retain() { encoded_image_.Retain(); }
-
- private:
- MediaTransportEncodedVideoFrame();
-
- int payload_type_;
-
- // The buffer is not always owned by the encoded image. On the sender it means
- // that it will need to make a copy using the Retain() method, if it wants to
- // deliver it asynchronously.
- webrtc::EncodedImage encoded_image_;
-
- // Frame id uniquely identifies a frame in a stream. It needs to be unique in
- // a given time window (i.e. technically unique identifier for the lifetime of
- // the connection is not needed, but you need to guarantee that remote side
- // got rid of the previous frame_id if you plan to reuse it).
- //
- // It is required by a remote jitter buffer, and is the same as
- // EncodedFrame::id::picture_id.
- //
- // This data must be opaque to the media transport, and media transport should
- // itself not make any assumptions about what it is and its uniqueness.
- int64_t frame_id_;
-
- // A single frame might depend on other frames. This is set of identifiers on
- // which the current frame depends.
- std::vector<int64_t> referenced_frame_ids_;
-};
-
-// Interface for receiving encoded video frames from MediaTransportInterface
-// implementations.
-class MediaTransportVideoSinkInterface {
- public:
- virtual ~MediaTransportVideoSinkInterface() = default;
-
- // Called when new encoded video frame is received.
- virtual void OnData(uint64_t channel_id,
- MediaTransportEncodedVideoFrame frame) = 0;
-};
-
-// Interface for video sender to be notified of received key frame request.
-class MediaTransportKeyFrameRequestCallback {
- public:
- virtual ~MediaTransportKeyFrameRequestCallback() = default;
-
- // Called when a key frame request is received on the transport.
- virtual void OnKeyFrameRequested(uint64_t channel_id) = 0;
-};
-
-} // namespace webrtc
-#endif // API_TRANSPORT_MEDIA_VIDEO_TRANSPORT_H_
diff --git a/chromium/third_party/webrtc/api/transport/rtp/BUILD.gn b/chromium/third_party/webrtc/api/transport/rtp/BUILD.gn
index b0849502c81..7b01169360c 100644
--- a/chromium/third_party/webrtc/api/transport/rtp/BUILD.gn
+++ b/chromium/third_party/webrtc/api/transport/rtp/BUILD.gn
@@ -14,15 +14,20 @@ rtc_source_set("rtp_source") {
deps = [
"../../../api:rtp_headers",
"../../../rtc_base:checks",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_source_set("dependency_descriptor") {
visibility = [ "*" ]
- sources = [ "dependency_descriptor.h" ]
- deps = [
+ sources = [
+ "dependency_descriptor.cc",
+ "dependency_descriptor.h",
+ ]
+ deps = [ "../../../rtc_base:checks" ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/container:inlined_vector",
+ "//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
}
diff --git a/chromium/third_party/webrtc/api/transport/rtp/dependency_descriptor.cc b/chromium/third_party/webrtc/api/transport/rtp/dependency_descriptor.cc
new file mode 100644
index 00000000000..2a9b6d9a713
--- /dev/null
+++ b/chromium/third_party/webrtc/api/transport/rtp/dependency_descriptor.cc
@@ -0,0 +1,54 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/transport/rtp/dependency_descriptor.h"
+
+#include "absl/container/inlined_vector.h"
+#include "absl/strings/string_view.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+constexpr int DependencyDescriptor::kMaxSpatialIds;
+constexpr int DependencyDescriptor::kMaxTemporalIds;
+constexpr int DependencyDescriptor::kMaxTemplates;
+constexpr int DependencyDescriptor::kMaxDecodeTargets;
+
+namespace webrtc_impl {
+
+absl::InlinedVector<DecodeTargetIndication, 10> StringToDecodeTargetIndications(
+ absl::string_view symbols) {
+ absl::InlinedVector<DecodeTargetIndication, 10> dtis;
+ dtis.reserve(symbols.size());
+ for (char symbol : symbols) {
+ DecodeTargetIndication indication;
+ switch (symbol) {
+ case '-':
+ indication = DecodeTargetIndication::kNotPresent;
+ break;
+ case 'D':
+ indication = DecodeTargetIndication::kDiscardable;
+ break;
+ case 'R':
+ indication = DecodeTargetIndication::kRequired;
+ break;
+ case 'S':
+ indication = DecodeTargetIndication::kSwitch;
+ break;
+ default:
+ RTC_NOTREACHED();
+ }
+ dtis.push_back(indication);
+ }
+ return dtis;
+}
+
+} // namespace webrtc_impl
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/api/transport/rtp/dependency_descriptor.h b/chromium/third_party/webrtc/api/transport/rtp/dependency_descriptor.h
index a488f56dfd0..a30608c1628 100644
--- a/chromium/third_party/webrtc/api/transport/rtp/dependency_descriptor.h
+++ b/chromium/third_party/webrtc/api/transport/rtp/dependency_descriptor.h
@@ -13,10 +13,12 @@
#include <stdint.h>
+#include <initializer_list>
#include <memory>
#include <vector>
#include "absl/container/inlined_vector.h"
+#include "absl/strings/string_view.h"
#include "absl/types/optional.h"
namespace webrtc {
@@ -52,6 +54,13 @@ enum class DecodeTargetIndication {
};
struct FrameDependencyTemplate {
+ // Setters are named briefly to chain them when building the template.
+ FrameDependencyTemplate& S(int spatial_layer);
+ FrameDependencyTemplate& T(int temporal_layer);
+ FrameDependencyTemplate& Dtis(absl::string_view dtis);
+ FrameDependencyTemplate& FrameDiffs(std::initializer_list<int> diffs);
+ FrameDependencyTemplate& ChainDiffs(std::initializer_list<int> diffs);
+
friend bool operator==(const FrameDependencyTemplate& lhs,
const FrameDependencyTemplate& rhs) {
return lhs.spatial_id == rhs.spatial_id &&
@@ -90,6 +99,11 @@ struct FrameDependencyStructure {
};
struct DependencyDescriptor {
+ static constexpr int kMaxSpatialIds = 4;
+ static constexpr int kMaxTemporalIds = 8;
+ static constexpr int kMaxDecodeTargets = 32;
+ static constexpr int kMaxTemplates = 64;
+
bool first_packet_in_frame = true;
bool last_packet_in_frame = true;
int frame_number = 0;
@@ -99,6 +113,37 @@ struct DependencyDescriptor {
std::unique_ptr<FrameDependencyStructure> attached_structure;
};
+// Below are implementation details.
+namespace webrtc_impl {
+absl::InlinedVector<DecodeTargetIndication, 10> StringToDecodeTargetIndications(
+ absl::string_view indication_symbols);
+} // namespace webrtc_impl
+
+inline FrameDependencyTemplate& FrameDependencyTemplate::S(int spatial_layer) {
+ this->spatial_id = spatial_layer;
+ return *this;
+}
+inline FrameDependencyTemplate& FrameDependencyTemplate::T(int temporal_layer) {
+ this->temporal_id = temporal_layer;
+ return *this;
+}
+inline FrameDependencyTemplate& FrameDependencyTemplate::Dtis(
+ absl::string_view dtis) {
+ this->decode_target_indications =
+ webrtc_impl::StringToDecodeTargetIndications(dtis);
+ return *this;
+}
+inline FrameDependencyTemplate& FrameDependencyTemplate::FrameDiffs(
+ std::initializer_list<int> diffs) {
+ this->frame_diffs.assign(diffs.begin(), diffs.end());
+ return *this;
+}
+inline FrameDependencyTemplate& FrameDependencyTemplate::ChainDiffs(
+ std::initializer_list<int> diffs) {
+ this->chain_diffs.assign(diffs.begin(), diffs.end());
+ return *this;
+}
+
} // namespace webrtc
#endif // API_TRANSPORT_RTP_DEPENDENCY_DESCRIPTOR_H_
diff --git a/chromium/third_party/webrtc/api/transport/test/mock_network_control.h b/chromium/third_party/webrtc/api/transport/test/mock_network_control.h
index 54a416cb770..f613004fb7a 100644
--- a/chromium/third_party/webrtc/api/transport/test/mock_network_control.h
+++ b/chromium/third_party/webrtc/api/transport/test/mock_network_control.h
@@ -18,11 +18,16 @@ namespace webrtc {
class MockNetworkStateEstimator : public NetworkStateEstimator {
public:
- MOCK_METHOD0(GetCurrentEstimate, absl::optional<NetworkStateEstimate>());
- MOCK_METHOD1(OnTransportPacketsFeedback,
- void(const TransportPacketsFeedback&));
- MOCK_METHOD1(OnReceivedPacket, void(const PacketResult&));
- MOCK_METHOD1(OnRouteChange, void(const NetworkRouteChange&));
+ MOCK_METHOD(absl::optional<NetworkStateEstimate>,
+ GetCurrentEstimate,
+ (),
+ (override));
+ MOCK_METHOD(void,
+ OnTransportPacketsFeedback,
+ (const TransportPacketsFeedback&),
+ (override));
+ MOCK_METHOD(void, OnReceivedPacket, (const PacketResult&), (override));
+ MOCK_METHOD(void, OnRouteChange, (const NetworkRouteChange&), (override));
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/api/units/data_rate_unittest.cc b/chromium/third_party/webrtc/api/units/data_rate_unittest.cc
index 4a6dd21af36..f77b3702d4d 100644
--- a/chromium/third_party/webrtc/api/units/data_rate_unittest.cc
+++ b/chromium/third_party/webrtc/api/units/data_rate_unittest.cc
@@ -175,7 +175,7 @@ TEST(UnitConversionTest, DataRateAndDataSizeAndFrequency) {
EXPECT_EQ((rate_b / freq_a).bytes(), kBitsPerSecond / kHertz / 8);
}
-TEST(UnitConversionTest, DivisionFailsOnLargeSize) {
+TEST(UnitConversionDeathTest, DivisionFailsOnLargeSize) {
// Note that the failure is expected since the current implementation is
// implementated in a way that does not support division of large sizes. If
// the implementation is changed, this test can safely be removed.
diff --git a/chromium/third_party/webrtc/api/video/BUILD.gn b/chromium/third_party/webrtc/api/video/BUILD.gn
index 290c2f2abba..ee7e51d4c42 100644
--- a/chromium/third_party/webrtc/api/video/BUILD.gn
+++ b/chromium/third_party/webrtc/api/video/BUILD.gn
@@ -21,7 +21,6 @@ rtc_library("video_rtp_headers") {
"hdr_metadata.h",
"video_content_type.cc",
"video_content_type.h",
- "video_frame_marking.h",
"video_rotation.h",
"video_timing.cc",
"video_timing.h",
@@ -31,8 +30,8 @@ rtc_library("video_rtp_headers") {
"..:array_view",
"../../rtc_base:rtc_base_approved",
"../../rtc_base/system:rtc_export",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("video_frame") {
@@ -56,8 +55,8 @@ rtc_library("video_frame") {
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
"../../rtc_base/system:rtc_export",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
if (is_android) {
@@ -141,8 +140,8 @@ rtc_library("encoded_image") {
"../../rtc_base:deprecation",
"../../rtc_base:rtc_base_approved",
"../../rtc_base/system:rtc_export",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("encoded_frame") {
@@ -173,8 +172,8 @@ rtc_library("video_bitrate_allocation") {
"../../rtc_base:safe_conversions",
"../../rtc_base:stringutils",
"../../rtc_base/system:rtc_export",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("video_bitrate_allocator") {
@@ -209,8 +208,8 @@ rtc_source_set("video_stream_decoder") {
"../task_queue",
"../units:time_delta",
"../video_codecs:video_codecs_api",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("video_stream_decoder_create") {
@@ -237,7 +236,10 @@ rtc_library("video_adaptation") {
"video_adaptation_reason.h",
]
- deps = [ "../../rtc_base:checks" ]
+ deps = [
+ "../../rtc_base:checks",
+ "../../rtc_base:stringutils",
+ ]
}
rtc_source_set("video_stream_encoder") {
@@ -256,10 +258,29 @@ rtc_source_set("video_stream_encoder") {
":video_codec_constants",
":video_frame",
"..:rtp_parameters",
+ "..:scoped_refptr",
"../:fec_controller_api",
"../:rtp_parameters",
+ "../adaptation:resource_adaptation_api",
"../units:data_rate",
"../video_codecs:video_codecs_api",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+
+rtc_source_set("video_frame_metadata") {
+ visibility = [ "*" ]
+ sources = [
+ "video_frame_metadata.cc",
+ "video_frame_metadata.h",
+ ]
+ deps = [
+ "..:array_view",
+ "../../modules/rtp_rtcp:rtp_video_header",
+ "../transport/rtp:dependency_descriptor",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/container:inlined_vector",
"//third_party/abseil-cpp/absl/types:optional",
]
}
@@ -299,16 +320,21 @@ rtc_library("builtin_video_bitrate_allocator_factory") {
"../../modules/video_coding:video_coding_utility",
"../../modules/video_coding:webrtc_vp9_helpers",
"../video_codecs:video_codecs_api",
- "//third_party/abseil-cpp/absl/base:core_headers",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers" ]
}
if (rtc_include_tests) {
rtc_library("video_unittests") {
testonly = true
- sources = [ "video_stream_decoder_create_unittest.cc" ]
+ sources = [
+ "video_frame_metadata_unittest.cc",
+ "video_stream_decoder_create_unittest.cc",
+ ]
deps = [
+ ":video_frame_metadata",
":video_stream_decoder_create",
+ "../../modules/rtp_rtcp:rtp_video_header",
"../../test:test_support",
"../task_queue:default_task_queue_factory",
"../video_codecs:builtin_video_decoder_factory",
diff --git a/chromium/third_party/webrtc/api/video/OWNERS b/chromium/third_party/webrtc/api/video/OWNERS
index 315f85e7d00..e4a16c360a1 100644
--- a/chromium/third_party/webrtc/api/video/OWNERS
+++ b/chromium/third_party/webrtc/api/video/OWNERS
@@ -1,3 +1,4 @@
+brandtr@webrtc.org
magjed@webrtc.org
nisse@webrtc.org
diff --git a/chromium/third_party/webrtc/api/video/test/BUILD.gn b/chromium/third_party/webrtc/api/video/test/BUILD.gn
index 5f697a081c8..5633371102b 100644
--- a/chromium/third_party/webrtc/api/video/test/BUILD.gn
+++ b/chromium/third_party/webrtc/api/video/test/BUILD.gn
@@ -21,8 +21,8 @@ rtc_library("rtc_api_video_unittests") {
"..:video_frame",
"..:video_rtp_headers",
"../../../test:test_support",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_source_set("mock_recordable_encoded_frame") {
diff --git a/chromium/third_party/webrtc/api/video/test/mock_recordable_encoded_frame.h b/chromium/third_party/webrtc/api/video/test/mock_recordable_encoded_frame.h
index 1788a493c6d..2178932d2a6 100644
--- a/chromium/third_party/webrtc/api/video/test/mock_recordable_encoded_frame.h
+++ b/chromium/third_party/webrtc/api/video/test/mock_recordable_encoded_frame.h
@@ -17,13 +17,18 @@
namespace webrtc {
class MockRecordableEncodedFrame : public RecordableEncodedFrame {
public:
- MOCK_CONST_METHOD0(encoded_buffer,
- rtc::scoped_refptr<const EncodedImageBufferInterface>());
- MOCK_CONST_METHOD0(color_space, absl::optional<webrtc::ColorSpace>());
- MOCK_CONST_METHOD0(codec, VideoCodecType());
- MOCK_CONST_METHOD0(is_key_frame, bool());
- MOCK_CONST_METHOD0(resolution, EncodedResolution());
- MOCK_CONST_METHOD0(render_time, Timestamp());
+ MOCK_METHOD(rtc::scoped_refptr<const EncodedImageBufferInterface>,
+ encoded_buffer,
+ (),
+ (const, override));
+ MOCK_METHOD(absl::optional<webrtc::ColorSpace>,
+ color_space,
+ (),
+ (const, override));
+ MOCK_METHOD(VideoCodecType, codec, (), (const, override));
+ MOCK_METHOD(bool, is_key_frame, (), (const, override));
+ MOCK_METHOD(EncodedResolution, resolution, (), (const, override));
+ MOCK_METHOD(Timestamp, render_time, (), (const, override));
};
} // namespace webrtc
#endif // API_VIDEO_TEST_MOCK_RECORDABLE_ENCODED_FRAME_H_
diff --git a/chromium/third_party/webrtc/api/video/video_adaptation_counters.cc b/chromium/third_party/webrtc/api/video/video_adaptation_counters.cc
index 25e0bee1ff8..df1769d5d46 100644
--- a/chromium/third_party/webrtc/api/video/video_adaptation_counters.cc
+++ b/chromium/third_party/webrtc/api/video/video_adaptation_counters.cc
@@ -10,6 +10,8 @@
#include "api/video/video_adaptation_counters.h"
+#include "rtc_base/strings/string_builder.h"
+
namespace webrtc {
bool VideoAdaptationCounters::operator==(
@@ -30,4 +32,11 @@ VideoAdaptationCounters VideoAdaptationCounters::operator+(
fps_adaptations + other.fps_adaptations);
}
+std::string VideoAdaptationCounters::ToString() const {
+ rtc::StringBuilder ss;
+ ss << "{ res=" << resolution_adaptations << " fps=" << fps_adaptations
+ << " }";
+ return ss.Release();
+}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/api/video/video_adaptation_counters.h b/chromium/third_party/webrtc/api/video/video_adaptation_counters.h
index eff0baaa21f..2dea902f2f1 100644
--- a/chromium/third_party/webrtc/api/video/video_adaptation_counters.h
+++ b/chromium/third_party/webrtc/api/video/video_adaptation_counters.h
@@ -11,6 +11,8 @@
#ifndef API_VIDEO_VIDEO_ADAPTATION_COUNTERS_H_
#define API_VIDEO_VIDEO_ADAPTATION_COUNTERS_H_
+#include <string>
+
#include "rtc_base/checks.h"
namespace webrtc {
@@ -33,6 +35,8 @@ struct VideoAdaptationCounters {
VideoAdaptationCounters operator+(const VideoAdaptationCounters& other) const;
+ std::string ToString() const;
+
int resolution_adaptations;
int fps_adaptations;
};
diff --git a/chromium/third_party/webrtc/api/video/video_frame_marking.h b/chromium/third_party/webrtc/api/video/video_frame_marking.h
deleted file mode 100644
index 2a34852f1d6..00000000000
--- a/chromium/third_party/webrtc/api/video/video_frame_marking.h
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef API_VIDEO_VIDEO_FRAME_MARKING_H_
-#define API_VIDEO_VIDEO_FRAME_MARKING_H_
-
-namespace webrtc {
-
-struct FrameMarking {
- bool start_of_frame;
- bool end_of_frame;
- bool independent_frame;
- bool discardable_frame;
- bool base_layer_sync;
- uint8_t temporal_id;
- uint8_t layer_id;
- uint8_t tl0_pic_idx;
-};
-
-} // namespace webrtc
-
-#endif // API_VIDEO_VIDEO_FRAME_MARKING_H_
diff --git a/chromium/third_party/webrtc/api/video/video_frame_metadata.cc b/chromium/third_party/webrtc/api/video/video_frame_metadata.cc
new file mode 100644
index 00000000000..df82875eb95
--- /dev/null
+++ b/chromium/third_party/webrtc/api/video/video_frame_metadata.cc
@@ -0,0 +1,28 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video/video_frame_metadata.h"
+
+#include "modules/rtp_rtcp/source/rtp_video_header.h"
+
+namespace webrtc {
+
+VideoFrameMetadata::VideoFrameMetadata(const RTPVideoHeader& header)
+ : width_(header.width), height_(header.height) {
+ if (header.generic) {
+ frame_id_ = header.generic->frame_id;
+ spatial_index_ = header.generic->spatial_index;
+ temporal_index_ = header.generic->temporal_index;
+ frame_dependencies_ = header.generic->dependencies;
+ decode_target_indications_ = header.generic->decode_target_indications;
+ }
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/api/video/video_frame_metadata.h b/chromium/third_party/webrtc/api/video/video_frame_metadata.h
new file mode 100644
index 00000000000..2e9309841b0
--- /dev/null
+++ b/chromium/third_party/webrtc/api/video/video_frame_metadata.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_VIDEO_FRAME_METADATA_H_
+#define API_VIDEO_VIDEO_FRAME_METADATA_H_
+
+#include <cstdint>
+
+#include "absl/container/inlined_vector.h"
+#include "absl/types/optional.h"
+#include "api/array_view.h"
+#include "api/transport/rtp/dependency_descriptor.h"
+
+namespace webrtc {
+
+struct RTPVideoHeader;
+
+// A subset of metadata from the RTP video header, exposed in insertable streams
+// API.
+class VideoFrameMetadata {
+ public:
+ explicit VideoFrameMetadata(const RTPVideoHeader& header);
+ VideoFrameMetadata(const VideoFrameMetadata&) = default;
+ VideoFrameMetadata& operator=(const VideoFrameMetadata&) = default;
+
+ uint16_t GetWidth() const { return width_; }
+ uint16_t GetHeight() const { return height_; }
+ absl::optional<int64_t> GetFrameId() const { return frame_id_; }
+ int GetSpatialIndex() const { return spatial_index_; }
+ int GetTemporalIndex() const { return temporal_index_; }
+
+ rtc::ArrayView<const int64_t> GetFrameDependencies() const {
+ return frame_dependencies_;
+ }
+
+ rtc::ArrayView<const DecodeTargetIndication> GetDecodeTargetIndications()
+ const {
+ return decode_target_indications_;
+ }
+
+ private:
+ int16_t width_;
+ int16_t height_;
+ absl::optional<int64_t> frame_id_;
+ int spatial_index_ = 0;
+ int temporal_index_ = 0;
+ absl::InlinedVector<int64_t, 5> frame_dependencies_;
+ absl::InlinedVector<DecodeTargetIndication, 10> decode_target_indications_;
+};
+} // namespace webrtc
+
+#endif // API_VIDEO_VIDEO_FRAME_METADATA_H_
diff --git a/chromium/third_party/webrtc/api/video/video_frame_metadata_unittest.cc b/chromium/third_party/webrtc/api/video/video_frame_metadata_unittest.cc
new file mode 100644
index 00000000000..7a808e1ea9b
--- /dev/null
+++ b/chromium/third_party/webrtc/api/video/video_frame_metadata_unittest.cc
@@ -0,0 +1,120 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video/video_frame_metadata.h"
+
+#include "modules/rtp_rtcp/source/rtp_video_header.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace {
+
+using ::testing::ElementsAre;
+using ::testing::IsEmpty;
+
+TEST(VideoFrameMetadata, GetWidthReturnsCorrectValue) {
+ RTPVideoHeader video_header;
+ video_header.width = 1280u;
+ VideoFrameMetadata metadata(video_header);
+ EXPECT_EQ(metadata.GetWidth(), video_header.width);
+}
+
+TEST(VideoFrameMetadata, GetHeightReturnsCorrectValue) {
+ RTPVideoHeader video_header;
+ video_header.height = 720u;
+ VideoFrameMetadata metadata(video_header);
+ EXPECT_EQ(metadata.GetHeight(), video_header.height);
+}
+
+TEST(VideoFrameMetadata, GetFrameIdReturnsCorrectValue) {
+ RTPVideoHeader video_header;
+ RTPVideoHeader::GenericDescriptorInfo& generic =
+ video_header.generic.emplace();
+ generic.frame_id = 10;
+ VideoFrameMetadata metadata(video_header);
+ EXPECT_EQ(metadata.GetFrameId().value(), 10);
+}
+
+TEST(VideoFrameMetadata, HasNoFrameIdForHeaderWithoutGeneric) {
+ RTPVideoHeader video_header;
+ VideoFrameMetadata metadata(video_header);
+ ASSERT_FALSE(video_header.generic);
+ EXPECT_EQ(metadata.GetFrameId(), absl::nullopt);
+}
+
+TEST(VideoFrameMetadata, GetSpatialIndexReturnsCorrectValue) {
+ RTPVideoHeader video_header;
+ RTPVideoHeader::GenericDescriptorInfo& generic =
+ video_header.generic.emplace();
+ generic.spatial_index = 2;
+ VideoFrameMetadata metadata(video_header);
+ EXPECT_EQ(metadata.GetSpatialIndex(), 2);
+}
+
+TEST(VideoFrameMetadata, SpatialIndexIsZeroForHeaderWithoutGeneric) {
+ RTPVideoHeader video_header;
+ VideoFrameMetadata metadata(video_header);
+ ASSERT_FALSE(video_header.generic);
+ EXPECT_EQ(metadata.GetSpatialIndex(), 0);
+}
+
+TEST(VideoFrameMetadata, GetTemporalIndexReturnsCorrectValue) {
+ RTPVideoHeader video_header;
+ RTPVideoHeader::GenericDescriptorInfo& generic =
+ video_header.generic.emplace();
+ generic.temporal_index = 3;
+ VideoFrameMetadata metadata(video_header);
+ EXPECT_EQ(metadata.GetTemporalIndex(), 3);
+}
+
+TEST(VideoFrameMetadata, TemporalIndexIsZeroForHeaderWithoutGeneric) {
+ RTPVideoHeader video_header;
+ VideoFrameMetadata metadata(video_header);
+ ASSERT_FALSE(video_header.generic);
+ EXPECT_EQ(metadata.GetTemporalIndex(), 0);
+}
+
+TEST(VideoFrameMetadata, GetFrameDependenciesReturnsCorrectValue) {
+ RTPVideoHeader video_header;
+ RTPVideoHeader::GenericDescriptorInfo& generic =
+ video_header.generic.emplace();
+ generic.dependencies = {5, 6, 7};
+ VideoFrameMetadata metadata(video_header);
+ EXPECT_THAT(metadata.GetFrameDependencies(), ElementsAre(5, 6, 7));
+}
+
+TEST(VideoFrameMetadata, FrameDependencyVectorIsEmptyForHeaderWithoutGeneric) {
+ RTPVideoHeader video_header;
+ VideoFrameMetadata metadata(video_header);
+ ASSERT_FALSE(video_header.generic);
+ EXPECT_THAT(metadata.GetFrameDependencies(), IsEmpty());
+}
+
+TEST(VideoFrameMetadata, GetDecodeTargetIndicationsReturnsCorrectValue) {
+ RTPVideoHeader video_header;
+ RTPVideoHeader::GenericDescriptorInfo& generic =
+ video_header.generic.emplace();
+ generic.decode_target_indications = {DecodeTargetIndication::kSwitch};
+ VideoFrameMetadata metadata(video_header);
+ EXPECT_THAT(metadata.GetDecodeTargetIndications(),
+ ElementsAre(DecodeTargetIndication::kSwitch));
+}
+
+TEST(VideoFrameMetadata,
+ DecodeTargetIndicationsVectorIsEmptyForHeaderWithoutGeneric) {
+ RTPVideoHeader video_header;
+ VideoFrameMetadata metadata(video_header);
+ ASSERT_FALSE(video_header.generic);
+ EXPECT_THAT(metadata.GetDecodeTargetIndications(), IsEmpty());
+}
+
+} // namespace
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/api/video/video_stream_encoder_interface.h b/chromium/third_party/webrtc/api/video/video_stream_encoder_interface.h
index 8e1df0f858a..d8dd8e15994 100644
--- a/chromium/third_party/webrtc/api/video/video_stream_encoder_interface.h
+++ b/chromium/third_party/webrtc/api/video/video_stream_encoder_interface.h
@@ -13,8 +13,10 @@
#include <vector>
+#include "api/adaptation/resource.h"
#include "api/fec_controller_override.h"
#include "api/rtp_parameters.h" // For DegradationPreference.
+#include "api/scoped_refptr.h"
#include "api/units/data_rate.h"
#include "api/video/video_bitrate_allocator.h"
#include "api/video/video_sink_interface.h"
@@ -49,6 +51,15 @@ class VideoStreamEncoderInterface : public rtc::VideoSinkInterface<VideoFrame> {
int min_transmit_bitrate_bps) = 0;
};
+ // If the resource is overusing, the VideoStreamEncoder will try to reduce
+ // resolution or frame rate until no resource is overusing.
+ // TODO(https://crbug.com/webrtc/11565): When the ResourceAdaptationProcessor
+ // is moved to Call this method could be deleted altogether in favor of
+ // Call-level APIs only.
+ virtual void AddAdaptationResource(rtc::scoped_refptr<Resource> resource) = 0;
+ virtual std::vector<rtc::scoped_refptr<Resource>>
+ GetAdaptationResources() = 0;
+
// Sets the source that will provide video frames to the VideoStreamEncoder's
// OnFrame method. |degradation_preference| control whether or not resolution
// or frame rate may be reduced. The VideoStreamEncoder registers itself with
diff --git a/chromium/third_party/webrtc/api/video_codecs/BUILD.gn b/chromium/third_party/webrtc/api/video_codecs/BUILD.gn
index 21a5f6faa07..597478ba0a8 100644
--- a/chromium/third_party/webrtc/api/video_codecs/BUILD.gn
+++ b/chromium/third_party/webrtc/api/video_codecs/BUILD.gn
@@ -49,6 +49,8 @@ rtc_library("video_codecs_api") {
"../video:video_codec_constants",
"../video:video_frame",
"../video:video_rtp_headers",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/container:inlined_vector",
"//third_party/abseil-cpp/absl/strings",
@@ -100,8 +102,8 @@ rtc_library("builtin_video_encoder_factory") {
"../../media:rtc_media_base",
"../../rtc_base:checks",
"../../rtc_base/system:rtc_export",
- "//third_party/abseil-cpp/absl/strings",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
rtc_library("vp8_temporal_layers_factory") {
@@ -148,6 +150,8 @@ rtc_library("rtc_software_fallback_wrappers") {
"../video:video_bitrate_allocation",
"../video:video_frame",
"../video:video_rtp_headers",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/base:core_headers",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
diff --git a/chromium/third_party/webrtc/api/video_codecs/test/BUILD.gn b/chromium/third_party/webrtc/api/video_codecs/test/BUILD.gn
index 243b78267fc..10b18b6e5b5 100644
--- a/chromium/third_party/webrtc/api/video_codecs/test/BUILD.gn
+++ b/chromium/third_party/webrtc/api/video_codecs/test/BUILD.gn
@@ -40,5 +40,6 @@ if (rtc_include_tests) {
"../../video:video_rtp_headers",
"//testing/gtest",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
}
diff --git a/chromium/third_party/webrtc/api/video_codecs/video_codec.h b/chromium/third_party/webrtc/api/video_codecs/video_codec.h
index 330bbbce199..c07fae9b8bb 100644
--- a/chromium/third_party/webrtc/api/video_codecs/video_codec.h
+++ b/chromium/third_party/webrtc/api/video_codecs/video_codec.h
@@ -19,7 +19,7 @@
#include "absl/types/optional.h"
#include "api/video/video_bitrate_allocation.h"
#include "api/video/video_codec_type.h"
-#include "common_types.h" // NOLINT(build/include)
+#include "common_types.h" // NOLINT(build/include_directory)
#include "rtc_base/system/rtc_export.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/api/voip/BUILD.gn b/chromium/third_party/webrtc/api/voip/BUILD.gn
index 2c5f71c988e..6f92ed67f4e 100644
--- a/chromium/third_party/webrtc/api/voip/BUILD.gn
+++ b/chromium/third_party/webrtc/api/voip/BUILD.gn
@@ -19,8 +19,8 @@ rtc_source_set("voip_api") {
deps = [
"..:array_view",
"../audio_codecs:audio_codecs_api",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("voip_engine_factory") {
diff --git a/chromium/third_party/webrtc/audio/BUILD.gn b/chromium/third_party/webrtc/audio/BUILD.gn
index 725128bb1a8..7df741e9a78 100644
--- a/chromium/third_party/webrtc/audio/BUILD.gn
+++ b/chromium/third_party/webrtc/audio/BUILD.gn
@@ -71,6 +71,7 @@ rtc_library("audio") {
"../modules/audio_coding:audio_coding_module_typedefs",
"../modules/audio_coding:audio_encoder_cng",
"../modules/audio_coding:audio_network_adaptor_config",
+ "../modules/audio_coding:red",
"../modules/audio_device",
"../modules/audio_processing",
"../modules/audio_processing:api",
@@ -95,6 +96,8 @@ rtc_library("audio") {
"../system_wrappers:field_trial",
"../system_wrappers:metrics",
"utility:audio_frame_operations",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/types:optional",
]
diff --git a/chromium/third_party/webrtc/audio/audio_send_stream.cc b/chromium/third_party/webrtc/audio/audio_send_stream.cc
index 8730c452582..42705aa99a5 100644
--- a/chromium/third_party/webrtc/audio/audio_send_stream.cc
+++ b/chromium/third_party/webrtc/audio/audio_send_stream.cc
@@ -31,6 +31,7 @@
#include "logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h"
#include "logging/rtc_event_log/rtc_stream_config.h"
#include "modules/audio_coding/codecs/cng/audio_encoder_cng.h"
+#include "modules/audio_coding/codecs/red/audio_encoder_copy_red.h"
#include "modules/audio_processing/include/audio_processing.h"
#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
#include "rtc_base/checks.h"
@@ -544,10 +545,12 @@ void AudioSendStream::SetTransportOverhead(
}
void AudioSendStream::UpdateOverheadForEncoder() {
- const size_t overhead_per_packet_bytes = GetPerPacketOverheadBytes();
- if (overhead_per_packet_bytes == 0) {
- return; // Overhead is not known yet, do not tell the encoder.
+ size_t overhead_per_packet_bytes = GetPerPacketOverheadBytes();
+ if (overhead_per_packet_ == overhead_per_packet_bytes) {
+ return;
}
+ overhead_per_packet_ = overhead_per_packet_bytes;
+
channel_send_->CallEncoder([&](AudioEncoder* encoder) {
encoder->OnReceivedOverhead(overhead_per_packet_bytes);
});
@@ -644,7 +647,7 @@ bool AudioSendStream::SetupSendCodec(const Config& new_config) {
}
}
- // Wrap the encoder in a an AudioEncoderCNG, if VAD is enabled.
+ // Wrap the encoder in an AudioEncoderCNG, if VAD is enabled.
if (spec.cng_payload_type) {
AudioEncoderCngConfig cng_config;
cng_config.num_channels = encoder->NumChannels();
@@ -657,6 +660,14 @@ bool AudioSendStream::SetupSendCodec(const Config& new_config) {
new_config.send_codec_spec->format.clockrate_hz);
}
+ // Wrap the encoder in a RED encoder, if RED is enabled.
+ if (spec.red_payload_type) {
+ AudioEncoderCopyRed::Config red_config;
+ red_config.payload_type = *spec.red_payload_type;
+ red_config.speech_encoder = std::move(encoder);
+ encoder = std::make_unique<AudioEncoderCopyRed>(std::move(red_config));
+ }
+
// Set currently known overhead (used in ANA, opus only).
// If overhead changes later, it will be updated in UpdateOverheadForEncoder.
{
diff --git a/chromium/third_party/webrtc/audio/audio_send_stream.h b/chromium/third_party/webrtc/audio/audio_send_stream.h
index 92e9a7fb165..13166d47e71 100644
--- a/chromium/third_party/webrtc/audio/audio_send_stream.h
+++ b/chromium/third_party/webrtc/audio/audio_send_stream.h
@@ -20,7 +20,7 @@
#include "call/audio_send_stream.h"
#include "call/audio_state.h"
#include "call/bitrate_allocator.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h"
#include "rtc_base/constructor_magic.h"
#include "rtc_base/experiments/struct_parameters_parser.h"
#include "rtc_base/race_checker.h"
@@ -175,7 +175,7 @@ class AudioSendStream final : public webrtc::AudioSendStream,
RTC_GUARDED_BY(worker_queue_);
RtpTransportControllerSendInterface* const rtp_transport_;
- RtpRtcp* const rtp_rtcp_module_;
+ RtpRtcpInterface* const rtp_rtcp_module_;
absl::optional<RtpState> const suspended_rtp_state_;
// RFC 5285: Each distinct extension MUST have a unique ID. The value 0 is
@@ -195,6 +195,7 @@ class AudioSendStream final : public webrtc::AudioSendStream,
static int TransportSeqNumId(const Config& config);
rtc::CriticalSection overhead_per_packet_lock_;
+ size_t overhead_per_packet_ RTC_GUARDED_BY(overhead_per_packet_lock_) = 0;
// Current transport overhead (ICE, TURN, etc.)
size_t transport_overhead_per_packet_bytes_
diff --git a/chromium/third_party/webrtc/audio/audio_send_stream_unittest.cc b/chromium/third_party/webrtc/audio/audio_send_stream_unittest.cc
index 334fdf50f7d..d094198721d 100644
--- a/chromium/third_party/webrtc/audio/audio_send_stream_unittest.cc
+++ b/chromium/third_party/webrtc/audio/audio_send_stream_unittest.cc
@@ -89,7 +89,10 @@ const DataRate kMaxOverheadRate = kOverheadPerPacket / kMinFrameLength;
class MockLimitObserver : public BitrateAllocator::LimitObserver {
public:
- MOCK_METHOD1(OnAllocationLimitsChanged, void(BitrateAllocationLimits));
+ MOCK_METHOD(void,
+ OnAllocationLimitsChanged,
+ (BitrateAllocationLimits),
+ (override));
};
std::unique_ptr<MockAudioEncoder> SetupAudioEncoderMock(
@@ -200,7 +203,7 @@ struct ConfigHelper {
return *static_cast<MockAudioEncoderFactory*>(
stream_config_.encoder_factory.get());
}
- MockRtpRtcp* rtp_rtcp() { return &rtp_rtcp_; }
+ MockRtpRtcpInterface* rtp_rtcp() { return &rtp_rtcp_; }
MockChannelSend* channel_send() { return channel_send_; }
RtpTransportControllerSendInterface* transport() { return &rtp_transport_; }
@@ -247,12 +250,12 @@ struct ConfigHelper {
void SetupMockForSetupSendCodec(bool expect_set_encoder_call) {
if (expect_set_encoder_call) {
- EXPECT_CALL(*channel_send_, SetEncoderForMock(_, _))
- .WillOnce(Invoke(
- [this](int payload_type, std::unique_ptr<AudioEncoder>* encoder) {
- this->audio_encoder_ = std::move(*encoder);
+ EXPECT_CALL(*channel_send_, SetEncoder)
+ .WillOnce(
+ [this](int payload_type, std::unique_ptr<AudioEncoder> encoder) {
+ this->audio_encoder_ = std::move(encoder);
return true;
- }));
+ });
}
}
@@ -329,7 +332,7 @@ struct ConfigHelper {
::testing::StrictMock<MockRtcpBandwidthObserver> bandwidth_observer_;
::testing::NiceMock<MockRtcEventLog> event_log_;
::testing::NiceMock<MockRtpTransportControllerSend> rtp_transport_;
- ::testing::NiceMock<MockRtpRtcp> rtp_rtcp_;
+ ::testing::NiceMock<MockRtpRtcpInterface> rtp_rtcp_;
::testing::NiceMock<MockLimitObserver> limit_observer_;
BitrateAllocator bitrate_allocator_;
// |worker_queue| is defined last to ensure all pending tasks are cancelled
@@ -368,6 +371,7 @@ TEST(AudioSendStreamTest, ConfigToString) {
config.send_codec_spec->nack_enabled = true;
config.send_codec_spec->transport_cc_enabled = false;
config.send_codec_spec->cng_payload_type = 42;
+ config.send_codec_spec->red_payload_type = 43;
config.encoder_factory = MockAudioEncoderFactory::CreateUnusedFactory();
config.rtp.extmap_allow_mixed = true;
config.rtp.extensions.push_back(
@@ -380,7 +384,7 @@ TEST(AudioSendStreamTest, ConfigToString) {
"send_transport: null, "
"min_bitrate_bps: 12000, max_bitrate_bps: 34000, "
"send_codec_spec: {nack_enabled: true, transport_cc_enabled: false, "
- "cng_payload_type: 42, payload_type: 103, "
+ "cng_payload_type: 42, red_payload_type: 43, payload_type: 103, "
"format: {name: isac, clockrate_hz: 16000, num_channels: 1, "
"parameters: {}}}}",
config.ToString());
@@ -473,7 +477,7 @@ TEST(AudioSendStreamTest, GetStatsAudioLevel) {
ConfigHelper helper(false, true, use_null_audio_processing);
auto send_stream = helper.CreateAudioSendStream();
helper.SetupMockForGetStats(use_null_audio_processing);
- EXPECT_CALL(*helper.channel_send(), ProcessAndEncodeAudioForMock(_))
+ EXPECT_CALL(*helper.channel_send(), ProcessAndEncodeAudio)
.Times(AnyNumber());
constexpr int kSampleRateHz = 48000;
@@ -558,15 +562,13 @@ TEST(AudioSendStreamTest, SendCodecCanApplyVad) {
helper.config().send_codec_spec =
AudioSendStream::Config::SendCodecSpec(9, kG722Format);
helper.config().send_codec_spec->cng_payload_type = 105;
- using ::testing::Invoke;
std::unique_ptr<AudioEncoder> stolen_encoder;
- EXPECT_CALL(*helper.channel_send(), SetEncoderForMock(_, _))
- .WillOnce(
- Invoke([&stolen_encoder](int payload_type,
- std::unique_ptr<AudioEncoder>* encoder) {
- stolen_encoder = std::move(*encoder);
- return true;
- }));
+ EXPECT_CALL(*helper.channel_send(), SetEncoder)
+ .WillOnce([&stolen_encoder](int payload_type,
+ std::unique_ptr<AudioEncoder> encoder) {
+ stolen_encoder = std::move(encoder);
+ return true;
+ });
EXPECT_CALL(*helper.channel_send(), RegisterCngPayloadType(105, 8000));
auto send_stream = helper.CreateAudioSendStream();
@@ -748,8 +750,7 @@ TEST(AudioSendStreamTest, DontRecreateEncoder) {
// test to be correct, it's instead set-up manually here. Otherwise a simple
// change to ConfigHelper (say to WillRepeatedly) would silently make this
// test useless.
- EXPECT_CALL(*helper.channel_send(), SetEncoderForMock(_, _))
- .WillOnce(Return());
+ EXPECT_CALL(*helper.channel_send(), SetEncoder).WillOnce(Return());
EXPECT_CALL(*helper.channel_send(), RegisterCngPayloadType(105, 8000));
@@ -794,7 +795,7 @@ TEST(AudioSendStreamTest, OnTransportOverheadChanged) {
auto new_config = helper.config();
// CallEncoder will be called on overhead change.
- EXPECT_CALL(*helper.channel_send(), CallEncoder(::testing::_)).Times(1);
+ EXPECT_CALL(*helper.channel_send(), CallEncoder);
const size_t transport_overhead_per_packet_bytes = 333;
send_stream->SetTransportOverhead(transport_overhead_per_packet_bytes);
@@ -804,6 +805,27 @@ TEST(AudioSendStreamTest, OnTransportOverheadChanged) {
}
}
+TEST(AudioSendStreamTest, DoesntCallEncoderWhenOverheadUnchanged) {
+ for (bool use_null_audio_processing : {false, true}) {
+ ConfigHelper helper(false, true, use_null_audio_processing);
+ auto send_stream = helper.CreateAudioSendStream();
+ auto new_config = helper.config();
+
+ // CallEncoder will be called on overhead change.
+ EXPECT_CALL(*helper.channel_send(), CallEncoder);
+ const size_t transport_overhead_per_packet_bytes = 333;
+ send_stream->SetTransportOverhead(transport_overhead_per_packet_bytes);
+
+ // Set the same overhead again, CallEncoder should not be called again.
+ EXPECT_CALL(*helper.channel_send(), CallEncoder).Times(0);
+ send_stream->SetTransportOverhead(transport_overhead_per_packet_bytes);
+
+ // With a new overhead value, CallEncoder should be called again.
+ EXPECT_CALL(*helper.channel_send(), CallEncoder);
+ send_stream->SetTransportOverhead(transport_overhead_per_packet_bytes + 1);
+ }
+}
+
TEST(AudioSendStreamTest, AudioOverheadChanged) {
for (bool use_null_audio_processing : {false, true}) {
ConfigHelper helper(false, true, use_null_audio_processing);
diff --git a/chromium/third_party/webrtc/audio/audio_state_unittest.cc b/chromium/third_party/webrtc/audio/audio_state_unittest.cc
index 76e08c549c2..2bbe0fb0b70 100644
--- a/chromium/third_party/webrtc/audio/audio_state_unittest.cc
+++ b/chromium/third_party/webrtc/audio/audio_state_unittest.cc
@@ -60,8 +60,10 @@ class FakeAudioSource : public AudioMixer::Source {
int PreferredSampleRate() const /*override*/ { return kSampleRate; }
- MOCK_METHOD2(GetAudioFrameWithInfo,
- AudioFrameInfo(int sample_rate_hz, AudioFrame* audio_frame));
+ MOCK_METHOD(AudioFrameInfo,
+ GetAudioFrameWithInfo,
+ (int sample_rate_hz, AudioFrame*),
+ (override));
};
std::vector<int16_t> Create10msTestData(int sample_rate_hz,
diff --git a/chromium/third_party/webrtc/audio/channel_receive.cc b/chromium/third_party/webrtc/audio/channel_receive.cc
index 66b4bb11f58..34b6d097afa 100644
--- a/chromium/third_party/webrtc/audio/channel_receive.cc
+++ b/chromium/third_party/webrtc/audio/channel_receive.cc
@@ -33,11 +33,11 @@
#include "modules/pacing/packet_router.h"
#include "modules/rtp_rtcp/include/receive_statistics.h"
#include "modules/rtp_rtcp/include/remote_ntp_time_estimator.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
#include "modules/rtp_rtcp/source/absolute_capture_time_receiver.h"
#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
#include "modules/rtp_rtcp/source/rtp_rtcp_config.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "modules/utility/include/process_thread.h"
#include "rtc_base/checks.h"
#include "rtc_base/critical_section.h"
@@ -216,7 +216,7 @@ class ChannelReceive : public ChannelReceiveInterface {
std::map<uint8_t, int> payload_type_frequencies_;
std::unique_ptr<ReceiveStatistics> rtp_receive_statistics_;
- std::unique_ptr<RtpRtcp> _rtpRtcpModule;
+ std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp_;
const uint32_t remote_ssrc_;
// Info for GetSyncInfo is updated on network or worker thread, and queried on
@@ -297,7 +297,7 @@ void ChannelReceive::OnReceivedPayloadData(
}
int64_t round_trip_time = 0;
- _rtpRtcpModule->RTT(remote_ssrc_, &round_trip_time, NULL, NULL, NULL);
+ rtp_rtcp_->RTT(remote_ssrc_, &round_trip_time, NULL, NULL, NULL);
std::vector<uint16_t> nack_list = acm_receiver_.GetNackList(round_trip_time);
if (!nack_list.empty()) {
@@ -495,7 +495,7 @@ ChannelReceive::ChannelReceive(
_outputAudioLevel.ResetLevelFullRange();
rtp_receive_statistics_->EnableRetransmitDetection(remote_ssrc_, true);
- RtpRtcp::Configuration configuration;
+ RtpRtcpInterface::Configuration configuration;
configuration.clock = clock;
configuration.audio = true;
configuration.receiver_only = true;
@@ -507,14 +507,14 @@ ChannelReceive::ChannelReceive(
if (frame_transformer)
InitFrameTransformerDelegate(std::move(frame_transformer));
- _rtpRtcpModule = RtpRtcp::Create(configuration);
- _rtpRtcpModule->SetSendingMediaStatus(false);
- _rtpRtcpModule->SetRemoteSSRC(remote_ssrc_);
+ rtp_rtcp_ = ModuleRtpRtcpImpl2::Create(configuration);
+ rtp_rtcp_->SetSendingMediaStatus(false);
+ rtp_rtcp_->SetRemoteSSRC(remote_ssrc_);
- _moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get(), RTC_FROM_HERE);
+ _moduleProcessThreadPtr->RegisterModule(rtp_rtcp_.get(), RTC_FROM_HERE);
// Ensure that RTCP is enabled for the created channel.
- _rtpRtcpModule->SetRTCPStatus(RtcpMode::kCompound);
+ rtp_rtcp_->SetRTCPStatus(RtcpMode::kCompound);
}
ChannelReceive::~ChannelReceive() {
@@ -527,7 +527,7 @@ ChannelReceive::~ChannelReceive() {
StopPlayout();
if (_moduleProcessThreadPtr)
- _moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get());
+ _moduleProcessThreadPtr->DeRegisterModule(rtp_rtcp_.get());
}
void ChannelReceive::SetSink(AudioSinkInterface* sink) {
@@ -659,7 +659,7 @@ void ChannelReceive::ReceivedRTCPPacket(const uint8_t* data, size_t length) {
UpdatePlayoutTimestamp(true, rtc::TimeMillis());
// Deliver RTCP packet to RTP/RTCP module for parsing
- _rtpRtcpModule->IncomingRtcpPacket(data, length);
+ rtp_rtcp_->IncomingRtcpPacket(data, length);
int64_t rtt = GetRTT();
if (rtt == 0) {
@@ -670,8 +670,8 @@ void ChannelReceive::ReceivedRTCPPacket(const uint8_t* data, size_t length) {
uint32_t ntp_secs = 0;
uint32_t ntp_frac = 0;
uint32_t rtp_timestamp = 0;
- if (0 != _rtpRtcpModule->RemoteNTP(&ntp_secs, &ntp_frac, NULL, NULL,
- &rtp_timestamp)) {
+ if (0 !=
+ rtp_rtcp_->RemoteNTP(&ntp_secs, &ntp_frac, NULL, NULL, &rtp_timestamp)) {
// Waiting for RTCP.
return;
}
@@ -709,14 +709,14 @@ void ChannelReceive::RegisterReceiverCongestionControlObjects(
RTC_DCHECK(packet_router);
RTC_DCHECK(!packet_router_);
constexpr bool remb_candidate = false;
- packet_router->AddReceiveRtpModule(_rtpRtcpModule.get(), remb_candidate);
+ packet_router->AddReceiveRtpModule(rtp_rtcp_.get(), remb_candidate);
packet_router_ = packet_router;
}
void ChannelReceive::ResetReceiverCongestionControlObjects() {
RTC_DCHECK(worker_thread_checker_.IsCurrent());
RTC_DCHECK(packet_router_);
- packet_router_->RemoveReceiveRtpModule(_rtpRtcpModule.get());
+ packet_router_->RemoveReceiveRtpModule(rtp_rtcp_.get());
packet_router_ = nullptr;
}
@@ -781,7 +781,7 @@ void ChannelReceive::SetNACKStatus(bool enable, int max_packets) {
// Called when we are missing one or more packets.
int ChannelReceive::ResendPackets(const uint16_t* sequence_numbers,
int length) {
- return _rtpRtcpModule->SendNACK(sequence_numbers, length);
+ return rtp_rtcp_->SendNACK(sequence_numbers, length);
}
void ChannelReceive::SetAssociatedSendChannel(
@@ -877,9 +877,9 @@ int ChannelReceive::GetBaseMinimumPlayoutDelayMs() const {
absl::optional<Syncable::Info> ChannelReceive::GetSyncInfo() const {
RTC_DCHECK(module_process_thread_checker_.IsCurrent());
Syncable::Info info;
- if (_rtpRtcpModule->RemoteNTP(&info.capture_time_ntp_secs,
- &info.capture_time_ntp_frac, nullptr, nullptr,
- &info.capture_time_source_clock) != 0) {
+ if (rtp_rtcp_->RemoteNTP(&info.capture_time_ntp_secs,
+ &info.capture_time_ntp_frac, nullptr, nullptr,
+ &info.capture_time_source_clock) != 0) {
return absl::nullopt;
}
{
@@ -942,7 +942,7 @@ int ChannelReceive::GetRtpTimestampRateHz() const {
int64_t ChannelReceive::GetRTT() const {
std::vector<RTCPReportBlock> report_blocks;
- _rtpRtcpModule->RemoteRTCPStat(&report_blocks);
+ rtp_rtcp_->RemoteRTCPStat(&report_blocks);
// TODO(nisse): Could we check the return value from the ->RTT() call below,
// instead of checking if we have any report blocks?
@@ -961,8 +961,7 @@ int64_t ChannelReceive::GetRTT() const {
int64_t min_rtt = 0;
// TODO(nisse): This method computes RTT based on sender reports, even though
// a receive stream is not supposed to do that.
- if (_rtpRtcpModule->RTT(remote_ssrc_, &rtt, &avg_rtt, &min_rtt, &max_rtt) !=
- 0) {
+ if (rtp_rtcp_->RTT(remote_ssrc_, &rtt, &avg_rtt, &min_rtt, &max_rtt) != 0) {
return 0;
}
return rtt;
diff --git a/chromium/third_party/webrtc/audio/channel_send.cc b/chromium/third_party/webrtc/audio/channel_send.cc
index 3387f271ba0..16d1da648c9 100644
--- a/chromium/third_party/webrtc/audio/channel_send.cc
+++ b/chromium/third_party/webrtc/audio/channel_send.cc
@@ -29,6 +29,7 @@
#include "modules/audio_coding/include/audio_coding_module.h"
#include "modules/audio_processing/rms_level.h"
#include "modules/pacing/packet_router.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "modules/utility/include/process_thread.h"
#include "rtc_base/checks.h"
#include "rtc_base/event.h"
@@ -106,7 +107,7 @@ class ChannelSend : public ChannelSendInterface,
ANAStats GetANAStatistics() const override;
// Used by AudioSendStream.
- RtpRtcp* GetRtpRtcp() const override;
+ RtpRtcpInterface* GetRtpRtcp() const override;
void RegisterCngPayloadType(int payload_type, int payload_frequency) override;
@@ -191,7 +192,7 @@ class ChannelSend : public ChannelSendInterface,
RtcEventLog* const event_log_;
- std::unique_ptr<RtpRtcp> _rtpRtcpModule;
+ std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp_;
std::unique_ptr<RTPSenderAudio> rtp_sender_audio_;
std::unique_ptr<AudioCodingModule> audio_coding_;
@@ -388,9 +389,9 @@ int32_t ChannelSend::SendData(AudioFrameType frameType,
// Asynchronously transform the payload before sending it. After the payload
// is transformed, the delegate will call SendRtpAudio to send it.
frame_transformer_delegate_->Transform(
- frameType, payloadType, rtp_timestamp, _rtpRtcpModule->StartTimestamp(),
+ frameType, payloadType, rtp_timestamp, rtp_rtcp_->StartTimestamp(),
payloadData, payloadSize, absolute_capture_timestamp_ms,
- _rtpRtcpModule->SSRC());
+ rtp_rtcp_->SSRC());
return 0;
}
return SendRtpAudio(frameType, payloadType, rtp_timestamp, payload,
@@ -427,7 +428,7 @@ int32_t ChannelSend::SendRtpAudio(AudioFrameType frameType,
// Encrypt the audio payload into the buffer.
size_t bytes_written = 0;
int encrypt_status = frame_encryptor_->Encrypt(
- cricket::MEDIA_TYPE_AUDIO, _rtpRtcpModule->SSRC(),
+ cricket::MEDIA_TYPE_AUDIO, rtp_rtcp_->SSRC(),
/*additional_data=*/nullptr, payload, encrypted_audio_payload,
&bytes_written);
if (encrypt_status != 0) {
@@ -449,12 +450,12 @@ int32_t ChannelSend::SendRtpAudio(AudioFrameType frameType,
// Push data from ACM to RTP/RTCP-module to deliver audio frame for
// packetization.
- if (!_rtpRtcpModule->OnSendingRtpFrame(rtp_timestamp,
- // Leaving the time when this frame was
- // received from the capture device as
- // undefined for voice for now.
- -1, payloadType,
- /*force_sender_report=*/false)) {
+ if (!rtp_rtcp_->OnSendingRtpFrame(rtp_timestamp,
+ // Leaving the time when this frame was
+ // received from the capture device as
+ // undefined for voice for now.
+ -1, payloadType,
+ /*force_sender_report=*/false)) {
return -1;
}
@@ -466,9 +467,8 @@ int32_t ChannelSend::SendRtpAudio(AudioFrameType frameType,
// This call will trigger Transport::SendPacket() from the RTP/RTCP module.
if (!rtp_sender_audio_->SendAudio(
- frameType, payloadType,
- rtp_timestamp + _rtpRtcpModule->StartTimestamp(), payload.data(),
- payload.size(), absolute_capture_timestamp_ms)) {
+ frameType, payloadType, rtp_timestamp + rtp_rtcp_->StartTimestamp(),
+ payload.data(), payload.size(), absolute_capture_timestamp_ms)) {
RTC_DLOG(LS_ERROR)
<< "ChannelSend::SendData() failed to send data to RTP/RTCP module";
return -1;
@@ -512,7 +512,7 @@ ChannelSend::ChannelSend(
audio_coding_.reset(AudioCodingModule::Create(AudioCodingModule::Config()));
- RtpRtcp::Configuration configuration;
+ RtpRtcpInterface::Configuration configuration;
configuration.bandwidth_callback = rtcp_observer_.get();
configuration.transport_feedback_callback = feedback_observer_proxy_.get();
configuration.clock = (clock ? clock : Clock::GetRealTimeClock());
@@ -530,16 +530,16 @@ ChannelSend::ChannelSend(
configuration.local_media_ssrc = ssrc;
- _rtpRtcpModule = RtpRtcp::Create(configuration);
- _rtpRtcpModule->SetSendingMediaStatus(false);
+ rtp_rtcp_ = ModuleRtpRtcpImpl2::Create(configuration);
+ rtp_rtcp_->SetSendingMediaStatus(false);
- rtp_sender_audio_ = std::make_unique<RTPSenderAudio>(
- configuration.clock, _rtpRtcpModule->RtpSender());
+ rtp_sender_audio_ = std::make_unique<RTPSenderAudio>(configuration.clock,
+ rtp_rtcp_->RtpSender());
- _moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get(), RTC_FROM_HERE);
+ _moduleProcessThreadPtr->RegisterModule(rtp_rtcp_.get(), RTC_FROM_HERE);
// Ensure that RTCP is enabled by default for the created channel.
- _rtpRtcpModule->SetRTCPStatus(RtcpMode::kCompound);
+ rtp_rtcp_->SetRTCPStatus(RtcpMode::kCompound);
int error = audio_coding_->RegisterTransportCallback(this);
RTC_DCHECK_EQ(0, error);
@@ -559,7 +559,7 @@ ChannelSend::~ChannelSend() {
RTC_DCHECK_EQ(0, error);
if (_moduleProcessThreadPtr)
- _moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get());
+ _moduleProcessThreadPtr->DeRegisterModule(rtp_rtcp_.get());
}
void ChannelSend::StartSend() {
@@ -567,8 +567,8 @@ void ChannelSend::StartSend() {
RTC_DCHECK(!sending_);
sending_ = true;
- _rtpRtcpModule->SetSendingMediaStatus(true);
- int ret = _rtpRtcpModule->SetSendingStatus(true);
+ rtp_rtcp_->SetSendingMediaStatus(true);
+ int ret = rtp_rtcp_->SetSendingStatus(true);
RTC_DCHECK_EQ(0, ret);
// It is now OK to start processing on the encoder task queue.
encoder_queue_.PostTask([this] {
@@ -594,10 +594,10 @@ void ChannelSend::StopSend() {
// Reset sending SSRC and sequence number and triggers direct transmission
// of RTCP BYE
- if (_rtpRtcpModule->SetSendingStatus(false) == -1) {
+ if (rtp_rtcp_->SetSendingStatus(false) == -1) {
RTC_DLOG(LS_ERROR) << "StartSend() RTP/RTCP failed to stop sending";
}
- _rtpRtcpModule->SetSendingMediaStatus(false);
+ rtp_rtcp_->SetSendingMediaStatus(false);
}
void ChannelSend::SetEncoder(int payload_type,
@@ -608,8 +608,8 @@ void ChannelSend::SetEncoder(int payload_type,
// The RTP/RTCP module needs to know the RTP timestamp rate (i.e. clockrate)
// as well as some other things, so we collect this info and send it along.
- _rtpRtcpModule->RegisterSendPayloadFrequency(payload_type,
- encoder->RtpTimestampRateHz());
+ rtp_rtcp_->RegisterSendPayloadFrequency(payload_type,
+ encoder->RtpTimestampRateHz());
rtp_sender_audio_->RegisterAudioPayload("audio", payload_type,
encoder->RtpTimestampRateHz(),
encoder->NumChannels(), 0);
@@ -664,7 +664,7 @@ void ChannelSend::OnUplinkPacketLossRate(float packet_loss_rate) {
void ChannelSend::ReceivedRTCPPacket(const uint8_t* data, size_t length) {
// Deliver RTCP packet to RTP/RTCP module for parsing
- _rtpRtcpModule->IncomingRtcpPacket(data, length);
+ rtp_rtcp_->IncomingRtcpPacket(data, length);
int64_t rtt = GetRTT();
if (rtt == 0) {
@@ -713,7 +713,7 @@ bool ChannelSend::SendTelephoneEventOutband(int event, int duration_ms) {
void ChannelSend::RegisterCngPayloadType(int payload_type,
int payload_frequency) {
- _rtpRtcpModule->RegisterSendPayloadFrequency(payload_type, payload_frequency);
+ rtp_rtcp_->RegisterSendPayloadFrequency(payload_type, payload_frequency);
rtp_sender_audio_->RegisterAudioPayload("CN", payload_type, payload_frequency,
1, 0);
}
@@ -723,7 +723,7 @@ void ChannelSend::SetSendTelephoneEventPayloadType(int payload_type,
RTC_DCHECK_RUN_ON(&worker_thread_checker_);
RTC_DCHECK_LE(0, payload_type);
RTC_DCHECK_GE(127, payload_type);
- _rtpRtcpModule->RegisterSendPayloadFrequency(payload_type, payload_frequency);
+ rtp_rtcp_->RegisterSendPayloadFrequency(payload_type, payload_frequency);
rtp_sender_audio_->RegisterAudioPayload("telephone-event", payload_type,
payload_frequency, 0, 0);
}
@@ -732,9 +732,9 @@ void ChannelSend::SetSendAudioLevelIndicationStatus(bool enable, int id) {
RTC_DCHECK_RUN_ON(&worker_thread_checker_);
_includeAudioLevelIndication = enable;
if (enable) {
- _rtpRtcpModule->RegisterRtpHeaderExtension(AudioLevel::kUri, id);
+ rtp_rtcp_->RegisterRtpHeaderExtension(AudioLevel::kUri, id);
} else {
- _rtpRtcpModule->DeregisterSendRtpHeaderExtension(AudioLevel::kUri);
+ rtp_rtcp_->DeregisterSendRtpHeaderExtension(AudioLevel::kUri);
}
}
@@ -755,19 +755,19 @@ void ChannelSend::RegisterSenderCongestionControlObjects(
feedback_observer_proxy_->SetTransportFeedbackObserver(
transport_feedback_observer);
rtp_packet_pacer_proxy_->SetPacketPacer(rtp_packet_pacer);
- _rtpRtcpModule->SetStorePacketsStatus(true, 600);
+ rtp_rtcp_->SetStorePacketsStatus(true, 600);
constexpr bool remb_candidate = false;
- packet_router->AddSendRtpModule(_rtpRtcpModule.get(), remb_candidate);
+ packet_router->AddSendRtpModule(rtp_rtcp_.get(), remb_candidate);
packet_router_ = packet_router;
}
void ChannelSend::ResetSenderCongestionControlObjects() {
RTC_DCHECK_RUN_ON(&worker_thread_checker_);
RTC_DCHECK(packet_router_);
- _rtpRtcpModule->SetStorePacketsStatus(false, 600);
+ rtp_rtcp_->SetStorePacketsStatus(false, 600);
rtcp_observer_->SetBandwidthObserver(nullptr);
feedback_observer_proxy_->SetTransportFeedbackObserver(nullptr);
- packet_router_->RemoveSendRtpModule(_rtpRtcpModule.get());
+ packet_router_->RemoveSendRtpModule(rtp_rtcp_.get());
packet_router_ = nullptr;
rtp_packet_pacer_proxy_->SetPacketPacer(nullptr);
}
@@ -776,7 +776,7 @@ void ChannelSend::SetRTCP_CNAME(absl::string_view c_name) {
RTC_DCHECK_RUN_ON(&worker_thread_checker_);
// Note: SetCNAME() accepts a c string of length at most 255.
const std::string c_name_limited(c_name.substr(0, 255));
- int ret = _rtpRtcpModule->SetCNAME(c_name_limited.c_str()) != 0;
+ int ret = rtp_rtcp_->SetCNAME(c_name_limited.c_str()) != 0;
RTC_DCHECK_EQ(0, ret) << "SetRTCP_CNAME() failed to set RTCP CNAME";
}
@@ -787,7 +787,7 @@ std::vector<ReportBlock> ChannelSend::GetRemoteRTCPReportBlocks() const {
// report block according to RFC 3550.
std::vector<RTCPReportBlock> rtcp_report_blocks;
- int ret = _rtpRtcpModule->RemoteRTCPStat(&rtcp_report_blocks);
+ int ret = rtp_rtcp_->RemoteRTCPStat(&rtcp_report_blocks);
RTC_DCHECK_EQ(0, ret);
std::vector<ReportBlock> report_blocks;
@@ -816,7 +816,7 @@ CallSendStatistics ChannelSend::GetRTCPStatistics() const {
StreamDataCounters rtp_stats;
StreamDataCounters rtx_stats;
- _rtpRtcpModule->GetSendStreamDataCounters(&rtp_stats, &rtx_stats);
+ rtp_rtcp_->GetSendStreamDataCounters(&rtp_stats, &rtx_stats);
stats.payload_bytes_sent =
rtp_stats.transmitted.payload_bytes + rtx_stats.transmitted.payload_bytes;
stats.header_and_padding_bytes_sent =
@@ -829,7 +829,7 @@ CallSendStatistics ChannelSend::GetRTCPStatistics() const {
stats.packetsSent =
rtp_stats.transmitted.packets + rtx_stats.transmitted.packets;
stats.retransmitted_packets_sent = rtp_stats.retransmitted.packets;
- stats.report_block_datas = _rtpRtcpModule->GetLatestReportBlockData();
+ stats.report_block_datas = rtp_rtcp_->GetLatestReportBlockData();
return stats;
}
@@ -894,14 +894,14 @@ ANAStats ChannelSend::GetANAStatistics() const {
return audio_coding_->GetANAStats();
}
-RtpRtcp* ChannelSend::GetRtpRtcp() const {
+RtpRtcpInterface* ChannelSend::GetRtpRtcp() const {
RTC_DCHECK(module_process_thread_checker_.IsCurrent());
- return _rtpRtcpModule.get();
+ return rtp_rtcp_.get();
}
int64_t ChannelSend::GetRTT() const {
std::vector<RTCPReportBlock> report_blocks;
- _rtpRtcpModule->RemoteRTCPStat(&report_blocks);
+ rtp_rtcp_->RemoteRTCPStat(&report_blocks);
if (report_blocks.empty()) {
return 0;
@@ -913,8 +913,8 @@ int64_t ChannelSend::GetRTT() const {
int64_t min_rtt = 0;
// We don't know in advance the remote ssrc used by the other end's receiver
// reports, so use the SSRC of the first report block for calculating the RTT.
- if (_rtpRtcpModule->RTT(report_blocks[0].sender_ssrc, &rtt, &avg_rtt,
- &min_rtt, &max_rtt) != 0) {
+ if (rtp_rtcp_->RTT(report_blocks[0].sender_ssrc, &rtt, &avg_rtt, &min_rtt,
+ &max_rtt) != 0) {
return 0;
}
return rtt;
diff --git a/chromium/third_party/webrtc/audio/channel_send.h b/chromium/third_party/webrtc/audio/channel_send.h
index cb3b99287ba..56fea97f9c2 100644
--- a/chromium/third_party/webrtc/audio/channel_send.h
+++ b/chromium/third_party/webrtc/audio/channel_send.h
@@ -22,7 +22,7 @@
#include "api/function_view.h"
#include "api/task_queue/task_queue_factory.h"
#include "modules/rtp_rtcp/include/report_block_data.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h"
#include "modules/rtp_rtcp/source/rtp_sender_audio.h"
namespace webrtc {
@@ -30,7 +30,6 @@ namespace webrtc {
class FrameEncryptorInterface;
class ProcessThread;
class RtcEventLog;
-class RtpRtcp;
class RtpTransportControllerSendInterface;
struct CallSendStatistics {
@@ -97,7 +96,7 @@ class ChannelSendInterface {
virtual void ProcessAndEncodeAudio(
std::unique_ptr<AudioFrame> audio_frame) = 0;
- virtual RtpRtcp* GetRtpRtcp() const = 0;
+ virtual RtpRtcpInterface* GetRtpRtcp() const = 0;
// In RTP we currently rely on RTCP packets (|ReceivedRTCPPacket|) to inform
// about RTT.
diff --git a/chromium/third_party/webrtc/audio/mock_voe_channel_proxy.h b/chromium/third_party/webrtc/audio/mock_voe_channel_proxy.h
index 38ad208e1a8..542358f6870 100644
--- a/chromium/third_party/webrtc/audio/mock_voe_channel_proxy.h
+++ b/chromium/third_party/webrtc/audio/mock_voe_channel_proxy.h
@@ -28,102 +28,144 @@ namespace test {
class MockChannelReceive : public voe::ChannelReceiveInterface {
public:
- MOCK_METHOD2(SetNACKStatus, void(bool enable, int max_packets));
- MOCK_METHOD1(RegisterReceiverCongestionControlObjects,
- void(PacketRouter* packet_router));
- MOCK_METHOD0(ResetReceiverCongestionControlObjects, void());
- MOCK_CONST_METHOD0(GetRTCPStatistics, CallReceiveStatistics());
- MOCK_CONST_METHOD0(GetNetworkStatistics, NetworkStatistics());
- MOCK_CONST_METHOD0(GetDecodingCallStatistics, AudioDecodingCallStats());
- MOCK_CONST_METHOD0(GetSpeechOutputLevelFullRange, int());
- MOCK_CONST_METHOD0(GetTotalOutputEnergy, double());
- MOCK_CONST_METHOD0(GetTotalOutputDuration, double());
- MOCK_CONST_METHOD0(GetDelayEstimate, uint32_t());
- MOCK_METHOD1(SetSink, void(AudioSinkInterface* sink));
- MOCK_METHOD1(OnRtpPacket, void(const RtpPacketReceived& packet));
- MOCK_METHOD2(ReceivedRTCPPacket, void(const uint8_t* packet, size_t length));
- MOCK_METHOD1(SetChannelOutputVolumeScaling, void(float scaling));
- MOCK_METHOD2(GetAudioFrameWithInfo,
- AudioMixer::Source::AudioFrameInfo(int sample_rate_hz,
- AudioFrame* audio_frame));
- MOCK_CONST_METHOD0(PreferredSampleRate, int());
- MOCK_METHOD1(SetAssociatedSendChannel,
- void(const voe::ChannelSendInterface* send_channel));
- MOCK_CONST_METHOD2(GetPlayoutRtpTimestamp,
- bool(uint32_t* rtp_timestamp, int64_t* time_ms));
- MOCK_METHOD2(SetEstimatedPlayoutNtpTimestampMs,
- void(int64_t ntp_timestamp_ms, int64_t time_ms));
- MOCK_CONST_METHOD1(GetCurrentEstimatedPlayoutNtpTimestampMs,
- absl::optional<int64_t>(int64_t now_ms));
- MOCK_CONST_METHOD0(GetSyncInfo, absl::optional<Syncable::Info>());
- MOCK_METHOD1(SetMinimumPlayoutDelay, void(int delay_ms));
- MOCK_METHOD1(SetBaseMinimumPlayoutDelayMs, bool(int delay_ms));
- MOCK_CONST_METHOD0(GetBaseMinimumPlayoutDelayMs, int());
- MOCK_CONST_METHOD0(GetReceiveCodec,
- absl::optional<std::pair<int, SdpAudioFormat>>());
- MOCK_METHOD1(SetReceiveCodecs,
- void(const std::map<int, SdpAudioFormat>& codecs));
- MOCK_CONST_METHOD0(GetSources, std::vector<RtpSource>());
- MOCK_METHOD0(StartPlayout, void());
- MOCK_METHOD0(StopPlayout, void());
- MOCK_METHOD1(SetDepacketizerToDecoderFrameTransformer,
- void(rtc::scoped_refptr<webrtc::FrameTransformerInterface>
- frame_transformer));
+ MOCK_METHOD(void, SetNACKStatus, (bool enable, int max_packets), (override));
+ MOCK_METHOD(void,
+ RegisterReceiverCongestionControlObjects,
+ (PacketRouter*),
+ (override));
+ MOCK_METHOD(void, ResetReceiverCongestionControlObjects, (), (override));
+ MOCK_METHOD(CallReceiveStatistics, GetRTCPStatistics, (), (const, override));
+ MOCK_METHOD(NetworkStatistics, GetNetworkStatistics, (), (const, override));
+ MOCK_METHOD(AudioDecodingCallStats,
+ GetDecodingCallStatistics,
+ (),
+ (const, override));
+ MOCK_METHOD(int, GetSpeechOutputLevelFullRange, (), (const, override));
+ MOCK_METHOD(double, GetTotalOutputEnergy, (), (const, override));
+ MOCK_METHOD(double, GetTotalOutputDuration, (), (const, override));
+ MOCK_METHOD(uint32_t, GetDelayEstimate, (), (const, override));
+ MOCK_METHOD(void, SetSink, (AudioSinkInterface*), (override));
+ MOCK_METHOD(void, OnRtpPacket, (const RtpPacketReceived& packet), (override));
+ MOCK_METHOD(void,
+ ReceivedRTCPPacket,
+ (const uint8_t*, size_t length),
+ (override));
+ MOCK_METHOD(void, SetChannelOutputVolumeScaling, (float scaling), (override));
+ MOCK_METHOD(AudioMixer::Source::AudioFrameInfo,
+ GetAudioFrameWithInfo,
+ (int sample_rate_hz, AudioFrame*),
+ (override));
+ MOCK_METHOD(int, PreferredSampleRate, (), (const, override));
+ MOCK_METHOD(void,
+ SetAssociatedSendChannel,
+ (const voe::ChannelSendInterface*),
+ (override));
+ MOCK_METHOD(bool,
+ GetPlayoutRtpTimestamp,
+ (uint32_t*, int64_t*),
+ (const, override));
+ MOCK_METHOD(void,
+ SetEstimatedPlayoutNtpTimestampMs,
+ (int64_t ntp_timestamp_ms, int64_t time_ms),
+ (override));
+ MOCK_METHOD(absl::optional<int64_t>,
+ GetCurrentEstimatedPlayoutNtpTimestampMs,
+ (int64_t now_ms),
+ (const, override));
+ MOCK_METHOD(absl::optional<Syncable::Info>,
+ GetSyncInfo,
+ (),
+ (const, override));
+ MOCK_METHOD(void, SetMinimumPlayoutDelay, (int delay_ms), (override));
+ MOCK_METHOD(bool, SetBaseMinimumPlayoutDelayMs, (int delay_ms), (override));
+ MOCK_METHOD(int, GetBaseMinimumPlayoutDelayMs, (), (const, override));
+ MOCK_METHOD((absl::optional<std::pair<int, SdpAudioFormat>>),
+ GetReceiveCodec,
+ (),
+ (const, override));
+ MOCK_METHOD(void,
+ SetReceiveCodecs,
+ ((const std::map<int, SdpAudioFormat>& codecs)),
+ (override));
+ MOCK_METHOD(void, StartPlayout, (), (override));
+ MOCK_METHOD(void, StopPlayout, (), (override));
+ MOCK_METHOD(
+ void,
+ SetDepacketizerToDecoderFrameTransformer,
+ (rtc::scoped_refptr<webrtc::FrameTransformerInterface> frame_transformer),
+ (override));
};
class MockChannelSend : public voe::ChannelSendInterface {
public:
- // GMock doesn't like move-only types, like std::unique_ptr.
- virtual void SetEncoder(int payload_type,
- std::unique_ptr<AudioEncoder> encoder) {
- return SetEncoderForMock(payload_type, &encoder);
- }
- MOCK_METHOD2(SetEncoderForMock,
- void(int payload_type, std::unique_ptr<AudioEncoder>* encoder));
- MOCK_METHOD1(
+ MOCK_METHOD(void,
+ SetEncoder,
+ (int payload_type, std::unique_ptr<AudioEncoder> encoder),
+ (override));
+ MOCK_METHOD(
+ void,
ModifyEncoder,
- void(rtc::FunctionView<void(std::unique_ptr<AudioEncoder>*)> modifier));
- MOCK_METHOD1(CallEncoder,
- void(rtc::FunctionView<void(AudioEncoder*)> modifier));
- MOCK_METHOD1(SetRTCP_CNAME, void(absl::string_view c_name));
- MOCK_METHOD2(SetSendAudioLevelIndicationStatus, void(bool enable, int id));
- MOCK_METHOD2(RegisterSenderCongestionControlObjects,
- void(RtpTransportControllerSendInterface* transport,
- RtcpBandwidthObserver* bandwidth_observer));
- MOCK_METHOD0(ResetSenderCongestionControlObjects, void());
- MOCK_CONST_METHOD0(GetRTCPStatistics, CallSendStatistics());
- MOCK_CONST_METHOD0(GetRemoteRTCPReportBlocks, std::vector<ReportBlock>());
- MOCK_CONST_METHOD0(GetANAStatistics, ANAStats());
- MOCK_METHOD2(RegisterCngPayloadType,
- void(int payload_type, int payload_frequency));
- MOCK_METHOD2(SetSendTelephoneEventPayloadType,
- void(int payload_type, int payload_frequency));
- MOCK_METHOD2(SendTelephoneEventOutband, bool(int event, int duration_ms));
- MOCK_METHOD1(OnBitrateAllocation, void(BitrateAllocationUpdate update));
- MOCK_METHOD1(SetInputMute, void(bool muted));
- MOCK_METHOD2(ReceivedRTCPPacket, void(const uint8_t* packet, size_t length));
- // GMock doesn't like move-only types, like std::unique_ptr.
- virtual void ProcessAndEncodeAudio(std::unique_ptr<AudioFrame> audio_frame) {
- ProcessAndEncodeAudioForMock(&audio_frame);
- }
- MOCK_METHOD1(ProcessAndEncodeAudioForMock,
- void(std::unique_ptr<AudioFrame>* audio_frame));
- MOCK_METHOD1(SetTransportOverhead,
- void(size_t transport_overhead_per_packet));
- MOCK_CONST_METHOD0(GetRtpRtcp, RtpRtcp*());
- MOCK_CONST_METHOD0(GetBitrate, int());
- MOCK_METHOD1(OnTwccBasedUplinkPacketLossRate, void(float packet_loss_rate));
- MOCK_METHOD1(OnRecoverableUplinkPacketLossRate,
- void(float recoverable_packet_loss_rate));
- MOCK_CONST_METHOD0(GetRTT, int64_t());
- MOCK_METHOD0(StartSend, void());
- MOCK_METHOD0(StopSend, void());
- MOCK_METHOD1(
- SetFrameEncryptor,
- void(rtc::scoped_refptr<FrameEncryptorInterface> frame_encryptor));
- MOCK_METHOD1(SetEncoderToPacketizerFrameTransformer,
- void(rtc::scoped_refptr<webrtc::FrameTransformerInterface>
- frame_transformer));
+ (rtc::FunctionView<void(std::unique_ptr<AudioEncoder>*)> modifier),
+ (override));
+ MOCK_METHOD(void,
+ CallEncoder,
+ (rtc::FunctionView<void(AudioEncoder*)> modifier),
+ (override));
+ MOCK_METHOD(void, SetRTCP_CNAME, (absl::string_view c_name), (override));
+ MOCK_METHOD(void,
+ SetSendAudioLevelIndicationStatus,
+ (bool enable, int id),
+ (override));
+ MOCK_METHOD(void,
+ RegisterSenderCongestionControlObjects,
+ (RtpTransportControllerSendInterface*, RtcpBandwidthObserver*),
+ (override));
+ MOCK_METHOD(void, ResetSenderCongestionControlObjects, (), (override));
+ MOCK_METHOD(CallSendStatistics, GetRTCPStatistics, (), (const, override));
+ MOCK_METHOD(std::vector<ReportBlock>,
+ GetRemoteRTCPReportBlocks,
+ (),
+ (const, override));
+ MOCK_METHOD(ANAStats, GetANAStatistics, (), (const, override));
+ MOCK_METHOD(void,
+ RegisterCngPayloadType,
+ (int payload_type, int payload_frequency),
+ (override));
+ MOCK_METHOD(void,
+ SetSendTelephoneEventPayloadType,
+ (int payload_type, int payload_frequency),
+ (override));
+ MOCK_METHOD(bool,
+ SendTelephoneEventOutband,
+ (int event, int duration_ms),
+ (override));
+ MOCK_METHOD(void,
+ OnBitrateAllocation,
+ (BitrateAllocationUpdate update),
+ (override));
+ MOCK_METHOD(void, SetInputMute, (bool muted), (override));
+ MOCK_METHOD(void,
+ ReceivedRTCPPacket,
+ (const uint8_t*, size_t length),
+ (override));
+ MOCK_METHOD(void,
+ ProcessAndEncodeAudio,
+ (std::unique_ptr<AudioFrame>),
+ (override));
+ MOCK_METHOD(RtpRtcpInterface*, GetRtpRtcp, (), (const, override));
+ MOCK_METHOD(int, GetBitrate, (), (const, override));
+ MOCK_METHOD(int64_t, GetRTT, (), (const, override));
+ MOCK_METHOD(void, StartSend, (), (override));
+ MOCK_METHOD(void, StopSend, (), (override));
+ MOCK_METHOD(void,
+ SetFrameEncryptor,
+ (rtc::scoped_refptr<FrameEncryptorInterface> frame_encryptor),
+ (override));
+ MOCK_METHOD(
+ void,
+ SetEncoderToPacketizerFrameTransformer,
+ (rtc::scoped_refptr<webrtc::FrameTransformerInterface> frame_transformer),
+ (override));
};
} // namespace test
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/audio/null_audio_poller.cc b/chromium/third_party/webrtc/audio/null_audio_poller.cc
index 22f575d8bba..16d267fb466 100644
--- a/chromium/third_party/webrtc/audio/null_audio_poller.cc
+++ b/chromium/third_party/webrtc/audio/null_audio_poller.cc
@@ -31,7 +31,8 @@ constexpr size_t kNumSamples = kSamplesPerSecond / 100; // 10ms of samples
} // namespace
NullAudioPoller::NullAudioPoller(AudioTransport* audio_transport)
- : audio_transport_(audio_transport),
+ : MessageHandler(false),
+ audio_transport_(audio_transport),
reschedule_at_(rtc::TimeMillis() + kPollDelayMs) {
RTC_DCHECK(audio_transport);
OnMessage(nullptr); // Start the poll loop.
diff --git a/chromium/third_party/webrtc/audio/test/low_bandwidth_audio_test.cc b/chromium/third_party/webrtc/audio/test/low_bandwidth_audio_test.cc
index 049b5e51505..50cf4999202 100644
--- a/chromium/third_party/webrtc/audio/test/low_bandwidth_audio_test.cc
+++ b/chromium/third_party/webrtc/audio/test/low_bandwidth_audio_test.cc
@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include "absl/flags/declare.h"
#include "absl/flags/flag.h"
#include "api/test/simulated_network.h"
#include "audio/test/audio_end_to_end_test.h"
diff --git a/chromium/third_party/webrtc/audio/test/pc_low_bandwidth_audio_test.cc b/chromium/third_party/webrtc/audio/test/pc_low_bandwidth_audio_test.cc
index aafb65f15d2..dbc23760692 100644
--- a/chromium/third_party/webrtc/audio/test/pc_low_bandwidth_audio_test.cc
+++ b/chromium/third_party/webrtc/audio/test/pc_low_bandwidth_audio_test.cc
@@ -10,6 +10,7 @@
#include <memory>
+#include "absl/flags/declare.h"
#include "absl/flags/flag.h"
#include "api/test/create_network_emulation_manager.h"
#include "api/test/create_peerconnection_quality_test_fixture.h"
diff --git a/chromium/third_party/webrtc/audio/utility/audio_frame_operations_unittest.cc b/chromium/third_party/webrtc/audio/utility/audio_frame_operations_unittest.cc
index 1d38875add3..1a2c16e45f5 100644
--- a/chromium/third_party/webrtc/audio/utility/audio_frame_operations_unittest.cc
+++ b/chromium/third_party/webrtc/audio/utility/audio_frame_operations_unittest.cc
@@ -27,6 +27,8 @@ class AudioFrameOperationsTest : public ::testing::Test {
AudioFrame frame_;
};
+class AudioFrameOperationsDeathTest : public AudioFrameOperationsTest {};
+
void SetFrameData(int16_t ch1,
int16_t ch2,
int16_t ch3,
@@ -105,7 +107,7 @@ void VerifyFrameDataBounds(const AudioFrame& frame,
}
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST_F(AudioFrameOperationsTest, MonoToStereoFailsWithBadParameters) {
+TEST_F(AudioFrameOperationsDeathTest, MonoToStereoFailsWithBadParameters) {
EXPECT_DEATH(AudioFrameOperations::UpmixChannels(2, &frame_), "");
frame_.samples_per_channel_ = AudioFrame::kMaxDataSizeSamples;
frame_.num_channels_ = 1;
@@ -136,7 +138,7 @@ TEST_F(AudioFrameOperationsTest, MonoToStereoMuted) {
}
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST_F(AudioFrameOperationsTest, StereoToMonoFailsWithBadParameters) {
+TEST_F(AudioFrameOperationsDeathTest, StereoToMonoFailsWithBadParameters) {
frame_.num_channels_ = 1;
EXPECT_DEATH(AudioFrameOperations::DownmixChannels(1, &frame_), "");
}
diff --git a/chromium/third_party/webrtc/audio/voip/BUILD.gn b/chromium/third_party/webrtc/audio/voip/BUILD.gn
index 60232d5144c..84a0ad7ab0f 100644
--- a/chromium/third_party/webrtc/audio/voip/BUILD.gn
+++ b/chromium/third_party/webrtc/audio/voip/BUILD.gn
@@ -26,8 +26,8 @@ rtc_library("voip_core") {
"../../modules/utility:utility",
"../../rtc_base:criticalsection",
"../../rtc_base:logging",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("audio_channel") {
diff --git a/chromium/third_party/webrtc/audio/voip/audio_channel.cc b/chromium/third_party/webrtc/audio/voip/audio_channel.cc
index b9ce7accd1b..ee08e0590fe 100644
--- a/chromium/third_party/webrtc/audio/voip/audio_channel.cc
+++ b/chromium/third_party/webrtc/audio/voip/audio_channel.cc
@@ -16,6 +16,7 @@
#include "api/audio_codecs/audio_format.h"
#include "api/task_queue/task_queue_factory.h"
#include "modules/rtp_rtcp/include/receive_statistics.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "rtc_base/critical_section.h"
#include "rtc_base/location.h"
#include "rtc_base/logging.h"
@@ -43,7 +44,7 @@ AudioChannel::AudioChannel(
Clock* clock = Clock::GetRealTimeClock();
receive_statistics_ = ReceiveStatistics::Create(clock);
- RtpRtcp::Configuration rtp_config;
+ RtpRtcpInterface::Configuration rtp_config;
rtp_config.clock = clock;
rtp_config.audio = true;
rtp_config.receive_statistics = receive_statistics_.get();
@@ -51,7 +52,7 @@ AudioChannel::AudioChannel(
rtp_config.outgoing_transport = transport;
rtp_config.local_media_ssrc = local_ssrc;
- rtp_rtcp_ = RtpRtcp::Create(rtp_config);
+ rtp_rtcp_ = ModuleRtpRtcpImpl2::Create(rtp_config);
rtp_rtcp_->SetSendingMediaStatus(false);
rtp_rtcp_->SetRTCPStatus(RtcpMode::kCompound);
diff --git a/chromium/third_party/webrtc/audio/voip/audio_channel.h b/chromium/third_party/webrtc/audio/voip/audio_channel.h
index 8b6f1a8e59c..b305215ef6a 100644
--- a/chromium/third_party/webrtc/audio/voip/audio_channel.h
+++ b/chromium/third_party/webrtc/audio/voip/audio_channel.h
@@ -20,7 +20,7 @@
#include "api/voip/voip_base.h"
#include "audio/voip/audio_egress.h"
#include "audio/voip/audio_ingress.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "modules/utility/include/process_thread.h"
#include "rtc_base/critical_section.h"
#include "rtc_base/ref_count.h"
@@ -88,7 +88,7 @@ class AudioChannel : public rtc::RefCountInterface {
// Listed in order for safe destruction of AudioChannel object.
// Synchronization for these are handled internally.
std::unique_ptr<ReceiveStatistics> receive_statistics_;
- std::unique_ptr<RtpRtcp> rtp_rtcp_;
+ std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp_;
std::unique_ptr<AudioIngress> ingress_;
std::unique_ptr<AudioEgress> egress_;
};
diff --git a/chromium/third_party/webrtc/audio/voip/audio_egress.cc b/chromium/third_party/webrtc/audio/voip/audio_egress.cc
index a7bc202a41f..305f7126244 100644
--- a/chromium/third_party/webrtc/audio/voip/audio_egress.cc
+++ b/chromium/third_party/webrtc/audio/voip/audio_egress.cc
@@ -17,7 +17,7 @@
namespace webrtc {
-AudioEgress::AudioEgress(RtpRtcp* rtp_rtcp,
+AudioEgress::AudioEgress(RtpRtcpInterface* rtp_rtcp,
Clock* clock,
TaskQueueFactory* task_queue_factory)
: rtp_rtcp_(rtp_rtcp),
diff --git a/chromium/third_party/webrtc/audio/voip/audio_egress.h b/chromium/third_party/webrtc/audio/voip/audio_egress.h
index e5632cde325..20b0bac02f9 100644
--- a/chromium/third_party/webrtc/audio/voip/audio_egress.h
+++ b/chromium/third_party/webrtc/audio/voip/audio_egress.h
@@ -20,7 +20,7 @@
#include "call/audio_sender.h"
#include "modules/audio_coding/include/audio_coding_module.h"
#include "modules/rtp_rtcp/include/report_block_data.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h"
#include "modules/rtp_rtcp/source/rtp_sender_audio.h"
#include "rtc_base/task_queue.h"
#include "rtc_base/thread_checker.h"
@@ -43,7 +43,7 @@ namespace webrtc {
// smaller footprint.
class AudioEgress : public AudioSender, public AudioPacketizationCallback {
public:
- AudioEgress(RtpRtcp* rtp_rtcp,
+ AudioEgress(RtpRtcpInterface* rtp_rtcp,
Clock* clock,
TaskQueueFactory* task_queue_factory);
~AudioEgress() override;
@@ -109,7 +109,7 @@ class AudioEgress : public AudioSender, public AudioPacketizationCallback {
absl::optional<SdpAudioFormat> encoder_format_ RTC_GUARDED_BY(lock_);
// Synchronization is handled internally by RtpRtcp.
- RtpRtcp* const rtp_rtcp_;
+ RtpRtcpInterface* const rtp_rtcp_;
// Synchronization is handled internally by RTPSenderAudio.
RTPSenderAudio rtp_sender_audio_;
diff --git a/chromium/third_party/webrtc/audio/voip/audio_ingress.cc b/chromium/third_party/webrtc/audio/voip/audio_ingress.cc
index fb43fcd7539..68864eb2e1d 100644
--- a/chromium/third_party/webrtc/audio/voip/audio_ingress.cc
+++ b/chromium/third_party/webrtc/audio/voip/audio_ingress.cc
@@ -36,7 +36,7 @@ AudioCodingModule::Config CreateAcmConfig(
} // namespace
AudioIngress::AudioIngress(
- RtpRtcp* rtp_rtcp,
+ RtpRtcpInterface* rtp_rtcp,
Clock* clock,
ReceiveStatistics* receive_statistics,
rtc::scoped_refptr<AudioDecoderFactory> decoder_factory)
diff --git a/chromium/third_party/webrtc/audio/voip/audio_ingress.h b/chromium/third_party/webrtc/audio/voip/audio_ingress.h
index 99766741d63..15f7900617a 100644
--- a/chromium/third_party/webrtc/audio/voip/audio_ingress.h
+++ b/chromium/third_party/webrtc/audio/voip/audio_ingress.h
@@ -26,8 +26,8 @@
#include "modules/audio_coding/include/audio_coding_module.h"
#include "modules/rtp_rtcp/include/receive_statistics.h"
#include "modules/rtp_rtcp/include/remote_ntp_time_estimator.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h"
#include "rtc_base/critical_section.h"
#include "rtc_base/time_utils.h"
@@ -44,7 +44,7 @@ namespace webrtc {
// smaller footprint.
class AudioIngress : public AudioMixer::Source {
public:
- AudioIngress(RtpRtcp* rtp_rtcp,
+ AudioIngress(RtpRtcpInterface* rtp_rtcp,
Clock* clock,
ReceiveStatistics* receive_statistics,
rtc::scoped_refptr<AudioDecoderFactory> decoder_factory);
@@ -122,8 +122,8 @@ class AudioIngress : public AudioMixer::Source {
// Synchronizaton is handled internally by ReceiveStatistics.
ReceiveStatistics* const rtp_receive_statistics_;
- // Synchronizaton is handled internally by RtpRtcp.
- RtpRtcp* const rtp_rtcp_;
+ // Synchronizaton is handled internally by RtpRtcpInterface.
+ RtpRtcpInterface* const rtp_rtcp_;
// Synchronizaton is handled internally by acm2::AcmReceiver.
acm2::AcmReceiver acm_receiver_;
diff --git a/chromium/third_party/webrtc/audio/voip/test/BUILD.gn b/chromium/third_party/webrtc/audio/voip/test/BUILD.gn
index 39f100a3aab..d698b3321d0 100644
--- a/chromium/third_party/webrtc/audio/voip/test/BUILD.gn
+++ b/chromium/third_party/webrtc/audio/voip/test/BUILD.gn
@@ -36,6 +36,7 @@ if (rtc_include_tests) {
"../../../api/task_queue:default_task_queue_factory",
"../../../modules/audio_mixer:audio_mixer_impl",
"../../../modules/audio_mixer:audio_mixer_test_utils",
+ "../../../modules/rtp_rtcp:rtp_rtcp",
"../../../modules/rtp_rtcp:rtp_rtcp_format",
"../../../modules/utility",
"../../../rtc_base:logging",
@@ -56,6 +57,7 @@ if (rtc_include_tests) {
"../../../api/audio_codecs:builtin_audio_encoder_factory",
"../../../api/task_queue:default_task_queue_factory",
"../../../modules/audio_mixer:audio_mixer_test_utils",
+ "../../../modules/rtp_rtcp:rtp_rtcp",
"../../../rtc_base:logging",
"../../../rtc_base:rtc_event",
"../../../test:mock_transport",
@@ -72,6 +74,7 @@ if (rtc_include_tests) {
"../../../api/audio_codecs:builtin_audio_encoder_factory",
"../../../api/task_queue:default_task_queue_factory",
"../../../modules/audio_mixer:audio_mixer_test_utils",
+ "../../../modules/rtp_rtcp:rtp_rtcp",
"../../../modules/rtp_rtcp:rtp_rtcp_format",
"../../../rtc_base:logging",
"../../../rtc_base:rtc_event",
diff --git a/chromium/third_party/webrtc/audio/voip/test/audio_egress_unittest.cc b/chromium/third_party/webrtc/audio/voip/test/audio_egress_unittest.cc
index 33912658804..70fb6dcf367 100644
--- a/chromium/third_party/webrtc/audio/voip/test/audio_egress_unittest.cc
+++ b/chromium/third_party/webrtc/audio/voip/test/audio_egress_unittest.cc
@@ -14,6 +14,7 @@
#include "api/task_queue/default_task_queue_factory.h"
#include "modules/audio_mixer/sine_wave_generator.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "rtc_base/event.h"
#include "rtc_base/logging.h"
#include "test/gmock.h"
@@ -27,16 +28,16 @@ using ::testing::Invoke;
using ::testing::NiceMock;
using ::testing::Unused;
-std::unique_ptr<RtpRtcp> CreateRtpStack(Clock* clock,
- Transport* transport,
- uint32_t remote_ssrc) {
- RtpRtcp::Configuration rtp_config;
+std::unique_ptr<ModuleRtpRtcpImpl2> CreateRtpStack(Clock* clock,
+ Transport* transport,
+ uint32_t remote_ssrc) {
+ RtpRtcpInterface::Configuration rtp_config;
rtp_config.clock = clock;
rtp_config.audio = true;
rtp_config.rtcp_report_interval_ms = 5000;
rtp_config.outgoing_transport = transport;
rtp_config.local_media_ssrc = remote_ssrc;
- auto rtp_rtcp = RtpRtcp::Create(rtp_config);
+ auto rtp_rtcp = ModuleRtpRtcpImpl2::Create(rtp_config);
rtp_rtcp->SetSendingMediaStatus(false);
rtp_rtcp->SetRTCPStatus(RtcpMode::kCompound);
return rtp_rtcp;
@@ -100,7 +101,7 @@ class AudioEgressTest : public ::testing::Test {
SimulatedClock fake_clock_;
NiceMock<MockTransport> transport_;
SineWaveGenerator wave_generator_;
- std::unique_ptr<RtpRtcp> rtp_rtcp_;
+ std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp_;
std::unique_ptr<TaskQueueFactory> task_queue_factory_;
rtc::scoped_refptr<AudioEncoderFactory> encoder_factory_;
std::unique_ptr<AudioEgress> egress_;
diff --git a/chromium/third_party/webrtc/audio/voip/test/audio_ingress_unittest.cc b/chromium/third_party/webrtc/audio/voip/test/audio_ingress_unittest.cc
index bedb82e211a..3a2a66a3255 100644
--- a/chromium/third_party/webrtc/audio/voip/test/audio_ingress_unittest.cc
+++ b/chromium/third_party/webrtc/audio/voip/test/audio_ingress_unittest.cc
@@ -15,6 +15,7 @@
#include "api/task_queue/default_task_queue_factory.h"
#include "audio/voip/audio_egress.h"
#include "modules/audio_mixer/sine_wave_generator.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "rtc_base/event.h"
#include "rtc_base/logging.h"
#include "test/gmock.h"
@@ -38,14 +39,14 @@ class AudioIngressTest : public ::testing::Test {
: fake_clock_(123456789), wave_generator_(1000.0, kAudioLevel) {
receive_statistics_ = ReceiveStatistics::Create(&fake_clock_);
- RtpRtcp::Configuration rtp_config;
+ RtpRtcpInterface::Configuration rtp_config;
rtp_config.clock = &fake_clock_;
rtp_config.audio = true;
rtp_config.receive_statistics = receive_statistics_.get();
rtp_config.rtcp_report_interval_ms = 5000;
rtp_config.outgoing_transport = &transport_;
rtp_config.local_media_ssrc = 0xdeadc0de;
- rtp_rtcp_ = RtpRtcp::Create(rtp_config);
+ rtp_rtcp_ = ModuleRtpRtcpImpl2::Create(rtp_config);
rtp_rtcp_->SetSendingMediaStatus(false);
rtp_rtcp_->SetRTCPStatus(RtcpMode::kCompound);
@@ -94,7 +95,7 @@ class AudioIngressTest : public ::testing::Test {
SineWaveGenerator wave_generator_;
NiceMock<MockTransport> transport_;
std::unique_ptr<ReceiveStatistics> receive_statistics_;
- std::unique_ptr<RtpRtcp> rtp_rtcp_;
+ std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp_;
rtc::scoped_refptr<AudioEncoderFactory> encoder_factory_;
rtc::scoped_refptr<AudioDecoderFactory> decoder_factory_;
std::unique_ptr<TaskQueueFactory> task_queue_factory_;
diff --git a/chromium/third_party/webrtc/build_overrides/build.gni b/chromium/third_party/webrtc/build_overrides/build.gni
index 669044db81c..01eb9a0b2d6 100644
--- a/chromium/third_party/webrtc/build_overrides/build.gni
+++ b/chromium/third_party/webrtc/build_overrides/build.gni
@@ -16,6 +16,9 @@ linux_use_bundled_binutils_override = true
# only needed to support both WebRTC standalone and Chromium builds.
build_with_chromium = false
+# WebRTC checks out google_benchmark by default since it is always used.
+checkout_google_benchmark = true
+
# Use our own suppressions files.
asan_suppressions_file = "//build/sanitizers/asan_suppressions.cc"
lsan_suppressions_file = "//tools_webrtc/sanitizers/lsan_suppressions_webrtc.cc"
@@ -27,9 +30,6 @@ ubsan_blacklist_path =
ubsan_vptr_blacklist_path =
rebase_path("//tools_webrtc/ubsan/vptr_blacklist.txt", root_build_dir)
-# Android lint suppressions file
-lint_suppressions_file = "//tools_webrtc/android/suppressions.xml"
-
# For Chromium, Android 32-bit non-component, non-clang builds hit a 4GiB size
# limit, making them requiring symbol_level=2. WebRTC doesn't hit that problem
# so we just ignore that assert. See https://crbug.com/648948 for more info.
diff --git a/chromium/third_party/webrtc/call/BUILD.gn b/chromium/third_party/webrtc/call/BUILD.gn
index a9037c3819f..5f7c603c8d1 100644
--- a/chromium/third_party/webrtc/call/BUILD.gn
+++ b/chromium/third_party/webrtc/call/BUILD.gn
@@ -39,6 +39,7 @@ rtc_library("call_interfaces") {
"../api:rtp_parameters",
"../api:scoped_refptr",
"../api:transport_api",
+ "../api/adaptation:resource_adaptation_api",
"../api/audio:audio_mixer_api",
"../api/audio_codecs:audio_codecs_api",
"../api/crypto:frame_decryptor_interface",
@@ -61,8 +62,8 @@ rtc_library("call_interfaces") {
"../rtc_base:checks",
"../rtc_base:rtc_base_approved",
"../rtc_base/network:sent_packet",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_source_set("audio_sender_interface") {
@@ -100,6 +101,8 @@ rtc_library("rtp_interfaces") {
"../modules/rtp_rtcp:rtp_rtcp_format",
"../rtc_base:checks",
"../rtc_base:rtc_base_approved",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -128,8 +131,8 @@ rtc_library("rtp_receiver") {
"../modules/rtp_rtcp:rtp_rtcp_format",
"../rtc_base:checks",
"../rtc_base:rtc_base_approved",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("rtp_sender") {
@@ -171,6 +174,7 @@ rtc_library("rtp_sender") {
"../modules/rtp_rtcp:rtp_rtcp_format",
"../modules/rtp_rtcp:rtp_video_header",
"../modules/utility",
+ "../modules/video_coding:chain_diff_calculator",
"../modules/video_coding:codec_globals_headers",
"../modules/video_coding:frame_dependencies_calculator",
"../modules/video_coding:video_codec_interface",
@@ -180,6 +184,8 @@ rtc_library("rtp_sender") {
"../rtc_base:rtc_base_approved",
"../rtc_base:rtc_task_queue",
"../rtc_base/task_utils:repeating_task",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/container:inlined_vector",
"//third_party/abseil-cpp/absl/strings:strings",
@@ -202,8 +208,8 @@ rtc_library("bitrate_configurator") {
"../api/units:data_rate",
"../rtc_base:checks",
"../rtc_base:rtc_base_approved",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("bitrate_allocator") {
@@ -223,8 +229,8 @@ rtc_library("bitrate_allocator") {
"../system_wrappers",
"../system_wrappers:field_trial",
"../system_wrappers:metrics",
- "//third_party/abseil-cpp/absl/algorithm:container",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container" ]
}
rtc_library("call") {
@@ -279,14 +285,15 @@ rtc_library("call") {
"../rtc_base:safe_minmax",
"../rtc_base/experiments:field_trial_parser",
"../rtc_base/network:sent_packet",
- "../rtc_base/synchronization:rw_lock_wrapper",
"../rtc_base/synchronization:sequence_checker",
+ "../rtc_base/task_utils:pending_task_safety_flag",
"../system_wrappers",
"../system_wrappers:field_trial",
"../system_wrappers:metrics",
"../video",
- "//third_party/abseil-cpp/absl/types:optional",
+ "adaptation:resource_adaptation",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("video_stream_api") {
@@ -301,7 +308,9 @@ rtc_library("video_stream_api") {
"../api:frame_transformer_interface",
"../api:rtp_headers",
"../api:rtp_parameters",
+ "../api:scoped_refptr",
"../api:transport_api",
+ "../api/adaptation:resource_adaptation_api",
"../api/crypto:frame_decryptor_interface",
"../api/crypto:frame_encryptor_interface",
"../api/crypto:options",
@@ -315,8 +324,8 @@ rtc_library("video_stream_api") {
"../modules/rtp_rtcp:rtp_rtcp_format",
"../rtc_base:checks",
"../rtc_base:rtc_base_approved",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("simulated_network") {
@@ -333,8 +342,8 @@ rtc_library("simulated_network") {
"../rtc_base:checks",
"../rtc_base:rtc_base_approved",
"../rtc_base/synchronization:sequence_checker",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_source_set("simulated_packet_receiver") {
@@ -402,7 +411,9 @@ if (rtc_include_tests) {
"../api/audio_codecs:builtin_audio_decoder_factory",
"../api/rtc_event_log",
"../api/task_queue:default_task_queue_factory",
+ "../api/test/video:function_video_factory",
"../api/transport:field_trial_based_config",
+ "../api/video:builtin_video_bitrate_allocator_factory",
"../api/video:video_frame",
"../api/video:video_rtp_headers",
"../audio",
@@ -436,12 +447,16 @@ if (rtc_include_tests) {
"../test:video_test_common",
"../test/time_controller:time_controller",
"../video",
+ "adaptation:resource_adaptation_test_utilities",
"//test/scenario:scenario",
"//testing/gmock",
"//testing/gtest",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/container:inlined_vector",
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/types:optional",
+ "//third_party/abseil-cpp/absl/types:variant",
]
}
@@ -494,8 +509,8 @@ if (rtc_include_tests) {
"../test:video_test_common",
"../video",
"//testing/gtest",
- "//third_party/abseil-cpp/absl/flags:flag",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/flags:flag" ]
}
# TODO(eladalon): This should be moved, as with the TODO for |rtp_interfaces|.
@@ -553,7 +568,7 @@ if (rtc_include_tests) {
"../system_wrappers",
"../test:test_support",
"//testing/gtest",
- "//third_party/abseil-cpp/absl/algorithm:container",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container" ]
}
}
diff --git a/chromium/third_party/webrtc/call/adaptation/BUILD.gn b/chromium/third_party/webrtc/call/adaptation/BUILD.gn
index 2a6933ebd57..055fc437823 100644
--- a/chromium/third_party/webrtc/call/adaptation/BUILD.gn
+++ b/chromium/third_party/webrtc/call/adaptation/BUILD.gn
@@ -10,10 +10,14 @@ import("../../webrtc.gni")
rtc_library("resource_adaptation") {
sources = [
+ "adaptation_constraint.cc",
+ "adaptation_constraint.h",
+ "adaptation_listener.cc",
+ "adaptation_listener.h",
+ "broadcast_resource_listener.cc",
+ "broadcast_resource_listener.h",
"encoder_settings.cc",
"encoder_settings.h",
- "resource.cc",
- "resource.h",
"resource_adaptation_processor.cc",
"resource_adaptation_processor.h",
"resource_adaptation_processor_interface.cc",
@@ -30,6 +34,8 @@ rtc_library("resource_adaptation") {
deps = [
"../../api:rtp_parameters",
"../../api:scoped_refptr",
+ "../../api/adaptation:resource_adaptation_api",
+ "../../api/task_queue:task_queue",
"../../api/video:video_adaptation",
"../../api/video:video_frame",
"../../api/video:video_stream_encoder",
@@ -40,6 +46,9 @@ rtc_library("resource_adaptation") {
"../../rtc_base:rtc_task_queue",
"../../rtc_base/experiments:balanced_degradation_settings",
"../../rtc_base/synchronization:sequence_checker",
+ "../../rtc_base/task_utils:to_queued_task",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -50,6 +59,7 @@ if (rtc_include_tests) {
testonly = true
sources = [
+ "broadcast_resource_listener_unittest.cc",
"resource_adaptation_processor_unittest.cc",
"resource_unittest.cc",
"video_source_restrictions_unittest.cc",
@@ -60,35 +70,48 @@ if (rtc_include_tests) {
":resource_adaptation",
":resource_adaptation_test_utilities",
"../../api:scoped_refptr",
+ "../../api/adaptation:resource_adaptation_api",
"../../api/task_queue:default_task_queue_factory",
"../../api/task_queue:task_queue",
"../../api/video:video_adaptation",
"../../api/video_codecs:video_codecs_api",
"../../rtc_base:checks",
+ "../../rtc_base:gunit_helpers",
"../../rtc_base:rtc_base_approved",
"../../rtc_base:rtc_task_queue",
"../../rtc_base:task_queue_for_test",
"../../test:field_trial",
"../../test:rtc_expect_death",
"../../test:test_support",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_source_set("resource_adaptation_test_utilities") {
testonly = true
sources = [
+ "test/fake_adaptation_constraint.cc",
+ "test/fake_adaptation_constraint.h",
+ "test/fake_adaptation_listener.cc",
+ "test/fake_adaptation_listener.h",
"test/fake_frame_rate_provider.cc",
"test/fake_frame_rate_provider.h",
"test/fake_resource.cc",
"test/fake_resource.h",
+ "test/mock_resource_listener.h",
]
deps = [
":resource_adaptation",
+ "../../api:scoped_refptr",
+ "../../api/adaptation:resource_adaptation_api",
+ "../../api/task_queue:task_queue",
"../../api/video:video_stream_encoder",
"../../rtc_base:rtc_base_approved",
+ "../../rtc_base/synchronization:sequence_checker",
+ "../../rtc_base/task_utils:to_queued_task",
"../../test:test_support",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
}
diff --git a/chromium/third_party/webrtc/call/adaptation/adaptation_constraint.cc b/chromium/third_party/webrtc/call/adaptation/adaptation_constraint.cc
new file mode 100644
index 00000000000..d62bb74f87e
--- /dev/null
+++ b/chromium/third_party/webrtc/call/adaptation/adaptation_constraint.cc
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "call/adaptation/adaptation_constraint.h"
+
+namespace webrtc {
+
+AdaptationConstraint::~AdaptationConstraint() {}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/call/adaptation/adaptation_constraint.h b/chromium/third_party/webrtc/call/adaptation/adaptation_constraint.h
new file mode 100644
index 00000000000..9ff15d6b860
--- /dev/null
+++ b/chromium/third_party/webrtc/call/adaptation/adaptation_constraint.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef CALL_ADAPTATION_ADAPTATION_CONSTRAINT_H_
+#define CALL_ADAPTATION_ADAPTATION_CONSTRAINT_H_
+
+#include <string>
+
+#include "api/adaptation/resource.h"
+#include "api/scoped_refptr.h"
+#include "call/adaptation/video_source_restrictions.h"
+#include "call/adaptation/video_stream_input_state.h"
+
+namespace webrtc {
+
+// Adaptation constraints have the ability to prevent applying a proposed
+// adaptation (expressed as restrictions before/after adaptation).
+class AdaptationConstraint {
+ public:
+ virtual ~AdaptationConstraint();
+
+ virtual std::string Name() const = 0;
+
+ // TODO(https://crbug.com/webrtc/11172): When we have multi-stream adaptation
+ // support, this interface needs to indicate which stream the adaptation
+ // applies to.
+ virtual bool IsAdaptationUpAllowed(
+ const VideoStreamInputState& input_state,
+ const VideoSourceRestrictions& restrictions_before,
+ const VideoSourceRestrictions& restrictions_after,
+ rtc::scoped_refptr<Resource> reason_resource) const = 0;
+};
+
+} // namespace webrtc
+
+#endif // CALL_ADAPTATION_ADAPTATION_CONSTRAINT_H_
diff --git a/chromium/third_party/webrtc/call/adaptation/adaptation_listener.cc b/chromium/third_party/webrtc/call/adaptation/adaptation_listener.cc
new file mode 100644
index 00000000000..acc1564f770
--- /dev/null
+++ b/chromium/third_party/webrtc/call/adaptation/adaptation_listener.cc
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "call/adaptation/adaptation_listener.h"
+
+namespace webrtc {
+
+AdaptationListener::~AdaptationListener() {}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/call/adaptation/adaptation_listener.h b/chromium/third_party/webrtc/call/adaptation/adaptation_listener.h
new file mode 100644
index 00000000000..4a96baef8e0
--- /dev/null
+++ b/chromium/third_party/webrtc/call/adaptation/adaptation_listener.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef CALL_ADAPTATION_ADAPTATION_LISTENER_H_
+#define CALL_ADAPTATION_ADAPTATION_LISTENER_H_
+
+#include "api/adaptation/resource.h"
+#include "api/scoped_refptr.h"
+#include "call/adaptation/video_source_restrictions.h"
+#include "call/adaptation/video_stream_input_state.h"
+
+namespace webrtc {
+
+// TODO(hbos): Can this be consolidated with
+// ResourceAdaptationProcessorListener::OnVideoSourceRestrictionsUpdated()? Both
+// listen to adaptations being applied, but on different layers with different
+// arguments.
+class AdaptationListener {
+ public:
+ virtual ~AdaptationListener();
+
+ // TODO(https://crbug.com/webrtc/11172): When we have multi-stream adaptation
+ // support, this interface needs to indicate which stream the adaptation
+ // applies to.
+ virtual void OnAdaptationApplied(
+ const VideoStreamInputState& input_state,
+ const VideoSourceRestrictions& restrictions_before,
+ const VideoSourceRestrictions& restrictions_after,
+ rtc::scoped_refptr<Resource> reason_resource) = 0;
+};
+
+} // namespace webrtc
+
+#endif // CALL_ADAPTATION_ADAPTATION_LISTENER_H_
diff --git a/chromium/third_party/webrtc/call/adaptation/broadcast_resource_listener.cc b/chromium/third_party/webrtc/call/adaptation/broadcast_resource_listener.cc
new file mode 100644
index 00000000000..2a4d8cab09d
--- /dev/null
+++ b/chromium/third_party/webrtc/call/adaptation/broadcast_resource_listener.cc
@@ -0,0 +1,120 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "call/adaptation/broadcast_resource_listener.h"
+
+#include <algorithm>
+#include <string>
+#include <utility>
+
+#include "rtc_base/checks.h"
+#include "rtc_base/critical_section.h"
+#include "rtc_base/ref_counted_object.h"
+
+namespace webrtc {
+
+// The AdapterResource redirects resource usage measurements from its parent to
+// a single ResourceListener.
+class BroadcastResourceListener::AdapterResource : public Resource {
+ public:
+ explicit AdapterResource(std::string name) : name_(std::move(name)) {}
+ ~AdapterResource() override { RTC_DCHECK(!listener_); }
+
+  // The parent is letting us know we have a usage measurement.
+ void OnResourceUsageStateMeasured(ResourceUsageState usage_state) {
+ rtc::CritScope crit(&lock_);
+ if (!listener_)
+ return;
+ listener_->OnResourceUsageStateMeasured(this, usage_state);
+ }
+
+ // Resource implementation.
+ std::string Name() const override { return name_; }
+ void SetResourceListener(ResourceListener* listener) override {
+ rtc::CritScope crit(&lock_);
+ RTC_DCHECK(!listener_ || !listener);
+ listener_ = listener;
+ }
+
+ private:
+ const std::string name_;
+ rtc::CriticalSection lock_;
+ ResourceListener* listener_ RTC_GUARDED_BY(lock_) = nullptr;
+};
+
+BroadcastResourceListener::BroadcastResourceListener(
+ rtc::scoped_refptr<Resource> source_resource)
+ : source_resource_(source_resource), is_listening_(false) {
+ RTC_DCHECK(source_resource_);
+}
+
+BroadcastResourceListener::~BroadcastResourceListener() {
+ RTC_DCHECK(!is_listening_);
+}
+
+rtc::scoped_refptr<Resource> BroadcastResourceListener::SourceResource() const {
+ return source_resource_;
+}
+
+void BroadcastResourceListener::StartListening() {
+ rtc::CritScope crit(&lock_);
+ RTC_DCHECK(!is_listening_);
+ source_resource_->SetResourceListener(this);
+ is_listening_ = true;
+}
+
+void BroadcastResourceListener::StopListening() {
+ rtc::CritScope crit(&lock_);
+ RTC_DCHECK(is_listening_);
+ RTC_DCHECK(adapters_.empty());
+ source_resource_->SetResourceListener(nullptr);
+ is_listening_ = false;
+}
+
+rtc::scoped_refptr<Resource>
+BroadcastResourceListener::CreateAdapterResource() {
+ rtc::CritScope crit(&lock_);
+ RTC_DCHECK(is_listening_);
+ rtc::scoped_refptr<AdapterResource> adapter =
+ new rtc::RefCountedObject<AdapterResource>(source_resource_->Name() +
+ "Adapter");
+ adapters_.push_back(adapter);
+ return adapter;
+}
+
+void BroadcastResourceListener::RemoveAdapterResource(
+ rtc::scoped_refptr<Resource> resource) {
+ rtc::CritScope crit(&lock_);
+ auto it = std::find(adapters_.begin(), adapters_.end(), resource);
+ RTC_DCHECK(it != adapters_.end());
+ adapters_.erase(it);
+}
+
+std::vector<rtc::scoped_refptr<Resource>>
+BroadcastResourceListener::GetAdapterResources() {
+ std::vector<rtc::scoped_refptr<Resource>> resources;
+ rtc::CritScope crit(&lock_);
+ for (const auto& adapter : adapters_) {
+ resources.push_back(adapter);
+ }
+ return resources;
+}
+
+void BroadcastResourceListener::OnResourceUsageStateMeasured(
+ rtc::scoped_refptr<Resource> resource,
+ ResourceUsageState usage_state) {
+ RTC_DCHECK_EQ(resource, source_resource_);
+ rtc::CritScope crit(&lock_);
+ for (const auto& adapter : adapters_) {
+ adapter->OnResourceUsageStateMeasured(usage_state);
+ }
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/call/adaptation/broadcast_resource_listener.h b/chromium/third_party/webrtc/call/adaptation/broadcast_resource_listener.h
new file mode 100644
index 00000000000..f0d035dab7c
--- /dev/null
+++ b/chromium/third_party/webrtc/call/adaptation/broadcast_resource_listener.h
@@ -0,0 +1,75 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef CALL_ADAPTATION_BROADCAST_RESOURCE_LISTENER_H_
+#define CALL_ADAPTATION_BROADCAST_RESOURCE_LISTENER_H_
+
+#include <vector>
+
+#include "api/adaptation/resource.h"
+#include "api/scoped_refptr.h"
+#include "rtc_base/critical_section.h"
+
+namespace webrtc {
+
+// Responsible for forwarding 1 resource usage measurement to N listeners by
+// creating N "adapter" resources.
+//
+// Example:
+// If we have ResourceA, ResourceListenerX and ResourceListenerY we can create a
+// BroadcastResourceListener that listens to ResourceA, use CreateAdapter() to
+// spawn adapter resources ResourceX and ResourceY and let ResourceListenerX
+// listen to ResourceX and ResourceListenerY listen to ResourceY. When ResourceA
+// makes a measurement it will be echoed by both ResourceX and ResourceY.
+//
+// TODO(https://crbug.com/webrtc/11565): When the ResourceAdaptationProcessor is
+// moved to call there will only be one ResourceAdaptationProcessor that needs
+// to listen to the injected resources. When this is the case, delete this class
+// and DCHECK that a Resource's listener is never overwritten.
+class BroadcastResourceListener : public ResourceListener {
+ public:
+ explicit BroadcastResourceListener(
+ rtc::scoped_refptr<Resource> source_resource);
+ ~BroadcastResourceListener() override;
+
+ rtc::scoped_refptr<Resource> SourceResource() const;
+ void StartListening();
+ void StopListening();
+
+ // Creates a Resource that redirects any resource usage measurements that
+ // BroadcastResourceListener receives to its listener.
+ rtc::scoped_refptr<Resource> CreateAdapterResource();
+
+ // Unregister the adapter from the BroadcastResourceListener; it will no
+ // longer receive resource usage measurement and will no longer be referenced.
+ // Use this to prevent memory leaks of old adapters.
+ void RemoveAdapterResource(rtc::scoped_refptr<Resource> resource);
+ std::vector<rtc::scoped_refptr<Resource>> GetAdapterResources();
+
+ // ResourceListener implementation.
+ void OnResourceUsageStateMeasured(rtc::scoped_refptr<Resource> resource,
+ ResourceUsageState usage_state) override;
+
+ private:
+ class AdapterResource;
+ friend class AdapterResource;
+
+ const rtc::scoped_refptr<Resource> source_resource_;
+ rtc::CriticalSection lock_;
+ bool is_listening_ RTC_GUARDED_BY(lock_);
+ // The AdapterResource unregisters itself prior to destruction, guaranteeing
+ // that these pointers are safe to use.
+ std::vector<rtc::scoped_refptr<AdapterResource>> adapters_
+ RTC_GUARDED_BY(lock_);
+};
+
+} // namespace webrtc
+
+#endif // CALL_ADAPTATION_BROADCAST_RESOURCE_LISTENER_H_
diff --git a/chromium/third_party/webrtc/call/adaptation/broadcast_resource_listener_unittest.cc b/chromium/third_party/webrtc/call/adaptation/broadcast_resource_listener_unittest.cc
new file mode 100644
index 00000000000..9cd80500c2a
--- /dev/null
+++ b/chromium/third_party/webrtc/call/adaptation/broadcast_resource_listener_unittest.cc
@@ -0,0 +1,121 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "call/adaptation/broadcast_resource_listener.h"
+
+#include "call/adaptation/test/fake_resource.h"
+#include "call/adaptation/test/mock_resource_listener.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+
+using ::testing::_;
+using ::testing::StrictMock;
+
+TEST(BroadcastResourceListenerTest, CreateAndRemoveAdapterResource) {
+ rtc::scoped_refptr<FakeResource> source_resource =
+ FakeResource::Create("SourceResource");
+ BroadcastResourceListener broadcast_resource_listener(source_resource);
+ broadcast_resource_listener.StartListening();
+
+ EXPECT_TRUE(broadcast_resource_listener.GetAdapterResources().empty());
+ rtc::scoped_refptr<Resource> adapter =
+ broadcast_resource_listener.CreateAdapterResource();
+ StrictMock<MockResourceListener> listener;
+ adapter->SetResourceListener(&listener);
+ EXPECT_EQ(std::vector<rtc::scoped_refptr<Resource>>{adapter},
+ broadcast_resource_listener.GetAdapterResources());
+
+ // The removed adapter is not referenced by the broadcaster.
+ broadcast_resource_listener.RemoveAdapterResource(adapter);
+ EXPECT_TRUE(broadcast_resource_listener.GetAdapterResources().empty());
+ // The removed adapter is not forwarding measurements.
+ EXPECT_CALL(listener, OnResourceUsageStateMeasured(_, _)).Times(0);
+ source_resource->SetUsageState(ResourceUsageState::kOveruse);
+ // Cleanup.
+ adapter->SetResourceListener(nullptr);
+ broadcast_resource_listener.StopListening();
+}
+
+TEST(BroadcastResourceListenerTest, AdapterNameIsBasedOnSourceResourceName) {
+ rtc::scoped_refptr<FakeResource> source_resource =
+ FakeResource::Create("FooBarResource");
+ BroadcastResourceListener broadcast_resource_listener(source_resource);
+ broadcast_resource_listener.StartListening();
+
+ rtc::scoped_refptr<Resource> adapter =
+ broadcast_resource_listener.CreateAdapterResource();
+ EXPECT_EQ("FooBarResourceAdapter", adapter->Name());
+
+ broadcast_resource_listener.RemoveAdapterResource(adapter);
+ broadcast_resource_listener.StopListening();
+}
+
+TEST(BroadcastResourceListenerTest, AdaptersForwardsUsageMeasurements) {
+ rtc::scoped_refptr<FakeResource> source_resource =
+ FakeResource::Create("SourceResource");
+ BroadcastResourceListener broadcast_resource_listener(source_resource);
+ broadcast_resource_listener.StartListening();
+
+ StrictMock<MockResourceListener> destination_listener1;
+ StrictMock<MockResourceListener> destination_listener2;
+ rtc::scoped_refptr<Resource> adapter1 =
+ broadcast_resource_listener.CreateAdapterResource();
+ adapter1->SetResourceListener(&destination_listener1);
+ rtc::scoped_refptr<Resource> adapter2 =
+ broadcast_resource_listener.CreateAdapterResource();
+ adapter2->SetResourceListener(&destination_listener2);
+
+ // Expect kOveruse to be echoed.
+ EXPECT_CALL(destination_listener1, OnResourceUsageStateMeasured(_, _))
+ .Times(1)
+ .WillOnce([adapter1](rtc::scoped_refptr<Resource> resource,
+ ResourceUsageState usage_state) {
+ EXPECT_EQ(adapter1, resource);
+ EXPECT_EQ(ResourceUsageState::kOveruse, usage_state);
+ });
+ EXPECT_CALL(destination_listener2, OnResourceUsageStateMeasured(_, _))
+ .Times(1)
+ .WillOnce([adapter2](rtc::scoped_refptr<Resource> resource,
+ ResourceUsageState usage_state) {
+ EXPECT_EQ(adapter2, resource);
+ EXPECT_EQ(ResourceUsageState::kOveruse, usage_state);
+ });
+ source_resource->SetUsageState(ResourceUsageState::kOveruse);
+
+ // Expect kUnderuse to be echoed.
+ EXPECT_CALL(destination_listener1, OnResourceUsageStateMeasured(_, _))
+ .Times(1)
+ .WillOnce([adapter1](rtc::scoped_refptr<Resource> resource,
+ ResourceUsageState usage_state) {
+ EXPECT_EQ(adapter1, resource);
+ EXPECT_EQ(ResourceUsageState::kUnderuse, usage_state);
+ });
+ EXPECT_CALL(destination_listener2, OnResourceUsageStateMeasured(_, _))
+ .Times(1)
+ .WillOnce([adapter2](rtc::scoped_refptr<Resource> resource,
+ ResourceUsageState usage_state) {
+ EXPECT_EQ(adapter2, resource);
+ EXPECT_EQ(ResourceUsageState::kUnderuse, usage_state);
+ });
+ source_resource->SetUsageState(ResourceUsageState::kUnderuse);
+
+ // Adapters have to be unregistered before they or the broadcaster is
+ // destroyed, ensuring safe use of raw pointers.
+ adapter1->SetResourceListener(nullptr);
+ adapter2->SetResourceListener(nullptr);
+
+ broadcast_resource_listener.RemoveAdapterResource(adapter1);
+ broadcast_resource_listener.RemoveAdapterResource(adapter2);
+ broadcast_resource_listener.StopListening();
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/call/adaptation/resource.cc b/chromium/third_party/webrtc/call/adaptation/resource.cc
deleted file mode 100644
index a546450bc6b..00000000000
--- a/chromium/third_party/webrtc/call/adaptation/resource.cc
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * Copyright 2019 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "call/adaptation/resource.h"
-
-#include "absl/algorithm/container.h"
-#include "rtc_base/checks.h"
-
-namespace webrtc {
-
-ResourceListener::~ResourceListener() {}
-
-Resource::Resource()
- : encoder_queue_(nullptr),
- resource_adaptation_queue_(nullptr),
- usage_state_(absl::nullopt),
- listener_(nullptr) {}
-
-Resource::~Resource() {
- RTC_DCHECK(!listener_)
- << "There is a listener depending on a Resource being destroyed.";
-}
-
-void Resource::Initialize(rtc::TaskQueue* encoder_queue,
- rtc::TaskQueue* resource_adaptation_queue) {
- RTC_DCHECK(!encoder_queue_);
- RTC_DCHECK(encoder_queue);
- RTC_DCHECK(!resource_adaptation_queue_);
- RTC_DCHECK(resource_adaptation_queue);
- encoder_queue_ = encoder_queue;
- resource_adaptation_queue_ = resource_adaptation_queue;
-}
-
-void Resource::SetResourceListener(ResourceListener* listener) {
- RTC_DCHECK(resource_adaptation_queue_);
- RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
- // If you want to change listener you need to unregister the old listener by
- // setting it to null first.
- RTC_DCHECK(!listener_ || !listener) << "A listener is already set";
- listener_ = listener;
-}
-
-absl::optional<ResourceUsageState> Resource::usage_state() const {
- RTC_DCHECK(resource_adaptation_queue_);
- RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
- return usage_state_;
-}
-
-void Resource::ClearUsageState() {
- RTC_DCHECK(resource_adaptation_queue_);
- RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
- usage_state_ = absl::nullopt;
-}
-
-bool Resource::IsAdaptationUpAllowed(
- const VideoStreamInputState& input_state,
- const VideoSourceRestrictions& restrictions_before,
- const VideoSourceRestrictions& restrictions_after,
- rtc::scoped_refptr<Resource> reason_resource) const {
- return true;
-}
-
-void Resource::OnAdaptationApplied(
- const VideoStreamInputState& input_state,
- const VideoSourceRestrictions& restrictions_before,
- const VideoSourceRestrictions& restrictions_after,
- rtc::scoped_refptr<Resource> reason_resource) {}
-
-rtc::TaskQueue* Resource::encoder_queue() const {
- return encoder_queue_;
-}
-
-rtc::TaskQueue* Resource::resource_adaptation_queue() const {
- return resource_adaptation_queue_;
-}
-
-void Resource::OnResourceUsageStateMeasured(ResourceUsageState usage_state) {
- RTC_DCHECK(resource_adaptation_queue_);
- RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
- usage_state_ = usage_state;
- if (!listener_)
- return;
- listener_->OnResourceUsageStateMeasured(this);
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/call/adaptation/resource.h b/chromium/third_party/webrtc/call/adaptation/resource.h
deleted file mode 100644
index 2ee0c720d2c..00000000000
--- a/chromium/third_party/webrtc/call/adaptation/resource.h
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Copyright 2019 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef CALL_ADAPTATION_RESOURCE_H_
-#define CALL_ADAPTATION_RESOURCE_H_
-
-#include <string>
-#include <vector>
-
-#include "absl/types/optional.h"
-#include "api/scoped_refptr.h"
-#include "call/adaptation/video_source_restrictions.h"
-#include "call/adaptation/video_stream_input_state.h"
-#include "rtc_base/ref_count.h"
-#include "rtc_base/task_queue.h"
-
-namespace webrtc {
-
-class Resource;
-
-enum class ResourceUsageState {
- // Action is needed to minimze the load on this resource.
- kOveruse,
- // Increasing the load on this resource is desired, if possible.
- kUnderuse,
-};
-
-class ResourceListener {
- public:
- virtual ~ResourceListener();
-
- // Informs the listener of a new measurement of resource usage. This means
- // that |resource->usage_state()| is now up-to-date.
- virtual void OnResourceUsageStateMeasured(
- rtc::scoped_refptr<Resource> resource) = 0;
-};
-
-class Resource : public rtc::RefCountInterface {
- public:
- // By default, usage_state() is null until a measurement is made.
- Resource();
- ~Resource() override;
-
- void Initialize(rtc::TaskQueue* encoder_queue,
- rtc::TaskQueue* resource_adaptation_queue);
-
- void SetResourceListener(ResourceListener* listener);
-
- absl::optional<ResourceUsageState> usage_state() const;
- void ClearUsageState();
-
- // This method allows the Resource to reject a proposed adaptation in the "up"
- // direction if it predicts this would cause overuse of this resource. The
- // default implementation unconditionally returns true (= allowed).
- virtual bool IsAdaptationUpAllowed(
- const VideoStreamInputState& input_state,
- const VideoSourceRestrictions& restrictions_before,
- const VideoSourceRestrictions& restrictions_after,
- rtc::scoped_refptr<Resource> reason_resource) const;
- virtual void OnAdaptationApplied(
- const VideoStreamInputState& input_state,
- const VideoSourceRestrictions& restrictions_before,
- const VideoSourceRestrictions& restrictions_after,
- rtc::scoped_refptr<Resource> reason_resource);
-
- virtual std::string name() const = 0;
-
- protected:
- rtc::TaskQueue* encoder_queue() const;
- rtc::TaskQueue* resource_adaptation_queue() const;
-
- // Updates the usage state and informs all registered listeners.
- void OnResourceUsageStateMeasured(ResourceUsageState usage_state);
-
- private:
- rtc::TaskQueue* encoder_queue_;
- rtc::TaskQueue* resource_adaptation_queue_;
- absl::optional<ResourceUsageState> usage_state_
- RTC_GUARDED_BY(resource_adaptation_queue_);
- ResourceListener* listener_ RTC_GUARDED_BY(resource_adaptation_queue_);
-};
-
-} // namespace webrtc
-
-#endif // CALL_ADAPTATION_RESOURCE_H_
diff --git a/chromium/third_party/webrtc/call/adaptation/resource_adaptation_processor.cc b/chromium/third_party/webrtc/call/adaptation/resource_adaptation_processor.cc
index 0224ac3bb2b..0be01cb7949 100644
--- a/chromium/third_party/webrtc/call/adaptation/resource_adaptation_processor.cc
+++ b/chromium/third_party/webrtc/call/adaptation/resource_adaptation_processor.cc
@@ -11,17 +11,66 @@
#include "call/adaptation/resource_adaptation_processor.h"
#include <algorithm>
+#include <string>
#include <utility>
#include "absl/algorithm/container.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/ref_counted_object.h"
+#include "rtc_base/strings/string_builder.h"
+#include "rtc_base/task_utils/to_queued_task.h"
namespace webrtc {
+ResourceAdaptationProcessor::ResourceListenerDelegate::ResourceListenerDelegate(
+ ResourceAdaptationProcessor* processor)
+ : resource_adaptation_queue_(nullptr), processor_(processor) {}
+
+void ResourceAdaptationProcessor::ResourceListenerDelegate::
+ SetResourceAdaptationQueue(TaskQueueBase* resource_adaptation_queue) {
+ RTC_DCHECK(!resource_adaptation_queue_);
+ RTC_DCHECK(resource_adaptation_queue);
+ resource_adaptation_queue_ = resource_adaptation_queue;
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
+}
+
+void ResourceAdaptationProcessor::ResourceListenerDelegate::
+ OnProcessorDestroyed() {
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
+ processor_ = nullptr;
+}
+
+void ResourceAdaptationProcessor::ResourceListenerDelegate::
+ OnResourceUsageStateMeasured(rtc::scoped_refptr<Resource> resource,
+ ResourceUsageState usage_state) {
+ if (!resource_adaptation_queue_->IsCurrent()) {
+ resource_adaptation_queue_->PostTask(ToQueuedTask(
+ [this_ref = rtc::scoped_refptr<ResourceListenerDelegate>(this),
+ resource, usage_state] {
+ this_ref->OnResourceUsageStateMeasured(resource, usage_state);
+ }));
+ return;
+ }
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
+ if (processor_) {
+ processor_->OnResourceUsageStateMeasured(resource, usage_state);
+ }
+}
+
+ResourceAdaptationProcessor::MitigationResultAndLogMessage::
+ MitigationResultAndLogMessage()
+ : result(MitigationResult::kAdaptationApplied), message() {}
+
+ResourceAdaptationProcessor::MitigationResultAndLogMessage::
+ MitigationResultAndLogMessage(MitigationResult result, std::string message)
+ : result(result), message(std::move(message)) {}
+
ResourceAdaptationProcessor::ResourceAdaptationProcessor(
VideoStreamInputStateProvider* input_state_provider,
VideoStreamEncoderObserver* encoder_stats_observer)
- : sequence_checker_(),
- is_resource_adaptation_enabled_(false),
+ : resource_adaptation_queue_(nullptr),
+ resource_listener_delegate_(
+ new rtc::RefCountedObject<ResourceListenerDelegate>(this)),
input_state_provider_(input_state_provider),
encoder_stats_observer_(encoder_stats_observer),
resources_(),
@@ -30,62 +79,123 @@ ResourceAdaptationProcessor::ResourceAdaptationProcessor(
is_screenshare_(false),
stream_adapter_(std::make_unique<VideoStreamAdapter>()),
last_reported_source_restrictions_(),
- processing_in_progress_(false) {
- sequence_checker_.Detach();
-}
+ previous_mitigation_results_(),
+ processing_in_progress_(false) {}
ResourceAdaptationProcessor::~ResourceAdaptationProcessor() {
- RTC_DCHECK_RUN_ON(&sequence_checker_);
- RTC_DCHECK(!is_resource_adaptation_enabled_);
- RTC_DCHECK(adaptation_listeners_.empty())
- << "There are listener(s) depending on a ResourceAdaptationProcessor "
- << "being destroyed.";
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
+ RTC_DCHECK(restrictions_listeners_.empty())
+ << "There are restrictions listener(s) depending on a "
+ << "ResourceAdaptationProcessor being destroyed.";
RTC_DCHECK(resources_.empty())
<< "There are resource(s) attached to a ResourceAdaptationProcessor "
<< "being destroyed.";
+ RTC_DCHECK(adaptation_constraints_.empty())
+ << "There are constaint(s) attached to a ResourceAdaptationProcessor "
+ << "being destroyed.";
+ RTC_DCHECK(adaptation_listeners_.empty())
+ << "There are listener(s) attached to a ResourceAdaptationProcessor "
+ << "being destroyed.";
+ resource_listener_delegate_->OnProcessorDestroyed();
}
-void ResourceAdaptationProcessor::InitializeOnResourceAdaptationQueue() {
- // Allows |sequence_checker_| to attach to the resource adaptation queue.
- // The caller is responsible for ensuring that this is the current queue.
- RTC_DCHECK_RUN_ON(&sequence_checker_);
+void ResourceAdaptationProcessor::SetResourceAdaptationQueue(
+ TaskQueueBase* resource_adaptation_queue) {
+ RTC_DCHECK(!resource_adaptation_queue_);
+ RTC_DCHECK(resource_adaptation_queue);
+ resource_adaptation_queue_ = resource_adaptation_queue;
+ resource_listener_delegate_->SetResourceAdaptationQueue(
+ resource_adaptation_queue);
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
}
DegradationPreference ResourceAdaptationProcessor::degradation_preference()
const {
- RTC_DCHECK_RUN_ON(&sequence_checker_);
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
return degradation_preference_;
}
DegradationPreference
ResourceAdaptationProcessor::effective_degradation_preference() const {
- RTC_DCHECK_RUN_ON(&sequence_checker_);
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
return effective_degradation_preference_;
}
-void ResourceAdaptationProcessor::StartResourceAdaptation() {
- RTC_DCHECK_RUN_ON(&sequence_checker_);
- if (is_resource_adaptation_enabled_)
- return;
- for (const auto& resource : resources_) {
- resource->SetResourceListener(this);
- }
- is_resource_adaptation_enabled_ = true;
+void ResourceAdaptationProcessor::AddRestrictionsListener(
+ VideoSourceRestrictionsListener* restrictions_listener) {
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
+ RTC_DCHECK(std::find(restrictions_listeners_.begin(),
+ restrictions_listeners_.end(),
+ restrictions_listener) == restrictions_listeners_.end());
+ restrictions_listeners_.push_back(restrictions_listener);
}
-void ResourceAdaptationProcessor::StopResourceAdaptation() {
- RTC_DCHECK_RUN_ON(&sequence_checker_);
- if (!is_resource_adaptation_enabled_)
- return;
- for (const auto& resource : resources_) {
- resource->SetResourceListener(nullptr);
+void ResourceAdaptationProcessor::RemoveRestrictionsListener(
+ VideoSourceRestrictionsListener* restrictions_listener) {
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
+ auto it = std::find(restrictions_listeners_.begin(),
+ restrictions_listeners_.end(), restrictions_listener);
+ RTC_DCHECK(it != restrictions_listeners_.end());
+ restrictions_listeners_.erase(it);
+}
+
+void ResourceAdaptationProcessor::AddResource(
+ rtc::scoped_refptr<Resource> resource) {
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
+ RTC_DCHECK(resource);
+ RTC_DCHECK(absl::c_find(resources_, resource) == resources_.end())
+ << "Resource \"" << resource->Name() << "\" was already registered.";
+ resources_.push_back(resource);
+ resource->SetResourceListener(resource_listener_delegate_);
+}
+
+std::vector<rtc::scoped_refptr<Resource>>
+ResourceAdaptationProcessor::GetResources() const {
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
+ return resources_;
+}
+
+void ResourceAdaptationProcessor::RemoveResource(
+ rtc::scoped_refptr<Resource> resource) {
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
+ RTC_DCHECK(resource);
+ RTC_LOG(INFO) << "Removing resource \"" << resource->Name() << "\".";
+ auto it = absl::c_find(resources_, resource);
+ RTC_DCHECK(it != resources_.end()) << "Resource \"" << resource->Name()
+ << "\" was not a registered resource.";
+ auto resource_adaptation_limits =
+ adaptation_limits_by_resources_.find(resource);
+ if (resource_adaptation_limits != adaptation_limits_by_resources_.end()) {
+ VideoStreamAdapter::RestrictionsWithCounters adaptation_limits =
+ resource_adaptation_limits->second;
+ adaptation_limits_by_resources_.erase(resource_adaptation_limits);
+ MaybeUpdateResourceLimitationsOnResourceRemoval(adaptation_limits);
}
- is_resource_adaptation_enabled_ = false;
+ resources_.erase(it);
+ resource->SetResourceListener(nullptr);
+}
+
+void ResourceAdaptationProcessor::AddAdaptationConstraint(
+ AdaptationConstraint* adaptation_constraint) {
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
+ RTC_DCHECK(std::find(adaptation_constraints_.begin(),
+ adaptation_constraints_.end(),
+ adaptation_constraint) == adaptation_constraints_.end());
+ adaptation_constraints_.push_back(adaptation_constraint);
+}
+
+void ResourceAdaptationProcessor::RemoveAdaptationConstraint(
+ AdaptationConstraint* adaptation_constraint) {
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
+ auto it = std::find(adaptation_constraints_.begin(),
+ adaptation_constraints_.end(), adaptation_constraint);
+ RTC_DCHECK(it != adaptation_constraints_.end());
+ adaptation_constraints_.erase(it);
}
void ResourceAdaptationProcessor::AddAdaptationListener(
- ResourceAdaptationProcessorListener* adaptation_listener) {
- RTC_DCHECK_RUN_ON(&sequence_checker_);
+ AdaptationListener* adaptation_listener) {
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
RTC_DCHECK(std::find(adaptation_listeners_.begin(),
adaptation_listeners_.end(),
adaptation_listener) == adaptation_listeners_.end());
@@ -93,52 +203,29 @@ void ResourceAdaptationProcessor::AddAdaptationListener(
}
void ResourceAdaptationProcessor::RemoveAdaptationListener(
- ResourceAdaptationProcessorListener* adaptation_listener) {
- RTC_DCHECK_RUN_ON(&sequence_checker_);
+ AdaptationListener* adaptation_listener) {
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
auto it = std::find(adaptation_listeners_.begin(),
adaptation_listeners_.end(), adaptation_listener);
RTC_DCHECK(it != adaptation_listeners_.end());
adaptation_listeners_.erase(it);
}
-void ResourceAdaptationProcessor::AddResource(
- rtc::scoped_refptr<Resource> resource) {
- RTC_DCHECK_RUN_ON(&sequence_checker_);
- // TODO(hbos): Allow adding resources while |is_resource_adaptation_enabled_|
- // by registering as a listener of the resource on adding it.
- RTC_DCHECK(!is_resource_adaptation_enabled_);
- RTC_DCHECK(std::find(resources_.begin(), resources_.end(), resource) ==
- resources_.end());
- resources_.push_back(resource);
-}
-
-void ResourceAdaptationProcessor::RemoveResource(
- rtc::scoped_refptr<Resource> resource) {
- RTC_DCHECK_RUN_ON(&sequence_checker_);
- // TODO(hbos): Allow removing resources while
- // |is_resource_adaptation_enabled_| by unregistering as a listener of the
- // resource on removing it.
- RTC_DCHECK(!is_resource_adaptation_enabled_);
- auto it = std::find(resources_.begin(), resources_.end(), resource);
- RTC_DCHECK(it != resources_.end());
- resources_.erase(it);
-}
-
void ResourceAdaptationProcessor::SetDegradationPreference(
DegradationPreference degradation_preference) {
- RTC_DCHECK_RUN_ON(&sequence_checker_);
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
degradation_preference_ = degradation_preference;
MaybeUpdateEffectiveDegradationPreference();
}
void ResourceAdaptationProcessor::SetIsScreenshare(bool is_screenshare) {
- RTC_DCHECK_RUN_ON(&sequence_checker_);
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
is_screenshare_ = is_screenshare;
MaybeUpdateEffectiveDegradationPreference();
}
void ResourceAdaptationProcessor::MaybeUpdateEffectiveDegradationPreference() {
- RTC_DCHECK_RUN_ON(&sequence_checker_);
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
effective_degradation_preference_ =
(is_screenshare_ &&
degradation_preference_ == DegradationPreference::BALANCED)
@@ -149,76 +236,103 @@ void ResourceAdaptationProcessor::MaybeUpdateEffectiveDegradationPreference() {
}
void ResourceAdaptationProcessor::ResetVideoSourceRestrictions() {
- RTC_DCHECK_RUN_ON(&sequence_checker_);
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
+ RTC_LOG(INFO) << "Resetting restrictions";
stream_adapter_->ClearRestrictions();
- adaptations_counts_by_resource_.clear();
+ adaptation_limits_by_resources_.clear();
+ for (auto restrictions_listener : restrictions_listeners_) {
+ restrictions_listener->OnResourceLimitationChanged(nullptr, {});
+ }
MaybeUpdateVideoSourceRestrictions(nullptr);
}
void ResourceAdaptationProcessor::MaybeUpdateVideoSourceRestrictions(
rtc::scoped_refptr<Resource> reason) {
- RTC_DCHECK_RUN_ON(&sequence_checker_);
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
VideoSourceRestrictions new_source_restrictions =
FilterRestrictionsByDegradationPreference(
stream_adapter_->source_restrictions(),
effective_degradation_preference_);
if (last_reported_source_restrictions_ != new_source_restrictions) {
+ RTC_LOG(INFO) << "Reporting new restrictions (in "
+ << DegradationPreferenceToString(
+ effective_degradation_preference_)
+ << "): " << new_source_restrictions.ToString();
last_reported_source_restrictions_ = std::move(new_source_restrictions);
- for (auto* adaptation_listener : adaptation_listeners_) {
- adaptation_listener->OnVideoSourceRestrictionsUpdated(
+ for (auto* restrictions_listener : restrictions_listeners_) {
+ restrictions_listener->OnVideoSourceRestrictionsUpdated(
last_reported_source_restrictions_,
stream_adapter_->adaptation_counters(), reason);
}
- if (reason) {
- UpdateResourceDegradationCounts(reason);
- }
}
}
void ResourceAdaptationProcessor::OnResourceUsageStateMeasured(
- rtc::scoped_refptr<Resource> resource) {
- RTC_DCHECK_RUN_ON(&sequence_checker_);
- RTC_DCHECK(resource->usage_state().has_value());
- switch (resource->usage_state().value()) {
+ rtc::scoped_refptr<Resource> resource,
+ ResourceUsageState usage_state) {
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
+ RTC_DCHECK(resource);
+ // |resource| could have been removed after signalling.
+ if (absl::c_find(resources_, resource) == resources_.end()) {
+ RTC_LOG(INFO) << "Ignoring signal from removed resource \""
+ << resource->Name() << "\".";
+ return;
+ }
+ MitigationResultAndLogMessage result_and_message;
+ switch (usage_state) {
case ResourceUsageState::kOveruse:
- OnResourceOveruse(resource);
+ result_and_message = OnResourceOveruse(resource);
break;
case ResourceUsageState::kUnderuse:
- OnResourceUnderuse(resource);
+ result_and_message = OnResourceUnderuse(resource);
break;
}
+ // Maybe log the result of the operation.
+ auto it = previous_mitigation_results_.find(resource.get());
+ if (it != previous_mitigation_results_.end() &&
+ it->second == result_and_message.result) {
+ // This resource has previously reported the same result and we haven't
+ // successfully adapted since - don't log to avoid spam.
+ return;
+ }
+ RTC_LOG(INFO) << "Resource \"" << resource->Name() << "\" signalled "
+ << ResourceUsageStateToString(usage_state) << ". "
+ << result_and_message.message;
+ if (result_and_message.result == MitigationResult::kAdaptationApplied) {
+ previous_mitigation_results_.clear();
+ } else {
+ previous_mitigation_results_.insert(
+ std::make_pair(resource.get(), result_and_message.result));
+ }
}
bool ResourceAdaptationProcessor::HasSufficientInputForAdaptation(
const VideoStreamInputState& input_state) const {
- RTC_DCHECK_RUN_ON(&sequence_checker_);
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
return input_state.HasInputFrameSizeAndFramesPerSecond() &&
(effective_degradation_preference_ !=
DegradationPreference::MAINTAIN_RESOLUTION ||
input_state.frames_per_second() >= kMinFrameRateFps);
}
-void ResourceAdaptationProcessor::OnResourceUnderuse(
+ResourceAdaptationProcessor::MitigationResultAndLogMessage
+ResourceAdaptationProcessor::OnResourceUnderuse(
rtc::scoped_refptr<Resource> reason_resource) {
- RTC_DCHECK_RUN_ON(&sequence_checker_);
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
RTC_DCHECK(!processing_in_progress_);
processing_in_progress_ = true;
- // Clear all usage states. In order to re-run adaptation logic, resources need
- // to provide new resource usage measurements.
- // TODO(hbos): Support not unconditionally clearing usage states by having the
- // ResourceAdaptationProcessor check in on its resources at certain intervals.
- for (const auto& resource : resources_) {
- resource->ClearUsageState();
- }
- VideoStreamInputState input_state = input_state_provider_->InputState();
- if (effective_degradation_preference_ == DegradationPreference::DISABLED ||
- !HasSufficientInputForAdaptation(input_state)) {
+ if (effective_degradation_preference_ == DegradationPreference::DISABLED) {
processing_in_progress_ = false;
- return;
+ return MitigationResultAndLogMessage(
+ MitigationResult::kDisabled,
+ "Not adapting up because DegradationPreference is disabled");
}
- if (!IsResourceAllowedToAdaptUp(reason_resource)) {
+ VideoStreamInputState input_state = input_state_provider_->InputState();
+ if (!HasSufficientInputForAdaptation(input_state)) {
processing_in_progress_ = false;
- return;
+ return MitigationResultAndLogMessage(
+ MitigationResult::kInsufficientInput,
+ "Not adapting up because input is insufficient");
}
// Update video input states and encoder settings for accurate adaptation.
stream_adapter_->SetInput(input_state);
@@ -226,56 +340,99 @@ void ResourceAdaptationProcessor::OnResourceUnderuse(
Adaptation adaptation = stream_adapter_->GetAdaptationUp();
if (adaptation.status() != Adaptation::Status::kValid) {
processing_in_progress_ = false;
- return;
+ rtc::StringBuilder message;
+ message << "Not adapting up because VideoStreamAdapter returned "
+ << Adaptation::StatusToString(adaptation.status());
+ return MitigationResultAndLogMessage(MitigationResult::kRejectedByAdapter,
+ message.Release());
}
- // Are all resources OK with this adaptation being applied?
VideoSourceRestrictions restrictions_before =
stream_adapter_->source_restrictions();
- VideoSourceRestrictions restrictions_after =
+ VideoStreamAdapter::RestrictionsWithCounters peek_restrictions =
stream_adapter_->PeekNextRestrictions(adaptation);
- if (!absl::c_all_of(resources_, [&input_state, &restrictions_before,
- &restrictions_after, &reason_resource](
- rtc::scoped_refptr<Resource> resource) {
- return resource->IsAdaptationUpAllowed(input_state, restrictions_before,
- restrictions_after,
- reason_resource);
- })) {
- processing_in_progress_ = false;
- return;
+ VideoSourceRestrictions restrictions_after = peek_restrictions.restrictions;
+ // Check that resource is most limited...
+ std::vector<rtc::scoped_refptr<Resource>> most_limited_resources;
+ VideoStreamAdapter::RestrictionsWithCounters most_limited_restrictions;
+ std::tie(most_limited_resources, most_limited_restrictions) =
+ FindMostLimitedResources();
+
+ for (const auto* constraint : adaptation_constraints_) {
+ if (!constraint->IsAdaptationUpAllowed(input_state, restrictions_before,
+ restrictions_after,
+ reason_resource)) {
+ processing_in_progress_ = false;
+ rtc::StringBuilder message;
+ message << "Not adapting up because constraint \"" << constraint->Name()
+ << "\" disallowed it";
+ return MitigationResultAndLogMessage(
+ MitigationResult::kRejectedByConstraint, message.Release());
+ }
+ }
+ // If the most restricted resource is less limited than current restrictions
+ // then proceed with adapting up.
+ if (!most_limited_resources.empty() &&
+ most_limited_restrictions.adaptation_counters.Total() >=
+ stream_adapter_->adaptation_counters().Total()) {
+ // If |reason_resource| is not one of the most limiting resources then abort
+ // adaptation.
+ if (absl::c_find(most_limited_resources, reason_resource) ==
+ most_limited_resources.end()) {
+ processing_in_progress_ = false;
+ rtc::StringBuilder message;
+ message << "Resource \"" << reason_resource->Name()
+ << "\" was not the most limited resource.";
+ return MitigationResultAndLogMessage(
+ MitigationResult::kNotMostLimitedResource, message.Release());
+ }
+
+ UpdateResourceLimitations(reason_resource, peek_restrictions);
+ if (most_limited_resources.size() > 1) {
+ // If there are multiple most limited resources, all must signal underuse
+ // before the adaptation is applied.
+ processing_in_progress_ = false;
+ rtc::StringBuilder message;
+ message << "Resource \"" << reason_resource->Name()
+ << "\" was not the only most limited resource.";
+ return MitigationResultAndLogMessage(
+ MitigationResult::kSharedMostLimitedResource, message.Release());
+ }
}
// Apply adaptation.
stream_adapter_->ApplyAdaptation(adaptation);
- for (const auto& resource : resources_) {
- resource->OnAdaptationApplied(input_state, restrictions_before,
- restrictions_after, reason_resource);
+ for (auto* adaptation_listener : adaptation_listeners_) {
+ adaptation_listener->OnAdaptationApplied(
+ input_state, restrictions_before, restrictions_after, reason_resource);
}
// Update VideoSourceRestrictions based on adaptation. This also informs the
- // |adaptation_listeners_|.
+ // |restrictions_listeners_|.
MaybeUpdateVideoSourceRestrictions(reason_resource);
processing_in_progress_ = false;
+ rtc::StringBuilder message;
+ message << "Adapted up successfully. Unfiltered adaptations: "
+ << stream_adapter_->adaptation_counters().ToString();
+ return MitigationResultAndLogMessage(MitigationResult::kAdaptationApplied,
+ message.Release());
}
-void ResourceAdaptationProcessor::OnResourceOveruse(
+ResourceAdaptationProcessor::MitigationResultAndLogMessage
+ResourceAdaptationProcessor::OnResourceOveruse(
rtc::scoped_refptr<Resource> reason_resource) {
- RTC_DCHECK_RUN_ON(&sequence_checker_);
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
RTC_DCHECK(!processing_in_progress_);
processing_in_progress_ = true;
- // Clear all usage states. In order to re-run adaptation logic, resources need
- // to provide new resource usage measurements.
- // TODO(hbos): Support not unconditionally clearing usage states by having the
- // ResourceAdaptationProcessor check in on its resources at certain intervals.
- for (const auto& resource : resources_) {
- resource->ClearUsageState();
+ if (effective_degradation_preference_ == DegradationPreference::DISABLED) {
+ processing_in_progress_ = false;
+ return MitigationResultAndLogMessage(
+ MitigationResult::kDisabled,
+ "Not adapting down because DegradationPreference is disabled");
}
VideoStreamInputState input_state = input_state_provider_->InputState();
- if (!input_state.has_input()) {
+ if (!HasSufficientInputForAdaptation(input_state)) {
processing_in_progress_ = false;
- return;
- }
- if (effective_degradation_preference_ == DegradationPreference::DISABLED ||
- !HasSufficientInputForAdaptation(input_state)) {
- processing_in_progress_ = false;
- return;
+ return MitigationResultAndLogMessage(
+ MitigationResult::kInsufficientInput,
+ "Not adapting down because input is insufficient");
}
// Update video input states and encoder settings for accurate adaptation.
stream_adapter_->SetInput(input_state);
@@ -286,27 +443,40 @@ void ResourceAdaptationProcessor::OnResourceOveruse(
}
if (adaptation.status() != Adaptation::Status::kValid) {
processing_in_progress_ = false;
- return;
+ rtc::StringBuilder message;
+ message << "Not adapting down because VideoStreamAdapter returned "
+ << Adaptation::StatusToString(adaptation.status());
+ return MitigationResultAndLogMessage(MitigationResult::kRejectedByAdapter,
+ message.Release());
}
// Apply adaptation.
VideoSourceRestrictions restrictions_before =
stream_adapter_->source_restrictions();
- VideoSourceRestrictions restrictions_after =
+ VideoStreamAdapter::RestrictionsWithCounters peek_next_restrictions =
stream_adapter_->PeekNextRestrictions(adaptation);
+ VideoSourceRestrictions restrictions_after =
+ peek_next_restrictions.restrictions;
+ UpdateResourceLimitations(reason_resource, peek_next_restrictions);
stream_adapter_->ApplyAdaptation(adaptation);
- for (const auto& resource : resources_) {
- resource->OnAdaptationApplied(input_state, restrictions_before,
- restrictions_after, reason_resource);
+ for (auto* adaptation_listener : adaptation_listeners_) {
+ adaptation_listener->OnAdaptationApplied(
+ input_state, restrictions_before, restrictions_after, reason_resource);
}
// Update VideoSourceRestrictions based on adaptation. This also informs the
- // |adaptation_listeners_|.
+ // |restrictions_listeners_|.
MaybeUpdateVideoSourceRestrictions(reason_resource);
processing_in_progress_ = false;
+ rtc::StringBuilder message;
+ message << "Adapted down successfully. Unfiltered adaptations: "
+ << stream_adapter_->adaptation_counters().ToString();
+ return MitigationResultAndLogMessage(MitigationResult::kAdaptationApplied,
+ message.Release());
}
void ResourceAdaptationProcessor::TriggerAdaptationDueToFrameDroppedDueToSize(
rtc::scoped_refptr<Resource> reason_resource) {
- RTC_DCHECK_RUN_ON(&sequence_checker_);
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
+ RTC_LOG(INFO) << "TriggerAdaptationDueToFrameDroppedDueToSize called";
VideoAdaptationCounters counters_before =
stream_adapter_->adaptation_counters();
OnResourceOveruse(reason_resource);
@@ -323,27 +493,85 @@ void ResourceAdaptationProcessor::TriggerAdaptationDueToFrameDroppedDueToSize(
}
}
-void ResourceAdaptationProcessor::UpdateResourceDegradationCounts(
- rtc::scoped_refptr<Resource> resource) {
- RTC_DCHECK_RUN_ON(&sequence_checker_);
- RTC_DCHECK(resource);
- int delta = stream_adapter_->adaptation_counters().Total();
- for (const auto& adaptations : adaptations_counts_by_resource_) {
- delta -= adaptations.second;
+std::pair<std::vector<rtc::scoped_refptr<Resource>>,
+ VideoStreamAdapter::RestrictionsWithCounters>
+ResourceAdaptationProcessor::FindMostLimitedResources() const {
+ std::vector<rtc::scoped_refptr<Resource>> most_limited_resources;
+ VideoStreamAdapter::RestrictionsWithCounters most_limited_restrictions{
+ VideoSourceRestrictions(), VideoAdaptationCounters()};
+
+ for (const auto& resource_and_adaptation_limit_ :
+ adaptation_limits_by_resources_) {
+ const auto& restrictions_with_counters =
+ resource_and_adaptation_limit_.second;
+ if (restrictions_with_counters.adaptation_counters.Total() >
+ most_limited_restrictions.adaptation_counters.Total()) {
+ most_limited_restrictions = restrictions_with_counters;
+ most_limited_resources.clear();
+ most_limited_resources.push_back(resource_and_adaptation_limit_.first);
+ } else if (most_limited_restrictions.adaptation_counters ==
+ restrictions_with_counters.adaptation_counters) {
+ most_limited_resources.push_back(resource_and_adaptation_limit_.first);
+ }
+ }
+ return std::make_pair(std::move(most_limited_resources),
+ most_limited_restrictions);
+}
+
+void ResourceAdaptationProcessor::UpdateResourceLimitations(
+ rtc::scoped_refptr<Resource> reason_resource,
+ const VideoStreamAdapter::RestrictionsWithCounters&
+ peek_next_restrictions) {
+ adaptation_limits_by_resources_[reason_resource] = peek_next_restrictions;
+
+ std::map<rtc::scoped_refptr<Resource>, VideoAdaptationCounters> limitations;
+ for (const auto& p : adaptation_limits_by_resources_) {
+ limitations.insert(std::make_pair(p.first, p.second.adaptation_counters));
}
- // Default value is 0, inserts the value if missing.
- adaptations_counts_by_resource_[resource] += delta;
- RTC_DCHECK_GE(adaptations_counts_by_resource_[resource], 0);
+ for (auto restrictions_listener : restrictions_listeners_) {
+ restrictions_listener->OnResourceLimitationChanged(reason_resource,
+ limitations);
+ }
}
-bool ResourceAdaptationProcessor::IsResourceAllowedToAdaptUp(
- rtc::scoped_refptr<Resource> resource) const {
- RTC_DCHECK_RUN_ON(&sequence_checker_);
- RTC_DCHECK(resource);
- const auto& adaptations = adaptations_counts_by_resource_.find(resource);
- return adaptations != adaptations_counts_by_resource_.end() &&
- adaptations->second > 0;
+void ResourceAdaptationProcessor::
+ MaybeUpdateResourceLimitationsOnResourceRemoval(
+ VideoStreamAdapter::RestrictionsWithCounters removed_limitations) {
+ if (adaptation_limits_by_resources_.empty()) {
+ // Only the resource being removed was adapted so reset restrictions.
+ ResetVideoSourceRestrictions();
+ return;
+ }
+
+ VideoStreamAdapter::RestrictionsWithCounters most_limited =
+ FindMostLimitedResources().second;
+
+ if (removed_limitations.adaptation_counters.Total() <=
+ most_limited.adaptation_counters.Total()) {
+ // The removed limitations were less limited than the most limited resource.
+ // Don't change the current restrictions.
+ return;
+ }
+
+ // Apply the new most limited resource as the next restrictions.
+ Adaptation adapt_to = stream_adapter_->GetAdaptationTo(
+ most_limited.adaptation_counters, most_limited.restrictions);
+ RTC_DCHECK_EQ(adapt_to.status(), Adaptation::Status::kValid);
+ stream_adapter_->ApplyAdaptation(adapt_to);
+
+ RTC_LOG(INFO) << "Most limited resource removed. Restoring restrictions to "
+ "next most limited restrictions: "
+ << most_limited.restrictions.ToString() << " with counters "
+ << most_limited.adaptation_counters.ToString();
+
+ MaybeUpdateVideoSourceRestrictions(nullptr);
+ auto input_state = input_state_provider_->InputState();
+ for (auto* adaptation_listener : adaptation_listeners_) {
+ adaptation_listener->OnAdaptationApplied(
+ input_state, removed_limitations.restrictions,
+ most_limited.restrictions, nullptr);
+ }
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/call/adaptation/resource_adaptation_processor.h b/chromium/third_party/webrtc/call/adaptation/resource_adaptation_processor.h
index cf1e187026a..cff50955e70 100644
--- a/chromium/third_party/webrtc/call/adaptation/resource_adaptation_processor.h
+++ b/chromium/third_party/webrtc/call/adaptation/resource_adaptation_processor.h
@@ -13,20 +13,24 @@
#include <map>
#include <memory>
+#include <string>
+#include <utility>
#include <vector>
#include "absl/types/optional.h"
+#include "api/adaptation/resource.h"
#include "api/rtp_parameters.h"
#include "api/scoped_refptr.h"
+#include "api/task_queue/task_queue_base.h"
#include "api/video/video_frame.h"
#include "api/video/video_stream_encoder_observer.h"
-#include "call/adaptation/resource.h"
+#include "call/adaptation/adaptation_constraint.h"
+#include "call/adaptation/adaptation_listener.h"
#include "call/adaptation/resource_adaptation_processor_interface.h"
#include "call/adaptation/video_source_restrictions.h"
#include "call/adaptation/video_stream_adapter.h"
#include "call/adaptation/video_stream_input_state.h"
#include "call/adaptation/video_stream_input_state_provider.h"
-#include "rtc_base/synchronization/sequence_checker.h"
namespace webrtc {
@@ -54,20 +58,27 @@ class ResourceAdaptationProcessor : public ResourceAdaptationProcessorInterface,
VideoStreamEncoderObserver* encoder_stats_observer);
~ResourceAdaptationProcessor() override;
- void InitializeOnResourceAdaptationQueue() override;
+ void SetResourceAdaptationQueue(
+ TaskQueueBase* resource_adaptation_queue) override;
// ResourceAdaptationProcessorInterface implementation.
DegradationPreference degradation_preference() const override;
DegradationPreference effective_degradation_preference() const override;
- void StartResourceAdaptation() override;
- void StopResourceAdaptation() override;
- void AddAdaptationListener(
- ResourceAdaptationProcessorListener* adaptation_listener) override;
- void RemoveAdaptationListener(
- ResourceAdaptationProcessorListener* adaptation_listener) override;
+ void AddRestrictionsListener(
+ VideoSourceRestrictionsListener* restrictions_listener) override;
+ void RemoveRestrictionsListener(
+ VideoSourceRestrictionsListener* restrictions_listener) override;
void AddResource(rtc::scoped_refptr<Resource> resource) override;
+ std::vector<rtc::scoped_refptr<Resource>> GetResources() const override;
void RemoveResource(rtc::scoped_refptr<Resource> resource) override;
+ void AddAdaptationConstraint(
+ AdaptationConstraint* adaptation_constraint) override;
+ void RemoveAdaptationConstraint(
+ AdaptationConstraint* adaptation_constraint) override;
+ void AddAdaptationListener(AdaptationListener* adaptation_listener) override;
+ void RemoveAdaptationListener(
+ AdaptationListener* adaptation_listener) override;
void SetDegradationPreference(
DegradationPreference degradation_preference) override;
@@ -76,8 +87,8 @@ class ResourceAdaptationProcessor : public ResourceAdaptationProcessorInterface,
// ResourceListener implementation.
// Triggers OnResourceUnderuse() or OnResourceOveruse().
- void OnResourceUsageStateMeasured(
- rtc::scoped_refptr<Resource> resource) override;
+ void OnResourceUsageStateMeasured(rtc::scoped_refptr<Resource> resource,
+ ResourceUsageState usage_state) override;
// May trigger 1-2 adaptations. It is meant to reduce resolution but this is
// not guaranteed. It may adapt frame rate, which does not address the issue.
@@ -89,11 +100,51 @@ class ResourceAdaptationProcessor : public ResourceAdaptationProcessorInterface,
bool HasSufficientInputForAdaptation(
const VideoStreamInputState& input_state) const;
+ // If resource usage measurements happens off the adaptation task queue, this
+ // class takes care of posting the measurement for the processor to handle it
+ // on the adaptation task queue.
+ class ResourceListenerDelegate : public rtc::RefCountInterface,
+ public ResourceListener {
+ public:
+ explicit ResourceListenerDelegate(ResourceAdaptationProcessor* processor);
+
+ void SetResourceAdaptationQueue(TaskQueueBase* resource_adaptation_queue);
+ void OnProcessorDestroyed();
+
+ // ResourceListener implementation.
+ void OnResourceUsageStateMeasured(rtc::scoped_refptr<Resource> resource,
+ ResourceUsageState usage_state) override;
+
+ private:
+ TaskQueueBase* resource_adaptation_queue_;
+ ResourceAdaptationProcessor* processor_
+ RTC_GUARDED_BY(resource_adaptation_queue_);
+ };
+
+ enum class MitigationResult {
+ kDisabled,
+ kInsufficientInput,
+ kNotMostLimitedResource,
+ kSharedMostLimitedResource,
+ kRejectedByAdapter,
+ kRejectedByConstraint,
+ kAdaptationApplied,
+ };
+
+ struct MitigationResultAndLogMessage {
+ MitigationResultAndLogMessage();
+ MitigationResultAndLogMessage(MitigationResult result, std::string message);
+ MitigationResult result;
+ std::string message;
+ };
+
// Performs the adaptation by getting the next target, applying it and
// informing listeners of the new VideoSourceRestriction and adaptation
// counters.
- void OnResourceUnderuse(rtc::scoped_refptr<Resource> reason_resource);
- void OnResourceOveruse(rtc::scoped_refptr<Resource> reason_resource);
+ MitigationResultAndLogMessage OnResourceUnderuse(
+ rtc::scoped_refptr<Resource> reason_resource);
+ MitigationResultAndLogMessage OnResourceOveruse(
+ rtc::scoped_refptr<Resource> reason_resource);
// Needs to be invoked any time |degradation_preference_| or |is_screenshare_|
// changes to ensure |effective_degradation_preference_| is up-to-date.
@@ -101,43 +152,60 @@ class ResourceAdaptationProcessor : public ResourceAdaptationProcessorInterface,
// If the filtered source restrictions are different than
// |last_reported_source_restrictions_|, inform the listeners.
void MaybeUpdateVideoSourceRestrictions(rtc::scoped_refptr<Resource> reason);
- // Updates the number of times the resource has degraded based on the latest
- // degradation applied.
- void UpdateResourceDegradationCounts(rtc::scoped_refptr<Resource> resource);
- // Returns true if a Resource has been overused in the pass and is responsible
- // for creating a VideoSourceRestriction. The current algorithm counts the
- // number of times the resource caused an adaptation and allows adapting up
- // if that number is non-zero. This is consistent with how adaptation has
- // traditionally been handled.
- // TODO(crbug.com/webrtc/11553) Change this algorithm to look at the resources
- // restrictions rather than just the counters.
- bool IsResourceAllowedToAdaptUp(rtc::scoped_refptr<Resource> resource) const;
-
- webrtc::SequenceChecker sequence_checker_;
- bool is_resource_adaptation_enabled_ RTC_GUARDED_BY(sequence_checker_);
+
+ void UpdateResourceLimitations(
+ rtc::scoped_refptr<Resource> reason_resource,
+ const VideoStreamAdapter::RestrictionsWithCounters&
+ peek_next_restrictions) RTC_RUN_ON(resource_adaptation_queue_);
+
+ // Searches |adaptation_limits_by_resources_| for each resource with the
+ // highest total adaptation counts. Adaptation up may only occur if the
+ // resource performing the adaptation is the only most limited resource. This
+ // function returns the list of all most limited resources as well as the
+ // corresponding adaptation of that resource.
+ std::pair<std::vector<rtc::scoped_refptr<Resource>>,
+ VideoStreamAdapter::RestrictionsWithCounters>
+ FindMostLimitedResources() const RTC_RUN_ON(resource_adaptation_queue_);
+
+ void MaybeUpdateResourceLimitationsOnResourceRemoval(
+ VideoStreamAdapter::RestrictionsWithCounters removed_limitations)
+ RTC_RUN_ON(resource_adaptation_queue_);
+
+ TaskQueueBase* resource_adaptation_queue_;
+ rtc::scoped_refptr<ResourceListenerDelegate> resource_listener_delegate_;
// Input and output.
VideoStreamInputStateProvider* const input_state_provider_
- RTC_GUARDED_BY(sequence_checker_);
+ RTC_GUARDED_BY(resource_adaptation_queue_);
VideoStreamEncoderObserver* const encoder_stats_observer_
- RTC_GUARDED_BY(sequence_checker_);
- std::vector<ResourceAdaptationProcessorListener*> adaptation_listeners_
- RTC_GUARDED_BY(sequence_checker_);
+ RTC_GUARDED_BY(resource_adaptation_queue_);
+ std::vector<VideoSourceRestrictionsListener*> restrictions_listeners_
+ RTC_GUARDED_BY(resource_adaptation_queue_);
std::vector<rtc::scoped_refptr<Resource>> resources_
- RTC_GUARDED_BY(sequence_checker_);
+ RTC_GUARDED_BY(resource_adaptation_queue_);
+ std::vector<AdaptationConstraint*> adaptation_constraints_
+ RTC_GUARDED_BY(resource_adaptation_queue_);
+ std::vector<AdaptationListener*> adaptation_listeners_
+ RTC_GUARDED_BY(resource_adaptation_queue_);
// Purely used for statistics, does not ensure mapped resources stay alive.
- std::map<const Resource*, int> adaptations_counts_by_resource_
- RTC_GUARDED_BY(sequence_checker_);
+ std::map<rtc::scoped_refptr<Resource>,
+ VideoStreamAdapter::RestrictionsWithCounters>
+ adaptation_limits_by_resources_
+ RTC_GUARDED_BY(resource_adaptation_queue_);
// Adaptation strategy settings.
DegradationPreference degradation_preference_
- RTC_GUARDED_BY(sequence_checker_);
+ RTC_GUARDED_BY(resource_adaptation_queue_);
DegradationPreference effective_degradation_preference_
- RTC_GUARDED_BY(sequence_checker_);
- bool is_screenshare_ RTC_GUARDED_BY(sequence_checker_);
+ RTC_GUARDED_BY(resource_adaptation_queue_);
+ bool is_screenshare_ RTC_GUARDED_BY(resource_adaptation_queue_);
// Responsible for generating and applying possible adaptations.
const std::unique_ptr<VideoStreamAdapter> stream_adapter_
- RTC_GUARDED_BY(sequence_checker_);
+ RTC_GUARDED_BY(resource_adaptation_queue_);
VideoSourceRestrictions last_reported_source_restrictions_
- RTC_GUARDED_BY(sequence_checker_);
+ RTC_GUARDED_BY(resource_adaptation_queue_);
+ // Keeps track of previous mitigation results per resource since the last
+ // successful adaptation. Used to avoid RTC_LOG spam.
+ std::map<Resource*, MitigationResult> previous_mitigation_results_
+ RTC_GUARDED_BY(resource_adaptation_queue_);
// Prevents recursion.
//
// This is used to prevent triggering resource adaptation in the process of
@@ -149,7 +217,7 @@ class ResourceAdaptationProcessor : public ResourceAdaptationProcessorInterface,
// Resource::OnAdaptationApplied() ->
// Resource::OnResourceUsageStateMeasured() ->
// ResourceAdaptationProcessor::OnResourceOveruse() // Boom, not allowed.
- bool processing_in_progress_ RTC_GUARDED_BY(sequence_checker_);
+ bool processing_in_progress_ RTC_GUARDED_BY(resource_adaptation_queue_);
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/call/adaptation/resource_adaptation_processor_interface.cc b/chromium/third_party/webrtc/call/adaptation/resource_adaptation_processor_interface.cc
index 4e5251ce909..48ddf65ed3b 100644
--- a/chromium/third_party/webrtc/call/adaptation/resource_adaptation_processor_interface.cc
+++ b/chromium/third_party/webrtc/call/adaptation/resource_adaptation_processor_interface.cc
@@ -12,8 +12,9 @@
namespace webrtc {
-ResourceAdaptationProcessorListener::~ResourceAdaptationProcessorListener() {}
+VideoSourceRestrictionsListener::~VideoSourceRestrictionsListener() = default;
-ResourceAdaptationProcessorInterface::~ResourceAdaptationProcessorInterface() {}
+ResourceAdaptationProcessorInterface::~ResourceAdaptationProcessorInterface() =
+ default;
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/call/adaptation/resource_adaptation_processor_interface.h b/chromium/third_party/webrtc/call/adaptation/resource_adaptation_processor_interface.h
index d6295c4d750..a97fe8efe4b 100644
--- a/chromium/third_party/webrtc/call/adaptation/resource_adaptation_processor_interface.h
+++ b/chromium/third_party/webrtc/call/adaptation/resource_adaptation_processor_interface.h
@@ -11,23 +11,28 @@
#ifndef CALL_ADAPTATION_RESOURCE_ADAPTATION_PROCESSOR_INTERFACE_H_
#define CALL_ADAPTATION_RESOURCE_ADAPTATION_PROCESSOR_INTERFACE_H_
+#include <map>
+#include <vector>
+
#include "absl/types/optional.h"
+#include "api/adaptation/resource.h"
#include "api/rtp_parameters.h"
#include "api/scoped_refptr.h"
+#include "api/task_queue/task_queue_base.h"
#include "api/video/video_adaptation_counters.h"
#include "api/video/video_frame.h"
+#include "call/adaptation/adaptation_constraint.h"
+#include "call/adaptation/adaptation_listener.h"
#include "call/adaptation/encoder_settings.h"
-#include "call/adaptation/resource.h"
#include "call/adaptation/video_source_restrictions.h"
-#include "rtc_base/task_queue.h"
namespace webrtc {
// The listener is responsible for carrying out the reconfiguration of the video
// source such that the VideoSourceRestrictions are fulfilled.
-class ResourceAdaptationProcessorListener {
+class VideoSourceRestrictionsListener {
public:
- virtual ~ResourceAdaptationProcessorListener();
+ virtual ~VideoSourceRestrictionsListener();
// The |restrictions| are filtered by degradation preference but not the
// |adaptation_counters|, which are currently only reported for legacy stats
@@ -36,6 +41,13 @@ class ResourceAdaptationProcessorListener {
VideoSourceRestrictions restrictions,
const VideoAdaptationCounters& adaptation_counters,
rtc::scoped_refptr<Resource> reason) = 0;
+
+ // The limitations on a resource were changed. This does not mean the current
+ // video restrictions have changed.
+ virtual void OnResourceLimitationChanged(
+ rtc::scoped_refptr<Resource> resource,
+ const std::map<rtc::scoped_refptr<Resource>, VideoAdaptationCounters>&
+ resource_limitations) {}
};
// The Resource Adaptation Processor is responsible for reacting to resource
@@ -46,7 +58,8 @@ class ResourceAdaptationProcessorInterface {
public:
virtual ~ResourceAdaptationProcessorInterface();
- virtual void InitializeOnResourceAdaptationQueue() = 0;
+ virtual void SetResourceAdaptationQueue(
+ TaskQueueBase* resource_adaptation_queue) = 0;
virtual DegradationPreference degradation_preference() const = 0;
// Reinterprets "balanced + screenshare" as "maintain-resolution".
@@ -61,14 +74,21 @@ class ResourceAdaptationProcessorInterface {
// with AddResource() and RemoveResource() instead. When the processor is
// multi-stream aware, stream-specific resouces will get added and removed
// over time.
- virtual void StartResourceAdaptation() = 0;
- virtual void StopResourceAdaptation() = 0;
- virtual void AddAdaptationListener(
- ResourceAdaptationProcessorListener* adaptation_listener) = 0;
- virtual void RemoveAdaptationListener(
- ResourceAdaptationProcessorListener* adaptation_listener) = 0;
+ virtual void AddRestrictionsListener(
+ VideoSourceRestrictionsListener* restrictions_listener) = 0;
+ virtual void RemoveRestrictionsListener(
+ VideoSourceRestrictionsListener* restrictions_listener) = 0;
virtual void AddResource(rtc::scoped_refptr<Resource> resource) = 0;
+ virtual std::vector<rtc::scoped_refptr<Resource>> GetResources() const = 0;
virtual void RemoveResource(rtc::scoped_refptr<Resource> resource) = 0;
+ virtual void AddAdaptationConstraint(
+ AdaptationConstraint* adaptation_constraint) = 0;
+ virtual void RemoveAdaptationConstraint(
+ AdaptationConstraint* adaptation_constraint) = 0;
+ virtual void AddAdaptationListener(
+ AdaptationListener* adaptation_listener) = 0;
+ virtual void RemoveAdaptationListener(
+ AdaptationListener* adaptation_listener) = 0;
virtual void SetDegradationPreference(
DegradationPreference degradation_preference) = 0;
diff --git a/chromium/third_party/webrtc/call/adaptation/resource_adaptation_processor_unittest.cc b/chromium/third_party/webrtc/call/adaptation/resource_adaptation_processor_unittest.cc
index e94b3a99d7e..da1ab1cda1d 100644
--- a/chromium/third_party/webrtc/call/adaptation/resource_adaptation_processor_unittest.cc
+++ b/chromium/third_party/webrtc/call/adaptation/resource_adaptation_processor_unittest.cc
@@ -10,15 +10,19 @@
#include "call/adaptation/resource_adaptation_processor.h"
+#include "api/adaptation/resource.h"
#include "api/scoped_refptr.h"
#include "api/video/video_adaptation_counters.h"
-#include "call/adaptation/resource.h"
#include "call/adaptation/resource_adaptation_processor_interface.h"
+#include "call/adaptation/test/fake_adaptation_constraint.h"
+#include "call/adaptation/test/fake_adaptation_listener.h"
#include "call/adaptation/test/fake_frame_rate_provider.h"
#include "call/adaptation/test/fake_resource.h"
#include "call/adaptation/video_source_restrictions.h"
#include "call/adaptation/video_stream_input_state_provider.h"
+#include "rtc_base/critical_section.h"
#include "rtc_base/event.h"
+#include "rtc_base/gunit.h"
#include "rtc_base/task_queue_for_test.h"
#include "test/gtest.h"
@@ -28,31 +32,41 @@ namespace {
const int kDefaultFrameRate = 30;
const int kDefaultFrameSize = 1280 * 720;
+const int kDefaultTimeoutMs = 5000;
-class ResourceAdaptationProcessorListenerForTesting
- : public ResourceAdaptationProcessorListener {
+class VideoSourceRestrictionsListenerForTesting
+ : public VideoSourceRestrictionsListener {
public:
- ResourceAdaptationProcessorListenerForTesting()
+ VideoSourceRestrictionsListenerForTesting()
: restrictions_updated_count_(0),
restrictions_(),
adaptation_counters_(),
reason_(nullptr) {}
- ~ResourceAdaptationProcessorListenerForTesting() override {}
+ ~VideoSourceRestrictionsListenerForTesting() override {}
size_t restrictions_updated_count() const {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
return restrictions_updated_count_;
}
- const VideoSourceRestrictions& restrictions() const { return restrictions_; }
- const VideoAdaptationCounters& adaptation_counters() const {
+ VideoSourceRestrictions restrictions() const {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ return restrictions_;
+ }
+ VideoAdaptationCounters adaptation_counters() const {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
return adaptation_counters_;
}
- rtc::scoped_refptr<Resource> reason() const { return reason_; }
+ rtc::scoped_refptr<Resource> reason() const {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ return reason_;
+ }
- // ResourceAdaptationProcessorListener implementation.
+ // VideoSourceRestrictionsListener implementation.
void OnVideoSourceRestrictionsUpdated(
VideoSourceRestrictions restrictions,
const VideoAdaptationCounters& adaptation_counters,
rtc::scoped_refptr<Resource> reason) override {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
++restrictions_updated_count_;
restrictions_ = restrictions;
adaptation_counters_ = adaptation_counters;
@@ -60,47 +74,37 @@ class ResourceAdaptationProcessorListenerForTesting
}
private:
- size_t restrictions_updated_count_;
- VideoSourceRestrictions restrictions_;
- VideoAdaptationCounters adaptation_counters_;
- rtc::scoped_refptr<Resource> reason_;
+ SequenceChecker sequence_checker_;
+ size_t restrictions_updated_count_ RTC_GUARDED_BY(&sequence_checker_);
+ VideoSourceRestrictions restrictions_ RTC_GUARDED_BY(&sequence_checker_);
+ VideoAdaptationCounters adaptation_counters_
+ RTC_GUARDED_BY(&sequence_checker_);
+ rtc::scoped_refptr<Resource> reason_ RTC_GUARDED_BY(&sequence_checker_);
};
class ResourceAdaptationProcessorTest : public ::testing::Test {
public:
ResourceAdaptationProcessorTest()
- : resource_adaptation_queue_("ResourceAdaptationQueue"),
- encoder_queue_("EncoderQueue"),
- frame_rate_provider_(),
+ : frame_rate_provider_(),
input_state_provider_(&frame_rate_provider_),
- resource_(new FakeResource("FakeResource")),
- other_resource_(new FakeResource("OtherFakeResource")),
+ resource_(FakeResource::Create("FakeResource")),
+ other_resource_(FakeResource::Create("OtherFakeResource")),
+ adaptation_constraint_("FakeAdaptationConstraint"),
+ adaptation_listener_(),
processor_(std::make_unique<ResourceAdaptationProcessor>(
&input_state_provider_,
/*encoder_stats_observer=*/&frame_rate_provider_)) {
- resource_->Initialize(&encoder_queue_, &resource_adaptation_queue_);
- other_resource_->Initialize(&encoder_queue_, &resource_adaptation_queue_);
- rtc::Event event;
- resource_adaptation_queue_.PostTask([this, &event] {
- processor_->InitializeOnResourceAdaptationQueue();
- processor_->AddAdaptationListener(&processor_listener_);
- processor_->AddResource(resource_);
- processor_->AddResource(other_resource_);
- event.Set();
- });
- event.Wait(rtc::Event::kForever);
+ processor_->SetResourceAdaptationQueue(TaskQueueBase::Current());
+ processor_->AddRestrictionsListener(&restrictions_listener_);
+ processor_->AddResource(resource_);
+ processor_->AddResource(other_resource_);
+ processor_->AddAdaptationConstraint(&adaptation_constraint_);
+ processor_->AddAdaptationListener(&adaptation_listener_);
}
~ResourceAdaptationProcessorTest() override {
- rtc::Event event;
- resource_adaptation_queue_.PostTask([this, &event] {
- processor_->StopResourceAdaptation();
- processor_->RemoveResource(resource_);
- processor_->RemoveResource(other_resource_);
- processor_->RemoveAdaptationListener(&processor_listener_);
- processor_.reset();
- event.Set();
- });
- event.Wait(rtc::Event::kForever);
+ if (processor_) {
+ DestroyProcessor();
+ }
}
void SetInputStates(bool has_input, int fps, int frame_size) {
@@ -117,53 +121,60 @@ class ResourceAdaptationProcessorTest : public ::testing::Test {
: restrictions.max_pixels_per_frame().value_or(kDefaultFrameSize));
}
+ void DestroyProcessor() {
+ processor_->RemoveRestrictionsListener(&restrictions_listener_);
+ if (resource_) {
+ processor_->RemoveResource(resource_);
+ }
+ if (other_resource_) {
+ processor_->RemoveResource(other_resource_);
+ }
+ processor_->RemoveAdaptationConstraint(&adaptation_constraint_);
+ processor_->RemoveAdaptationListener(&adaptation_listener_);
+ processor_.reset();
+ }
+
+ static void WaitUntilTaskQueueIdle() {
+ ASSERT_TRUE(rtc::Thread::Current()->ProcessMessages(0));
+ }
+
protected:
- TaskQueueForTest resource_adaptation_queue_;
- TaskQueueForTest encoder_queue_;
FakeFrameRateProvider frame_rate_provider_;
VideoStreamInputStateProvider input_state_provider_;
rtc::scoped_refptr<FakeResource> resource_;
rtc::scoped_refptr<FakeResource> other_resource_;
+ FakeAdaptationConstraint adaptation_constraint_;
+ FakeAdaptationListener adaptation_listener_;
std::unique_ptr<ResourceAdaptationProcessor> processor_;
- ResourceAdaptationProcessorListenerForTesting processor_listener_;
+ VideoSourceRestrictionsListenerForTesting restrictions_listener_;
};
} // namespace
TEST_F(ResourceAdaptationProcessorTest, DisabledByDefault) {
- resource_adaptation_queue_.SendTask(
- [this] {
- EXPECT_EQ(DegradationPreference::DISABLED,
- processor_->degradation_preference());
- EXPECT_EQ(DegradationPreference::DISABLED,
- processor_->effective_degradation_preference());
- SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
- processor_->StartResourceAdaptation();
- // Adaptation does not happen when disabled.
- resource_->set_usage_state(ResourceUsageState::kOveruse);
- EXPECT_EQ(0u, processor_listener_.restrictions_updated_count());
- },
- RTC_FROM_HERE);
+ EXPECT_EQ(DegradationPreference::DISABLED,
+ processor_->degradation_preference());
+ EXPECT_EQ(DegradationPreference::DISABLED,
+ processor_->effective_degradation_preference());
+ SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
+ // Adaptation does not happen when disabled.
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(0u, restrictions_listener_.restrictions_updated_count());
}
TEST_F(ResourceAdaptationProcessorTest, InsufficientInput) {
- resource_adaptation_queue_.SendTask(
- [this] {
- processor_->SetDegradationPreference(
- DegradationPreference::MAINTAIN_FRAMERATE);
- processor_->StartResourceAdaptation();
- // Adaptation does not happen if input is insufficient.
- // When frame size is missing (OnFrameSizeObserved not called yet).
- input_state_provider_.OnHasInputChanged(true);
- resource_->set_usage_state(ResourceUsageState::kOveruse);
- EXPECT_EQ(0u, processor_listener_.restrictions_updated_count());
- // When "has input" is missing.
- SetInputStates(false, kDefaultFrameRate, kDefaultFrameSize);
- resource_->set_usage_state(ResourceUsageState::kOveruse);
- EXPECT_EQ(0u, processor_listener_.restrictions_updated_count());
- // Note: frame rate cannot be missing, if unset it is 0.
- },
- RTC_FROM_HERE);
+ processor_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_FRAMERATE);
+ // Adaptation does not happen if input is insufficient.
+ // When frame size is missing (OnFrameSizeObserved not called yet).
+ input_state_provider_.OnHasInputChanged(true);
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(0u, restrictions_listener_.restrictions_updated_count());
+ // When "has input" is missing.
+ SetInputStates(false, kDefaultFrameRate, kDefaultFrameSize);
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(0u, restrictions_listener_.restrictions_updated_count());
+ // Note: frame rate cannot be missing, if unset it is 0.
}
// These tests verify that restrictions are applied, but not exactly how much
@@ -172,273 +183,558 @@ TEST_F(ResourceAdaptationProcessorTest, InsufficientInput) {
// restrictions. For that, see video_stream_adapter_unittest.cc.
TEST_F(ResourceAdaptationProcessorTest,
OveruseTriggersRestrictingResolutionInMaintainFrameRate) {
- resource_adaptation_queue_.SendTask(
- [this] {
- processor_->SetDegradationPreference(
- DegradationPreference::MAINTAIN_FRAMERATE);
- processor_->StartResourceAdaptation();
- SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
- resource_->set_usage_state(ResourceUsageState::kOveruse);
- EXPECT_EQ(1u, processor_listener_.restrictions_updated_count());
- EXPECT_TRUE(processor_listener_.restrictions()
- .max_pixels_per_frame()
- .has_value());
- },
- RTC_FROM_HERE);
+ processor_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_FRAMERATE);
+ SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(1u, restrictions_listener_.restrictions_updated_count());
+ EXPECT_TRUE(
+ restrictions_listener_.restrictions().max_pixels_per_frame().has_value());
}
TEST_F(ResourceAdaptationProcessorTest,
OveruseTriggersRestrictingFrameRateInMaintainResolution) {
- resource_adaptation_queue_.SendTask(
- [this] {
- processor_->SetDegradationPreference(
- DegradationPreference::MAINTAIN_RESOLUTION);
- processor_->StartResourceAdaptation();
- SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
- resource_->set_usage_state(ResourceUsageState::kOveruse);
- EXPECT_EQ(1u, processor_listener_.restrictions_updated_count());
- EXPECT_TRUE(
- processor_listener_.restrictions().max_frame_rate().has_value());
- },
- RTC_FROM_HERE);
+ processor_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_RESOLUTION);
+ SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(1u, restrictions_listener_.restrictions_updated_count());
+ EXPECT_TRUE(
+ restrictions_listener_.restrictions().max_frame_rate().has_value());
}
TEST_F(ResourceAdaptationProcessorTest,
OveruseTriggersRestrictingFrameRateAndResolutionInBalanced) {
- resource_adaptation_queue_.SendTask(
- [this] {
- processor_->SetDegradationPreference(DegradationPreference::BALANCED);
- processor_->StartResourceAdaptation();
- SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
- // Adapting multiple times eventually resticts both frame rate and
- // resolution. Exactly many times we need to adapt depends on
- // BalancedDegradationSettings, VideoStreamAdapter and default input
- // states. This test requires it to be achieved within 4 adaptations.
- for (size_t i = 0; i < 4; ++i) {
- resource_->set_usage_state(ResourceUsageState::kOveruse);
- EXPECT_EQ(i + 1, processor_listener_.restrictions_updated_count());
- RestrictSource(processor_listener_.restrictions());
- }
- EXPECT_TRUE(processor_listener_.restrictions()
- .max_pixels_per_frame()
- .has_value());
- EXPECT_TRUE(
- processor_listener_.restrictions().max_frame_rate().has_value());
- },
- RTC_FROM_HERE);
+ processor_->SetDegradationPreference(DegradationPreference::BALANCED);
+ SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
+  // Adapting multiple times eventually restricts both frame rate and
+  // resolution. Exactly how many times we need to adapt depends on
+ // BalancedDegradationSettings, VideoStreamAdapter and default input
+ // states. This test requires it to be achieved within 4 adaptations.
+ for (size_t i = 0; i < 4; ++i) {
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(i + 1, restrictions_listener_.restrictions_updated_count());
+ RestrictSource(restrictions_listener_.restrictions());
+ }
+ EXPECT_TRUE(
+ restrictions_listener_.restrictions().max_pixels_per_frame().has_value());
+ EXPECT_TRUE(
+ restrictions_listener_.restrictions().max_frame_rate().has_value());
}
TEST_F(ResourceAdaptationProcessorTest, AwaitingPreviousAdaptation) {
- resource_adaptation_queue_.SendTask(
- [this] {
- processor_->SetDegradationPreference(
- DegradationPreference::MAINTAIN_FRAMERATE);
- processor_->StartResourceAdaptation();
- SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
- resource_->set_usage_state(ResourceUsageState::kOveruse);
- EXPECT_EQ(1u, processor_listener_.restrictions_updated_count());
- // If we don't restrict the source then adaptation will not happen again
- // due to "awaiting previous adaptation". This prevents "double-adapt".
- resource_->set_usage_state(ResourceUsageState::kOveruse);
- EXPECT_EQ(1u, processor_listener_.restrictions_updated_count());
- },
- RTC_FROM_HERE);
+ processor_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_FRAMERATE);
+ SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(1u, restrictions_listener_.restrictions_updated_count());
+ // If we don't restrict the source then adaptation will not happen again
+ // due to "awaiting previous adaptation". This prevents "double-adapt".
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(1u, restrictions_listener_.restrictions_updated_count());
}
TEST_F(ResourceAdaptationProcessorTest, CannotAdaptUpWhenUnrestricted) {
- resource_adaptation_queue_.SendTask(
- [this] {
- processor_->SetDegradationPreference(
- DegradationPreference::MAINTAIN_FRAMERATE);
- processor_->StartResourceAdaptation();
- SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
- resource_->set_usage_state(ResourceUsageState::kUnderuse);
- EXPECT_EQ(0u, processor_listener_.restrictions_updated_count());
- },
- RTC_FROM_HERE);
+ processor_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_FRAMERATE);
+ SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
+ resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(0u, restrictions_listener_.restrictions_updated_count());
}
TEST_F(ResourceAdaptationProcessorTest, UnderuseTakesUsBackToUnrestricted) {
- resource_adaptation_queue_.SendTask(
- [this] {
- processor_->SetDegradationPreference(
- DegradationPreference::MAINTAIN_FRAMERATE);
- processor_->StartResourceAdaptation();
- SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
- resource_->set_usage_state(ResourceUsageState::kOveruse);
- EXPECT_EQ(1u, processor_listener_.restrictions_updated_count());
- RestrictSource(processor_listener_.restrictions());
- resource_->set_usage_state(ResourceUsageState::kUnderuse);
- EXPECT_EQ(2u, processor_listener_.restrictions_updated_count());
- EXPECT_EQ(VideoSourceRestrictions(),
- processor_listener_.restrictions());
- },
- RTC_FROM_HERE);
+ processor_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_FRAMERATE);
+ SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(1u, restrictions_listener_.restrictions_updated_count());
+ RestrictSource(restrictions_listener_.restrictions());
+ resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(2u, restrictions_listener_.restrictions_updated_count());
+ EXPECT_EQ(VideoSourceRestrictions(), restrictions_listener_.restrictions());
}
TEST_F(ResourceAdaptationProcessorTest, ResourcesCanPreventAdaptingUp) {
- resource_adaptation_queue_.SendTask(
- [this] {
- processor_->SetDegradationPreference(
- DegradationPreference::MAINTAIN_FRAMERATE);
- processor_->StartResourceAdaptation();
- SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
- // Adapt down so that we can adapt up.
- resource_->set_usage_state(ResourceUsageState::kOveruse);
- EXPECT_EQ(1u, processor_listener_.restrictions_updated_count());
- RestrictSource(processor_listener_.restrictions());
- // Adapting up is prevented.
- resource_->set_is_adaptation_up_allowed(false);
- resource_->set_usage_state(ResourceUsageState::kUnderuse);
- EXPECT_EQ(1u, processor_listener_.restrictions_updated_count());
- },
- RTC_FROM_HERE);
+ processor_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_FRAMERATE);
+ SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
+ // Adapt down so that we can adapt up.
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(1u, restrictions_listener_.restrictions_updated_count());
+ RestrictSource(restrictions_listener_.restrictions());
+ // Adapting up is prevented.
+ adaptation_constraint_.set_is_adaptation_up_allowed(false);
+ resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(1u, restrictions_listener_.restrictions_updated_count());
}
TEST_F(ResourceAdaptationProcessorTest,
ResourcesCanNotAdaptUpIfNeverAdaptedDown) {
- resource_adaptation_queue_.SendTask(
- [this] {
- processor_->SetDegradationPreference(
- DegradationPreference::MAINTAIN_FRAMERATE);
- processor_->StartResourceAdaptation();
- SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
- resource_->set_usage_state(ResourceUsageState::kOveruse);
- EXPECT_EQ(1u, processor_listener_.restrictions_updated_count());
- RestrictSource(processor_listener_.restrictions());
-
- // Other resource signals under-use
- other_resource_->set_usage_state(ResourceUsageState::kUnderuse);
- EXPECT_EQ(1u, processor_listener_.restrictions_updated_count());
- },
- RTC_FROM_HERE);
+ processor_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_FRAMERATE);
+ SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(1u, restrictions_listener_.restrictions_updated_count());
+ RestrictSource(restrictions_listener_.restrictions());
+
+ // Other resource signals under-use
+ other_resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(1u, restrictions_listener_.restrictions_updated_count());
}
TEST_F(ResourceAdaptationProcessorTest,
ResourcesCanNotAdaptUpIfNotAdaptedDownAfterReset) {
- resource_adaptation_queue_.SendTask(
- [this] {
- processor_->SetDegradationPreference(
- DegradationPreference::MAINTAIN_FRAMERATE);
- processor_->StartResourceAdaptation();
- SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
- resource_->set_usage_state(ResourceUsageState::kOveruse);
- EXPECT_EQ(1u, processor_listener_.restrictions_updated_count());
-
- processor_->ResetVideoSourceRestrictions();
- EXPECT_EQ(0, processor_listener_.adaptation_counters().Total());
- other_resource_->set_usage_state(ResourceUsageState::kOveruse);
- EXPECT_EQ(1, processor_listener_.adaptation_counters().Total());
- RestrictSource(processor_listener_.restrictions());
-
- // resource_ did not overuse after we reset the restrictions, so adapt
- // up should be disallowed.
- resource_->set_usage_state(ResourceUsageState::kUnderuse);
- EXPECT_EQ(1, processor_listener_.adaptation_counters().Total());
- },
- RTC_FROM_HERE);
+ processor_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_FRAMERATE);
+ SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(1u, restrictions_listener_.restrictions_updated_count());
+
+ processor_->ResetVideoSourceRestrictions();
+ EXPECT_EQ(0, restrictions_listener_.adaptation_counters().Total());
+ other_resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+
+ // resource_ did not overuse after we reset the restrictions, so adapt
+ // up should be disallowed.
+ resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+}
+
+TEST_F(ResourceAdaptationProcessorTest, OnlyMostLimitedResourceMayAdaptUp) {
+ processor_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_FRAMERATE);
+ SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+ other_resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+
+ // |other_resource_| is most limited, resource_ can't adapt up.
+ resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+ other_resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+
+ // |resource_| and |other_resource_| are now most limited, so both must
+ // signal underuse to adapt up.
+ other_resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+ resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(0, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
}
TEST_F(ResourceAdaptationProcessorTest,
MultipleResourcesCanTriggerMultipleAdaptations) {
- resource_adaptation_queue_.SendTask(
- [this] {
- processor_->SetDegradationPreference(
- DegradationPreference::MAINTAIN_FRAMERATE);
- processor_->StartResourceAdaptation();
- SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
- resource_->set_usage_state(ResourceUsageState::kOveruse);
- EXPECT_EQ(1, processor_listener_.adaptation_counters().Total());
- RestrictSource(processor_listener_.restrictions());
- other_resource_->set_usage_state(ResourceUsageState::kOveruse);
- EXPECT_EQ(2, processor_listener_.adaptation_counters().Total());
- RestrictSource(processor_listener_.restrictions());
- other_resource_->set_usage_state(ResourceUsageState::kOveruse);
- EXPECT_EQ(3, processor_listener_.adaptation_counters().Total());
- RestrictSource(processor_listener_.restrictions());
-
- resource_->set_usage_state(ResourceUsageState::kUnderuse);
- EXPECT_EQ(2, processor_listener_.adaptation_counters().Total());
- RestrictSource(processor_listener_.restrictions());
- // Does not trigger adaptation since resource has no adaptations left.
- resource_->set_usage_state(ResourceUsageState::kUnderuse);
- EXPECT_EQ(2, processor_listener_.adaptation_counters().Total());
- RestrictSource(processor_listener_.restrictions());
-
- other_resource_->set_usage_state(ResourceUsageState::kUnderuse);
- EXPECT_EQ(1, processor_listener_.adaptation_counters().Total());
- RestrictSource(processor_listener_.restrictions());
- other_resource_->set_usage_state(ResourceUsageState::kUnderuse);
- EXPECT_EQ(0, processor_listener_.adaptation_counters().Total());
- RestrictSource(processor_listener_.restrictions());
- },
- RTC_FROM_HERE);
+ processor_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_FRAMERATE);
+ SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+ other_resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+ other_resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(3, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+
+ // resource_ is not most limited so can't adapt from underuse.
+ resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(3, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+ other_resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+ // resource_ is still not most limited so can't adapt from underuse.
+ resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+
+ // However it will be after overuse
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(3, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+
+ // Now other_resource_ can't adapt up as it is not most restricted.
+ other_resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(3, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+
+ // resource_ is limited at 3 adaptations and other_resource_ 2.
+ // With the most limited resource signalling underuse in the following
+ // order we get back to unrestricted video.
+ resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+ // Both resource_ and other_resource_ are most limited.
+ other_resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+ resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+ // Again both are most limited.
+ resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+ other_resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(0, restrictions_listener_.adaptation_counters().Total());
+}
+
+TEST_F(ResourceAdaptationProcessorTest,
+ MostLimitedResourceAdaptationWorksAfterChangingDegradataionPreference) {
+ processor_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_FRAMERATE);
+ SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
+ // Adapt down until we can't anymore.
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ RestrictSource(restrictions_listener_.restrictions());
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ RestrictSource(restrictions_listener_.restrictions());
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ RestrictSource(restrictions_listener_.restrictions());
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ RestrictSource(restrictions_listener_.restrictions());
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ RestrictSource(restrictions_listener_.restrictions());
+ int last_total = restrictions_listener_.adaptation_counters().Total();
+
+ processor_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_RESOLUTION);
+ // resource_ can not adapt up since we have never reduced FPS.
+ resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(last_total, restrictions_listener_.adaptation_counters().Total());
+
+ other_resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(last_total + 1,
+ restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+ // other_resource_ is most limited so should be able to adapt up.
+ other_resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(last_total, restrictions_listener_.adaptation_counters().Total());
}
TEST_F(ResourceAdaptationProcessorTest, AdaptingTriggersOnAdaptationApplied) {
- resource_adaptation_queue_.SendTask(
- [this] {
- processor_->SetDegradationPreference(
- DegradationPreference::MAINTAIN_FRAMERATE);
- processor_->StartResourceAdaptation();
- SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
- resource_->set_usage_state(ResourceUsageState::kOveruse);
- EXPECT_EQ(1u, resource_->num_adaptations_applied());
- },
- RTC_FROM_HERE);
+ processor_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_FRAMERATE);
+ SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(1u, adaptation_listener_.num_adaptations_applied());
}
-TEST_F(ResourceAdaptationProcessorTest, AdaptingClearsResourceUsageState) {
- resource_adaptation_queue_.SendTask(
- [this] {
- processor_->SetDegradationPreference(
- DegradationPreference::MAINTAIN_FRAMERATE);
- processor_->StartResourceAdaptation();
- SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
- resource_->set_usage_state(ResourceUsageState::kOveruse);
- EXPECT_EQ(1u, processor_listener_.restrictions_updated_count());
- EXPECT_FALSE(resource_->usage_state().has_value());
- },
- RTC_FROM_HERE);
+TEST_F(ResourceAdaptationProcessorTest,
+ AdaptsDownWhenOtherResourceIsAlwaysUnderused) {
+ processor_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_FRAMERATE);
+ SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
+ other_resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ // Does not trigger adaptation because there's no restriction.
+ EXPECT_EQ(0, restrictions_listener_.adaptation_counters().Total());
+
+ RestrictSource(restrictions_listener_.restrictions());
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ // Adapts down even if other resource asked for adapting up.
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+
+ RestrictSource(restrictions_listener_.restrictions());
+ other_resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ // Doesn't adapt up because adaptation is due to another resource.
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
}
TEST_F(ResourceAdaptationProcessorTest,
- FailingAdaptingAlsoClearsResourceUsageState) {
- resource_adaptation_queue_.SendTask(
- [this] {
- processor_->SetDegradationPreference(DegradationPreference::DISABLED);
- processor_->StartResourceAdaptation();
- resource_->set_usage_state(ResourceUsageState::kOveruse);
- EXPECT_EQ(0u, processor_listener_.restrictions_updated_count());
- EXPECT_FALSE(resource_->usage_state().has_value());
- },
- RTC_FROM_HERE);
+ TriggerOveruseNotOnAdaptationTaskQueue) {
+ processor_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_FRAMERATE);
+ SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
+
+ TaskQueueForTest resource_task_queue("ResourceTaskQueue");
+ resource_task_queue.PostTask(ToQueuedTask(
+ [&]() { resource_->SetUsageState(ResourceUsageState::kOveruse); }));
+
+ EXPECT_EQ_WAIT(1u, restrictions_listener_.restrictions_updated_count(),
+ kDefaultTimeoutMs);
}
TEST_F(ResourceAdaptationProcessorTest,
- AdaptsDownWhenOtherResourceIsAlwaysUnderused) {
- resource_adaptation_queue_.SendTask(
- [this] {
- processor_->SetDegradationPreference(
- DegradationPreference::MAINTAIN_FRAMERATE);
- processor_->StartResourceAdaptation();
- SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
- other_resource_->set_usage_state(ResourceUsageState::kUnderuse);
- // Does not trigger adapataion because there's no restriction.
- EXPECT_EQ(0, processor_listener_.adaptation_counters().Total());
-
- RestrictSource(processor_listener_.restrictions());
- resource_->set_usage_state(ResourceUsageState::kOveruse);
- // Adapts down even if other resource asked for adapting up.
- EXPECT_EQ(1, processor_listener_.adaptation_counters().Total());
-
- RestrictSource(processor_listener_.restrictions());
- other_resource_->set_usage_state(ResourceUsageState::kUnderuse);
- // Doesn't adapt up because adaptation is due to another resource.
- EXPECT_EQ(1, processor_listener_.adaptation_counters().Total());
- RestrictSource(processor_listener_.restrictions());
- },
- RTC_FROM_HERE);
+ DestroyProcessorWhileResourceListenerDelegateHasTaskInFlight) {
+ processor_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_FRAMERATE);
+ SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
+
+ // Wait for |resource_| to signal overuse first so we know that the delegate
+ // has passed it on to the processor's task queue.
+ rtc::Event resource_event;
+ TaskQueueForTest resource_task_queue("ResourceTaskQueue");
+ resource_task_queue.PostTask(ToQueuedTask([&]() {
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ resource_event.Set();
+ }));
+
+ EXPECT_TRUE(resource_event.Wait(kDefaultTimeoutMs));
+ // Now destroy the processor while handling the overuse is in flight.
+ DestroyProcessor();
+
+ // Because the processor was destroyed by the time the delegate's task ran,
+ // the overuse signal must not have been handled.
+ EXPECT_EQ(0u, restrictions_listener_.restrictions_updated_count());
+}
+
+TEST_F(ResourceAdaptationProcessorTest,
+ ResourceOveruseIgnoredWhenSignalledDuringRemoval) {
+ processor_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_FRAMERATE);
+ SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
+
+ rtc::Event overuse_event;
+ TaskQueueForTest resource_task_queue("ResourceTaskQueue");
+ // Queues task for |resource_| overuse while |processor_| is still listening.
+ resource_task_queue.PostTask(ToQueuedTask([&]() {
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ overuse_event.Set();
+ }));
+ EXPECT_TRUE(overuse_event.Wait(kDefaultTimeoutMs));
+ // Once we know the overuse task is queued, remove |resource_| so that
+ // |processor_| is not listening to it.
+ processor_->RemoveResource(resource_);
+
+ // Runs the queued task so |processor_| gets signalled kOveruse from
+ // |resource_| even though |processor_| was not listening.
+ WaitUntilTaskQueueIdle();
+
+ // No restrictions should change even though |resource_| signaled |kOveruse|.
+ EXPECT_EQ(0u, restrictions_listener_.restrictions_updated_count());
+
+ // Delete |resource_| for cleanup.
+ resource_ = nullptr;
+}
+
+TEST_F(ResourceAdaptationProcessorTest,
+ RemovingOnlyAdaptedResourceResetsAdaptation) {
+ processor_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_FRAMERATE);
+ SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
+
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+ RestrictSource(restrictions_listener_.restrictions());
+
+ processor_->RemoveResource(resource_);
+ EXPECT_EQ(0, restrictions_listener_.adaptation_counters().Total());
+
+ // Delete |resource_| for cleanup.
+ resource_ = nullptr;
+}
+
+TEST_F(ResourceAdaptationProcessorTest,
+ RemovingMostLimitedResourceSetsAdaptationToNextLimitedLevel) {
+ processor_->SetDegradationPreference(DegradationPreference::BALANCED);
+ SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
+
+ other_resource_->SetUsageState(ResourceUsageState::kOveruse);
+ RestrictSource(restrictions_listener_.restrictions());
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+ VideoSourceRestrictions next_limited_restrictions =
+ restrictions_listener_.restrictions();
+ VideoAdaptationCounters next_limited_counters =
+ restrictions_listener_.adaptation_counters();
+
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ RestrictSource(restrictions_listener_.restrictions());
+ EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total());
+
+ // Removing most limited |resource_| should revert us back to the next most
+ processor_->RemoveResource(resource_);
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+ EXPECT_EQ(next_limited_restrictions, restrictions_listener_.restrictions());
+ EXPECT_EQ(next_limited_counters,
+ restrictions_listener_.adaptation_counters());
+
+ // Delete |resource_| for cleanup.
+ resource_ = nullptr;
+}
+
+TEST_F(ResourceAdaptationProcessorTest,
+ RemovingMostLimitedResourceSetsAdaptationIfInputStateUnchanged) {
+ processor_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_FRAMERATE);
+ SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
+
+ other_resource_->SetUsageState(ResourceUsageState::kOveruse);
+ RestrictSource(restrictions_listener_.restrictions());
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+ VideoSourceRestrictions next_limited_restrictions =
+ restrictions_listener_.restrictions();
+ VideoAdaptationCounters next_limited_counters =
+ restrictions_listener_.adaptation_counters();
+
+ // Overuse twice and underuse once. After the underuse we don't restrict the
+ // source. Normally this would block future underuses.
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ RestrictSource(restrictions_listener_.restrictions());
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ RestrictSource(restrictions_listener_.restrictions());
+ resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total());
+
+ // Removing the most limited |resource_| should revert us back to the next
+ // limited level, even though we did not call RestrictSource() after
+ // |resource_| was overused. Normally adaptation for MAINTAIN_FRAMERATE would
+ // be blocked here, but for removal we allow this anyway.
+ processor_->RemoveResource(resource_);
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+ EXPECT_EQ(next_limited_restrictions, restrictions_listener_.restrictions());
+ EXPECT_EQ(next_limited_counters,
+ restrictions_listener_.adaptation_counters());
+
+ // Delete |resource_| for cleanup.
+ resource_ = nullptr;
+}
+
+TEST_F(ResourceAdaptationProcessorTest,
+ RemovingResourceNotMostLimitedHasNoEffectOnLimitations) {
+ processor_->SetDegradationPreference(DegradationPreference::BALANCED);
+ SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
+
+ other_resource_->SetUsageState(ResourceUsageState::kOveruse);
+ RestrictSource(restrictions_listener_.restrictions());
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ RestrictSource(restrictions_listener_.restrictions());
+ VideoSourceRestrictions current_restrictions =
+ restrictions_listener_.restrictions();
+ VideoAdaptationCounters current_counters =
+ restrictions_listener_.adaptation_counters();
+ EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total());
+
+ // Removing |other_resource_| (not most limited) should have no effect.
+ processor_->RemoveResource(other_resource_);
+ EXPECT_EQ(current_restrictions, restrictions_listener_.restrictions());
+ EXPECT_EQ(current_counters, restrictions_listener_.adaptation_counters());
+
+ // Delete |other_resource_| for cleanup.
+ other_resource_ = nullptr;
+}
+
+TEST_F(ResourceAdaptationProcessorTest,
+ RemovingMostLimitedResourceAfterSwitchingDegradationPreferences) {
+ processor_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_FRAMERATE);
+ SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
+
+ other_resource_->SetUsageState(ResourceUsageState::kOveruse);
+ RestrictSource(restrictions_listener_.restrictions());
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+ VideoSourceRestrictions next_limited_restrictions =
+ restrictions_listener_.restrictions();
+ VideoAdaptationCounters next_limited_counters =
+ restrictions_listener_.adaptation_counters();
+
+ processor_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_RESOLUTION);
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ RestrictSource(restrictions_listener_.restrictions());
+ EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total());
+
+ // Revert to |other_resource_| when removing |resource_| even though the
+ // degradation preference was different when it was overused.
+ processor_->RemoveResource(resource_);
+ EXPECT_EQ(next_limited_counters,
+ restrictions_listener_.adaptation_counters());
+
+ // After switching back to MAINTAIN_FRAMERATE, the next most limited settings
+ // are restored.
+ processor_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_FRAMERATE);
+ EXPECT_EQ(next_limited_restrictions, restrictions_listener_.restrictions());
+
+ // Delete |resource_| for cleanup.
+ resource_ = nullptr;
+}
+
+TEST_F(ResourceAdaptationProcessorTest,
+ RemovingMostLimitedResourceSetsNextLimitationsInDisabled) {
+ processor_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_FRAMERATE);
+ SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
+
+ other_resource_->SetUsageState(ResourceUsageState::kOveruse);
+ RestrictSource(restrictions_listener_.restrictions());
+ VideoSourceRestrictions next_limited_restrictions =
+ restrictions_listener_.restrictions();
+ VideoAdaptationCounters next_limited_counters =
+ restrictions_listener_.adaptation_counters();
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ RestrictSource(restrictions_listener_.restrictions());
+ EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total());
+
+ processor_->SetDegradationPreference(DegradationPreference::DISABLED);
+
+ // Revert to |other_resource_| when removing |resource_| even though the
+ // current degradation preference is disabled.
+ processor_->RemoveResource(resource_);
+
+ // After switching back to MAINTAIN_FRAMERATE, the next most limited settings
+ // are restored.
+ processor_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_FRAMERATE);
+ EXPECT_EQ(next_limited_restrictions, restrictions_listener_.restrictions());
+ EXPECT_EQ(next_limited_counters,
+ restrictions_listener_.adaptation_counters());
+
+ // Delete |resource_| for cleanup.
+ resource_ = nullptr;
+}
+
+TEST_F(ResourceAdaptationProcessorTest,
+ RemovedResourceSignalsIgnoredByProcessor) {
+ processor_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_FRAMERATE);
+ SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
+
+ processor_->RemoveResource(resource_);
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ EXPECT_EQ(0u, restrictions_listener_.restrictions_updated_count());
+
+ // Delete |resource_| for cleanup.
+ resource_ = nullptr;
+}
+
+TEST_F(ResourceAdaptationProcessorTest,
+ RemovingResourceWhenMultipleMostLimtedHasNoEffect) {
+ processor_->SetDegradationPreference(
+ DegradationPreference::MAINTAIN_FRAMERATE);
+ SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize);
+
+ other_resource_->SetUsageState(ResourceUsageState::kOveruse);
+ RestrictSource(restrictions_listener_.restrictions());
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+ // Adapt |resource_| down and then up so that both resources are most
+ // limited at 1 adaptation.
+ resource_->SetUsageState(ResourceUsageState::kOveruse);
+ RestrictSource(restrictions_listener_.restrictions());
+ resource_->SetUsageState(ResourceUsageState::kUnderuse);
+ RestrictSource(restrictions_listener_.restrictions());
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+
+ // Removing |resource_| has no effect since both |resource_| and
+ // |other_resource_| are most limited.
+ processor_->RemoveResource(resource_);
+ EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total());
+
+ // Delete |resource_| for cleanup.
+ resource_ = nullptr;
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/call/adaptation/resource_unittest.cc b/chromium/third_party/webrtc/call/adaptation/resource_unittest.cc
index 8f3ae32dca6..a2291dfdce8 100644
--- a/chromium/third_party/webrtc/call/adaptation/resource_unittest.cc
+++ b/chromium/third_party/webrtc/call/adaptation/resource_unittest.cc
@@ -8,14 +8,13 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "call/adaptation/resource.h"
+#include "api/adaptation/resource.h"
#include <memory>
#include "api/scoped_refptr.h"
#include "call/adaptation/test/fake_resource.h"
-#include "rtc_base/event.h"
-#include "rtc_base/task_queue_for_test.h"
+#include "call/adaptation/test/mock_resource_listener.h"
#include "test/gmock.h"
#include "test/gtest.h"
@@ -24,56 +23,33 @@ namespace webrtc {
using ::testing::_;
using ::testing::StrictMock;
-class MockResourceListener : public ResourceListener {
- public:
- MOCK_METHOD(void,
- OnResourceUsageStateMeasured,
- (rtc::scoped_refptr<Resource> resource));
-};
-
class ResourceTest : public ::testing::Test {
public:
- ResourceTest()
- : resource_adaptation_queue_("ResourceAdaptationQueue"),
- encoder_queue_("EncoderQueue"),
- fake_resource_(new FakeResource("FakeResource")) {
- fake_resource_->Initialize(&encoder_queue_, &resource_adaptation_queue_);
- }
+ ResourceTest() : fake_resource_(FakeResource::Create("FakeResource")) {}
protected:
- const std::unique_ptr<TaskQueueFactory> task_queue_factory_;
- TaskQueueForTest resource_adaptation_queue_;
- TaskQueueForTest encoder_queue_;
rtc::scoped_refptr<FakeResource> fake_resource_;
};
TEST_F(ResourceTest, RegisteringListenerReceivesCallbacks) {
- resource_adaptation_queue_.SendTask(
- [this] {
- StrictMock<MockResourceListener> resource_listener;
- fake_resource_->SetResourceListener(&resource_listener);
- EXPECT_CALL(resource_listener, OnResourceUsageStateMeasured(_))
- .Times(1)
- .WillOnce([](rtc::scoped_refptr<Resource> resource) {
- EXPECT_EQ(ResourceUsageState::kOveruse, resource->usage_state());
- });
- fake_resource_->set_usage_state(ResourceUsageState::kOveruse);
- fake_resource_->SetResourceListener(nullptr);
- },
- RTC_FROM_HERE);
+ StrictMock<MockResourceListener> resource_listener;
+ fake_resource_->SetResourceListener(&resource_listener);
+ EXPECT_CALL(resource_listener, OnResourceUsageStateMeasured(_, _))
+ .Times(1)
+ .WillOnce([](rtc::scoped_refptr<Resource> resource,
+ ResourceUsageState usage_state) {
+ EXPECT_EQ(ResourceUsageState::kOveruse, usage_state);
+ });
+ fake_resource_->SetUsageState(ResourceUsageState::kOveruse);
+ fake_resource_->SetResourceListener(nullptr);
}
TEST_F(ResourceTest, UnregisteringListenerStopsCallbacks) {
- resource_adaptation_queue_.SendTask(
- [this] {
- StrictMock<MockResourceListener> resource_listener;
- fake_resource_->SetResourceListener(&resource_listener);
- fake_resource_->SetResourceListener(nullptr);
- EXPECT_CALL(resource_listener, OnResourceUsageStateMeasured(_))
- .Times(0);
- fake_resource_->set_usage_state(ResourceUsageState::kOveruse);
- },
- RTC_FROM_HERE);
+ StrictMock<MockResourceListener> resource_listener;
+ fake_resource_->SetResourceListener(&resource_listener);
+ fake_resource_->SetResourceListener(nullptr);
+ EXPECT_CALL(resource_listener, OnResourceUsageStateMeasured(_, _)).Times(0);
+ fake_resource_->SetUsageState(ResourceUsageState::kOveruse);
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/call/adaptation/test/fake_adaptation_constraint.cc b/chromium/third_party/webrtc/call/adaptation/test/fake_adaptation_constraint.cc
new file mode 100644
index 00000000000..983885e58ac
--- /dev/null
+++ b/chromium/third_party/webrtc/call/adaptation/test/fake_adaptation_constraint.cc
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "call/adaptation/test/fake_adaptation_constraint.h"
+
+#include <utility>
+
+namespace webrtc {
+
+FakeAdaptationConstraint::FakeAdaptationConstraint(std::string name)
+ : name_(std::move(name)), is_adaptation_up_allowed_(true) {}
+
+FakeAdaptationConstraint::~FakeAdaptationConstraint() {}
+
+void FakeAdaptationConstraint::set_is_adaptation_up_allowed(
+ bool is_adaptation_up_allowed) {
+ is_adaptation_up_allowed_ = is_adaptation_up_allowed;
+}
+
+std::string FakeAdaptationConstraint::Name() const {
+ return name_;
+}
+
+bool FakeAdaptationConstraint::IsAdaptationUpAllowed(
+ const VideoStreamInputState& input_state,
+ const VideoSourceRestrictions& restrictions_before,
+ const VideoSourceRestrictions& restrictions_after,
+ rtc::scoped_refptr<Resource> reason_resource) const {
+ return is_adaptation_up_allowed_;
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/call/adaptation/test/fake_adaptation_constraint.h b/chromium/third_party/webrtc/call/adaptation/test/fake_adaptation_constraint.h
new file mode 100644
index 00000000000..74637f48fd0
--- /dev/null
+++ b/chromium/third_party/webrtc/call/adaptation/test/fake_adaptation_constraint.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef CALL_ADAPTATION_TEST_FAKE_ADAPTATION_CONSTRAINT_H_
+#define CALL_ADAPTATION_TEST_FAKE_ADAPTATION_CONSTRAINT_H_
+
+#include <string>
+
+#include "call/adaptation/adaptation_constraint.h"
+
+namespace webrtc {
+
+class FakeAdaptationConstraint : public AdaptationConstraint {
+ public:
+ explicit FakeAdaptationConstraint(std::string name);
+ ~FakeAdaptationConstraint() override;
+
+ void set_is_adaptation_up_allowed(bool is_adaptation_up_allowed);
+
+ // AdaptationConstraint implementation.
+ std::string Name() const override;
+ bool IsAdaptationUpAllowed(
+ const VideoStreamInputState& input_state,
+ const VideoSourceRestrictions& restrictions_before,
+ const VideoSourceRestrictions& restrictions_after,
+ rtc::scoped_refptr<Resource> reason_resource) const override;
+
+ private:
+ const std::string name_;
+ bool is_adaptation_up_allowed_;
+};
+
+} // namespace webrtc
+
+#endif // CALL_ADAPTATION_TEST_FAKE_ADAPTATION_CONSTRAINT_H_
diff --git a/chromium/third_party/webrtc/call/adaptation/test/fake_adaptation_listener.cc b/chromium/third_party/webrtc/call/adaptation/test/fake_adaptation_listener.cc
new file mode 100644
index 00000000000..7feecd63676
--- /dev/null
+++ b/chromium/third_party/webrtc/call/adaptation/test/fake_adaptation_listener.cc
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "call/adaptation/test/fake_adaptation_listener.h"
+
+namespace webrtc {
+
+FakeAdaptationListener::FakeAdaptationListener()
+ : num_adaptations_applied_(0) {}
+
+FakeAdaptationListener::~FakeAdaptationListener() {}
+
+size_t FakeAdaptationListener::num_adaptations_applied() const {
+ return num_adaptations_applied_;
+}
+
+void FakeAdaptationListener::OnAdaptationApplied(
+ const VideoStreamInputState& input_state,
+ const VideoSourceRestrictions& restrictions_before,
+ const VideoSourceRestrictions& restrictions_after,
+ rtc::scoped_refptr<Resource> reason_resource) {
+ ++num_adaptations_applied_;
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/call/adaptation/test/fake_adaptation_listener.h b/chromium/third_party/webrtc/call/adaptation/test/fake_adaptation_listener.h
new file mode 100644
index 00000000000..c60ba3089bc
--- /dev/null
+++ b/chromium/third_party/webrtc/call/adaptation/test/fake_adaptation_listener.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef CALL_ADAPTATION_TEST_FAKE_ADAPTATION_LISTENER_H_
+#define CALL_ADAPTATION_TEST_FAKE_ADAPTATION_LISTENER_H_
+
+#include "call/adaptation/adaptation_listener.h"
+
+namespace webrtc {
+
+class FakeAdaptationListener : public AdaptationListener {
+ public:
+ FakeAdaptationListener();
+ ~FakeAdaptationListener() override;
+
+ size_t num_adaptations_applied() const;
+
+ // AdaptationListener implementation.
+ void OnAdaptationApplied(
+ const VideoStreamInputState& input_state,
+ const VideoSourceRestrictions& restrictions_before,
+ const VideoSourceRestrictions& restrictions_after,
+ rtc::scoped_refptr<Resource> reason_resource) override;
+
+ private:
+ size_t num_adaptations_applied_;
+};
+
+} // namespace webrtc
+
+#endif // CALL_ADAPTATION_TEST_FAKE_ADAPTATION_LISTENER_H_
diff --git a/chromium/third_party/webrtc/call/adaptation/test/fake_frame_rate_provider.h b/chromium/third_party/webrtc/call/adaptation/test/fake_frame_rate_provider.h
index a08e1623217..3638f478f37 100644
--- a/chromium/third_party/webrtc/call/adaptation/test/fake_frame_rate_provider.h
+++ b/chromium/third_party/webrtc/call/adaptation/test/fake_frame_rate_provider.h
@@ -21,29 +21,41 @@ namespace webrtc {
class MockVideoStreamEncoderObserver : public VideoStreamEncoderObserver {
public:
- MOCK_METHOD2(OnEncodedFrameTimeMeasured, void(int, int));
- MOCK_METHOD2(OnIncomingFrame, void(int, int));
- MOCK_METHOD2(OnSendEncodedImage,
- void(const EncodedImage&, const CodecSpecificInfo*));
- MOCK_METHOD1(OnEncoderImplementationChanged, void(const std::string&));
- MOCK_METHOD1(OnFrameDropped, void(DropReason));
- MOCK_METHOD2(OnEncoderReconfigured,
- void(const VideoEncoderConfig&,
- const std::vector<VideoStream>&));
- MOCK_METHOD3(OnAdaptationChanged,
- void(VideoAdaptationReason,
- const VideoAdaptationCounters&,
- const VideoAdaptationCounters&));
- MOCK_METHOD0(ClearAdaptationStats, void());
- MOCK_METHOD2(UpdateAdaptationSettings,
- void(AdaptationSettings, AdaptationSettings));
- MOCK_METHOD0(OnMinPixelLimitReached, void());
- MOCK_METHOD0(OnInitialQualityResolutionAdaptDown, void());
- MOCK_METHOD1(OnSuspendChange, void(bool));
- MOCK_METHOD2(OnBitrateAllocationUpdated,
- void(const VideoCodec&, const VideoBitrateAllocation&));
- MOCK_METHOD1(OnEncoderInternalScalerUpdate, void(bool));
- MOCK_CONST_METHOD0(GetInputFrameRate, int());
+ MOCK_METHOD(void, OnEncodedFrameTimeMeasured, (int, int), (override));
+ MOCK_METHOD(void, OnIncomingFrame, (int, int), (override));
+ MOCK_METHOD(void,
+ OnSendEncodedImage,
+ (const EncodedImage&, const CodecSpecificInfo*),
+ (override));
+ MOCK_METHOD(void,
+ OnEncoderImplementationChanged,
+ (const std::string&),
+ (override));
+ MOCK_METHOD(void, OnFrameDropped, (DropReason), (override));
+ MOCK_METHOD(void,
+ OnEncoderReconfigured,
+ (const VideoEncoderConfig&, const std::vector<VideoStream>&),
+ (override));
+ MOCK_METHOD(void,
+ OnAdaptationChanged,
+ (VideoAdaptationReason,
+ const VideoAdaptationCounters&,
+ const VideoAdaptationCounters&),
+ (override));
+ MOCK_METHOD(void, ClearAdaptationStats, (), (override));
+ MOCK_METHOD(void,
+ UpdateAdaptationSettings,
+ (AdaptationSettings, AdaptationSettings),
+ (override));
+ MOCK_METHOD(void, OnMinPixelLimitReached, (), (override));
+ MOCK_METHOD(void, OnInitialQualityResolutionAdaptDown, (), (override));
+ MOCK_METHOD(void, OnSuspendChange, (bool), (override));
+ MOCK_METHOD(void,
+ OnBitrateAllocationUpdated,
+ (const VideoCodec&, const VideoBitrateAllocation&),
+ (override));
+ MOCK_METHOD(void, OnEncoderInternalScalerUpdate, (bool), (override));
+ MOCK_METHOD(int, GetInputFrameRate, (), (const, override));
};
class FakeFrameRateProvider : public MockVideoStreamEncoderObserver {
diff --git a/chromium/third_party/webrtc/call/adaptation/test/fake_resource.cc b/chromium/third_party/webrtc/call/adaptation/test/fake_resource.cc
index 4c0a129d04e..fa69e886bfc 100644
--- a/chromium/third_party/webrtc/call/adaptation/test/fake_resource.cc
+++ b/chromium/third_party/webrtc/call/adaptation/test/fake_resource.cc
@@ -10,44 +10,35 @@
#include "call/adaptation/test/fake_resource.h"
+#include <algorithm>
#include <utility>
+#include "rtc_base/ref_counted_object.h"
+
namespace webrtc {
+// static
+rtc::scoped_refptr<FakeResource> FakeResource::Create(std::string name) {
+ return new rtc::RefCountedObject<FakeResource>(name);
+}
+
FakeResource::FakeResource(std::string name)
- : rtc::RefCountedObject<Resource>(),
- name_(std::move(name)),
- is_adaptation_up_allowed_(true),
- num_adaptations_applied_(0) {}
+ : Resource(), name_(std::move(name)), listener_(nullptr) {}
FakeResource::~FakeResource() {}
-void FakeResource::set_usage_state(ResourceUsageState usage_state) {
- OnResourceUsageStateMeasured(usage_state);
-}
-
-void FakeResource::set_is_adaptation_up_allowed(bool is_adaptation_up_allowed) {
- is_adaptation_up_allowed_ = is_adaptation_up_allowed;
-}
-
-size_t FakeResource::num_adaptations_applied() const {
- return num_adaptations_applied_;
+void FakeResource::SetUsageState(ResourceUsageState usage_state) {
+ if (listener_) {
+ listener_->OnResourceUsageStateMeasured(this, usage_state);
+ }
}
-bool FakeResource::IsAdaptationUpAllowed(
- const VideoStreamInputState& input_state,
- const VideoSourceRestrictions& restrictions_before,
- const VideoSourceRestrictions& restrictions_after,
- rtc::scoped_refptr<Resource> reason_resource) const {
- return is_adaptation_up_allowed_;
+std::string FakeResource::Name() const {
+ return name_;
}
-void FakeResource::OnAdaptationApplied(
- const VideoStreamInputState& input_state,
- const VideoSourceRestrictions& restrictions_before,
- const VideoSourceRestrictions& restrictions_after,
- rtc::scoped_refptr<Resource> reason_resource) {
- ++num_adaptations_applied_;
+void FakeResource::SetResourceListener(ResourceListener* listener) {
+ listener_ = listener;
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/call/adaptation/test/fake_resource.h b/chromium/third_party/webrtc/call/adaptation/test/fake_resource.h
index beaca546144..e88d97db7a0 100644
--- a/chromium/third_party/webrtc/call/adaptation/test/fake_resource.h
+++ b/chromium/third_party/webrtc/call/adaptation/test/fake_resource.h
@@ -12,39 +12,31 @@
#define CALL_ADAPTATION_TEST_FAKE_RESOURCE_H_
#include <string>
+#include <vector>
-#include "call/adaptation/resource.h"
-#include "rtc_base/ref_counted_object.h"
+#include "absl/types/optional.h"
+#include "api/adaptation/resource.h"
+#include "api/scoped_refptr.h"
namespace webrtc {
// Fake resource used for testing.
-class FakeResource : public rtc::RefCountedObject<Resource> {
+class FakeResource : public Resource {
public:
+ static rtc::scoped_refptr<FakeResource> Create(std::string name);
+
explicit FakeResource(std::string name);
~FakeResource() override;
- void set_usage_state(ResourceUsageState usage_state);
- void set_is_adaptation_up_allowed(bool is_adaptation_up_allowed);
- size_t num_adaptations_applied() const;
+ void SetUsageState(ResourceUsageState usage_state);
// Resource implementation.
- std::string name() const override { return name_; }
- bool IsAdaptationUpAllowed(
- const VideoStreamInputState& input_state,
- const VideoSourceRestrictions& restrictions_before,
- const VideoSourceRestrictions& restrictions_after,
- rtc::scoped_refptr<Resource> reason_resource) const override;
- void OnAdaptationApplied(
- const VideoStreamInputState& input_state,
- const VideoSourceRestrictions& restrictions_before,
- const VideoSourceRestrictions& restrictions_after,
- rtc::scoped_refptr<Resource> reason_resource) override;
+ std::string Name() const override;
+ void SetResourceListener(ResourceListener* listener) override;
private:
const std::string name_;
- bool is_adaptation_up_allowed_;
- size_t num_adaptations_applied_;
+ ResourceListener* listener_;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/call/adaptation/test/mock_resource_listener.h b/chromium/third_party/webrtc/call/adaptation/test/mock_resource_listener.h
new file mode 100644
index 00000000000..f0f998f2e33
--- /dev/null
+++ b/chromium/third_party/webrtc/call/adaptation/test/mock_resource_listener.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef CALL_ADAPTATION_TEST_MOCK_RESOURCE_LISTENER_H_
+#define CALL_ADAPTATION_TEST_MOCK_RESOURCE_LISTENER_H_
+
+#include "api/adaptation/resource.h"
+
+#include "test/gmock.h"
+
+namespace webrtc {
+
+class MockResourceListener : public ResourceListener {
+ public:
+ MOCK_METHOD(void,
+ OnResourceUsageStateMeasured,
+ (rtc::scoped_refptr<Resource> resource,
+ ResourceUsageState usage_state),
+ (override));
+};
+
+} // namespace webrtc
+
+#endif // CALL_ADAPTATION_TEST_MOCK_RESOURCE_LISTENER_H_
diff --git a/chromium/third_party/webrtc/call/adaptation/video_source_restrictions.cc b/chromium/third_party/webrtc/call/adaptation/video_source_restrictions.cc
index 6fbdcb42a66..e9d6c26137b 100644
--- a/chromium/third_party/webrtc/call/adaptation/video_source_restrictions.cc
+++ b/chromium/third_party/webrtc/call/adaptation/video_source_restrictions.cc
@@ -13,6 +13,7 @@
#include <limits>
#include "rtc_base/checks.h"
+#include "rtc_base/strings/string_builder.h"
namespace webrtc {
@@ -36,6 +37,19 @@ VideoSourceRestrictions::VideoSourceRestrictions(
RTC_DCHECK(!max_frame_rate_.has_value() || max_frame_rate_.value() > 0.0);
}
+std::string VideoSourceRestrictions::ToString() const {
+ rtc::StringBuilder ss;
+ ss << "{";
+ if (max_frame_rate_)
+ ss << " max_fps=" << max_frame_rate_.value();
+ if (max_pixels_per_frame_)
+ ss << " max_pixels_per_frame=" << max_pixels_per_frame_.value();
+ if (target_pixels_per_frame_)
+ ss << " target_pixels_per_frame=" << target_pixels_per_frame_.value();
+ ss << " }";
+ return ss.Release();
+}
+
const absl::optional<size_t>& VideoSourceRestrictions::max_pixels_per_frame()
const {
return max_pixels_per_frame_;
diff --git a/chromium/third_party/webrtc/call/adaptation/video_source_restrictions.h b/chromium/third_party/webrtc/call/adaptation/video_source_restrictions.h
index 506bae61334..7f79a48e5d4 100644
--- a/chromium/third_party/webrtc/call/adaptation/video_source_restrictions.h
+++ b/chromium/third_party/webrtc/call/adaptation/video_source_restrictions.h
@@ -11,6 +11,7 @@
#ifndef CALL_ADAPTATION_VIDEO_SOURCE_RESTRICTIONS_H_
#define CALL_ADAPTATION_VIDEO_SOURCE_RESTRICTIONS_H_
+#include <string>
#include <utility>
#include "absl/types/optional.h"
@@ -38,6 +39,8 @@ class VideoSourceRestrictions {
return !(*this == rhs);
}
+ std::string ToString() const;
+
// The source must produce a resolution less than or equal to
// max_pixels_per_frame().
const absl::optional<size_t>& max_pixels_per_frame() const;
diff --git a/chromium/third_party/webrtc/call/adaptation/video_stream_adapter.cc b/chromium/third_party/webrtc/call/adaptation/video_stream_adapter.cc
index 4ebe00fb0c3..4bf236fe714 100644
--- a/chromium/third_party/webrtc/call/adaptation/video_stream_adapter.cc
+++ b/chromium/third_party/webrtc/call/adaptation/video_stream_adapter.cc
@@ -111,8 +111,27 @@ int GetHigherResolutionThan(int pixel_count) {
: std::numeric_limits<int>::max();
}
-Adaptation::Step::Step(StepType type, int target)
- : type(type), target(target) {}
+// static
+const char* Adaptation::StatusToString(Adaptation::Status status) {
+ switch (status) {
+ case Adaptation::Status::kValid:
+ return "kValid";
+ case Adaptation::Status::kLimitReached:
+ return "kLimitReached";
+ case Adaptation::Status::kAwaitingPreviousAdaptation:
+ return "kAwaitingPreviousAdaptation";
+ }
+}
+
+Adaptation::Step::Step(StepType type, int target) : type(type), target(target) {
+ RTC_DCHECK_NE(type, Adaptation::StepType::kForce);
+}
+
+Adaptation::Step::Step(VideoSourceRestrictions restrictions,
+ VideoAdaptationCounters counters)
+ : type(Adaptation::StepType::kForce),
+ restrictions(restrictions),
+ counters(counters) {}
Adaptation::Adaptation(int validation_id, Step step)
: validation_id_(validation_id),
@@ -176,13 +195,19 @@ class VideoStreamAdapter::VideoSourceRestrictor {
adaptations_ = VideoAdaptationCounters();
}
+ void ForceRestrictions(const VideoSourceRestrictions& restrictions,
+ const VideoAdaptationCounters& counters) {
+ source_restrictions_ = restrictions;
+ adaptations_ = counters;
+ }
+
void set_min_pixels_per_frame(int min_pixels_per_frame) {
min_pixels_per_frame_ = min_pixels_per_frame;
}
int min_pixels_per_frame() const { return min_pixels_per_frame_; }
- bool CanDecreaseResolutionTo(int target_pixels) {
+ bool CanDecreaseResolutionTo(int target_pixels) const {
int max_pixels_per_frame = rtc::dchecked_cast<int>(
source_restrictions_.max_pixels_per_frame().value_or(
std::numeric_limits<int>::max()));
@@ -190,7 +215,7 @@ class VideoStreamAdapter::VideoSourceRestrictor {
target_pixels >= min_pixels_per_frame_;
}
- bool CanIncreaseResolutionTo(int target_pixels) {
+ bool CanIncreaseResolutionTo(int target_pixels) const {
int max_pixels_wanted = GetIncreasedMaxPixelsWanted(target_pixels);
int max_pixels_per_frame = rtc::dchecked_cast<int>(
source_restrictions_.max_pixels_per_frame().value_or(
@@ -198,14 +223,14 @@ class VideoStreamAdapter::VideoSourceRestrictor {
return max_pixels_wanted > max_pixels_per_frame;
}
- bool CanDecreaseFrameRateTo(int max_frame_rate) {
+ bool CanDecreaseFrameRateTo(int max_frame_rate) const {
const int fps_wanted = std::max(kMinFrameRateFps, max_frame_rate);
return fps_wanted < rtc::dchecked_cast<int>(
source_restrictions_.max_frame_rate().value_or(
std::numeric_limits<int>::max()));
}
- bool CanIncreaseFrameRateTo(int max_frame_rate) {
+ bool CanIncreaseFrameRateTo(int max_frame_rate) const {
return max_frame_rate > rtc::dchecked_cast<int>(
source_restrictions_.max_frame_rate().value_or(
std::numeric_limits<int>::max()));
@@ -215,13 +240,16 @@ class VideoStreamAdapter::VideoSourceRestrictor {
DegradationPreference degradation_preference) {
switch (step.type) {
case Adaptation::StepType::kIncreaseResolution:
- IncreaseResolutionTo(step.target);
+ RTC_DCHECK(step.target);
+ IncreaseResolutionTo(step.target.value());
break;
case Adaptation::StepType::kDecreaseResolution:
- DecreaseResolutionTo(step.target);
+ RTC_DCHECK(step.target);
+ DecreaseResolutionTo(step.target.value());
break;
case Adaptation::StepType::kIncreaseFrameRate:
- IncreaseFrameRateTo(step.target);
+ RTC_DCHECK(step.target);
+ IncreaseFrameRateTo(step.target.value());
// TODO(https://crbug.com/webrtc/11222): Don't adapt in two steps.
// GetAdaptationUp() should tell us the correct value, but BALANCED
// logic in DecrementFramerate() makes it hard to predict whether this
@@ -235,7 +263,13 @@ class VideoStreamAdapter::VideoSourceRestrictor {
}
break;
case Adaptation::StepType::kDecreaseFrameRate:
- DecreaseFrameRateTo(step.target);
+ RTC_DCHECK(step.target);
+ DecreaseFrameRateTo(step.target.value());
+ break;
+ case Adaptation::StepType::kForce:
+ RTC_DCHECK(step.restrictions);
+ RTC_DCHECK(step.counters);
+ ForceRestrictions(step.restrictions.value(), step.counters.value());
break;
}
}
@@ -501,19 +535,23 @@ Adaptation VideoStreamAdapter::GetAdaptationDown() const {
}
}
-VideoSourceRestrictions VideoStreamAdapter::PeekNextRestrictions(
- const Adaptation& adaptation) const {
+VideoStreamAdapter::RestrictionsWithCounters
+VideoStreamAdapter::PeekNextRestrictions(const Adaptation& adaptation) const {
RTC_DCHECK_EQ(adaptation.validation_id_, adaptation_validation_id_);
+ RTC_LOG(LS_INFO) << "PeekNextRestrictions called";
if (adaptation.status() != Adaptation::Status::kValid)
- return source_restrictor_->source_restrictions();
+ return {source_restrictor_->source_restrictions(),
+ source_restrictor_->adaptation_counters()};
VideoSourceRestrictor restrictor_copy = *source_restrictor_;
restrictor_copy.ApplyAdaptationStep(adaptation.step(),
degradation_preference_);
- return restrictor_copy.source_restrictions();
+ return {restrictor_copy.source_restrictions(),
+ restrictor_copy.adaptation_counters()};
}
void VideoStreamAdapter::ApplyAdaptation(const Adaptation& adaptation) {
RTC_DCHECK_EQ(adaptation.validation_id_, adaptation_validation_id_);
+ RTC_LOG(LS_INFO) << "ApplyAdaptation called";
if (adaptation.status() != Adaptation::Status::kValid)
return;
// Remember the input pixels and fps of this adaptation. Used to avoid
@@ -526,4 +564,12 @@ void VideoStreamAdapter::ApplyAdaptation(const Adaptation& adaptation) {
degradation_preference_);
}
+Adaptation VideoStreamAdapter::GetAdaptationTo(
+ const VideoAdaptationCounters& counters,
+ const VideoSourceRestrictions& restrictions) const {
+ // Adapts up/down from the current levels so counters are equal.
+ return Adaptation(adaptation_validation_id_,
+ Adaptation::Step(restrictions, counters));
+}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/call/adaptation/video_stream_adapter.h b/chromium/third_party/webrtc/call/adaptation/video_stream_adapter.h
index f313e6bed6c..3a56f4f7c52 100644
--- a/chromium/third_party/webrtc/call/adaptation/video_stream_adapter.h
+++ b/chromium/third_party/webrtc/call/adaptation/video_stream_adapter.h
@@ -14,9 +14,9 @@
#include <memory>
#include "absl/types/optional.h"
+#include "api/adaptation/resource.h"
#include "api/rtp_parameters.h"
#include "api/video/video_adaptation_counters.h"
-#include "call/adaptation/resource.h"
#include "call/adaptation/video_source_restrictions.h"
#include "call/adaptation/video_stream_input_state.h"
#include "modules/video_coding/utility/quality_scaler.h"
@@ -56,6 +56,8 @@ class Adaptation final {
kAwaitingPreviousAdaptation,
};
+ static const char* StatusToString(Status status);
+
// The status of this Adaptation. To find out how this Adaptation affects
// VideoSourceRestrictions, see VideoStreamAdapter::PeekNextRestrictions().
Status status() const;
@@ -73,12 +75,22 @@ class Adaptation final {
kDecreaseResolution,
kIncreaseFrameRate,
kDecreaseFrameRate,
+ kForce
};
struct Step {
Step(StepType type, int target);
+ // StepType is kForce
+ Step(VideoSourceRestrictions restrictions,
+ VideoAdaptationCounters counters);
const StepType type;
- const int target; // Pixel or frame rate depending on |type|.
+ // Pixel or frame rate depending on |type|.
+ // Only set when |type| is not kForce.
+ const absl::optional<int> target;
+ // Only set when |type| is kForce.
+ const absl::optional<VideoSourceRestrictions> restrictions;
+ // Only set when |type| is kForce.
+ const absl::optional<VideoAdaptationCounters> counters;
};
// Constructs with a valid adaptation Step. Status is kValid.
@@ -127,10 +139,18 @@ class VideoStreamAdapter {
// status code indicating the reason why we cannot adapt.
Adaptation GetAdaptationUp() const;
Adaptation GetAdaptationDown() const;
+ Adaptation GetAdaptationTo(const VideoAdaptationCounters& counters,
+ const VideoSourceRestrictions& restrictions) const;
+
+ struct RestrictionsWithCounters {
+ VideoSourceRestrictions restrictions;
+ VideoAdaptationCounters adaptation_counters;
+ };
+
// Returns the restrictions that result from applying the adaptation, without
// actually applying it. If the adaptation is not valid, current restrictions
// are returned.
- VideoSourceRestrictions PeekNextRestrictions(
+ RestrictionsWithCounters PeekNextRestrictions(
const Adaptation& adaptation) const;
// Updates source_restrictions() based according to the Adaptation.
void ApplyAdaptation(const Adaptation& adaptation);
diff --git a/chromium/third_party/webrtc/call/adaptation/video_stream_adapter_unittest.cc b/chromium/third_party/webrtc/call/adaptation/video_stream_adapter_unittest.cc
index 79247a7837d..49b291c3569 100644
--- a/chromium/third_party/webrtc/call/adaptation/video_stream_adapter_unittest.cc
+++ b/chromium/third_party/webrtc/call/adaptation/video_stream_adapter_unittest.cc
@@ -686,26 +686,35 @@ TEST(VideoStreamAdapterTest, PeekNextRestrictions) {
{
Adaptation adaptation = adapter.GetAdaptationUp();
EXPECT_EQ(Adaptation::Status::kLimitReached, adaptation.status());
- EXPECT_EQ(adapter.PeekNextRestrictions(adaptation),
+ VideoStreamAdapter::RestrictionsWithCounters restrictions_with_counters =
+ adapter.PeekNextRestrictions(adaptation);
+ EXPECT_EQ(restrictions_with_counters.restrictions,
adapter.source_restrictions());
+ EXPECT_EQ(0, restrictions_with_counters.adaptation_counters.Total());
}
// When we adapt down.
{
Adaptation adaptation = adapter.GetAdaptationDown();
EXPECT_EQ(Adaptation::Status::kValid, adaptation.status());
- VideoSourceRestrictions next_restrictions =
+ VideoStreamAdapter::RestrictionsWithCounters restrictions_with_counters =
adapter.PeekNextRestrictions(adaptation);
fake_stream.ApplyAdaptation(adaptation);
- EXPECT_EQ(next_restrictions, adapter.source_restrictions());
+ EXPECT_EQ(restrictions_with_counters.restrictions,
+ adapter.source_restrictions());
+ EXPECT_EQ(restrictions_with_counters.adaptation_counters,
+ adapter.adaptation_counters());
}
// When we adapt up.
{
Adaptation adaptation = adapter.GetAdaptationUp();
EXPECT_EQ(Adaptation::Status::kValid, adaptation.status());
- VideoSourceRestrictions next_restrictions =
+ VideoStreamAdapter::RestrictionsWithCounters restrictions_with_counters =
adapter.PeekNextRestrictions(adaptation);
fake_stream.ApplyAdaptation(adaptation);
- EXPECT_EQ(next_restrictions, adapter.source_restrictions());
+ EXPECT_EQ(restrictions_with_counters.restrictions,
+ adapter.source_restrictions());
+ EXPECT_EQ(restrictions_with_counters.adaptation_counters,
+ adapter.adaptation_counters());
}
}
diff --git a/chromium/third_party/webrtc/call/audio_send_stream.cc b/chromium/third_party/webrtc/call/audio_send_stream.cc
index ddcba031a76..765ece7eb9b 100644
--- a/chromium/third_party/webrtc/call/audio_send_stream.cc
+++ b/chromium/third_party/webrtc/call/audio_send_stream.cc
@@ -75,6 +75,8 @@ std::string AudioSendStream::Config::SendCodecSpec::ToString() const {
ss << ", transport_cc_enabled: " << (transport_cc_enabled ? "true" : "false");
ss << ", cng_payload_type: "
<< (cng_payload_type ? rtc::ToString(*cng_payload_type) : "<unset>");
+ ss << ", red_payload_type: "
+ << (red_payload_type ? rtc::ToString(*red_payload_type) : "<unset>");
ss << ", payload_type: " << payload_type;
ss << ", format: " << rtc::ToString(format);
ss << '}';
diff --git a/chromium/third_party/webrtc/call/audio_send_stream.h b/chromium/third_party/webrtc/call/audio_send_stream.h
index 86cea38938f..d21dff48891 100644
--- a/chromium/third_party/webrtc/call/audio_send_stream.h
+++ b/chromium/third_party/webrtc/call/audio_send_stream.h
@@ -140,6 +140,7 @@ class AudioSendStream : public AudioSender {
bool nack_enabled = false;
bool transport_cc_enabled = false;
absl::optional<int> cng_payload_type;
+ absl::optional<int> red_payload_type;
// If unset, use the encoder's default target bitrate.
absl::optional<int> target_bitrate_bps;
};
diff --git a/chromium/third_party/webrtc/call/bitrate_allocator_unittest.cc b/chromium/third_party/webrtc/call/bitrate_allocator_unittest.cc
index 1479a4714aa..00fb2369483 100644
--- a/chromium/third_party/webrtc/call/bitrate_allocator_unittest.cc
+++ b/chromium/third_party/webrtc/call/bitrate_allocator_unittest.cc
@@ -47,7 +47,10 @@ auto AllocationLimitsEq(uint32_t min_allocatable_rate_bps,
class MockLimitObserver : public BitrateAllocator::LimitObserver {
public:
- MOCK_METHOD1(OnAllocationLimitsChanged, void(BitrateAllocationLimits));
+ MOCK_METHOD(void,
+ OnAllocationLimitsChanged,
+ (BitrateAllocationLimits),
+ (override));
};
class TestBitrateObserver : public BitrateAllocatorObserver {
diff --git a/chromium/third_party/webrtc/call/call.cc b/chromium/third_party/webrtc/call/call.cc
index 4068db9f007..0ef2a3a4bc0 100644
--- a/chromium/third_party/webrtc/call/call.cc
+++ b/chromium/third_party/webrtc/call/call.cc
@@ -25,6 +25,7 @@
#include "audio/audio_receive_stream.h"
#include "audio/audio_send_stream.h"
#include "audio/audio_state.h"
+#include "call/adaptation/broadcast_resource_listener.h"
#include "call/bitrate_allocator.h"
#include "call/flexfec_receive_stream_impl.h"
#include "call/receive_time_calculator.h"
@@ -49,8 +50,8 @@
#include "rtc_base/location.h"
#include "rtc_base/logging.h"
#include "rtc_base/strings/string_builder.h"
-#include "rtc_base/synchronization/rw_lock_wrapper.h"
#include "rtc_base/synchronization/sequence_checker.h"
+#include "rtc_base/task_utils/pending_task_safety_flag.h"
#include "rtc_base/thread_annotations.h"
#include "rtc_base/time_utils.h"
#include "rtc_base/trace_event.h"
@@ -168,6 +169,47 @@ TaskQueueBase* GetCurrentTaskQueueOrThread() {
namespace internal {
+// Wraps an injected resource in a BroadcastResourceListener and handles adding
+// and removing adapter resources to individual VideoSendStreams.
+class ResourceVideoSendStreamForwarder {
+ public:
+ ResourceVideoSendStreamForwarder(
+ rtc::scoped_refptr<webrtc::Resource> resource)
+ : broadcast_resource_listener_(resource) {
+ broadcast_resource_listener_.StartListening();
+ }
+ ~ResourceVideoSendStreamForwarder() {
+ RTC_DCHECK(adapter_resources_.empty());
+ broadcast_resource_listener_.StopListening();
+ }
+
+ rtc::scoped_refptr<webrtc::Resource> Resource() const {
+ return broadcast_resource_listener_.SourceResource();
+ }
+
+ void OnCreateVideoSendStream(VideoSendStream* video_send_stream) {
+ RTC_DCHECK(adapter_resources_.find(video_send_stream) ==
+ adapter_resources_.end());
+ auto adapter_resource =
+ broadcast_resource_listener_.CreateAdapterResource();
+ video_send_stream->AddAdaptationResource(adapter_resource);
+ adapter_resources_.insert(
+ std::make_pair(video_send_stream, adapter_resource));
+ }
+
+ void OnDestroyVideoSendStream(VideoSendStream* video_send_stream) {
+ auto it = adapter_resources_.find(video_send_stream);
+ RTC_DCHECK(it != adapter_resources_.end());
+ broadcast_resource_listener_.RemoveAdapterResource(it->second);
+ adapter_resources_.erase(it);
+ }
+
+ private:
+ BroadcastResourceListener broadcast_resource_listener_;
+ std::map<VideoSendStream*, rtc::scoped_refptr<webrtc::Resource>>
+ adapter_resources_;
+};
+
class Call final : public webrtc::Call,
public PacketReceiver,
public RecoveredPacketReceiver,
@@ -177,7 +219,7 @@ class Call final : public webrtc::Call,
Call(Clock* clock,
const Call::Config& config,
std::unique_ptr<RtpTransportControllerSendInterface> transport_send,
- std::unique_ptr<ProcessThread> module_process_thread,
+ rtc::scoped_refptr<SharedModuleThread> module_process_thread,
TaskQueueFactory* task_queue_factory);
~Call() override;
@@ -212,6 +254,8 @@ class Call final : public webrtc::Call,
void DestroyFlexfecReceiveStream(
FlexfecReceiveStream* receive_stream) override;
+ void AddAdaptationResource(rtc::scoped_refptr<Resource> resource) override;
+
RtpTransportControllerSendInterface* GetTransportControllerSend() override;
Stats GetStats() const override;
@@ -243,54 +287,54 @@ class Call final : public webrtc::Call,
private:
DeliveryStatus DeliverRtcp(MediaType media_type,
const uint8_t* packet,
- size_t length);
+ size_t length)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_);
DeliveryStatus DeliverRtp(MediaType media_type,
rtc::CopyOnWriteBuffer packet,
- int64_t packet_time_us);
+ int64_t packet_time_us)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_);
void ConfigureSync(const std::string& sync_group)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(receive_crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_);
void NotifyBweOfReceivedPacket(const RtpPacketReceived& packet,
MediaType media_type)
- RTC_SHARED_LOCKS_REQUIRED(receive_crit_);
+ RTC_SHARED_LOCKS_REQUIRED(worker_thread_);
void UpdateSendHistograms(Timestamp first_sent_packet)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(&bitrate_crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_);
void UpdateReceiveHistograms();
void UpdateHistograms();
void UpdateAggregateNetworkState();
void RegisterRateObserver();
- rtc::TaskQueue* network_queue() const {
+ rtc::TaskQueue* send_transport_queue() const {
return transport_send_ptr_->GetWorkerQueue();
}
Clock* const clock_;
TaskQueueFactory* const task_queue_factory_;
+ TaskQueueBase* const worker_thread_;
const int num_cpu_cores_;
- const std::unique_ptr<ProcessThread> module_process_thread_;
+ const rtc::scoped_refptr<SharedModuleThread> module_process_thread_;
const std::unique_ptr<CallStats> call_stats_;
const std::unique_ptr<BitrateAllocator> bitrate_allocator_;
Call::Config config_;
- SequenceChecker configuration_sequence_checker_;
- SequenceChecker worker_sequence_checker_;
NetworkState audio_network_state_;
NetworkState video_network_state_;
- bool aggregate_network_up_ RTC_GUARDED_BY(configuration_sequence_checker_);
+ bool aggregate_network_up_ RTC_GUARDED_BY(worker_thread_);
- std::unique_ptr<RWLockWrapper> receive_crit_;
// Audio, Video, and FlexFEC receive streams are owned by the client that
// creates them.
std::set<AudioReceiveStream*> audio_receive_streams_
- RTC_GUARDED_BY(receive_crit_);
+ RTC_GUARDED_BY(worker_thread_);
std::set<VideoReceiveStream2*> video_receive_streams_
- RTC_GUARDED_BY(receive_crit_);
+ RTC_GUARDED_BY(worker_thread_);
std::map<std::string, AudioReceiveStream*> sync_stream_mapping_
- RTC_GUARDED_BY(receive_crit_);
+ RTC_GUARDED_BY(worker_thread_);
// TODO(nisse): Should eventually be injected at creation,
// with a single object in the bundled case.
@@ -324,25 +368,26 @@ class Call final : public webrtc::Call,
const bool use_send_side_bwe;
};
std::map<uint32_t, ReceiveRtpConfig> receive_rtp_config_
- RTC_GUARDED_BY(receive_crit_);
+ RTC_GUARDED_BY(worker_thread_);
- std::unique_ptr<RWLockWrapper> send_crit_;
// Audio and Video send streams are owned by the client that creates them.
std::map<uint32_t, AudioSendStream*> audio_send_ssrcs_
- RTC_GUARDED_BY(send_crit_);
+ RTC_GUARDED_BY(worker_thread_);
std::map<uint32_t, VideoSendStream*> video_send_ssrcs_
- RTC_GUARDED_BY(send_crit_);
- std::set<VideoSendStream*> video_send_streams_ RTC_GUARDED_BY(send_crit_);
+ RTC_GUARDED_BY(worker_thread_);
+ std::set<VideoSendStream*> video_send_streams_ RTC_GUARDED_BY(worker_thread_);
+
+ // Each forwarder wraps an adaptation resource that was added to the call.
+ std::vector<std::unique_ptr<ResourceVideoSendStreamForwarder>>
+ adaptation_resource_forwarders_ RTC_GUARDED_BY(worker_thread_);
using RtpStateMap = std::map<uint32_t, RtpState>;
- RtpStateMap suspended_audio_send_ssrcs_
- RTC_GUARDED_BY(configuration_sequence_checker_);
- RtpStateMap suspended_video_send_ssrcs_
- RTC_GUARDED_BY(configuration_sequence_checker_);
+ RtpStateMap suspended_audio_send_ssrcs_ RTC_GUARDED_BY(worker_thread_);
+ RtpStateMap suspended_video_send_ssrcs_ RTC_GUARDED_BY(worker_thread_);
using RtpPayloadStateMap = std::map<uint32_t, RtpPayloadState>;
RtpPayloadStateMap suspended_video_payload_states_
- RTC_GUARDED_BY(configuration_sequence_checker_);
+ RTC_GUARDED_BY(worker_thread_);
webrtc::RtcEventLog* event_log_;
@@ -358,17 +403,14 @@ class Call final : public webrtc::Call,
absl::optional<int64_t> first_received_rtp_video_ms_;
absl::optional<int64_t> last_received_rtp_video_ms_;
- rtc::CriticalSection last_bandwidth_bps_crit_;
- uint32_t last_bandwidth_bps_ RTC_GUARDED_BY(&last_bandwidth_bps_crit_);
+ uint32_t last_bandwidth_bps_ RTC_GUARDED_BY(worker_thread_);
// TODO(holmer): Remove this lock once BitrateController no longer calls
// OnNetworkChanged from multiple threads.
- rtc::CriticalSection bitrate_crit_;
- uint32_t min_allocated_send_bitrate_bps_
- RTC_GUARDED_BY(&worker_sequence_checker_);
- uint32_t configured_max_padding_bitrate_bps_ RTC_GUARDED_BY(&bitrate_crit_);
+ uint32_t min_allocated_send_bitrate_bps_ RTC_GUARDED_BY(worker_thread_);
+ uint32_t configured_max_padding_bitrate_bps_ RTC_GUARDED_BY(worker_thread_);
AvgCounter estimated_send_bitrate_kbps_counter_
- RTC_GUARDED_BY(&bitrate_crit_);
- AvgCounter pacer_bitrate_kbps_counter_ RTC_GUARDED_BY(&bitrate_crit_);
+ RTC_GUARDED_BY(worker_thread_);
+ AvgCounter pacer_bitrate_kbps_counter_ RTC_GUARDED_BY(worker_thread_);
ReceiveSideCongestionController receive_side_cc_;
@@ -377,6 +419,11 @@ class Call final : public webrtc::Call,
const std::unique_ptr<SendDelayStats> video_send_delay_stats_;
const int64_t start_ms_;
+ // Note that |task_safety_| needs to be at a greater scope than the task queue
+ // owned by |transport_send_| since calls might arrive on the network thread
+ // while Call is being deleted and the task queue is being torn down.
+ ScopedTaskSafety task_safety_;
+
// Caches transport_send_.get(), to avoid racing with destructor.
// Note that this is declared before transport_send_ to ensure that it is not
// invalidated until no more tasks can be running on the transport_send_ task
@@ -386,8 +433,8 @@ class Call final : public webrtc::Call,
// last ensures that it is destroyed first and any running tasks are finished.
std::unique_ptr<RtpTransportControllerSendInterface> transport_send_;
- bool is_target_rate_observer_registered_
- RTC_GUARDED_BY(&configuration_sequence_checker_) = false;
+ bool is_target_rate_observer_registered_ RTC_GUARDED_BY(worker_thread_) =
+ false;
RTC_DISALLOW_COPY_AND_ASSIGN(Call);
};
@@ -407,14 +454,20 @@ std::string Call::Stats::ToString(int64_t time_ms) const {
}
Call* Call::Create(const Call::Config& config) {
- return Create(config, Clock::GetRealTimeClock(),
- ProcessThread::Create("ModuleProcessThread"),
+ rtc::scoped_refptr<SharedModuleThread> call_thread =
+ SharedModuleThread::Create("ModuleProcessThread", nullptr);
+ return Create(config, std::move(call_thread));
+}
+
+Call* Call::Create(const Call::Config& config,
+ rtc::scoped_refptr<SharedModuleThread> call_thread) {
+ return Create(config, Clock::GetRealTimeClock(), std::move(call_thread),
ProcessThread::Create("PacerThread"));
}
Call* Call::Create(const Call::Config& config,
Clock* clock,
- std::unique_ptr<ProcessThread> call_thread,
+ rtc::scoped_refptr<SharedModuleThread> call_thread,
std::unique_ptr<ProcessThread> pacer_thread) {
RTC_DCHECK(config.task_queue_factory);
return new internal::Call(
@@ -426,6 +479,104 @@ Call* Call::Create(const Call::Config& config,
std::move(call_thread), config.task_queue_factory);
}
+class SharedModuleThread::Impl {
+ public:
+ Impl(std::unique_ptr<ProcessThread> process_thread,
+ std::function<void()> on_one_ref_remaining)
+ : module_thread_(std::move(process_thread)),
+ on_one_ref_remaining_(std::move(on_one_ref_remaining)) {}
+
+ void EnsureStarted() {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ if (started_)
+ return;
+ started_ = true;
+ module_thread_->Start();
+ }
+
+ ProcessThread* process_thread() {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ return module_thread_.get();
+ }
+
+ void AddRef() const {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ ++ref_count_;
+ }
+
+ rtc::RefCountReleaseStatus Release() const {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ --ref_count_;
+
+ if (ref_count_ == 0) {
+ module_thread_->Stop();
+ return rtc::RefCountReleaseStatus::kDroppedLastRef;
+ }
+
+ if (ref_count_ == 1 && on_one_ref_remaining_) {
+ auto moved_fn = std::move(on_one_ref_remaining_);
+ // NOTE: after this function returns, chances are that |this| has been
+ // deleted - do not touch any member variables.
+ // If the owner of the last reference implements a lambda that releases
+ // that last reference inside of the callback (which is legal according
+ // to this implementation), we will recursively enter Release() above,
+ // call Stop() and release the last reference.
+ moved_fn();
+ }
+
+ return rtc::RefCountReleaseStatus::kOtherRefsRemained;
+ }
+
+ private:
+ SequenceChecker sequence_checker_;
+ mutable int ref_count_ RTC_GUARDED_BY(sequence_checker_) = 0;
+ std::unique_ptr<ProcessThread> const module_thread_;
+ std::function<void()> const on_one_ref_remaining_;
+ bool started_ = false;
+};
+
+SharedModuleThread::SharedModuleThread(
+ std::unique_ptr<ProcessThread> process_thread,
+ std::function<void()> on_one_ref_remaining)
+ : impl_(std::make_unique<Impl>(std::move(process_thread),
+ std::move(on_one_ref_remaining))) {}
+
+SharedModuleThread::~SharedModuleThread() = default;
+
+// static
+rtc::scoped_refptr<SharedModuleThread> SharedModuleThread::Create(
+ const char* name,
+ std::function<void()> on_one_ref_remaining) {
+ return new SharedModuleThread(ProcessThread::Create(name),
+ std::move(on_one_ref_remaining));
+}
+
+rtc::scoped_refptr<SharedModuleThread> SharedModuleThread::Create(
+ std::unique_ptr<ProcessThread> process_thread,
+ std::function<void()> on_one_ref_remaining) {
+ return new SharedModuleThread(std::move(process_thread),
+ std::move(on_one_ref_remaining));
+}
+
+void SharedModuleThread::EnsureStarted() {
+ impl_->EnsureStarted();
+}
+
+ProcessThread* SharedModuleThread::process_thread() {
+ return impl_->process_thread();
+}
+
+void SharedModuleThread::AddRef() const {
+ impl_->AddRef();
+}
+
+rtc::RefCountReleaseStatus SharedModuleThread::Release() const {
+ auto ret = impl_->Release();
+ if (ret == rtc::RefCountReleaseStatus::kDroppedLastRef)
+ delete this;
+ return ret;
+}
+
// This method here to avoid subclasses has to implement this method.
// Call perf test will use Internal::Call::CreateVideoSendStream() to inject
// FecController.
@@ -441,20 +592,19 @@ namespace internal {
Call::Call(Clock* clock,
const Call::Config& config,
std::unique_ptr<RtpTransportControllerSendInterface> transport_send,
- std::unique_ptr<ProcessThread> module_process_thread,
+ rtc::scoped_refptr<SharedModuleThread> module_process_thread,
TaskQueueFactory* task_queue_factory)
: clock_(clock),
task_queue_factory_(task_queue_factory),
+ worker_thread_(GetCurrentTaskQueueOrThread()),
num_cpu_cores_(CpuInfo::DetectNumberOfCores()),
module_process_thread_(std::move(module_process_thread)),
- call_stats_(new CallStats(clock_, GetCurrentTaskQueueOrThread())),
+ call_stats_(new CallStats(clock_, worker_thread_)),
bitrate_allocator_(new BitrateAllocator(this)),
config_(config),
audio_network_state_(kNetworkDown),
video_network_state_(kNetworkDown),
aggregate_network_up_(false),
- receive_crit_(RWLockWrapper::CreateRWLock()),
- send_crit_(RWLockWrapper::CreateRWLock()),
event_log_(config.event_log),
received_bytes_per_second_counter_(clock_, nullptr, true),
received_audio_bytes_per_second_counter_(clock_, nullptr, true),
@@ -473,17 +623,18 @@ Call::Call(Clock* clock,
transport_send_(std::move(transport_send)) {
RTC_DCHECK(config.event_log != nullptr);
RTC_DCHECK(config.trials != nullptr);
- worker_sequence_checker_.Detach();
+ RTC_DCHECK(worker_thread_->IsCurrent());
call_stats_->RegisterStatsObserver(&receive_side_cc_);
- module_process_thread_->RegisterModule(
+ module_process_thread_->process_thread()->RegisterModule(
receive_side_cc_.GetRemoteBitrateEstimator(true), RTC_FROM_HERE);
- module_process_thread_->RegisterModule(&receive_side_cc_, RTC_FROM_HERE);
+ module_process_thread_->process_thread()->RegisterModule(&receive_side_cc_,
+ RTC_FROM_HERE);
}
Call::~Call() {
- RTC_DCHECK_RUN_ON(&configuration_sequence_checker_);
+ RTC_DCHECK_RUN_ON(worker_thread_);
RTC_CHECK(audio_send_ssrcs_.empty());
RTC_CHECK(video_send_ssrcs_.empty());
@@ -491,10 +642,9 @@ Call::~Call() {
RTC_CHECK(audio_receive_streams_.empty());
RTC_CHECK(video_receive_streams_.empty());
- module_process_thread_->Stop();
- module_process_thread_->DeRegisterModule(
+ module_process_thread_->process_thread()->DeRegisterModule(
receive_side_cc_.GetRemoteBitrateEstimator(true));
- module_process_thread_->DeRegisterModule(&receive_side_cc_);
+ module_process_thread_->process_thread()->DeRegisterModule(&receive_side_cc_);
call_stats_->DeregisterStatsObserver(&receive_side_cc_);
absl::optional<Timestamp> first_sent_packet_ms =
@@ -503,7 +653,6 @@ Call::~Call() {
// Only update histograms after process threads have been shut down, so that
// they won't try to concurrently update stats.
if (first_sent_packet_ms) {
- rtc::CritScope lock(&bitrate_crit_);
UpdateSendHistograms(*first_sent_packet_ms);
}
@@ -512,7 +661,7 @@ Call::~Call() {
}
void Call::RegisterRateObserver() {
- RTC_DCHECK_RUN_ON(&configuration_sequence_checker_);
+ RTC_DCHECK_RUN_ON(worker_thread_);
if (is_target_rate_observer_registered_)
return;
@@ -523,11 +672,11 @@ void Call::RegisterRateObserver() {
// off being kicked off on request rather than in the ctor.
transport_send_ptr_->RegisterTargetTransferRateObserver(this);
- module_process_thread_->Start();
+ module_process_thread_->EnsureStarted();
}
void Call::SetClientBitratePreferences(const BitrateSettings& preferences) {
- RTC_DCHECK_RUN_ON(&configuration_sequence_checker_);
+ RTC_DCHECK_RUN_ON(worker_thread_);
GetTransportControllerSend()->SetClientBitratePreferences(preferences);
}
@@ -609,14 +758,14 @@ void Call::UpdateReceiveHistograms() {
}
PacketReceiver* Call::Receiver() {
- RTC_DCHECK_RUN_ON(&configuration_sequence_checker_);
+ RTC_DCHECK_RUN_ON(worker_thread_);
return this;
}
webrtc::AudioSendStream* Call::CreateAudioSendStream(
const webrtc::AudioSendStream::Config& config) {
TRACE_EVENT0("webrtc", "Call::CreateAudioSendStream");
- RTC_DCHECK_RUN_ON(&configuration_sequence_checker_);
+ RTC_DCHECK_RUN_ON(worker_thread_);
RegisterRateObserver();
@@ -632,30 +781,26 @@ webrtc::AudioSendStream* Call::CreateAudioSendStream(
AudioSendStream* send_stream = new AudioSendStream(
clock_, config, config_.audio_state, task_queue_factory_,
- module_process_thread_.get(), transport_send_ptr_,
+ module_process_thread_->process_thread(), transport_send_ptr_,
bitrate_allocator_.get(), event_log_, call_stats_->AsRtcpRttStats(),
suspended_rtp_state);
- {
- WriteLockScoped write_lock(*send_crit_);
- RTC_DCHECK(audio_send_ssrcs_.find(config.rtp.ssrc) ==
- audio_send_ssrcs_.end());
- audio_send_ssrcs_[config.rtp.ssrc] = send_stream;
- }
- {
- ReadLockScoped read_lock(*receive_crit_);
- for (AudioReceiveStream* stream : audio_receive_streams_) {
- if (stream->config().rtp.local_ssrc == config.rtp.ssrc) {
- stream->AssociateSendStream(send_stream);
- }
+ RTC_DCHECK(audio_send_ssrcs_.find(config.rtp.ssrc) ==
+ audio_send_ssrcs_.end());
+ audio_send_ssrcs_[config.rtp.ssrc] = send_stream;
+
+ for (AudioReceiveStream* stream : audio_receive_streams_) {
+ if (stream->config().rtp.local_ssrc == config.rtp.ssrc) {
+ stream->AssociateSendStream(send_stream);
}
}
+
UpdateAggregateNetworkState();
return send_stream;
}
void Call::DestroyAudioSendStream(webrtc::AudioSendStream* send_stream) {
TRACE_EVENT0("webrtc", "Call::DestroyAudioSendStream");
- RTC_DCHECK_RUN_ON(&configuration_sequence_checker_);
+ RTC_DCHECK_RUN_ON(worker_thread_);
RTC_DCHECK(send_stream != nullptr);
send_stream->Stop();
@@ -664,19 +809,16 @@ void Call::DestroyAudioSendStream(webrtc::AudioSendStream* send_stream) {
webrtc::internal::AudioSendStream* audio_send_stream =
static_cast<webrtc::internal::AudioSendStream*>(send_stream);
suspended_audio_send_ssrcs_[ssrc] = audio_send_stream->GetRtpState();
- {
- WriteLockScoped write_lock(*send_crit_);
- size_t num_deleted = audio_send_ssrcs_.erase(ssrc);
- RTC_DCHECK_EQ(1, num_deleted);
- }
- {
- ReadLockScoped read_lock(*receive_crit_);
- for (AudioReceiveStream* stream : audio_receive_streams_) {
- if (stream->config().rtp.local_ssrc == ssrc) {
- stream->AssociateSendStream(nullptr);
- }
+
+ size_t num_deleted = audio_send_ssrcs_.erase(ssrc);
+ RTC_DCHECK_EQ(1, num_deleted);
+
+ for (AudioReceiveStream* stream : audio_receive_streams_) {
+ if (stream->config().rtp.local_ssrc == ssrc) {
+ stream->AssociateSendStream(nullptr);
}
}
+
UpdateAggregateNetworkState();
delete send_stream;
}
@@ -684,29 +826,25 @@ void Call::DestroyAudioSendStream(webrtc::AudioSendStream* send_stream) {
webrtc::AudioReceiveStream* Call::CreateAudioReceiveStream(
const webrtc::AudioReceiveStream::Config& config) {
TRACE_EVENT0("webrtc", "Call::CreateAudioReceiveStream");
- RTC_DCHECK_RUN_ON(&configuration_sequence_checker_);
+ RTC_DCHECK_RUN_ON(worker_thread_);
RegisterRateObserver();
event_log_->Log(std::make_unique<RtcEventAudioReceiveStreamConfig>(
CreateRtcLogStreamConfig(config)));
AudioReceiveStream* receive_stream = new AudioReceiveStream(
clock_, &audio_receiver_controller_, transport_send_ptr_->packet_router(),
- module_process_thread_.get(), config_.neteq_factory, config,
+ module_process_thread_->process_thread(), config_.neteq_factory, config,
config_.audio_state, event_log_);
- {
- WriteLockScoped write_lock(*receive_crit_);
- receive_rtp_config_.emplace(config.rtp.remote_ssrc,
- ReceiveRtpConfig(config));
- audio_receive_streams_.insert(receive_stream);
- ConfigureSync(config.sync_group);
- }
- {
- ReadLockScoped read_lock(*send_crit_);
- auto it = audio_send_ssrcs_.find(config.rtp.local_ssrc);
- if (it != audio_send_ssrcs_.end()) {
- receive_stream->AssociateSendStream(it->second);
- }
+ receive_rtp_config_.emplace(config.rtp.remote_ssrc, ReceiveRtpConfig(config));
+ audio_receive_streams_.insert(receive_stream);
+
+ ConfigureSync(config.sync_group);
+
+ auto it = audio_send_ssrcs_.find(config.rtp.local_ssrc);
+ if (it != audio_send_ssrcs_.end()) {
+ receive_stream->AssociateSendStream(it->second);
}
+
UpdateAggregateNetworkState();
return receive_stream;
}
@@ -714,26 +852,24 @@ webrtc::AudioReceiveStream* Call::CreateAudioReceiveStream(
void Call::DestroyAudioReceiveStream(
webrtc::AudioReceiveStream* receive_stream) {
TRACE_EVENT0("webrtc", "Call::DestroyAudioReceiveStream");
- RTC_DCHECK_RUN_ON(&configuration_sequence_checker_);
+ RTC_DCHECK_RUN_ON(worker_thread_);
RTC_DCHECK(receive_stream != nullptr);
webrtc::internal::AudioReceiveStream* audio_receive_stream =
static_cast<webrtc::internal::AudioReceiveStream*>(receive_stream);
- {
- WriteLockScoped write_lock(*receive_crit_);
- const AudioReceiveStream::Config& config = audio_receive_stream->config();
- uint32_t ssrc = config.rtp.remote_ssrc;
- receive_side_cc_.GetRemoteBitrateEstimator(UseSendSideBwe(config))
- ->RemoveStream(ssrc);
- audio_receive_streams_.erase(audio_receive_stream);
- const std::string& sync_group = audio_receive_stream->config().sync_group;
- const auto it = sync_stream_mapping_.find(sync_group);
- if (it != sync_stream_mapping_.end() &&
- it->second == audio_receive_stream) {
- sync_stream_mapping_.erase(it);
- ConfigureSync(sync_group);
- }
- receive_rtp_config_.erase(ssrc);
+
+ const AudioReceiveStream::Config& config = audio_receive_stream->config();
+ uint32_t ssrc = config.rtp.remote_ssrc;
+ receive_side_cc_.GetRemoteBitrateEstimator(UseSendSideBwe(config))
+ ->RemoveStream(ssrc);
+ audio_receive_streams_.erase(audio_receive_stream);
+ const std::string& sync_group = audio_receive_stream->config().sync_group;
+ const auto it = sync_stream_mapping_.find(sync_group);
+ if (it != sync_stream_mapping_.end() && it->second == audio_receive_stream) {
+ sync_stream_mapping_.erase(it);
+ ConfigureSync(sync_group);
}
+ receive_rtp_config_.erase(ssrc);
+
UpdateAggregateNetworkState();
delete audio_receive_stream;
}
@@ -744,7 +880,7 @@ webrtc::VideoSendStream* Call::CreateVideoSendStream(
VideoEncoderConfig encoder_config,
std::unique_ptr<FecController> fec_controller) {
TRACE_EVENT0("webrtc", "Call::CreateVideoSendStream");
- RTC_DCHECK_RUN_ON(&configuration_sequence_checker_);
+ RTC_DCHECK_RUN_ON(worker_thread_);
RegisterRateObserver();
@@ -761,20 +897,22 @@ webrtc::VideoSendStream* Call::CreateVideoSendStream(
std::vector<uint32_t> ssrcs = config.rtp.ssrcs;
VideoSendStream* send_stream = new VideoSendStream(
- clock_, num_cpu_cores_, module_process_thread_.get(), task_queue_factory_,
- call_stats_->AsRtcpRttStats(), transport_send_ptr_,
+ clock_, num_cpu_cores_, module_process_thread_->process_thread(),
+ task_queue_factory_, call_stats_->AsRtcpRttStats(), transport_send_ptr_,
bitrate_allocator_.get(), video_send_delay_stats_.get(), event_log_,
std::move(config), std::move(encoder_config), suspended_video_send_ssrcs_,
suspended_video_payload_states_, std::move(fec_controller));
- {
- WriteLockScoped write_lock(*send_crit_);
- for (uint32_t ssrc : ssrcs) {
- RTC_DCHECK(video_send_ssrcs_.find(ssrc) == video_send_ssrcs_.end());
- video_send_ssrcs_[ssrc] = send_stream;
- }
- video_send_streams_.insert(send_stream);
+ for (uint32_t ssrc : ssrcs) {
+ RTC_DCHECK(video_send_ssrcs_.find(ssrc) == video_send_ssrcs_.end());
+ video_send_ssrcs_[ssrc] = send_stream;
+ }
+ video_send_streams_.insert(send_stream);
+ // Forward resources that were previously added to the call to the new stream.
+ for (const auto& resource_forwarder : adaptation_resource_forwarders_) {
+ resource_forwarder->OnCreateVideoSendStream(send_stream);
}
+
UpdateAggregateNetworkState();
return send_stream;
@@ -797,24 +935,27 @@ webrtc::VideoSendStream* Call::CreateVideoSendStream(
void Call::DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) {
TRACE_EVENT0("webrtc", "Call::DestroyVideoSendStream");
RTC_DCHECK(send_stream != nullptr);
- RTC_DCHECK_RUN_ON(&configuration_sequence_checker_);
+ RTC_DCHECK_RUN_ON(worker_thread_);
send_stream->Stop();
VideoSendStream* send_stream_impl = nullptr;
- {
- WriteLockScoped write_lock(*send_crit_);
- auto it = video_send_ssrcs_.begin();
- while (it != video_send_ssrcs_.end()) {
- if (it->second == static_cast<VideoSendStream*>(send_stream)) {
- send_stream_impl = it->second;
- video_send_ssrcs_.erase(it++);
- } else {
- ++it;
- }
+
+ auto it = video_send_ssrcs_.begin();
+ while (it != video_send_ssrcs_.end()) {
+ if (it->second == static_cast<VideoSendStream*>(send_stream)) {
+ send_stream_impl = it->second;
+ video_send_ssrcs_.erase(it++);
+ } else {
+ ++it;
}
- video_send_streams_.erase(send_stream_impl);
}
+ // Stop forwarding resources to the stream being destroyed.
+ for (const auto& resource_forwarder : adaptation_resource_forwarders_) {
+ resource_forwarder->OnDestroyVideoSendStream(send_stream_impl);
+ }
+ video_send_streams_.erase(send_stream_impl);
+
RTC_CHECK(send_stream_impl != nullptr);
VideoSendStream::RtpStateMap rtp_states;
@@ -835,7 +976,7 @@ void Call::DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) {
webrtc::VideoReceiveStream* Call::CreateVideoReceiveStream(
webrtc::VideoReceiveStream::Config configuration) {
TRACE_EVENT0("webrtc", "Call::CreateVideoReceiveStream");
- RTC_DCHECK_RUN_ON(&configuration_sequence_checker_);
+ RTC_DCHECK_RUN_ON(worker_thread_);
receive_side_cc_.SetSendPeriodicFeedback(
SendPeriodicFeedback(configuration.rtp.extensions));
@@ -847,25 +988,21 @@ webrtc::VideoReceiveStream* Call::CreateVideoReceiveStream(
VideoReceiveStream2* receive_stream = new VideoReceiveStream2(
task_queue_factory_, current, &video_receiver_controller_, num_cpu_cores_,
transport_send_ptr_->packet_router(), std::move(configuration),
- module_process_thread_.get(), call_stats_.get(), clock_,
+ module_process_thread_->process_thread(), call_stats_.get(), clock_,
new VCMTiming(clock_));
const webrtc::VideoReceiveStream::Config& config = receive_stream->config();
- {
- WriteLockScoped write_lock(*receive_crit_);
- if (config.rtp.rtx_ssrc) {
- // We record identical config for the rtx stream as for the main
- // stream. Since the transport_send_cc negotiation is per payload
- // type, we may get an incorrect value for the rtx stream, but
- // that is unlikely to matter in practice.
- receive_rtp_config_.emplace(config.rtp.rtx_ssrc,
- ReceiveRtpConfig(config));
- }
- receive_rtp_config_.emplace(config.rtp.remote_ssrc,
- ReceiveRtpConfig(config));
- video_receive_streams_.insert(receive_stream);
- ConfigureSync(config.sync_group);
+ if (config.rtp.rtx_ssrc) {
+ // We record identical config for the rtx stream as for the main
+ // stream. Since the transport_send_cc negotiation is per payload
+ // type, we may get an incorrect value for the rtx stream, but
+ // that is unlikely to matter in practice.
+ receive_rtp_config_.emplace(config.rtp.rtx_ssrc, ReceiveRtpConfig(config));
}
+ receive_rtp_config_.emplace(config.rtp.remote_ssrc, ReceiveRtpConfig(config));
+ video_receive_streams_.insert(receive_stream);
+ ConfigureSync(config.sync_group);
+
receive_stream->SignalNetworkState(video_network_state_);
UpdateAggregateNetworkState();
event_log_->Log(std::make_unique<RtcEventVideoReceiveStreamConfig>(
@@ -876,22 +1013,20 @@ webrtc::VideoReceiveStream* Call::CreateVideoReceiveStream(
void Call::DestroyVideoReceiveStream(
webrtc::VideoReceiveStream* receive_stream) {
TRACE_EVENT0("webrtc", "Call::DestroyVideoReceiveStream");
- RTC_DCHECK_RUN_ON(&configuration_sequence_checker_);
+ RTC_DCHECK_RUN_ON(worker_thread_);
RTC_DCHECK(receive_stream != nullptr);
VideoReceiveStream2* receive_stream_impl =
static_cast<VideoReceiveStream2*>(receive_stream);
const VideoReceiveStream::Config& config = receive_stream_impl->config();
- {
- WriteLockScoped write_lock(*receive_crit_);
- // Remove all ssrcs pointing to a receive stream. As RTX retransmits on a
- // separate SSRC there can be either one or two.
- receive_rtp_config_.erase(config.rtp.remote_ssrc);
- if (config.rtp.rtx_ssrc) {
- receive_rtp_config_.erase(config.rtp.rtx_ssrc);
- }
- video_receive_streams_.erase(receive_stream_impl);
- ConfigureSync(config.sync_group);
+
+ // Remove all ssrcs pointing to a receive stream. As RTX retransmits on a
+ // separate SSRC there can be either one or two.
+ receive_rtp_config_.erase(config.rtp.remote_ssrc);
+ if (config.rtp.rtx_ssrc) {
+ receive_rtp_config_.erase(config.rtp.rtx_ssrc);
}
+ video_receive_streams_.erase(receive_stream_impl);
+ ConfigureSync(config.sync_group);
receive_side_cc_.GetRemoteBitrateEstimator(UseSendSideBwe(config))
->RemoveStream(config.rtp.remote_ssrc);
@@ -903,30 +1038,25 @@ void Call::DestroyVideoReceiveStream(
FlexfecReceiveStream* Call::CreateFlexfecReceiveStream(
const FlexfecReceiveStream::Config& config) {
TRACE_EVENT0("webrtc", "Call::CreateFlexfecReceiveStream");
- RTC_DCHECK_RUN_ON(&configuration_sequence_checker_);
+ RTC_DCHECK_RUN_ON(worker_thread_);
RecoveredPacketReceiver* recovered_packet_receiver = this;
FlexfecReceiveStreamImpl* receive_stream;
- {
- WriteLockScoped write_lock(*receive_crit_);
- // Unlike the video and audio receive streams,
- // FlexfecReceiveStream implements RtpPacketSinkInterface itself,
- // and hence its constructor passes its |this| pointer to
- // video_receiver_controller_->CreateStream(). Calling the
- // constructor while holding |receive_crit_| ensures that we don't
- // call OnRtpPacket until the constructor is finished and the
- // object is in a valid state.
- // TODO(nisse): Fix constructor so that it can be moved outside of
- // this locked scope.
- receive_stream = new FlexfecReceiveStreamImpl(
- clock_, &video_receiver_controller_, config, recovered_packet_receiver,
- call_stats_->AsRtcpRttStats(), module_process_thread_.get());
-
- RTC_DCHECK(receive_rtp_config_.find(config.remote_ssrc) ==
- receive_rtp_config_.end());
- receive_rtp_config_.emplace(config.remote_ssrc, ReceiveRtpConfig(config));
- }
+
+ // Unlike the video and audio receive streams, FlexfecReceiveStream implements
+ // RtpPacketSinkInterface itself, and hence its constructor passes its |this|
+ // pointer to video_receiver_controller_->CreateStream(). Calling the
+ // constructor while on the worker thread ensures that we don't call
+ // OnRtpPacket until the constructor is finished and the object is
+ // in a valid state, since OnRtpPacket runs on the same thread.
+ receive_stream = new FlexfecReceiveStreamImpl(
+ clock_, &video_receiver_controller_, config, recovered_packet_receiver,
+ call_stats_->AsRtcpRttStats(), module_process_thread_->process_thread());
+
+ RTC_DCHECK(receive_rtp_config_.find(config.remote_ssrc) ==
+ receive_rtp_config_.end());
+ receive_rtp_config_.emplace(config.remote_ssrc, ReceiveRtpConfig(config));
// TODO(brandtr): Store config in RtcEventLog here.
@@ -935,39 +1065,37 @@ FlexfecReceiveStream* Call::CreateFlexfecReceiveStream(
void Call::DestroyFlexfecReceiveStream(FlexfecReceiveStream* receive_stream) {
TRACE_EVENT0("webrtc", "Call::DestroyFlexfecReceiveStream");
- RTC_DCHECK_RUN_ON(&configuration_sequence_checker_);
+ RTC_DCHECK_RUN_ON(worker_thread_);
RTC_DCHECK(receive_stream != nullptr);
- {
- WriteLockScoped write_lock(*receive_crit_);
+ const FlexfecReceiveStream::Config& config = receive_stream->GetConfig();
+ uint32_t ssrc = config.remote_ssrc;
+ receive_rtp_config_.erase(ssrc);
- const FlexfecReceiveStream::Config& config = receive_stream->GetConfig();
- uint32_t ssrc = config.remote_ssrc;
- receive_rtp_config_.erase(ssrc);
-
- // Remove all SSRCs pointing to the FlexfecReceiveStreamImpl to be
- // destroyed.
- receive_side_cc_.GetRemoteBitrateEstimator(UseSendSideBwe(config))
- ->RemoveStream(ssrc);
- }
+ // Remove all SSRCs pointing to the FlexfecReceiveStreamImpl to be
+ // destroyed.
+ receive_side_cc_.GetRemoteBitrateEstimator(UseSendSideBwe(config))
+ ->RemoveStream(ssrc);
delete receive_stream;
}
+void Call::AddAdaptationResource(rtc::scoped_refptr<Resource> resource) {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ adaptation_resource_forwarders_.push_back(
+ std::make_unique<ResourceVideoSendStreamForwarder>(resource));
+ const auto& resource_forwarder = adaptation_resource_forwarders_.back();
+ for (VideoSendStream* send_stream : video_send_streams_) {
+ resource_forwarder->OnCreateVideoSendStream(send_stream);
+ }
+}
+
RtpTransportControllerSendInterface* Call::GetTransportControllerSend() {
return transport_send_ptr_;
}
Call::Stats Call::GetStats() const {
- RTC_DCHECK_RUN_ON(&configuration_sequence_checker_);
-
- // TODO(tommi): The following stats are managed on the process thread:
- // - pacer_delay_ms (PacedSender::Process)
- // - rtt_ms
- // - recv_bandwidth_bps
- // These are delivered on the network TQ:
- // - send_bandwidth_bps (see OnTargetTransferRate)
- // - max_padding_bitrate_bps (see OnAllocationLimitsChanged)
+ RTC_DCHECK_RUN_ON(worker_thread_);
Stats stats;
// TODO(srte): It is unclear if we only want to report queues if network is
@@ -983,22 +1111,14 @@ Call::Stats Call::GetStats() const {
receive_side_cc_.GetRemoteBitrateEstimator(false)->LatestEstimate(
&ssrcs, &recv_bandwidth);
stats.recv_bandwidth_bps = recv_bandwidth;
-
- {
- rtc::CritScope cs(&last_bandwidth_bps_crit_);
- stats.send_bandwidth_bps = last_bandwidth_bps_;
- }
-
- {
- rtc::CritScope cs(&bitrate_crit_);
- stats.max_padding_bitrate_bps = configured_max_padding_bitrate_bps_;
- }
+ stats.send_bandwidth_bps = last_bandwidth_bps_;
+ stats.max_padding_bitrate_bps = configured_max_padding_bitrate_bps_;
return stats;
}
void Call::SignalChannelNetworkState(MediaType media, NetworkState state) {
- RTC_DCHECK_RUN_ON(&configuration_sequence_checker_);
+ RTC_DCHECK_RUN_ON(worker_thread_);
switch (media) {
case MediaType::AUDIO:
audio_network_state_ = state;
@@ -1013,40 +1133,25 @@ void Call::SignalChannelNetworkState(MediaType media, NetworkState state) {
}
UpdateAggregateNetworkState();
- {
- ReadLockScoped read_lock(*receive_crit_);
- for (VideoReceiveStream2* video_receive_stream : video_receive_streams_) {
- video_receive_stream->SignalNetworkState(video_network_state_);
- }
+ for (VideoReceiveStream2* video_receive_stream : video_receive_streams_) {
+ video_receive_stream->SignalNetworkState(video_network_state_);
}
}
void Call::OnAudioTransportOverheadChanged(int transport_overhead_per_packet) {
- ReadLockScoped read_lock(*send_crit_);
+ RTC_DCHECK_RUN_ON(worker_thread_);
for (auto& kv : audio_send_ssrcs_) {
kv.second->SetTransportOverhead(transport_overhead_per_packet);
}
}
void Call::UpdateAggregateNetworkState() {
- RTC_DCHECK_RUN_ON(&configuration_sequence_checker_);
+ RTC_DCHECK_RUN_ON(worker_thread_);
- bool have_audio = false;
- bool have_video = false;
- {
- ReadLockScoped read_lock(*send_crit_);
- if (!audio_send_ssrcs_.empty())
- have_audio = true;
- if (!video_send_ssrcs_.empty())
- have_video = true;
- }
- {
- ReadLockScoped read_lock(*receive_crit_);
- if (!audio_receive_streams_.empty())
- have_audio = true;
- if (!video_receive_streams_.empty())
- have_video = true;
- }
+ bool have_audio =
+ !audio_send_ssrcs_.empty() || !audio_receive_streams_.empty();
+ bool have_video =
+ !video_send_ssrcs_.empty() || !video_receive_streams_.empty();
bool aggregate_network_up =
((have_video && video_network_state_ == kNetworkUp) ||
@@ -1073,61 +1178,50 @@ void Call::OnSentPacket(const rtc::SentPacket& sent_packet) {
}
void Call::OnStartRateUpdate(DataRate start_rate) {
- RTC_DCHECK(network_queue()->IsCurrent());
+ RTC_DCHECK_RUN_ON(send_transport_queue());
bitrate_allocator_->UpdateStartRate(start_rate.bps<uint32_t>());
}
void Call::OnTargetTransferRate(TargetTransferRate msg) {
- RTC_DCHECK(network_queue()->IsCurrent());
- RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
- {
- rtc::CritScope cs(&last_bandwidth_bps_crit_);
- last_bandwidth_bps_ = msg.target_rate.bps();
- }
+ RTC_DCHECK_RUN_ON(send_transport_queue());
uint32_t target_bitrate_bps = msg.target_rate.bps();
// For controlling the rate of feedback messages.
receive_side_cc_.OnBitrateChanged(target_bitrate_bps);
bitrate_allocator_->OnNetworkEstimateChanged(msg);
- // Ignore updates if bitrate is zero (the aggregate network state is down).
- if (target_bitrate_bps == 0) {
- rtc::CritScope lock(&bitrate_crit_);
- estimated_send_bitrate_kbps_counter_.ProcessAndPause();
- pacer_bitrate_kbps_counter_.ProcessAndPause();
- return;
- }
-
- bool sending_video;
- {
- ReadLockScoped read_lock(*send_crit_);
- sending_video = !video_send_streams_.empty();
- }
+ worker_thread_->PostTask(
+ ToQueuedTask(task_safety_, [this, target_bitrate_bps]() {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ last_bandwidth_bps_ = target_bitrate_bps;
+
+ // Ignore updates if bitrate is zero (the aggregate network state is
+ // down) or if we're not sending video.
+ if (target_bitrate_bps == 0 || video_send_streams_.empty()) {
+ estimated_send_bitrate_kbps_counter_.ProcessAndPause();
+ pacer_bitrate_kbps_counter_.ProcessAndPause();
+ return;
+ }
- rtc::CritScope lock(&bitrate_crit_);
- if (!sending_video) {
- // Do not update the stats if we are not sending video.
- estimated_send_bitrate_kbps_counter_.ProcessAndPause();
- pacer_bitrate_kbps_counter_.ProcessAndPause();
- return;
- }
- estimated_send_bitrate_kbps_counter_.Add(target_bitrate_bps / 1000);
- // Pacer bitrate may be higher than bitrate estimate if enforcing min bitrate.
- uint32_t pacer_bitrate_bps =
- std::max(target_bitrate_bps, min_allocated_send_bitrate_bps_);
- pacer_bitrate_kbps_counter_.Add(pacer_bitrate_bps / 1000);
+ estimated_send_bitrate_kbps_counter_.Add(target_bitrate_bps / 1000);
+ // Pacer bitrate may be higher than bitrate estimate if enforcing min
+ // bitrate.
+ uint32_t pacer_bitrate_bps =
+ std::max(target_bitrate_bps, min_allocated_send_bitrate_bps_);
+ pacer_bitrate_kbps_counter_.Add(pacer_bitrate_bps / 1000);
+ }));
}
void Call::OnAllocationLimitsChanged(BitrateAllocationLimits limits) {
- RTC_DCHECK(network_queue()->IsCurrent());
- RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
+ RTC_DCHECK_RUN_ON(send_transport_queue());
transport_send_ptr_->SetAllocatedSendBitrateLimits(limits);
- min_allocated_send_bitrate_bps_ = limits.min_allocatable_rate.bps();
-
- rtc::CritScope lock(&bitrate_crit_);
- configured_max_padding_bitrate_bps_ = limits.max_padding_rate.bps();
+ worker_thread_->PostTask(ToQueuedTask(task_safety_, [this, limits]() {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ min_allocated_send_bitrate_bps_ = limits.min_allocatable_rate.bps();
+ configured_max_padding_bitrate_bps_ = limits.max_padding_rate.bps();
+ }));
}
void Call::ConfigureSync(const std::string& sync_group) {
@@ -1194,28 +1288,24 @@ PacketReceiver::DeliveryStatus Call::DeliverRtcp(MediaType media_type,
}
bool rtcp_delivered = false;
if (media_type == MediaType::ANY || media_type == MediaType::VIDEO) {
- ReadLockScoped read_lock(*receive_crit_);
for (VideoReceiveStream2* stream : video_receive_streams_) {
if (stream->DeliverRtcp(packet, length))
rtcp_delivered = true;
}
}
if (media_type == MediaType::ANY || media_type == MediaType::AUDIO) {
- ReadLockScoped read_lock(*receive_crit_);
for (AudioReceiveStream* stream : audio_receive_streams_) {
stream->DeliverRtcp(packet, length);
rtcp_delivered = true;
}
}
if (media_type == MediaType::ANY || media_type == MediaType::VIDEO) {
- ReadLockScoped read_lock(*send_crit_);
for (VideoSendStream* stream : video_send_streams_) {
stream->DeliverRtcp(packet, length);
rtcp_delivered = true;
}
}
if (media_type == MediaType::ANY || media_type == MediaType::AUDIO) {
- ReadLockScoped read_lock(*send_crit_);
for (auto& kv : audio_send_ssrcs_) {
kv.second->DeliverRtcp(packet, length);
rtcp_delivered = true;
@@ -1259,17 +1349,15 @@ PacketReceiver::DeliveryStatus Call::DeliverRtp(MediaType media_type,
RTC_DCHECK(media_type == MediaType::AUDIO || media_type == MediaType::VIDEO ||
is_keep_alive_packet);
- ReadLockScoped read_lock(*receive_crit_);
auto it = receive_rtp_config_.find(parsed_packet.Ssrc());
if (it == receive_rtp_config_.end()) {
RTC_LOG(LS_ERROR) << "receive_rtp_config_ lookup failed for ssrc "
<< parsed_packet.Ssrc();
// Destruction of the receive stream, including deregistering from the
- // RtpDemuxer, is not protected by the |receive_crit_| lock. But
- // deregistering in the |receive_rtp_config_| map is protected by that lock.
- // So by not passing the packet on to demuxing in this case, we prevent
- // incoming packets to be passed on via the demuxer to a receive stream
- // which is being torned down.
+ // RtpDemuxer, is not protected by the |worker_thread_|.
+ // But deregistering in the |receive_rtp_config_| map is. So by not passing
+ // the packet on to demuxing in this case, we prevent incoming packets to be
+  // passed on via the demuxer to a receive stream which is being torn down.
return DELIVERY_UNKNOWN_SSRC;
}
@@ -1315,7 +1403,8 @@ PacketReceiver::DeliveryStatus Call::DeliverPacket(
MediaType media_type,
rtc::CopyOnWriteBuffer packet,
int64_t packet_time_us) {
- RTC_DCHECK_RUN_ON(&configuration_sequence_checker_);
+ RTC_DCHECK_RUN_ON(worker_thread_);
+
if (IsRtcp(packet.cdata(), packet.size()))
return DeliverRtcp(media_type, packet.cdata(), packet.size());
@@ -1323,20 +1412,20 @@ PacketReceiver::DeliveryStatus Call::DeliverPacket(
}
void Call::OnRecoveredPacket(const uint8_t* packet, size_t length) {
+ RTC_DCHECK_RUN_ON(worker_thread_);
RtpPacketReceived parsed_packet;
if (!parsed_packet.Parse(packet, length))
return;
parsed_packet.set_recovered(true);
- ReadLockScoped read_lock(*receive_crit_);
auto it = receive_rtp_config_.find(parsed_packet.Ssrc());
if (it == receive_rtp_config_.end()) {
RTC_LOG(LS_ERROR) << "receive_rtp_config_ lookup failed for ssrc "
<< parsed_packet.Ssrc();
// Destruction of the receive stream, including deregistering from the
- // RtpDemuxer, is not protected by the |receive_crit_| lock. But
- // deregistering in the |receive_rtp_config_| map is protected by that lock.
+ // RtpDemuxer, is not protected by the |worker_thread_|.
+ // But deregistering in the |receive_rtp_config_| map is.
// So by not passing the packet on to demuxing in this case, we prevent
// incoming packets to be passed on via the demuxer to a receive stream
// which is being torn down.
diff --git a/chromium/third_party/webrtc/call/call.h b/chromium/third_party/webrtc/call/call.h
index 77cd3d26901..af9111826ce 100644
--- a/chromium/third_party/webrtc/call/call.h
+++ b/chromium/third_party/webrtc/call/call.h
@@ -15,6 +15,7 @@
#include <string>
#include <vector>
+#include "api/adaptation/resource.h"
#include "api/media_types.h"
#include "call/audio_receive_stream.h"
#include "call/audio_send_stream.h"
@@ -28,9 +29,46 @@
#include "rtc_base/copy_on_write_buffer.h"
#include "rtc_base/network/sent_packet.h"
#include "rtc_base/network_route.h"
+#include "rtc_base/ref_count.h"
namespace webrtc {
+// A restricted way to share the module process thread across multiple instances
+// of Call that are constructed on the same worker thread (which is what the
+// peer connection factory guarantees).
+// SharedModuleThread supports a callback that is issued when only one reference
+// remains, which is used to indicate to the original owner that the thread may
+// be discarded.
+class SharedModuleThread : public rtc::RefCountInterface {
+ protected:
+ SharedModuleThread(std::unique_ptr<ProcessThread> process_thread,
+ std::function<void()> on_one_ref_remaining);
+ friend class rtc::scoped_refptr<SharedModuleThread>;
+ ~SharedModuleThread() override;
+
+ public:
+ // Instantiates a default implementation of ProcessThread.
+ static rtc::scoped_refptr<SharedModuleThread> Create(
+ const char* name,
+ std::function<void()> on_one_ref_remaining);
+
+ // Allows injection of an externally created process thread.
+ static rtc::scoped_refptr<SharedModuleThread> Create(
+ std::unique_ptr<ProcessThread> process_thread,
+ std::function<void()> on_one_ref_remaining);
+
+ void EnsureStarted();
+
+ ProcessThread* process_thread();
+
+ private:
+ void AddRef() const override;
+ rtc::RefCountReleaseStatus Release() const override;
+
+ class Impl;
+ mutable std::unique_ptr<Impl> impl_;
+};
+
// A Call instance can contain several send and/or receive streams. All streams
// are assumed to have the same remote endpoint and will share bitrate estimates
// etc.
@@ -50,8 +88,10 @@ class Call {
static Call* Create(const Call::Config& config);
static Call* Create(const Call::Config& config,
+ rtc::scoped_refptr<SharedModuleThread> call_thread);
+ static Call* Create(const Call::Config& config,
Clock* clock,
- std::unique_ptr<ProcessThread> call_thread,
+ rtc::scoped_refptr<SharedModuleThread> call_thread,
std::unique_ptr<ProcessThread> pacer_thread);
virtual AudioSendStream* CreateAudioSendStream(
@@ -86,6 +126,11 @@ class Call {
virtual void DestroyFlexfecReceiveStream(
FlexfecReceiveStream* receive_stream) = 0;
+ // When a resource is overused, the Call will try to reduce the load on the
+  // system, for example by reducing the resolution or frame rate of encoded
+ // streams.
+ virtual void AddAdaptationResource(rtc::scoped_refptr<Resource> resource) = 0;
+
// All received RTP and RTCP packets for the call should be inserted to this
// PacketReceiver. The PacketReceiver pointer is valid as long as the
// Call instance exists.
diff --git a/chromium/third_party/webrtc/call/call_factory.cc b/chromium/third_party/webrtc/call/call_factory.cc
index 6b4f4197425..a3ebc47c6b3 100644
--- a/chromium/third_party/webrtc/call/call_factory.cc
+++ b/chromium/third_party/webrtc/call/call_factory.cc
@@ -70,7 +70,12 @@ absl::optional<webrtc::BuiltInNetworkBehaviorConfig> ParseDegradationConfig(
}
} // namespace
+CallFactory::CallFactory() {
+ call_thread_.Detach();
+}
+
Call* CallFactory::CreateCall(const Call::Config& config) {
+ RTC_DCHECK_RUN_ON(&call_thread_);
absl::optional<webrtc::BuiltInNetworkBehaviorConfig> send_degradation_config =
ParseDegradationConfig(true);
absl::optional<webrtc::BuiltInNetworkBehaviorConfig>
@@ -82,7 +87,14 @@ Call* CallFactory::CreateCall(const Call::Config& config) {
config.task_queue_factory);
}
- return Call::Create(config);
+ if (!module_thread_) {
+ module_thread_ = SharedModuleThread::Create("SharedModThread", [this]() {
+ RTC_DCHECK_RUN_ON(&call_thread_);
+ module_thread_ = nullptr;
+ });
+ }
+
+ return Call::Create(config, module_thread_);
}
std::unique_ptr<CallFactoryInterface> CreateCallFactory() {
diff --git a/chromium/third_party/webrtc/call/call_factory.h b/chromium/third_party/webrtc/call/call_factory.h
index f0d695c915b..65c0b6532ab 100644
--- a/chromium/third_party/webrtc/call/call_factory.h
+++ b/chromium/third_party/webrtc/call/call_factory.h
@@ -14,13 +14,22 @@
#include "api/call/call_factory_interface.h"
#include "call/call.h"
#include "call/call_config.h"
+#include "rtc_base/synchronization/sequence_checker.h"
namespace webrtc {
class CallFactory : public CallFactoryInterface {
+ public:
+ CallFactory();
+
+ private:
~CallFactory() override {}
Call* CreateCall(const CallConfig& config) override;
+
+ SequenceChecker call_thread_;
+ rtc::scoped_refptr<SharedModuleThread> module_thread_
+ RTC_GUARDED_BY(call_thread_);
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/call/call_unittest.cc b/chromium/third_party/webrtc/call/call_unittest.cc
index 8afcf25121b..bd89b873bfa 100644
--- a/chromium/third_party/webrtc/call/call_unittest.cc
+++ b/chromium/third_party/webrtc/call/call_unittest.cc
@@ -20,13 +20,17 @@
#include "api/rtc_event_log/rtc_event_log.h"
#include "api/task_queue/default_task_queue_factory.h"
#include "api/test/mock_audio_mixer.h"
+#include "api/test/video/function_video_encoder_factory.h"
#include "api/transport/field_trial_based_config.h"
+#include "api/video/builtin_video_bitrate_allocator_factory.h"
#include "audio/audio_receive_stream.h"
#include "audio/audio_send_stream.h"
+#include "call/adaptation/test/fake_resource.h"
+#include "call/adaptation/test/mock_resource_listener.h"
#include "call/audio_state.h"
#include "modules/audio_device/include/mock_audio_device.h"
#include "modules/audio_processing/include/mock_audio_processing.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h"
#include "test/fake_encoder.h"
#include "test/gtest.h"
#include "test/mock_audio_decoder_factory.h"
@@ -35,6 +39,10 @@
namespace {
+using ::testing::_;
+using ::testing::Contains;
+using ::testing::StrictMock;
+
struct CallHelper {
explicit CallHelper(bool use_null_audio_processing) {
task_queue_factory_ = webrtc::CreateDefaultTaskQueueFactory();
@@ -67,6 +75,20 @@ struct CallHelper {
namespace webrtc {
+namespace {
+
+rtc::scoped_refptr<Resource> FindResourceWhoseNameContains(
+ const std::vector<rtc::scoped_refptr<Resource>>& resources,
+ const std::string& name_contains) {
+ for (const auto& resource : resources) {
+ if (resource->Name().find(name_contains) != std::string::npos)
+ return resource;
+ }
+ return nullptr;
+}
+
+} // namespace
+
TEST(CallTest, ConstructDestruct) {
for (bool use_null_audio_processing : {false, true}) {
CallHelper call(use_null_audio_processing);
@@ -325,4 +347,182 @@ TEST(CallTest, RecreatingAudioStreamWithSameSsrcReusesRtpState) {
}
}
+TEST(CallTest, AddAdaptationResourceAfterCreatingVideoSendStream) {
+ CallHelper call(true);
+ // Create a VideoSendStream.
+ test::FunctionVideoEncoderFactory fake_encoder_factory([]() {
+ return std::make_unique<test::FakeEncoder>(Clock::GetRealTimeClock());
+ });
+ auto bitrate_allocator_factory = CreateBuiltinVideoBitrateAllocatorFactory();
+ MockTransport send_transport;
+ VideoSendStream::Config config(&send_transport);
+ config.rtp.payload_type = 110;
+ config.rtp.ssrcs = {42};
+ config.encoder_settings.encoder_factory = &fake_encoder_factory;
+ config.encoder_settings.bitrate_allocator_factory =
+ bitrate_allocator_factory.get();
+ VideoEncoderConfig encoder_config;
+ encoder_config.max_bitrate_bps = 1337;
+ VideoSendStream* stream1 =
+ call->CreateVideoSendStream(config.Copy(), encoder_config.Copy());
+ EXPECT_NE(stream1, nullptr);
+ config.rtp.ssrcs = {43};
+ VideoSendStream* stream2 =
+ call->CreateVideoSendStream(config.Copy(), encoder_config.Copy());
+ EXPECT_NE(stream2, nullptr);
+ // Add a fake resource.
+ auto fake_resource = FakeResource::Create("FakeResource");
+ call->AddAdaptationResource(fake_resource);
+ // An adapter resource mirroring the |fake_resource| should now be present on
+ // both streams.
+ auto injected_resource1 = FindResourceWhoseNameContains(
+ stream1->GetAdaptationResources(), fake_resource->Name());
+ EXPECT_TRUE(injected_resource1);
+ auto injected_resource2 = FindResourceWhoseNameContains(
+ stream2->GetAdaptationResources(), fake_resource->Name());
+ EXPECT_TRUE(injected_resource2);
+ // Overwrite the real resource listeners with mock ones to verify the signal
+ // gets through.
+ injected_resource1->SetResourceListener(nullptr);
+ StrictMock<MockResourceListener> resource_listener1;
+ EXPECT_CALL(resource_listener1, OnResourceUsageStateMeasured(_, _))
+ .Times(1)
+ .WillOnce([injected_resource1](rtc::scoped_refptr<Resource> resource,
+ ResourceUsageState usage_state) {
+ EXPECT_EQ(injected_resource1, resource);
+ EXPECT_EQ(ResourceUsageState::kOveruse, usage_state);
+ });
+ injected_resource1->SetResourceListener(&resource_listener1);
+ injected_resource2->SetResourceListener(nullptr);
+ StrictMock<MockResourceListener> resource_listener2;
+ EXPECT_CALL(resource_listener2, OnResourceUsageStateMeasured(_, _))
+ .Times(1)
+ .WillOnce([injected_resource2](rtc::scoped_refptr<Resource> resource,
+ ResourceUsageState usage_state) {
+ EXPECT_EQ(injected_resource2, resource);
+ EXPECT_EQ(ResourceUsageState::kOveruse, usage_state);
+ });
+ injected_resource2->SetResourceListener(&resource_listener2);
+ // The kOveruse signal should get to our resource listeners.
+ fake_resource->SetUsageState(ResourceUsageState::kOveruse);
+ call->DestroyVideoSendStream(stream1);
+ call->DestroyVideoSendStream(stream2);
+}
+
+TEST(CallTest, AddAdaptationResourceBeforeCreatingVideoSendStream) {
+ CallHelper call(true);
+ // Add a fake resource.
+ auto fake_resource = FakeResource::Create("FakeResource");
+ call->AddAdaptationResource(fake_resource);
+ // Create a VideoSendStream.
+ test::FunctionVideoEncoderFactory fake_encoder_factory([]() {
+ return std::make_unique<test::FakeEncoder>(Clock::GetRealTimeClock());
+ });
+ auto bitrate_allocator_factory = CreateBuiltinVideoBitrateAllocatorFactory();
+ MockTransport send_transport;
+ VideoSendStream::Config config(&send_transport);
+ config.rtp.payload_type = 110;
+ config.rtp.ssrcs = {42};
+ config.encoder_settings.encoder_factory = &fake_encoder_factory;
+ config.encoder_settings.bitrate_allocator_factory =
+ bitrate_allocator_factory.get();
+ VideoEncoderConfig encoder_config;
+ encoder_config.max_bitrate_bps = 1337;
+ VideoSendStream* stream1 =
+ call->CreateVideoSendStream(config.Copy(), encoder_config.Copy());
+ EXPECT_NE(stream1, nullptr);
+ config.rtp.ssrcs = {43};
+ VideoSendStream* stream2 =
+ call->CreateVideoSendStream(config.Copy(), encoder_config.Copy());
+ EXPECT_NE(stream2, nullptr);
+ // An adapter resource mirroring the |fake_resource| should be present on both
+ // streams.
+ auto injected_resource1 = FindResourceWhoseNameContains(
+ stream1->GetAdaptationResources(), fake_resource->Name());
+ EXPECT_TRUE(injected_resource1);
+ auto injected_resource2 = FindResourceWhoseNameContains(
+ stream2->GetAdaptationResources(), fake_resource->Name());
+ EXPECT_TRUE(injected_resource2);
+ // Overwrite the real resource listeners with mock ones to verify the signal
+ // gets through.
+ injected_resource1->SetResourceListener(nullptr);
+ StrictMock<MockResourceListener> resource_listener1;
+ EXPECT_CALL(resource_listener1, OnResourceUsageStateMeasured(_, _))
+ .Times(1)
+ .WillOnce([injected_resource1](rtc::scoped_refptr<Resource> resource,
+ ResourceUsageState usage_state) {
+ EXPECT_EQ(injected_resource1, resource);
+ EXPECT_EQ(ResourceUsageState::kUnderuse, usage_state);
+ });
+ injected_resource1->SetResourceListener(&resource_listener1);
+ injected_resource2->SetResourceListener(nullptr);
+ StrictMock<MockResourceListener> resource_listener2;
+ EXPECT_CALL(resource_listener2, OnResourceUsageStateMeasured(_, _))
+ .Times(1)
+ .WillOnce([injected_resource2](rtc::scoped_refptr<Resource> resource,
+ ResourceUsageState usage_state) {
+ EXPECT_EQ(injected_resource2, resource);
+ EXPECT_EQ(ResourceUsageState::kUnderuse, usage_state);
+ });
+ injected_resource2->SetResourceListener(&resource_listener2);
+ // The kUnderuse signal should get to our resource listeners.
+ fake_resource->SetUsageState(ResourceUsageState::kUnderuse);
+ call->DestroyVideoSendStream(stream1);
+ call->DestroyVideoSendStream(stream2);
+}
+
+TEST(CallTest, SharedModuleThread) {
+ class SharedModuleThreadUser : public Module {
+ public:
+ SharedModuleThreadUser(ProcessThread* expected_thread,
+ rtc::scoped_refptr<SharedModuleThread> thread)
+ : expected_thread_(expected_thread), thread_(std::move(thread)) {
+ thread_->EnsureStarted();
+ thread_->process_thread()->RegisterModule(this, RTC_FROM_HERE);
+ }
+
+ ~SharedModuleThreadUser() override {
+ thread_->process_thread()->DeRegisterModule(this);
+ EXPECT_TRUE(thread_was_checked_);
+ }
+
+ private:
+ int64_t TimeUntilNextProcess() override { return 1000; }
+ void Process() override {}
+ void ProcessThreadAttached(ProcessThread* process_thread) override {
+ if (!process_thread) {
+ // Being detached.
+ return;
+ }
+ EXPECT_EQ(process_thread, expected_thread_);
+ thread_was_checked_ = true;
+ }
+
+ bool thread_was_checked_ = false;
+ ProcessThread* const expected_thread_;
+ rtc::scoped_refptr<SharedModuleThread> thread_;
+ };
+
+ // Create our test instance and pass a lambda to it that gets executed when
+ // the reference count goes back to 1 - meaning |shared| again is the only
+ // reference, which means we can free the variable and deallocate the thread.
+ rtc::scoped_refptr<SharedModuleThread> shared;
+ shared = SharedModuleThread::Create("MySharedProcessThread",
+ [&shared]() { shared = nullptr; });
+ ProcessThread* process_thread = shared->process_thread();
+
+ ASSERT_TRUE(shared.get());
+
+ {
+ // Create a couple of users of the thread.
+ // These instances are in a separate scope to trigger the callback to our
+ // lambda, which will run when these go out of scope.
+ SharedModuleThreadUser user1(process_thread, shared);
+ SharedModuleThreadUser user2(process_thread, shared);
+ }
+
+ // The thread should now have been stopped and freed.
+ EXPECT_FALSE(shared);
+}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/call/degraded_call.cc b/chromium/third_party/webrtc/call/degraded_call.cc
index 9c8d2be5081..007e0af3602 100644
--- a/chromium/third_party/webrtc/call/degraded_call.cc
+++ b/chromium/third_party/webrtc/call/degraded_call.cc
@@ -245,6 +245,11 @@ void DegradedCall::DestroyFlexfecReceiveStream(
call_->DestroyFlexfecReceiveStream(receive_stream);
}
+void DegradedCall::AddAdaptationResource(
+ rtc::scoped_refptr<Resource> resource) {
+ call_->AddAdaptationResource(std::move(resource));
+}
+
PacketReceiver* DegradedCall::Receiver() {
if (receive_config_) {
return this;
diff --git a/chromium/third_party/webrtc/call/degraded_call.h b/chromium/third_party/webrtc/call/degraded_call.h
index 49230ca1edc..ac072b71594 100644
--- a/chromium/third_party/webrtc/call/degraded_call.h
+++ b/chromium/third_party/webrtc/call/degraded_call.h
@@ -77,6 +77,8 @@ class DegradedCall : public Call, private PacketReceiver {
void DestroyFlexfecReceiveStream(
FlexfecReceiveStream* receive_stream) override;
+ void AddAdaptationResource(rtc::scoped_refptr<Resource> resource) override;
+
PacketReceiver* Receiver() override;
RtpTransportControllerSendInterface* GetTransportControllerSend() override;
diff --git a/chromium/third_party/webrtc/call/fake_network_pipe_unittest.cc b/chromium/third_party/webrtc/call/fake_network_pipe_unittest.cc
index 9c4a3bf7558..852a4272220 100644
--- a/chromium/third_party/webrtc/call/fake_network_pipe_unittest.cc
+++ b/chromium/third_party/webrtc/call/fake_network_pipe_unittest.cc
@@ -24,8 +24,10 @@ namespace webrtc {
class MockReceiver : public PacketReceiver {
public:
- MOCK_METHOD3(DeliverPacket,
- DeliveryStatus(MediaType, rtc::CopyOnWriteBuffer, int64_t));
+ MOCK_METHOD(DeliveryStatus,
+ DeliverPacket,
+ (MediaType, rtc::CopyOnWriteBuffer, int64_t),
+ (override));
virtual ~MockReceiver() = default;
};
diff --git a/chromium/third_party/webrtc/call/flexfec_receive_stream_impl.cc b/chromium/third_party/webrtc/call/flexfec_receive_stream_impl.cc
index 40005efe835..e629bca3477 100644
--- a/chromium/third_party/webrtc/call/flexfec_receive_stream_impl.cc
+++ b/chromium/third_party/webrtc/call/flexfec_receive_stream_impl.cc
@@ -22,7 +22,6 @@
#include "call/rtp_stream_receiver_controller_interface.h"
#include "modules/rtp_rtcp/include/flexfec_receiver.h"
#include "modules/rtp_rtcp/include/receive_statistics.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
#include "modules/utility/include/process_thread.h"
#include "rtc_base/checks.h"
@@ -119,12 +118,12 @@ std::unique_ptr<FlexfecReceiver> MaybeCreateFlexfecReceiver(
recovered_packet_receiver));
}
-std::unique_ptr<RtpRtcp> CreateRtpRtcpModule(
+std::unique_ptr<ModuleRtpRtcpImpl2> CreateRtpRtcpModule(
Clock* clock,
ReceiveStatistics* receive_statistics,
const FlexfecReceiveStreamImpl::Config& config,
RtcpRttStats* rtt_stats) {
- RtpRtcp::Configuration configuration;
+ RtpRtcpInterface::Configuration configuration;
configuration.audio = false;
configuration.receiver_only = true;
configuration.clock = clock;
@@ -132,7 +131,7 @@ std::unique_ptr<RtpRtcp> CreateRtpRtcpModule(
configuration.outgoing_transport = config.rtcp_send_transport;
configuration.rtt_stats = rtt_stats;
configuration.local_media_ssrc = config.local_ssrc;
- return RtpRtcp::Create(configuration);
+ return ModuleRtpRtcpImpl2::Create(configuration);
}
} // namespace
diff --git a/chromium/third_party/webrtc/call/flexfec_receive_stream_impl.h b/chromium/third_party/webrtc/call/flexfec_receive_stream_impl.h
index d4fdc7431a7..888dae9ebd5 100644
--- a/chromium/third_party/webrtc/call/flexfec_receive_stream_impl.h
+++ b/chromium/third_party/webrtc/call/flexfec_receive_stream_impl.h
@@ -15,6 +15,7 @@
#include "call/flexfec_receive_stream.h"
#include "call/rtp_packet_sink_interface.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "system_wrappers/include/clock.h"
namespace webrtc {
@@ -55,7 +56,7 @@ class FlexfecReceiveStreamImpl : public FlexfecReceiveStream {
// RTCP reporting.
const std::unique_ptr<ReceiveStatistics> rtp_receive_statistics_;
- const std::unique_ptr<RtpRtcp> rtp_rtcp_;
+ const std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp_;
ProcessThread* process_thread_;
std::unique_ptr<RtpStreamReceiverInterface> rtp_stream_receiver_;
diff --git a/chromium/third_party/webrtc/call/rampup_tests.cc b/chromium/third_party/webrtc/call/rampup_tests.cc
index 64eab050cbe..89fbe3dde7c 100644
--- a/chromium/third_party/webrtc/call/rampup_tests.cc
+++ b/chromium/third_party/webrtc/call/rampup_tests.cc
@@ -362,14 +362,14 @@ void RampUpTester::AccumulateStats(const VideoSendStream::StreamStats& stream,
void RampUpTester::TriggerTestDone() {
RTC_DCHECK_GE(test_start_ms_, 0);
- // TODO(holmer): Add audio send stats here too when those APIs are available.
- if (!send_stream_)
- return;
-
// Stop polling stats.
// Corner case for field_trials=WebRTC-QuickPerfTest/Enabled/
SendTask(RTC_FROM_HERE, task_queue_, [this] { pending_task_.Stop(); });
+ // TODO(holmer): Add audio send stats here too when those APIs are available.
+ if (!send_stream_)
+ return;
+
VideoSendStream::Stats send_stats = send_stream_->GetStats();
send_stream_ = nullptr; // To avoid dereferencing a bad pointer.
diff --git a/chromium/third_party/webrtc/call/rtcp_demuxer_unittest.cc b/chromium/third_party/webrtc/call/rtcp_demuxer_unittest.cc
index 23c305c9007..f3949ca78b4 100644
--- a/chromium/third_party/webrtc/call/rtcp_demuxer_unittest.cc
+++ b/chromium/third_party/webrtc/call/rtcp_demuxer_unittest.cc
@@ -37,7 +37,7 @@ using ::testing::NiceMock;
class MockRtcpPacketSink : public RtcpPacketSinkInterface {
public:
- MOCK_METHOD1(OnRtcpPacket, void(rtc::ArrayView<const uint8_t>));
+ MOCK_METHOD(void, OnRtcpPacket, (rtc::ArrayView<const uint8_t>), (override));
};
class RtcpDemuxerTest : public ::testing::Test {
@@ -81,6 +81,8 @@ class RtcpDemuxerTest : public ::testing::Test {
std::set<RtcpPacketSinkInterface*> broadcast_sinks_to_tear_down_;
};
+class RtcpDemuxerDeathTest : public RtcpDemuxerTest {};
+
// Produces a packet buffer representing an RTCP packet with a given SSRC,
// as it would look when sent over the wire.
// |distinguishing_string| allows different RTCP packets with the same SSRC
@@ -419,7 +421,7 @@ TEST_F(RtcpDemuxerTest, FirstResolutionOfRsidNotForgotten) {
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST_F(RtcpDemuxerTest, RepeatedSsrcToSinkAssociationsDisallowed) {
+TEST_F(RtcpDemuxerDeathTest, RepeatedSsrcToSinkAssociationsDisallowed) {
MockRtcpPacketSink sink;
constexpr uint32_t ssrc = 101;
@@ -427,7 +429,7 @@ TEST_F(RtcpDemuxerTest, RepeatedSsrcToSinkAssociationsDisallowed) {
EXPECT_DEATH(AddSsrcSink(ssrc, &sink), "");
}
-TEST_F(RtcpDemuxerTest, RepeatedRsidToSinkAssociationsDisallowed) {
+TEST_F(RtcpDemuxerDeathTest, RepeatedRsidToSinkAssociationsDisallowed) {
MockRtcpPacketSink sink;
const std::string rsid = "z";
@@ -435,14 +437,14 @@ TEST_F(RtcpDemuxerTest, RepeatedRsidToSinkAssociationsDisallowed) {
EXPECT_DEATH(AddRsidSink(rsid, &sink), "");
}
-TEST_F(RtcpDemuxerTest, RepeatedBroadcastSinkRegistrationDisallowed) {
+TEST_F(RtcpDemuxerDeathTest, RepeatedBroadcastSinkRegistrationDisallowed) {
MockRtcpPacketSink sink;
AddBroadcastSink(&sink);
EXPECT_DEATH(AddBroadcastSink(&sink), "");
}
-TEST_F(RtcpDemuxerTest, SsrcSinkCannotAlsoBeRegisteredAsBroadcast) {
+TEST_F(RtcpDemuxerDeathTest, SsrcSinkCannotAlsoBeRegisteredAsBroadcast) {
MockRtcpPacketSink sink;
constexpr uint32_t ssrc = 101;
@@ -450,7 +452,7 @@ TEST_F(RtcpDemuxerTest, SsrcSinkCannotAlsoBeRegisteredAsBroadcast) {
EXPECT_DEATH(AddBroadcastSink(&sink), "");
}
-TEST_F(RtcpDemuxerTest, RsidSinkCannotAlsoBeRegisteredAsBroadcast) {
+TEST_F(RtcpDemuxerDeathTest, RsidSinkCannotAlsoBeRegisteredAsBroadcast) {
MockRtcpPacketSink sink;
const std::string rsid = "z";
@@ -458,7 +460,7 @@ TEST_F(RtcpDemuxerTest, RsidSinkCannotAlsoBeRegisteredAsBroadcast) {
EXPECT_DEATH(AddBroadcastSink(&sink), "");
}
-TEST_F(RtcpDemuxerTest, BroadcastSinkCannotAlsoBeRegisteredAsSsrcSink) {
+TEST_F(RtcpDemuxerDeathTest, BroadcastSinkCannotAlsoBeRegisteredAsSsrcSink) {
MockRtcpPacketSink sink;
AddBroadcastSink(&sink);
@@ -466,7 +468,7 @@ TEST_F(RtcpDemuxerTest, BroadcastSinkCannotAlsoBeRegisteredAsSsrcSink) {
EXPECT_DEATH(AddSsrcSink(ssrc, &sink), "");
}
-TEST_F(RtcpDemuxerTest, BroadcastSinkCannotAlsoBeRegisteredAsRsidSink) {
+TEST_F(RtcpDemuxerDeathTest, BroadcastSinkCannotAlsoBeRegisteredAsRsidSink) {
MockRtcpPacketSink sink;
AddBroadcastSink(&sink);
@@ -474,27 +476,27 @@ TEST_F(RtcpDemuxerTest, BroadcastSinkCannotAlsoBeRegisteredAsRsidSink) {
EXPECT_DEATH(AddRsidSink(rsid, &sink), "");
}
-TEST_F(RtcpDemuxerTest, MayNotCallRemoveSinkOnNeverAddedSink) {
+TEST_F(RtcpDemuxerDeathTest, MayNotCallRemoveSinkOnNeverAddedSink) {
MockRtcpPacketSink sink;
EXPECT_DEATH(RemoveSink(&sink), "");
}
-TEST_F(RtcpDemuxerTest, MayNotCallRemoveBroadcastSinkOnNeverAddedSink) {
+TEST_F(RtcpDemuxerDeathTest, MayNotCallRemoveBroadcastSinkOnNeverAddedSink) {
MockRtcpPacketSink sink;
EXPECT_DEATH(RemoveBroadcastSink(&sink), "");
}
-TEST_F(RtcpDemuxerTest, RsidMustBeNonEmpty) {
+TEST_F(RtcpDemuxerDeathTest, RsidMustBeNonEmpty) {
MockRtcpPacketSink sink;
EXPECT_DEATH(AddRsidSink("", &sink), "");
}
-TEST_F(RtcpDemuxerTest, RsidMustBeAlphaNumeric) {
+TEST_F(RtcpDemuxerDeathTest, RsidMustBeAlphaNumeric) {
MockRtcpPacketSink sink;
EXPECT_DEATH(AddRsidSink("a_3", &sink), "");
}
-TEST_F(RtcpDemuxerTest, RsidMustNotExceedMaximumLength) {
+TEST_F(RtcpDemuxerDeathTest, RsidMustNotExceedMaximumLength) {
MockRtcpPacketSink sink;
std::string rsid(BaseRtpStringExtension::kMaxValueSizeBytes + 1, 'a');
EXPECT_DEATH(AddRsidSink(rsid, &sink), "");
diff --git a/chromium/third_party/webrtc/call/rtp_demuxer.cc b/chromium/third_party/webrtc/call/rtp_demuxer.cc
index 14725cf023d..3ab75c7f982 100644
--- a/chromium/third_party/webrtc/call/rtp_demuxer.cc
+++ b/chromium/third_party/webrtc/call/rtp_demuxer.cc
@@ -24,6 +24,25 @@ namespace webrtc {
RtpDemuxerCriteria::RtpDemuxerCriteria() = default;
RtpDemuxerCriteria::~RtpDemuxerCriteria() = default;
+std::string RtpDemuxerCriteria::ToString() const {
+ rtc::StringBuilder sb;
+ sb << "{mid: " << (mid.empty() ? "<empty>" : mid)
+ << ", rsid: " << (rsid.empty() ? "<empty>" : rsid) << ", ssrcs: [";
+
+ for (auto ssrc : ssrcs) {
+ sb << ssrc << ", ";
+ }
+
+ sb << "], payload_types = [";
+
+ for (auto pt : payload_types) {
+ sb << pt << ", ";
+ }
+
+ sb << "]}";
+ return sb.Release();
+}
+
// static
std::string RtpDemuxer::DescribePacket(const RtpPacketReceived& packet) {
rtc::StringBuilder sb;
@@ -66,6 +85,8 @@ bool RtpDemuxer::AddSink(const RtpDemuxerCriteria& criteria,
// criteria because new sinks are created according to user-specified SDP and
// we do not want to crash due to a data validation error.
if (CriteriaWouldConflict(criteria)) {
+ RTC_LOG(LS_ERROR) << "Unable to add sink = " << sink
+ << " due conflicting criteria " << criteria.ToString();
return false;
}
@@ -92,6 +113,9 @@ bool RtpDemuxer::AddSink(const RtpDemuxerCriteria& criteria,
RefreshKnownMids();
+ RTC_LOG(LS_INFO) << "Added sink = " << sink << " for criteria "
+ << criteria.ToString();
+
return true;
}
@@ -105,25 +129,40 @@ bool RtpDemuxer::CriteriaWouldConflict(
// Adding this criteria would cause one of these rules to be shadowed, so
// reject this new criteria.
if (known_mids_.find(criteria.mid) != known_mids_.end()) {
+ RTC_LOG(LS_INFO) << criteria.ToString()
+ << " would conflict with known mid";
return true;
}
} else {
// If the exact rule already exists, then reject this duplicate.
- if (sink_by_mid_and_rsid_.find(std::make_pair(
- criteria.mid, criteria.rsid)) != sink_by_mid_and_rsid_.end()) {
+ const auto sink_by_mid_and_rsid = sink_by_mid_and_rsid_.find(
+ std::make_pair(criteria.mid, criteria.rsid));
+ if (sink_by_mid_and_rsid != sink_by_mid_and_rsid_.end()) {
+ RTC_LOG(LS_INFO) << criteria.ToString()
+ << " would conflict with existing sink = "
+ << sink_by_mid_and_rsid->second
+ << " by mid+rsid binding";
return true;
}
// If there is already a sink registered for the bare MID, then this
// criteria will never receive any packets because they will just be
// directed to that MID sink, so reject this new criteria.
- if (sink_by_mid_.find(criteria.mid) != sink_by_mid_.end()) {
+ const auto sink_by_mid = sink_by_mid_.find(criteria.mid);
+ if (sink_by_mid != sink_by_mid_.end()) {
+ RTC_LOG(LS_INFO) << criteria.ToString()
+ << " would conflict with existing sink = "
+ << sink_by_mid->second << " by mid binding";
return true;
}
}
}
for (uint32_t ssrc : criteria.ssrcs) {
- if (sink_by_ssrc_.find(ssrc) != sink_by_ssrc_.end()) {
+ const auto sink_by_ssrc = sink_by_ssrc_.find(ssrc);
+ if (sink_by_ssrc != sink_by_ssrc_.end()) {
+ RTC_LOG(LS_INFO) << criteria.ToString()
+ << " would conflict with existing sink = "
+ << sink_by_ssrc->second << " binding by SSRC=" << ssrc;
return true;
}
}
@@ -168,7 +207,11 @@ bool RtpDemuxer::RemoveSink(const RtpPacketSinkInterface* sink) {
RemoveFromMapByValue(&sink_by_mid_and_rsid_, sink) +
RemoveFromMapByValue(&sink_by_rsid_, sink);
RefreshKnownMids();
- return num_removed > 0;
+ bool removed = num_removed > 0;
+ if (removed) {
+ RTC_LOG(LS_INFO) << "Removed sink = " << sink << " bindings";
+ }
+ return removed;
}
bool RtpDemuxer::OnRtpPacket(const RtpPacketReceived& packet) {
@@ -370,9 +413,13 @@ bool RtpDemuxer::AddSsrcSinkBinding(uint32_t ssrc,
auto it = result.first;
bool inserted = result.second;
if (inserted) {
+ RTC_LOG(LS_INFO) << "Added sink = " << sink
+ << " binding with SSRC=" << ssrc;
return true;
}
if (it->second != sink) {
+ RTC_LOG(LS_INFO) << "Updated sink = " << sink
+ << " binding with SSRC=" << ssrc;
it->second = sink;
return true;
}
diff --git a/chromium/third_party/webrtc/call/rtp_demuxer.h b/chromium/third_party/webrtc/call/rtp_demuxer.h
index c815c47f724..dae7a53b702 100644
--- a/chromium/third_party/webrtc/call/rtp_demuxer.h
+++ b/chromium/third_party/webrtc/call/rtp_demuxer.h
@@ -44,6 +44,9 @@ struct RtpDemuxerCriteria {
// Will match packets with any of these payload types.
std::set<uint8_t> payload_types;
+
+ // Returns a string representation of the demux criteria to facilitate logging.
+ std::string ToString() const;
};
// This class represents the RTP demuxing, for a single RTP session (i.e., one
diff --git a/chromium/third_party/webrtc/call/rtp_demuxer_unittest.cc b/chromium/third_party/webrtc/call/rtp_demuxer_unittest.cc
index 86b458a0cc0..59baafe9ff8 100644
--- a/chromium/third_party/webrtc/call/rtp_demuxer_unittest.cc
+++ b/chromium/third_party/webrtc/call/rtp_demuxer_unittest.cc
@@ -37,14 +37,22 @@ using ::testing::NiceMock;
class MockSsrcBindingObserver : public SsrcBindingObserver {
public:
- MOCK_METHOD2(OnSsrcBoundToRsid, void(const std::string& rsid, uint32_t ssrc));
- MOCK_METHOD2(OnSsrcBoundToMid, void(const std::string& mid, uint32_t ssrc));
- MOCK_METHOD3(OnSsrcBoundToMidRsid,
- void(const std::string& mid,
- const std::string& rsid,
- uint32_t ssrc));
- MOCK_METHOD2(OnSsrcBoundToPayloadType,
- void(uint8_t payload_type, uint32_t ssrc));
+ MOCK_METHOD(void,
+ OnSsrcBoundToRsid,
+ (const std::string& rsid, uint32_t ssrc),
+ (override));
+ MOCK_METHOD(void,
+ OnSsrcBoundToMid,
+ (const std::string& mid, uint32_t ssrc),
+ (override));
+ MOCK_METHOD(void,
+ OnSsrcBoundToMidRsid,
+ (const std::string& mid, const std::string& rsid, uint32_t ssrc),
+ (override));
+ MOCK_METHOD(void,
+ OnSsrcBoundToPayloadType,
+ (uint8_t payload_type, uint32_t ssrc),
+ (override));
};
class RtpDemuxerTest : public ::testing::Test {
@@ -210,6 +218,8 @@ class RtpDemuxerTest : public ::testing::Test {
uint16_t next_sequence_number_ = 1;
};
+class RtpDemuxerDeathTest : public RtpDemuxerTest {};
+
MATCHER_P(SamePacketAs, other, "") {
return arg.Ssrc() == other.Ssrc() &&
arg.SequenceNumber() == other.SequenceNumber();
@@ -1478,41 +1488,42 @@ TEST_F(RtpDemuxerTest, MaliciousPeerCannotCauseMemoryOveruse) {
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST_F(RtpDemuxerTest, CriteriaMustBeNonEmpty) {
+TEST_F(RtpDemuxerDeathTest, CriteriaMustBeNonEmpty) {
MockRtpPacketSink sink;
RtpDemuxerCriteria criteria;
EXPECT_DEATH(AddSink(criteria, &sink), "");
}
-TEST_F(RtpDemuxerTest, RsidMustBeAlphaNumeric) {
+TEST_F(RtpDemuxerDeathTest, RsidMustBeAlphaNumeric) {
MockRtpPacketSink sink;
EXPECT_DEATH(AddSinkOnlyRsid("a_3", &sink), "");
}
-TEST_F(RtpDemuxerTest, MidMustBeToken) {
+TEST_F(RtpDemuxerDeathTest, MidMustBeToken) {
MockRtpPacketSink sink;
EXPECT_DEATH(AddSinkOnlyMid("a(3)", &sink), "");
}
-TEST_F(RtpDemuxerTest, RsidMustNotExceedMaximumLength) {
+TEST_F(RtpDemuxerDeathTest, RsidMustNotExceedMaximumLength) {
MockRtpPacketSink sink;
std::string rsid(BaseRtpStringExtension::kMaxValueSizeBytes + 1, 'a');
EXPECT_DEATH(AddSinkOnlyRsid(rsid, &sink), "");
}
-TEST_F(RtpDemuxerTest, MidMustNotExceedMaximumLength) {
+TEST_F(RtpDemuxerDeathTest, MidMustNotExceedMaximumLength) {
MockRtpPacketSink sink;
std::string mid(BaseRtpStringExtension::kMaxValueSizeBytes + 1, 'a');
EXPECT_DEATH(AddSinkOnlyMid(mid, &sink), "");
}
-TEST_F(RtpDemuxerTest, DoubleRegisterationOfSsrcBindingObserverDisallowed) {
+TEST_F(RtpDemuxerDeathTest,
+ DoubleRegisterationOfSsrcBindingObserverDisallowed) {
MockSsrcBindingObserver observer;
RegisterSsrcBindingObserver(&observer);
EXPECT_DEATH(RegisterSsrcBindingObserver(&observer), "");
}
-TEST_F(RtpDemuxerTest,
+TEST_F(RtpDemuxerDeathTest,
DregisterationOfNeverRegisteredSsrcBindingObserverDisallowed) {
MockSsrcBindingObserver observer;
EXPECT_DEATH(DeregisterSsrcBindingObserver(&observer), "");
diff --git a/chromium/third_party/webrtc/call/rtp_payload_params.cc b/chromium/third_party/webrtc/call/rtp_payload_params.cc
index 279eb588d73..110db2e9fab 100644
--- a/chromium/third_party/webrtc/call/rtp_payload_params.cc
+++ b/chromium/third_party/webrtc/call/rtp_payload_params.cc
@@ -93,15 +93,6 @@ void PopulateRtpWithCodecSpecifics(const CodecSpecificInfo& info,
h264_header.packetization_mode =
info.codecSpecific.H264.packetization_mode;
rtp->simulcastIdx = spatial_index.value_or(0);
- rtp->frame_marking.temporal_id = kNoTemporalIdx;
- if (info.codecSpecific.H264.temporal_idx != kNoTemporalIdx) {
- rtp->frame_marking.temporal_id = info.codecSpecific.H264.temporal_idx;
- rtp->frame_marking.layer_id = 0;
- rtp->frame_marking.independent_frame =
- info.codecSpecific.H264.idr_frame;
- rtp->frame_marking.base_layer_sync =
- info.codecSpecific.H264.base_layer_sync;
- }
return;
}
case kVideoCodecMultiplex:
@@ -139,10 +130,7 @@ RtpPayloadParams::RtpPayloadParams(const uint32_t ssrc,
: ssrc_(ssrc),
generic_picture_id_experiment_(
absl::StartsWith(trials.Lookup("WebRTC-GenericPictureId"),
- "Enabled")),
- generic_descriptor_experiment_(
- !absl::StartsWith(trials.Lookup("WebRTC-GenericDescriptor"),
- "Disabled")) {
+ "Enabled")) {
for (auto& spatial_layer : last_shared_frame_id_)
spatial_layer.fill(-1);
@@ -186,9 +174,8 @@ RTPVideoHeader RtpPayloadParams::GetRtpVideoHeader(
SetCodecSpecific(&rtp_video_header, first_frame_in_picture);
- if (generic_descriptor_experiment_)
- SetGeneric(codec_specific_info, shared_frame_id, is_keyframe,
- &rtp_video_header);
+ SetGeneric(codec_specific_info, shared_frame_id, is_keyframe,
+ &rtp_video_header);
return rtp_video_header;
}
@@ -237,14 +224,6 @@ void RtpPayloadParams::SetCodecSpecific(RTPVideoHeader* rtp_video_header,
vp9_header.tl0_pic_idx = state_.tl0_pic_idx;
}
}
- if (rtp_video_header->codec == kVideoCodecH264) {
- if (rtp_video_header->frame_marking.temporal_id != kNoTemporalIdx) {
- if (rtp_video_header->frame_marking.temporal_id == 0) {
- ++state_.tl0_pic_idx;
- }
- rtp_video_header->frame_marking.tl0_pic_idx = state_.tl0_pic_idx;
- }
- }
if (generic_picture_id_experiment_ &&
rtp_video_header->codec == kVideoCodecGeneric) {
rtp_video_header->video_type_header.emplace<RTPVideoHeaderLegacyGeneric>()
@@ -261,6 +240,8 @@ RtpPayloadParams::GenericDescriptorFromFrameInfo(
generic.frame_id = frame_id;
generic.dependencies = dependencies_calculator_.FromBuffersUsage(
frame_type, frame_id, frame_info.encoder_buffers);
+ generic.chain_diffs =
+ chains_calculator_.From(frame_id, frame_info.part_of_chain);
generic.spatial_index = frame_info.spatial_id;
generic.temporal_index = frame_info.temporal_id;
generic.decode_target_indications = frame_info.decode_target_indications;
@@ -273,6 +254,11 @@ void RtpPayloadParams::SetGeneric(const CodecSpecificInfo* codec_specific_info,
RTPVideoHeader* rtp_video_header) {
if (codec_specific_info && codec_specific_info->generic_frame_info &&
!codec_specific_info->generic_frame_info->encoder_buffers.empty()) {
+ if (is_keyframe) {
+ // Key frame resets all chains it is in.
+ chains_calculator_.Reset(
+ codec_specific_info->generic_frame_info->part_of_chain);
+ }
rtp_video_header->generic =
GenericDescriptorFromFrameInfo(*codec_specific_info->generic_frame_info,
frame_id, rtp_video_header->frame_type);
diff --git a/chromium/third_party/webrtc/call/rtp_payload_params.h b/chromium/third_party/webrtc/call/rtp_payload_params.h
index 13b10503781..2e0faeb5c99 100644
--- a/chromium/third_party/webrtc/call/rtp_payload_params.h
+++ b/chromium/third_party/webrtc/call/rtp_payload_params.h
@@ -19,6 +19,7 @@
#include "call/rtp_config.h"
#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h"
#include "modules/rtp_rtcp/source/rtp_video_header.h"
+#include "modules/video_coding/chain_diff_calculator.h"
#include "modules/video_coding/frame_dependencies_calculator.h"
#include "modules/video_coding/include/video_codec_interface.h"
@@ -88,6 +89,7 @@ class RtpPayloadParams final {
RTPVideoHeader::GenericDescriptorInfo* generic);
FrameDependenciesCalculator dependencies_calculator_;
+ ChainDiffCalculator chains_calculator_;
// TODO(bugs.webrtc.org/10242): Remove once all encoder-wrappers are updated.
// Holds the last shared frame id for a given (spatial, temporal) layer.
std::array<std::array<int64_t, RtpGenericFrameDescriptor::kMaxTemporalLayers>,
@@ -112,7 +114,6 @@ class RtpPayloadParams final {
RtpPayloadState state_;
const bool generic_picture_id_experiment_;
- const bool generic_descriptor_experiment_;
};
} // namespace webrtc
#endif // CALL_RTP_PAYLOAD_PARAMS_H_
diff --git a/chromium/third_party/webrtc/call/rtp_payload_params_unittest.cc b/chromium/third_party/webrtc/call/rtp_payload_params_unittest.cc
index 1045504b44e..a5510b0240d 100644
--- a/chromium/third_party/webrtc/call/rtp_payload_params_unittest.cc
+++ b/chromium/third_party/webrtc/call/rtp_payload_params_unittest.cc
@@ -32,6 +32,7 @@
using ::testing::ElementsAre;
using ::testing::IsEmpty;
+using ::testing::SizeIs;
namespace webrtc {
namespace {
@@ -147,54 +148,6 @@ TEST(RtpPayloadParamsTest, InfoMappedToRtpVideoHeader_Vp9) {
codec_info.codecSpecific.VP9.end_of_picture);
}
-TEST(RtpPayloadParamsTest, InfoMappedToRtpVideoHeader_H264) {
- RtpPayloadState state;
- state.picture_id = kPictureId;
- state.tl0_pic_idx = kInitialTl0PicIdx1;
- RtpPayloadParams params(kSsrc1, &state, FieldTrialBasedConfig());
-
- EncodedImage encoded_image;
- CodecSpecificInfo codec_info;
- CodecSpecificInfoH264* h264info = &codec_info.codecSpecific.H264;
- codec_info.codecType = kVideoCodecH264;
- h264info->packetization_mode = H264PacketizationMode::SingleNalUnit;
- h264info->temporal_idx = kNoTemporalIdx;
-
- RTPVideoHeader header =
- params.GetRtpVideoHeader(encoded_image, &codec_info, 10);
-
- EXPECT_EQ(0, header.simulcastIdx);
- EXPECT_EQ(kVideoCodecH264, header.codec);
- const auto& h264 = absl::get<RTPVideoHeaderH264>(header.video_type_header);
- EXPECT_EQ(H264PacketizationMode::SingleNalUnit, h264.packetization_mode);
-
- // test temporal param 1
- h264info->temporal_idx = 1;
- h264info->base_layer_sync = true;
- h264info->idr_frame = false;
-
- header = params.GetRtpVideoHeader(encoded_image, &codec_info, 20);
-
- EXPECT_EQ(kVideoCodecH264, header.codec);
- EXPECT_EQ(header.frame_marking.tl0_pic_idx, kInitialTl0PicIdx1);
- EXPECT_EQ(header.frame_marking.temporal_id, h264info->temporal_idx);
- EXPECT_EQ(header.frame_marking.base_layer_sync, h264info->base_layer_sync);
- EXPECT_EQ(header.frame_marking.independent_frame, h264info->idr_frame);
-
- // test temporal param 2
- h264info->temporal_idx = 0;
- h264info->base_layer_sync = false;
- h264info->idr_frame = true;
-
- header = params.GetRtpVideoHeader(encoded_image, &codec_info, 30);
-
- EXPECT_EQ(kVideoCodecH264, header.codec);
- EXPECT_EQ(header.frame_marking.tl0_pic_idx, kInitialTl0PicIdx1 + 1);
- EXPECT_EQ(header.frame_marking.temporal_id, h264info->temporal_idx);
- EXPECT_EQ(header.frame_marking.base_layer_sync, h264info->base_layer_sync);
- EXPECT_EQ(header.frame_marking.independent_frame, h264info->idr_frame);
-}
-
TEST(RtpPayloadParamsTest, PictureIdIsSetForVp8) {
RtpPayloadState state;
state.picture_id = kInitialPictureId1;
@@ -349,8 +302,6 @@ TEST(RtpPayloadParamsTest, PictureIdForOldGenericFormat) {
}
TEST(RtpPayloadParamsTest, GenericDescriptorForGenericCodec) {
- test::ScopedFieldTrials generic_picture_id(
- "WebRTC-GenericDescriptor/Enabled/");
RtpPayloadState state{};
EncodedImage encoded_image;
@@ -375,8 +326,6 @@ TEST(RtpPayloadParamsTest, GenericDescriptorForGenericCodec) {
}
TEST(RtpPayloadParamsTest, SetsGenericFromGenericFrameInfo) {
- test::ScopedFieldTrials generic_picture_id(
- "WebRTC-GenericDescriptor/Enabled/");
RtpPayloadState state;
EncodedImage encoded_image;
CodecSpecificInfo codec_info;
@@ -388,6 +337,7 @@ TEST(RtpPayloadParamsTest, SetsGenericFromGenericFrameInfo) {
GenericFrameInfo::Builder().S(1).T(0).Dtis("S").Build();
codec_info.generic_frame_info->encoder_buffers = {
{/*id=*/0, /*referenced=*/false, /*updated=*/true}};
+ codec_info.generic_frame_info->part_of_chain = {true, false};
RTPVideoHeader key_header =
params.GetRtpVideoHeader(encoded_image, &codec_info, /*frame_id=*/1);
@@ -398,12 +348,14 @@ TEST(RtpPayloadParamsTest, SetsGenericFromGenericFrameInfo) {
EXPECT_THAT(key_header.generic->dependencies, IsEmpty());
EXPECT_THAT(key_header.generic->decode_target_indications,
ElementsAre(DecodeTargetIndication::kSwitch));
+ EXPECT_THAT(key_header.generic->chain_diffs, SizeIs(2));
encoded_image._frameType = VideoFrameType::kVideoFrameDelta;
codec_info.generic_frame_info =
GenericFrameInfo::Builder().S(2).T(3).Dtis("D").Build();
codec_info.generic_frame_info->encoder_buffers = {
{/*id=*/0, /*referenced=*/true, /*updated=*/false}};
+ codec_info.generic_frame_info->part_of_chain = {false, false};
RTPVideoHeader delta_header =
params.GetRtpVideoHeader(encoded_image, &codec_info, /*frame_id=*/3);
@@ -414,6 +366,7 @@ TEST(RtpPayloadParamsTest, SetsGenericFromGenericFrameInfo) {
EXPECT_THAT(delta_header.generic->dependencies, ElementsAre(1));
EXPECT_THAT(delta_header.generic->decode_target_indications,
ElementsAre(DecodeTargetIndication::kDiscardable));
+ EXPECT_THAT(delta_header.generic->chain_diffs, SizeIs(2));
}
class RtpPayloadParamsVp8ToGenericTest : public ::testing::Test {
@@ -421,9 +374,7 @@ class RtpPayloadParamsVp8ToGenericTest : public ::testing::Test {
enum LayerSync { kNoSync, kSync };
RtpPayloadParamsVp8ToGenericTest()
- : generic_descriptor_field_trial_("WebRTC-GenericDescriptor/Enabled/"),
- state_(),
- params_(123, &state_, trials_config_) {}
+ : state_(), params_(123, &state_, trials_config_) {}
void ConvertAndCheck(int temporal_index,
int64_t shared_frame_id,
@@ -459,7 +410,6 @@ class RtpPayloadParamsVp8ToGenericTest : public ::testing::Test {
}
protected:
- test::ScopedFieldTrials generic_descriptor_field_trial_;
FieldTrialBasedConfig trials_config_;
RtpPayloadState state_;
RtpPayloadParams params_;
@@ -518,9 +468,7 @@ class RtpPayloadParamsH264ToGenericTest : public ::testing::Test {
enum LayerSync { kNoSync, kSync };
RtpPayloadParamsH264ToGenericTest()
- : generic_descriptor_field_trial_("WebRTC-GenericDescriptor/Enabled/"),
- state_(),
- params_(123, &state_, trials_config_) {}
+ : state_(), params_(123, &state_, trials_config_) {}
void ConvertAndCheck(int temporal_index,
int64_t shared_frame_id,
@@ -556,7 +504,6 @@ class RtpPayloadParamsH264ToGenericTest : public ::testing::Test {
}
protected:
- test::ScopedFieldTrials generic_descriptor_field_trial_;
FieldTrialBasedConfig trials_config_;
RtpPayloadState state_;
RtpPayloadParams params_;
diff --git a/chromium/third_party/webrtc/call/rtp_transport_controller_send.cc b/chromium/third_party/webrtc/call/rtp_transport_controller_send.cc
index 56c5e55ca17..9baf164a60d 100644
--- a/chromium/third_party/webrtc/call/rtp_transport_controller_send.cc
+++ b/chromium/third_party/webrtc/call/rtp_transport_controller_send.cc
@@ -91,13 +91,16 @@ RtpTransportControllerSend::RtpTransportControllerSend(
event_log,
trials,
process_thread_.get())),
- task_queue_pacer_(use_task_queue_pacer_
- ? new TaskQueuePacedSender(clock,
- &packet_router_,
- event_log,
- trials,
- task_queue_factory)
- : nullptr),
+ task_queue_pacer_(
+ use_task_queue_pacer_
+ ? new TaskQueuePacedSender(
+ clock,
+ &packet_router_,
+ event_log,
+ trials,
+ task_queue_factory,
+ /*hold_back_window = */ PacingController::kMinSleepTime)
+ : nullptr),
observer_(nullptr),
controller_factory_override_(controller_factory),
controller_factory_fallback_(
diff --git a/chromium/third_party/webrtc/call/rtp_video_sender.cc b/chromium/third_party/webrtc/call/rtp_video_sender.cc
index ca8baee2b09..5f8d2df9655 100644
--- a/chromium/third_party/webrtc/call/rtp_video_sender.cc
+++ b/chromium/third_party/webrtc/call/rtp_video_sender.cc
@@ -22,8 +22,8 @@
#include "api/video_codecs/video_codec.h"
#include "call/rtp_transport_controller_send_interface.h"
#include "modules/pacing/packet_router.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "modules/rtp_rtcp/source/rtp_sender.h"
#include "modules/utility/include/process_thread.h"
#include "modules/video_coding/include/video_codec_interface.h"
@@ -37,7 +37,7 @@ namespace webrtc {
namespace webrtc_internal_rtp_video_sender {
RtpStreamSender::RtpStreamSender(
- std::unique_ptr<RtpRtcp> rtp_rtcp,
+ std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp,
std::unique_ptr<RTPSenderVideo> sender_video,
std::unique_ptr<VideoFecGenerator> fec_generator)
: rtp_rtcp(std::move(rtp_rtcp)),
@@ -200,7 +200,7 @@ std::vector<RtpStreamSender> CreateRtpStreamSenders(
const WebRtcKeyValueConfig& trials) {
RTC_DCHECK_GT(rtp_config.ssrcs.size(), 0);
- RtpRtcp::Configuration configuration;
+ RtpRtcpInterface::Configuration configuration;
configuration.clock = clock;
configuration.audio = false;
configuration.receiver_only = false;
@@ -253,7 +253,8 @@ std::vector<RtpStreamSender> CreateRtpStreamSenders(
configuration.need_rtp_packet_infos = rtp_config.lntf.enabled;
- auto rtp_rtcp = RtpRtcp::Create(configuration);
+ std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp(
+ ModuleRtpRtcpImpl2::Create(configuration));
rtp_rtcp->SetSendingStatus(false);
rtp_rtcp->SetSendingMediaStatus(false);
rtp_rtcp->SetRTCPStatus(RtcpMode::kCompound);
@@ -282,7 +283,7 @@ std::vector<RtpStreamSender> CreateRtpStreamSenders(
video_config.fec_overhead_bytes = fec_generator->MaxPacketOverhead();
}
video_config.frame_transformer = frame_transformer;
- video_config.worker_queue = transport->GetWorkerQueue()->Get();
+ video_config.send_transport_queue = transport->GetWorkerQueue()->Get();
auto sender_video = std::make_unique<RTPSenderVideo>(video_config);
rtp_streams.emplace_back(std::move(rtp_rtcp), std::move(sender_video),
std::move(fec_generator));
@@ -628,7 +629,7 @@ void RtpVideoSender::ConfigureSsrcs() {
RTC_CHECK(ssrc_to_rtp_module_.empty());
for (size_t i = 0; i < rtp_config_.ssrcs.size(); ++i) {
uint32_t ssrc = rtp_config_.ssrcs[i];
- RtpRtcp* const rtp_rtcp = rtp_streams_[i].rtp_rtcp.get();
+ RtpRtcpInterface* const rtp_rtcp = rtp_streams_[i].rtp_rtcp.get();
// Restore RTP state if previous existed.
auto it = suspended_ssrcs_.find(ssrc);
@@ -645,7 +646,7 @@ void RtpVideoSender::ConfigureSsrcs() {
RTC_DCHECK_EQ(rtp_config_.rtx.ssrcs.size(), rtp_config_.ssrcs.size());
for (size_t i = 0; i < rtp_config_.rtx.ssrcs.size(); ++i) {
uint32_t ssrc = rtp_config_.rtx.ssrcs[i];
- RtpRtcp* const rtp_rtcp = rtp_streams_[i].rtp_rtcp.get();
+ RtpRtcpInterface* const rtp_rtcp = rtp_streams_[i].rtp_rtcp.get();
auto it = suspended_ssrcs_.find(ssrc);
if (it != suspended_ssrcs_.end())
rtp_rtcp->SetRtxState(it->second);
diff --git a/chromium/third_party/webrtc/call/rtp_video_sender.h b/chromium/third_party/webrtc/call/rtp_video_sender.h
index 58bb7f412ef..0c277d6aa71 100644
--- a/chromium/third_party/webrtc/call/rtp_video_sender.h
+++ b/chromium/third_party/webrtc/call/rtp_video_sender.h
@@ -29,6 +29,7 @@
#include "call/rtp_transport_controller_send_interface.h"
#include "call/rtp_video_sender_interface.h"
#include "modules/rtp_rtcp/include/flexfec_sender.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "modules/rtp_rtcp/source/rtp_sender.h"
#include "modules/rtp_rtcp/source/rtp_sender_video.h"
#include "modules/rtp_rtcp/source/rtp_sequence_number_map.h"
@@ -44,14 +45,13 @@ namespace webrtc {
class FrameEncryptorInterface;
class RTPFragmentationHeader;
-class RtpRtcp;
class RtpTransportControllerSendInterface;
namespace webrtc_internal_rtp_video_sender {
// RTP state for a single simulcast stream. Internal to the implementation of
// RtpVideoSender.
struct RtpStreamSender {
- RtpStreamSender(std::unique_ptr<RtpRtcp> rtp_rtcp,
+ RtpStreamSender(std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp,
std::unique_ptr<RTPSenderVideo> sender_video,
std::unique_ptr<VideoFecGenerator> fec_generator);
~RtpStreamSender();
@@ -60,7 +60,7 @@ struct RtpStreamSender {
RtpStreamSender& operator=(RtpStreamSender&&) = default;
// Note: Needs pointer stability.
- std::unique_ptr<RtpRtcp> rtp_rtcp;
+ std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp;
std::unique_ptr<RTPSenderVideo> sender_video;
std::unique_ptr<VideoFecGenerator> fec_generator;
};
@@ -215,7 +215,7 @@ class RtpVideoSender : public RtpVideoSenderInterface,
// Effectively const map from SSRC to RtpRtcp, for all media SSRCs.
// This map is set at construction time and never changed, but it's
// non-trivial to make it properly const.
- std::map<uint32_t, RtpRtcp*> ssrc_to_rtp_module_;
+ std::map<uint32_t, RtpRtcpInterface*> ssrc_to_rtp_module_;
RTC_DISALLOW_COPY_AND_ASSIGN(RtpVideoSender);
};
diff --git a/chromium/third_party/webrtc/call/rtp_video_sender_unittest.cc b/chromium/third_party/webrtc/call/rtp_video_sender_unittest.cc
index a87196111a0..8a88a24e3ba 100644
--- a/chromium/third_party/webrtc/call/rtp_video_sender_unittest.cc
+++ b/chromium/third_party/webrtc/call/rtp_video_sender_unittest.cc
@@ -56,7 +56,7 @@ const int kDependencyDescriptorExtensionId = 8;
class MockRtcpIntraFrameObserver : public RtcpIntraFrameObserver {
public:
- MOCK_METHOD1(OnReceivedIntraFrameRequest, void(uint32_t));
+ MOCK_METHOD(void, OnReceivedIntraFrameRequest, (uint32_t), (override));
};
RtpSenderObservers CreateObservers(
@@ -361,8 +361,10 @@ TEST(RtpVideoSenderTest, CreateWithPreviousStates) {
TEST(RtpVideoSenderTest, FrameCountCallbacks) {
class MockFrameCountObserver : public FrameCountObserver {
public:
- MOCK_METHOD2(FrameCountUpdated,
- void(const FrameCounts& frame_counts, uint32_t ssrc));
+ MOCK_METHOD(void,
+ FrameCountUpdated,
+ (const FrameCounts& frame_counts, uint32_t ssrc),
+ (override));
} callback;
RtpVideoSenderTestFixture test({kSsrc1}, {kRtxSsrc1}, kPayloadType, {},
@@ -676,8 +678,6 @@ TEST(RtpVideoSenderTest, EarlyRetransmits) {
}
TEST(RtpVideoSenderTest, SupportsDependencyDescriptor) {
- test::ScopedFieldTrials trials("WebRTC-GenericDescriptor/Enabled/");
-
RtpVideoSenderTestFixture test({kSsrc1}, {}, kPayloadType, {});
test.router()->SetActive(true);
@@ -705,9 +705,9 @@ TEST(RtpVideoSenderTest, SupportsDependencyDescriptor) {
codec_specific.template_structure.emplace();
codec_specific.template_structure->num_decode_targets = 1;
codec_specific.template_structure->templates = {
- GenericFrameInfo::Builder().T(0).Dtis("S").Build(),
- GenericFrameInfo::Builder().T(0).Dtis("S").Fdiffs({2}).Build(),
- GenericFrameInfo::Builder().T(1).Dtis("D").Fdiffs({1}).Build(),
+ FrameDependencyTemplate().T(0).Dtis("S"),
+ FrameDependencyTemplate().T(0).Dtis("S").FrameDiffs({2}),
+ FrameDependencyTemplate().T(1).Dtis("D").FrameDiffs({1}),
};
// Send two tiny images, mapping to single RTP packets.
@@ -742,8 +742,6 @@ TEST(RtpVideoSenderTest, SupportsDependencyDescriptor) {
}
TEST(RtpVideoSenderTest, SupportsStoppingUsingDependencyDescriptor) {
- test::ScopedFieldTrials trials("WebRTC-GenericDescriptor/Enabled/");
-
RtpVideoSenderTestFixture test({kSsrc1}, {}, kPayloadType, {});
test.router()->SetActive(true);
@@ -771,9 +769,9 @@ TEST(RtpVideoSenderTest, SupportsStoppingUsingDependencyDescriptor) {
codec_specific.template_structure.emplace();
codec_specific.template_structure->num_decode_targets = 1;
codec_specific.template_structure->templates = {
- GenericFrameInfo::Builder().T(0).Dtis("S").Build(),
- GenericFrameInfo::Builder().T(0).Dtis("S").Fdiffs({2}).Build(),
- GenericFrameInfo::Builder().T(1).Dtis("D").Fdiffs({1}).Build(),
+ FrameDependencyTemplate().T(0).Dtis("S"),
+ FrameDependencyTemplate().T(0).Dtis("S").FrameDiffs({2}),
+ FrameDependencyTemplate().T(1).Dtis("D").FrameDiffs({1}),
};
// Send two tiny images, mapping to single RTP packets.
diff --git a/chromium/third_party/webrtc/call/test/mock_audio_send_stream.h b/chromium/third_party/webrtc/call/test/mock_audio_send_stream.h
index 489e826d0eb..4164dd550e1 100644
--- a/chromium/third_party/webrtc/call/test/mock_audio_send_stream.h
+++ b/chromium/third_party/webrtc/call/test/mock_audio_send_stream.h
@@ -21,23 +21,26 @@ namespace test {
class MockAudioSendStream : public AudioSendStream {
public:
- MOCK_CONST_METHOD0(GetConfig, const webrtc::AudioSendStream::Config&());
- MOCK_METHOD1(Reconfigure, void(const Config& config));
- MOCK_METHOD0(Start, void());
- MOCK_METHOD0(Stop, void());
+ MOCK_METHOD(const webrtc::AudioSendStream::Config&,
+ GetConfig,
+ (),
+ (const, override));
+ MOCK_METHOD(void, Reconfigure, (const Config& config), (override));
+ MOCK_METHOD(void, Start, (), (override));
+ MOCK_METHOD(void, Stop, (), (override));
// GMock doesn't like move-only types, such as std::unique_ptr.
- virtual void SendAudioData(std::unique_ptr<webrtc::AudioFrame> audio_frame) {
+ void SendAudioData(std::unique_ptr<webrtc::AudioFrame> audio_frame) override {
SendAudioDataForMock(audio_frame.get());
}
- MOCK_METHOD1(SendAudioDataForMock, void(webrtc::AudioFrame* audio_frame));
- MOCK_METHOD4(SendTelephoneEvent,
- bool(int payload_type,
- int payload_frequency,
- int event,
- int duration_ms));
- MOCK_METHOD1(SetMuted, void(bool muted));
- MOCK_CONST_METHOD0(GetStats, Stats());
- MOCK_CONST_METHOD1(GetStats, Stats(bool has_remote_tracks));
+ MOCK_METHOD(void, SendAudioDataForMock, (webrtc::AudioFrame*));
+ MOCK_METHOD(
+ bool,
+ SendTelephoneEvent,
+ (int payload_type, int payload_frequency, int event, int duration_ms),
+ (override));
+ MOCK_METHOD(void, SetMuted, (bool muted), (override));
+ MOCK_METHOD(Stats, GetStats, (), (const, override));
+ MOCK_METHOD(Stats, GetStats, (bool has_remote_tracks), (const, override));
};
} // namespace test
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/call/test/mock_bitrate_allocator.h b/chromium/third_party/webrtc/call/test/mock_bitrate_allocator.h
index f00ed79c59f..b08916fe4fc 100644
--- a/chromium/third_party/webrtc/call/test/mock_bitrate_allocator.h
+++ b/chromium/third_party/webrtc/call/test/mock_bitrate_allocator.h
@@ -18,10 +18,15 @@
namespace webrtc {
class MockBitrateAllocator : public BitrateAllocatorInterface {
public:
- MOCK_METHOD2(AddObserver,
- void(BitrateAllocatorObserver*, MediaStreamAllocationConfig));
- MOCK_METHOD1(RemoveObserver, void(BitrateAllocatorObserver*));
- MOCK_CONST_METHOD1(GetStartBitrate, int(BitrateAllocatorObserver*));
+ MOCK_METHOD(void,
+ AddObserver,
+ (BitrateAllocatorObserver*, MediaStreamAllocationConfig),
+ (override));
+ MOCK_METHOD(void, RemoveObserver, (BitrateAllocatorObserver*), (override));
+ MOCK_METHOD(int,
+ GetStartBitrate,
+ (BitrateAllocatorObserver*),
+ (const, override));
};
} // namespace webrtc
#endif // CALL_TEST_MOCK_BITRATE_ALLOCATOR_H_
diff --git a/chromium/third_party/webrtc/call/test/mock_rtp_packet_sink_interface.h b/chromium/third_party/webrtc/call/test/mock_rtp_packet_sink_interface.h
index adc804f941b..e6d14f05c5d 100644
--- a/chromium/third_party/webrtc/call/test/mock_rtp_packet_sink_interface.h
+++ b/chromium/third_party/webrtc/call/test/mock_rtp_packet_sink_interface.h
@@ -17,7 +17,7 @@ namespace webrtc {
class MockRtpPacketSink : public RtpPacketSinkInterface {
public:
- MOCK_METHOD1(OnRtpPacket, void(const RtpPacketReceived&));
+ MOCK_METHOD(void, OnRtpPacket, (const RtpPacketReceived&), (override));
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/call/test/mock_rtp_transport_controller_send.h b/chromium/third_party/webrtc/call/test/mock_rtp_transport_controller_send.h
index afc8400f73a..308c087a408 100644
--- a/chromium/third_party/webrtc/call/test/mock_rtp_transport_controller_send.h
+++ b/chromium/third_party/webrtc/call/test/mock_rtp_transport_controller_send.h
@@ -32,45 +32,73 @@ namespace webrtc {
class MockRtpTransportControllerSend
: public RtpTransportControllerSendInterface {
public:
- MOCK_METHOD10(
- CreateRtpVideoSender,
- RtpVideoSenderInterface*(std::map<uint32_t, RtpState>,
- const std::map<uint32_t, RtpPayloadState>&,
- const RtpConfig&,
- int rtcp_report_interval_ms,
- Transport*,
- const RtpSenderObservers&,
- RtcEventLog*,
- std::unique_ptr<FecController>,
- const RtpSenderFrameEncryptionConfig&,
- rtc::scoped_refptr<FrameTransformerInterface>));
- MOCK_METHOD1(DestroyRtpVideoSender, void(RtpVideoSenderInterface*));
- MOCK_METHOD0(GetWorkerQueue, rtc::TaskQueue*());
- MOCK_METHOD0(packet_router, PacketRouter*());
- MOCK_METHOD0(network_state_estimate_observer,
- NetworkStateEstimateObserver*());
- MOCK_METHOD0(transport_feedback_observer, TransportFeedbackObserver*());
- MOCK_METHOD0(packet_sender, RtpPacketSender*());
- MOCK_METHOD1(SetAllocatedSendBitrateLimits, void(BitrateAllocationLimits));
- MOCK_METHOD1(SetPacingFactor, void(float));
- MOCK_METHOD1(SetQueueTimeLimit, void(int));
- MOCK_METHOD0(GetStreamFeedbackProvider, StreamFeedbackProvider*());
- MOCK_METHOD1(RegisterTargetTransferRateObserver,
- void(TargetTransferRateObserver*));
- MOCK_METHOD2(OnNetworkRouteChanged,
- void(const std::string&, const rtc::NetworkRoute&));
- MOCK_METHOD1(OnNetworkAvailability, void(bool));
- MOCK_METHOD0(GetBandwidthObserver, RtcpBandwidthObserver*());
- MOCK_CONST_METHOD0(GetPacerQueuingDelayMs, int64_t());
- MOCK_CONST_METHOD0(GetFirstPacketTime, absl::optional<Timestamp>());
- MOCK_METHOD1(EnablePeriodicAlrProbing, void(bool));
- MOCK_METHOD1(OnSentPacket, void(const rtc::SentPacket&));
- MOCK_METHOD1(SetSdpBitrateParameters, void(const BitrateConstraints&));
- MOCK_METHOD1(SetClientBitratePreferences, void(const BitrateSettings&));
- MOCK_METHOD1(OnTransportOverheadChanged, void(size_t));
- MOCK_METHOD1(AccountForAudioPacketsInPacedSender, void(bool));
- MOCK_METHOD0(IncludeOverheadInPacedSender, void());
- MOCK_METHOD1(OnReceivedPacket, void(const ReceivedPacket&));
+ MOCK_METHOD(RtpVideoSenderInterface*,
+ CreateRtpVideoSender,
+ ((std::map<uint32_t, RtpState>),
+ (const std::map<uint32_t, RtpPayloadState>&),
+ const RtpConfig&,
+ int rtcp_report_interval_ms,
+ Transport*,
+ const RtpSenderObservers&,
+ RtcEventLog*,
+ std::unique_ptr<FecController>,
+ const RtpSenderFrameEncryptionConfig&,
+ rtc::scoped_refptr<FrameTransformerInterface>),
+ (override));
+ MOCK_METHOD(void,
+ DestroyRtpVideoSender,
+ (RtpVideoSenderInterface*),
+ (override));
+ MOCK_METHOD(rtc::TaskQueue*, GetWorkerQueue, (), (override));
+ MOCK_METHOD(PacketRouter*, packet_router, (), (override));
+ MOCK_METHOD(NetworkStateEstimateObserver*,
+ network_state_estimate_observer,
+ (),
+ (override));
+ MOCK_METHOD(TransportFeedbackObserver*,
+ transport_feedback_observer,
+ (),
+ (override));
+ MOCK_METHOD(RtpPacketSender*, packet_sender, (), (override));
+ MOCK_METHOD(void,
+ SetAllocatedSendBitrateLimits,
+ (BitrateAllocationLimits),
+ (override));
+ MOCK_METHOD(void, SetPacingFactor, (float), (override));
+ MOCK_METHOD(void, SetQueueTimeLimit, (int), (override));
+ MOCK_METHOD(StreamFeedbackProvider*,
+ GetStreamFeedbackProvider,
+ (),
+ (override));
+ MOCK_METHOD(void,
+ RegisterTargetTransferRateObserver,
+ (TargetTransferRateObserver*),
+ (override));
+ MOCK_METHOD(void,
+ OnNetworkRouteChanged,
+ (const std::string&, const rtc::NetworkRoute&),
+ (override));
+ MOCK_METHOD(void, OnNetworkAvailability, (bool), (override));
+ MOCK_METHOD(RtcpBandwidthObserver*, GetBandwidthObserver, (), (override));
+ MOCK_METHOD(int64_t, GetPacerQueuingDelayMs, (), (const, override));
+ MOCK_METHOD(absl::optional<Timestamp>,
+ GetFirstPacketTime,
+ (),
+ (const, override));
+ MOCK_METHOD(void, EnablePeriodicAlrProbing, (bool), (override));
+ MOCK_METHOD(void, OnSentPacket, (const rtc::SentPacket&), (override));
+ MOCK_METHOD(void,
+ SetSdpBitrateParameters,
+ (const BitrateConstraints&),
+ (override));
+ MOCK_METHOD(void,
+ SetClientBitratePreferences,
+ (const BitrateSettings&),
+ (override));
+ MOCK_METHOD(void, OnTransportOverheadChanged, (size_t), (override));
+ MOCK_METHOD(void, AccountForAudioPacketsInPacedSender, (bool), (override));
+ MOCK_METHOD(void, IncludeOverheadInPacedSender, (), (override));
+ MOCK_METHOD(void, OnReceivedPacket, (const ReceivedPacket&), (override));
};
} // namespace webrtc
#endif // CALL_TEST_MOCK_RTP_TRANSPORT_CONTROLLER_SEND_H_
diff --git a/chromium/third_party/webrtc/call/video_send_stream.h b/chromium/third_party/webrtc/call/video_send_stream.h
index 392c955f477..715d5d73e7b 100644
--- a/chromium/third_party/webrtc/call/video_send_stream.h
+++ b/chromium/third_party/webrtc/call/video_send_stream.h
@@ -18,10 +18,12 @@
#include <vector>
#include "absl/types/optional.h"
+#include "api/adaptation/resource.h"
#include "api/call/transport.h"
#include "api/crypto/crypto_options.h"
#include "api/frame_transformer_interface.h"
#include "api/rtp_parameters.h"
+#include "api/scoped_refptr.h"
#include "api/video/video_content_type.h"
#include "api/video/video_frame.h"
#include "api/video/video_sink_interface.h"
@@ -215,6 +217,15 @@ class VideoSendStream {
// When a stream is stopped, it can't receive, process or deliver packets.
virtual void Stop() = 0;
+ // If the resource is overusing, the VideoSendStream will try to reduce
+ // resolution or frame rate until no resource is overusing.
+ // TODO(https://crbug.com/webrtc/11565): When the ResourceAdaptationProcessor
+ // is moved to Call this method could be deleted altogether in favor of
+ // Call-level APIs only.
+ virtual void AddAdaptationResource(rtc::scoped_refptr<Resource> resource) = 0;
+ virtual std::vector<rtc::scoped_refptr<Resource>>
+ GetAdaptationResources() = 0;
+
virtual void SetSource(
rtc::VideoSourceInterface<webrtc::VideoFrame>* source,
const DegradationPreference& degradation_preference) = 0;
diff --git a/chromium/third_party/webrtc/common_audio/BUILD.gn b/chromium/third_party/webrtc/common_audio/BUILD.gn
index 72eed1f0033..4077486d870 100644
--- a/chromium/third_party/webrtc/common_audio/BUILD.gn
+++ b/chromium/third_party/webrtc/common_audio/BUILD.gn
@@ -56,8 +56,8 @@ rtc_library("common_audio") {
"../system_wrappers",
"../system_wrappers:cpu_features_api",
"third_party/ooura:fft_size_256",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
defines = []
diff --git a/chromium/third_party/webrtc/common_audio/OWNERS b/chromium/third_party/webrtc/common_audio/OWNERS
index 7c9c9af12a4..ba1c8b11f44 100644
--- a/chromium/third_party/webrtc/common_audio/OWNERS
+++ b/chromium/third_party/webrtc/common_audio/OWNERS
@@ -1,2 +1,3 @@
henrik.lundin@webrtc.org
kwiberg@webrtc.org
+peah@webrtc.org
diff --git a/chromium/third_party/webrtc/common_audio/channel_buffer_unittest.cc b/chromium/third_party/webrtc/common_audio/channel_buffer_unittest.cc
index 8ec42346d1c..a8b64891d6f 100644
--- a/chromium/third_party/webrtc/common_audio/channel_buffer_unittest.cc
+++ b/chromium/third_party/webrtc/common_audio/channel_buffer_unittest.cc
@@ -53,12 +53,12 @@ TEST(IFChannelBufferTest, SettingNumChannelsOfOneChannelBufferSetsTheOther) {
}
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST(ChannelBufferTest, SetNumChannelsDeathTest) {
+TEST(ChannelBufferDeathTest, SetNumChannelsDeathTest) {
ChannelBuffer<float> chb(kNumFrames, kMono);
RTC_EXPECT_DEATH(chb.set_num_channels(kStereo), "num_channels");
}
-TEST(IFChannelBufferTest, SetNumChannelsDeathTest) {
+TEST(IFChannelBufferDeathTest, SetNumChannelsDeathTest) {
IFChannelBuffer ifchb(kNumFrames, kMono);
RTC_EXPECT_DEATH(ifchb.ibuf()->set_num_channels(kStereo), "num_channels");
}
diff --git a/chromium/third_party/webrtc/common_audio/mocks/mock_smoothing_filter.h b/chromium/third_party/webrtc/common_audio/mocks/mock_smoothing_filter.h
index 712049fa6a3..9df49dd11a6 100644
--- a/chromium/third_party/webrtc/common_audio/mocks/mock_smoothing_filter.h
+++ b/chromium/third_party/webrtc/common_audio/mocks/mock_smoothing_filter.h
@@ -18,9 +18,9 @@ namespace webrtc {
class MockSmoothingFilter : public SmoothingFilter {
public:
- MOCK_METHOD1(AddSample, void(float));
- MOCK_METHOD0(GetAverage, absl::optional<float>());
- MOCK_METHOD1(SetTimeConstantMs, bool(int));
+ MOCK_METHOD(void, AddSample, (float), (override));
+ MOCK_METHOD(absl::optional<float>, GetAverage, (), (override));
+ MOCK_METHOD(bool, SetTimeConstantMs, (int), (override));
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/common_audio/resampler/push_resampler_unittest.cc b/chromium/third_party/webrtc/common_audio/resampler/push_resampler_unittest.cc
index 61b9725b3aa..4724833fbb1 100644
--- a/chromium/third_party/webrtc/common_audio/resampler/push_resampler_unittest.cc
+++ b/chromium/third_party/webrtc/common_audio/resampler/push_resampler_unittest.cc
@@ -31,19 +31,19 @@ TEST(PushResamplerTest, VerifiesInputParameters) {
}
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST(PushResamplerTest, VerifiesBadInputParameters1) {
+TEST(PushResamplerDeathTest, VerifiesBadInputParameters1) {
PushResampler<int16_t> resampler;
RTC_EXPECT_DEATH(resampler.InitializeIfNeeded(-1, 16000, 1),
"src_sample_rate_hz");
}
-TEST(PushResamplerTest, VerifiesBadInputParameters2) {
+TEST(PushResamplerDeathTest, VerifiesBadInputParameters2) {
PushResampler<int16_t> resampler;
RTC_EXPECT_DEATH(resampler.InitializeIfNeeded(16000, -1, 1),
"dst_sample_rate_hz");
}
-TEST(PushResamplerTest, VerifiesBadInputParameters3) {
+TEST(PushResamplerDeathTest, VerifiesBadInputParameters3) {
PushResampler<int16_t> resampler;
RTC_EXPECT_DEATH(resampler.InitializeIfNeeded(16000, 16000, 0),
"num_channels");
diff --git a/chromium/third_party/webrtc/common_audio/resampler/sinc_resampler_unittest.cc b/chromium/third_party/webrtc/common_audio/resampler/sinc_resampler_unittest.cc
index 7bcd7f146ec..b067b23b880 100644
--- a/chromium/third_party/webrtc/common_audio/resampler/sinc_resampler_unittest.cc
+++ b/chromium/third_party/webrtc/common_audio/resampler/sinc_resampler_unittest.cc
@@ -40,7 +40,7 @@ static const double kKernelInterpolationFactor = 0.5;
// Helper class to ensure ChunkedResample() functions properly.
class MockSource : public SincResamplerCallback {
public:
- MOCK_METHOD2(Run, void(size_t frames, float* destination));
+ MOCK_METHOD(void, Run, (size_t frames, float* destination), (override));
};
ACTION(ClearBuffer) {
diff --git a/chromium/third_party/webrtc/common_audio/third_party/ooura/fft_size_128/ooura_fft.cc b/chromium/third_party/webrtc/common_audio/third_party/ooura/fft_size_128/ooura_fft.cc
index 2918374bbac..6b6d6f1fd79 100644
--- a/chromium/third_party/webrtc/common_audio/third_party/ooura/fft_size_128/ooura_fft.cc
+++ b/chromium/third_party/webrtc/common_audio/third_party/ooura/fft_size_128/ooura_fft.cc
@@ -313,6 +313,14 @@ static void rftbsub_128_C(float* a) {
} // namespace
+OouraFft::OouraFft(bool sse2_available) {
+#if defined(WEBRTC_ARCH_X86_FAMILY)
+ use_sse2_ = sse2_available;
+#else
+ use_sse2_ = false;
+#endif
+}
+
OouraFft::OouraFft() {
#if defined(WEBRTC_ARCH_X86_FAMILY)
use_sse2_ = (WebRtc_GetCPUInfo(kSSE2) != 0);
diff --git a/chromium/third_party/webrtc/common_audio/third_party/ooura/fft_size_128/ooura_fft.h b/chromium/third_party/webrtc/common_audio/third_party/ooura/fft_size_128/ooura_fft.h
index 0cdd6aa66f4..8273dfe58ee 100644
--- a/chromium/third_party/webrtc/common_audio/third_party/ooura/fft_size_128/ooura_fft.h
+++ b/chromium/third_party/webrtc/common_audio/third_party/ooura/fft_size_128/ooura_fft.h
@@ -38,6 +38,10 @@ void rftbsub_128_neon(float* a);
class OouraFft {
public:
+ // Ctor allowing the availability of SSE2 support to be specified.
+ explicit OouraFft(bool sse2_available);
+
+ // Deprecated: This Ctor will soon be removed.
OouraFft();
~OouraFft();
void Fft(float* a) const;
diff --git a/chromium/third_party/webrtc/common_audio/vad/mock/mock_vad.h b/chromium/third_party/webrtc/common_audio/vad/mock/mock_vad.h
index afe80ef5e14..5a554ce1f92 100644
--- a/chromium/third_party/webrtc/common_audio/vad/mock/mock_vad.h
+++ b/chromium/third_party/webrtc/common_audio/vad/mock/mock_vad.h
@@ -18,14 +18,14 @@ namespace webrtc {
class MockVad : public Vad {
public:
- virtual ~MockVad() { Die(); }
- MOCK_METHOD0(Die, void());
+ ~MockVad() override { Die(); }
+ MOCK_METHOD(void, Die, ());
- MOCK_METHOD3(VoiceActivity,
- enum Activity(const int16_t* audio,
- size_t num_samples,
- int sample_rate_hz));
- MOCK_METHOD0(Reset, void());
+ MOCK_METHOD(enum Activity,
+ VoiceActivity,
+ (const int16_t* audio, size_t num_samples, int sample_rate_hz),
+ (override));
+ MOCK_METHOD(void, Reset, (), (override));
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/common_video/BUILD.gn b/chromium/third_party/webrtc/common_video/BUILD.gn
index ddf4c2d4959..9ae87d242da 100644
--- a/chromium/third_party/webrtc/common_video/BUILD.gn
+++ b/chromium/third_party/webrtc/common_video/BUILD.gn
@@ -60,9 +60,9 @@ rtc_library("common_video") {
"../rtc_base:safe_minmax",
"../rtc_base/system:rtc_export",
"../system_wrappers:metrics",
- "//third_party/abseil-cpp/absl/types:optional",
"//third_party/libyuv",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
if (rtc_include_tests) {
@@ -113,6 +113,7 @@ if (rtc_include_tests) {
"../test:test_support",
"../test:video_test_common",
"//testing/gtest",
+ "//third_party/abseil-cpp/absl/types:optional",
"//third_party/libyuv",
]
diff --git a/chromium/third_party/webrtc/common_video/generic_frame_descriptor/BUILD.gn b/chromium/third_party/webrtc/common_video/generic_frame_descriptor/BUILD.gn
index 05a4e2396c6..ab97e887f27 100644
--- a/chromium/third_party/webrtc/common_video/generic_frame_descriptor/BUILD.gn
+++ b/chromium/third_party/webrtc/common_video/generic_frame_descriptor/BUILD.gn
@@ -19,6 +19,8 @@ rtc_library("generic_frame_descriptor") {
"../../api/transport/rtp:dependency_descriptor",
"../../api/video:video_codec_constants",
"../../rtc_base:checks",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/container:inlined_vector",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
diff --git a/chromium/third_party/webrtc/common_video/generic_frame_descriptor/generic_frame_info.cc b/chromium/third_party/webrtc/common_video/generic_frame_descriptor/generic_frame_info.cc
index ca610727993..af66bbaf67f 100644
--- a/chromium/third_party/webrtc/common_video/generic_frame_descriptor/generic_frame_info.cc
+++ b/chromium/third_party/webrtc/common_video/generic_frame_descriptor/generic_frame_info.cc
@@ -15,33 +15,6 @@
namespace webrtc {
-absl::InlinedVector<DecodeTargetIndication, 10>
-GenericFrameInfo::DecodeTargetInfo(absl::string_view indication_symbols) {
- absl::InlinedVector<DecodeTargetIndication, 10> decode_targets;
- for (char symbol : indication_symbols) {
- DecodeTargetIndication indication;
- switch (symbol) {
- case '-':
- indication = DecodeTargetIndication::kNotPresent;
- break;
- case 'D':
- indication = DecodeTargetIndication::kDiscardable;
- break;
- case 'R':
- indication = DecodeTargetIndication::kRequired;
- break;
- case 'S':
- indication = DecodeTargetIndication::kSwitch;
- break;
- default:
- RTC_NOTREACHED();
- }
- decode_targets.push_back(indication);
- }
-
- return decode_targets;
-}
-
GenericFrameInfo::GenericFrameInfo() = default;
GenericFrameInfo::GenericFrameInfo(const GenericFrameInfo&) = default;
GenericFrameInfo::~GenericFrameInfo() = default;
@@ -65,14 +38,8 @@ GenericFrameInfo::Builder& GenericFrameInfo::Builder::S(int spatial_id) {
GenericFrameInfo::Builder& GenericFrameInfo::Builder::Dtis(
absl::string_view indication_symbols) {
- info_.decode_target_indications = DecodeTargetInfo(indication_symbols);
- return *this;
-}
-
-GenericFrameInfo::Builder& GenericFrameInfo::Builder::Fdiffs(
- std::initializer_list<int> frame_diffs) {
- info_.frame_diffs.insert(info_.frame_diffs.end(), frame_diffs.begin(),
- frame_diffs.end());
+ info_.decode_target_indications =
+ webrtc_impl::StringToDecodeTargetIndications(indication_symbols);
return *this;
}
diff --git a/chromium/third_party/webrtc/common_video/generic_frame_descriptor/generic_frame_info.h b/chromium/third_party/webrtc/common_video/generic_frame_descriptor/generic_frame_info.h
index b602ee06a65..19f413b5d4e 100644
--- a/chromium/third_party/webrtc/common_video/generic_frame_descriptor/generic_frame_info.h
+++ b/chromium/third_party/webrtc/common_video/generic_frame_descriptor/generic_frame_info.h
@@ -11,7 +11,9 @@
#ifndef COMMON_VIDEO_GENERIC_FRAME_DESCRIPTOR_GENERIC_FRAME_INFO_H_
#define COMMON_VIDEO_GENERIC_FRAME_DESCRIPTOR_GENERIC_FRAME_INFO_H_
+#include <bitset>
#include <initializer_list>
+#include <vector>
#include "absl/container/inlined_vector.h"
#include "absl/strings/string_view.h"
@@ -31,17 +33,15 @@ struct CodecBufferUsage {
};
struct GenericFrameInfo : public FrameDependencyTemplate {
- static absl::InlinedVector<DecodeTargetIndication, 10> DecodeTargetInfo(
- absl::string_view indication_symbols);
-
class Builder;
GenericFrameInfo();
GenericFrameInfo(const GenericFrameInfo&);
~GenericFrameInfo();
- int64_t frame_id = 0;
absl::InlinedVector<CodecBufferUsage, kMaxEncoderBuffers> encoder_buffers;
+ std::vector<bool> part_of_chain;
+ std::bitset<32> active_decode_targets = ~uint32_t{0};
};
class GenericFrameInfo::Builder {
@@ -53,7 +53,6 @@ class GenericFrameInfo::Builder {
Builder& T(int temporal_id);
Builder& S(int spatial_id);
Builder& Dtis(absl::string_view indication_symbols);
- Builder& Fdiffs(std::initializer_list<int> frame_diffs);
private:
GenericFrameInfo info_;
diff --git a/chromium/third_party/webrtc/docs/faq.md b/chromium/third_party/webrtc/docs/faq.md
index ed9143812a1..9f31f31ee4f 100644
--- a/chromium/third_party/webrtc/docs/faq.md
+++ b/chromium/third_party/webrtc/docs/faq.md
@@ -231,7 +231,7 @@ Yes, you still have the right to redistribute and you still have a patent
license for Google's patents that cover the code that Google released.
-### What if my competitor uses the code and brings patent litigation against me for something unrelated to the code. Does he or she still have a patent license?
+### What if my competitor uses the code and brings patent litigation against me for something unrelated to the code. Do they still have a patent license?
-Yes, he/she still has the right to redistribute and he/she still has a patent
+Yes, they still have the right to redistribute and they still have a patent
license for Google's patents that cover the code that Google released.
diff --git a/chromium/third_party/webrtc/examples/BUILD.gn b/chromium/third_party/webrtc/examples/BUILD.gn
index 4d6d14d0d99..ab3d5edfc9d 100644
--- a/chromium/third_party/webrtc/examples/BUILD.gn
+++ b/chromium/third_party/webrtc/examples/BUILD.gn
@@ -101,7 +101,7 @@ if (is_android) {
rtc_android_library("AppRTCMobile_javalib") {
testonly = true
- android_manifest_for_lint = "androidapp/AndroidManifest.xml"
+ android_manifest = "androidapp/AndroidManifest.xml"
sources = [
"androidapp/src/org/appspot/apprtc/AppRTCAudioManager.java",
@@ -180,10 +180,10 @@ if (is_android) {
"androidapp/res/layout/fragment_call.xml",
"androidapp/res/layout/fragment_hud.xml",
"androidapp/res/menu/connect_menu.xml",
- "androidapp/res/values/arrays.xml",
- "androidapp/res/values/strings.xml",
"androidapp/res/values-v17/styles.xml",
"androidapp/res/values-v21/styles.xml",
+ "androidapp/res/values/arrays.xml",
+ "androidapp/res/values/strings.xml",
"androidapp/res/xml/preferences.xml",
]
custom_package = "org.appspot.apprtc"
@@ -207,7 +207,10 @@ if (is_android) {
deps = [
":AppRTCMobile_javalib",
+ "../sdk/android:camera_java",
"../sdk/android:libjingle_peerconnection_java",
+ "../sdk/android:peerconnection_java",
+ "../sdk/android:video_api_java",
"../sdk/android:video_java",
"//third_party/android_support_test_runner:runner_java",
"//third_party/junit",
@@ -379,14 +382,18 @@ if (is_ios || (is_mac && target_cpu != "x86")) {
if (rtc_apprtcmobile_broadcast_extension) {
bundle_data("AppRTCMobileBroadcastUpload_extension_bundle") {
testonly = true
- public_deps = [ ":AppRTCMobileBroadcastUpload" ] # no-presubmit-check TODO(webrtc:8603)
+ public_deps = [ # no-presubmit-check TODO(webrtc:8603)
+ ":AppRTCMobileBroadcastUpload", # prevent code format
+ ]
sources = [ "$root_out_dir/AppRTCMobileBroadcastUpload.appex" ]
outputs = [ "{{bundle_contents_dir}}/Plugins/{{source_file_part}}" ]
}
bundle_data("AppRTCMobileBroadcastSetupUI_extension_bundle") {
testonly = true
- public_deps = [ ":AppRTCMobileBroadcastSetupUI" ] # no-presubmit-check TODO(webrtc:8603)
+ public_deps = [ # no-presubmit-check TODO(webrtc:8603)
+ ":AppRTCMobileBroadcastSetupUI", # prevent code format
+ ]
sources = [ "$root_out_dir/AppRTCMobileBroadcastSetupUI.appex" ]
outputs = [ "{{bundle_contents_dir}}/Plugins/{{source_file_part}}" ]
}
@@ -859,6 +866,7 @@ if (is_android) {
deps = [
":AppRTCMobile_javalib",
+ "../sdk/android:peerconnection_java",
"//base:base_java_test_support",
"//third_party/google-truth:google_truth_java",
]
diff --git a/chromium/third_party/webrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCAudioManager.java b/chromium/third_party/webrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCAudioManager.java
index 7ae3d838dd9..c32ab964ad1 100644
--- a/chromium/third_party/webrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCAudioManager.java
+++ b/chromium/third_party/webrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCAudioManager.java
@@ -185,8 +185,8 @@ public class AppRTCAudioManager {
// Note that, the sensor will not be active until start() has been called.
proximitySensor = AppRTCProximitySensor.create(context,
// This method will be called each time a state change is detected.
- // Example: user holds his hand over the device (closer than ~5 cm),
- // or removes his hand from the device.
+ // Example: user holds their hand over the device (closer than ~5 cm),
+ // or removes their hand from the device.
this ::onProximitySensorChangedState);
Log.d(TAG, "defaultAudioDevice: " + defaultAudioDevice);
diff --git a/chromium/third_party/webrtc/examples/androidnativeapi/BUILD.gn b/chromium/third_party/webrtc/examples/androidnativeapi/BUILD.gn
index 9c114e859c4..2b2c71b9e8a 100644
--- a/chromium/third_party/webrtc/examples/androidnativeapi/BUILD.gn
+++ b/chromium/third_party/webrtc/examples/androidnativeapi/BUILD.gn
@@ -16,6 +16,7 @@ if (is_android) {
deps = [
":resources",
"//modules/audio_device:audio_device_java",
+ "//rtc_base:base_java",
"//sdk/android:camera_java",
"//sdk/android:surfaceviewrenderer_java",
"//sdk/android:video_api_java",
diff --git a/chromium/third_party/webrtc/examples/peerconnection/client/peer_connection_client.cc b/chromium/third_party/webrtc/examples/peerconnection/client/peer_connection_client.cc
index 3203941672e..a463ceed46c 100644
--- a/chromium/third_party/webrtc/examples/peerconnection/client/peer_connection_client.cc
+++ b/chromium/third_party/webrtc/examples/peerconnection/client/peer_connection_client.cc
@@ -43,9 +43,15 @@ rtc::AsyncSocket* CreateClientSocket(int family) {
} // namespace
PeerConnectionClient::PeerConnectionClient()
- : callback_(NULL), resolver_(NULL), state_(NOT_CONNECTED), my_id_(-1) {}
-
-PeerConnectionClient::~PeerConnectionClient() {}
+ : MessageHandler(false),
+ callback_(NULL),
+ resolver_(NULL),
+ state_(NOT_CONNECTED),
+ my_id_(-1) {}
+
+PeerConnectionClient::~PeerConnectionClient() {
+ rtc::Thread::Current()->Clear(this);
+}
void PeerConnectionClient::InitSocketSignals() {
RTC_DCHECK(control_socket_.get() != NULL);
diff --git a/chromium/third_party/webrtc/logging/BUILD.gn b/chromium/third_party/webrtc/logging/BUILD.gn
index 28176d2583a..c1edd69680c 100644
--- a/chromium/third_party/webrtc/logging/BUILD.gn
+++ b/chromium/third_party/webrtc/logging/BUILD.gn
@@ -53,8 +53,8 @@ rtc_library("rtc_event_pacing") {
deps = [
"../api:scoped_refptr",
"../api/rtc_event_log",
- "//third_party/abseil-cpp/absl/memory",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
}
rtc_library("rtc_event_audio") {
@@ -75,8 +75,8 @@ rtc_library("rtc_event_audio") {
"../api/rtc_event_log",
"../modules/audio_coding:audio_network_adaptor_config",
"../rtc_base:checks",
- "//third_party/abseil-cpp/absl/memory",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
}
rtc_library("rtc_event_bwe") {
@@ -101,6 +101,8 @@ rtc_library("rtc_event_bwe") {
"../api/rtc_event_log",
"../api/units:data_rate",
"../modules/remote_bitrate_estimator",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -119,6 +121,8 @@ rtc_library("rtc_event_generic_packet_events") {
deps = [
"../api/rtc_event_log",
"../rtc_base:timeutils",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -143,8 +147,8 @@ rtc_library("rtc_event_rtp_rtcp") {
"../modules/rtp_rtcp:rtp_rtcp_format",
"../rtc_base:checks",
"../rtc_base:rtc_base_approved",
- "//third_party/abseil-cpp/absl/memory",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
}
rtc_library("rtc_event_video") {
@@ -160,8 +164,8 @@ rtc_library("rtc_event_video") {
"../api:scoped_refptr",
"../api/rtc_event_log",
"../rtc_base:checks",
- "//third_party/abseil-cpp/absl/memory",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
}
# TODO(eladalon): Break down into (1) encoder and (2) decoder; we don't need
@@ -187,6 +191,8 @@ rtc_library("rtc_event_log_impl_encoder") {
"../rtc_base:checks",
"../rtc_base:ignore_wundef",
"../rtc_base:rtc_base_approved",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
@@ -245,8 +251,8 @@ if (rtc_enable_protobuf) {
"../rtc_base:rtc_task_queue",
"../rtc_base:safe_minmax",
"../rtc_base/synchronization:sequence_checker",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
}
@@ -318,6 +324,8 @@ if (rtc_enable_protobuf) {
"../rtc_base:protobuf_utils",
"../rtc_base:rtc_base_approved",
"../rtc_base:rtc_numerics",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -369,6 +377,8 @@ if (rtc_enable_protobuf) {
"../test:fileutils",
"../test:test_support",
"//testing/gtest",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -417,8 +427,8 @@ rtc_library("ice_log") {
"../api:libjingle_peerconnection_api", # For api/dtls_transport_interface.h
"../api/rtc_event_log",
"../rtc_base:rtc_base_approved",
- "//third_party/abseil-cpp/absl/memory",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
}
if (rtc_include_tests) {
diff --git a/chromium/third_party/webrtc/logging/rtc_event_log/encoder/blob_encoding.h b/chromium/third_party/webrtc/logging/rtc_event_log/encoder/blob_encoding.h
index 4a38dc5d0b6..b5b589aaf6a 100644
--- a/chromium/third_party/webrtc/logging/rtc_event_log/encoder/blob_encoding.h
+++ b/chromium/third_party/webrtc/logging/rtc_event_log/encoder/blob_encoding.h
@@ -43,7 +43,7 @@ namespace webrtc {
//
// Note that the returned std::string might have been reserved for significantly
// more memory than it ends up using. If the caller to EncodeBlobs() intends
-// to store the result long-term, he should consider shrink_to_fit()-ing it.
+// to store the result long-term, they should consider shrink_to_fit()-ing it.
std::string EncodeBlobs(const std::vector<std::string>& blobs);
std::vector<absl::string_view> DecodeBlobs(absl::string_view encoded_blobs,
size_t num_of_blobs);
diff --git a/chromium/third_party/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_unittest.cc b/chromium/third_party/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_unittest.cc
index 30596216907..cf85775f527 100644
--- a/chromium/third_party/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_unittest.cc
+++ b/chromium/third_party/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_unittest.cc
@@ -675,13 +675,18 @@ TEST_P(RtcEventLogEncoderTest, RtcEventLoggingStarted) {
}
TEST_P(RtcEventLogEncoderTest, RtcEventLoggingStopped) {
- const int64_t timestamp_us = rtc::TimeMicros();
- std::string encoded = encoder_->EncodeLogEnd(timestamp_us);
+ const int64_t start_timestamp_us = rtc::TimeMicros();
+ const int64_t start_utc_time_us = rtc::TimeUTCMicros();
+ std::string encoded =
+ encoder_->EncodeLogStart(start_timestamp_us, start_utc_time_us);
+
+ const int64_t stop_timestamp_us = rtc::TimeMicros();
+ encoded += encoder_->EncodeLogEnd(stop_timestamp_us);
ASSERT_TRUE(parsed_log_.ParseString(encoded).ok());
const auto& stop_log_events = parsed_log_.stop_log_events();
ASSERT_EQ(stop_log_events.size(), 1u);
- verifier_.VerifyLoggedStopEvent(timestamp_us, stop_log_events[0]);
+ verifier_.VerifyLoggedStopEvent(stop_timestamp_us, stop_log_events[0]);
}
// TODO(eladalon/terelius): Test with multiple events in the batch.
diff --git a/chromium/third_party/webrtc/logging/rtc_event_log/mock/mock_rtc_event_log.h b/chromium/third_party/webrtc/logging/rtc_event_log/mock/mock_rtc_event_log.h
index 66a2065ed47..646831de272 100644
--- a/chromium/third_party/webrtc/logging/rtc_event_log/mock/mock_rtc_event_log.h
+++ b/chromium/third_party/webrtc/logging/rtc_event_log/mock/mock_rtc_event_log.h
@@ -21,20 +21,20 @@ namespace webrtc {
class MockRtcEventLog : public RtcEventLog {
public:
MockRtcEventLog();
- ~MockRtcEventLog();
+ ~MockRtcEventLog() override;
- virtual bool StartLogging(std::unique_ptr<RtcEventLogOutput> output,
- int64_t output_period_ms) {
- return StartLoggingProxy(output.get(), output_period_ms);
- }
- MOCK_METHOD2(StartLoggingProxy, bool(RtcEventLogOutput*, int64_t));
+ MOCK_METHOD(bool,
+ StartLogging,
+ (std::unique_ptr<RtcEventLogOutput> output,
+ int64_t output_period_ms),
+ (override));
- MOCK_METHOD0(StopLogging, void());
+ MOCK_METHOD(void, StopLogging, (), (override));
- virtual void Log(std::unique_ptr<RtcEvent> event) {
+ void Log(std::unique_ptr<RtcEvent> event) override {
return LogProxy(event.get());
}
- MOCK_METHOD1(LogProxy, void(RtcEvent*));
+ MOCK_METHOD(void, LogProxy, (RtcEvent*));
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/logging/rtc_event_log/rtc_event_log_parser.cc b/chromium/third_party/webrtc/logging/rtc_event_log/rtc_event_log_parser.cc
index 4016f84339a..c88207607cb 100644
--- a/chromium/third_party/webrtc/logging/rtc_event_log/rtc_event_log_parser.cc
+++ b/chromium/third_party/webrtc/logging/rtc_event_log/rtc_event_log_parser.cc
@@ -1076,6 +1076,7 @@ void ParsedRtcEventLog::Clear() {
first_timestamp_ = std::numeric_limits<int64_t>::max();
last_timestamp_ = std::numeric_limits<int64_t>::min();
+ first_log_segment_ = LogSegment(0, std::numeric_limits<int64_t>::max());
incoming_rtp_extensions_maps_.clear();
outgoing_rtp_extensions_maps_.clear();
@@ -1214,6 +1215,38 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::ParseStream(
StoreFirstAndLastTimestamp(generic_packets_sent_);
StoreFirstAndLastTimestamp(generic_packets_received_);
StoreFirstAndLastTimestamp(generic_acks_received_);
+ StoreFirstAndLastTimestamp(remote_estimate_events_);
+
+ // Stop events could be missing due to file size limits. If so, use the
+ // last event, or the next start timestamp if available.
+ // TODO(terelius): This could be improved. Instead of using the next start
+  // event, we could use the timestamp of the last previous regular event.
+ auto start_iter = start_log_events().begin();
+ auto stop_iter = stop_log_events().begin();
+ int64_t start_us = first_timestamp();
+ int64_t next_start_us = std::numeric_limits<int64_t>::max();
+ int64_t stop_us = std::numeric_limits<int64_t>::max();
+ if (start_iter != start_log_events().end()) {
+ start_us = std::min(start_us, start_iter->log_time_us());
+ ++start_iter;
+ if (start_iter != start_log_events().end())
+ next_start_us = start_iter->log_time_us();
+ }
+ if (stop_iter != stop_log_events().end()) {
+ stop_us = stop_iter->log_time_us();
+ }
+ stop_us = std::min(stop_us, next_start_us);
+ if (stop_us == std::numeric_limits<int64_t>::max() &&
+ last_timestamp() != std::numeric_limits<int64_t>::min()) {
+ stop_us = last_timestamp();
+ }
+ RTC_PARSE_CHECK_OR_RETURN_LE(start_us, stop_us);
+ first_log_segment_ = LogSegment(start_us, stop_us);
+
+ if (first_timestamp_ == std::numeric_limits<int64_t>::max() &&
+ last_timestamp_ == std::numeric_limits<int64_t>::min()) {
+ first_timestamp_ = last_timestamp_ = 0;
+ }
return status;
}
diff --git a/chromium/third_party/webrtc/logging/rtc_event_log/rtc_event_log_parser.h b/chromium/third_party/webrtc/logging/rtc_event_log/rtc_event_log_parser.h
index 7a162af897e..8d3351e815f 100644
--- a/chromium/third_party/webrtc/logging/rtc_event_log/rtc_event_log_parser.h
+++ b/chromium/third_party/webrtc/logging/rtc_event_log/rtc_event_log_parser.h
@@ -11,6 +11,7 @@
#define LOGGING_RTC_EVENT_LOG_RTC_EVENT_LOG_PARSER_H_
#include <iterator>
+#include <limits>
#include <map>
#include <set>
#include <sstream> // no-presubmit-check TODO(webrtc:8982)
@@ -193,6 +194,8 @@ class PacketView {
size_t size() const { return num_elements_; }
+ bool empty() const { return num_elements_ == 0; }
+
T& operator[](size_t i) {
auto elem_ptr = data_ + i * element_size_;
return *reinterpret_cast<T*>(elem_ptr);
@@ -330,6 +333,20 @@ class ParsedRtcEventLog {
PacketView<const LoggedRtpPacket> packet_view;
};
+ class LogSegment {
+ public:
+ LogSegment(int64_t start_time_us, int64_t stop_time_us)
+ : start_time_us_(start_time_us), stop_time_us_(stop_time_us) {}
+ int64_t start_time_ms() const { return start_time_us_ / 1000; }
+ int64_t start_time_us() const { return start_time_us_; }
+ int64_t stop_time_ms() const { return stop_time_us_ / 1000; }
+ int64_t stop_time_us() const { return stop_time_us_; }
+
+ private:
+ int64_t start_time_us_;
+ int64_t stop_time_us_;
+ };
+
static webrtc::RtpHeaderExtensionMap GetDefaultHeaderExtensionMap();
explicit ParsedRtcEventLog(
@@ -597,6 +614,8 @@ class ParsedRtcEventLog {
int64_t first_timestamp() const { return first_timestamp_; }
int64_t last_timestamp() const { return last_timestamp_; }
+ const LogSegment& first_log_segment() const { return first_log_segment_; }
+
std::vector<LoggedPacketInfo> GetPacketInfos(PacketDirection direction) const;
std::vector<LoggedPacketInfo> GetIncomingPacketInfos() const {
return GetPacketInfos(kIncomingPacket);
@@ -850,6 +869,9 @@ class ParsedRtcEventLog {
int64_t first_timestamp_;
int64_t last_timestamp_;
+ LogSegment first_log_segment_ =
+ LogSegment(0, std::numeric_limits<int64_t>::max());
+
// The extension maps are mutable to allow us to insert the default
// configuration when parsing an RTP header for an unconfigured stream.
// TODO(terelius): This is only used for the legacy format. Remove once we've
diff --git a/chromium/third_party/webrtc/logging/rtc_event_log/rtc_event_log_unittest.cc b/chromium/third_party/webrtc/logging/rtc_event_log/rtc_event_log_unittest.cc
index 579c6528804..e785d6160a0 100644
--- a/chromium/third_party/webrtc/logging/rtc_event_log/rtc_event_log_unittest.cc
+++ b/chromium/third_party/webrtc/logging/rtc_event_log/rtc_event_log_unittest.cc
@@ -739,6 +739,11 @@ void RtcEventLogSession::ReadAndVerifyLog() {
EXPECT_EQ(first_timestamp_ms_, parsed_log.first_timestamp() / 1000);
EXPECT_EQ(last_timestamp_ms_, parsed_log.last_timestamp() / 1000);
+ EXPECT_EQ(parsed_log.first_log_segment().start_time_ms(),
+ std::min(start_time_us_ / 1000, first_timestamp_ms_));
+ EXPECT_EQ(parsed_log.first_log_segment().stop_time_ms(),
+ stop_time_us_ / 1000);
+
// Clean up temporary file - can be pretty slow.
remove(temp_filename_.c_str());
}
diff --git a/chromium/third_party/webrtc/media/BUILD.gn b/chromium/third_party/webrtc/media/BUILD.gn
index 28a8755615a..b6c78fdb397 100644
--- a/chromium/third_party/webrtc/media/BUILD.gn
+++ b/chromium/third_party/webrtc/media/BUILD.gn
@@ -36,8 +36,8 @@ rtc_library("rtc_h264_profile_id") {
"../rtc_base:checks",
"../rtc_base:rtc_base_approved",
"../rtc_base/system:rtc_export",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_source_set("rtc_media_config") {
@@ -57,8 +57,8 @@ rtc_library("rtc_vp9_profile") {
"../api/video_codecs:video_codecs_api",
"../rtc_base:rtc_base_approved",
"../rtc_base/system:rtc_export",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("rtc_sdp_fmtp_utils") {
@@ -71,8 +71,8 @@ rtc_library("rtc_sdp_fmtp_utils") {
deps = [
"../api/video_codecs:video_codecs_api",
"../rtc_base:stringutils",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("rtc_media_base") {
@@ -96,7 +96,6 @@ rtc_library("rtc_media_base") {
"../api/crypto:frame_encryptor_interface",
"../api/crypto:options",
"../api/transport:stun_types",
- "../api/transport/media:media_transport_interface",
"../api/transport/rtp:rtp_source",
"../api/video:video_bitrate_allocation",
"../api/video:video_bitrate_allocator_factory",
@@ -121,6 +120,8 @@ rtc_library("rtc_media_base") {
"../rtc_base/system:rtc_export",
"../rtc_base/third_party/sigslot",
"../system_wrappers:field_trial",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
@@ -197,8 +198,8 @@ rtc_library("rtc_simulcast_encoder_adapter") {
"../rtc_base/system:rtc_export",
"../system_wrappers",
"../system_wrappers:field_trial",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("rtc_encoder_simulcast_proxy") {
@@ -253,8 +254,8 @@ rtc_library("rtc_internal_video_codecs") {
"../rtc_base:rtc_base_approved",
"../rtc_base/system:rtc_export",
"../test:fake_video_codecs",
- "//third_party/abseil-cpp/absl/strings",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
sources = [
"engine/fake_video_codec_factory.cc",
"engine/fake_video_codec_factory.h",
@@ -290,8 +291,6 @@ rtc_library("rtc_audio_video") {
"../api/audio_codecs:audio_codecs_api",
"../api/task_queue",
"../api/transport:bitrate_settings",
- "../api/transport:datagram_transport_interface",
- "../api/transport/media:media_transport_interface",
"../api/transport/rtp:rtp_source",
"../api/units:data_rate",
"../api/video:video_bitrate_allocation",
@@ -318,6 +317,7 @@ rtc_library("rtc_audio_video") {
"../rtc_base",
"../rtc_base:audio_format_to_string",
"../rtc_base:checks",
+ "../rtc_base:ignore_wundef",
"../rtc_base:rtc_task_queue",
"../rtc_base:stringutils",
"../rtc_base/experiments:field_trial_parser",
@@ -329,6 +329,8 @@ rtc_library("rtc_audio_video") {
"../system_wrappers",
"../system_wrappers:field_trial",
"../system_wrappers:metrics",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
@@ -358,7 +360,10 @@ rtc_library("rtc_audio_video") {
deps += [ "../modules/video_capture:video_capture_internal_impl" ]
}
if (rtc_enable_protobuf) {
- deps += [ "../modules/audio_processing/aec_dump:aec_dump_impl" ]
+ deps += [
+ "../modules/audio_coding:ana_config_proto",
+ "../modules/audio_processing/aec_dump:aec_dump_impl",
+ ]
} else {
deps += [ "../modules/audio_processing/aec_dump:null_aec_dump_factory" ]
}
@@ -405,6 +410,8 @@ rtc_library("rtc_data") {
"../rtc_base:rtc_base_approved",
"../rtc_base/third_party/sigslot",
"../system_wrappers",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/base:core_headers",
"//third_party/abseil-cpp/absl/types:optional",
@@ -479,6 +486,8 @@ if (rtc_include_tests) {
"../rtc_base/third_party/sigslot",
"../test:test_support",
"//testing/gtest",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/strings",
]
@@ -553,7 +562,6 @@ if (rtc_include_tests) {
"../api/task_queue:default_task_queue_factory",
"../api/test/video:function_video_factory",
"../api/transport:field_trial_based_config",
- "../api/transport/media:media_transport_interface",
"../api/units:time_delta",
"../api/video:builtin_video_bitrate_allocator_factory",
"../api/video:video_bitrate_allocation",
diff --git a/chromium/third_party/webrtc/media/base/fake_network_interface.h b/chromium/third_party/webrtc/media/base/fake_network_interface.h
index 7d50ca84bcd..24b553fd63f 100644
--- a/chromium/third_party/webrtc/media/base/fake_network_interface.h
+++ b/chromium/third_party/webrtc/media/base/fake_network_interface.h
@@ -28,7 +28,7 @@ namespace cricket {
// Fake NetworkInterface that sends/receives RTP/RTCP packets.
class FakeNetworkInterface : public MediaChannel::NetworkInterface,
- public rtc::MessageHandler {
+ public rtc::MessageHandlerAutoCleanup {
public:
FakeNetworkInterface()
: thread_(rtc::Thread::Current()),
@@ -43,13 +43,14 @@ class FakeNetworkInterface : public MediaChannel::NetworkInterface,
// Conference mode is a mode where instead of simply forwarding the packets,
// the transport will send multiple copies of the packet with the specified
// SSRCs. This allows us to simulate receiving media from multiple sources.
- void SetConferenceMode(bool conf, const std::vector<uint32_t>& ssrcs) {
+ void SetConferenceMode(bool conf, const std::vector<uint32_t>& ssrcs)
+ RTC_LOCKS_EXCLUDED(crit_) {
rtc::CritScope cs(&crit_);
conf_ = conf;
conf_sent_ssrcs_ = ssrcs;
}
- int NumRtpBytes() {
+ int NumRtpBytes() RTC_LOCKS_EXCLUDED(crit_) {
rtc::CritScope cs(&crit_);
int bytes = 0;
for (size_t i = 0; i < rtp_packets_.size(); ++i) {
@@ -58,48 +59,50 @@ class FakeNetworkInterface : public MediaChannel::NetworkInterface,
return bytes;
}
- int NumRtpBytes(uint32_t ssrc) {
+ int NumRtpBytes(uint32_t ssrc) RTC_LOCKS_EXCLUDED(crit_) {
rtc::CritScope cs(&crit_);
int bytes = 0;
GetNumRtpBytesAndPackets(ssrc, &bytes, NULL);
return bytes;
}
- int NumRtpPackets() {
+ int NumRtpPackets() RTC_LOCKS_EXCLUDED(crit_) {
rtc::CritScope cs(&crit_);
return static_cast<int>(rtp_packets_.size());
}
- int NumRtpPackets(uint32_t ssrc) {
+ int NumRtpPackets(uint32_t ssrc) RTC_LOCKS_EXCLUDED(crit_) {
rtc::CritScope cs(&crit_);
int packets = 0;
GetNumRtpBytesAndPackets(ssrc, NULL, &packets);
return packets;
}
- int NumSentSsrcs() {
+ int NumSentSsrcs() RTC_LOCKS_EXCLUDED(crit_) {
rtc::CritScope cs(&crit_);
return static_cast<int>(sent_ssrcs_.size());
}
// Note: callers are responsible for deleting the returned buffer.
- const rtc::CopyOnWriteBuffer* GetRtpPacket(int index) {
+ const rtc::CopyOnWriteBuffer* GetRtpPacket(int index)
+ RTC_LOCKS_EXCLUDED(crit_) {
rtc::CritScope cs(&crit_);
- if (index >= NumRtpPackets()) {
+ if (index >= static_cast<int>(rtp_packets_.size())) {
return NULL;
}
return new rtc::CopyOnWriteBuffer(rtp_packets_[index]);
}
- int NumRtcpPackets() {
+ int NumRtcpPackets() RTC_LOCKS_EXCLUDED(crit_) {
rtc::CritScope cs(&crit_);
return static_cast<int>(rtcp_packets_.size());
}
// Note: callers are responsible for deleting the returned buffer.
- const rtc::CopyOnWriteBuffer* GetRtcpPacket(int index) {
+ const rtc::CopyOnWriteBuffer* GetRtcpPacket(int index)
+ RTC_LOCKS_EXCLUDED(crit_) {
rtc::CritScope cs(&crit_);
- if (index >= NumRtcpPackets()) {
+ if (index >= static_cast<int>(rtcp_packets_.size())) {
return NULL;
}
return new rtc::CopyOnWriteBuffer(rtcp_packets_[index]);
@@ -112,7 +115,8 @@ class FakeNetworkInterface : public MediaChannel::NetworkInterface,
protected:
virtual bool SendPacket(rtc::CopyOnWriteBuffer* packet,
- const rtc::PacketOptions& options) {
+ const rtc::PacketOptions& options)
+ RTC_LOCKS_EXCLUDED(crit_) {
rtc::CritScope cs(&crit_);
uint32_t cur_ssrc = 0;
@@ -137,7 +141,8 @@ class FakeNetworkInterface : public MediaChannel::NetworkInterface,
}
virtual bool SendRtcp(rtc::CopyOnWriteBuffer* packet,
- const rtc::PacketOptions& options) {
+ const rtc::PacketOptions& options)
+ RTC_LOCKS_EXCLUDED(crit_) {
rtc::CritScope cs(&crit_);
rtcp_packets_.push_back(*packet);
options_ = options;
diff --git a/chromium/third_party/webrtc/media/base/media_channel.cc b/chromium/third_party/webrtc/media/base/media_channel.cc
index 2e9bfc3d318..5b0ed26f8bc 100644
--- a/chromium/third_party/webrtc/media/base/media_channel.cc
+++ b/chromium/third_party/webrtc/media/base/media_channel.cc
@@ -23,12 +23,9 @@ MediaChannel::MediaChannel() : enable_dscp_(false) {}
MediaChannel::~MediaChannel() {}
-void MediaChannel::SetInterface(
- NetworkInterface* iface,
- const webrtc::MediaTransportConfig& media_transport_config) {
+void MediaChannel::SetInterface(NetworkInterface* iface) {
rtc::CritScope cs(&network_interface_crit_);
network_interface_ = iface;
- media_transport_config_ = media_transport_config;
UpdateDscp();
}
diff --git a/chromium/third_party/webrtc/media/base/media_channel.h b/chromium/third_party/webrtc/media/base/media_channel.h
index d71ec9158a5..07be28cafab 100644
--- a/chromium/third_party/webrtc/media/base/media_channel.h
+++ b/chromium/third_party/webrtc/media/base/media_channel.h
@@ -26,7 +26,6 @@
#include "api/media_stream_interface.h"
#include "api/rtc_error.h"
#include "api/rtp_parameters.h"
-#include "api/transport/media/media_transport_config.h"
#include "api/transport/rtp/rtp_source.h"
#include "api/video/video_content_type.h"
#include "api/video/video_sink_interface.h"
@@ -195,15 +194,9 @@ class MediaChannel : public sigslot::has_slots<> {
virtual cricket::MediaType media_type() const = 0;
- // Sets the abstract interface class for sending RTP/RTCP data and
- // interface for media transport (experimental). If media transport is
- // provided, it should be used instead of RTP/RTCP.
- // TODO(sukhanov): Currently media transport can co-exist with RTP/RTCP, but
- // in the future we will refactor code to send all frames with media
- // transport.
- virtual void SetInterface(
- NetworkInterface* iface,
- const webrtc::MediaTransportConfig& media_transport_config);
+ // Sets the abstract interface class for sending RTP/RTCP data.
+ virtual void SetInterface(NetworkInterface* iface)
+ RTC_LOCKS_EXCLUDED(network_interface_crit_);
// Called when a RTP packet is received.
virtual void OnPacketReceived(rtc::CopyOnWriteBuffer packet,
int64_t packet_time_us) = 0;
@@ -264,16 +257,9 @@ class MediaChannel : public sigslot::has_slots<> {
int SetOption(NetworkInterface::SocketType type,
rtc::Socket::Option opt,
- int option) {
+ int option) RTC_LOCKS_EXCLUDED(network_interface_crit_) {
rtc::CritScope cs(&network_interface_crit_);
- if (!network_interface_)
- return -1;
-
- return network_interface_->SetOption(type, opt, option);
- }
-
- const webrtc::MediaTransportConfig& media_transport_config() const {
- return media_transport_config_;
+ return SetOptionLocked(type, opt, option);
}
// Corresponds to the SDP attribute extmap-allow-mixed, see RFC8285.
@@ -298,16 +284,27 @@ class MediaChannel : public sigslot::has_slots<> {
rtc::scoped_refptr<webrtc::FrameTransformerInterface> frame_transformer);
protected:
+ int SetOptionLocked(NetworkInterface::SocketType type,
+ rtc::Socket::Option opt,
+ int option)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(network_interface_crit_) {
+ if (!network_interface_)
+ return -1;
+ return network_interface_->SetOption(type, opt, option);
+ }
+
bool DscpEnabled() const { return enable_dscp_; }
// This is the DSCP value used for both RTP and RTCP channels if DSCP is
// enabled. It can be changed at any time via |SetPreferredDscp|.
- rtc::DiffServCodePoint PreferredDscp() const {
+ rtc::DiffServCodePoint PreferredDscp() const
+ RTC_LOCKS_EXCLUDED(network_interface_crit_) {
rtc::CritScope cs(&network_interface_crit_);
return preferred_dscp_;
}
- int SetPreferredDscp(rtc::DiffServCodePoint preferred_dscp) {
+ int SetPreferredDscp(rtc::DiffServCodePoint preferred_dscp)
+ RTC_LOCKS_EXCLUDED(network_interface_crit_) {
rtc::CritScope cs(&network_interface_crit_);
if (preferred_dscp == preferred_dscp_) {
return 0;
@@ -322,16 +319,19 @@ class MediaChannel : public sigslot::has_slots<> {
int UpdateDscp() RTC_EXCLUSIVE_LOCKS_REQUIRED(network_interface_crit_) {
rtc::DiffServCodePoint value =
enable_dscp_ ? preferred_dscp_ : rtc::DSCP_DEFAULT;
- int ret = SetOption(NetworkInterface::ST_RTP, rtc::Socket::OPT_DSCP, value);
+ int ret =
+ SetOptionLocked(NetworkInterface::ST_RTP, rtc::Socket::OPT_DSCP, value);
if (ret == 0) {
- ret = SetOption(NetworkInterface::ST_RTCP, rtc::Socket::OPT_DSCP, value);
+ ret = SetOptionLocked(NetworkInterface::ST_RTCP, rtc::Socket::OPT_DSCP,
+ value);
}
return ret;
}
bool DoSendPacket(rtc::CopyOnWriteBuffer* packet,
bool rtcp,
- const rtc::PacketOptions& options) {
+ const rtc::PacketOptions& options)
+ RTC_LOCKS_EXCLUDED(network_interface_crit_) {
rtc::CritScope cs(&network_interface_crit_);
if (!network_interface_)
return false;
@@ -349,7 +349,6 @@ class MediaChannel : public sigslot::has_slots<> {
nullptr;
rtc::DiffServCodePoint preferred_dscp_
RTC_GUARDED_BY(network_interface_crit_) = rtc::DSCP_DEFAULT;
- webrtc::MediaTransportConfig media_transport_config_;
bool extmap_allow_mixed_ = false;
};
diff --git a/chromium/third_party/webrtc/media/base/media_constants.cc b/chromium/third_party/webrtc/media/base/media_constants.cc
index 5144a6ea655..03679d9627d 100644
--- a/chromium/third_party/webrtc/media/base/media_constants.cc
+++ b/chromium/third_party/webrtc/media/base/media_constants.cc
@@ -98,15 +98,13 @@ const char kCodecParamMaxMessageSize[] = "x-google-max-message-size";
const int kGoogleRtpDataCodecPlType = 109;
const char kGoogleRtpDataCodecName[] = "google-data";
-const int kGoogleSctpDataCodecPlType = 108;
-const char kGoogleSctpDataCodecName[] = "google-sctp-data";
-
const char kComfortNoiseCodecName[] = "CN";
const char kVp8CodecName[] = "VP8";
const char kVp9CodecName[] = "VP9";
const char kAv1CodecName[] = "AV1X";
const char kH264CodecName[] = "H264";
+const char kHEVCCodecName[] = "H265X";
// RFC 6184 RTP Payload Format for H.264 video
const char kH264FmtpProfileLevelId[] = "profile-level-id";
diff --git a/chromium/third_party/webrtc/media/base/media_constants.h b/chromium/third_party/webrtc/media/base/media_constants.h
index b9b8a336f7f..d2bfb36ee91 100644
--- a/chromium/third_party/webrtc/media/base/media_constants.h
+++ b/chromium/third_party/webrtc/media/base/media_constants.h
@@ -124,18 +124,13 @@ extern const char kCodecParamMaxMessageSize[];
extern const int kGoogleRtpDataCodecPlType;
extern const char kGoogleRtpDataCodecName[];
-// TODO(pthatcher): Find an id that won't conflict with anything. On
-// the other hand, it really shouldn't matter since the id won't be
-// used on the wire.
-extern const int kGoogleSctpDataCodecPlType;
-extern const char kGoogleSctpDataCodecName[];
-
extern const char kComfortNoiseCodecName[];
RTC_EXPORT extern const char kVp8CodecName[];
RTC_EXPORT extern const char kVp9CodecName[];
RTC_EXPORT extern const char kAv1CodecName[];
RTC_EXPORT extern const char kH264CodecName[];
+RTC_EXPORT extern const char kHEVCCodecName[];
// RFC 6184 RTP Payload Format for H.264 video
RTC_EXPORT extern const char kH264FmtpProfileLevelId[];
diff --git a/chromium/third_party/webrtc/media/base/media_engine_unittest.cc b/chromium/third_party/webrtc/media/base/media_engine_unittest.cc
index f4c6f5f0454..83f80c4669c 100644
--- a/chromium/third_party/webrtc/media/base/media_engine_unittest.cc
+++ b/chromium/third_party/webrtc/media/base/media_engine_unittest.cc
@@ -26,8 +26,10 @@ namespace {
class MockRtpHeaderExtensionQueryInterface
: public RtpHeaderExtensionQueryInterface {
public:
- MOCK_CONST_METHOD0(GetRtpHeaderExtensions,
- std::vector<RtpHeaderExtensionCapability>());
+ MOCK_METHOD(std::vector<RtpHeaderExtensionCapability>,
+ GetRtpHeaderExtensions,
+ (),
+ (const, override));
};
} // namespace
diff --git a/chromium/third_party/webrtc/media/base/rtp_data_engine_unittest.cc b/chromium/third_party/webrtc/media/base/rtp_data_engine_unittest.cc
index dab4058c331..f01c7c60c74 100644
--- a/chromium/third_party/webrtc/media/base/rtp_data_engine_unittest.cc
+++ b/chromium/third_party/webrtc/media/base/rtp_data_engine_unittest.cc
@@ -75,7 +75,7 @@ class RtpDataMediaChannelTest : public ::testing::Test {
cricket::MediaConfig config;
cricket::RtpDataMediaChannel* channel =
static_cast<cricket::RtpDataMediaChannel*>(dme->CreateChannel(config));
- channel->SetInterface(iface_.get(), webrtc::MediaTransportConfig());
+ channel->SetInterface(iface_.get());
channel->SignalDataReceived.connect(receiver_.get(),
&FakeDataReceiver::OnDataReceived);
return channel;
diff --git a/chromium/third_party/webrtc/media/base/rtp_utils.cc b/chromium/third_party/webrtc/media/base/rtp_utils.cc
index 0b45e69410d..4a2b3267ccf 100644
--- a/chromium/third_party/webrtc/media/base/rtp_utils.cc
+++ b/chromium/third_party/webrtc/media/base/rtp_utils.cc
@@ -34,6 +34,7 @@ static const size_t kRtcpPayloadTypeOffset = 1;
static const size_t kRtpExtensionHeaderLen = 4;
static const size_t kAbsSendTimeExtensionLen = 3;
static const size_t kOneByteExtensionHeaderLen = 1;
+static const size_t kTwoByteExtensionHeaderLen = 2;
namespace {
@@ -424,10 +425,13 @@ bool UpdateRtpAbsSendTimeExtension(uint8_t* rtp,
rtp += kRtpExtensionHeaderLen; // Moving past extension header.
+ constexpr uint16_t kOneByteExtensionProfileId = 0xBEDE;
+ constexpr uint16_t kTwoByteExtensionProfileId = 0x1000;
+
bool found = false;
- // WebRTC is using one byte header extension.
- // TODO(mallinath) - Handle two byte header extension.
- if (profile_id == 0xBEDE) { // OneByte extension header
+ if (profile_id == kOneByteExtensionProfileId ||
+ profile_id == kTwoByteExtensionProfileId) {
+ // OneByte extension header
// 0
// 0 1 2 3 4 5 6 7
// +-+-+-+-+-+-+-+-+
@@ -445,24 +449,53 @@ bool UpdateRtpAbsSendTimeExtension(uint8_t* rtp,
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// | data |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+ // TwoByte extension header
+ // 0
+ // 0 1 2 3 4 5 6 7
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | ID | length |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+ // 0 1 2 3
+ // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | 0x10 | 0x00 | length=3 |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | ID | L=1 | data | ID |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | L=2 | data | 0 (pad) |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | ID | L=2 | data |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+ size_t extension_header_length = profile_id == kOneByteExtensionProfileId
+ ? kOneByteExtensionHeaderLen
+ : kTwoByteExtensionHeaderLen;
+
const uint8_t* extension_start = rtp;
const uint8_t* extension_end = extension_start + extension_length;
- while (rtp < extension_end) {
- const int id = (*rtp & 0xF0) >> 4;
- const size_t length = (*rtp & 0x0F) + 1;
- if (rtp + kOneByteExtensionHeaderLen + length > extension_end) {
+ // rtp + 1 since the minimum size per header extension is two bytes for both
+ // one- and two-byte header extensions.
+ while (rtp + 1 < extension_end) {
+ // See RFC8285 Section 4.2-4.3 for more information about one- and
+ // two-byte header extensions.
+ const int id =
+ profile_id == kOneByteExtensionProfileId ? (*rtp & 0xF0) >> 4 : *rtp;
+ const size_t length = profile_id == kOneByteExtensionProfileId
+ ? (*rtp & 0x0F) + 1
+ : *(rtp + 1);
+ if (rtp + extension_header_length + length > extension_end) {
return false;
}
- // The 4-bit length is the number minus one of data bytes of this header
- // extension element following the one-byte header.
if (id == extension_id) {
- UpdateAbsSendTimeExtensionValue(rtp + kOneByteExtensionHeaderLen,
- length, time_us);
+ UpdateAbsSendTimeExtensionValue(rtp + extension_header_length, length,
+ time_us);
found = true;
break;
}
- rtp += kOneByteExtensionHeaderLen + length;
+ rtp += extension_header_length + length;
// Counting padding bytes.
while ((rtp < extension_end) && (*rtp == 0)) {
++rtp;
diff --git a/chromium/third_party/webrtc/media/base/rtp_utils_unittest.cc b/chromium/third_party/webrtc/media/base/rtp_utils_unittest.cc
index 051508cd01a..a5e8a810f47 100644
--- a/chromium/third_party/webrtc/media/base/rtp_utils_unittest.cc
+++ b/chromium/third_party/webrtc/media/base/rtp_utils_unittest.cc
@@ -71,15 +71,25 @@ static uint8_t kRtpMsgWith2ByteExtnHeader[] = {
// clang-format on
};
-// RTP packet with single byte extension header of length 4 bytes.
-// Extension id = 3 and length = 3
-static uint8_t kRtpMsgWithAbsSendTimeExtension[] = {
+// RTP packet with two one-byte header extensions. The last 4 bytes consist of
+// abs-send-time with extension id = 3 and length = 3.
+static uint8_t kRtpMsgWithOneByteAbsSendTimeExtension[] = {
0x90, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0xBE, 0xDE, 0x00, 0x02, 0x22, 0x00, 0x02, 0x1c, 0x32, 0xaa, 0xbb, 0xcc,
};
-// Index of AbsSendTimeExtn data in message |kRtpMsgWithAbsSendTimeExtension|.
-static const int kAstIndexInRtpMsg = 21;
+// RTP packet with two two-byte header extensions. The last 5 bytes consist of
+// abs-send-time with extension id = 3 and length = 3.
+static uint8_t kRtpMsgWithTwoByteAbsSendTimeExtension[] = {
+ 0x90, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x10, 0x00, 0x00, 0x02, 0x02, 0x01, 0x02, 0x03, 0x03, 0xaa, 0xbb, 0xcc,
+};
+
+// Index of AbsSendTimeExtn data in message
+// |kRtpMsgWithOneByteAbsSendTimeExtension|.
+static const int kAstIndexInOneByteRtpMsg = 21;
+// and in message |kRtpMsgWithTwoByteAbsSendTimeExtension|.
+static const int kAstIndexInTwoByteRtpMsg = 21;
static const rtc::ArrayView<const char> kPcmuFrameArrayView =
rtc::MakeArrayView(reinterpret_cast<const char*>(kPcmuFrame),
@@ -213,19 +223,17 @@ TEST(RtpUtilsTest, Valid2ByteExtnHdrRtpMessage) {
}
// Valid RTP packet which has 1 byte header AbsSendTime extension in it.
-TEST(RtpUtilsTest, ValidRtpPacketWithAbsSendTimeExtension) {
- EXPECT_TRUE(ValidateRtpHeader(kRtpMsgWithAbsSendTimeExtension,
- sizeof(kRtpMsgWithAbsSendTimeExtension),
+TEST(RtpUtilsTest, ValidRtpPacketWithOneByteAbsSendTimeExtension) {
+ EXPECT_TRUE(ValidateRtpHeader(kRtpMsgWithOneByteAbsSendTimeExtension,
+ sizeof(kRtpMsgWithOneByteAbsSendTimeExtension),
nullptr));
}
-// Verify handling of a 2 byte extension header RTP messsage. Currently these
-// messages are not supported.
-TEST(RtpUtilsTest, UpdateAbsSendTimeExtensionIn2ByteHeaderExtn) {
- std::vector<uint8_t> data(
- kRtpMsgWith2ByteExtnHeader,
- kRtpMsgWith2ByteExtnHeader + sizeof(kRtpMsgWith2ByteExtnHeader));
- EXPECT_FALSE(UpdateRtpAbsSendTimeExtension(&data[0], data.size(), 3, 0));
+// Valid RTP packet which has 2 byte header AbsSendTime extension in it.
+TEST(RtpUtilsTest, ValidRtpPacketWithTwoByteAbsSendTimeExtension) {
+ EXPECT_TRUE(ValidateRtpHeader(kRtpMsgWithTwoByteAbsSendTimeExtension,
+ sizeof(kRtpMsgWithTwoByteAbsSendTimeExtension),
+ nullptr));
}
// Verify finding an extension ID in the TURN send indication message.
@@ -276,19 +284,21 @@ TEST(RtpUtilsTest, UpdateAbsSendTimeExtensionInTurnSendIndication) {
// without HMAC value in the packet.
TEST(RtpUtilsTest, ApplyPacketOptionsWithDefaultValues) {
rtc::PacketTimeUpdateParams packet_time_params;
- std::vector<uint8_t> rtp_packet(kRtpMsgWithAbsSendTimeExtension,
- kRtpMsgWithAbsSendTimeExtension +
- sizeof(kRtpMsgWithAbsSendTimeExtension));
+ std::vector<uint8_t> rtp_packet(
+ kRtpMsgWithOneByteAbsSendTimeExtension,
+ kRtpMsgWithOneByteAbsSendTimeExtension +
+ sizeof(kRtpMsgWithOneByteAbsSendTimeExtension));
rtp_packet.insert(rtp_packet.end(), kFakeTag, kFakeTag + sizeof(kFakeTag));
EXPECT_TRUE(ApplyPacketOptions(&rtp_packet[0], rtp_packet.size(),
packet_time_params, 0));
// Making sure HMAC wasn't updated..
- EXPECT_EQ(0, memcmp(&rtp_packet[sizeof(kRtpMsgWithAbsSendTimeExtension)],
- kFakeTag, 4));
+ EXPECT_EQ(0,
+ memcmp(&rtp_packet[sizeof(kRtpMsgWithOneByteAbsSendTimeExtension)],
+ kFakeTag, 4));
// Verify AbsouluteSendTime extension field wasn't modified.
- EXPECT_EQ(0, memcmp(&rtp_packet[kAstIndexInRtpMsg], kTestAstValue,
+ EXPECT_EQ(0, memcmp(&rtp_packet[kAstIndexInOneByteRtpMsg], kTestAstValue,
sizeof(kTestAstValue)));
}
@@ -299,34 +309,53 @@ TEST(RtpUtilsTest, ApplyPacketOptionsWithAuthParams) {
kTestKey + sizeof(kTestKey));
packet_time_params.srtp_auth_tag_len = 4;
- std::vector<uint8_t> rtp_packet(kRtpMsgWithAbsSendTimeExtension,
- kRtpMsgWithAbsSendTimeExtension +
- sizeof(kRtpMsgWithAbsSendTimeExtension));
+ std::vector<uint8_t> rtp_packet(
+ kRtpMsgWithOneByteAbsSendTimeExtension,
+ kRtpMsgWithOneByteAbsSendTimeExtension +
+ sizeof(kRtpMsgWithOneByteAbsSendTimeExtension));
rtp_packet.insert(rtp_packet.end(), kFakeTag, kFakeTag + sizeof(kFakeTag));
EXPECT_TRUE(ApplyPacketOptions(&rtp_packet[0], rtp_packet.size(),
packet_time_params, 0));
uint8_t kExpectedTag[] = {0xc1, 0x7a, 0x8c, 0xa0};
- EXPECT_EQ(0, memcmp(&rtp_packet[sizeof(kRtpMsgWithAbsSendTimeExtension)],
- kExpectedTag, sizeof(kExpectedTag)));
+ EXPECT_EQ(0,
+ memcmp(&rtp_packet[sizeof(kRtpMsgWithOneByteAbsSendTimeExtension)],
+ kExpectedTag, sizeof(kExpectedTag)));
// Verify AbsouluteSendTime extension field is not modified.
- EXPECT_EQ(0, memcmp(&rtp_packet[kAstIndexInRtpMsg], kTestAstValue,
+ EXPECT_EQ(0, memcmp(&rtp_packet[kAstIndexInOneByteRtpMsg], kTestAstValue,
sizeof(kTestAstValue)));
}
// Verify finding an extension ID in a raw rtp message.
-TEST(RtpUtilsTest, UpdateAbsSendTimeExtensionInRtpPacket) {
- std::vector<uint8_t> rtp_packet(kRtpMsgWithAbsSendTimeExtension,
- kRtpMsgWithAbsSendTimeExtension +
- sizeof(kRtpMsgWithAbsSendTimeExtension));
+TEST(RtpUtilsTest, UpdateOneByteAbsSendTimeExtensionInRtpPacket) {
+ std::vector<uint8_t> rtp_packet(
+ kRtpMsgWithOneByteAbsSendTimeExtension,
+ kRtpMsgWithOneByteAbsSendTimeExtension +
+ sizeof(kRtpMsgWithOneByteAbsSendTimeExtension));
+
+ EXPECT_TRUE(UpdateRtpAbsSendTimeExtension(&rtp_packet[0], rtp_packet.size(),
+ 3, 51183266));
+
+ // Verify that the timestamp was updated.
+ const uint8_t kExpectedTimestamp[3] = {0xcc, 0xbb, 0xaa};
+ EXPECT_EQ(0, memcmp(&rtp_packet[kAstIndexInOneByteRtpMsg], kExpectedTimestamp,
+ sizeof(kExpectedTimestamp)));
+}
+
+// Verify finding an extension ID in a raw rtp message.
+TEST(RtpUtilsTest, UpdateTwoByteAbsSendTimeExtensionInRtpPacket) {
+ std::vector<uint8_t> rtp_packet(
+ kRtpMsgWithTwoByteAbsSendTimeExtension,
+ kRtpMsgWithTwoByteAbsSendTimeExtension +
+ sizeof(kRtpMsgWithTwoByteAbsSendTimeExtension));
EXPECT_TRUE(UpdateRtpAbsSendTimeExtension(&rtp_packet[0], rtp_packet.size(),
3, 51183266));
// Verify that the timestamp was updated.
const uint8_t kExpectedTimestamp[3] = {0xcc, 0xbb, 0xaa};
- EXPECT_EQ(0, memcmp(&rtp_packet[kAstIndexInRtpMsg], kExpectedTimestamp,
+ EXPECT_EQ(0, memcmp(&rtp_packet[kAstIndexInTwoByteRtpMsg], kExpectedTimestamp,
sizeof(kExpectedTimestamp)));
}
@@ -339,20 +368,22 @@ TEST(RtpUtilsTest, ApplyPacketOptionsWithAuthParamsAndAbsSendTime) {
packet_time_params.rtp_sendtime_extension_id = 3;
// 3 is also present in the test message.
- std::vector<uint8_t> rtp_packet(kRtpMsgWithAbsSendTimeExtension,
- kRtpMsgWithAbsSendTimeExtension +
- sizeof(kRtpMsgWithAbsSendTimeExtension));
+ std::vector<uint8_t> rtp_packet(
+ kRtpMsgWithOneByteAbsSendTimeExtension,
+ kRtpMsgWithOneByteAbsSendTimeExtension +
+ sizeof(kRtpMsgWithOneByteAbsSendTimeExtension));
rtp_packet.insert(rtp_packet.end(), kFakeTag, kFakeTag + sizeof(kFakeTag));
EXPECT_TRUE(ApplyPacketOptions(&rtp_packet[0], rtp_packet.size(),
packet_time_params, 51183266));
const uint8_t kExpectedTag[] = {0x81, 0xd1, 0x2c, 0x0e};
- EXPECT_EQ(0, memcmp(&rtp_packet[sizeof(kRtpMsgWithAbsSendTimeExtension)],
- kExpectedTag, sizeof(kExpectedTag)));
+ EXPECT_EQ(0,
+ memcmp(&rtp_packet[sizeof(kRtpMsgWithOneByteAbsSendTimeExtension)],
+ kExpectedTag, sizeof(kExpectedTag)));
// Verify that the timestamp was updated.
const uint8_t kExpectedTimestamp[3] = {0xcc, 0xbb, 0xaa};
- EXPECT_EQ(0, memcmp(&rtp_packet[kAstIndexInRtpMsg], kExpectedTimestamp,
+ EXPECT_EQ(0, memcmp(&rtp_packet[kAstIndexInOneByteRtpMsg], kExpectedTimestamp,
sizeof(kExpectedTimestamp)));
}
diff --git a/chromium/third_party/webrtc/media/base/video_adapter.cc b/chromium/third_party/webrtc/media/base/video_adapter.cc
index 27b82646ac1..8ba91d7189f 100644
--- a/chromium/third_party/webrtc/media/base/video_adapter.cc
+++ b/chromium/third_party/webrtc/media/base/video_adapter.cc
@@ -145,8 +145,6 @@ VideoAdapter::VideoAdapter() : VideoAdapter(1) {}
VideoAdapter::~VideoAdapter() {}
bool VideoAdapter::KeepFrame(int64_t in_timestamp_ns) {
- rtc::CritScope cs(&critical_section_);
-
int max_fps = max_framerate_request_;
if (max_fps_)
max_fps = std::min(max_fps, *max_fps_);
diff --git a/chromium/third_party/webrtc/media/base/video_adapter.h b/chromium/third_party/webrtc/media/base/video_adapter.h
index 936cf8917ee..b7acf0e67cb 100644
--- a/chromium/third_party/webrtc/media/base/video_adapter.h
+++ b/chromium/third_party/webrtc/media/base/video_adapter.h
@@ -46,7 +46,8 @@ class VideoAdapter {
int* cropped_width,
int* cropped_height,
int* out_width,
- int* out_height);
+ int* out_height)
+ RTC_LOCKS_EXCLUDED(critical_section_);
// DEPRECATED. Please use OnOutputFormatRequest below.
// TODO(asapersson): Remove this once it is no longer used.
@@ -57,7 +58,8 @@ class VideoAdapter {
// maintain the input orientation, so it doesn't matter if e.g. 1280x720 or
// 720x1280 is requested.
// Note: Should be called from the source only.
- void OnOutputFormatRequest(const absl::optional<VideoFormat>& format);
+ void OnOutputFormatRequest(const absl::optional<VideoFormat>& format)
+ RTC_LOCKS_EXCLUDED(critical_section_);
// Requests output frame size and frame interval from |AdaptFrameResolution|.
// |target_aspect_ratio|: The input frame size will be cropped to match the
@@ -70,7 +72,7 @@ class VideoAdapter {
void OnOutputFormatRequest(
const absl::optional<std::pair<int, int>>& target_aspect_ratio,
const absl::optional<int>& max_pixel_count,
- const absl::optional<int>& max_fps);
+ const absl::optional<int>& max_fps) RTC_LOCKS_EXCLUDED(critical_section_);
// Same as above, but allows setting two different target aspect ratios
// depending on incoming frame orientation. This gives more fine-grained
@@ -81,7 +83,7 @@ class VideoAdapter {
const absl::optional<int>& max_landscape_pixel_count,
const absl::optional<std::pair<int, int>>& target_portrait_aspect_ratio,
const absl::optional<int>& max_portrait_pixel_count,
- const absl::optional<int>& max_fps);
+ const absl::optional<int>& max_fps) RTC_LOCKS_EXCLUDED(critical_section_);
// Requests the output frame size from |AdaptFrameResolution| to have as close
// as possible to |sink_wants.target_pixel_count| pixels (if set)
@@ -93,18 +95,25 @@ class VideoAdapter {
// The sink resolution alignment requirement is given by
// |sink_wants.resolution_alignment|.
// Note: Should be called from the sink only.
- void OnSinkWants(const rtc::VideoSinkWants& sink_wants);
+ void OnSinkWants(const rtc::VideoSinkWants& sink_wants)
+ RTC_LOCKS_EXCLUDED(critical_section_);
private:
// Determine if frame should be dropped based on input fps and requested fps.
- bool KeepFrame(int64_t in_timestamp_ns);
-
- int frames_in_; // Number of input frames.
- int frames_out_; // Number of output frames.
- int frames_scaled_; // Number of frames scaled.
- int adaption_changes_; // Number of changes in scale factor.
- int previous_width_; // Previous adapter output width.
- int previous_height_; // Previous adapter output height.
+ bool KeepFrame(int64_t in_timestamp_ns)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_);
+
+ int frames_in_ RTC_GUARDED_BY(critical_section_); // Number of input frames.
+ int frames_out_
+ RTC_GUARDED_BY(critical_section_); // Number of output frames.
+ int frames_scaled_
+ RTC_GUARDED_BY(critical_section_); // Number of frames scaled.
+ int adaption_changes_
+ RTC_GUARDED_BY(critical_section_); // Number of changes in scale factor.
+ int previous_width_
+ RTC_GUARDED_BY(critical_section_); // Previous adapter output width.
+ int previous_height_
+ RTC_GUARDED_BY(critical_section_); // Previous adapter output height.
const bool variable_start_scale_factor_;
// The fixed source resolution alignment requirement.
diff --git a/chromium/third_party/webrtc/media/base/vp9_profile.cc b/chromium/third_party/webrtc/media/base/vp9_profile.cc
index cfecc5e5450..abf2502fc80 100644
--- a/chromium/third_party/webrtc/media/base/vp9_profile.cc
+++ b/chromium/third_party/webrtc/media/base/vp9_profile.cc
@@ -24,6 +24,8 @@ std::string VP9ProfileToString(VP9Profile profile) {
switch (profile) {
case VP9Profile::kProfile0:
return "0";
+ case VP9Profile::kProfile1:
+ return "1";
case VP9Profile::kProfile2:
return "2";
}
@@ -38,6 +40,8 @@ absl::optional<VP9Profile> StringToVP9Profile(const std::string& str) {
switch (i.value()) {
case 0:
return VP9Profile::kProfile0;
+ case 1:
+ return VP9Profile::kProfile1;
case 2:
return VP9Profile::kProfile2;
default:
diff --git a/chromium/third_party/webrtc/media/base/vp9_profile.h b/chromium/third_party/webrtc/media/base/vp9_profile.h
index e2bbf190055..e47204fede4 100644
--- a/chromium/third_party/webrtc/media/base/vp9_profile.h
+++ b/chromium/third_party/webrtc/media/base/vp9_profile.h
@@ -24,6 +24,7 @@ extern RTC_EXPORT const char kVP9FmtpProfileId[];
enum class VP9Profile {
kProfile0,
+ kProfile1,
kProfile2,
};
diff --git a/chromium/third_party/webrtc/media/engine/fake_webrtc_call.cc b/chromium/third_party/webrtc/media/engine/fake_webrtc_call.cc
index 78d4ba41e03..cb62d9fc0a5 100644
--- a/chromium/third_party/webrtc/media/engine/fake_webrtc_call.cc
+++ b/chromium/third_party/webrtc/media/engine/fake_webrtc_call.cc
@@ -279,6 +279,14 @@ void FakeVideoSendStream::Stop() {
sending_ = false;
}
+void FakeVideoSendStream::AddAdaptationResource(
+ rtc::scoped_refptr<webrtc::Resource> resource) {}
+
+std::vector<rtc::scoped_refptr<webrtc::Resource>>
+FakeVideoSendStream::GetAdaptationResources() {
+ return {};
+}
+
void FakeVideoSendStream::SetSource(
rtc::VideoSourceInterface<webrtc::VideoFrame>* source,
const webrtc::DegradationPreference& degradation_preference) {
@@ -570,6 +578,9 @@ void FakeCall::DestroyFlexfecReceiveStream(
}
}
+void FakeCall::AddAdaptationResource(
+ rtc::scoped_refptr<webrtc::Resource> resource) {}
+
webrtc::PacketReceiver* FakeCall::Receiver() {
return this;
}
diff --git a/chromium/third_party/webrtc/media/engine/fake_webrtc_call.h b/chromium/third_party/webrtc/media/engine/fake_webrtc_call.h
index 4404dec5dfc..97eb49c897b 100644
--- a/chromium/third_party/webrtc/media/engine/fake_webrtc_call.h
+++ b/chromium/third_party/webrtc/media/engine/fake_webrtc_call.h
@@ -173,6 +173,10 @@ class FakeVideoSendStream final
const std::vector<bool> active_layers) override;
void Start() override;
void Stop() override;
+ void AddAdaptationResource(
+ rtc::scoped_refptr<webrtc::Resource> resource) override;
+ std::vector<rtc::scoped_refptr<webrtc::Resource>> GetAdaptationResources()
+ override;
void SetSource(
rtc::VideoSourceInterface<webrtc::VideoFrame>* source,
const webrtc::DegradationPreference& degradation_preference) override;
@@ -341,6 +345,9 @@ class FakeCall final : public webrtc::Call, public webrtc::PacketReceiver {
void DestroyFlexfecReceiveStream(
webrtc::FlexfecReceiveStream* receive_stream) override;
+ void AddAdaptationResource(
+ rtc::scoped_refptr<webrtc::Resource> resource) override;
+
webrtc::PacketReceiver* Receiver() override;
DeliveryStatus DeliverPacket(webrtc::MediaType media_type,
diff --git a/chromium/third_party/webrtc/media/engine/internal_decoder_factory.cc b/chromium/third_party/webrtc/media/engine/internal_decoder_factory.cc
index e68bb369b55..d512b731af4 100644
--- a/chromium/third_party/webrtc/media/engine/internal_decoder_factory.cc
+++ b/chromium/third_party/webrtc/media/engine/internal_decoder_factory.cc
@@ -44,7 +44,7 @@ std::vector<SdpVideoFormat> InternalDecoderFactory::GetSupportedFormats()
const {
std::vector<SdpVideoFormat> formats;
formats.push_back(SdpVideoFormat(cricket::kVp8CodecName));
- for (const SdpVideoFormat& format : SupportedVP9Codecs())
+ for (const SdpVideoFormat& format : SupportedVP9DecoderCodecs())
formats.push_back(format);
for (const SdpVideoFormat& h264_format : SupportedH264Codecs())
formats.push_back(h264_format);
diff --git a/chromium/third_party/webrtc/media/engine/internal_decoder_factory_unittest.cc b/chromium/third_party/webrtc/media/engine/internal_decoder_factory_unittest.cc
index 705933d4395..61be5e72dfd 100644
--- a/chromium/third_party/webrtc/media/engine/internal_decoder_factory_unittest.cc
+++ b/chromium/third_party/webrtc/media/engine/internal_decoder_factory_unittest.cc
@@ -13,6 +13,7 @@
#include "api/video_codecs/sdp_video_format.h"
#include "api/video_codecs/video_decoder.h"
#include "media/base/media_constants.h"
+#include "media/base/vp9_profile.h"
#include "modules/video_coding/codecs/av1/libaom_av1_decoder.h"
#include "test/gmock.h"
#include "test/gtest.h"
@@ -30,6 +31,26 @@ TEST(InternalDecoderFactory, TestVP8) {
EXPECT_TRUE(decoder);
}
+#ifdef RTC_ENABLE_VP9
+TEST(InternalDecoderFactory, TestVP9Profile0) {
+ InternalDecoderFactory factory;
+ std::unique_ptr<VideoDecoder> decoder =
+ factory.CreateVideoDecoder(SdpVideoFormat(
+ cricket::kVp9CodecName,
+ {{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}}));
+ EXPECT_TRUE(decoder);
+}
+
+TEST(InternalDecoderFactory, TestVP9Profile1) {
+ InternalDecoderFactory factory;
+ std::unique_ptr<VideoDecoder> decoder =
+ factory.CreateVideoDecoder(SdpVideoFormat(
+ cricket::kVp9CodecName,
+ {{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile1)}}));
+ EXPECT_TRUE(decoder);
+}
+#endif // RTC_ENABLE_VP9
+
TEST(InternalDecoderFactory, Av1) {
InternalDecoderFactory factory;
if (kIsLibaomAv1DecoderSupported) {
diff --git a/chromium/third_party/webrtc/media/engine/payload_type_mapper.cc b/chromium/third_party/webrtc/media/engine/payload_type_mapper.cc
index fcacd448839..e9f863ca638 100644
--- a/chromium/third_party/webrtc/media/engine/payload_type_mapper.cc
+++ b/chromium/third_party/webrtc/media/engine/payload_type_mapper.cc
@@ -67,7 +67,6 @@ PayloadTypeMapper::PayloadTypeMapper()
{{kIsacCodecName, 32000, 1}, 104},
{{kCnCodecName, 16000, 1}, 105},
{{kCnCodecName, 32000, 1}, 106},
- {{kGoogleSctpDataCodecName, 0, 0}, kGoogleSctpDataCodecPlType},
{{kOpusCodecName,
48000,
2,
diff --git a/chromium/third_party/webrtc/media/engine/payload_type_mapper_unittest.cc b/chromium/third_party/webrtc/media/engine/payload_type_mapper_unittest.cc
index c8b2234c25e..fa6864b48aa 100644
--- a/chromium/third_party/webrtc/media/engine/payload_type_mapper_unittest.cc
+++ b/chromium/third_party/webrtc/media/engine/payload_type_mapper_unittest.cc
@@ -52,7 +52,6 @@ TEST_F(PayloadTypeMapperTest, WebRTCPayloadTypes) {
return mapper_.FindMappingFor({name, 0, 0});
};
EXPECT_EQ(kGoogleRtpDataCodecPlType, data_mapping(kGoogleRtpDataCodecName));
- EXPECT_EQ(kGoogleSctpDataCodecPlType, data_mapping(kGoogleSctpDataCodecName));
EXPECT_EQ(102, mapper_.FindMappingFor({kIlbcCodecName, 8000, 1}));
EXPECT_EQ(103, mapper_.FindMappingFor({kIsacCodecName, 16000, 1}));
diff --git a/chromium/third_party/webrtc/media/engine/simulcast_encoder_adapter_unittest.cc b/chromium/third_party/webrtc/media/engine/simulcast_encoder_adapter_unittest.cc
index b467c49166c..075cb83ee93 100644
--- a/chromium/third_party/webrtc/media/engine/simulcast_encoder_adapter_unittest.cc
+++ b/chromium/third_party/webrtc/media/engine/simulcast_encoder_adapter_unittest.cc
@@ -199,8 +199,10 @@ class MockVideoEncoder : public VideoEncoder {
video_format_("unknown"),
callback_(nullptr) {}
- MOCK_METHOD1(SetFecControllerOverride,
- void(FecControllerOverride* fec_controller_override));
+ MOCK_METHOD(void,
+ SetFecControllerOverride,
+ (FecControllerOverride * fec_controller_override),
+ (override));
// TODO(nisse): Valid overrides commented out, because the gmock
// methods don't use any override declarations, and we want to avoid
@@ -212,10 +214,11 @@ class MockVideoEncoder : public VideoEncoder {
return init_encode_return_value_;
}
- MOCK_METHOD2(
- Encode,
- int32_t(const VideoFrame& inputImage,
- const std::vector<VideoFrameType>* frame_types) /* override */);
+ MOCK_METHOD(int32_t,
+ Encode,
+ (const VideoFrame& inputImage,
+ const std::vector<VideoFrameType>* frame_types),
+ (override));
int32_t RegisterEncodeCompleteCallback(
EncodedImageCallback* callback) override {
@@ -223,7 +226,7 @@ class MockVideoEncoder : public VideoEncoder {
return 0;
}
- MOCK_METHOD0(Release, int32_t() /* override */);
+ MOCK_METHOD(int32_t, Release, (), (override));
void SetRates(const RateControlParameters& parameters) {
last_set_rates_ = parameters;
@@ -334,8 +337,7 @@ std::vector<SdpVideoFormat> MockVideoEncoderFactory::GetSupportedFormats()
std::unique_ptr<VideoEncoder> MockVideoEncoderFactory::CreateVideoEncoder(
const SdpVideoFormat& format) {
- std::unique_ptr<MockVideoEncoder> encoder(
- new ::testing::NiceMock<MockVideoEncoder>(this));
+ auto encoder = std::make_unique<::testing::NiceMock<MockVideoEncoder>>(this);
encoder->set_init_encode_return_value(init_encode_return_value_);
const char* encoder_name = encoder_names_.empty()
? "codec_implementation_name"
diff --git a/chromium/third_party/webrtc/media/engine/webrtc_video_engine.cc b/chromium/third_party/webrtc/media/engine/webrtc_video_engine.cc
index 3976a6a1c50..a7eff62b16c 100644
--- a/chromium/third_party/webrtc/media/engine/webrtc_video_engine.cc
+++ b/chromium/third_party/webrtc/media/engine/webrtc_video_engine.cc
@@ -20,7 +20,6 @@
#include "absl/algorithm/container.h"
#include "absl/strings/match.h"
#include "api/media_stream_interface.h"
-#include "api/transport/datagram_transport_interface.h"
#include "api/units/data_rate.h"
#include "api/video/video_codec_constants.h"
#include "api/video/video_codec_type.h"
@@ -609,7 +608,6 @@ WebRtcVideoEngine::GetRtpHeaderExtensions() const {
webrtc::RtpExtension::kPlayoutDelayUri,
webrtc::RtpExtension::kVideoContentTypeUri,
webrtc::RtpExtension::kVideoTimingUri,
- webrtc::RtpExtension::kFrameMarkingUri,
webrtc::RtpExtension::kColorSpaceUri, webrtc::RtpExtension::kMidUri,
webrtc::RtpExtension::kRidUri, webrtc::RtpExtension::kRepairedRidUri}) {
result.emplace_back(uri, id++, webrtc::RtpTransceiverDirection::kSendRecv);
@@ -1299,13 +1297,6 @@ bool WebRtcVideoChannel::AddSendStream(const StreamParams& sp) {
config.rtp.extmap_allow_mixed = ExtmapAllowMixed();
config.rtcp_report_interval_ms = video_config_.rtcp_report_interval_ms;
- // If sending through Datagram Transport, limit packet size to maximum
- // packet size supported by datagram_transport.
- if (media_transport_config().rtp_max_packet_size) {
- config.rtp.max_packet_size =
- media_transport_config().rtp_max_packet_size.value();
- }
-
WebRtcVideoSendStream* stream = new WebRtcVideoSendStream(
call_, sp, std::move(config), default_send_options_,
video_config_.enable_cpu_adaptation, bitrate_config_.max_bitrate_bps,
@@ -1759,11 +1750,9 @@ void WebRtcVideoChannel::OnNetworkRouteChanged(
network_route.packet_overhead);
}
-void WebRtcVideoChannel::SetInterface(
- NetworkInterface* iface,
- const webrtc::MediaTransportConfig& media_transport_config) {
+void WebRtcVideoChannel::SetInterface(NetworkInterface* iface) {
RTC_DCHECK_RUN_ON(&thread_checker_);
- MediaChannel::SetInterface(iface, media_transport_config);
+ MediaChannel::SetInterface(iface);
// Set the RTP recv/send buffer to a bigger size.
// The group should be a positive integer with an explicit size, in
diff --git a/chromium/third_party/webrtc/media/engine/webrtc_video_engine.h b/chromium/third_party/webrtc/media/engine/webrtc_video_engine.h
index 00d249541a3..126abfd2905 100644
--- a/chromium/third_party/webrtc/media/engine/webrtc_video_engine.h
+++ b/chromium/third_party/webrtc/media/engine/webrtc_video_engine.h
@@ -169,9 +169,7 @@ class WebRtcVideoChannel : public VideoMediaChannel,
void OnReadyToSend(bool ready) override;
void OnNetworkRouteChanged(const std::string& transport_name,
const rtc::NetworkRoute& network_route) override;
- void SetInterface(
- NetworkInterface* iface,
- const webrtc::MediaTransportConfig& media_transport_config) override;
+ void SetInterface(NetworkInterface* iface) override;
// E2E Encrypted Video Frame API
// Set a frame decryptor to a particular ssrc that will intercept all
diff --git a/chromium/third_party/webrtc/media/engine/webrtc_video_engine_unittest.cc b/chromium/third_party/webrtc/media/engine/webrtc_video_engine_unittest.cc
index ae6f15d8af4..eae83938d4f 100644
--- a/chromium/third_party/webrtc/media/engine/webrtc_video_engine_unittest.cc
+++ b/chromium/third_party/webrtc/media/engine/webrtc_video_engine_unittest.cc
@@ -28,7 +28,6 @@
#include "api/test/mock_video_encoder_factory.h"
#include "api/test/video/function_video_decoder_factory.h"
#include "api/transport/field_trial_based_config.h"
-#include "api/transport/media/media_transport_config.h"
#include "api/units/time_delta.h"
#include "api/video/builtin_video_bitrate_allocator_factory.h"
#include "api/video/i420_buffer.h"
@@ -209,11 +208,15 @@ int GetMaxDefaultBitrateBps(size_t width, size_t height) {
class MockVideoSource : public rtc::VideoSourceInterface<webrtc::VideoFrame> {
public:
- MOCK_METHOD2(AddOrUpdateSink,
- void(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
- const rtc::VideoSinkWants& wants));
- MOCK_METHOD1(RemoveSink,
- void(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink));
+ MOCK_METHOD(void,
+ AddOrUpdateSink,
+ (rtc::VideoSinkInterface<webrtc::VideoFrame> * sink,
+ const rtc::VideoSinkWants& wants),
+ (override));
+ MOCK_METHOD(void,
+ RemoveSink,
+ (rtc::VideoSinkInterface<webrtc::VideoFrame> * sink),
+ (override));
};
} // namespace
@@ -343,10 +346,6 @@ TEST_F(WebRtcVideoEngineTest, SupportsVideoTimingHeaderExtension) {
ExpectRtpCapabilitySupport(RtpExtension::kVideoTimingUri, true);
}
-TEST_F(WebRtcVideoEngineTest, SupportsFrameMarkingHeaderExtension) {
- ExpectRtpCapabilitySupport(RtpExtension::kFrameMarkingUri, true);
-}
-
TEST_F(WebRtcVideoEngineTest, SupportsColorSpaceHeaderExtension) {
ExpectRtpCapabilitySupport(RtpExtension::kColorSpaceUri, true);
}
@@ -1335,7 +1334,7 @@ class WebRtcVideoChannelEncodedFrameCallbackTest : public ::testing::Test {
webrtc::CryptoOptions(),
video_bitrate_allocator_factory_.get())))) {
network_interface_.SetDestination(channel_.get());
- channel_->SetInterface(&network_interface_, webrtc::MediaTransportConfig());
+ channel_->SetInterface(&network_interface_);
cricket::VideoRecvParameters parameters;
parameters.codecs = engine_.recv_codecs();
channel_->SetRecvParameters(parameters);
@@ -1481,7 +1480,7 @@ class WebRtcVideoChannelBaseTest : public ::testing::Test {
channel_->OnReadyToSend(true);
EXPECT_TRUE(channel_.get() != NULL);
network_interface_.SetDestination(channel_.get());
- channel_->SetInterface(&network_interface_, webrtc::MediaTransportConfig());
+ channel_->SetInterface(&network_interface_);
cricket::VideoRecvParameters parameters;
parameters.codecs = engine_.send_codecs();
channel_->SetRecvParameters(parameters);
@@ -5064,8 +5063,7 @@ TEST_F(WebRtcVideoChannelTest, TestSetDscpOptions) {
static_cast<cricket::WebRtcVideoChannel*>(engine_.CreateMediaChannel(
call_.get(), config, VideoOptions(), webrtc::CryptoOptions(),
video_bitrate_allocator_factory_.get())));
- channel->SetInterface(network_interface.get(),
- webrtc::MediaTransportConfig());
+ channel->SetInterface(network_interface.get());
// Default value when DSCP is disabled should be DSCP_DEFAULT.
EXPECT_EQ(rtc::DSCP_DEFAULT, network_interface->dscp());
@@ -5076,8 +5074,7 @@ TEST_F(WebRtcVideoChannelTest, TestSetDscpOptions) {
static_cast<cricket::WebRtcVideoChannel*>(engine_.CreateMediaChannel(
call_.get(), config, VideoOptions(), webrtc::CryptoOptions(),
video_bitrate_allocator_factory_.get())));
- channel->SetInterface(network_interface.get(),
- webrtc::MediaTransportConfig());
+ channel->SetInterface(network_interface.get());
EXPECT_EQ(rtc::DSCP_DEFAULT, network_interface->dscp());
// Create a send stream to configure
@@ -5106,8 +5103,7 @@ TEST_F(WebRtcVideoChannelTest, TestSetDscpOptions) {
static_cast<cricket::WebRtcVideoChannel*>(engine_.CreateMediaChannel(
call_.get(), config, VideoOptions(), webrtc::CryptoOptions(),
video_bitrate_allocator_factory_.get())));
- channel->SetInterface(network_interface.get(),
- webrtc::MediaTransportConfig());
+ channel->SetInterface(network_interface.get());
EXPECT_EQ(rtc::DSCP_DEFAULT, network_interface->dscp());
}
diff --git a/chromium/third_party/webrtc/media/engine/webrtc_voice_engine.cc b/chromium/third_party/webrtc/media/engine/webrtc_voice_engine.cc
index 85c72804c15..38dc3462ace 100644
--- a/chromium/third_party/webrtc/media/engine/webrtc_voice_engine.cc
+++ b/chromium/third_party/webrtc/media/engine/webrtc_voice_engine.cc
@@ -36,7 +36,9 @@
#include "rtc_base/constructor_magic.h"
#include "rtc_base/experiments/field_trial_parser.h"
#include "rtc_base/experiments/field_trial_units.h"
+#include "rtc_base/experiments/struct_parameters_parser.h"
#include "rtc_base/helpers.h"
+#include "rtc_base/ignore_wundef.h"
#include "rtc_base/logging.h"
#include "rtc_base/race_checker.h"
#include "rtc_base/strings/audio_format_to_string.h"
@@ -46,6 +48,16 @@
#include "system_wrappers/include/field_trial.h"
#include "system_wrappers/include/metrics.h"
+#if WEBRTC_ENABLE_PROTOBUF
+RTC_PUSH_IGNORING_WUNDEF()
+#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
+#include "external/webrtc/webrtc/modules/audio_coding/audio_network_adaptor/config.pb.h"
+#else
+#include "modules/audio_coding/audio_network_adaptor/config.pb.h"
+#endif
+RTC_POP_IGNORING_WUNDEF()
+#endif
+
namespace cricket {
namespace {
@@ -99,6 +111,12 @@ std::string ToString(const AudioCodec& codec) {
return ss.Release();
}
+// If this field trial is enabled, we will negotiate and use RFC 2198
+// redundancy for opus audio.
+bool IsAudioRedForOpusFieldTrialEnabled() {
+ return webrtc::field_trial::IsEnabled("WebRTC-Audio-Red-For-Opus");
+}
+
bool IsCodec(const AudioCodec& codec, const char* ref_name) {
return absl::EqualsIgnoreCase(codec.name, ref_name);
}
@@ -185,6 +203,38 @@ absl::optional<int> ComputeSendBitrate(int max_send_bitrate_bps,
}
}
+struct AdaptivePtimeConfig {
+ bool enabled = false;
+ webrtc::DataRate min_payload_bitrate = webrtc::DataRate::KilobitsPerSec(16);
+ webrtc::DataRate min_encoder_bitrate = webrtc::DataRate::KilobitsPerSec(12);
+ bool use_slow_adaptation = true;
+
+ absl::optional<std::string> audio_network_adaptor_config;
+
+ std::unique_ptr<webrtc::StructParametersParser> Parser() {
+ return webrtc::StructParametersParser::Create( //
+ "enabled", &enabled, //
+ "min_payload_bitrate", &min_payload_bitrate, //
+ "min_encoder_bitrate", &min_encoder_bitrate, //
+ "use_slow_adaptation", &use_slow_adaptation);
+ }
+
+ AdaptivePtimeConfig() {
+ Parser()->Parse(
+ webrtc::field_trial::FindFullName("WebRTC-Audio-AdaptivePtime"));
+#if WEBRTC_ENABLE_PROTOBUF
+ webrtc::audio_network_adaptor::config::ControllerManager config;
+ auto* frame_length_controller =
+ config.add_controllers()->mutable_frame_length_controller_v2();
+ frame_length_controller->set_min_payload_bitrate_bps(
+ min_payload_bitrate.bps());
+ frame_length_controller->set_use_slow_adaptation(use_slow_adaptation);
+ config.add_controllers()->mutable_bitrate_controller();
+ audio_network_adaptor_config = config.SerializeAsString();
+#endif
+ }
+};
+
} // namespace
WebRtcVoiceEngine::WebRtcVoiceEngine(
@@ -682,6 +732,11 @@ std::vector<AudioCodec> WebRtcVoiceEngine::CollectCodecs(
}
}
+ // Add red codec.
+ if (IsAudioRedForOpusFieldTrialEnabled()) {
+ map_format({kRedCodecName, 48000, 2}, &out);
+ }
+
// Add telephone-event codecs last.
for (const auto& dtmf : generate_dtmf) {
if (dtmf.second) {
@@ -726,7 +781,6 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream
config_.rtp.extensions = extensions;
config_.has_dscp =
rtp_parameters_.encodings[0].network_priority != webrtc::Priority::kLow;
- config_.audio_network_adaptor_config = audio_network_adaptor_config;
config_.encoder_factory = encoder_factory;
config_.codec_pair_id = codec_pair_id;
config_.track_id = track_id;
@@ -737,6 +791,9 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream
rtp_parameters_.rtcp.cname = c_name;
rtp_parameters_.header_extensions = extensions;
+ audio_network_adaptor_config_from_options_ = audio_network_adaptor_config;
+ UpdateAudioNetworkAdaptorConfig();
+
if (send_codec_spec) {
UpdateSendCodecSpec(*send_codec_spec);
}
@@ -787,10 +844,12 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream
void SetAudioNetworkAdaptorConfig(
const absl::optional<std::string>& audio_network_adaptor_config) {
RTC_DCHECK(worker_thread_checker_.IsCurrent());
- if (config_.audio_network_adaptor_config == audio_network_adaptor_config) {
+ if (audio_network_adaptor_config_from_options_ ==
+ audio_network_adaptor_config) {
return;
}
- config_.audio_network_adaptor_config = audio_network_adaptor_config;
+ audio_network_adaptor_config_from_options_ = audio_network_adaptor_config;
+ UpdateAudioNetworkAdaptorConfig();
UpdateAllowedBitrateRange();
ReconfigureAudioSendStream();
}
@@ -937,6 +996,7 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream
rtp_parameters_.encodings[0].max_bitrate_bps;
double old_priority = rtp_parameters_.encodings[0].bitrate_priority;
webrtc::Priority old_dscp = rtp_parameters_.encodings[0].network_priority;
+ bool old_adaptive_ptime = rtp_parameters_.encodings[0].adaptive_ptime;
rtp_parameters_ = parameters;
config_.bitrate_priority = rtp_parameters_.encodings[0].bitrate_priority;
config_.has_dscp = (rtp_parameters_.encodings[0].network_priority !=
@@ -945,15 +1005,19 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream
bool reconfigure_send_stream =
(rtp_parameters_.encodings[0].max_bitrate_bps != old_rtp_max_bitrate) ||
(rtp_parameters_.encodings[0].bitrate_priority != old_priority) ||
- (rtp_parameters_.encodings[0].network_priority != old_dscp);
+ (rtp_parameters_.encodings[0].network_priority != old_dscp) ||
+ (rtp_parameters_.encodings[0].adaptive_ptime != old_adaptive_ptime);
if (rtp_parameters_.encodings[0].max_bitrate_bps != old_rtp_max_bitrate) {
// Update the bitrate range.
if (send_rate) {
config_.send_codec_spec->target_bitrate_bps = send_rate;
}
- UpdateAllowedBitrateRange();
}
if (reconfigure_send_stream) {
+ // Changing adaptive_ptime may update the audio network adaptor config
+ // used.
+ UpdateAudioNetworkAdaptorConfig();
+ UpdateAllowedBitrateRange();
ReconfigureAudioSendStream();
}
@@ -989,6 +1053,7 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream
// The order of precedence, from lowest to highest is:
// - a reasonable default of 32kbps min/max
// - fixed target bitrate from codec spec
+ // - lower min bitrate if adaptive ptime is enabled
// - bitrate configured in the rtp_parameter encodings settings
const int kDefaultBitrateBps = 32000;
config_.min_bitrate_bps = kDefaultBitrateBps;
@@ -1000,6 +1065,12 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream
config_.max_bitrate_bps = *config_.send_codec_spec->target_bitrate_bps;
}
+ if (rtp_parameters_.encodings[0].adaptive_ptime) {
+ config_.min_bitrate_bps = std::min(
+ config_.min_bitrate_bps,
+ static_cast<int>(adaptive_ptime_config_.min_encoder_bitrate.bps()));
+ }
+
if (rtp_parameters_.encodings[0].min_bitrate_bps) {
config_.min_bitrate_bps = *rtp_parameters_.encodings[0].min_bitrate_bps;
}
@@ -1033,12 +1104,24 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream
UpdateAllowedBitrateRange();
}
+ void UpdateAudioNetworkAdaptorConfig() {
+ if (adaptive_ptime_config_.enabled ||
+ rtp_parameters_.encodings[0].adaptive_ptime) {
+ config_.audio_network_adaptor_config =
+ adaptive_ptime_config_.audio_network_adaptor_config;
+ return;
+ }
+ config_.audio_network_adaptor_config =
+ audio_network_adaptor_config_from_options_;
+ }
+
void ReconfigureAudioSendStream() {
RTC_DCHECK(worker_thread_checker_.IsCurrent());
RTC_DCHECK(stream_);
stream_->Reconfigure(config_);
}
+ const AdaptivePtimeConfig adaptive_ptime_config_;
rtc::ThreadChecker worker_thread_checker_;
rtc::RaceChecker audio_capture_race_checker_;
webrtc::Call* call_ = nullptr;
@@ -1056,6 +1139,9 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream
int max_send_bitrate_bps_;
webrtc::RtpParameters rtp_parameters_;
absl::optional<webrtc::AudioCodecSpec> audio_codec_spec_;
+ // TODO(webrtc:11717): Remove this once audio_network_adaptor in AudioOptions
+ // has been removed.
+ absl::optional<std::string> audio_network_adaptor_config_from_options_;
RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(WebRtcAudioSendStream);
};
@@ -1541,7 +1627,9 @@ bool WebRtcVoiceMediaChannel::SetRecvCodecs(
<< old_codec.id << ")";
}
auto format = AudioCodecToSdpAudioFormat(codec);
- if (!IsCodec(codec, "cn") && !IsCodec(codec, "telephone-event") &&
+ if (!IsCodec(codec, kCnCodecName) && !IsCodec(codec, kDtmfCodecName) &&
+ (!IsAudioRedForOpusFieldTrialEnabled() ||
+ !IsCodec(codec, kRedCodecName)) &&
!engine()->decoder_factory_->IsSupportedDecoder(format)) {
RTC_LOG(LS_ERROR) << "Unsupported codec: " << rtc::ToString(format);
return false;
@@ -1692,6 +1780,19 @@ bool WebRtcVoiceMediaChannel::SetSendCodecs(
}
}
+ if (IsAudioRedForOpusFieldTrialEnabled()) {
+ // Loop through the codecs to find the RED codec that matches opus
+ // with respect to clockrate and number of channels.
+ for (const AudioCodec& red_codec : codecs) {
+ if (IsCodec(red_codec, kRedCodecName) &&
+ red_codec.clockrate == send_codec_spec->format.clockrate_hz &&
+ red_codec.channels == send_codec_spec->format.num_channels) {
+ send_codec_spec->red_payload_type = red_codec.id;
+ break;
+ }
+ }
+ }
+
if (send_codec_spec_ != send_codec_spec) {
send_codec_spec_ = std::move(send_codec_spec);
// Apply new settings to all streams.
diff --git a/chromium/third_party/webrtc/media/engine/webrtc_voice_engine_unittest.cc b/chromium/third_party/webrtc/media/engine/webrtc_voice_engine_unittest.cc
index e7ebf8940f8..d70019e9f34 100644
--- a/chromium/third_party/webrtc/media/engine/webrtc_voice_engine_unittest.cc
+++ b/chromium/third_party/webrtc/media/engine/webrtc_voice_engine_unittest.cc
@@ -59,6 +59,7 @@ const cricket::AudioCodec kG722CodecVoE(9, "G722", 16000, 64000, 1);
const cricket::AudioCodec kG722CodecSdp(9, "G722", 8000, 64000, 1);
const cricket::AudioCodec kCn8000Codec(13, "CN", 8000, 0, 1);
const cricket::AudioCodec kCn16000Codec(105, "CN", 16000, 0, 1);
+const cricket::AudioCodec kRed48000Codec(112, "RED", 48000, 32000, 2);
const cricket::AudioCodec kTelephoneEventCodec1(106,
"telephone-event",
8000,
@@ -1031,6 +1032,30 @@ TEST_P(WebRtcVoiceEngineTestFake, ChangeRecvCodecPayloadType) {
EXPECT_TRUE(channel_->SetRecvParameters(parameters));
}
+// Test that we set Opus/Red under the field trial.
+TEST_P(WebRtcVoiceEngineTestFake, RecvRed) {
+ webrtc::test::ScopedFieldTrials override_field_trials(
+ "WebRTC-Audio-Red-For-Opus/Enabled/");
+
+ EXPECT_TRUE(SetupRecvStream());
+ cricket::AudioRecvParameters parameters;
+ parameters.codecs.push_back(kOpusCodec);
+ parameters.codecs.push_back(kRed48000Codec);
+ EXPECT_TRUE(channel_->SetRecvParameters(parameters));
+ EXPECT_THAT(GetRecvStreamConfig(kSsrcX).decoder_map,
+ (ContainerEq<std::map<int, webrtc::SdpAudioFormat>>(
+ {{111, {"opus", 48000, 2}}, {112, {"red", 48000, 2}}})));
+}
+
+// Test that we do not allow setting Opus/Red by default.
+TEST_P(WebRtcVoiceEngineTestFake, RecvRedDefault) {
+ EXPECT_TRUE(SetupRecvStream());
+ cricket::AudioRecvParameters parameters;
+ parameters.codecs.push_back(kOpusCodec);
+ parameters.codecs.push_back(kRed48000Codec);
+ EXPECT_FALSE(channel_->SetRecvParameters(parameters));
+}
+
TEST_P(WebRtcVoiceEngineTestFake, SetSendBandwidthAuto) {
EXPECT_TRUE(SetupSendStream());
@@ -1194,6 +1219,46 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRtpParametersEncodingsActive) {
EXPECT_TRUE(GetSendStream(kSsrcX).IsSending());
}
+TEST_P(WebRtcVoiceEngineTestFake, SetRtpParametersAdaptivePtime) {
+ EXPECT_TRUE(SetupSendStream());
+ // Get current parameters and change "adaptive_ptime" to true.
+ webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(kSsrcX);
+ ASSERT_EQ(1u, parameters.encodings.size());
+ ASSERT_FALSE(parameters.encodings[0].adaptive_ptime);
+ parameters.encodings[0].adaptive_ptime = true;
+ EXPECT_TRUE(channel_->SetRtpSendParameters(kSsrcX, parameters).ok());
+ EXPECT_TRUE(GetAudioNetworkAdaptorConfig(kSsrcX));
+ EXPECT_EQ(12000, GetSendStreamConfig(kSsrcX).min_bitrate_bps);
+
+ parameters.encodings[0].adaptive_ptime = false;
+ EXPECT_TRUE(channel_->SetRtpSendParameters(kSsrcX, parameters).ok());
+ EXPECT_FALSE(GetAudioNetworkAdaptorConfig(kSsrcX));
+ EXPECT_EQ(32000, GetSendStreamConfig(kSsrcX).min_bitrate_bps);
+}
+
+TEST_P(WebRtcVoiceEngineTestFake,
+ DisablingAdaptivePtimeDoesNotRemoveAudioNetworkAdaptorFromOptions) {
+ EXPECT_TRUE(SetupSendStream());
+ send_parameters_.options.audio_network_adaptor = true;
+ send_parameters_.options.audio_network_adaptor_config = {"1234"};
+ SetSendParameters(send_parameters_);
+ EXPECT_EQ(send_parameters_.options.audio_network_adaptor_config,
+ GetAudioNetworkAdaptorConfig(kSsrcX));
+
+ webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(kSsrcX);
+ parameters.encodings[0].adaptive_ptime = false;
+ EXPECT_TRUE(channel_->SetRtpSendParameters(kSsrcX, parameters).ok());
+ EXPECT_EQ(send_parameters_.options.audio_network_adaptor_config,
+ GetAudioNetworkAdaptorConfig(kSsrcX));
+}
+
+TEST_P(WebRtcVoiceEngineTestFake, AdaptivePtimeFieldTrial) {
+ webrtc::test::ScopedFieldTrials override_field_trials(
+ "WebRTC-Audio-AdaptivePtime/enabled:true/");
+ EXPECT_TRUE(SetupSendStream());
+ EXPECT_TRUE(GetAudioNetworkAdaptorConfig(kSsrcX));
+}
+
// Test that SetRtpSendParameters configures the correct encoding channel for
// each SSRC.
TEST_P(WebRtcVoiceEngineTestFake, RtpParametersArePerStream) {
@@ -1442,6 +1507,37 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecs) {
EXPECT_FALSE(channel_->CanInsertDtmf());
}
+// Test that we set Opus/Red under the field trial.
+TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsRed) {
+ webrtc::test::ScopedFieldTrials override_field_trials(
+ "WebRTC-Audio-Red-For-Opus/Enabled/");
+
+ EXPECT_TRUE(SetupSendStream());
+ cricket::AudioSendParameters parameters;
+ parameters.codecs.push_back(kOpusCodec);
+ parameters.codecs.push_back(kRed48000Codec);
+ parameters.codecs[0].id = 96;
+ SetSendParameters(parameters);
+ const auto& send_codec_spec = *GetSendStreamConfig(kSsrcX).send_codec_spec;
+ EXPECT_EQ(96, send_codec_spec.payload_type);
+ EXPECT_STRCASEEQ("opus", send_codec_spec.format.name.c_str());
+ EXPECT_EQ(112, send_codec_spec.red_payload_type);
+}
+
+// Test that we do not interpret Opus/Red by default.
+TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsRedDefault) {
+ EXPECT_TRUE(SetupSendStream());
+ cricket::AudioSendParameters parameters;
+ parameters.codecs.push_back(kOpusCodec);
+ parameters.codecs.push_back(kRed48000Codec);
+ parameters.codecs[0].id = 96;
+ SetSendParameters(parameters);
+ const auto& send_codec_spec = *GetSendStreamConfig(kSsrcX).send_codec_spec;
+ EXPECT_EQ(96, send_codec_spec.payload_type);
+ EXPECT_STRCASEEQ("opus", send_codec_spec.format.name.c_str());
+ EXPECT_EQ(absl::nullopt, send_codec_spec.red_payload_type);
+}
+
// Test that WebRtcVoiceEngine reconfigures, rather than recreates its
// AudioSendStream.
TEST_P(WebRtcVoiceEngineTestFake, DontRecreateSendStream) {
@@ -2046,6 +2142,10 @@ class WebRtcVoiceEngineWithSendSideBweTest : public WebRtcVoiceEngineTestFake {
: WebRtcVoiceEngineTestFake("WebRTC-Audio-SendSideBwe/Enabled/") {}
};
+INSTANTIATE_TEST_SUITE_P(UnusedParameter,
+ WebRtcVoiceEngineWithSendSideBweTest,
+ ::testing::Values(true));
+
TEST_P(WebRtcVoiceEngineWithSendSideBweTest,
SupportsTransportSequenceNumberHeaderExtension) {
const std::vector<webrtc::RtpExtension> header_extensions =
@@ -3098,7 +3198,7 @@ TEST_P(WebRtcVoiceEngineTestFake, TestSetDscpOptions) {
channel.reset(static_cast<cricket::WebRtcVoiceMediaChannel*>(
engine_->CreateMediaChannel(&call_, config, cricket::AudioOptions(),
webrtc::CryptoOptions())));
- channel->SetInterface(&network_interface, webrtc::MediaTransportConfig());
+ channel->SetInterface(&network_interface);
// Default value when DSCP is disabled should be DSCP_DEFAULT.
EXPECT_EQ(rtc::DSCP_DEFAULT, network_interface.dscp());
@@ -3106,7 +3206,7 @@ TEST_P(WebRtcVoiceEngineTestFake, TestSetDscpOptions) {
channel.reset(static_cast<cricket::WebRtcVoiceMediaChannel*>(
engine_->CreateMediaChannel(&call_, config, cricket::AudioOptions(),
webrtc::CryptoOptions())));
- channel->SetInterface(&network_interface, webrtc::MediaTransportConfig());
+ channel->SetInterface(&network_interface);
EXPECT_EQ(rtc::DSCP_DEFAULT, network_interface.dscp());
// Create a send stream to configure
@@ -3134,11 +3234,11 @@ TEST_P(WebRtcVoiceEngineTestFake, TestSetDscpOptions) {
channel.reset(static_cast<cricket::WebRtcVoiceMediaChannel*>(
engine_->CreateMediaChannel(&call_, config, cricket::AudioOptions(),
webrtc::CryptoOptions())));
- channel->SetInterface(&network_interface, webrtc::MediaTransportConfig());
+ channel->SetInterface(&network_interface);
// Default value when DSCP is disabled should be DSCP_DEFAULT.
EXPECT_EQ(rtc::DSCP_DEFAULT, network_interface.dscp());
- channel->SetInterface(nullptr, webrtc::MediaTransportConfig());
+ channel->SetInterface(nullptr);
}
TEST_P(WebRtcVoiceEngineTestFake, SetOutputVolume) {
diff --git a/chromium/third_party/webrtc/media/sctp/sctp_transport.cc b/chromium/third_party/webrtc/media/sctp/sctp_transport.cc
index ad68c37ac97..5b434109eec 100644
--- a/chromium/third_party/webrtc/media/sctp/sctp_transport.cc
+++ b/chromium/third_party/webrtc/media/sctp/sctp_transport.cc
@@ -269,6 +269,11 @@ class SctpTransport::UsrSctpWrapper {
// TODO(ldixon): Consider turning this on/off.
usrsctp_sysctl_set_sctp_ecn_enable(0);
+ // WebRTC doesn't use these features, so disable them to reduce the
+ // potential attack surface.
+ usrsctp_sysctl_set_sctp_asconf_enable(0);
+ usrsctp_sysctl_set_sctp_auth_enable(0);
+
// This is harmless, but we should find out when the library default
// changes.
int send_size = usrsctp_sysctl_get_sctp_sendspace();
@@ -1263,6 +1268,9 @@ void SctpTransport::OnNotificationAssocChange(const sctp_assoc_change& change) {
max_outbound_streams_ = change.sac_outbound_streams;
max_inbound_streams_ = change.sac_inbound_streams;
SignalAssociationChangeCommunicationUp();
+ // In case someone tried to close a stream before communication
+ // came up, send any queued resets.
+ SendQueuedStreamResets();
break;
case SCTP_COMM_LOST:
RTC_LOG(LS_INFO) << "Association change SCTP_COMM_LOST";
diff --git a/chromium/third_party/webrtc/media/sctp/sctp_transport_unittest.cc b/chromium/third_party/webrtc/media/sctp/sctp_transport_unittest.cc
index ff3f2d70a98..da6c6290fd3 100644
--- a/chromium/third_party/webrtc/media/sctp/sctp_transport_unittest.cc
+++ b/chromium/third_party/webrtc/media/sctp/sctp_transport_unittest.cc
@@ -605,6 +605,15 @@ TEST_F(SctpTransportTest, ClosesRemoteStream) {
transport1()->ResetStream(1);
EXPECT_TRUE_WAIT(transport2_observer.WasStreamClosed(1), kDefaultTimeout);
}
+TEST_F(SctpTransportTest, ClosesRemoteStreamWithNoData) {
+ SetupConnectedTransportsWithTwoStreams();
+ SctpTransportObserver transport1_observer(transport1());
+ SctpTransportObserver transport2_observer(transport2());
+
+ // Close stream 1 on transport 1. Transport 2 should notify us.
+ transport1()->ResetStream(1);
+ EXPECT_TRUE_WAIT(transport2_observer.WasStreamClosed(1), kDefaultTimeout);
+}
TEST_F(SctpTransportTest, ClosesTwoRemoteStreams) {
SetupConnectedTransportsWithTwoStreams();
diff --git a/chromium/third_party/webrtc/modules/BUILD.gn b/chromium/third_party/webrtc/modules/BUILD.gn
index ffdd7016a1d..f6f44bd4b36 100644
--- a/chromium/third_party/webrtc/modules/BUILD.gn
+++ b/chromium/third_party/webrtc/modules/BUILD.gn
@@ -31,10 +31,8 @@ group("modules") {
rtc_source_set("module_api_public") {
sources = [ "include/module_common_types_public.h" ]
- deps = [
- "..:webrtc_common",
- "//third_party/abseil-cpp/absl/types:optional",
- ]
+ deps = [ "..:webrtc_common" ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("module_api") {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/BUILD.gn b/chromium/third_party/webrtc/modules/audio_coding/BUILD.gn
index ceee0c0f07e..3480e70df17 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/BUILD.gn
+++ b/chromium/third_party/webrtc/modules/audio_coding/BUILD.gn
@@ -56,6 +56,8 @@ rtc_library("audio_coding") {
"../../rtc_base:rtc_base_approved",
"../../system_wrappers",
"../../system_wrappers:metrics",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -71,8 +73,8 @@ rtc_library("legacy_encoded_audio_frame") {
"../../api/audio_codecs:audio_codecs_api",
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("webrtc_cng") {
@@ -104,8 +106,8 @@ rtc_library("audio_encoder_cng") {
"../../api/units:time_delta",
"../../common_audio",
"../../rtc_base:checks",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("red") {
@@ -122,8 +124,8 @@ rtc_library("red") {
"../../common_audio",
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("g711") {
@@ -143,8 +145,8 @@ rtc_library("g711") {
"../../api/units:time_delta",
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
public_deps = [ ":g711_c" ] # no-presubmit-check TODO(webrtc:8603)
}
@@ -175,8 +177,8 @@ rtc_library("g722") {
"../../api/units:time_delta",
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
public_deps = [ ":g722_c" ] # no-presubmit-check TODO(webrtc:8603)
}
@@ -208,8 +210,8 @@ rtc_library("ilbc") {
"../../common_audio",
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
public_deps = [ ":ilbc_c" ] # no-presubmit-check TODO(webrtc:8603)
}
@@ -384,8 +386,10 @@ rtc_source_set("isac_common") {
"../../api/units:time_delta",
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
- "//third_party/abseil-cpp/absl/types:optional",
+ "../../rtc_base:safe_minmax",
+ "../../system_wrappers:field_trial",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("isac") {
@@ -723,6 +727,8 @@ rtc_library("audio_coding_opus_common") {
"../../api/audio_codecs:audio_codecs_api",
"../../rtc_base:checks",
"../../rtc_base:stringutils",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -751,6 +757,8 @@ rtc_library("webrtc_opus") {
"../../rtc_base:rtc_numerics",
"../../rtc_base:safe_minmax",
"../../system_wrappers:field_trial",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -788,6 +796,8 @@ rtc_library("webrtc_multiopus") {
"../../rtc_base:rtc_base_approved",
"../../rtc_base:safe_minmax",
"../../rtc_base:stringutils",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
@@ -848,7 +858,7 @@ rtc_library("audio_network_adaptor_config") {
"audio_network_adaptor/audio_network_adaptor_config.cc",
"audio_network_adaptor/include/audio_network_adaptor_config.h",
]
- deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("audio_network_adaptor") {
@@ -874,6 +884,8 @@ rtc_library("audio_network_adaptor") {
"audio_network_adaptor/fec_controller_plr_based.h",
"audio_network_adaptor/frame_length_controller.cc",
"audio_network_adaptor/frame_length_controller.h",
+ "audio_network_adaptor/frame_length_controller_v2.cc",
+ "audio_network_adaptor/frame_length_controller_v2.h",
"audio_network_adaptor/include/audio_network_adaptor.h",
"audio_network_adaptor/util/threshold_curve.h",
]
@@ -893,6 +905,9 @@ rtc_library("audio_network_adaptor") {
"../../rtc_base/system:file_wrapper",
"../../system_wrappers",
"../../system_wrappers:field_trial",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -995,6 +1010,8 @@ rtc_library("neteq") {
"../../system_wrappers",
"../../system_wrappers:field_trial",
"../../system_wrappers:metrics",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -1052,8 +1069,8 @@ rtc_library("neteq_tools_minimal") {
"../../system_wrappers",
"../rtp_rtcp",
"../rtp_rtcp:rtp_rtcp_format",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
defines = audio_codec_defines
}
@@ -1090,8 +1107,8 @@ rtc_library("neteq_test_tools") {
"../../test:rtp_test_utils",
"../rtp_rtcp",
"../rtp_rtcp:rtp_rtcp_format",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
public_deps = [
":neteq_tools",
@@ -1130,6 +1147,8 @@ rtc_library("neteq_tools") {
"../../rtc_base:rtc_base_approved",
"../rtp_rtcp",
"../rtp_rtcp:rtp_rtcp_format",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -1172,8 +1191,8 @@ if (rtc_enable_protobuf) {
"../../rtc_base:rtc_base_approved",
"../rtp_rtcp",
"../rtp_rtcp:rtp_rtcp_format",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
public_deps = # no-presubmit-check TODO(webrtc:8603)
[ "../../logging:rtc_event_log_proto" ]
}
@@ -1230,10 +1249,11 @@ rtc_library("audio_coding_modules_tests_shared") {
"../../test:test_support",
"../rtp_rtcp:rtp_rtcp_format",
"//testing/gtest",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
-
defines = audio_coding_defines
if (rtc_enable_protobuf) {
@@ -1372,6 +1392,8 @@ if (rtc_include_tests) {
"../../system_wrappers",
"../../test:fileutils",
"../../test:test_support",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -1417,9 +1439,9 @@ if (rtc_include_tests) {
":neteq_tools",
"../../rtc_base:rtc_base_approved",
"../../test:test_support",
- "//third_party/abseil-cpp/absl/strings",
"//testing/gtest",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
rtc_library("acm_send_test") {
@@ -1432,6 +1454,7 @@ if (rtc_include_tests) {
defines = audio_coding_defines
deps = audio_coding_deps + [
+ "//third_party/abseil-cpp/absl/strings",
"../../api/audio:audio_frame_api",
"../../rtc_base:checks",
":audio_coding",
@@ -1497,8 +1520,8 @@ if (rtc_include_tests) {
deps = [
"../../rtc_base:checks",
"../../test:fileutils",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
sources = [
"neteq/tools/neteq_test_factory.cc",
"neteq/tools/neteq_test_factory.h",
@@ -1626,15 +1649,14 @@ if (rtc_include_tests) {
"../../test:fileutils",
"../../test:test_support",
"//testing/gtest",
- "//third_party/abseil-cpp/absl/flags:flag",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/flags:flag" ]
}
rtc_executable("rtp_encode") {
testonly = true
deps = audio_coding_deps + [
- "//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/flags:flag",
"//third_party/abseil-cpp/absl/flags:parse",
":audio_coding",
@@ -1648,6 +1670,7 @@ if (rtc_include_tests) {
"../../api/audio_codecs/isac:audio_encoder_isac",
"../../api/audio_codecs/opus:audio_encoder_opus",
"../../rtc_base:safe_conversions",
+ "//third_party/abseil-cpp/absl/memory",
]
sources = [ "neteq/tools/rtp_encode.cc" ]
@@ -1916,6 +1939,7 @@ if (rtc_include_tests) {
"audio_network_adaptor/event_log_writer_unittest.cc",
"audio_network_adaptor/fec_controller_plr_based_unittest.cc",
"audio_network_adaptor/frame_length_controller_unittest.cc",
+ "audio_network_adaptor/frame_length_controller_v2_unittest.cc",
"audio_network_adaptor/util/threshold_curve_unittest.cc",
"codecs/builtin_audio_decoder_factory_unittest.cc",
"codecs/builtin_audio_encoder_factory_unittest.cc",
@@ -2050,8 +2074,11 @@ if (rtc_include_tests) {
"codecs/opus/test",
"codecs/opus/test:test_unittest",
"//testing/gtest",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/flags:flag",
"//third_party/abseil-cpp/absl/memory",
+ "//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
diff --git a/chromium/third_party/webrtc/modules/audio_coding/acm2/audio_coding_module_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/acm2/audio_coding_module_unittest.cc
index 6c9b242e009..b53d456ff7e 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/acm2/audio_coding_module_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/acm2/audio_coding_module_unittest.cc
@@ -252,6 +252,9 @@ class AudioCodingModuleTestOldApi : public ::testing::Test {
Clock* clock_;
};
+class AudioCodingModuleTestOldApiDeathTest
+ : public AudioCodingModuleTestOldApi {};
+
TEST_F(AudioCodingModuleTestOldApi, VerifyOutputFrame) {
AudioFrame audio_frame;
const int kSampleRateHz = 32000;
@@ -271,7 +274,7 @@ TEST_F(AudioCodingModuleTestOldApi, VerifyOutputFrame) {
// http://crbug.com/615050
#if !defined(WEBRTC_WIN) && defined(__clang__) && RTC_DCHECK_IS_ON && \
GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST_F(AudioCodingModuleTestOldApi, FailOnZeroDesiredFrequency) {
+TEST_F(AudioCodingModuleTestOldApiDeathTest, FailOnZeroDesiredFrequency) {
AudioFrame audio_frame;
bool muted;
RTC_EXPECT_DEATH(acm_->PlayoutData10Ms(0, &audio_frame, &muted),
diff --git a/chromium/third_party/webrtc/modules/audio_coding/audio_coding.gni b/chromium/third_party/webrtc/modules/audio_coding/audio_coding.gni
index 9b0aba856a4..bf67d9cb8da 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/audio_coding.gni
+++ b/chromium/third_party/webrtc/modules/audio_coding/audio_coding.gni
@@ -25,9 +25,6 @@ if (current_cpu == "arm") {
} else {
audio_codec_defines += [ "WEBRTC_CODEC_ISAC" ]
}
-if (!build_with_mozilla && !build_with_chromium) {
- audio_codec_defines += [ "WEBRTC_CODEC_RED" ]
-}
audio_coding_defines = audio_codec_defines
neteq_defines = audio_codec_defines
diff --git a/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/config.proto b/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/config.proto
index 90c58e5c7d1..347372e8d99 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/config.proto
+++ b/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/config.proto
@@ -1,8 +1,10 @@
syntax = "proto2";
+
+package webrtc.audio_network_adaptor.config;
+
option optimize_for = LITE_RUNTIME;
option java_package = "org.webrtc.AudioNetworkAdaptor";
option java_outer_classname = "Config";
-package webrtc.audio_network_adaptor.config;
message FecController {
message Threshold {
@@ -116,6 +118,19 @@ message FrameLengthController {
optional int32 fl_60ms_to_40ms_bandwidth_bps = 12;
}
+message FrameLengthControllerV2 {
+ // FrameLengthControllerV2 chooses the frame length by taking the target
+ // bitrate and subtracting the overhead bitrate to obtain the remaining
+ // bitrate for the payload. The chosen frame length is the shortest possible
+ // where the payload bitrate is more than |min_payload_bitrate_bps|.
+ optional int32 min_payload_bitrate_bps = 1;
+
+ // If true, uses the stable target bitrate to decide the frame length. This
+ // will result in less frame length toggling but spending more time at longer
+ // frame lengths compared to using the normal target bitrate.
+ optional bool use_slow_adaptation = 2;
+}
+
message ChannelController {
// Uplink bandwidth above which the number of encoded channels should switch
// from 1 to 2.
@@ -164,6 +179,7 @@ message Controller {
DtxController dtx_controller = 24;
BitrateController bitrate_controller = 25;
FecControllerRplrBased fec_controller_rplr_based = 26;
+ FrameLengthControllerV2 frame_length_controller_v2 = 27;
}
}
@@ -177,4 +193,3 @@ message ControllerManager {
// made.
optional float min_reordering_squared_distance = 3;
}
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/controller_manager.cc b/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/controller_manager.cc
index c7aad1da879..415b9fcf52c 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/controller_manager.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/controller_manager.cc
@@ -11,6 +11,7 @@
#include "modules/audio_coding/audio_network_adaptor/controller_manager.h"
#include <cmath>
+#include <memory>
#include <string>
#include <utility>
@@ -20,6 +21,7 @@
#include "modules/audio_coding/audio_network_adaptor/dtx_controller.h"
#include "modules/audio_coding/audio_network_adaptor/fec_controller_plr_based.h"
#include "modules/audio_coding/audio_network_adaptor/frame_length_controller.h"
+#include "modules/audio_coding/audio_network_adaptor/frame_length_controller_v2.h"
#include "modules/audio_coding/audio_network_adaptor/util/threshold_curve.h"
#include "rtc_base/ignore_wundef.h"
#include "rtc_base/logging.h"
@@ -197,6 +199,14 @@ std::unique_ptr<BitrateController> CreateBitrateController(
initial_bitrate_bps, initial_frame_length_ms,
fl_increase_overhead_offset, fl_decrease_overhead_offset)));
}
+
+std::unique_ptr<FrameLengthControllerV2> CreateFrameLengthControllerV2(
+ const audio_network_adaptor::config::FrameLengthControllerV2& config,
+ rtc::ArrayView<const int> encoder_frame_lengths_ms) {
+ return std::make_unique<FrameLengthControllerV2>(
+ encoder_frame_lengths_ms, config.min_payload_bitrate_bps(),
+ config.use_slow_adaptation());
+}
#endif // WEBRTC_ENABLE_PROTOBUF
} // namespace
@@ -277,6 +287,11 @@ std::unique_ptr<ControllerManager> ControllerManagerImpl::Create(
controller_config.bitrate_controller(), initial_bitrate_bps,
initial_frame_length_ms);
break;
+ case audio_network_adaptor::config::Controller::kFrameLengthControllerV2:
+ controller = CreateFrameLengthControllerV2(
+ controller_config.frame_length_controller_v2(),
+ encoder_frame_lengths_ms);
+ break;
default:
RTC_NOTREACHED();
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/controller_manager_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/controller_manager_unittest.cc
index 4286434b5b9..c71bbc9e2ad 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/controller_manager_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/controller_manager_unittest.cc
@@ -260,6 +260,14 @@ void AddFrameLengthControllerConfig(
kChracteristicPacketLossFraction[1]);
}
+void AddFrameLengthControllerV2Config(
+ audio_network_adaptor::config::ControllerManager* config) {
+ auto controller =
+ config->add_controllers()->mutable_frame_length_controller_v2();
+ controller->set_min_payload_bitrate_bps(16000);
+ controller->set_use_slow_adaptation(true);
+}
+
constexpr int kInitialBitrateBps = 24000;
constexpr size_t kIntialChannelsToEncode = 1;
constexpr bool kInitialDtxEnabled = true;
@@ -464,6 +472,14 @@ TEST(ControllerManagerTest, CreateFromConfigStringAndCheckReordering) {
ControllerType::CHANNEL, ControllerType::DTX,
ControllerType::BIT_RATE});
}
+
+TEST(ControllerManagerTest, CreateFrameLengthControllerV2) {
+ audio_network_adaptor::config::ControllerManager config;
+ AddFrameLengthControllerV2Config(&config);
+ auto states = CreateControllerManager(config.SerializeAsString());
+ auto controllers = states.controller_manager->GetControllers();
+ EXPECT_TRUE(controllers.size() == 1);
+}
#endif // WEBRTC_ENABLE_PROTOBUF
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/frame_length_controller_v2.cc b/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/frame_length_controller_v2.cc
new file mode 100644
index 00000000000..36fc10ba825
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/frame_length_controller_v2.cc
@@ -0,0 +1,73 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/audio_coding/audio_network_adaptor/frame_length_controller_v2.h"
+
+#include <algorithm>
+
+#include "absl/algorithm/container.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+namespace {
+
+int OverheadBps(int overhead_bytes_per_packet, int frame_length_ms) {
+ return overhead_bytes_per_packet * 8 * 1000 / frame_length_ms;
+}
+
+} // namespace
+
+FrameLengthControllerV2::FrameLengthControllerV2(
+ rtc::ArrayView<const int> encoder_frame_lengths_ms,
+ int min_payload_bitrate_bps,
+ bool use_slow_adaptation)
+ : encoder_frame_lengths_ms_(encoder_frame_lengths_ms.begin(),
+ encoder_frame_lengths_ms.end()),
+ min_payload_bitrate_bps_(min_payload_bitrate_bps),
+ use_slow_adaptation_(use_slow_adaptation) {
+ RTC_CHECK(!encoder_frame_lengths_ms_.empty());
+ absl::c_sort(encoder_frame_lengths_ms_);
+}
+
+void FrameLengthControllerV2::UpdateNetworkMetrics(
+ const NetworkMetrics& network_metrics) {
+ if (network_metrics.target_audio_bitrate_bps) {
+ target_bitrate_bps_ = network_metrics.target_audio_bitrate_bps;
+ }
+ if (network_metrics.overhead_bytes_per_packet) {
+ overhead_bytes_per_packet_ = network_metrics.overhead_bytes_per_packet;
+ }
+ if (network_metrics.uplink_bandwidth_bps) {
+ uplink_bandwidth_bps_ = network_metrics.uplink_bandwidth_bps;
+ }
+}
+
+void FrameLengthControllerV2::MakeDecision(AudioEncoderRuntimeConfig* config) {
+ if (!target_bitrate_bps_ || !overhead_bytes_per_packet_ ||
+ !uplink_bandwidth_bps_) {
+ return;
+ }
+
+ auto it =
+ absl::c_find_if(encoder_frame_lengths_ms_, [&](int frame_length_ms) {
+ int target = use_slow_adaptation_ ? *uplink_bandwidth_bps_
+ : *target_bitrate_bps_;
+ return target -
+ OverheadBps(*overhead_bytes_per_packet_, frame_length_ms) >
+ min_payload_bitrate_bps_;
+ });
+
+ // Longest frame length is chosen if none match our criteria.
+ config->frame_length_ms = it != encoder_frame_lengths_ms_.end()
+ ? *it
+ : encoder_frame_lengths_ms_.back();
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/frame_length_controller_v2.h b/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/frame_length_controller_v2.h
new file mode 100644
index 00000000000..d7102b0b44d
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/frame_length_controller_v2.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_AUDIO_CODING_AUDIO_NETWORK_ADAPTOR_FRAME_LENGTH_CONTROLLER_V2_H_
+#define MODULES_AUDIO_CODING_AUDIO_NETWORK_ADAPTOR_FRAME_LENGTH_CONTROLLER_V2_H_
+
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "modules/audio_coding/audio_network_adaptor/controller.h"
+#include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor.h"
+
+namespace webrtc {
+
+class FrameLengthControllerV2 final : public Controller {
+ public:
+ FrameLengthControllerV2(rtc::ArrayView<const int> encoder_frame_lengths_ms,
+ int min_payload_bitrate_bps,
+ bool use_slow_adaptation);
+
+ void UpdateNetworkMetrics(const NetworkMetrics& network_metrics) override;
+
+ void MakeDecision(AudioEncoderRuntimeConfig* config) override;
+
+ private:
+ std::vector<int> encoder_frame_lengths_ms_;
+ const int min_payload_bitrate_bps_;
+ const bool use_slow_adaptation_;
+
+ absl::optional<int> uplink_bandwidth_bps_;
+ absl::optional<int> target_bitrate_bps_;
+ absl::optional<int> overhead_bytes_per_packet_;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_AUDIO_CODING_AUDIO_NETWORK_ADAPTOR_FRAME_LENGTH_CONTROLLER_V2_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/frame_length_controller_v2_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/frame_length_controller_v2_unittest.cc
new file mode 100644
index 00000000000..1c88f47c583
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/frame_length_controller_v2_unittest.cc
@@ -0,0 +1,121 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/audio_coding/audio_network_adaptor/frame_length_controller_v2.h"
+
+#include <algorithm>
+#include <memory>
+
+#include "modules/audio_coding/audio_network_adaptor/controller.h"
+#include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace {
+
+constexpr int kANASupportedFrameLengths[] = {20, 40, 60, 120};
+constexpr int kMinPayloadBitrateBps = 16000;
+
+} // namespace
+
+class FrameLengthControllerV2Test : public testing::Test {
+ protected:
+ AudioEncoderRuntimeConfig GetDecision() {
+ AudioEncoderRuntimeConfig config;
+ controller_->MakeDecision(&config);
+ return config;
+ }
+
+ void SetOverhead(int overhead_bytes_per_packet) {
+ overhead_bytes_per_packet_ = overhead_bytes_per_packet;
+ Controller::NetworkMetrics metrics;
+ metrics.overhead_bytes_per_packet = overhead_bytes_per_packet;
+ controller_->UpdateNetworkMetrics(metrics);
+ }
+
+ void SetTargetBitrate(int target_audio_bitrate_bps) {
+ target_audio_bitrate_bps_ = target_audio_bitrate_bps;
+ Controller::NetworkMetrics metrics;
+ metrics.target_audio_bitrate_bps = target_audio_bitrate_bps;
+ controller_->UpdateNetworkMetrics(metrics);
+ }
+
+ void SetUplinkBandwidth(int uplink_bandwidth_bps) {
+ Controller::NetworkMetrics metrics;
+ metrics.uplink_bandwidth_bps = uplink_bandwidth_bps;
+ controller_->UpdateNetworkMetrics(metrics);
+ }
+
+ void ExpectFrameLengthDecision(int expected_frame_length_ms) {
+ auto config = GetDecision();
+ EXPECT_EQ(*config.frame_length_ms, expected_frame_length_ms);
+ }
+
+ std::unique_ptr<FrameLengthControllerV2> controller_ =
+ std::make_unique<FrameLengthControllerV2>(kANASupportedFrameLengths,
+ kMinPayloadBitrateBps,
+ /*use_slow_adaptation=*/false);
+ absl::optional<int> target_audio_bitrate_bps_;
+ absl::optional<int> overhead_bytes_per_packet_;
+};
+
+// Don't return any decision if we haven't received all required network
+// metrics.
+TEST_F(FrameLengthControllerV2Test, RequireNetworkMetrics) {
+ auto config = GetDecision();
+ EXPECT_FALSE(config.bitrate_bps);
+ EXPECT_FALSE(config.frame_length_ms);
+
+ SetOverhead(30);
+ config = GetDecision();
+ EXPECT_FALSE(config.frame_length_ms);
+
+ SetTargetBitrate(32000);
+ config = GetDecision();
+ EXPECT_FALSE(config.frame_length_ms);
+
+ SetUplinkBandwidth(32000);
+ config = GetDecision();
+ EXPECT_TRUE(config.frame_length_ms);
+}
+
+TEST_F(FrameLengthControllerV2Test, UseFastAdaptation) {
+ SetOverhead(50);
+ SetTargetBitrate(50000);
+ SetUplinkBandwidth(50000);
+ ExpectFrameLengthDecision(20);
+
+ SetTargetBitrate(20000);
+ ExpectFrameLengthDecision(120);
+
+ SetTargetBitrate(30000);
+ ExpectFrameLengthDecision(40);
+
+ SetTargetBitrate(25000);
+ ExpectFrameLengthDecision(60);
+}
+
+TEST_F(FrameLengthControllerV2Test, UseSlowAdaptation) {
+ controller_ = std::make_unique<FrameLengthControllerV2>(
+ kANASupportedFrameLengths, kMinPayloadBitrateBps,
+ /*use_slow_adaptation=*/true);
+ SetOverhead(50);
+ SetTargetBitrate(50000);
+ SetUplinkBandwidth(20000);
+ ExpectFrameLengthDecision(120);
+
+ SetUplinkBandwidth(30000);
+ ExpectFrameLengthDecision(40);
+
+ SetUplinkBandwidth(40000);
+ ExpectFrameLengthDecision(20);
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/mock/mock_audio_network_adaptor.h b/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/mock/mock_audio_network_adaptor.h
index 8c048496cad..26a9061745d 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/mock/mock_audio_network_adaptor.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/mock/mock_audio_network_adaptor.h
@@ -18,27 +18,38 @@ namespace webrtc {
class MockAudioNetworkAdaptor : public AudioNetworkAdaptor {
public:
- virtual ~MockAudioNetworkAdaptor() { Die(); }
- MOCK_METHOD0(Die, void());
+ ~MockAudioNetworkAdaptor() override { Die(); }
+ MOCK_METHOD(void, Die, ());
- MOCK_METHOD1(SetUplinkBandwidth, void(int uplink_bandwidth_bps));
+ MOCK_METHOD(void, SetUplinkBandwidth, (int uplink_bandwidth_bps), (override));
- MOCK_METHOD1(SetUplinkPacketLossFraction,
- void(float uplink_packet_loss_fraction));
+ MOCK_METHOD(void,
+ SetUplinkPacketLossFraction,
+ (float uplink_packet_loss_fraction),
+ (override));
- MOCK_METHOD1(SetRtt, void(int rtt_ms));
+ MOCK_METHOD(void, SetRtt, (int rtt_ms), (override));
- MOCK_METHOD1(SetTargetAudioBitrate, void(int target_audio_bitrate_bps));
+ MOCK_METHOD(void,
+ SetTargetAudioBitrate,
+ (int target_audio_bitrate_bps),
+ (override));
- MOCK_METHOD1(SetOverhead, void(size_t overhead_bytes_per_packet));
+ MOCK_METHOD(void,
+ SetOverhead,
+ (size_t overhead_bytes_per_packet),
+ (override));
- MOCK_METHOD0(GetEncoderRuntimeConfig, AudioEncoderRuntimeConfig());
+ MOCK_METHOD(AudioEncoderRuntimeConfig,
+ GetEncoderRuntimeConfig,
+ (),
+ (override));
- MOCK_METHOD1(StartDebugDump, void(FILE* file_handle));
+ MOCK_METHOD(void, StartDebugDump, (FILE * file_handle), (override));
- MOCK_METHOD0(StopDebugDump, void());
+ MOCK_METHOD(void, StopDebugDump, (), (override));
- MOCK_CONST_METHOD0(GetStats, ANAStats());
+ MOCK_METHOD(ANAStats, GetStats, (), (const, override));
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/mock/mock_controller.h b/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/mock/mock_controller.h
index df28e9e26f1..de554c05177 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/mock/mock_controller.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/mock/mock_controller.h
@@ -18,11 +18,16 @@ namespace webrtc {
class MockController : public Controller {
public:
- virtual ~MockController() { Die(); }
- MOCK_METHOD0(Die, void());
- MOCK_METHOD1(UpdateNetworkMetrics,
- void(const NetworkMetrics& network_metrics));
- MOCK_METHOD1(MakeDecision, void(AudioEncoderRuntimeConfig* config));
+ ~MockController() override { Die(); }
+ MOCK_METHOD(void, Die, ());
+ MOCK_METHOD(void,
+ UpdateNetworkMetrics,
+ (const NetworkMetrics& network_metrics),
+ (override));
+ MOCK_METHOD(void,
+ MakeDecision,
+ (AudioEncoderRuntimeConfig * config),
+ (override));
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/mock/mock_controller_manager.h b/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/mock/mock_controller_manager.h
index 8d410a742d6..9e2fa466fc0 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/mock/mock_controller_manager.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/mock/mock_controller_manager.h
@@ -20,12 +20,13 @@ namespace webrtc {
class MockControllerManager : public ControllerManager {
public:
- virtual ~MockControllerManager() { Die(); }
- MOCK_METHOD0(Die, void());
- MOCK_METHOD1(
- GetSortedControllers,
- std::vector<Controller*>(const Controller::NetworkMetrics& metrics));
- MOCK_CONST_METHOD0(GetControllers, std::vector<Controller*>());
+ ~MockControllerManager() override { Die(); }
+ MOCK_METHOD(void, Die, ());
+ MOCK_METHOD(std::vector<Controller*>,
+ GetSortedControllers,
+ (const Controller::NetworkMetrics& metrics),
+ (override));
+ MOCK_METHOD(std::vector<Controller*>, GetControllers, (), (const, override));
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/mock/mock_debug_dump_writer.h b/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/mock/mock_debug_dump_writer.h
index 06650abbd60..0c6a9efe1d2 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/mock/mock_debug_dump_writer.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/mock/mock_debug_dump_writer.h
@@ -18,20 +18,24 @@ namespace webrtc {
class MockDebugDumpWriter : public DebugDumpWriter {
public:
- virtual ~MockDebugDumpWriter() { Die(); }
- MOCK_METHOD0(Die, void());
+ ~MockDebugDumpWriter() override { Die(); }
+ MOCK_METHOD(void, Die, ());
- MOCK_METHOD2(DumpEncoderRuntimeConfig,
- void(const AudioEncoderRuntimeConfig& config,
- int64_t timestamp));
- MOCK_METHOD2(DumpNetworkMetrics,
- void(const Controller::NetworkMetrics& metrics,
- int64_t timestamp));
+ MOCK_METHOD(void,
+ DumpEncoderRuntimeConfig,
+ (const AudioEncoderRuntimeConfig& config, int64_t timestamp),
+ (override));
+ MOCK_METHOD(void,
+ DumpNetworkMetrics,
+ (const Controller::NetworkMetrics& metrics, int64_t timestamp),
+ (override));
#if WEBRTC_ENABLE_PROTOBUF
- MOCK_METHOD2(DumpControllerManagerConfig,
- void(const audio_network_adaptor::config::ControllerManager&
- controller_manager_config,
- int64_t timestamp));
+ MOCK_METHOD(void,
+ DumpControllerManagerConfig,
+ (const audio_network_adaptor::config::ControllerManager&
+ controller_manager_config,
+ int64_t timestamp),
+ (override));
#endif
};
diff --git a/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/util/threshold_curve_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/util/threshold_curve_unittest.cc
index 9984049d501..dc3aec0b185 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/util/threshold_curve_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/audio_network_adaptor/util/threshold_curve_unittest.cc
@@ -621,7 +621,7 @@ TEST(ThresholdCurveTest, NearlyIdenticalCurvesSecondContinuesOnOtherRightSide) {
// The higher-left point must be given as the first point, and the lower-right
// point must be given as the second.
// This necessarily produces a non-positive slope.
-TEST(ThresholdCurveTest, WrongOrderPoints) {
+TEST(ThresholdCurveDeathTest, WrongOrderPoints) {
std::unique_ptr<ThresholdCurve> curve;
constexpr ThresholdCurve::Point left{5, 10};
constexpr ThresholdCurve::Point right{10, 5};
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng_unittest.cc
index 80349e25047..0e6ab793943 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng_unittest.cc
@@ -40,6 +40,8 @@ class CngTest : public ::testing::Test {
int16_t speech_data_[640]; // Max size of CNG internal buffers.
};
+class CngDeathTest : public CngTest {};
+
void CngTest::SetUp() {
FILE* input_file;
const std::string file_name =
@@ -69,7 +71,7 @@ void CngTest::TestCngEncode(int sample_rate_hz, int quality) {
#if GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// Create CNG encoder, init with faulty values, free CNG encoder.
-TEST_F(CngTest, CngInitFail) {
+TEST_F(CngDeathTest, CngInitFail) {
// Call with too few parameters.
EXPECT_DEATH(
{
@@ -86,7 +88,7 @@ TEST_F(CngTest, CngInitFail) {
}
// Encode Cng with too long input vector.
-TEST_F(CngTest, CngEncodeTooLong) {
+TEST_F(CngDeathTest, CngEncodeTooLong) {
rtc::Buffer sid_data;
// Create encoder.
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h
index a3b8e76a30e..d99e9c893f8 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h
@@ -19,6 +19,7 @@
#include "api/scoped_refptr.h"
#include "api/units/time_delta.h"
#include "rtc_base/constructor_magic.h"
+#include "system_wrappers/include/field_trial.h"
namespace webrtc {
@@ -48,6 +49,13 @@ class AudioEncoderIsacT final : public AudioEncoder {
size_t Num10MsFramesInNextPacket() const override;
size_t Max10MsFramesInAPacket() const override;
int GetTargetBitrate() const override;
+ void SetTargetBitrate(int target_bps) override;
+ void OnReceivedTargetAudioBitrate(int target_bps) override;
+ void OnReceivedUplinkBandwidth(
+ int target_audio_bitrate_bps,
+ absl::optional<int64_t> bwe_period_ms) override;
+ void OnReceivedUplinkAllocation(BitrateAllocationUpdate update) override;
+ void OnReceivedOverhead(size_t overhead_bytes_per_packet) override;
EncodedInfo EncodeImpl(uint32_t rtp_timestamp,
rtc::ArrayView<const int16_t> audio,
rtc::Buffer* encoded) override;
@@ -60,7 +68,13 @@ class AudioEncoderIsacT final : public AudioEncoder {
// STREAM_MAXW16_60MS for iSAC fix (60 ms).
static const size_t kSufficientEncodeBufferSizeBytes = 400;
- static const int kDefaultBitRate = 32000;
+ static constexpr int kDefaultBitRate = 32000;
+ static constexpr int kMinBitrateBps = 10000;
+ static constexpr int MaxBitrateBps(int sample_rate_hz) {
+ return sample_rate_hz == 32000 ? 56000 : 32000;
+ }
+
+ void SetTargetBitrate(int target_bps, bool subtract_per_packet_overhead);
// Recreate the iSAC encoder instance with the given settings, and save them.
void RecreateEncoderInstance(const Config& config);
@@ -77,6 +91,15 @@ class AudioEncoderIsacT final : public AudioEncoder {
// Timestamp of the previously encoded packet.
uint32_t last_encoded_timestamp_;
+ // Cache the value of the "WebRTC-SendSideBwe-WithOverhead" field trial.
+ const bool send_side_bwe_with_overhead_ =
+ field_trial::IsEnabled("WebRTC-SendSideBwe-WithOverhead");
+
+ // When we send a packet, expect this many bytes of headers to be added to it.
+ // Start out with a reasonable default that we can use until we receive a real
+ // value.
+ DataSize overhead_per_packet_ = DataSize::Bytes(28);
+
RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderIsacT);
};
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h
index 9ddb94326d1..0bde3f797f2 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h
@@ -12,6 +12,7 @@
#define MODULES_AUDIO_CODING_CODECS_ISAC_AUDIO_ENCODER_ISAC_T_IMPL_H_
#include "rtc_base/checks.h"
+#include "rtc_base/numerics/safe_minmax.h"
namespace webrtc {
@@ -81,6 +82,51 @@ int AudioEncoderIsacT<T>::GetTargetBitrate() const {
}
template <typename T>
+void AudioEncoderIsacT<T>::SetTargetBitrate(int target_bps) {
+ // Set target bitrate directly without subtracting per-packet overhead,
+ // because that's what AudioEncoderOpus does.
+ SetTargetBitrate(target_bps,
+ /*subtract_per_packet_overhead=*/false);
+}
+
+template <typename T>
+void AudioEncoderIsacT<T>::OnReceivedTargetAudioBitrate(int target_bps) {
+ // Set target bitrate directly without subtracting per-packet overhead,
+ // because that's what AudioEncoderOpus does.
+ SetTargetBitrate(target_bps,
+ /*subtract_per_packet_overhead=*/false);
+}
+
+template <typename T>
+void AudioEncoderIsacT<T>::OnReceivedUplinkBandwidth(
+ int target_audio_bitrate_bps,
+ absl::optional<int64_t> /*bwe_period_ms*/) {
+ // Set target bitrate, subtracting the per-packet overhead if
+ // WebRTC-SendSideBwe-WithOverhead is enabled, because that's what
+ // AudioEncoderOpus does.
+ SetTargetBitrate(
+ target_audio_bitrate_bps,
+ /*subtract_per_packet_overhead=*/send_side_bwe_with_overhead_);
+}
+
+template <typename T>
+void AudioEncoderIsacT<T>::OnReceivedUplinkAllocation(
+ BitrateAllocationUpdate update) {
+ // Set target bitrate, subtracting the per-packet overhead if
+ // WebRTC-SendSideBwe-WithOverhead is enabled, because that's what
+ // AudioEncoderOpus does.
+ SetTargetBitrate(
+ update.target_bitrate.bps<int>(),
+ /*subtract_per_packet_overhead=*/send_side_bwe_with_overhead_);
+}
+
+template <typename T>
+void AudioEncoderIsacT<T>::OnReceivedOverhead(
+ size_t overhead_bytes_per_packet) {
+ overhead_per_packet_ = DataSize::Bytes(overhead_bytes_per_packet);
+}
+
+template <typename T>
AudioEncoder::EncodedInfo AudioEncoderIsacT<T>::EncodeImpl(
uint32_t rtp_timestamp,
rtc::ArrayView<const int16_t> audio,
@@ -127,6 +173,21 @@ AudioEncoderIsacT<T>::GetFrameLengthRange() const {
}
template <typename T>
+void AudioEncoderIsacT<T>::SetTargetBitrate(int target_bps,
+ bool subtract_per_packet_overhead) {
+ if (subtract_per_packet_overhead) {
+ const DataRate overhead_rate =
+ overhead_per_packet_ / TimeDelta::Millis(config_.frame_size_ms);
+ target_bps -= overhead_rate.bps();
+ }
+ target_bps = rtc::SafeClamp(target_bps, kMinBitrateBps,
+ MaxBitrateBps(config_.sample_rate_hz));
+ int result = T::Control(isac_state_, target_bps, config_.frame_size_ms);
+ RTC_DCHECK_EQ(result, 0);
+ config_.bit_rate = target_bps;
+}
+
+template <typename T>
void AudioEncoderIsacT<T>::RecreateEncoderInstance(const Config& config) {
RTC_CHECK(config.IsOk());
packet_in_progress_ = false;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/isac_webrtc_api_test.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/isac_webrtc_api_test.cc
index c4d7ab8fa80..a2e1e088e6e 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/isac_webrtc_api_test.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/isac_webrtc_api_test.cc
@@ -9,6 +9,7 @@
*/
#include <array>
+#include <map>
#include <memory>
#include <vector>
@@ -159,6 +160,33 @@ TEST_P(EncoderTest, TestDifferentBitrates) {
EXPECT_LT(num_bytes_low, num_bytes_high);
}
+// Encodes an input audio sequence first with a low, then with a high target
+// bitrate *using the same encoder* and checks that the number of emitted bytes
+// in the first case is less than in the second case.
+TEST_P(EncoderTest, TestDynamicBitrateChange) {
+ constexpr int kLowBps = 20000;
+ constexpr int kHighBps = 25000;
+ constexpr int kStartBps = 30000;
+ auto encoder = CreateEncoder(GetIsacImpl(), GetSampleRateHz(),
+ GetFrameSizeMs(), kStartBps);
+ std::map<int, int> num_bytes;
+ constexpr int kNumFrames = 200; // 2 seconds.
+ for (int bitrate_bps : {kLowBps, kHighBps}) {
+ auto pcm_file = GetPcmTestFileReader(GetSampleRateHz());
+ encoder->OnReceivedTargetAudioBitrate(bitrate_bps);
+ for (int i = 0; i < kNumFrames; ++i) {
+ AudioFrame in;
+ pcm_file->Read10MsData(in);
+ rtc::Buffer buf;
+ encoder->Encode(/*rtp_timestamp=*/0, AudioFrameToView(in), &buf);
+ num_bytes[bitrate_bps] += buf.size();
+ }
+ }
+ // kHighBps / kLowBps == 1.25, so require the high-bitrate run to produce at
+ // least 1.2 times the number of bytes.
+ EXPECT_LT(1.2 * num_bytes[kLowBps], num_bytes[kHighBps]);
+}
+
// Checks that, given a target bitrate, the encoder does not overshoot too much.
TEST_P(EncoderTest, DoNotOvershootTargetBitrate) {
for (int bitrate_bps : {10000, 15000, 20000, 26000, 32000}) {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.cc
index ef32f4ce02d..220e96f1b77 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.cc
@@ -66,46 +66,7 @@ constexpr int kOpusSupportedFrameLengths[] = {10, 20, 40, 60};
// PacketLossFractionSmoother uses an exponential filter with a time constant
// of -1.0 / ln(0.9999) = 10000 ms.
constexpr float kAlphaForPacketLossFractionSmoother = 0.9999f;
-
-// Optimize the loss rate to configure Opus. Basically, optimized loss rate is
-// the input loss rate rounded down to various levels, because a robustly good
-// audio quality is achieved by lowering the packet loss down.
-// Additionally, to prevent toggling, margins are used, i.e., when jumping to
-// a loss rate from below, a higher threshold is used than jumping to the same
-// level from above.
-float OptimizePacketLossRate(float new_loss_rate, float old_loss_rate) {
- RTC_DCHECK_GE(new_loss_rate, 0.0f);
- RTC_DCHECK_LE(new_loss_rate, 1.0f);
- RTC_DCHECK_GE(old_loss_rate, 0.0f);
- RTC_DCHECK_LE(old_loss_rate, 1.0f);
- constexpr float kPacketLossRate20 = 0.20f;
- constexpr float kPacketLossRate10 = 0.10f;
- constexpr float kPacketLossRate5 = 0.05f;
- constexpr float kPacketLossRate1 = 0.01f;
- constexpr float kLossRate20Margin = 0.02f;
- constexpr float kLossRate10Margin = 0.01f;
- constexpr float kLossRate5Margin = 0.01f;
- if (new_loss_rate >=
- kPacketLossRate20 +
- kLossRate20Margin *
- (kPacketLossRate20 - old_loss_rate > 0 ? 1 : -1)) {
- return kPacketLossRate20;
- } else if (new_loss_rate >=
- kPacketLossRate10 +
- kLossRate10Margin *
- (kPacketLossRate10 - old_loss_rate > 0 ? 1 : -1)) {
- return kPacketLossRate10;
- } else if (new_loss_rate >=
- kPacketLossRate5 +
- kLossRate5Margin *
- (kPacketLossRate5 - old_loss_rate > 0 ? 1 : -1)) {
- return kPacketLossRate5;
- } else if (new_loss_rate >= kPacketLossRate1) {
- return kPacketLossRate1;
- } else {
- return 0.0f;
- }
-}
+constexpr float kMaxPacketLossFraction = 0.2f;
int CalculateDefaultBitrate(int max_playback_rate, size_t num_channels) {
const int bitrate = [&] {
@@ -201,35 +162,6 @@ int GetBitrateBps(const AudioEncoderOpusConfig& config) {
return *config.bitrate_bps;
}
-bool IsValidPacketLossRate(int value) {
- return value >= 0 && value <= 100;
-}
-
-float ToFraction(int percent) {
- return static_cast<float>(percent) / 100;
-}
-
-float GetMinPacketLossRate() {
- constexpr char kPacketLossFieldTrial[] = "WebRTC-Audio-OpusMinPacketLossRate";
- const bool use_opus_min_packet_loss_rate =
- webrtc::field_trial::IsEnabled(kPacketLossFieldTrial);
- if (use_opus_min_packet_loss_rate) {
- const std::string field_trial_string =
- webrtc::field_trial::FindFullName(kPacketLossFieldTrial);
- constexpr int kDefaultMinPacketLossRate = 1;
- int value = kDefaultMinPacketLossRate;
- if (sscanf(field_trial_string.c_str(), "Enabled-%d", &value) == 1 &&
- !IsValidPacketLossRate(value)) {
- RTC_LOG(LS_WARNING) << "Invalid parameter for " << kPacketLossFieldTrial
- << ", using default value: "
- << kDefaultMinPacketLossRate;
- value = kDefaultMinPacketLossRate;
- }
- return ToFraction(value);
- }
- return 0.0;
-}
-
std::vector<float> GetBitrateMultipliers() {
constexpr char kBitrateMultipliersName[] =
"WebRTC-Audio-OpusBitrateMultipliers";
@@ -425,14 +357,13 @@ AudioEncoderOpusImpl::AudioEncoderOpusImpl(
: payload_type_(payload_type),
send_side_bwe_with_overhead_(
webrtc::field_trial::IsEnabled("WebRTC-SendSideBwe-WithOverhead")),
- use_stable_target_for_adaptation_(webrtc::field_trial::IsEnabled(
+ use_stable_target_for_adaptation_(!webrtc::field_trial::IsDisabled(
"WebRTC-Audio-StableTargetAdaptation")),
adjust_bandwidth_(
webrtc::field_trial::IsEnabled("WebRTC-AdjustOpusBandwidth")),
bitrate_changed_(true),
bitrate_multipliers_(GetBitrateMultipliers()),
packet_loss_rate_(0.0),
- min_packet_loss_rate_(GetMinPacketLossRate()),
inst_(nullptr),
packet_loss_fraction_smoother_(new PacketLossFractionSmoother()),
audio_network_adaptor_creator_(audio_network_adaptor_creator),
@@ -541,14 +472,14 @@ void AudioEncoderOpusImpl::DisableAudioNetworkAdaptor() {
void AudioEncoderOpusImpl::OnReceivedUplinkPacketLossFraction(
float uplink_packet_loss_fraction) {
- if (!audio_network_adaptor_) {
- packet_loss_fraction_smoother_->AddSample(uplink_packet_loss_fraction);
- float average_fraction_loss = packet_loss_fraction_smoother_->GetAverage();
- return SetProjectedPacketLossRate(average_fraction_loss);
+ if (audio_network_adaptor_) {
+ audio_network_adaptor_->SetUplinkPacketLossFraction(
+ uplink_packet_loss_fraction);
+ ApplyAudioNetworkAdaptor();
}
- audio_network_adaptor_->SetUplinkPacketLossFraction(
- uplink_packet_loss_fraction);
- ApplyAudioNetworkAdaptor();
+ packet_loss_fraction_smoother_->AddSample(uplink_packet_loss_fraction);
+ float average_fraction_loss = packet_loss_fraction_smoother_->GetAverage();
+ SetProjectedPacketLossRate(average_fraction_loss);
}
void AudioEncoderOpusImpl::OnReceivedTargetAudioBitrate(
@@ -789,8 +720,7 @@ void AudioEncoderOpusImpl::SetNumChannelsToEncode(
}
void AudioEncoderOpusImpl::SetProjectedPacketLossRate(float fraction) {
- fraction = OptimizePacketLossRate(fraction, packet_loss_rate_);
- fraction = std::max(fraction, min_packet_loss_rate_);
+ fraction = std::min(std::max(fraction, 0.0f), kMaxPacketLossFraction);
if (packet_loss_rate_ != fraction) {
packet_loss_rate_ = fraction;
RTC_CHECK_EQ(
@@ -828,10 +758,6 @@ void AudioEncoderOpusImpl::ApplyAudioNetworkAdaptor() {
SetTargetBitrate(*config.bitrate_bps);
if (config.frame_length_ms)
SetFrameLength(*config.frame_length_ms);
- if (config.enable_fec)
- SetFec(*config.enable_fec);
- if (config.uplink_packet_loss_fraction)
- SetProjectedPacketLossRate(*config.uplink_packet_loss_fraction);
if (config.enable_dtx)
SetDtx(*config.enable_dtx);
if (config.num_channels)
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h
index 540413290d7..ab954feba78 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h
@@ -160,7 +160,6 @@ class AudioEncoderOpusImpl final : public AudioEncoder {
// 1 kbps range.
std::vector<float> bitrate_multipliers_;
float packet_loss_rate_;
- const float min_packet_loss_rate_;
std::vector<int16_t> input_buffer_;
OpusEncInst* inst_;
uint32_t first_timestamp_in_buffer_;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus_unittest.cc
index b469885c1f0..1cbc4a3ff70 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus_unittest.cc
@@ -93,17 +93,13 @@ std::unique_ptr<AudioEncoderOpusStates> CreateCodec(int sample_rate_hz,
AudioEncoderRuntimeConfig CreateEncoderRuntimeConfig() {
constexpr int kBitrate = 40000;
constexpr int kFrameLength = 60;
- constexpr bool kEnableFec = true;
constexpr bool kEnableDtx = false;
constexpr size_t kNumChannels = 1;
- constexpr float kPacketLossFraction = 0.1f;
AudioEncoderRuntimeConfig config;
config.bitrate_bps = kBitrate;
config.frame_length_ms = kFrameLength;
- config.enable_fec = kEnableFec;
config.enable_dtx = kEnableDtx;
config.num_channels = kNumChannels;
- config.uplink_packet_loss_fraction = kPacketLossFraction;
return config;
}
@@ -111,7 +107,6 @@ void CheckEncoderRuntimeConfig(const AudioEncoderOpusImpl* encoder,
const AudioEncoderRuntimeConfig& config) {
EXPECT_EQ(*config.bitrate_bps, encoder->GetTargetBitrate());
EXPECT_EQ(*config.frame_length_ms, encoder->next_frame_length_ms());
- EXPECT_EQ(*config.enable_fec, encoder->fec_enabled());
EXPECT_EQ(*config.enable_dtx, encoder->GetDtx());
EXPECT_EQ(*config.num_channels, encoder->num_channels_to_encode());
}
@@ -222,84 +217,6 @@ TEST_P(AudioEncoderOpusTest,
}
}
-namespace {
-
-// Returns a vector with the n evenly-spaced numbers a, a + (b - a)/(n - 1),
-// ..., b.
-std::vector<float> IntervalSteps(float a, float b, size_t n) {
- RTC_DCHECK_GT(n, 1u);
- const float step = (b - a) / (n - 1);
- std::vector<float> points;
- points.push_back(a);
- for (size_t i = 1; i < n - 1; ++i)
- points.push_back(a + i * step);
- points.push_back(b);
- return points;
-}
-
-// Sets the packet loss rate to each number in the vector in turn, and verifies
-// that the loss rate as reported by the encoder is |expected_return| for all
-// of them.
-void TestSetPacketLossRate(const AudioEncoderOpusStates* states,
- const std::vector<float>& losses,
- float expected_return) {
- // |kSampleIntervalMs| is chosen to ease the calculation since
- // 0.9999 ^ 184198 = 1e-8. Which minimizes the effect of
- // PacketLossFractionSmoother used in AudioEncoderOpus.
- constexpr int64_t kSampleIntervalMs = 184198;
- for (float loss : losses) {
- states->encoder->OnReceivedUplinkPacketLossFraction(loss);
- states->fake_clock->AdvanceTime(TimeDelta::Millis(kSampleIntervalMs));
- EXPECT_FLOAT_EQ(expected_return, states->encoder->packet_loss_rate());
- }
-}
-
-} // namespace
-
-TEST_P(AudioEncoderOpusTest, PacketLossRateOptimized) {
- auto states = CreateCodec(sample_rate_hz_, 1);
- auto I = [](float a, float b) { return IntervalSteps(a, b, 10); };
- constexpr float eps = 1e-8f;
-
- // Note that the order of the following calls is critical.
-
- // clang-format off
- TestSetPacketLossRate(states.get(), I(0.00f , 0.01f - eps), 0.00f);
- TestSetPacketLossRate(states.get(), I(0.01f + eps, 0.06f - eps), 0.01f);
- TestSetPacketLossRate(states.get(), I(0.06f + eps, 0.11f - eps), 0.05f);
- TestSetPacketLossRate(states.get(), I(0.11f + eps, 0.22f - eps), 0.10f);
- TestSetPacketLossRate(states.get(), I(0.22f + eps, 1.00f ), 0.20f);
-
- TestSetPacketLossRate(states.get(), I(1.00f , 0.18f + eps), 0.20f);
- TestSetPacketLossRate(states.get(), I(0.18f - eps, 0.09f + eps), 0.10f);
- TestSetPacketLossRate(states.get(), I(0.09f - eps, 0.04f + eps), 0.05f);
- TestSetPacketLossRate(states.get(), I(0.04f - eps, 0.01f + eps), 0.01f);
- TestSetPacketLossRate(states.get(), I(0.01f - eps, 0.00f ), 0.00f);
- // clang-format on
-}
-
-TEST_P(AudioEncoderOpusTest, PacketLossRateLowerBounded) {
- test::ScopedFieldTrials override_field_trials(
- "WebRTC-Audio-OpusMinPacketLossRate/Enabled-5/");
- auto states = CreateCodec(sample_rate_hz_, 1);
- auto I = [](float a, float b) { return IntervalSteps(a, b, 10); };
- constexpr float eps = 1e-8f;
-
- // clang-format off
- TestSetPacketLossRate(states.get(), I(0.00f , 0.01f - eps), 0.05f);
- TestSetPacketLossRate(states.get(), I(0.01f + eps, 0.06f - eps), 0.05f);
- TestSetPacketLossRate(states.get(), I(0.06f + eps, 0.11f - eps), 0.05f);
- TestSetPacketLossRate(states.get(), I(0.11f + eps, 0.22f - eps), 0.10f);
- TestSetPacketLossRate(states.get(), I(0.22f + eps, 1.00f ), 0.20f);
-
- TestSetPacketLossRate(states.get(), I(1.00f , 0.18f + eps), 0.20f);
- TestSetPacketLossRate(states.get(), I(0.18f - eps, 0.09f + eps), 0.10f);
- TestSetPacketLossRate(states.get(), I(0.09f - eps, 0.04f + eps), 0.05f);
- TestSetPacketLossRate(states.get(), I(0.04f - eps, 0.01f + eps), 0.05f);
- TestSetPacketLossRate(states.get(), I(0.01f - eps, 0.00f ), 0.05f);
- // clang-format on
-}
-
TEST_P(AudioEncoderOpusTest, SetReceiverFrameLengthRange) {
auto states = CreateCodec(sample_rate_hz_, 2);
// Before calling to |SetReceiverFrameLengthRange|,
@@ -337,6 +254,8 @@ TEST_P(AudioEncoderOpusTest,
TEST_P(AudioEncoderOpusTest,
InvokeAudioNetworkAdaptorOnReceivedUplinkBandwidth) {
+ test::ScopedFieldTrials override_field_trials(
+ "WebRTC-Audio-StableTargetAdaptation/Disabled/");
auto states = CreateCodec(sample_rate_hz_, 2);
states->encoder->EnableAudioNetworkAdaptor("", nullptr);
@@ -358,6 +277,28 @@ TEST_P(AudioEncoderOpusTest,
CheckEncoderRuntimeConfig(states->encoder.get(), config);
}
+TEST_P(AudioEncoderOpusTest,
+ InvokeAudioNetworkAdaptorOnReceivedUplinkAllocation) {
+ auto states = CreateCodec(sample_rate_hz_, 2);
+ states->encoder->EnableAudioNetworkAdaptor("", nullptr);
+
+ auto config = CreateEncoderRuntimeConfig();
+ EXPECT_CALL(*states->mock_audio_network_adaptor, GetEncoderRuntimeConfig())
+ .WillOnce(Return(config));
+
+ BitrateAllocationUpdate update;
+ update.target_bitrate = DataRate::BitsPerSec(30000);
+ update.stable_target_bitrate = DataRate::BitsPerSec(20000);
+ update.bwe_period = TimeDelta::Millis(200);
+ EXPECT_CALL(*states->mock_audio_network_adaptor,
+ SetTargetAudioBitrate(update.target_bitrate.bps()));
+ EXPECT_CALL(*states->mock_audio_network_adaptor,
+ SetUplinkBandwidth(update.stable_target_bitrate.bps()));
+ states->encoder->OnReceivedUplinkAllocation(update);
+
+ CheckEncoderRuntimeConfig(states->encoder.get(), config);
+}
+
TEST_P(AudioEncoderOpusTest, InvokeAudioNetworkAdaptorOnReceivedRtt) {
auto states = CreateCodec(sample_rate_hz_, 2);
states->encoder->EnableAudioNetworkAdaptor("", nullptr);
@@ -404,16 +345,21 @@ TEST_P(AudioEncoderOpusTest,
// First time, no filtering.
states->encoder->OnReceivedUplinkPacketLossFraction(kPacketLossFraction_1);
- EXPECT_FLOAT_EQ(0.01f, states->encoder->packet_loss_rate());
+ EXPECT_FLOAT_EQ(0.02f, states->encoder->packet_loss_rate());
states->fake_clock->AdvanceTime(TimeDelta::Millis(kSecondSampleTimeMs));
states->encoder->OnReceivedUplinkPacketLossFraction(kPacketLossFraction_2);
// Now the output of packet loss fraction smoother should be
- // (0.02 + 0.198) / 2 = 0.109, which reach the threshold for the optimized
- // packet loss rate to increase to 0.05. If no smoothing has been made, the
- // optimized packet loss rate should have been increase to 0.1.
- EXPECT_FLOAT_EQ(0.05f, states->encoder->packet_loss_rate());
+ // (0.02 + 0.198) / 2 = 0.109.
+ EXPECT_NEAR(0.109f, states->encoder->packet_loss_rate(), 0.001);
+}
+
+TEST_P(AudioEncoderOpusTest, PacketLossRateUpperBounded) {
+ auto states = CreateCodec(sample_rate_hz_, 2);
+
+ states->encoder->OnReceivedUplinkPacketLossFraction(0.5);
+ EXPECT_FLOAT_EQ(0.2f, states->encoder->packet_loss_rate());
}
TEST_P(AudioEncoderOpusTest, DoNotInvokeSetTargetBitrateIfOverheadUnknown) {
@@ -477,29 +423,6 @@ TEST_P(AudioEncoderOpusTest, BitrateBounded) {
EXPECT_EQ(kMaxBitrateBps, states->encoder->GetTargetBitrate());
}
-TEST_P(AudioEncoderOpusTest, MinPacketLossRate) {
- constexpr float kDefaultMinPacketLossRate = 0.01;
- {
- test::ScopedFieldTrials override_field_trials(
- "WebRTC-Audio-OpusMinPacketLossRate/Enabled/");
- auto states = CreateCodec(sample_rate_hz_, 1);
- EXPECT_EQ(kDefaultMinPacketLossRate, states->encoder->packet_loss_rate());
- }
- {
- test::ScopedFieldTrials override_field_trials(
- "WebRTC-Audio-OpusMinPacketLossRate/Enabled-200/");
- auto states = CreateCodec(sample_rate_hz_, 1);
- EXPECT_EQ(kDefaultMinPacketLossRate, states->encoder->packet_loss_rate());
- }
- {
- test::ScopedFieldTrials override_field_trials(
- "WebRTC-Audio-OpusMinPacketLossRate/Enabled-50/");
- constexpr float kMinPacketLossRate = 0.5;
- auto states = CreateCodec(sample_rate_hz_, 1);
- EXPECT_EQ(kMinPacketLossRate, states->encoder->packet_loss_rate());
- }
-}
-
// Verifies that the complexity adaptation in the config works as intended.
TEST(AudioEncoderOpusTest, ConfigComplexityAdaptation) {
AudioEncoderOpusConfig config;
@@ -602,6 +525,8 @@ TEST_P(AudioEncoderOpusTest, EmptyConfigDoesNotAffectEncoderSettings) {
}
TEST_P(AudioEncoderOpusTest, UpdateUplinkBandwidthInAudioNetworkAdaptor) {
+ test::ScopedFieldTrials override_field_trials(
+ "WebRTC-Audio-StableTargetAdaptation/Disabled/");
auto states = CreateCodec(sample_rate_hz_, 2);
states->encoder->EnableAudioNetworkAdaptor("", nullptr);
const size_t opus_rate_khz = rtc::CheckedDivExact(sample_rate_hz_, 1000);
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_interface.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_interface.cc
index 4bac365a89a..ca39ed82354 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_interface.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_interface.cc
@@ -678,33 +678,7 @@ int WebRtcOpus_FecDurationEst(const uint8_t* payload,
return samples;
}
-// This method is based on Definition of the Opus Audio Codec
-// (https://tools.ietf.org/html/rfc6716). Basically, this method is based on
-// parsing the LP layer of an Opus packet, particularly the LBRR flag.
-int WebRtcOpus_PacketHasFec(const uint8_t* payload,
- size_t payload_length_bytes) {
- if (payload == NULL || payload_length_bytes == 0)
- return 0;
-
- // In CELT_ONLY mode, packets should not have FEC.
- if (payload[0] & 0x80)
- return 0;
-
- // Max number of frames in an Opus packet is 48.
- opus_int16 frame_sizes[48];
- const unsigned char* frame_data[48];
-
- // Parse packet to get the frames. But we only care about the first frame,
- // since we can only decode the FEC from the first one.
- if (opus_packet_parse(payload, static_cast<opus_int32>(payload_length_bytes),
- NULL, frame_data, frame_sizes, NULL) < 0) {
- return 0;
- }
-
- if (frame_sizes[0] <= 1) {
- return 0;
- }
-
+int WebRtcOpus_NumSilkFrames(const uint8_t* payload) {
// For computing the payload length in ms, the sample rate is not important
// since it cancels out. We use 48 kHz, but any valid sample rate would work.
int payload_length_ms =
@@ -727,10 +701,43 @@ int WebRtcOpus_PacketHasFec(const uint8_t* payload,
default:
return 0; // It is actually even an invalid packet.
}
+ return silk_frames;
+}
+
+// This method is based on Definition of the Opus Audio Codec
+// (https://tools.ietf.org/html/rfc6716). Basically, this method is based on
+// parsing the LP layer of an Opus packet, particularly the LBRR flag.
+int WebRtcOpus_PacketHasFec(const uint8_t* payload,
+ size_t payload_length_bytes) {
+ if (payload == NULL || payload_length_bytes == 0)
+ return 0;
+
+ // In CELT_ONLY mode, packets should not have FEC.
+ if (payload[0] & 0x80)
+ return 0;
+
+ int silk_frames = WebRtcOpus_NumSilkFrames(payload);
+ if (silk_frames == 0)
+ return 0; // Not valid.
const int channels = opus_packet_get_nb_channels(payload);
RTC_DCHECK(channels == 1 || channels == 2);
+ // Max number of frames in an Opus packet is 48.
+ opus_int16 frame_sizes[48];
+ const unsigned char* frame_data[48];
+
+ // Parse packet to get the frames. But we only care about the first frame,
+ // since we can only decode the FEC from the first one.
+ if (opus_packet_parse(payload, static_cast<opus_int32>(payload_length_bytes),
+ NULL, frame_data, frame_sizes, NULL) < 0) {
+ return 0;
+ }
+
+ if (frame_sizes[0] < 1) {
+ return 0;
+ }
+
// A frame starts with the LP layer. The LP layer begins with two to eight
+ // header bits. These consist of one VAD bit per SILK frame (up to 3),
// followed by a single flag indicating the presence of LBRR frames.
@@ -748,3 +755,45 @@ int WebRtcOpus_PacketHasFec(const uint8_t* payload,
return 0;
}
+
+int WebRtcOpus_PacketHasVoiceActivity(const uint8_t* payload,
+ size_t payload_length_bytes) {
+ if (payload == NULL || payload_length_bytes == 0)
+ return 0;
+
+ // In CELT_ONLY mode we cannot determine whether there is VAD.
+ if (payload[0] & 0x80)
+ return -1;
+
+ int silk_frames = WebRtcOpus_NumSilkFrames(payload);
+ if (silk_frames == 0)
+ return -1;
+
+ const int channels = opus_packet_get_nb_channels(payload);
+ RTC_DCHECK(channels == 1 || channels == 2);
+
+ // Max number of frames in an Opus packet is 48.
+ opus_int16 frame_sizes[48];
+ const unsigned char* frame_data[48];
+
+ // Parse packet to get the frames.
+ int frames =
+ opus_packet_parse(payload, static_cast<opus_int32>(payload_length_bytes),
+ NULL, frame_data, frame_sizes, NULL);
+ if (frames < 0)
+ return -1;
+
+ // Iterate over all Opus frames which may contain multiple SILK frames.
+ for (int frame = 0; frame < frames; frame++) {
+ if (frame_sizes[frame] < 1) {
+ continue;
+ }
+ if (frame_data[frame][0] >> (8 - silk_frames))
+ return 1;
+ if (channels == 2 &&
+ (frame_data[frame][0] << (silk_frames + 1)) >> (8 - silk_frames))
+ return 1;
+ }
+
+ return 0;
+}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_interface.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_interface.h
index e8de9730109..2a3ceaa7d31 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_interface.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_interface.h
@@ -510,6 +510,22 @@ int WebRtcOpus_FecDurationEst(const uint8_t* payload,
int WebRtcOpus_PacketHasFec(const uint8_t* payload,
size_t payload_length_bytes);
+/****************************************************************************
+ * WebRtcOpus_PacketHasVoiceActivity(...)
+ *
+ * This function returns the SILK VAD information encoded in the opus packet.
+ * For CELT-only packets that do not have VAD information, it returns -1.
+ * Input:
+ * - payload : Encoded data pointer
+ * - payload_length_bytes : Bytes of encoded data
+ *
+ * Return value : 0 - no frame had the VAD flag set.
+ * 1 - at least one frame had the VAD flag set.
+ * -1 - VAD status could not be determined.
+ */
+int WebRtcOpus_PacketHasVoiceActivity(const uint8_t* payload,
+ size_t payload_length_bytes);
+
#ifdef __cplusplus
} // extern "C"
#endif
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_unittest.cc
index 3407d7d3cf9..80cab501372 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_unittest.cc
@@ -949,4 +949,30 @@ TEST_P(OpusTest, OpusDecodeRepacketized) {
EXPECT_EQ(0, WebRtcOpus_DecoderFree(opus_decoder_));
}
+TEST(OpusVadTest, CeltUnknownStatus) {
+ const uint8_t celt[] = {0x80};
+ EXPECT_EQ(WebRtcOpus_PacketHasVoiceActivity(celt, 1), -1);
+}
+
+TEST(OpusVadTest, Mono20msVadSet) {
+ uint8_t silk20msMonoVad[] = {0x78, 0x80};
+ EXPECT_TRUE(WebRtcOpus_PacketHasVoiceActivity(silk20msMonoVad, 2));
+}
+
+TEST(OpusVadTest, Mono20MsVadUnset) {
+ uint8_t silk20msMonoSilence[] = {0x78, 0x00};
+ EXPECT_FALSE(WebRtcOpus_PacketHasVoiceActivity(silk20msMonoSilence, 2));
+}
+
+TEST(OpusVadTest, Stereo20MsVadOnSideChannel) {
+ uint8_t silk20msStereoVadSideChannel[] = {0x78 | 0x04, 0x20};
+ EXPECT_TRUE(
+ WebRtcOpus_PacketHasVoiceActivity(silk20msStereoVadSideChannel, 2));
+}
+
+TEST(OpusVadTest, TwoOpusMonoFramesVadOnSecond) {
+ uint8_t twoMonoFrames[] = {0x78 | 0x1, 0x00, 0x80};
+ EXPECT_TRUE(WebRtcOpus_PacketHasVoiceActivity(twoMonoFrames, 3));
+}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc
index e75806af104..8d028c9b9ad 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc
@@ -15,6 +15,7 @@
#include <utility>
#include <vector>
+#include "rtc_base/byte_order.h"
#include "rtc_base/checks.h"
namespace webrtc {
@@ -59,32 +60,62 @@ AudioEncoder::EncodedInfo AudioEncoderCopyRed::EncodeImpl(
uint32_t rtp_timestamp,
rtc::ArrayView<const int16_t> audio,
rtc::Buffer* encoded) {
- const size_t primary_offset = encoded->size();
+ // Allocate room for RFC 2198 header if there is redundant data.
+ // Otherwise this will send the primary payload type without
+ // wrapping in RED.
+ const size_t header_length_bytes = secondary_info_.encoded_bytes > 0 ? 5 : 0;
+ size_t secondary_length_bytes = 0;
+
+ if (secondary_info_.encoded_bytes > 0) {
+ encoded->SetSize(header_length_bytes);
+ encoded->AppendData(secondary_encoded_);
+ secondary_length_bytes = secondary_info_.encoded_bytes;
+ }
EncodedInfo info = speech_encoder_->Encode(rtp_timestamp, audio, encoded);
+ if (info.encoded_bytes == 0) {
+ encoded->Clear();
+ return info;
+ }
+
+ // Actually construct the RFC 2198 header.
+ if (secondary_info_.encoded_bytes > 0) {
+ const uint32_t timestamp_delta =
+ info.encoded_timestamp - secondary_info_.encoded_timestamp;
+
+ encoded->data()[0] = secondary_info_.payload_type | 0x80;
+ RTC_DCHECK_LT(secondary_info_.encoded_bytes, 1 << 10);
+ rtc::SetBE16(static_cast<uint8_t*>(encoded->data()) + 1,
+ (timestamp_delta << 2) | (secondary_info_.encoded_bytes >> 8));
+ encoded->data()[3] = secondary_info_.encoded_bytes & 0xff;
+ encoded->data()[4] = info.payload_type;
+ }
+
RTC_CHECK(info.redundant.empty()) << "Cannot use nested redundant encoders.";
- RTC_DCHECK_EQ(encoded->size() - primary_offset, info.encoded_bytes);
-
- if (info.encoded_bytes > 0) {
- // |info| will be implicitly cast to an EncodedInfoLeaf struct, effectively
- // discarding the (empty) vector of redundant information. This is
- // intentional.
- info.redundant.push_back(info);
- RTC_DCHECK_EQ(info.redundant.size(), 1);
- if (secondary_info_.encoded_bytes > 0) {
- encoded->AppendData(secondary_encoded_);
- info.redundant.push_back(secondary_info_);
- RTC_DCHECK_EQ(info.redundant.size(), 2);
- }
- // Save primary to secondary.
- secondary_encoded_.SetData(encoded->data() + primary_offset,
- info.encoded_bytes);
- secondary_info_ = info;
- RTC_DCHECK_EQ(info.speech, info.redundant[0].speech);
+ RTC_DCHECK_EQ(encoded->size() - header_length_bytes - secondary_length_bytes,
+ info.encoded_bytes);
+
+ // |info| will be implicitly cast to an EncodedInfoLeaf struct, effectively
+ // discarding the (empty) vector of redundant information. This is
+ // intentional.
+ info.redundant.push_back(info);
+ RTC_DCHECK_EQ(info.redundant.size(), 1);
+ if (secondary_info_.encoded_bytes > 0) {
+ info.redundant.push_back(secondary_info_);
+ RTC_DCHECK_EQ(info.redundant.size(), 2);
}
+ // Save primary to secondary.
+ secondary_encoded_.SetData(
+ &encoded->data()[header_length_bytes + secondary_info_.encoded_bytes],
+ info.encoded_bytes);
+ secondary_info_ = info;
+ RTC_DCHECK_EQ(info.speech, info.redundant[0].speech);
+
// Update main EncodedInfo.
- info.payload_type = red_payload_type_;
- info.encoded_bytes = 0;
+ if (header_length_bytes > 0) {
+ info.payload_type = red_payload_type_;
+ }
+ info.encoded_bytes = header_length_bytes;
for (std::vector<EncodedInfoLeaf>::const_iterator it = info.redundant.begin();
it != info.redundant.end(); ++it) {
info.encoded_bytes += it->encoded_bytes;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc
index e20515a1650..720acb4f879 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc
@@ -139,6 +139,7 @@ TEST_F(AudioEncoderCopyRedTest, CheckImmediateEncode) {
// new data, even if the RED codec is loaded with a secondary encoding.
TEST_F(AudioEncoderCopyRedTest, CheckNoOutput) {
static const size_t kEncodedSize = 17;
+ static const size_t kHeaderLenBytes = 5;
{
InSequence s;
EXPECT_CALL(*mock_encoder_, EncodeImpl(_, _, _))
@@ -160,7 +161,7 @@ TEST_F(AudioEncoderCopyRedTest, CheckNoOutput) {
// Final call to the speech encoder will produce output.
Encode();
- EXPECT_EQ(2 * kEncodedSize, encoded_info_.encoded_bytes);
+ EXPECT_EQ(2 * kEncodedSize + kHeaderLenBytes, encoded_info_.encoded_bytes);
ASSERT_EQ(2u, encoded_info_.redundant.size());
}
@@ -187,7 +188,7 @@ TEST_F(AudioEncoderCopyRedTest, CheckPayloadSizes) {
ASSERT_EQ(2u, encoded_info_.redundant.size());
EXPECT_EQ(i, encoded_info_.redundant[0].encoded_bytes);
EXPECT_EQ(i - 1, encoded_info_.redundant[1].encoded_bytes);
- EXPECT_EQ(i + i - 1, encoded_info_.encoded_bytes);
+ EXPECT_EQ(5 + i + i - 1, encoded_info_.encoded_bytes);
}
}
@@ -224,6 +225,7 @@ TEST_F(AudioEncoderCopyRedTest, CheckPayloads) {
// Let the mock encoder write payloads with increasing values. The first
// payload will have values 0, 1, 2, ..., kPayloadLenBytes - 1.
static const size_t kPayloadLenBytes = 5;
+ static const size_t kHeaderLenBytes = 5;
uint8_t payload[kPayloadLenBytes];
for (uint8_t i = 0; i < kPayloadLenBytes; ++i) {
payload[i] = i;
@@ -239,7 +241,7 @@ TEST_F(AudioEncoderCopyRedTest, CheckPayloads) {
EXPECT_EQ(i, encoded_.data()[i]);
}
- for (int j = 0; j < 5; ++j) {
+ for (int j = 0; j < 1; ++j) {
// Increment all values of the payload by 10.
for (size_t i = 0; i < kPayloadLenBytes; ++i)
payload[i] += 10;
@@ -249,16 +251,17 @@ TEST_F(AudioEncoderCopyRedTest, CheckPayloads) {
EXPECT_EQ(kPayloadLenBytes, encoded_info_.redundant[0].encoded_bytes);
EXPECT_EQ(kPayloadLenBytes, encoded_info_.redundant[1].encoded_bytes);
for (size_t i = 0; i < kPayloadLenBytes; ++i) {
- // Check primary payload.
- EXPECT_EQ((j + 1) * 10 + i, encoded_.data()[i]);
// Check secondary payload.
- EXPECT_EQ(j * 10 + i, encoded_.data()[i + kPayloadLenBytes]);
+ EXPECT_EQ(j * 10 + i, encoded_.data()[kHeaderLenBytes + i]);
+
+ // Check primary payload.
+ EXPECT_EQ((j + 1) * 10 + i,
+ encoded_.data()[kHeaderLenBytes + i + kPayloadLenBytes]);
}
}
}
// Checks correct propagation of payload type.
-// Checks that the correct timestamps are returned.
TEST_F(AudioEncoderCopyRedTest, CheckPayloadType) {
const int primary_payload_type = red_payload_type_ + 1;
AudioEncoder::EncodedInfo info;
@@ -272,7 +275,7 @@ TEST_F(AudioEncoderCopyRedTest, CheckPayloadType) {
Encode();
ASSERT_EQ(1u, encoded_info_.redundant.size());
EXPECT_EQ(primary_payload_type, encoded_info_.redundant[0].payload_type);
- EXPECT_EQ(red_payload_type_, encoded_info_.payload_type);
+ EXPECT_EQ(primary_payload_type, encoded_info_.payload_type);
const int secondary_payload_type = red_payload_type_ + 2;
info.payload_type = secondary_payload_type;
@@ -286,6 +289,36 @@ TEST_F(AudioEncoderCopyRedTest, CheckPayloadType) {
EXPECT_EQ(red_payload_type_, encoded_info_.payload_type);
}
+TEST_F(AudioEncoderCopyRedTest, CheckRFC2198Header) {
+ const int primary_payload_type = red_payload_type_ + 1;
+ AudioEncoder::EncodedInfo info;
+ info.encoded_bytes = 10;
+ info.encoded_timestamp = timestamp_;
+ info.payload_type = primary_payload_type;
+
+ EXPECT_CALL(*mock_encoder_, EncodeImpl(_, _, _))
+ .WillOnce(Invoke(MockAudioEncoder::FakeEncoding(info)));
+ Encode();
+ info.encoded_timestamp = timestamp_; // update timestamp.
+ EXPECT_CALL(*mock_encoder_, EncodeImpl(_, _, _))
+ .WillOnce(Invoke(MockAudioEncoder::FakeEncoding(info)));
+ Encode(); // Second call will produce a redundant encoding.
+
+ EXPECT_EQ(encoded_.size(),
+ 5u + 2 * 10u); // header size + two encoded payloads.
+ EXPECT_EQ(encoded_[0], primary_payload_type | 0x80);
+
+ uint32_t timestamp_delta = encoded_info_.encoded_timestamp -
+ encoded_info_.redundant[1].encoded_timestamp;
+ // Timestamp delta is encoded as a 14 bit value.
+ EXPECT_EQ(encoded_[1], timestamp_delta >> 6);
+ EXPECT_EQ(static_cast<uint8_t>(encoded_[2] >> 2), timestamp_delta & 0x3f);
+ // Redundant length is encoded as 10 bit value.
+ EXPECT_EQ(encoded_[2] & 0x3u, encoded_info_.redundant[1].encoded_bytes >> 8);
+ EXPECT_EQ(encoded_[3], encoded_info_.redundant[1].encoded_bytes & 0xff);
+ EXPECT_EQ(encoded_[4], primary_payload_type);
+}
+
#if GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// This test fixture tests various error conditions that makes the
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_unittest.cc
index 836c49c12fa..d1e1ec1e307 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_unittest.cc
@@ -536,7 +536,11 @@ TEST_F(AudioDecoderIsacFloatTest, EncodeDecode) {
}
TEST_F(AudioDecoderIsacFloatTest, SetTargetBitrate) {
- TestSetAndGetTargetBitratesWithFixedCodec(audio_encoder_.get(), 32000);
+ EXPECT_EQ(10000, SetAndGetTargetBitrate(audio_encoder_.get(), 9999));
+ EXPECT_EQ(10000, SetAndGetTargetBitrate(audio_encoder_.get(), 10000));
+ EXPECT_EQ(23456, SetAndGetTargetBitrate(audio_encoder_.get(), 23456));
+ EXPECT_EQ(32000, SetAndGetTargetBitrate(audio_encoder_.get(), 32000));
+ EXPECT_EQ(32000, SetAndGetTargetBitrate(audio_encoder_.get(), 32001));
}
TEST_F(AudioDecoderIsacSwbTest, EncodeDecode) {
@@ -549,7 +553,11 @@ TEST_F(AudioDecoderIsacSwbTest, EncodeDecode) {
}
TEST_F(AudioDecoderIsacSwbTest, SetTargetBitrate) {
- TestSetAndGetTargetBitratesWithFixedCodec(audio_encoder_.get(), 32000);
+ EXPECT_EQ(10000, SetAndGetTargetBitrate(audio_encoder_.get(), 9999));
+ EXPECT_EQ(10000, SetAndGetTargetBitrate(audio_encoder_.get(), 10000));
+ EXPECT_EQ(23456, SetAndGetTargetBitrate(audio_encoder_.get(), 23456));
+ EXPECT_EQ(56000, SetAndGetTargetBitrate(audio_encoder_.get(), 56000));
+ EXPECT_EQ(56000, SetAndGetTargetBitrate(audio_encoder_.get(), 56001));
}
TEST_F(AudioDecoderIsacFixTest, EncodeDecode) {
@@ -569,7 +577,11 @@ TEST_F(AudioDecoderIsacFixTest, EncodeDecode) {
}
TEST_F(AudioDecoderIsacFixTest, SetTargetBitrate) {
- TestSetAndGetTargetBitratesWithFixedCodec(audio_encoder_.get(), 32000);
+ EXPECT_EQ(10000, SetAndGetTargetBitrate(audio_encoder_.get(), 9999));
+ EXPECT_EQ(10000, SetAndGetTargetBitrate(audio_encoder_.get(), 10000));
+ EXPECT_EQ(23456, SetAndGetTargetBitrate(audio_encoder_.get(), 23456));
+ EXPECT_EQ(32000, SetAndGetTargetBitrate(audio_encoder_.get(), 32000));
+ EXPECT_EQ(32000, SetAndGetTargetBitrate(audio_encoder_.get(), 32001));
}
TEST_F(AudioDecoderG722Test, EncodeDecode) {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h
index d83dc7f62ca..b8dc031fa4b 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h
@@ -23,22 +23,28 @@ class MockDecoderDatabase : public DecoderDatabase {
explicit MockDecoderDatabase(
rtc::scoped_refptr<AudioDecoderFactory> factory = nullptr)
: DecoderDatabase(factory, absl::nullopt) {}
- virtual ~MockDecoderDatabase() { Die(); }
- MOCK_METHOD0(Die, void());
- MOCK_CONST_METHOD0(Empty, bool());
- MOCK_CONST_METHOD0(Size, int());
- MOCK_METHOD0(Reset, void());
- MOCK_METHOD2(RegisterPayload,
- int(int rtp_payload_type, const SdpAudioFormat& audio_format));
- MOCK_METHOD1(Remove, int(uint8_t rtp_payload_type));
- MOCK_METHOD0(RemoveAll, void());
- MOCK_CONST_METHOD1(GetDecoderInfo,
- const DecoderInfo*(uint8_t rtp_payload_type));
- MOCK_METHOD2(SetActiveDecoder,
- int(uint8_t rtp_payload_type, bool* new_decoder));
- MOCK_CONST_METHOD0(GetActiveDecoder, AudioDecoder*());
- MOCK_METHOD1(SetActiveCngDecoder, int(uint8_t rtp_payload_type));
- MOCK_CONST_METHOD0(GetActiveCngDecoder, ComfortNoiseDecoder*());
+ ~MockDecoderDatabase() override { Die(); }
+ MOCK_METHOD(void, Die, ());
+ MOCK_METHOD(bool, Empty, (), (const, override));
+ MOCK_METHOD(int, Size, (), (const, override));
+ MOCK_METHOD(void, Reset, (), (override));
+ MOCK_METHOD(int,
+ RegisterPayload,
+ (int rtp_payload_type, const SdpAudioFormat& audio_format),
+ (override));
+ MOCK_METHOD(int, Remove, (uint8_t rtp_payload_type), (override));
+ MOCK_METHOD(void, RemoveAll, (), (override));
+ MOCK_METHOD(const DecoderInfo*,
+ GetDecoderInfo,
+ (uint8_t rtp_payload_type),
+ (const, override));
+ MOCK_METHOD(int,
+ SetActiveDecoder,
+ (uint8_t rtp_payload_type, bool* new_decoder),
+ (override));
+ MOCK_METHOD(AudioDecoder*, GetActiveDecoder, (), (const, override));
+ MOCK_METHOD(int, SetActiveCngDecoder, (uint8_t rtp_payload_type), (override));
+ MOCK_METHOD(ComfortNoiseDecoder*, GetActiveCngDecoder, (), (const, override));
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_dtmf_buffer.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_dtmf_buffer.h
index d9fe5d4492d..c60c56d36b3 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_dtmf_buffer.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_dtmf_buffer.h
@@ -19,13 +19,16 @@ namespace webrtc {
class MockDtmfBuffer : public DtmfBuffer {
public:
MockDtmfBuffer(int fs) : DtmfBuffer(fs) {}
- virtual ~MockDtmfBuffer() { Die(); }
- MOCK_METHOD0(Die, void());
- MOCK_METHOD0(Flush, void());
- MOCK_METHOD1(InsertEvent, int(const DtmfEvent& event));
- MOCK_METHOD2(GetEvent, bool(uint32_t current_timestamp, DtmfEvent* event));
- MOCK_CONST_METHOD0(Length, size_t());
- MOCK_CONST_METHOD0(Empty, bool());
+ ~MockDtmfBuffer() override { Die(); }
+ MOCK_METHOD(void, Die, ());
+ MOCK_METHOD(void, Flush, (), (override));
+ MOCK_METHOD(int, InsertEvent, (const DtmfEvent& event), (override));
+ MOCK_METHOD(bool,
+ GetEvent,
+ (uint32_t current_timestamp, DtmfEvent* event),
+ (override));
+ MOCK_METHOD(size_t, Length, (), (const, override));
+ MOCK_METHOD(bool, Empty, (), (const, override));
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_dtmf_tone_generator.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_dtmf_tone_generator.h
index eea8bee1c2c..60de167c29b 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_dtmf_tone_generator.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_dtmf_tone_generator.h
@@ -18,12 +18,15 @@ namespace webrtc {
class MockDtmfToneGenerator : public DtmfToneGenerator {
public:
- virtual ~MockDtmfToneGenerator() { Die(); }
- MOCK_METHOD0(Die, void());
- MOCK_METHOD3(Init, int(int fs, int event, int attenuation));
- MOCK_METHOD0(Reset, void());
- MOCK_METHOD2(Generate, int(size_t num_samples, AudioMultiVector* output));
- MOCK_CONST_METHOD0(initialized, bool());
+ ~MockDtmfToneGenerator() override { Die(); }
+ MOCK_METHOD(void, Die, ());
+ MOCK_METHOD(int, Init, (int fs, int event, int attenuation), (override));
+ MOCK_METHOD(void, Reset, (), (override));
+ MOCK_METHOD(int,
+ Generate,
+ (size_t num_samples, AudioMultiVector* output),
+ (override));
+ MOCK_METHOD(bool, initialized, (), (const, override));
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_expand.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_expand.h
index 286325b841e..9d667790216 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_expand.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_expand.h
@@ -30,13 +30,13 @@ class MockExpand : public Expand {
statistics,
fs,
num_channels) {}
- virtual ~MockExpand() { Die(); }
- MOCK_METHOD0(Die, void());
- MOCK_METHOD0(Reset, void());
- MOCK_METHOD1(Process, int(AudioMultiVector* output));
- MOCK_METHOD0(SetParametersForNormalAfterExpand, void());
- MOCK_METHOD0(SetParametersForMergeAfterExpand, void());
- MOCK_CONST_METHOD0(overlap_length, size_t());
+ ~MockExpand() override { Die(); }
+ MOCK_METHOD(void, Die, ());
+ MOCK_METHOD(void, Reset, (), (override));
+ MOCK_METHOD(int, Process, (AudioMultiVector * output), (override));
+ MOCK_METHOD(void, SetParametersForNormalAfterExpand, (), (override));
+ MOCK_METHOD(void, SetParametersForMergeAfterExpand, (), (override));
+ MOCK_METHOD(size_t, overlap_length, (), (const, override));
};
} // namespace webrtc
@@ -45,13 +45,15 @@ namespace webrtc {
class MockExpandFactory : public ExpandFactory {
public:
- MOCK_CONST_METHOD6(Create,
- Expand*(BackgroundNoise* background_noise,
- SyncBuffer* sync_buffer,
- RandomVector* random_vector,
- StatisticsCalculator* statistics,
- int fs,
- size_t num_channels));
+ MOCK_METHOD(Expand*,
+ Create,
+ (BackgroundNoise * background_noise,
+ SyncBuffer* sync_buffer,
+ RandomVector* random_vector,
+ StatisticsCalculator* statistics,
+ int fs,
+ size_t num_channels),
+ (const, override));
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_histogram.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_histogram.h
index 91ae18f5e8a..03abbc1d4b0 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_histogram.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_histogram.h
@@ -22,8 +22,8 @@ class MockHistogram : public Histogram {
: Histogram(num_buckets, forget_factor) {}
virtual ~MockHistogram() {}
- MOCK_METHOD1(Add, void(int));
- MOCK_METHOD1(Quantile, int(int));
+ MOCK_METHOD(void, Add, (int), (override));
+ MOCK_METHOD(int, Quantile, (int), (override));
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_neteq_controller.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_neteq_controller.h
index d1008c8a30a..b7df85fb205 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_neteq_controller.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_neteq_controller.h
@@ -19,46 +19,45 @@ namespace webrtc {
class MockNetEqController : public NetEqController {
public:
MockNetEqController() = default;
- virtual ~MockNetEqController() { Die(); }
- MOCK_METHOD0(Die, void());
- MOCK_METHOD0(Reset, void());
- MOCK_METHOD0(SoftReset, void());
- MOCK_METHOD2(GetDecision,
- NetEq::Operation(const NetEqStatus& neteq_status,
- bool* reset_decoder));
- MOCK_METHOD6(Update,
- void(uint16_t sequence_number,
- uint32_t timestamp,
- uint32_t last_played_out_timestamp,
- bool new_codec,
- bool cng_or_dtmf,
- size_t packet_length_samples));
- MOCK_METHOD0(RegisterEmptyPacket, void());
- MOCK_METHOD2(SetSampleRate, void(int fs_hz, size_t output_size_samples));
- MOCK_METHOD1(SetMaximumDelay, bool(int delay_ms));
- MOCK_METHOD1(SetMinimumDelay, bool(int delay_ms));
- MOCK_METHOD1(SetBaseMinimumDelay, bool(int delay_ms));
- MOCK_CONST_METHOD0(GetBaseMinimumDelay, int());
- MOCK_CONST_METHOD0(CngRfc3389On, bool());
- MOCK_CONST_METHOD0(CngOff, bool());
- MOCK_METHOD0(SetCngOff, void());
- MOCK_METHOD1(ExpandDecision, void(NetEq::Operation operation));
- MOCK_METHOD1(AddSampleMemory, void(int32_t value));
- MOCK_METHOD0(TargetLevelMs, int());
- MOCK_METHOD6(PacketArrived,
- absl::optional<int>(bool last_cng_or_dtmf,
- size_t packet_length_samples,
- bool should_update_stats,
- uint16_t main_sequence_number,
- uint32_t main_timestamp,
- int fs_hz));
- MOCK_CONST_METHOD0(PeakFound, bool());
- MOCK_CONST_METHOD0(GetFilteredBufferLevel, int());
- MOCK_METHOD1(set_sample_memory, void(int32_t value));
- MOCK_CONST_METHOD0(noise_fast_forward, size_t());
- MOCK_CONST_METHOD0(packet_length_samples, size_t());
- MOCK_METHOD1(set_packet_length_samples, void(size_t value));
- MOCK_METHOD1(set_prev_time_scale, void(bool value));
+ ~MockNetEqController() override { Die(); }
+ MOCK_METHOD(void, Die, ());
+ MOCK_METHOD(void, Reset, (), (override));
+ MOCK_METHOD(void, SoftReset, (), (override));
+ MOCK_METHOD(NetEq::Operation,
+ GetDecision,
+ (const NetEqStatus& neteq_status, bool* reset_decoder),
+ (override));
+ MOCK_METHOD(void, RegisterEmptyPacket, (), (override));
+ MOCK_METHOD(void,
+ SetSampleRate,
+ (int fs_hz, size_t output_size_samples),
+ (override));
+ MOCK_METHOD(bool, SetMaximumDelay, (int delay_ms), (override));
+ MOCK_METHOD(bool, SetMinimumDelay, (int delay_ms), (override));
+ MOCK_METHOD(bool, SetBaseMinimumDelay, (int delay_ms), (override));
+ MOCK_METHOD(int, GetBaseMinimumDelay, (), (const, override));
+ MOCK_METHOD(bool, CngRfc3389On, (), (const, override));
+ MOCK_METHOD(bool, CngOff, (), (const, override));
+ MOCK_METHOD(void, SetCngOff, (), (override));
+ MOCK_METHOD(void, ExpandDecision, (NetEq::Operation operation), (override));
+ MOCK_METHOD(void, AddSampleMemory, (int32_t value), (override));
+ MOCK_METHOD(int, TargetLevelMs, (), (override));
+ MOCK_METHOD(absl::optional<int>,
+ PacketArrived,
+ (bool last_cng_or_dtmf,
+ size_t packet_length_samples,
+ bool should_update_stats,
+ uint16_t main_sequence_number,
+ uint32_t main_timestamp,
+ int fs_hz),
+ (override));
+ MOCK_METHOD(bool, PeakFound, (), (const, override));
+ MOCK_METHOD(int, GetFilteredBufferLevel, (), (const, override));
+ MOCK_METHOD(void, set_sample_memory, (int32_t value), (override));
+ MOCK_METHOD(size_t, noise_fast_forward, (), (const, override));
+ MOCK_METHOD(size_t, packet_length_samples, (), (const, override));
+ MOCK_METHOD(void, set_packet_length_samples, (size_t value), (override));
+ MOCK_METHOD(void, set_prev_time_scale, (bool value), (override));
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_packet_buffer.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_packet_buffer.h
index 7efeb15e473..e466ea6c8bb 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_packet_buffer.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_packet_buffer.h
@@ -20,39 +20,47 @@ class MockPacketBuffer : public PacketBuffer {
public:
MockPacketBuffer(size_t max_number_of_packets, const TickTimer* tick_timer)
: PacketBuffer(max_number_of_packets, tick_timer) {}
- virtual ~MockPacketBuffer() { Die(); }
- MOCK_METHOD0(Die, void());
- MOCK_METHOD0(Flush, void());
- MOCK_CONST_METHOD0(Empty, bool());
- int InsertPacket(Packet&& packet, StatisticsCalculator* stats) {
- return InsertPacketWrapped(&packet, stats);
- }
- // Since gtest does not properly support move-only types, InsertPacket is
- // implemented as a wrapper. You'll have to implement InsertPacketWrapped
- // instead and move from |*packet|.
- MOCK_METHOD2(InsertPacketWrapped,
- int(Packet* packet, StatisticsCalculator* stats));
- MOCK_METHOD5(InsertPacketList,
- int(PacketList* packet_list,
- const DecoderDatabase& decoder_database,
- absl::optional<uint8_t>* current_rtp_payload_type,
- absl::optional<uint8_t>* current_cng_rtp_payload_type,
- StatisticsCalculator* stats));
- MOCK_CONST_METHOD1(NextTimestamp, int(uint32_t* next_timestamp));
- MOCK_CONST_METHOD2(NextHigherTimestamp,
- int(uint32_t timestamp, uint32_t* next_timestamp));
- MOCK_CONST_METHOD0(PeekNextPacket, const Packet*());
- MOCK_METHOD0(GetNextPacket, absl::optional<Packet>());
- MOCK_METHOD1(DiscardNextPacket, int(StatisticsCalculator* stats));
- MOCK_METHOD3(DiscardOldPackets,
- void(uint32_t timestamp_limit,
- uint32_t horizon_samples,
- StatisticsCalculator* stats));
- MOCK_METHOD2(DiscardAllOldPackets,
- void(uint32_t timestamp_limit, StatisticsCalculator* stats));
- MOCK_CONST_METHOD0(NumPacketsInBuffer, size_t());
- MOCK_METHOD1(IncrementWaitingTimes, void(int));
- MOCK_CONST_METHOD0(current_memory_bytes, int());
+ ~MockPacketBuffer() override { Die(); }
+ MOCK_METHOD(void, Die, ());
+ MOCK_METHOD(void, Flush, (), (override));
+ MOCK_METHOD(bool, Empty, (), (const, override));
+ MOCK_METHOD(int,
+ InsertPacket,
+ (Packet && packet, StatisticsCalculator* stats),
+ (override));
+ MOCK_METHOD(int,
+ InsertPacketList,
+ (PacketList * packet_list,
+ const DecoderDatabase& decoder_database,
+ absl::optional<uint8_t>* current_rtp_payload_type,
+ absl::optional<uint8_t>* current_cng_rtp_payload_type,
+ StatisticsCalculator* stats),
+ (override));
+ MOCK_METHOD(int,
+ NextTimestamp,
+ (uint32_t * next_timestamp),
+ (const, override));
+ MOCK_METHOD(int,
+ NextHigherTimestamp,
+ (uint32_t timestamp, uint32_t* next_timestamp),
+ (const, override));
+ MOCK_METHOD(const Packet*, PeekNextPacket, (), (const, override));
+ MOCK_METHOD(absl::optional<Packet>, GetNextPacket, (), (override));
+ MOCK_METHOD(int,
+ DiscardNextPacket,
+ (StatisticsCalculator * stats),
+ (override));
+ MOCK_METHOD(void,
+ DiscardOldPackets,
+ (uint32_t timestamp_limit,
+ uint32_t horizon_samples,
+ StatisticsCalculator* stats),
+ (override));
+ MOCK_METHOD(void,
+ DiscardAllOldPackets,
+ (uint32_t timestamp_limit, StatisticsCalculator* stats),
+ (override));
+ MOCK_METHOD(size_t, NumPacketsInBuffer, (), (const, override));
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_red_payload_splitter.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_red_payload_splitter.h
index 68fd3566c92..9daf571a804 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_red_payload_splitter.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_red_payload_splitter.h
@@ -18,10 +18,12 @@ namespace webrtc {
class MockRedPayloadSplitter : public RedPayloadSplitter {
public:
- MOCK_METHOD1(SplitRed, bool(PacketList* packet_list));
- MOCK_METHOD2(CheckRedPayloads,
- void(PacketList* packet_list,
- const DecoderDatabase& decoder_database));
+ MOCK_METHOD(bool, SplitRed, (PacketList * packet_list), (override));
+ MOCK_METHOD(void,
+ CheckRedPayloads,
+ (PacketList * packet_list,
+ const DecoderDatabase& decoder_database),
+ (override));
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_statistics_calculator.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_statistics_calculator.h
index 086c7c55644..f8812478d6d 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_statistics_calculator.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_statistics_calculator.h
@@ -18,9 +18,12 @@ namespace webrtc {
class MockStatisticsCalculator : public StatisticsCalculator {
public:
- MOCK_METHOD1(PacketsDiscarded, void(size_t num_packets));
- MOCK_METHOD1(SecondaryPacketsDiscarded, void(size_t num_packets));
- MOCK_METHOD1(RelativePacketArrivalDelay, void(size_t delay_ms));
+ MOCK_METHOD(void, PacketsDiscarded, (size_t num_packets), (override));
+ MOCK_METHOD(void,
+ SecondaryPacketsDiscarded,
+ (size_t num_packets),
+ (override));
+ MOCK_METHOD(void, RelativePacketArrivalDelay, (size_t delay_ms), (override));
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.cc
index 0b7510d3418..f1cd8015e6f 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.cc
@@ -51,6 +51,7 @@
#include "rtc_base/strings/audio_format_to_string.h"
#include "rtc_base/trace_event.h"
#include "system_wrappers/include/clock.h"
+#include "system_wrappers/include/field_trial.h"
namespace webrtc {
namespace {
@@ -73,6 +74,24 @@ std::unique_ptr<NetEqController> CreateNetEqController(
return controller_factory.CreateNetEqController(config);
}
+int GetDelayChainLengthMs(int config_extra_delay_ms) {
+ constexpr char kExtraDelayFieldTrial[] = "WebRTC-Audio-NetEqExtraDelay";
+ if (webrtc::field_trial::IsEnabled(kExtraDelayFieldTrial)) {
+ const auto field_trial_string =
+ webrtc::field_trial::FindFullName(kExtraDelayFieldTrial);
+ int extra_delay_ms = -1;
+ if (sscanf(field_trial_string.c_str(), "Enabled-%d", &extra_delay_ms) ==
+ 1 &&
+ extra_delay_ms >= 0 && extra_delay_ms <= 2000) {
+ RTC_LOG(LS_INFO) << "Delay chain length set to " << extra_delay_ms
+ << " ms in field trial";
+ return (extra_delay_ms / 10) * 10; // Rounding down to multiple of 10.
+ }
+ }
+ // Field trial not set, or invalid value read. Use value from config.
+ return config_extra_delay_ms;
+}
+
} // namespace
NetEqImpl::Dependencies::Dependencies(
@@ -140,7 +159,10 @@ NetEqImpl::NetEqImpl(const NetEq::Config& config,
10, // Report once every 10 s.
tick_timer_.get()),
no_time_stretching_(config.for_test_no_time_stretching),
- enable_rtx_handling_(config.enable_rtx_handling) {
+ enable_rtx_handling_(config.enable_rtx_handling),
+ output_delay_chain_ms_(
+ GetDelayChainLengthMs(config.extra_output_delay_ms)),
+ output_delay_chain_(rtc::CheckedDivExact(output_delay_chain_ms_, 10)) {
RTC_LOG(LS_INFO) << "NetEq config: " << config.ToString();
int fs = config.sample_rate_hz;
if (fs != 8000 && fs != 16000 && fs != 32000 && fs != 48000) {
@@ -255,6 +277,25 @@ int NetEqImpl::GetAudio(AudioFrame* audio_frame,
last_output_sample_rate_hz_ == 32000 ||
last_output_sample_rate_hz_ == 48000)
<< "Unexpected sample rate " << last_output_sample_rate_hz_;
+
+ if (!output_delay_chain_.empty()) {
+ if (output_delay_chain_empty_) {
+ for (auto& f : output_delay_chain_) {
+ f.CopyFrom(*audio_frame);
+ }
+ output_delay_chain_empty_ = false;
+ delayed_last_output_sample_rate_hz_ = last_output_sample_rate_hz_;
+ } else {
+ RTC_DCHECK_GE(output_delay_chain_ix_, 0);
+ RTC_DCHECK_LT(output_delay_chain_ix_, output_delay_chain_.size());
+ swap(output_delay_chain_[output_delay_chain_ix_], *audio_frame);
+ *muted = audio_frame->muted();
+ output_delay_chain_ix_ =
+ (output_delay_chain_ix_ + 1) % output_delay_chain_.size();
+ delayed_last_output_sample_rate_hz_ = audio_frame->sample_rate_hz();
+ }
+ }
+
return kOK;
}
@@ -297,7 +338,8 @@ bool NetEqImpl::SetMinimumDelay(int delay_ms) {
rtc::CritScope lock(&crit_sect_);
if (delay_ms >= 0 && delay_ms <= 10000) {
assert(controller_.get());
- return controller_->SetMinimumDelay(delay_ms);
+ return controller_->SetMinimumDelay(
+ std::max(delay_ms - output_delay_chain_ms_, 0));
}
return false;
}
@@ -306,7 +348,8 @@ bool NetEqImpl::SetMaximumDelay(int delay_ms) {
rtc::CritScope lock(&crit_sect_);
if (delay_ms >= 0 && delay_ms <= 10000) {
assert(controller_.get());
- return controller_->SetMaximumDelay(delay_ms);
+ return controller_->SetMaximumDelay(
+ std::max(delay_ms - output_delay_chain_ms_, 0));
}
return false;
}
@@ -327,7 +370,7 @@ int NetEqImpl::GetBaseMinimumDelayMs() const {
int NetEqImpl::TargetDelayMs() const {
rtc::CritScope lock(&crit_sect_);
RTC_DCHECK(controller_.get());
- return controller_->TargetLevelMs();
+ return controller_->TargetLevelMs() + output_delay_chain_ms_;
}
int NetEqImpl::FilteredCurrentDelayMs() const {
@@ -337,7 +380,8 @@ int NetEqImpl::FilteredCurrentDelayMs() const {
const int delay_samples =
controller_->GetFilteredBufferLevel() + sync_buffer_->FutureLength();
// The division below will truncate. The return value is in ms.
- return delay_samples / rtc::CheckedDivExact(fs_hz_, 1000);
+ return delay_samples / rtc::CheckedDivExact(fs_hz_, 1000) +
+ output_delay_chain_ms_;
}
int NetEqImpl::NetworkStatistics(NetEqNetworkStatistics* stats) {
@@ -351,6 +395,13 @@ int NetEqImpl::NetworkStatistics(NetEqNetworkStatistics* stats) {
stats->jitter_peaks_found = controller_->PeakFound();
stats_->GetNetworkStatistics(fs_hz_, total_samples_in_buffers,
decoder_frame_length_, stats);
+ // Compensate for output delay chain.
+ stats->current_buffer_size_ms += output_delay_chain_ms_;
+ stats->preferred_buffer_size_ms += output_delay_chain_ms_;
+ stats->mean_waiting_time_ms += output_delay_chain_ms_;
+ stats->median_waiting_time_ms += output_delay_chain_ms_;
+ stats->min_waiting_time_ms += output_delay_chain_ms_;
+ stats->max_waiting_time_ms += output_delay_chain_ms_;
return 0;
}
@@ -394,12 +445,19 @@ absl::optional<uint32_t> NetEqImpl::GetPlayoutTimestamp() const {
// which is indicated by returning an empty value.
return absl::nullopt;
}
- return timestamp_scaler_->ToExternal(playout_timestamp_);
+ size_t sum_samples_in_output_delay_chain = 0;
+ for (const auto& audio_frame : output_delay_chain_) {
+ sum_samples_in_output_delay_chain += audio_frame.samples_per_channel();
+ }
+ return timestamp_scaler_->ToExternal(
+ playout_timestamp_ -
+ static_cast<uint32_t>(sum_samples_in_output_delay_chain));
}
int NetEqImpl::last_output_sample_rate_hz() const {
rtc::CritScope lock(&crit_sect_);
- return last_output_sample_rate_hz_;
+ return delayed_last_output_sample_rate_hz_.value_or(
+ last_output_sample_rate_hz_);
}
absl::optional<NetEq::DecoderFormat> NetEqImpl::GetDecoderFormat(
@@ -1988,8 +2046,9 @@ int NetEqImpl::ExtractPackets(size_t required_samples,
extracted_samples = packet->timestamp - first_timestamp + packet_duration;
RTC_DCHECK(controller_);
- stats_->JitterBufferDelay(packet_duration, waiting_time_ms,
- controller_->TargetLevelMs());
+ stats_->JitterBufferDelay(
+ packet_duration, waiting_time_ms + output_delay_chain_ms_,
+ controller_->TargetLevelMs() + output_delay_chain_ms_);
packet_list->push_back(std::move(*packet)); // Store packet in list.
packet = absl::nullopt; // Ensure it's never used after the move.
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.h
index 956cb6ef178..623968aefd1 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.h
@@ -402,6 +402,22 @@ class NetEqImpl : public webrtc::NetEq {
bool no_time_stretching_ RTC_GUARDED_BY(crit_sect_); // Only used for test.
rtc::BufferT<int16_t> concealment_audio_ RTC_GUARDED_BY(crit_sect_);
const bool enable_rtx_handling_ RTC_GUARDED_BY(crit_sect_);
+ // Data members used for adding extra delay to the output of NetEq.
+ // The delay in ms (which is 10 times the number of elements in
+ // output_delay_chain_).
+ const int output_delay_chain_ms_ RTC_GUARDED_BY(crit_sect_);
+ // Vector of AudioFrames which contains the delayed audio. Accessed as a
+ // circular buffer.
+ std::vector<AudioFrame> output_delay_chain_ RTC_GUARDED_BY(crit_sect_);
+ // Index into output_delay_chain_.
+ size_t output_delay_chain_ix_ RTC_GUARDED_BY(crit_sect_) = 0;
+ // Did output_delay_chain_ get populated yet?
+ bool output_delay_chain_empty_ RTC_GUARDED_BY(crit_sect_) = true;
+ // Contains the sample rate of the AudioFrame last emitted from the delay
+ // chain. If the extra output delay chain is not used, or if no audio has been
+ // emitted yet, the variable is empty.
+ absl::optional<int> delayed_last_output_sample_rate_hz_
+ RTC_GUARDED_BY(crit_sect_);
private:
RTC_DISALLOW_COPY_AND_ASSIGN(NetEqImpl);
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_network_stats_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_network_stats_unittest.cc
index d35c44c4c26..df346227226 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_network_stats_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_network_stats_unittest.cc
@@ -52,10 +52,10 @@ class MockAudioDecoder final : public AudioDecoder {
: sample_rate_hz_(sample_rate_hz),
num_channels_(num_channels),
fec_enabled_(false) {}
- ~MockAudioDecoder() /* override */ { Die(); }
- MOCK_METHOD0(Die, void());
+ ~MockAudioDecoder() override { Die(); }
+ MOCK_METHOD(void, Die, ());
- MOCK_METHOD0(Reset, void());
+ MOCK_METHOD(void, Reset, (), (override));
class MockFrame : public AudioDecoder::EncodedAudioFrame {
public:
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_unittest.cc
index d78e2c64886..f5fb6479658 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_unittest.cc
@@ -1102,5 +1102,186 @@ TEST(NetEqNoTimeStretchingMode, RunTest) {
EXPECT_EQ(0, stats.preemptive_rate);
}
+namespace {
+// Helper classes and data types and functions for NetEqOutputDelayTest.
+
+class VectorAudioSink : public AudioSink {
+ public:
+ // Does not take ownership of the vector.
+ VectorAudioSink(std::vector<int16_t>* output_vector) : v_(output_vector) {}
+
+ virtual ~VectorAudioSink() = default;
+
+ bool WriteArray(const int16_t* audio, size_t num_samples) override {
+ v_->reserve(v_->size() + num_samples);
+ for (size_t i = 0; i < num_samples; ++i) {
+ v_->push_back(audio[i]);
+ }
+ return true;
+ }
+
+ private:
+ std::vector<int16_t>* const v_;
+};
+
+struct TestResult {
+ NetEqLifetimeStatistics lifetime_stats;
+ NetEqNetworkStatistics network_stats;
+ absl::optional<uint32_t> playout_timestamp;
+ int target_delay_ms;
+ int filtered_current_delay_ms;
+ int sample_rate_hz;
+};
+
+// This class is used as callback object to NetEqTest to collect some stats
+// at the end of the simulation.
+class SimEndStatsCollector : public NetEqSimulationEndedCallback {
+ public:
+ SimEndStatsCollector(TestResult& result) : result_(result) {}
+
+ void SimulationEnded(int64_t /*simulation_time_ms*/, NetEq* neteq) override {
+ result_.playout_timestamp = neteq->GetPlayoutTimestamp();
+ result_.target_delay_ms = neteq->TargetDelayMs();
+ result_.filtered_current_delay_ms = neteq->FilteredCurrentDelayMs();
+ result_.sample_rate_hz = neteq->last_output_sample_rate_hz();
+ }
+
+ private:
+ TestResult& result_;
+};
+
+TestResult DelayLineNetEqTest(int delay_ms,
+ std::vector<int16_t>* output_vector) {
+ NetEq::Config config;
+ config.for_test_no_time_stretching = true;
+ config.extra_output_delay_ms = delay_ms;
+ auto codecs = NetEqTest::StandardDecoderMap();
+ NetEqPacketSourceInput::RtpHeaderExtensionMap rtp_ext_map = {
+ {1, kRtpExtensionAudioLevel},
+ {3, kRtpExtensionAbsoluteSendTime},
+ {5, kRtpExtensionTransportSequenceNumber},
+ {7, kRtpExtensionVideoContentType},
+ {8, kRtpExtensionVideoTiming}};
+ std::unique_ptr<NetEqInput> input = std::make_unique<NetEqRtpDumpInput>(
+ webrtc::test::ResourcePath("audio_coding/neteq_universal_new", "rtp"),
+ rtp_ext_map, absl::nullopt /*No SSRC filter*/);
+ std::unique_ptr<TimeLimitedNetEqInput> input_time_limit(
+ new TimeLimitedNetEqInput(std::move(input), 10000));
+ std::unique_ptr<AudioSink> output =
+ std::make_unique<VectorAudioSink>(output_vector);
+
+ TestResult result;
+ SimEndStatsCollector stats_collector(result);
+ NetEqTest::Callbacks callbacks;
+ callbacks.simulation_ended_callback = &stats_collector;
+
+ NetEqTest test(config, CreateBuiltinAudioDecoderFactory(), codecs,
+ /*text_log=*/nullptr, /*neteq_factory=*/nullptr,
+ /*input=*/std::move(input_time_limit), std::move(output),
+ callbacks);
+ test.Run();
+ result.lifetime_stats = test.LifetimeStats();
+ result.network_stats = test.SimulationStats();
+ return result;
+}
+} // namespace
+
+// Tests the extra output delay functionality of NetEq.
+TEST(NetEqOutputDelayTest, RunTest) {
+ std::vector<int16_t> output;
+ const auto result_no_delay = DelayLineNetEqTest(0, &output);
+ std::vector<int16_t> output_delayed;
+ constexpr int kDelayMs = 100;
+ const auto result_delay = DelayLineNetEqTest(kDelayMs, &output_delayed);
+
+ // Verify that the loss concealment remains unchanged. The point of the delay
+ // is to not affect the jitter buffering behavior.
+ // First verify that there are concealments in the test.
+ EXPECT_GT(result_no_delay.lifetime_stats.concealed_samples, 0u);
+ // And that not all of the output is concealment.
+ EXPECT_GT(result_no_delay.lifetime_stats.total_samples_received,
+ result_no_delay.lifetime_stats.concealed_samples);
+ // Now verify that they remain unchanged by the delay.
+ EXPECT_EQ(result_no_delay.lifetime_stats.concealed_samples,
+ result_delay.lifetime_stats.concealed_samples);
+ // Accelerate and pre-emptive expand should also be unchanged.
+ EXPECT_EQ(result_no_delay.lifetime_stats.inserted_samples_for_deceleration,
+ result_delay.lifetime_stats.inserted_samples_for_deceleration);
+ EXPECT_EQ(result_no_delay.lifetime_stats.removed_samples_for_acceleration,
+ result_delay.lifetime_stats.removed_samples_for_acceleration);
+ // Verify that delay stats are increased with the delay chain.
+ EXPECT_EQ(
+ result_no_delay.lifetime_stats.jitter_buffer_delay_ms +
+ kDelayMs * result_no_delay.lifetime_stats.jitter_buffer_emitted_count,
+ result_delay.lifetime_stats.jitter_buffer_delay_ms);
+ EXPECT_EQ(
+ result_no_delay.lifetime_stats.jitter_buffer_target_delay_ms +
+ kDelayMs * result_no_delay.lifetime_stats.jitter_buffer_emitted_count,
+ result_delay.lifetime_stats.jitter_buffer_target_delay_ms);
+ EXPECT_EQ(result_no_delay.network_stats.current_buffer_size_ms + kDelayMs,
+ result_delay.network_stats.current_buffer_size_ms);
+ EXPECT_EQ(result_no_delay.network_stats.preferred_buffer_size_ms + kDelayMs,
+ result_delay.network_stats.preferred_buffer_size_ms);
+ EXPECT_EQ(result_no_delay.network_stats.mean_waiting_time_ms + kDelayMs,
+ result_delay.network_stats.mean_waiting_time_ms);
+ EXPECT_EQ(result_no_delay.network_stats.median_waiting_time_ms + kDelayMs,
+ result_delay.network_stats.median_waiting_time_ms);
+ EXPECT_EQ(result_no_delay.network_stats.min_waiting_time_ms + kDelayMs,
+ result_delay.network_stats.min_waiting_time_ms);
+ EXPECT_EQ(result_no_delay.network_stats.max_waiting_time_ms + kDelayMs,
+ result_delay.network_stats.max_waiting_time_ms);
+
+ ASSERT_TRUE(result_no_delay.playout_timestamp);
+ ASSERT_TRUE(result_delay.playout_timestamp);
+ EXPECT_EQ(*result_no_delay.playout_timestamp -
+ static_cast<uint32_t>(
+ kDelayMs *
+ rtc::CheckedDivExact(result_no_delay.sample_rate_hz, 1000)),
+ *result_delay.playout_timestamp);
+ EXPECT_EQ(result_no_delay.target_delay_ms + kDelayMs,
+ result_delay.target_delay_ms);
+ EXPECT_EQ(result_no_delay.filtered_current_delay_ms + kDelayMs,
+ result_delay.filtered_current_delay_ms);
+
+ // Verify expected delay in decoded signal. The test vector uses 8 kHz sample
+ // rate, so the delay will be 8 times the delay in ms.
+ constexpr size_t kExpectedDelaySamples = kDelayMs * 8;
+ for (size_t i = 0;
+ i < output.size() && i + kExpectedDelaySamples < output_delayed.size();
+ ++i) {
+ EXPECT_EQ(output[i], output_delayed[i + kExpectedDelaySamples]);
+ }
+}
+
+// Tests the extra output delay functionality of NetEq when configured via
+// field trial.
+TEST(NetEqOutputDelayTest, RunTestWithFieldTrial) {
+ test::ScopedFieldTrials field_trial(
+ "WebRTC-Audio-NetEqExtraDelay/Enabled-50/");
+ constexpr int kExpectedDelayMs = 50;
+ std::vector<int16_t> output;
+ const auto result = DelayLineNetEqTest(0, &output);
+
+  // The base delay values are taken from the results of the non-delayed case in
+ // NetEqOutputDelayTest.RunTest above.
+ EXPECT_EQ(10 + kExpectedDelayMs, result.target_delay_ms);
+ EXPECT_EQ(24 + kExpectedDelayMs, result.filtered_current_delay_ms);
+}
+
+// Set a non-multiple-of-10 value in the field trial, and verify that we don't
+// crash, and that the result is rounded down.
+TEST(NetEqOutputDelayTest, RunTestWithFieldTrialOddValue) {
+ test::ScopedFieldTrials field_trial(
+ "WebRTC-Audio-NetEqExtraDelay/Enabled-103/");
+ constexpr int kRoundedDelayMs = 100;
+ std::vector<int16_t> output;
+ const auto result = DelayLineNetEqTest(0, &output);
+
+  // The base delay values are taken from the results of the non-delayed case in
+ // NetEqOutputDelayTest.RunTest above.
+ EXPECT_EQ(10 + kRoundedDelayMs, result.target_delay_ms);
+ EXPECT_EQ(24 + kRoundedDelayMs, result.filtered_current_delay_ms);
+}
+
} // namespace test
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer_unittest.cc
index 0aff955fd75..40e7d5371af 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer_unittest.cc
@@ -31,13 +31,14 @@ using ::testing::StrictMock;
namespace {
class MockEncodedAudioFrame : public webrtc::AudioDecoder::EncodedAudioFrame {
public:
- MOCK_CONST_METHOD0(Duration, size_t());
+ MOCK_METHOD(size_t, Duration, (), (const, override));
- MOCK_CONST_METHOD0(IsDtxPacket, bool());
+ MOCK_METHOD(bool, IsDtxPacket, (), (const, override));
- MOCK_CONST_METHOD1(
- Decode,
- absl::optional<DecodeResult>(rtc::ArrayView<int16_t> decoded));
+ MOCK_METHOD(absl::optional<DecodeResult>,
+ Decode,
+ (rtc::ArrayView<int16_t> decoded),
+ (const, override));
};
// Helper class to generate packets. Packets must be deleted by the user.
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_stats_plotter.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_stats_plotter.cc
index 3f06b1cfc44..337f54ed6ed 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_stats_plotter.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_stats_plotter.cc
@@ -33,7 +33,8 @@ NetEqStatsPlotter::NetEqStatsPlotter(bool make_matlab_plot,
stats_getter_.reset(new NetEqStatsGetter(std::move(delay_analyzer)));
}
-void NetEqStatsPlotter::SimulationEnded(int64_t simulation_time_ms) {
+void NetEqStatsPlotter::SimulationEnded(int64_t simulation_time_ms,
+ NetEq* /*neteq*/) {
if (make_matlab_plot_) {
auto matlab_script_name = base_file_name_;
std::replace(matlab_script_name.begin(), matlab_script_name.end(), '.',
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_stats_plotter.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_stats_plotter.h
index c4df24e0731..d6918670fd8 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_stats_plotter.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_stats_plotter.h
@@ -28,7 +28,7 @@ class NetEqStatsPlotter : public NetEqSimulationEndedCallback {
bool show_concealment_events,
std::string base_file_name);
- void SimulationEnded(int64_t simulation_time_ms) override;
+ void SimulationEnded(int64_t simulation_time_ms, NetEq* neteq) override;
NetEqStatsGetter* stats_getter() { return stats_getter_.get(); }
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_test.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_test.cc
index f8b6161a98c..a263a737217 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_test.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_test.cc
@@ -91,7 +91,8 @@ int64_t NetEqTest::Run() {
simulation_time += step_result.simulation_step_ms;
} while (!step_result.is_simulation_finished);
if (callbacks_.simulation_ended_callback) {
- callbacks_.simulation_ended_callback->SimulationEnded(simulation_time);
+ callbacks_.simulation_ended_callback->SimulationEnded(simulation_time,
+ neteq_.get());
}
return simulation_time;
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_test.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_test.h
index 0a6c24f3d66..3b787a6cfbd 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_test.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_test.h
@@ -61,7 +61,7 @@ class NetEqGetAudioCallback {
class NetEqSimulationEndedCallback {
public:
virtual ~NetEqSimulationEndedCallback() = default;
- virtual void SimulationEnded(int64_t simulation_time_ms) = 0;
+ virtual void SimulationEnded(int64_t simulation_time_ms, NetEq* neteq) = 0;
};
// Class that provides an input--output test for NetEq. The input (both packets
diff --git a/chromium/third_party/webrtc/modules/audio_device/BUILD.gn b/chromium/third_party/webrtc/modules/audio_device/BUILD.gn
index 2ce0ae20e88..0d1ee81b47a 100644
--- a/chromium/third_party/webrtc/modules/audio_device/BUILD.gn
+++ b/chromium/third_party/webrtc/modules/audio_device/BUILD.gn
@@ -144,8 +144,8 @@ rtc_source_set("audio_device_module_from_input_and_output") {
"../../api/task_queue",
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
}
@@ -385,8 +385,8 @@ if (rtc_include_tests) {
"../../test:fileutils",
"../../test:test_support",
"../utility",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
if (is_linux || is_mac || is_win) {
sources += [ "audio_device_unittest.cc" ]
}
diff --git a/chromium/third_party/webrtc/modules/audio_device/android/audio_track_jni.cc b/chromium/third_party/webrtc/modules/audio_device/android/audio_track_jni.cc
index 776f0cfd704..daaeeca1eaf 100644
--- a/chromium/third_party/webrtc/modules/audio_device/android/audio_track_jni.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/android/audio_track_jni.cc
@@ -19,6 +19,7 @@
#include "rtc_base/logging.h"
#include "rtc_base/platform_thread.h"
#include "system_wrappers/include/field_trial.h"
+#include "system_wrappers/include/metrics.h"
namespace webrtc {
@@ -27,13 +28,15 @@ AudioTrackJni::JavaAudioTrack::JavaAudioTrack(
NativeRegistration* native_reg,
std::unique_ptr<GlobalRef> audio_track)
: audio_track_(std::move(audio_track)),
- init_playout_(native_reg->GetMethodId("initPlayout", "(IID)Z")),
+ init_playout_(native_reg->GetMethodId("initPlayout", "(IID)I")),
start_playout_(native_reg->GetMethodId("startPlayout", "()Z")),
stop_playout_(native_reg->GetMethodId("stopPlayout", "()Z")),
set_stream_volume_(native_reg->GetMethodId("setStreamVolume", "(I)Z")),
get_stream_max_volume_(
native_reg->GetMethodId("getStreamMaxVolume", "()I")),
- get_stream_volume_(native_reg->GetMethodId("getStreamVolume", "()I")) {}
+ get_stream_volume_(native_reg->GetMethodId("getStreamVolume", "()I")),
+ get_buffer_size_in_frames_(
+ native_reg->GetMethodId("getBufferSizeInFrames", "()I")) {}
AudioTrackJni::JavaAudioTrack::~JavaAudioTrack() {}
@@ -45,8 +48,29 @@ bool AudioTrackJni::JavaAudioTrack::InitPlayout(int sample_rate, int channels) {
nullptr);
if (buffer_size_factor == 0)
buffer_size_factor = 1.0;
- return audio_track_->CallBooleanMethod(init_playout_, sample_rate, channels,
- buffer_size_factor);
+ int requested_buffer_size_bytes = audio_track_->CallIntMethod(
+ init_playout_, sample_rate, channels, buffer_size_factor);
+ // Update UMA histograms for both the requested and actual buffer size.
+ if (requested_buffer_size_bytes >= 0) {
+ // To avoid division by zero, we assume the sample rate is 48k if an invalid
+ // value is found.
+ sample_rate = sample_rate <= 0 ? 48000 : sample_rate;
+ // This calculation assumes that audio is mono.
+ const int requested_buffer_size_ms =
+ (requested_buffer_size_bytes * 1000) / (2 * sample_rate);
+ RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AndroidNativeRequestedAudioBufferSizeMs",
+ requested_buffer_size_ms, 0, 1000, 100);
+ int actual_buffer_size_frames =
+ audio_track_->CallIntMethod(get_buffer_size_in_frames_);
+ if (actual_buffer_size_frames >= 0) {
+ const int actual_buffer_size_ms =
+ actual_buffer_size_frames * 1000 / sample_rate;
+ RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AndroidNativeAudioBufferSizeMs",
+ actual_buffer_size_ms, 0, 1000, 100);
+ }
+ return true;
+ }
+ return false;
}
bool AudioTrackJni::JavaAudioTrack::StartPlayout() {
diff --git a/chromium/third_party/webrtc/modules/audio_device/android/audio_track_jni.h b/chromium/third_party/webrtc/modules/audio_device/android/audio_track_jni.h
index 6303d754c8a..529a9013e80 100644
--- a/chromium/third_party/webrtc/modules/audio_device/android/audio_track_jni.h
+++ b/chromium/third_party/webrtc/modules/audio_device/android/audio_track_jni.h
@@ -62,6 +62,7 @@ class AudioTrackJni {
jmethodID set_stream_volume_;
jmethodID get_stream_max_volume_;
jmethodID get_stream_volume_;
+ jmethodID get_buffer_size_in_frames_;
};
explicit AudioTrackJni(AudioManager* audio_manager);
diff --git a/chromium/third_party/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java b/chromium/third_party/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
index 3023c99fa26..7e6ad5acf4f 100644
--- a/chromium/third_party/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
+++ b/chromium/third_party/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
@@ -215,7 +215,7 @@ public class WebRtcAudioTrack {
}
}
- private boolean initPlayout(int sampleRate, int channels, double bufferSizeFactor) {
+ private int initPlayout(int sampleRate, int channels, double bufferSizeFactor) {
threadChecker.checkIsOnValidThread();
Logging.d(TAG,
"initPlayout(sampleRate=" + sampleRate + ", channels=" + channels
@@ -244,14 +244,14 @@ public class WebRtcAudioTrack {
// can happen that |minBufferSizeInBytes| contains an invalid value.
if (minBufferSizeInBytes < byteBuffer.capacity()) {
reportWebRtcAudioTrackInitError("AudioTrack.getMinBufferSize returns an invalid value.");
- return false;
+ return -1;
}
    // Ensure that the previous audio session was stopped correctly before trying
// to create a new AudioTrack.
if (audioTrack != null) {
reportWebRtcAudioTrackInitError("Conflict with existing AudioTrack.");
- return false;
+ return -1;
}
try {
// Create an AudioTrack object and initialize its associated audio buffer.
@@ -273,7 +273,7 @@ public class WebRtcAudioTrack {
} catch (IllegalArgumentException e) {
reportWebRtcAudioTrackInitError(e.getMessage());
releaseAudioResources();
- return false;
+ return -1;
}
// It can happen that an AudioTrack is created but it was not successfully
@@ -282,11 +282,11 @@ public class WebRtcAudioTrack {
if (audioTrack == null || audioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
reportWebRtcAudioTrackInitError("Initialization of audio track failed.");
releaseAudioResources();
- return false;
+ return -1;
}
logMainParameters();
logMainParametersExtended();
- return true;
+ return minBufferSizeInBytes;
}
private boolean startPlayout() {
@@ -433,6 +433,13 @@ public class WebRtcAudioTrack {
}
}
+ private int getBufferSizeInFrames() {
+ if (Build.VERSION.SDK_INT >= 23) {
+ return audioTrack.getBufferSizeInFrames();
+ }
+ return -1;
+ }
+
private void logBufferCapacityInFrames() {
if (Build.VERSION.SDK_INT >= 24) {
Logging.d(TAG,
diff --git a/chromium/third_party/webrtc/modules/audio_device/include/mock_audio_device.h b/chromium/third_party/webrtc/modules/audio_device/include/mock_audio_device.h
index a05e64e6c9d..0ca19de156a 100644
--- a/chromium/third_party/webrtc/modules/audio_device/include/mock_audio_device.h
+++ b/chromium/third_party/webrtc/modules/audio_device/include/mock_audio_device.h
@@ -32,76 +32,123 @@ class MockAudioDeviceModule : public AudioDeviceModule {
}
// AudioDeviceModule.
- MOCK_CONST_METHOD1(ActiveAudioLayer, int32_t(AudioLayer* audioLayer));
- MOCK_METHOD1(RegisterAudioCallback, int32_t(AudioTransport* audioCallback));
- MOCK_METHOD0(Init, int32_t());
- MOCK_METHOD0(Terminate, int32_t());
- MOCK_CONST_METHOD0(Initialized, bool());
- MOCK_METHOD0(PlayoutDevices, int16_t());
- MOCK_METHOD0(RecordingDevices, int16_t());
- MOCK_METHOD3(PlayoutDeviceName,
- int32_t(uint16_t index,
- char name[kAdmMaxDeviceNameSize],
- char guid[kAdmMaxGuidSize]));
- MOCK_METHOD3(RecordingDeviceName,
- int32_t(uint16_t index,
- char name[kAdmMaxDeviceNameSize],
- char guid[kAdmMaxGuidSize]));
- MOCK_METHOD1(SetPlayoutDevice, int32_t(uint16_t index));
- MOCK_METHOD1(SetPlayoutDevice, int32_t(WindowsDeviceType device));
- MOCK_METHOD1(SetRecordingDevice, int32_t(uint16_t index));
- MOCK_METHOD1(SetRecordingDevice, int32_t(WindowsDeviceType device));
- MOCK_METHOD1(PlayoutIsAvailable, int32_t(bool* available));
- MOCK_METHOD0(InitPlayout, int32_t());
- MOCK_CONST_METHOD0(PlayoutIsInitialized, bool());
- MOCK_METHOD1(RecordingIsAvailable, int32_t(bool* available));
- MOCK_METHOD0(InitRecording, int32_t());
- MOCK_CONST_METHOD0(RecordingIsInitialized, bool());
- MOCK_METHOD0(StartPlayout, int32_t());
- MOCK_METHOD0(StopPlayout, int32_t());
- MOCK_CONST_METHOD0(Playing, bool());
- MOCK_METHOD0(StartRecording, int32_t());
- MOCK_METHOD0(StopRecording, int32_t());
- MOCK_CONST_METHOD0(Recording, bool());
- MOCK_METHOD1(SetAGC, int32_t(bool enable));
- MOCK_CONST_METHOD0(AGC, bool());
- MOCK_METHOD0(InitSpeaker, int32_t());
- MOCK_CONST_METHOD0(SpeakerIsInitialized, bool());
- MOCK_METHOD0(InitMicrophone, int32_t());
- MOCK_CONST_METHOD0(MicrophoneIsInitialized, bool());
- MOCK_METHOD1(SpeakerVolumeIsAvailable, int32_t(bool* available));
- MOCK_METHOD1(SetSpeakerVolume, int32_t(uint32_t volume));
- MOCK_CONST_METHOD1(SpeakerVolume, int32_t(uint32_t* volume));
- MOCK_CONST_METHOD1(MaxSpeakerVolume, int32_t(uint32_t* maxVolume));
- MOCK_CONST_METHOD1(MinSpeakerVolume, int32_t(uint32_t* minVolume));
- MOCK_METHOD1(MicrophoneVolumeIsAvailable, int32_t(bool* available));
- MOCK_METHOD1(SetMicrophoneVolume, int32_t(uint32_t volume));
- MOCK_CONST_METHOD1(MicrophoneVolume, int32_t(uint32_t* volume));
- MOCK_CONST_METHOD1(MaxMicrophoneVolume, int32_t(uint32_t* maxVolume));
- MOCK_CONST_METHOD1(MinMicrophoneVolume, int32_t(uint32_t* minVolume));
- MOCK_METHOD1(SpeakerMuteIsAvailable, int32_t(bool* available));
- MOCK_METHOD1(SetSpeakerMute, int32_t(bool enable));
- MOCK_CONST_METHOD1(SpeakerMute, int32_t(bool* enabled));
- MOCK_METHOD1(MicrophoneMuteIsAvailable, int32_t(bool* available));
- MOCK_METHOD1(SetMicrophoneMute, int32_t(bool enable));
- MOCK_CONST_METHOD1(MicrophoneMute, int32_t(bool* enabled));
- MOCK_CONST_METHOD1(StereoPlayoutIsAvailable, int32_t(bool* available));
- MOCK_METHOD1(SetStereoPlayout, int32_t(bool enable));
- MOCK_CONST_METHOD1(StereoPlayout, int32_t(bool* enabled));
- MOCK_CONST_METHOD1(StereoRecordingIsAvailable, int32_t(bool* available));
- MOCK_METHOD1(SetStereoRecording, int32_t(bool enable));
- MOCK_CONST_METHOD1(StereoRecording, int32_t(bool* enabled));
- MOCK_CONST_METHOD1(PlayoutDelay, int32_t(uint16_t* delayMS));
- MOCK_CONST_METHOD0(BuiltInAECIsAvailable, bool());
- MOCK_CONST_METHOD0(BuiltInAGCIsAvailable, bool());
- MOCK_CONST_METHOD0(BuiltInNSIsAvailable, bool());
- MOCK_METHOD1(EnableBuiltInAEC, int32_t(bool enable));
- MOCK_METHOD1(EnableBuiltInAGC, int32_t(bool enable));
- MOCK_METHOD1(EnableBuiltInNS, int32_t(bool enable));
- MOCK_CONST_METHOD0(GetPlayoutUnderrunCount, int32_t());
+ MOCK_METHOD(int32_t,
+ ActiveAudioLayer,
+ (AudioLayer * audioLayer),
+ (const, override));
+ MOCK_METHOD(int32_t,
+ RegisterAudioCallback,
+ (AudioTransport * audioCallback),
+ (override));
+ MOCK_METHOD(int32_t, Init, (), (override));
+ MOCK_METHOD(int32_t, Terminate, (), (override));
+ MOCK_METHOD(bool, Initialized, (), (const, override));
+ MOCK_METHOD(int16_t, PlayoutDevices, (), (override));
+ MOCK_METHOD(int16_t, RecordingDevices, (), (override));
+ MOCK_METHOD(int32_t,
+ PlayoutDeviceName,
+ (uint16_t index,
+ char name[kAdmMaxDeviceNameSize],
+ char guid[kAdmMaxGuidSize]),
+ (override));
+ MOCK_METHOD(int32_t,
+ RecordingDeviceName,
+ (uint16_t index,
+ char name[kAdmMaxDeviceNameSize],
+ char guid[kAdmMaxGuidSize]),
+ (override));
+ MOCK_METHOD(int32_t, SetPlayoutDevice, (uint16_t index), (override));
+ MOCK_METHOD(int32_t,
+ SetPlayoutDevice,
+ (WindowsDeviceType device),
+ (override));
+ MOCK_METHOD(int32_t, SetRecordingDevice, (uint16_t index), (override));
+ MOCK_METHOD(int32_t,
+ SetRecordingDevice,
+ (WindowsDeviceType device),
+ (override));
+ MOCK_METHOD(int32_t, PlayoutIsAvailable, (bool* available), (override));
+ MOCK_METHOD(int32_t, InitPlayout, (), (override));
+ MOCK_METHOD(bool, PlayoutIsInitialized, (), (const, override));
+ MOCK_METHOD(int32_t, RecordingIsAvailable, (bool* available), (override));
+ MOCK_METHOD(int32_t, InitRecording, (), (override));
+ MOCK_METHOD(bool, RecordingIsInitialized, (), (const, override));
+ MOCK_METHOD(int32_t, StartPlayout, (), (override));
+ MOCK_METHOD(int32_t, StopPlayout, (), (override));
+ MOCK_METHOD(bool, Playing, (), (const, override));
+ MOCK_METHOD(int32_t, StartRecording, (), (override));
+ MOCK_METHOD(int32_t, StopRecording, (), (override));
+ MOCK_METHOD(bool, Recording, (), (const, override));
+ MOCK_METHOD(int32_t, InitSpeaker, (), (override));
+ MOCK_METHOD(bool, SpeakerIsInitialized, (), (const, override));
+ MOCK_METHOD(int32_t, InitMicrophone, (), (override));
+ MOCK_METHOD(bool, MicrophoneIsInitialized, (), (const, override));
+ MOCK_METHOD(int32_t, SpeakerVolumeIsAvailable, (bool* available), (override));
+ MOCK_METHOD(int32_t, SetSpeakerVolume, (uint32_t volume), (override));
+ MOCK_METHOD(int32_t, SpeakerVolume, (uint32_t * volume), (const, override));
+ MOCK_METHOD(int32_t,
+ MaxSpeakerVolume,
+ (uint32_t * maxVolume),
+ (const, override));
+ MOCK_METHOD(int32_t,
+ MinSpeakerVolume,
+ (uint32_t * minVolume),
+ (const, override));
+ MOCK_METHOD(int32_t,
+ MicrophoneVolumeIsAvailable,
+ (bool* available),
+ (override));
+ MOCK_METHOD(int32_t, SetMicrophoneVolume, (uint32_t volume), (override));
+ MOCK_METHOD(int32_t,
+ MicrophoneVolume,
+ (uint32_t * volume),
+ (const, override));
+ MOCK_METHOD(int32_t,
+ MaxMicrophoneVolume,
+ (uint32_t * maxVolume),
+ (const, override));
+ MOCK_METHOD(int32_t,
+ MinMicrophoneVolume,
+ (uint32_t * minVolume),
+ (const, override));
+ MOCK_METHOD(int32_t, SpeakerMuteIsAvailable, (bool* available), (override));
+ MOCK_METHOD(int32_t, SetSpeakerMute, (bool enable), (override));
+ MOCK_METHOD(int32_t, SpeakerMute, (bool* enabled), (const, override));
+ MOCK_METHOD(int32_t,
+ MicrophoneMuteIsAvailable,
+ (bool* available),
+ (override));
+ MOCK_METHOD(int32_t, SetMicrophoneMute, (bool enable), (override));
+ MOCK_METHOD(int32_t, MicrophoneMute, (bool* enabled), (const, override));
+ MOCK_METHOD(int32_t,
+ StereoPlayoutIsAvailable,
+ (bool* available),
+ (const, override));
+ MOCK_METHOD(int32_t, SetStereoPlayout, (bool enable), (override));
+ MOCK_METHOD(int32_t, StereoPlayout, (bool* enabled), (const, override));
+ MOCK_METHOD(int32_t,
+ StereoRecordingIsAvailable,
+ (bool* available),
+ (const, override));
+ MOCK_METHOD(int32_t, SetStereoRecording, (bool enable), (override));
+ MOCK_METHOD(int32_t, StereoRecording, (bool* enabled), (const, override));
+ MOCK_METHOD(int32_t, PlayoutDelay, (uint16_t * delayMS), (const, override));
+ MOCK_METHOD(bool, BuiltInAECIsAvailable, (), (const, override));
+ MOCK_METHOD(bool, BuiltInAGCIsAvailable, (), (const, override));
+ MOCK_METHOD(bool, BuiltInNSIsAvailable, (), (const, override));
+ MOCK_METHOD(int32_t, EnableBuiltInAEC, (bool enable), (override));
+ MOCK_METHOD(int32_t, EnableBuiltInAGC, (bool enable), (override));
+ MOCK_METHOD(int32_t, EnableBuiltInNS, (bool enable), (override));
+ MOCK_METHOD(int32_t, GetPlayoutUnderrunCount, (), (const, override));
#if defined(WEBRTC_IOS)
- MOCK_CONST_METHOD1(GetPlayoutAudioParameters, int(AudioParameters* params));
- MOCK_CONST_METHOD1(GetRecordAudioParameters, int(AudioParameters* params));
+ MOCK_METHOD(int,
+ GetPlayoutAudioParameters,
+ (AudioParameters * params),
+ (const, override));
+ MOCK_METHOD(int,
+ GetRecordAudioParameters,
+ (AudioParameters * params),
+ (const, override));
#endif // WEBRTC_IOS
};
} // namespace test
diff --git a/chromium/third_party/webrtc/modules/audio_device/include/mock_audio_transport.h b/chromium/third_party/webrtc/modules/audio_device/include/mock_audio_transport.h
index ebdfbc70154..8f71a2d71f7 100644
--- a/chromium/third_party/webrtc/modules/audio_device/include/mock_audio_transport.h
+++ b/chromium/third_party/webrtc/modules/audio_device/include/mock_audio_transport.h
@@ -22,36 +22,42 @@ class MockAudioTransport : public AudioTransport {
MockAudioTransport() {}
~MockAudioTransport() {}
- MOCK_METHOD10(RecordedDataIsAvailable,
- int32_t(const void* audioSamples,
- const size_t nSamples,
- const size_t nBytesPerSample,
- const size_t nChannels,
- const uint32_t samplesPerSec,
- const uint32_t totalDelayMS,
- const int32_t clockDrift,
- const uint32_t currentMicLevel,
- const bool keyPressed,
- uint32_t& newMicLevel));
-
- MOCK_METHOD8(NeedMorePlayData,
- int32_t(const size_t nSamples,
- const size_t nBytesPerSample,
- const size_t nChannels,
- const uint32_t samplesPerSec,
- void* audioSamples,
- size_t& nSamplesOut,
- int64_t* elapsed_time_ms,
- int64_t* ntp_time_ms));
-
- MOCK_METHOD7(PullRenderData,
- void(int bits_per_sample,
- int sample_rate,
- size_t number_of_channels,
- size_t number_of_frames,
- void* audio_data,
- int64_t* elapsed_time_ms,
- int64_t* ntp_time_ms));
+ MOCK_METHOD(int32_t,
+ RecordedDataIsAvailable,
+ (const void* audioSamples,
+ const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ const uint32_t totalDelayMS,
+ const int32_t clockDrift,
+ const uint32_t currentMicLevel,
+ const bool keyPressed,
+ uint32_t& newMicLevel),
+ (override));
+
+ MOCK_METHOD(int32_t,
+ NeedMorePlayData,
+ (const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ void* audioSamples,
+ size_t& nSamplesOut,
+ int64_t* elapsed_time_ms,
+ int64_t* ntp_time_ms),
+ (override));
+
+ MOCK_METHOD(void,
+ PullRenderData,
+ (int bits_per_sample,
+ int sample_rate,
+ size_t number_of_channels,
+ size_t number_of_frames,
+ void* audio_data,
+ int64_t* elapsed_time_ms,
+ int64_t* ntp_time_ms),
+ (override));
};
} // namespace test
diff --git a/chromium/third_party/webrtc/modules/audio_device/mac/audio_device_mac.cc b/chromium/third_party/webrtc/modules/audio_device/mac/audio_device_mac.cc
index e894cf309a4..9368c312639 100644
--- a/chromium/third_party/webrtc/modules/audio_device/mac/audio_device_mac.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/mac/audio_device_mac.cc
@@ -410,7 +410,10 @@ int32_t AudioDeviceMac::SpeakerIsAvailable(bool& available) {
int32_t AudioDeviceMac::InitSpeaker() {
rtc::CritScope lock(&_critSect);
+ return InitSpeakerLocked();
+}
+int32_t AudioDeviceMac::InitSpeakerLocked() {
if (_playing) {
return -1;
}
@@ -458,7 +461,10 @@ int32_t AudioDeviceMac::MicrophoneIsAvailable(bool& available) {
int32_t AudioDeviceMac::InitMicrophone() {
rtc::CritScope lock(&_critSect);
+ return InitMicrophoneLocked();
+}
+int32_t AudioDeviceMac::InitMicrophoneLocked() {
if (_recording) {
return -1;
}
@@ -960,7 +966,7 @@ int32_t AudioDeviceMac::InitPlayout() {
}
// Initialize the speaker (devices might have been added or removed)
- if (InitSpeaker() == -1) {
+ if (InitSpeakerLocked() == -1) {
RTC_LOG(LS_WARNING) << "InitSpeaker() failed";
}
@@ -1098,7 +1104,7 @@ int32_t AudioDeviceMac::InitRecording() {
}
// Initialize the microphone (devices might have been added or removed)
- if (InitMicrophone() == -1) {
+ if (InitMicrophoneLocked() == -1) {
RTC_LOG(LS_WARNING) << "InitMicrophone() failed";
}
diff --git a/chromium/third_party/webrtc/modules/audio_device/mac/audio_device_mac.h b/chromium/third_party/webrtc/modules/audio_device/mac/audio_device_mac.h
index d7076a3c1cc..8d0e7fa571b 100644
--- a/chromium/third_party/webrtc/modules/audio_device/mac/audio_device_mac.h
+++ b/chromium/third_party/webrtc/modules/audio_device/mac/audio_device_mac.h
@@ -69,8 +69,8 @@ class AudioDeviceMac : public AudioDeviceGeneric {
AudioDeviceModule::AudioLayer& audioLayer) const;
// Main initializaton and termination
- virtual InitStatus Init();
- virtual int32_t Terminate();
+ virtual InitStatus Init() RTC_LOCKS_EXCLUDED(_critSect);
+ virtual int32_t Terminate() RTC_LOCKS_EXCLUDED(_critSect);
virtual bool Initialized() const;
// Device enumeration
@@ -84,7 +84,8 @@ class AudioDeviceMac : public AudioDeviceGeneric {
char guid[kAdmMaxGuidSize]);
// Device selection
- virtual int32_t SetPlayoutDevice(uint16_t index);
+ virtual int32_t SetPlayoutDevice(uint16_t index)
+ RTC_LOCKS_EXCLUDED(_critSect);
virtual int32_t SetPlayoutDevice(AudioDeviceModule::WindowsDeviceType device);
virtual int32_t SetRecordingDevice(uint16_t index);
virtual int32_t SetRecordingDevice(
@@ -92,24 +93,24 @@ class AudioDeviceMac : public AudioDeviceGeneric {
// Audio transport initialization
virtual int32_t PlayoutIsAvailable(bool& available);
- virtual int32_t InitPlayout();
+ virtual int32_t InitPlayout() RTC_LOCKS_EXCLUDED(_critSect);
virtual bool PlayoutIsInitialized() const;
virtual int32_t RecordingIsAvailable(bool& available);
- virtual int32_t InitRecording();
+ virtual int32_t InitRecording() RTC_LOCKS_EXCLUDED(_critSect);
virtual bool RecordingIsInitialized() const;
// Audio transport control
- virtual int32_t StartPlayout();
- virtual int32_t StopPlayout();
+ virtual int32_t StartPlayout() RTC_LOCKS_EXCLUDED(_critSect);
+ virtual int32_t StopPlayout() RTC_LOCKS_EXCLUDED(_critSect);
virtual bool Playing() const;
- virtual int32_t StartRecording();
- virtual int32_t StopRecording();
+ virtual int32_t StartRecording() RTC_LOCKS_EXCLUDED(_critSect);
+ virtual int32_t StopRecording() RTC_LOCKS_EXCLUDED(_critSect);
virtual bool Recording() const;
// Audio mixer initialization
- virtual int32_t InitSpeaker();
+ virtual int32_t InitSpeaker() RTC_LOCKS_EXCLUDED(_critSect);
virtual bool SpeakerIsInitialized() const;
- virtual int32_t InitMicrophone();
+ virtual int32_t InitMicrophone() RTC_LOCKS_EXCLUDED(_critSect);
virtual bool MicrophoneIsInitialized() const;
// Speaker volume controls
@@ -147,9 +148,13 @@ class AudioDeviceMac : public AudioDeviceGeneric {
// Delay information and control
virtual int32_t PlayoutDelay(uint16_t& delayMS) const;
- virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);
+ virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer)
+ RTC_LOCKS_EXCLUDED(_critSect);
private:
+ int32_t InitSpeakerLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(_critSect);
+ int32_t InitMicrophoneLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(_critSect);
+
virtual int32_t MicrophoneIsAvailable(bool& available);
virtual int32_t SpeakerIsAvailable(bool& available);
@@ -229,13 +234,15 @@ class AudioDeviceMac : public AudioDeviceGeneric {
OSStatus implDeviceIOProc(const AudioBufferList* inputData,
const AudioTimeStamp* inputTime,
AudioBufferList* outputData,
- const AudioTimeStamp* outputTime);
+ const AudioTimeStamp* outputTime)
+ RTC_LOCKS_EXCLUDED(_critSect);
OSStatus implOutConverterProc(UInt32* numberDataPackets,
AudioBufferList* data);
OSStatus implInDeviceIOProc(const AudioBufferList* inputData,
- const AudioTimeStamp* inputTime);
+ const AudioTimeStamp* inputTime)
+ RTC_LOCKS_EXCLUDED(_critSect);
OSStatus implInConverterProc(UInt32* numberDataPackets,
AudioBufferList* data);
diff --git a/chromium/third_party/webrtc/modules/audio_device/mac/audio_mixer_manager_mac.cc b/chromium/third_party/webrtc/modules/audio_device/mac/audio_mixer_manager_mac.cc
index e2b7d6370d9..b34b5c34226 100644
--- a/chromium/third_party/webrtc/modules/audio_device/mac/audio_mixer_manager_mac.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/mac/audio_mixer_manager_mac.cc
@@ -63,16 +63,19 @@ int32_t AudioMixerManagerMac::Close() {
rtc::CritScope lock(&_critSect);
- CloseSpeaker();
- CloseMicrophone();
+ CloseSpeakerLocked();
+ CloseMicrophoneLocked();
return 0;
}
int32_t AudioMixerManagerMac::CloseSpeaker() {
- RTC_LOG(LS_VERBOSE) << __FUNCTION__;
-
rtc::CritScope lock(&_critSect);
+ return CloseSpeakerLocked();
+}
+
+int32_t AudioMixerManagerMac::CloseSpeakerLocked() {
+ RTC_LOG(LS_VERBOSE) << __FUNCTION__;
_outputDeviceID = kAudioObjectUnknown;
_noOutputChannels = 0;
@@ -81,9 +84,12 @@ int32_t AudioMixerManagerMac::CloseSpeaker() {
}
int32_t AudioMixerManagerMac::CloseMicrophone() {
- RTC_LOG(LS_VERBOSE) << __FUNCTION__;
-
rtc::CritScope lock(&_critSect);
+ return CloseMicrophoneLocked();
+}
+
+int32_t AudioMixerManagerMac::CloseMicrophoneLocked() {
+ RTC_LOG(LS_VERBOSE) << __FUNCTION__;
_inputDeviceID = kAudioObjectUnknown;
_noInputChannels = 0;
diff --git a/chromium/third_party/webrtc/modules/audio_device/mac/audio_mixer_manager_mac.h b/chromium/third_party/webrtc/modules/audio_device/mac/audio_mixer_manager_mac.h
index 342e1c997c0..ee6149d672f 100644
--- a/chromium/third_party/webrtc/modules/audio_device/mac/audio_mixer_manager_mac.h
+++ b/chromium/third_party/webrtc/modules/audio_device/mac/audio_mixer_manager_mac.h
@@ -21,29 +21,29 @@ namespace webrtc {
class AudioMixerManagerMac {
public:
- int32_t OpenSpeaker(AudioDeviceID deviceID);
- int32_t OpenMicrophone(AudioDeviceID deviceID);
- int32_t SetSpeakerVolume(uint32_t volume);
+ int32_t OpenSpeaker(AudioDeviceID deviceID) RTC_LOCKS_EXCLUDED(_critSect);
+ int32_t OpenMicrophone(AudioDeviceID deviceID) RTC_LOCKS_EXCLUDED(_critSect);
+ int32_t SetSpeakerVolume(uint32_t volume) RTC_LOCKS_EXCLUDED(_critSect);
int32_t SpeakerVolume(uint32_t& volume) const;
int32_t MaxSpeakerVolume(uint32_t& maxVolume) const;
int32_t MinSpeakerVolume(uint32_t& minVolume) const;
int32_t SpeakerVolumeIsAvailable(bool& available);
int32_t SpeakerMuteIsAvailable(bool& available);
- int32_t SetSpeakerMute(bool enable);
+ int32_t SetSpeakerMute(bool enable) RTC_LOCKS_EXCLUDED(_critSect);
int32_t SpeakerMute(bool& enabled) const;
int32_t StereoPlayoutIsAvailable(bool& available);
int32_t StereoRecordingIsAvailable(bool& available);
int32_t MicrophoneMuteIsAvailable(bool& available);
- int32_t SetMicrophoneMute(bool enable);
+ int32_t SetMicrophoneMute(bool enable) RTC_LOCKS_EXCLUDED(_critSect);
int32_t MicrophoneMute(bool& enabled) const;
int32_t MicrophoneVolumeIsAvailable(bool& available);
- int32_t SetMicrophoneVolume(uint32_t volume);
+ int32_t SetMicrophoneVolume(uint32_t volume) RTC_LOCKS_EXCLUDED(_critSect);
int32_t MicrophoneVolume(uint32_t& volume) const;
int32_t MaxMicrophoneVolume(uint32_t& maxVolume) const;
int32_t MinMicrophoneVolume(uint32_t& minVolume) const;
- int32_t Close();
- int32_t CloseSpeaker();
- int32_t CloseMicrophone();
+ int32_t Close() RTC_LOCKS_EXCLUDED(_critSect);
+ int32_t CloseSpeaker() RTC_LOCKS_EXCLUDED(_critSect);
+ int32_t CloseMicrophone() RTC_LOCKS_EXCLUDED(_critSect);
bool SpeakerIsInitialized() const;
bool MicrophoneIsInitialized() const;
@@ -52,6 +52,8 @@ class AudioMixerManagerMac {
~AudioMixerManagerMac();
private:
+ int32_t CloseSpeakerLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(_critSect);
+ int32_t CloseMicrophoneLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(_critSect);
static void logCAMsg(const rtc::LoggingSeverity sev,
const char* msg,
const char* err);
diff --git a/chromium/third_party/webrtc/modules/audio_device/mock_audio_device_buffer.h b/chromium/third_party/webrtc/modules/audio_device/mock_audio_device_buffer.h
index 1f809cc5dcb..b0f54c20ffa 100644
--- a/chromium/third_party/webrtc/modules/audio_device/mock_audio_device_buffer.h
+++ b/chromium/third_party/webrtc/modules/audio_device/mock_audio_device_buffer.h
@@ -20,12 +20,14 @@ class MockAudioDeviceBuffer : public AudioDeviceBuffer {
public:
using AudioDeviceBuffer::AudioDeviceBuffer;
virtual ~MockAudioDeviceBuffer() {}
- MOCK_METHOD1(RequestPlayoutData, int32_t(size_t nSamples));
- MOCK_METHOD1(GetPlayoutData, int32_t(void* audioBuffer));
- MOCK_METHOD2(SetRecordedBuffer,
- int32_t(const void* audioBuffer, size_t nSamples));
- MOCK_METHOD2(SetVQEData, void(int playDelayMS, int recDelayMS));
- MOCK_METHOD0(DeliverRecordedData, int32_t());
+ MOCK_METHOD(int32_t, RequestPlayoutData, (size_t nSamples), (override));
+ MOCK_METHOD(int32_t, GetPlayoutData, (void* audioBuffer), (override));
+ MOCK_METHOD(int32_t,
+ SetRecordedBuffer,
+ (const void* audioBuffer, size_t nSamples),
+ (override));
+ MOCK_METHOD(void, SetVQEData, (int playDelayMS, int recDelayMS), (override));
+ MOCK_METHOD(int32_t, DeliverRecordedData, (), (override));
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_mixer/audio_mixer_impl_unittest.cc b/chromium/third_party/webrtc/modules/audio_mixer/audio_mixer_impl_unittest.cc
index f899dd618a5..383771ce605 100644
--- a/chromium/third_party/webrtc/modules/audio_mixer/audio_mixer_impl_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_mixer/audio_mixer_impl_unittest.cc
@@ -74,11 +74,13 @@ class MockMixerAudioSource : public ::testing::NiceMock<AudioMixer::Source> {
.WillByDefault(Return(kDefaultSampleRateHz));
}
- MOCK_METHOD2(GetAudioFrameWithInfo,
- AudioFrameInfo(int sample_rate_hz, AudioFrame* audio_frame));
+ MOCK_METHOD(AudioFrameInfo,
+ GetAudioFrameWithInfo,
+ (int sample_rate_hz, AudioFrame* audio_frame),
+ (override));
- MOCK_CONST_METHOD0(PreferredSampleRate, int());
- MOCK_CONST_METHOD0(Ssrc, int());
+ MOCK_METHOD(int, PreferredSampleRate, (), (const, override));
+ MOCK_METHOD(int, Ssrc, (), (const, override));
AudioFrame* fake_frame() { return &fake_frame_; }
AudioFrameInfo fake_info() { return fake_audio_frame_info_; }
@@ -604,7 +606,7 @@ class HighOutputRateCalculator : public OutputRateCalculator {
};
const int HighOutputRateCalculator::kDefaultFrequency;
-TEST(AudioMixer, MultipleChannelsAndHighRate) {
+TEST(AudioMixerDeathTest, MultipleChannelsAndHighRate) {
constexpr size_t kSamplesPerChannel =
HighOutputRateCalculator::kDefaultFrequency / 100;
// As many channels as an AudioFrame can fit:
diff --git a/chromium/third_party/webrtc/modules/audio_mixer/frame_combiner_unittest.cc b/chromium/third_party/webrtc/modules/audio_mixer/frame_combiner_unittest.cc
index 5f024a4a556..4b189a052e3 100644
--- a/chromium/third_party/webrtc/modules/audio_mixer/frame_combiner_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_mixer/frame_combiner_unittest.cc
@@ -89,7 +89,7 @@ TEST(FrameCombiner, BasicApiCallsLimiter) {
}
// There are DCHECKs in place to check for invalid parameters.
-TEST(FrameCombiner, DebugBuildCrashesWithManyChannels) {
+TEST(FrameCombinerDeathTest, DebugBuildCrashesWithManyChannels) {
FrameCombiner combiner(true);
for (const int rate : {8000, 18000, 34000, 48000}) {
for (const int number_of_channels : {10, 20, 21}) {
@@ -118,7 +118,7 @@ TEST(FrameCombiner, DebugBuildCrashesWithManyChannels) {
}
}
-TEST(FrameCombiner, DebugBuildCrashesWithHighRate) {
+TEST(FrameCombinerDeathTest, DebugBuildCrashesWithHighRate) {
FrameCombiner combiner(true);
for (const int rate : {50000, 96000, 128000, 196000}) {
for (const int number_of_channels : {1, 2, 3}) {
diff --git a/chromium/third_party/webrtc/modules/audio_processing/BUILD.gn b/chromium/third_party/webrtc/modules/audio_processing/BUILD.gn
index 7ca78e20b4f..22e128da99d 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/BUILD.gn
+++ b/chromium/third_party/webrtc/modules/audio_processing/BUILD.gn
@@ -53,8 +53,8 @@ rtc_library("api") {
"../../rtc_base/system:file_wrapper",
"../../rtc_base/system:rtc_export",
"agc:gain_control_interface",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("audio_frame_proxies") {
@@ -203,8 +203,8 @@ rtc_library("audio_processing") {
"ns",
"transient:transient_suppressor_api",
"vad",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
deps += [
"../../common_audio",
@@ -255,8 +255,8 @@ rtc_source_set("rms_level") {
deps = [
"../../api:array_view",
"../../rtc_base:checks",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("audio_processing_statistics") {
@@ -265,10 +265,8 @@ rtc_library("audio_processing_statistics") {
"include/audio_processing_statistics.cc",
"include/audio_processing_statistics.h",
]
- deps = [
- "../../rtc_base/system:rtc_export",
- "//third_party/abseil-cpp/absl/types:optional",
- ]
+ deps = [ "../../rtc_base/system:rtc_export" ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_source_set("audio_frame_view") {
@@ -395,8 +393,8 @@ if (rtc_include_tests) {
"utility:pffft_wrapper_unittest",
"vad:vad_unittests",
"//testing/gtest",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
defines = []
@@ -422,8 +420,8 @@ if (rtc_include_tests) {
"../../rtc_base:rtc_task_queue",
"aec_dump",
"aec_dump:aec_dump_unittests",
- "//third_party/abseil-cpp/absl/flags:flag",
]
+ absl_deps += [ "//third_party/abseil-cpp/absl/flags:flag" ]
sources += [
"audio_processing_impl_locking_unittest.cc",
"audio_processing_impl_unittest.cc",
@@ -481,8 +479,8 @@ if (rtc_include_tests) {
"../../rtc_base:rtc_base_approved",
"../../rtc_base:safe_minmax",
"agc:gain_map",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
if (rtc_enable_protobuf) {
@@ -527,6 +525,8 @@ if (rtc_include_tests) {
"aec_dump",
"aec_dump:aec_dump_impl",
"//testing/gtest",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/flags:flag",
"//third_party/abseil-cpp/absl/flags:parse",
"//third_party/abseil-cpp/absl/strings",
@@ -609,6 +609,6 @@ rtc_library("audioproc_test_utils") {
"../../test:test_support",
"../audio_coding:neteq_input_audio_tools",
"//testing/gtest",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec3/BUILD.gn b/chromium/third_party/webrtc/modules/audio_processing/aec3/BUILD.gn
index d07ffa6abe4..507f2bc8bda 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec3/BUILD.gn
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec3/BUILD.gn
@@ -150,8 +150,8 @@ rtc_library("aec3") {
"../../../system_wrappers:field_trial",
"../../../system_wrappers:metrics",
"../utility:cascaded_biquad_filter",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
if (rtc_include_tests) {
@@ -187,8 +187,8 @@ if (rtc_include_tests) {
"../../../test:field_trial",
"../../../test:test_support",
"../utility:cascaded_biquad_filter",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
defines = []
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec3/adaptive_fir_filter_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/aec3/adaptive_fir_filter_unittest.cc
index 8e4f5d96443..39f4e111928 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec3/adaptive_fir_filter_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec3/adaptive_fir_filter_unittest.cc
@@ -285,13 +285,13 @@ TEST_P(AdaptiveFirFilterOneTwoFourEightRenderChannels,
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// Verifies that the check for non-null data dumper works.
-TEST(AdaptiveFirFilterTest, NullDataDumper) {
+TEST(AdaptiveFirFilterDeathTest, NullDataDumper) {
EXPECT_DEATH(AdaptiveFirFilter(9, 9, 250, 1, DetectOptimization(), nullptr),
"");
}
// Verifies that the check for non-null filter output works.
-TEST(AdaptiveFirFilterTest, NullFilterOutput) {
+TEST(AdaptiveFirFilterDeathTest, NullFilterOutput) {
ApmDataDumper data_dumper(42);
AdaptiveFirFilter filter(9, 9, 250, 1, DetectOptimization(), &data_dumper);
std::unique_ptr<RenderDelayBuffer> render_delay_buffer(
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec3/aec3_fft.cc b/chromium/third_party/webrtc/modules/audio_processing/aec3/aec3_fft.cc
index 18321018552..d1d4f7da067 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec3/aec3_fft.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec3/aec3_fft.cc
@@ -15,6 +15,7 @@
#include <iterator>
#include "rtc_base/checks.h"
+#include "system_wrappers/include/cpu_features_wrapper.h"
namespace webrtc {
@@ -70,8 +71,18 @@ const float kSqrtHanning128[kFftLength] = {
0.19509032201613f, 0.17096188876030f, 0.14673047445536f, 0.12241067519922f,
0.09801714032956f, 0.07356456359967f, 0.04906767432742f, 0.02454122852291f};
+bool IsSse2Available() {
+#if defined(WEBRTC_ARCH_X86_FAMILY)
+ return WebRtc_GetCPUInfo(kSSE2) != 0;
+#else
+ return false;
+#endif
+}
+
} // namespace
+Aec3Fft::Aec3Fft() : ooura_fft_(IsSse2Available()) {}
+
// TODO(peah): Change x to be std::array once the rest of the code allows this.
void Aec3Fft::ZeroPaddedFft(rtc::ArrayView<const float> x,
Window window,
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec3/aec3_fft.h b/chromium/third_party/webrtc/modules/audio_processing/aec3/aec3_fft.h
index 7a2e024d759..6f7fbe4d0e1 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec3/aec3_fft.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec3/aec3_fft.h
@@ -28,7 +28,8 @@ class Aec3Fft {
public:
enum class Window { kRectangular, kHanning, kSqrtHanning };
- Aec3Fft() = default;
+ Aec3Fft();
+
// Computes the FFT. Note that both the input and output are modified.
void Fft(std::array<float, kFftLength>* x, FftData* X) const {
RTC_DCHECK(x);
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec3/aec3_fft_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/aec3/aec3_fft_unittest.cc
index 82d6e766cc8..e60ef5b7132 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec3/aec3_fft_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec3/aec3_fft_unittest.cc
@@ -20,28 +20,28 @@ namespace webrtc {
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// Verifies that the check for non-null input in Fft works.
-TEST(Aec3Fft, NullFftInput) {
+TEST(Aec3FftDeathTest, NullFftInput) {
Aec3Fft fft;
FftData X;
EXPECT_DEATH(fft.Fft(nullptr, &X), "");
}
// Verifies that the check for non-null input in Fft works.
-TEST(Aec3Fft, NullFftOutput) {
+TEST(Aec3FftDeathTest, NullFftOutput) {
Aec3Fft fft;
std::array<float, kFftLength> x;
EXPECT_DEATH(fft.Fft(&x, nullptr), "");
}
// Verifies that the check for non-null output in Ifft works.
-TEST(Aec3Fft, NullIfftOutput) {
+TEST(Aec3FftDeathTest, NullIfftOutput) {
Aec3Fft fft;
FftData X;
EXPECT_DEATH(fft.Ifft(X, nullptr), "");
}
// Verifies that the check for non-null output in ZeroPaddedFft works.
-TEST(Aec3Fft, NullZeroPaddedFftOutput) {
+TEST(Aec3FftDeathTest, NullZeroPaddedFftOutput) {
Aec3Fft fft;
std::array<float, kFftLengthBy2> x;
EXPECT_DEATH(fft.ZeroPaddedFft(x, Aec3Fft::Window::kRectangular, nullptr),
@@ -49,7 +49,7 @@ TEST(Aec3Fft, NullZeroPaddedFftOutput) {
}
// Verifies that the check for input length in ZeroPaddedFft works.
-TEST(Aec3Fft, ZeroPaddedFftWrongInputLength) {
+TEST(Aec3FftDeathTest, ZeroPaddedFftWrongInputLength) {
Aec3Fft fft;
FftData X;
std::array<float, kFftLengthBy2 - 1> x;
@@ -57,7 +57,7 @@ TEST(Aec3Fft, ZeroPaddedFftWrongInputLength) {
}
// Verifies that the check for non-null output in PaddedFft works.
-TEST(Aec3Fft, NullPaddedFftOutput) {
+TEST(Aec3FftDeathTest, NullPaddedFftOutput) {
Aec3Fft fft;
std::array<float, kFftLengthBy2> x;
std::array<float, kFftLengthBy2> x_old;
@@ -65,7 +65,7 @@ TEST(Aec3Fft, NullPaddedFftOutput) {
}
// Verifies that the check for input length in PaddedFft works.
-TEST(Aec3Fft, PaddedFftWrongInputLength) {
+TEST(Aec3FftDeathTest, PaddedFftWrongInputLength) {
Aec3Fft fft;
FftData X;
std::array<float, kFftLengthBy2 - 1> x;
@@ -74,7 +74,7 @@ TEST(Aec3Fft, PaddedFftWrongInputLength) {
}
// Verifies that the check for length in the old value in PaddedFft works.
-TEST(Aec3Fft, PaddedFftWrongOldValuesLength) {
+TEST(Aec3FftDeathTest, PaddedFftWrongOldValuesLength) {
Aec3Fft fft;
FftData X;
std::array<float, kFftLengthBy2> x;
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec3/alignment_mixer_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/aec3/alignment_mixer_unittest.cc
index 832e4ea8845..03ef06614b5 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec3/alignment_mixer_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec3/alignment_mixer_unittest.cc
@@ -175,7 +175,7 @@ TEST(AlignmentMixer, FixedMode) {
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST(AlignmentMixer, ZeroNumChannels) {
+TEST(AlignmentMixerDeathTest, ZeroNumChannels) {
EXPECT_DEATH(
AlignmentMixer(/*num_channels*/ 0, /*downmix*/ false,
/*adaptive_selection*/ false, /*excitation_limit*/ 1.f,
@@ -183,7 +183,7 @@ TEST(AlignmentMixer, ZeroNumChannels) {
, "");
}
-TEST(AlignmentMixer, IncorrectVariant) {
+TEST(AlignmentMixerDeathTest, IncorrectVariant) {
EXPECT_DEATH(
AlignmentMixer(/*num_channels*/ 1, /*downmix*/ true,
/*adaptive_selection*/ true, /*excitation_limit*/ 1.f,
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec3/block_framer_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/aec3/block_framer_unittest.cc
index e9a16d06d56..d67967bc02a 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec3/block_framer_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec3/block_framer_unittest.cc
@@ -214,7 +214,8 @@ std::string ProduceDebugText(int sample_rate_hz, size_t num_channels) {
} // namespace
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST(BlockFramer, WrongNumberOfBandsInBlockForInsertBlockAndExtractSubFrame) {
+TEST(BlockFramerDeathTest,
+ WrongNumberOfBandsInBlockForInsertBlockAndExtractSubFrame) {
for (auto rate : {16000, 32000, 48000}) {
for (auto correct_num_channels : {1, 2, 8}) {
SCOPED_TRACE(ProduceDebugText(rate, correct_num_channels));
@@ -227,7 +228,7 @@ TEST(BlockFramer, WrongNumberOfBandsInBlockForInsertBlockAndExtractSubFrame) {
}
}
-TEST(BlockFramer,
+TEST(BlockFramerDeathTest,
WrongNumberOfChannelsInBlockForInsertBlockAndExtractSubFrame) {
for (auto rate : {16000, 32000, 48000}) {
for (auto correct_num_channels : {1, 2, 8}) {
@@ -241,7 +242,7 @@ TEST(BlockFramer,
}
}
-TEST(BlockFramer,
+TEST(BlockFramerDeathTest,
WrongNumberOfBandsInSubFrameForInsertBlockAndExtractSubFrame) {
for (auto rate : {16000, 32000, 48000}) {
for (auto correct_num_channels : {1, 2, 8}) {
@@ -255,7 +256,7 @@ TEST(BlockFramer,
}
}
-TEST(BlockFramer,
+TEST(BlockFramerDeathTest,
WrongNumberOfChannelsInSubFrameForInsertBlockAndExtractSubFrame) {
for (auto rate : {16000, 32000, 48000}) {
for (auto correct_num_channels : {1, 2, 8}) {
@@ -269,7 +270,8 @@ TEST(BlockFramer,
}
}
-TEST(BlockFramer, WrongNumberOfSamplesInBlockForInsertBlockAndExtractSubFrame) {
+TEST(BlockFramerDeathTest,
+ WrongNumberOfSamplesInBlockForInsertBlockAndExtractSubFrame) {
for (auto rate : {16000, 32000, 48000}) {
for (auto correct_num_channels : {1, 2, 8}) {
SCOPED_TRACE(ProduceDebugText(rate, correct_num_channels));
@@ -282,7 +284,7 @@ TEST(BlockFramer, WrongNumberOfSamplesInBlockForInsertBlockAndExtractSubFrame) {
}
}
-TEST(BlockFramer,
+TEST(BlockFramerDeathTest,
WrongNumberOfSamplesInSubFrameForInsertBlockAndExtractSubFrame) {
const size_t correct_num_channels = 1;
for (auto rate : {16000, 32000, 48000}) {
@@ -295,7 +297,7 @@ TEST(BlockFramer,
}
}
-TEST(BlockFramer, WrongNumberOfBandsInBlockForInsertBlock) {
+TEST(BlockFramerDeathTest, WrongNumberOfBandsInBlockForInsertBlock) {
for (auto rate : {16000, 32000, 48000}) {
for (auto correct_num_channels : {1, 2, 8}) {
SCOPED_TRACE(ProduceDebugText(rate, correct_num_channels));
@@ -308,7 +310,7 @@ TEST(BlockFramer, WrongNumberOfBandsInBlockForInsertBlock) {
}
}
-TEST(BlockFramer, WrongNumberOfChannelsInBlockForInsertBlock) {
+TEST(BlockFramerDeathTest, WrongNumberOfChannelsInBlockForInsertBlock) {
for (auto rate : {16000, 32000, 48000}) {
for (auto correct_num_channels : {1, 2, 8}) {
SCOPED_TRACE(ProduceDebugText(rate, correct_num_channels));
@@ -321,7 +323,7 @@ TEST(BlockFramer, WrongNumberOfChannelsInBlockForInsertBlock) {
}
}
-TEST(BlockFramer, WrongNumberOfSamplesInBlockForInsertBlock) {
+TEST(BlockFramerDeathTest, WrongNumberOfSamplesInBlockForInsertBlock) {
for (auto rate : {16000, 32000, 48000}) {
for (auto correct_num_channels : {1, 2, 8}) {
SCOPED_TRACE(ProduceDebugText(rate, correct_num_channels));
@@ -333,7 +335,7 @@ TEST(BlockFramer, WrongNumberOfSamplesInBlockForInsertBlock) {
}
}
-TEST(BlockFramer, WrongNumberOfPreceedingApiCallsForInsertBlock) {
+TEST(BlockFramerDeathTest, WrongNumberOfPreceedingApiCallsForInsertBlock) {
for (size_t num_channels : {1, 2, 8}) {
for (auto rate : {16000, 32000, 48000}) {
for (size_t num_calls = 0; num_calls < 4; ++num_calls) {
@@ -351,17 +353,17 @@ TEST(BlockFramer, WrongNumberOfPreceedingApiCallsForInsertBlock) {
}
// Verifies that the verification for 0 number of channels works.
-TEST(BlockFramer, ZeroNumberOfChannelsParameter) {
+TEST(BlockFramerDeathTest, ZeroNumberOfChannelsParameter) {
EXPECT_DEATH(BlockFramer(16000, 0), "");
}
// Verifies that the verification for 0 number of bands works.
-TEST(BlockFramer, ZeroNumberOfBandsParameter) {
+TEST(BlockFramerDeathTest, ZeroNumberOfBandsParameter) {
EXPECT_DEATH(BlockFramer(0, 1), "");
}
// Verifies that the verification for null sub_frame pointer works.
-TEST(BlockFramer, NullSubFrameParameter) {
+TEST(BlockFramerDeathTest, NullSubFrameParameter) {
EXPECT_DEATH(BlockFramer(1, 1).InsertBlockAndExtractSubFrame(
std::vector<std::vector<std::vector<float>>>(
1, std::vector<std::vector<float>>(
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec3/block_processor_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/aec3/block_processor_unittest.cc
index 2b928e877b1..911dad4c818 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec3/block_processor_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec3/block_processor_unittest.cc
@@ -252,21 +252,21 @@ TEST(BlockProcessor, TestLongerCall) {
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// TODO(gustaf): Re-enable the test once the issue with memory leaks during
// DEATH tests on test bots has been fixed.
-TEST(BlockProcessor, DISABLED_VerifyRenderBlockSizeCheck) {
+TEST(BlockProcessorDeathTest, DISABLED_VerifyRenderBlockSizeCheck) {
for (auto rate : {16000, 32000, 48000}) {
SCOPED_TRACE(ProduceDebugText(rate));
RunRenderBlockSizeVerificationTest(rate);
}
}
-TEST(BlockProcessor, VerifyCaptureBlockSizeCheck) {
+TEST(BlockProcessorDeathTest, VerifyCaptureBlockSizeCheck) {
for (auto rate : {16000, 32000, 48000}) {
SCOPED_TRACE(ProduceDebugText(rate));
RunCaptureBlockSizeVerificationTest(rate);
}
}
-TEST(BlockProcessor, VerifyRenderNumBandsCheck) {
+TEST(BlockProcessorDeathTest, VerifyRenderNumBandsCheck) {
for (auto rate : {16000, 32000, 48000}) {
SCOPED_TRACE(ProduceDebugText(rate));
RunRenderNumBandsVerificationTest(rate);
@@ -275,7 +275,7 @@ TEST(BlockProcessor, VerifyRenderNumBandsCheck) {
// TODO(peah): Verify the check for correct number of bands in the capture
// signal.
-TEST(BlockProcessor, VerifyCaptureNumBandsCheck) {
+TEST(BlockProcessorDeathTest, VerifyCaptureNumBandsCheck) {
for (auto rate : {16000, 32000, 48000}) {
SCOPED_TRACE(ProduceDebugText(rate));
RunCaptureNumBandsVerificationTest(rate);
@@ -283,7 +283,7 @@ TEST(BlockProcessor, VerifyCaptureNumBandsCheck) {
}
// Verifiers that the verification for null ProcessCapture input works.
-TEST(BlockProcessor, NullProcessCaptureParameter) {
+TEST(BlockProcessorDeathTest, NullProcessCaptureParameter) {
EXPECT_DEATH(std::unique_ptr<BlockProcessor>(
BlockProcessor::Create(EchoCanceller3Config(), 16000, 1, 1))
->ProcessCapture(false, false, nullptr, nullptr),
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec3/clockdrift_detector.h b/chromium/third_party/webrtc/modules/audio_processing/aec3/clockdrift_detector.h
index 22528c94892..2ba90bb8890 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec3/clockdrift_detector.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec3/clockdrift_detector.h
@@ -11,6 +11,8 @@
#ifndef MODULES_AUDIO_PROCESSING_AEC3_CLOCKDRIFT_DETECTOR_H_
#define MODULES_AUDIO_PROCESSING_AEC3_CLOCKDRIFT_DETECTOR_H_
+#include <stddef.h>
+
#include <array>
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec3/coarse_filter_update_gain_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/aec3/coarse_filter_update_gain_unittest.cc
index 4185c1adb81..92775cf7028 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec3/coarse_filter_update_gain_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec3/coarse_filter_update_gain_unittest.cc
@@ -138,7 +138,7 @@ std::string ProduceDebugText(size_t delay, int filter_length_blocks) {
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// Verifies that the check for non-null output gain parameter works.
-TEST(CoarseFilterUpdateGain, NullDataOutputGain) {
+TEST(CoarseFilterUpdateGainDeathTest, NullDataOutputGain) {
ApmDataDumper data_dumper(42);
FftBuffer fft_buffer(1, 1);
RenderSignalAnalyzer analyzer(EchoCanceller3Config{});
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec3/decimator_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/aec3/decimator_unittest.cc
index 1e279cea3e5..e6f5ea04034 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec3/decimator_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec3/decimator_unittest.cc
@@ -103,7 +103,7 @@ TEST(Decimator, NoLeakageFromUpperFrequencies) {
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// Verifies the check for the input size.
-TEST(Decimator, WrongInputSize) {
+TEST(DecimatorDeathTest, WrongInputSize) {
Decimator decimator(4);
std::vector<float> x(kBlockSize - 1, 0.f);
std::array<float, kBlockSize / 4> x_downsampled;
@@ -111,14 +111,14 @@ TEST(Decimator, WrongInputSize) {
}
// Verifies the check for non-null output parameter.
-TEST(Decimator, NullOutput) {
+TEST(DecimatorDeathTest, NullOutput) {
Decimator decimator(4);
std::vector<float> x(kBlockSize, 0.f);
EXPECT_DEATH(decimator.Decimate(x, nullptr), "");
}
// Verifies the check for the output size.
-TEST(Decimator, WrongOutputSize) {
+TEST(DecimatorDeathTest, WrongOutputSize) {
Decimator decimator(4);
std::vector<float> x(kBlockSize, 0.f);
std::array<float, kBlockSize / 4 - 1> x_downsampled;
@@ -126,7 +126,7 @@ TEST(Decimator, WrongOutputSize) {
}
// Verifies the check for the correct downsampling factor.
-TEST(Decimator, CorrectDownSamplingFactor) {
+TEST(DecimatorDeathTest, CorrectDownSamplingFactor) {
EXPECT_DEATH(Decimator(3), "");
}
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec3/echo_canceller3_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/aec3/echo_canceller3_unittest.cc
index 21255f192e1..04d93e4db43 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec3/echo_canceller3_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec3/echo_canceller3_unittest.cc
@@ -890,7 +890,7 @@ TEST(EchoCanceller3FieldTrials, Aec3SuppressorTuningOverrideOneParam) {
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST(EchoCanceller3InputCheck, WrongCaptureNumBandsCheckVerification) {
+TEST(EchoCanceller3InputCheckDeathTest, WrongCaptureNumBandsCheckVerification) {
for (auto rate : {16000, 32000, 48000}) {
SCOPED_TRACE(ProduceDebugText(rate));
EchoCanceller3Tester(rate).RunProcessCaptureNumBandsCheckVerification();
@@ -899,7 +899,7 @@ TEST(EchoCanceller3InputCheck, WrongCaptureNumBandsCheckVerification) {
// Verifiers that the verification for null input to the capture processing api
// call works.
-TEST(EchoCanceller3InputCheck, NullCaptureProcessingParameter) {
+TEST(EchoCanceller3InputCheckDeathTest, NullCaptureProcessingParameter) {
EXPECT_DEATH(EchoCanceller3(EchoCanceller3Config(), 16000, 1, 1)
.ProcessCapture(nullptr, false),
"");
@@ -908,7 +908,7 @@ TEST(EchoCanceller3InputCheck, NullCaptureProcessingParameter) {
// Verifies the check for correct sample rate.
// TODO(peah): Re-enable the test once the issue with memory leaks during DEATH
// tests on test bots has been fixed.
-TEST(EchoCanceller3InputCheck, DISABLED_WrongSampleRate) {
+TEST(EchoCanceller3InputCheckDeathTest, DISABLED_WrongSampleRate) {
ApmDataDumper data_dumper(0);
EXPECT_DEATH(EchoCanceller3(EchoCanceller3Config(), 8001, 1, 1), "");
}
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec3/echo_path_delay_estimator_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/aec3/echo_path_delay_estimator_unittest.cc
index 8003a11bbcf..6ba4cdd0d7c 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec3/echo_path_delay_estimator_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec3/echo_path_delay_estimator_unittest.cc
@@ -165,7 +165,7 @@ TEST(EchoPathDelayEstimator, NoDelayEstimatesForLowLevelRenderSignals) {
// Verifies the check for the render blocksize.
// TODO(peah): Re-enable the test once the issue with memory leaks during DEATH
// tests on test bots has been fixed.
-TEST(EchoPathDelayEstimator, DISABLED_WrongRenderBlockSize) {
+TEST(EchoPathDelayEstimatorDeathTest, DISABLED_WrongRenderBlockSize) {
ApmDataDumper data_dumper(0);
EchoCanceller3Config config;
EchoPathDelayEstimator estimator(&data_dumper, config, 1);
@@ -180,7 +180,7 @@ TEST(EchoPathDelayEstimator, DISABLED_WrongRenderBlockSize) {
// Verifies the check for the capture blocksize.
// TODO(peah): Re-enable the test once the issue with memory leaks during DEATH
// tests on test bots has been fixed.
-TEST(EchoPathDelayEstimator, WrongCaptureBlockSize) {
+TEST(EchoPathDelayEstimatorDeathTest, WrongCaptureBlockSize) {
ApmDataDumper data_dumper(0);
EchoCanceller3Config config;
EchoPathDelayEstimator estimator(&data_dumper, config, 1);
@@ -194,7 +194,7 @@ TEST(EchoPathDelayEstimator, WrongCaptureBlockSize) {
}
// Verifies the check for non-null data dumper.
-TEST(EchoPathDelayEstimator, NullDataDumper) {
+TEST(EchoPathDelayEstimatorDeathTest, NullDataDumper) {
EXPECT_DEATH(EchoPathDelayEstimator(nullptr, EchoCanceller3Config(), 1), "");
}
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec3/echo_remover_metrics_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/aec3/echo_remover_metrics_unittest.cc
index 30c66118699..45b30a9c74c 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec3/echo_remover_metrics_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec3/echo_remover_metrics_unittest.cc
@@ -23,7 +23,7 @@ namespace webrtc {
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// Verifies the check for non-null input.
-TEST(UpdateDbMetric, NullValue) {
+TEST(UpdateDbMetricDeathTest, NullValue) {
std::array<float, kFftLengthBy2Plus1> value;
value.fill(0.f);
EXPECT_DEATH(aec3::UpdateDbMetric(value, nullptr), "");
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec3/echo_remover_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/aec3/echo_remover_unittest.cc
index e050027c63f..77a207659ce 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec3/echo_remover_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec3/echo_remover_unittest.cc
@@ -91,14 +91,14 @@ TEST_P(EchoRemoverMultiChannel, BasicApiCalls) {
// Verifies the check for the samplerate.
// TODO(peah): Re-enable the test once the issue with memory leaks during DEATH
// tests on test bots has been fixed.
-TEST(EchoRemover, DISABLED_WrongSampleRate) {
+TEST(EchoRemoverDeathTest, DISABLED_WrongSampleRate) {
EXPECT_DEATH(std::unique_ptr<EchoRemover>(
EchoRemover::Create(EchoCanceller3Config(), 8001, 1, 1)),
"");
}
// Verifies the check for the capture block size.
-TEST(EchoRemover, WrongCaptureBlockSize) {
+TEST(EchoRemoverDeathTest, WrongCaptureBlockSize) {
absl::optional<DelayEstimate> delay_estimate;
for (auto rate : {16000, 32000, 48000}) {
SCOPED_TRACE(ProduceDebugText(rate));
@@ -121,7 +121,7 @@ TEST(EchoRemover, WrongCaptureBlockSize) {
// Verifies the check for the number of capture bands.
// TODO(peah): Re-enable the test once the issue with memory leaks during DEATH
// tests on test bots has been fixed.c
-TEST(EchoRemover, DISABLED_WrongCaptureNumBands) {
+TEST(EchoRemoverDeathTest, DISABLED_WrongCaptureNumBands) {
absl::optional<DelayEstimate> delay_estimate;
for (auto rate : {16000, 32000, 48000}) {
SCOPED_TRACE(ProduceDebugText(rate));
@@ -143,7 +143,7 @@ TEST(EchoRemover, DISABLED_WrongCaptureNumBands) {
}
// Verifies the check for non-null capture block.
-TEST(EchoRemover, NullCapture) {
+TEST(EchoRemoverDeathTest, NullCapture) {
absl::optional<DelayEstimate> delay_estimate;
std::unique_ptr<EchoRemover> remover(
EchoRemover::Create(EchoCanceller3Config(), 16000, 1, 1));
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec3/fft_data_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/aec3/fft_data_unittest.cc
index 0812fd64208..9be26804533 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec3/fft_data_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec3/fft_data_unittest.cc
@@ -44,12 +44,12 @@ TEST(FftData, TestOptimizations) {
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// Verifies the check for null output in CopyToPackedArray.
-TEST(FftData, NonNullCopyToPackedArrayOutput) {
+TEST(FftDataDeathTest, NonNullCopyToPackedArrayOutput) {
EXPECT_DEATH(FftData().CopyToPackedArray(nullptr), "");
}
// Verifies the check for null output in Spectrum.
-TEST(FftData, NonNullSpectrumOutput) {
+TEST(FftDataDeathTest, NonNullSpectrumOutput) {
EXPECT_DEATH(FftData().Spectrum(Aec3Optimization::kNone, nullptr), "");
}
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec3/frame_blocker_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/aec3/frame_blocker_unittest.cc
index e907608d95f..216f5150377 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec3/frame_blocker_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec3/frame_blocker_unittest.cc
@@ -287,7 +287,8 @@ std::string ProduceDebugText(int sample_rate_hz, size_t num_channels) {
} // namespace
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST(FrameBlocker, WrongNumberOfBandsInBlockForInsertSubFrameAndExtractBlock) {
+TEST(FrameBlockerDeathTest,
+ WrongNumberOfBandsInBlockForInsertSubFrameAndExtractBlock) {
for (auto rate : {16000, 32000, 48000}) {
for (size_t correct_num_channels : {1, 2, 4, 8}) {
SCOPED_TRACE(ProduceDebugText(rate, correct_num_channels));
@@ -300,7 +301,7 @@ TEST(FrameBlocker, WrongNumberOfBandsInBlockForInsertSubFrameAndExtractBlock) {
}
}
-TEST(FrameBlocker,
+TEST(FrameBlockerDeathTest,
WrongNumberOfChannelsInBlockForInsertSubFrameAndExtractBlock) {
for (auto rate : {16000, 32000, 48000}) {
for (size_t correct_num_channels : {1, 2, 4, 8}) {
@@ -314,7 +315,7 @@ TEST(FrameBlocker,
}
}
-TEST(FrameBlocker,
+TEST(FrameBlockerDeathTest,
WrongNumberOfBandsInSubFrameForInsertSubFrameAndExtractBlock) {
for (auto rate : {16000, 32000, 48000}) {
for (size_t correct_num_channels : {1, 2, 4, 8}) {
@@ -328,7 +329,7 @@ TEST(FrameBlocker,
}
}
-TEST(FrameBlocker,
+TEST(FrameBlockerDeathTest,
WrongNumberOfChannelsInSubFrameForInsertSubFrameAndExtractBlock) {
for (auto rate : {16000, 32000, 48000}) {
for (size_t correct_num_channels : {1, 2, 4, 8}) {
@@ -342,7 +343,7 @@ TEST(FrameBlocker,
}
}
-TEST(FrameBlocker,
+TEST(FrameBlockerDeathTest,
WrongNumberOfSamplesInBlockForInsertSubFrameAndExtractBlock) {
for (auto rate : {16000, 32000, 48000}) {
for (size_t correct_num_channels : {1, 2, 4, 8}) {
@@ -356,7 +357,7 @@ TEST(FrameBlocker,
}
}
-TEST(FrameBlocker,
+TEST(FrameBlockerDeathTest,
WrongNumberOfSamplesInSubFrameForInsertSubFrameAndExtractBlock) {
for (auto rate : {16000, 32000, 48000}) {
for (size_t correct_num_channels : {1, 2, 4, 8}) {
@@ -370,7 +371,7 @@ TEST(FrameBlocker,
}
}
-TEST(FrameBlocker, WrongNumberOfBandsInBlockForExtractBlock) {
+TEST(FrameBlockerDeathTest, WrongNumberOfBandsInBlockForExtractBlock) {
for (auto rate : {16000, 32000, 48000}) {
for (size_t correct_num_channels : {1, 2, 4, 8}) {
SCOPED_TRACE(ProduceDebugText(rate, correct_num_channels));
@@ -383,7 +384,7 @@ TEST(FrameBlocker, WrongNumberOfBandsInBlockForExtractBlock) {
}
}
-TEST(FrameBlocker, WrongNumberOfChannelsInBlockForExtractBlock) {
+TEST(FrameBlockerDeathTest, WrongNumberOfChannelsInBlockForExtractBlock) {
for (auto rate : {16000, 32000, 48000}) {
for (size_t correct_num_channels : {1, 2, 4, 8}) {
SCOPED_TRACE(ProduceDebugText(rate, correct_num_channels));
@@ -396,7 +397,7 @@ TEST(FrameBlocker, WrongNumberOfChannelsInBlockForExtractBlock) {
}
}
-TEST(FrameBlocker, WrongNumberOfSamplesInBlockForExtractBlock) {
+TEST(FrameBlockerDeathTest, WrongNumberOfSamplesInBlockForExtractBlock) {
for (auto rate : {16000, 32000, 48000}) {
for (size_t correct_num_channels : {1, 2, 4, 8}) {
SCOPED_TRACE(ProduceDebugText(rate, correct_num_channels));
@@ -408,7 +409,7 @@ TEST(FrameBlocker, WrongNumberOfSamplesInBlockForExtractBlock) {
}
}
-TEST(FrameBlocker, WrongNumberOfPreceedingApiCallsForExtractBlock) {
+TEST(FrameBlockerDeathTest, WrongNumberOfPreceedingApiCallsForExtractBlock) {
for (auto rate : {16000, 32000, 48000}) {
for (size_t num_channels : {1, 2, 4, 8}) {
for (size_t num_calls = 0; num_calls < 4; ++num_calls) {
@@ -426,17 +427,17 @@ TEST(FrameBlocker, WrongNumberOfPreceedingApiCallsForExtractBlock) {
}
// Verifies that the verification for 0 number of channels works.
-TEST(FrameBlocker, ZeroNumberOfChannelsParameter) {
+TEST(FrameBlockerDeathTest, ZeroNumberOfChannelsParameter) {
EXPECT_DEATH(FrameBlocker(16000, 0), "");
}
// Verifies that the verification for 0 number of bands works.
-TEST(FrameBlocker, ZeroNumberOfBandsParameter) {
+TEST(FrameBlockerDeathTest, ZeroNumberOfBandsParameter) {
EXPECT_DEATH(FrameBlocker(0, 1), "");
}
// Verifiers that the verification for null sub_frame pointer works.
-TEST(FrameBlocker, NullBlockParameter) {
+TEST(FrameBlockerDeathTest, NullBlockParameter) {
std::vector<std::vector<std::vector<float>>> sub_frame(
1, std::vector<std::vector<float>>(
1, std::vector<float>(kSubFrameLength, 0.f)));
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec3/matched_filter_lag_aggregator_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/aec3/matched_filter_lag_aggregator_unittest.cc
index e136c898772..8e2a12e6c5d 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec3/matched_filter_lag_aggregator_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec3/matched_filter_lag_aggregator_unittest.cc
@@ -144,7 +144,7 @@ TEST(MatchedFilterLagAggregator, DISABLED_PersistentAggregatedLag) {
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// Verifies the check for non-null data dumper.
-TEST(MatchedFilterLagAggregator, NullDataDumper) {
+TEST(MatchedFilterLagAggregatorDeathTest, NullDataDumper) {
EchoCanceller3Config config;
EXPECT_DEATH(MatchedFilterLagAggregator(
nullptr, 10, config.delay.delay_selection_thresholds),
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec3/matched_filter_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/aec3/matched_filter_unittest.cc
index 8a6e22eecaa..7d9a7d4d0af 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec3/matched_filter_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec3/matched_filter_unittest.cc
@@ -375,7 +375,7 @@ TEST(MatchedFilter, NumberOfLagEstimates) {
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// Verifies the check for non-zero windows size.
-TEST(MatchedFilter, ZeroWindowSize) {
+TEST(MatchedFilterDeathTest, ZeroWindowSize) {
ApmDataDumper data_dumper(0);
EchoCanceller3Config config;
EXPECT_DEATH(MatchedFilter(&data_dumper, DetectOptimization(), 16, 0, 1, 1,
@@ -385,7 +385,7 @@ TEST(MatchedFilter, ZeroWindowSize) {
}
// Verifies the check for non-null data dumper.
-TEST(MatchedFilter, NullDataDumper) {
+TEST(MatchedFilterDeathTest, NullDataDumper) {
EchoCanceller3Config config;
EXPECT_DEATH(MatchedFilter(nullptr, DetectOptimization(), 16, 1, 1, 1, 150,
config.delay.delay_estimate_smoothing,
@@ -395,7 +395,7 @@ TEST(MatchedFilter, NullDataDumper) {
// Verifies the check for that the sub block size is a multiple of 4.
// TODO(peah): Activate the unittest once the required code has been landed.
-TEST(MatchedFilter, DISABLED_BlockSizeMultipleOf4) {
+TEST(MatchedFilterDeathTest, DISABLED_BlockSizeMultipleOf4) {
ApmDataDumper data_dumper(0);
EchoCanceller3Config config;
EXPECT_DEATH(MatchedFilter(&data_dumper, DetectOptimization(), 15, 1, 1, 1,
@@ -407,7 +407,7 @@ TEST(MatchedFilter, DISABLED_BlockSizeMultipleOf4) {
// Verifies the check for that there is an integer number of sub blocks that add
// up to a block size.
// TODO(peah): Activate the unittest once the required code has been landed.
-TEST(MatchedFilter, DISABLED_SubBlockSizeAddsUpToBlockSize) {
+TEST(MatchedFilterDeathTest, DISABLED_SubBlockSizeAddsUpToBlockSize) {
ApmDataDumper data_dumper(0);
EchoCanceller3Config config;
EXPECT_DEATH(MatchedFilter(&data_dumper, DetectOptimization(), 12, 1, 1, 1,
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec3/mock/mock_block_processor.h b/chromium/third_party/webrtc/modules/audio_processing/aec3/mock/mock_block_processor.h
index e9a95c837d2..e1eb26702f1 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec3/mock/mock_block_processor.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec3/mock/mock_block_processor.h
@@ -24,17 +24,26 @@ class MockBlockProcessor : public BlockProcessor {
MockBlockProcessor();
virtual ~MockBlockProcessor();
- MOCK_METHOD4(
- ProcessCapture,
- void(bool level_change,
- bool saturated_microphone_signal,
- std::vector<std::vector<std::vector<float>>>* linear_output,
- std::vector<std::vector<std::vector<float>>>* capture_block));
- MOCK_METHOD1(BufferRender,
- void(const std::vector<std::vector<std::vector<float>>>& block));
- MOCK_METHOD1(UpdateEchoLeakageStatus, void(bool leakage_detected));
- MOCK_CONST_METHOD1(GetMetrics, void(EchoControl::Metrics* metrics));
- MOCK_METHOD1(SetAudioBufferDelay, void(int delay_ms));
+ MOCK_METHOD(void,
+ ProcessCapture,
+ (bool level_change,
+ bool saturated_microphone_signal,
+ std::vector<std::vector<std::vector<float>>>* linear_output,
+ std::vector<std::vector<std::vector<float>>>* capture_block),
+ (override));
+ MOCK_METHOD(void,
+ BufferRender,
+ (const std::vector<std::vector<std::vector<float>>>& block),
+ (override));
+ MOCK_METHOD(void,
+ UpdateEchoLeakageStatus,
+ (bool leakage_detected),
+ (override));
+ MOCK_METHOD(void,
+ GetMetrics,
+ (EchoControl::Metrics * metrics),
+ (const, override));
+ MOCK_METHOD(void, SetAudioBufferDelay, (int delay_ms), (override));
};
} // namespace test
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec3/mock/mock_echo_remover.h b/chromium/third_party/webrtc/modules/audio_processing/aec3/mock/mock_echo_remover.h
index 6c580f3a91d..8a3044bcf11 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec3/mock/mock_echo_remover.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec3/mock/mock_echo_remover.h
@@ -27,16 +27,23 @@ class MockEchoRemover : public EchoRemover {
MockEchoRemover();
virtual ~MockEchoRemover();
- MOCK_METHOD6(ProcessCapture,
- void(EchoPathVariability echo_path_variability,
- bool capture_signal_saturation,
- const absl::optional<DelayEstimate>& delay_estimate,
- RenderBuffer* render_buffer,
- std::vector<std::vector<std::vector<float>>>* linear_output,
- std::vector<std::vector<std::vector<float>>>* capture));
- MOCK_CONST_METHOD0(Delay, absl::optional<int>());
- MOCK_METHOD1(UpdateEchoLeakageStatus, void(bool leakage_detected));
- MOCK_CONST_METHOD1(GetMetrics, void(EchoControl::Metrics* metrics));
+ MOCK_METHOD(void,
+ ProcessCapture,
+ (EchoPathVariability echo_path_variability,
+ bool capture_signal_saturation,
+ const absl::optional<DelayEstimate>& delay_estimate,
+ RenderBuffer* render_buffer,
+ std::vector<std::vector<std::vector<float>>>* linear_output,
+ std::vector<std::vector<std::vector<float>>>* capture),
+ (override));
+ MOCK_METHOD(void,
+ UpdateEchoLeakageStatus,
+ (bool leakage_detected),
+ (override));
+ MOCK_METHOD(void,
+ GetMetrics,
+ (EchoControl::Metrics * metrics),
+ (const, override));
};
} // namespace test
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec3/mock/mock_render_delay_buffer.h b/chromium/third_party/webrtc/modules/audio_processing/aec3/mock/mock_render_delay_buffer.h
index f83c6706327..26f58cfe1ec 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec3/mock/mock_render_delay_buffer.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec3/mock/mock_render_delay_buffer.h
@@ -27,21 +27,26 @@ class MockRenderDelayBuffer : public RenderDelayBuffer {
MockRenderDelayBuffer(int sample_rate_hz, size_t num_channels);
virtual ~MockRenderDelayBuffer();
- MOCK_METHOD0(Reset, void());
- MOCK_METHOD1(Insert,
- RenderDelayBuffer::BufferingEvent(
- const std::vector<std::vector<std::vector<float>>>& block));
- MOCK_METHOD0(PrepareCaptureProcessing, RenderDelayBuffer::BufferingEvent());
- MOCK_METHOD1(AlignFromDelay, bool(size_t delay));
- MOCK_METHOD0(AlignFromExternalDelay, void());
- MOCK_CONST_METHOD0(Delay, size_t());
- MOCK_CONST_METHOD0(MaxDelay, size_t());
- MOCK_METHOD0(GetRenderBuffer, RenderBuffer*());
- MOCK_CONST_METHOD0(GetDownsampledRenderBuffer,
- const DownsampledRenderBuffer&());
- MOCK_CONST_METHOD1(CausalDelay, bool(size_t delay));
- MOCK_METHOD1(SetAudioBufferDelay, void(int delay_ms));
- MOCK_METHOD0(HasReceivedBufferDelay, bool());
+ MOCK_METHOD(void, Reset, (), (override));
+ MOCK_METHOD(RenderDelayBuffer::BufferingEvent,
+ Insert,
+ (const std::vector<std::vector<std::vector<float>>>& block),
+ (override));
+ MOCK_METHOD(RenderDelayBuffer::BufferingEvent,
+ PrepareCaptureProcessing,
+ (),
+ (override));
+ MOCK_METHOD(bool, AlignFromDelay, (size_t delay), (override));
+ MOCK_METHOD(void, AlignFromExternalDelay, (), (override));
+ MOCK_METHOD(size_t, Delay, (), (const, override));
+ MOCK_METHOD(size_t, MaxDelay, (), (const, override));
+ MOCK_METHOD(RenderBuffer*, GetRenderBuffer, (), (override));
+ MOCK_METHOD(const DownsampledRenderBuffer&,
+ GetDownsampledRenderBuffer,
+ (),
+ (const, override));
+ MOCK_METHOD(void, SetAudioBufferDelay, (int delay_ms), (override));
+ MOCK_METHOD(bool, HasReceivedBufferDelay, (), (override));
private:
RenderBuffer* FakeGetRenderBuffer() { return &render_buffer_; }
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec3/mock/mock_render_delay_controller.h b/chromium/third_party/webrtc/modules/audio_processing/aec3/mock/mock_render_delay_controller.h
index e72333eaeb3..67d8baefe6a 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec3/mock/mock_render_delay_controller.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec3/mock/mock_render_delay_controller.h
@@ -25,14 +25,15 @@ class MockRenderDelayController : public RenderDelayController {
MockRenderDelayController();
virtual ~MockRenderDelayController();
- MOCK_METHOD1(Reset, void(bool reset_delay_statistics));
- MOCK_METHOD0(LogRenderCall, void());
- MOCK_METHOD3(GetDelay,
- absl::optional<DelayEstimate>(
- const DownsampledRenderBuffer& render_buffer,
- size_t render_delay_buffer_delay,
- const std::vector<std::vector<float>>& capture));
- MOCK_CONST_METHOD0(HasClockdrift, bool());
+ MOCK_METHOD(void, Reset, (bool reset_delay_statistics), (override));
+ MOCK_METHOD(void, LogRenderCall, (), (override));
+ MOCK_METHOD(absl::optional<DelayEstimate>,
+ GetDelay,
+ (const DownsampledRenderBuffer& render_buffer,
+ size_t render_delay_buffer_delay,
+ const std::vector<std::vector<float>>& capture),
+ (override));
+ MOCK_METHOD(bool, HasClockdrift, (), (const, override));
};
} // namespace test
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec3/refined_filter_update_gain_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/aec3/refined_filter_update_gain_unittest.cc
index 117f34508ee..2393fddd6fa 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec3/refined_filter_update_gain_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec3/refined_filter_update_gain_unittest.cc
@@ -234,7 +234,7 @@ std::string ProduceDebugText(size_t delay, int filter_length_blocks) {
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// Verifies that the check for non-null output gain parameter works.
-TEST(RefinedFilterUpdateGain, NullDataOutputGain) {
+TEST(RefinedFilterUpdateGainDeathTest, NullDataOutputGain) {
ApmDataDumper data_dumper(42);
EchoCanceller3Config config;
RenderSignalAnalyzer analyzer(config);
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec3/render_buffer_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/aec3/render_buffer_unittest.cc
index 6981f6d5108..45595286009 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec3/render_buffer_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec3/render_buffer_unittest.cc
@@ -21,21 +21,21 @@ namespace webrtc {
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// Verifies the check for non-null fft buffer.
-TEST(RenderBuffer, NullExternalFftBuffer) {
+TEST(RenderBufferDeathTest, NullExternalFftBuffer) {
BlockBuffer block_buffer(10, 3, 1, kBlockSize);
SpectrumBuffer spectrum_buffer(10, 1);
EXPECT_DEATH(RenderBuffer(&block_buffer, &spectrum_buffer, nullptr), "");
}
// Verifies the check for non-null spectrum buffer.
-TEST(RenderBuffer, NullExternalSpectrumBuffer) {
+TEST(RenderBufferDeathTest, NullExternalSpectrumBuffer) {
FftBuffer fft_buffer(10, 1);
BlockBuffer block_buffer(10, 3, 1, kBlockSize);
EXPECT_DEATH(RenderBuffer(&block_buffer, nullptr, &fft_buffer), "");
}
// Verifies the check for non-null block buffer.
-TEST(RenderBuffer, NullExternalBlockBuffer) {
+TEST(RenderBufferDeathTest, NullExternalBlockBuffer) {
FftBuffer fft_buffer(10, 1);
SpectrumBuffer spectrum_buffer(10, 1);
EXPECT_DEATH(RenderBuffer(nullptr, &spectrum_buffer, &fft_buffer), "");
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec3/render_delay_buffer_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/aec3/render_delay_buffer_unittest.cc
index 35e81319cf1..efd4a299206 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec3/render_delay_buffer_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec3/render_delay_buffer_unittest.cc
@@ -97,14 +97,14 @@ TEST(RenderDelayBuffer, AlignFromDelay) {
// Verifies the check for feasible delay.
// TODO(peah): Re-enable the test once the issue with memory leaks during DEATH
// tests on test bots has been fixed.
-TEST(RenderDelayBuffer, DISABLED_WrongDelay) {
+TEST(RenderDelayBufferDeathTest, DISABLED_WrongDelay) {
std::unique_ptr<RenderDelayBuffer> delay_buffer(
RenderDelayBuffer::Create(EchoCanceller3Config(), 48000, 1));
EXPECT_DEATH(delay_buffer->AlignFromDelay(21), "");
}
// Verifies the check for the number of bands in the inserted blocks.
-TEST(RenderDelayBuffer, WrongNumberOfBands) {
+TEST(RenderDelayBufferDeathTest, WrongNumberOfBands) {
for (auto rate : {16000, 32000, 48000}) {
for (size_t num_channels : {1, 2, 8}) {
SCOPED_TRACE(ProduceDebugText(rate));
@@ -120,7 +120,7 @@ TEST(RenderDelayBuffer, WrongNumberOfBands) {
}
// Verifies the check for the number of channels in the inserted blocks.
-TEST(RenderDelayBuffer, WrongNumberOfChannels) {
+TEST(RenderDelayBufferDeathTest, WrongNumberOfChannels) {
for (auto rate : {16000, 32000, 48000}) {
for (size_t num_channels : {1, 2, 8}) {
SCOPED_TRACE(ProduceDebugText(rate));
@@ -136,7 +136,7 @@ TEST(RenderDelayBuffer, WrongNumberOfChannels) {
}
// Verifies the check of the length of the inserted blocks.
-TEST(RenderDelayBuffer, WrongBlockLength) {
+TEST(RenderDelayBufferDeathTest, WrongBlockLength) {
for (auto rate : {16000, 32000, 48000}) {
for (size_t num_channels : {1, 2, 8}) {
SCOPED_TRACE(ProduceDebugText(rate));
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec3/render_delay_controller_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/aec3/render_delay_controller_unittest.cc
index fb7b86a75dc..0d3c8564662 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec3/render_delay_controller_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec3/render_delay_controller_unittest.cc
@@ -325,7 +325,7 @@ TEST(RenderDelayController, DISABLED_AlignmentWithJitter) {
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// Verifies the check for the capture signal block size.
-TEST(RenderDelayController, WrongCaptureSize) {
+TEST(RenderDelayControllerDeathTest, WrongCaptureSize) {
std::vector<std::vector<float>> block(
1, std::vector<float>(kBlockSize - 1, 0.f));
EchoCanceller3Config config;
@@ -345,7 +345,7 @@ TEST(RenderDelayController, WrongCaptureSize) {
// Verifies the check for correct sample rate.
// TODO(peah): Re-enable the test once the issue with memory leaks during DEATH
// tests on test bots has been fixed.
-TEST(RenderDelayController, DISABLED_WrongSampleRate) {
+TEST(RenderDelayControllerDeathTest, DISABLED_WrongSampleRate) {
for (auto rate : {-1, 0, 8001, 16001}) {
SCOPED_TRACE(ProduceDebugText(rate));
EchoCanceller3Config config;
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec3/render_signal_analyzer_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/aec3/render_signal_analyzer_unittest.cc
index f40fade8302..7a48cc4b698 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec3/render_signal_analyzer_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec3/render_signal_analyzer_unittest.cc
@@ -117,7 +117,7 @@ std::string ProduceDebugText(size_t num_channels) {
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// Verifies that the check for non-null output parameter works.
-TEST(RenderSignalAnalyzer, NullMaskOutput) {
+TEST(RenderSignalAnalyzerDeathTest, NullMaskOutput) {
RenderSignalAnalyzer analyzer(EchoCanceller3Config{});
EXPECT_DEATH(analyzer.MaskRegionsAroundNarrowBands(nullptr), "");
}
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec3/subtractor_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/aec3/subtractor_unittest.cc
index 72e57879a02..bbc1e4ffc61 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec3/subtractor_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec3/subtractor_unittest.cc
@@ -189,7 +189,7 @@ std::string ProduceDebugText(size_t num_render_channels,
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// Verifies that the check for non data dumper works.
-TEST(Subtractor, NullDataDumper) {
+TEST(SubtractorDeathTest, NullDataDumper) {
EXPECT_DEATH(
Subtractor(EchoCanceller3Config(), 1, 1, nullptr, DetectOptimization()),
"");
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec3/suppression_filter.h b/chromium/third_party/webrtc/modules/audio_processing/aec3/suppression_filter.h
index 281c2c30c48..dcf2292c7f1 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec3/suppression_filter.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec3/suppression_filter.h
@@ -14,7 +14,6 @@
#include <array>
#include <vector>
-#include "common_audio/third_party/ooura/fft_size_128/ooura_fft.h"
#include "modules/audio_processing/aec3/aec3_common.h"
#include "modules/audio_processing/aec3/aec3_fft.h"
#include "modules/audio_processing/aec3/fft_data.h"
@@ -39,7 +38,6 @@ class SuppressionFilter {
const Aec3Optimization optimization_;
const int sample_rate_hz_;
const size_t num_capture_channels_;
- const OouraFft ooura_fft_;
const Aec3Fft fft_;
std::vector<std::vector<std::array<float, kFftLengthBy2>>> e_output_old_;
RTC_DISALLOW_COPY_AND_ASSIGN(SuppressionFilter);
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec3/suppression_filter_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/aec3/suppression_filter_unittest.cc
index b55c719fa9f..a160bec0451 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec3/suppression_filter_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec3/suppression_filter_unittest.cc
@@ -50,7 +50,7 @@ void ProduceSinusoid(int sample_rate_hz,
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// Verifies the check for null suppressor output.
-TEST(SuppressionFilter, NullOutput) {
+TEST(SuppressionFilterDeathTest, NullOutput) {
std::vector<FftData> cn(1);
std::vector<FftData> cn_high_bands(1);
std::vector<FftData> E(1);
@@ -62,7 +62,7 @@ TEST(SuppressionFilter, NullOutput) {
}
// Verifies the check for allowed sample rate.
-TEST(SuppressionFilter, ProperSampleRate) {
+TEST(SuppressionFilterDeathTest, ProperSampleRate) {
EXPECT_DEATH(SuppressionFilter(Aec3Optimization::kNone, 16001, 1), "");
}
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec3/suppression_gain_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/aec3/suppression_gain_unittest.cc
index 0452f2e1fb7..4fb4cd7142b 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec3/suppression_gain_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec3/suppression_gain_unittest.cc
@@ -25,7 +25,7 @@ namespace aec3 {
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// Verifies that the check for non-null output gains works.
-TEST(SuppressionGain, NullOutputGains) {
+TEST(SuppressionGainDeathTest, NullOutputGains) {
std::vector<std::array<float, kFftLengthBy2Plus1>> E2(1, {0.f});
std::vector<std::array<float, kFftLengthBy2Plus1>> R2(1, {0.f});
std::vector<std::array<float, kFftLengthBy2Plus1>> S2(1);
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec_dump/mock_aec_dump.h b/chromium/third_party/webrtc/modules/audio_processing/aec_dump/mock_aec_dump.h
index 65306a7b285..b396739de4c 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec_dump/mock_aec_dump.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec_dump/mock_aec_dump.h
@@ -25,36 +25,54 @@ class MockAecDump : public AecDump {
MockAecDump();
virtual ~MockAecDump();
- MOCK_METHOD2(WriteInitMessage,
- void(const ProcessingConfig& api_format, int64_t time_now_ms));
+ MOCK_METHOD(void,
+ WriteInitMessage,
+ (const ProcessingConfig& api_format, int64_t time_now_ms),
+ (override));
- MOCK_METHOD1(AddCaptureStreamInput,
- void(const AudioFrameView<const float>& src));
- MOCK_METHOD1(AddCaptureStreamOutput,
- void(const AudioFrameView<const float>& src));
- MOCK_METHOD3(AddCaptureStreamInput,
- void(const int16_t* const data,
- int num_channels,
- int samples_per_channel));
- MOCK_METHOD3(AddCaptureStreamOutput,
- void(const int16_t* const data,
- int num_channels,
- int samples_per_channel));
- MOCK_METHOD1(AddAudioProcessingState,
- void(const AudioProcessingState& state));
- MOCK_METHOD0(WriteCaptureStreamMessage, void());
+ MOCK_METHOD(void,
+ AddCaptureStreamInput,
+ (const AudioFrameView<const float>& src),
+ (override));
+ MOCK_METHOD(void,
+ AddCaptureStreamOutput,
+ (const AudioFrameView<const float>& src),
+ (override));
+ MOCK_METHOD(void,
+ AddCaptureStreamInput,
+ (const int16_t* const data,
+ int num_channels,
+ int samples_per_channel),
+ (override));
+ MOCK_METHOD(void,
+ AddCaptureStreamOutput,
+ (const int16_t* const data,
+ int num_channels,
+ int samples_per_channel),
+ (override));
+ MOCK_METHOD(void,
+ AddAudioProcessingState,
+ (const AudioProcessingState& state),
+ (override));
+ MOCK_METHOD(void, WriteCaptureStreamMessage, (), (override));
- MOCK_METHOD3(WriteRenderStreamMessage,
- void(const int16_t* const data,
- int num_channels,
- int samples_per_channel));
- MOCK_METHOD1(WriteRenderStreamMessage,
- void(const AudioFrameView<const float>& src));
+ MOCK_METHOD(void,
+ WriteRenderStreamMessage,
+ (const int16_t* const data,
+ int num_channels,
+ int samples_per_channel),
+ (override));
+ MOCK_METHOD(void,
+ WriteRenderStreamMessage,
+ (const AudioFrameView<const float>& src),
+ (override));
- MOCK_METHOD1(WriteConfig, void(const InternalAPMConfig& config));
+ MOCK_METHOD(void, WriteConfig, (const InternalAPMConfig& config), (override));
- MOCK_METHOD1(WriteRuntimeSetting,
- void(const AudioProcessing::RuntimeSetting& config));
+ MOCK_METHOD(void,
+ WriteRuntimeSetting,
+ (const AudioProcessing::RuntimeSetting& config),
+ (override));
};
} // namespace test
diff --git a/chromium/third_party/webrtc/modules/audio_processing/agc/BUILD.gn b/chromium/third_party/webrtc/modules/audio_processing/agc/BUILD.gn
index 42830c918d8..9ed6399cbf8 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/agc/BUILD.gn
+++ b/chromium/third_party/webrtc/modules/audio_processing/agc/BUILD.gn
@@ -36,8 +36,8 @@ rtc_library("agc") {
"../../../system_wrappers:metrics",
"../agc2:level_estimation_agc",
"../vad",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("level_estimation") {
diff --git a/chromium/third_party/webrtc/modules/audio_processing/agc/agc_manager_direct_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/agc/agc_manager_direct_unittest.cc
index c5e65adec1b..995801a8cb3 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/agc/agc_manager_direct_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/agc/agc_manager_direct_unittest.cc
@@ -37,22 +37,23 @@ const int kMinMicLevel = 12;
class MockGainControl : public GainControl {
public:
virtual ~MockGainControl() {}
- MOCK_METHOD0(Initialize, void());
- MOCK_CONST_METHOD0(is_enabled, bool());
- MOCK_METHOD1(set_stream_analog_level, int(int level));
- MOCK_CONST_METHOD0(stream_analog_level, int());
- MOCK_METHOD1(set_mode, int(Mode mode));
- MOCK_CONST_METHOD0(mode, Mode());
- MOCK_METHOD1(set_target_level_dbfs, int(int level));
- MOCK_CONST_METHOD0(target_level_dbfs, int());
- MOCK_METHOD1(set_compression_gain_db, int(int gain));
- MOCK_CONST_METHOD0(compression_gain_db, int());
- MOCK_METHOD1(enable_limiter, int(bool enable));
- MOCK_CONST_METHOD0(is_limiter_enabled, bool());
- MOCK_METHOD2(set_analog_level_limits, int(int minimum, int maximum));
- MOCK_CONST_METHOD0(analog_level_minimum, int());
- MOCK_CONST_METHOD0(analog_level_maximum, int());
- MOCK_CONST_METHOD0(stream_is_saturated, bool());
+ MOCK_METHOD(int, set_stream_analog_level, (int level), (override));
+ MOCK_METHOD(int, stream_analog_level, (), (const, override));
+ MOCK_METHOD(int, set_mode, (Mode mode), (override));
+ MOCK_METHOD(Mode, mode, (), (const, override));
+ MOCK_METHOD(int, set_target_level_dbfs, (int level), (override));
+ MOCK_METHOD(int, target_level_dbfs, (), (const, override));
+ MOCK_METHOD(int, set_compression_gain_db, (int gain), (override));
+ MOCK_METHOD(int, compression_gain_db, (), (const, override));
+ MOCK_METHOD(int, enable_limiter, (bool enable), (override));
+ MOCK_METHOD(bool, is_limiter_enabled, (), (const, override));
+ MOCK_METHOD(int,
+ set_analog_level_limits,
+ (int minimum, int maximum),
+ (override));
+ MOCK_METHOD(int, analog_level_minimum, (), (const, override));
+ MOCK_METHOD(int, analog_level_maximum, (), (const, override));
+ MOCK_METHOD(bool, stream_is_saturated, (), (const, override));
};
} // namespace
diff --git a/chromium/third_party/webrtc/modules/audio_processing/agc/mock_agc.h b/chromium/third_party/webrtc/modules/audio_processing/agc/mock_agc.h
index 6542acc8d54..0ef41c6e52a 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/agc/mock_agc.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/agc/mock_agc.h
@@ -19,14 +19,14 @@ namespace webrtc {
class MockAgc : public Agc {
public:
virtual ~MockAgc() {}
- MOCK_METHOD3(Process,
- void(const int16_t* audio, size_t length, int sample_rate_hz));
- MOCK_METHOD1(GetRmsErrorDb, bool(int* error));
- MOCK_METHOD0(Reset, void());
- MOCK_METHOD1(set_target_level_dbfs, int(int level));
- MOCK_CONST_METHOD0(target_level_dbfs, int());
- MOCK_METHOD1(EnableStandaloneVad, void(bool enable));
- MOCK_CONST_METHOD0(standalone_vad_enabled, bool());
+ MOCK_METHOD(void,
+ Process,
+ (const int16_t* audio, size_t length, int sample_rate_hz),
+ (override));
+ MOCK_METHOD(bool, GetRmsErrorDb, (int* error), (override));
+ MOCK_METHOD(void, Reset, (), (override));
+ MOCK_METHOD(int, set_target_level_dbfs, (int level), (override));
+ MOCK_METHOD(int, target_level_dbfs, (), (const, override));
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_processing/agc2/BUILD.gn b/chromium/third_party/webrtc/modules/audio_processing/agc2/BUILD.gn
index 8d9bb147311..bfef2252c3e 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/agc2/BUILD.gn
+++ b/chromium/third_party/webrtc/modules/audio_processing/agc2/BUILD.gn
@@ -153,6 +153,7 @@ rtc_library("noise_level_estimator") {
"../../../common_audio/third_party/ooura:fft_size_128",
"../../../rtc_base:checks",
"../../../rtc_base:macromagic",
+ "../../../system_wrappers:cpu_features_api",
]
configs += [ "..:apm_debug_dump" ]
diff --git a/chromium/third_party/webrtc/modules/audio_processing/agc2/rnn_vad/BUILD.gn b/chromium/third_party/webrtc/modules/audio_processing/agc2/rnn_vad/BUILD.gn
index 5d96fad5834..99b4e82488c 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/agc2/rnn_vad/BUILD.gn
+++ b/chromium/third_party/webrtc/modules/audio_processing/agc2/rnn_vad/BUILD.gn
@@ -117,9 +117,9 @@ if (rtc_include_tests) {
"../../../../rtc_base/system:arch",
"../../../../test:test_support",
"../../utility:pffft_wrapper",
- "//third_party/abseil-cpp/absl/memory",
"//third_party/rnnoise:rnn_vad",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
data = unittest_resources
if (is_ios) {
deps += [ ":unittests_bundle_data" ]
diff --git a/chromium/third_party/webrtc/modules/audio_processing/agc2/signal_classifier.cc b/chromium/third_party/webrtc/modules/audio_processing/agc2/signal_classifier.cc
index 8778c494265..38334f7ec5b 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/agc2/signal_classifier.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/agc2/signal_classifier.cc
@@ -19,10 +19,19 @@
#include "modules/audio_processing/agc2/noise_spectrum_estimator.h"
#include "modules/audio_processing/logging/apm_data_dumper.h"
#include "rtc_base/checks.h"
+#include "system_wrappers/include/cpu_features_wrapper.h"
namespace webrtc {
namespace {
+bool IsSse2Available() {
+#if defined(WEBRTC_ARCH_X86_FAMILY)
+ return WebRtc_GetCPUInfo(kSSE2) != 0;
+#else
+ return false;
+#endif
+}
+
void RemoveDcLevel(rtc::ArrayView<float> x) {
RTC_DCHECK_LT(0, x.size());
float mean = std::accumulate(x.data(), x.data() + x.size(), 0.f);
@@ -109,7 +118,8 @@ void SignalClassifier::FrameExtender::ExtendFrame(
SignalClassifier::SignalClassifier(ApmDataDumper* data_dumper)
: data_dumper_(data_dumper),
down_sampler_(data_dumper_),
- noise_spectrum_estimator_(data_dumper_) {
+ noise_spectrum_estimator_(data_dumper_),
+ ooura_fft_(IsSse2Available()) {
Initialize(48000);
}
SignalClassifier::~SignalClassifier() {}
diff --git a/chromium/third_party/webrtc/modules/audio_processing/audio_buffer_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/audio_buffer_unittest.cc
index 7cb51ca5f1a..f3b2ddc6895 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/audio_buffer_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/audio_buffer_unittest.cc
@@ -40,7 +40,7 @@ TEST(AudioBufferTest, SetNumChannelsSetsChannelBuffersNumChannels) {
}
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST(AudioBufferTest, SetNumChannelsDeathTest) {
+TEST(AudioBufferDeathTest, SetNumChannelsDeathTest) {
AudioBuffer ab(kSampleRateHz, kMono, kSampleRateHz, kMono, kSampleRateHz,
kMono);
RTC_EXPECT_DEATH(ab.set_num_channels(kStereo), "num_channels");
diff --git a/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl_unittest.cc
index 71352bc65ab..8f28941cdfc 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl_unittest.cc
@@ -37,13 +37,13 @@ class MockInitialize : public AudioProcessingImpl {
explicit MockInitialize(const webrtc::Config& config)
: AudioProcessingImpl(config) {}
- MOCK_METHOD0(InitializeLocked, int());
+ MOCK_METHOD(int, InitializeLocked, (), (override));
int RealInitializeLocked() RTC_NO_THREAD_SAFETY_ANALYSIS {
return AudioProcessingImpl::InitializeLocked();
}
- MOCK_CONST_METHOD0(AddRef, void());
- MOCK_CONST_METHOD0(Release, rtc::RefCountReleaseStatus());
+ MOCK_METHOD(void, AddRef, (), (const, override));
+ MOCK_METHOD(rtc::RefCountReleaseStatus, Release, (), (const, override));
};
// Creates MockEchoControl instances and provides a raw pointer access to
diff --git a/chromium/third_party/webrtc/modules/audio_processing/audio_processing_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/audio_processing_unittest.cc
index 90413a84bee..93ddc973660 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/audio_processing_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/audio_processing_unittest.cc
@@ -962,49 +962,51 @@ TEST_F(ApmTest, GainControl) {
}
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST_F(ApmTest, GainControlDiesOnTooLowTargetLevelDbfs) {
+using ApmDeathTest = ApmTest;
+
+TEST_F(ApmDeathTest, GainControlDiesOnTooLowTargetLevelDbfs) {
auto config = apm_->GetConfig();
config.gain_controller1.enabled = true;
config.gain_controller1.target_level_dbfs = -1;
EXPECT_DEATH(apm_->ApplyConfig(config), "");
}
-TEST_F(ApmTest, GainControlDiesOnTooHighTargetLevelDbfs) {
+TEST_F(ApmDeathTest, GainControlDiesOnTooHighTargetLevelDbfs) {
auto config = apm_->GetConfig();
config.gain_controller1.enabled = true;
config.gain_controller1.target_level_dbfs = 32;
EXPECT_DEATH(apm_->ApplyConfig(config), "");
}
-TEST_F(ApmTest, GainControlDiesOnTooLowCompressionGainDb) {
+TEST_F(ApmDeathTest, GainControlDiesOnTooLowCompressionGainDb) {
auto config = apm_->GetConfig();
config.gain_controller1.enabled = true;
config.gain_controller1.compression_gain_db = -1;
EXPECT_DEATH(apm_->ApplyConfig(config), "");
}
-TEST_F(ApmTest, GainControlDiesOnTooHighCompressionGainDb) {
+TEST_F(ApmDeathTest, GainControlDiesOnTooHighCompressionGainDb) {
auto config = apm_->GetConfig();
config.gain_controller1.enabled = true;
config.gain_controller1.compression_gain_db = 91;
EXPECT_DEATH(apm_->ApplyConfig(config), "");
}
-TEST_F(ApmTest, GainControlDiesOnTooLowAnalogLevelLowerLimit) {
+TEST_F(ApmDeathTest, GainControlDiesOnTooLowAnalogLevelLowerLimit) {
auto config = apm_->GetConfig();
config.gain_controller1.enabled = true;
config.gain_controller1.analog_level_minimum = -1;
EXPECT_DEATH(apm_->ApplyConfig(config), "");
}
-TEST_F(ApmTest, GainControlDiesOnTooHighAnalogLevelUpperLimit) {
+TEST_F(ApmDeathTest, GainControlDiesOnTooHighAnalogLevelUpperLimit) {
auto config = apm_->GetConfig();
config.gain_controller1.enabled = true;
config.gain_controller1.analog_level_maximum = 65536;
EXPECT_DEATH(apm_->ApplyConfig(config), "");
}
-TEST_F(ApmTest, GainControlDiesOnInvertedAnalogLevelLimits) {
+TEST_F(ApmDeathTest, GainControlDiesOnInvertedAnalogLevelLimits) {
auto config = apm_->GetConfig();
config.gain_controller1.enabled = true;
config.gain_controller1.analog_level_minimum = 512;
@@ -1012,7 +1014,7 @@ TEST_F(ApmTest, GainControlDiesOnInvertedAnalogLevelLimits) {
EXPECT_DEATH(apm_->ApplyConfig(config), "");
}
-TEST_F(ApmTest, ApmDiesOnTooLowAnalogLevel) {
+TEST_F(ApmDeathTest, ApmDiesOnTooLowAnalogLevel) {
auto config = apm_->GetConfig();
config.gain_controller1.enabled = true;
config.gain_controller1.analog_level_minimum = 255;
@@ -1021,7 +1023,7 @@ TEST_F(ApmTest, ApmDiesOnTooLowAnalogLevel) {
EXPECT_DEATH(apm_->set_stream_analog_level(254), "");
}
-TEST_F(ApmTest, ApmDiesOnTooHighAnalogLevel) {
+TEST_F(ApmDeathTest, ApmDiesOnTooHighAnalogLevel) {
auto config = apm_->GetConfig();
config.gain_controller1.enabled = true;
config.gain_controller1.analog_level_minimum = 255;
@@ -2414,7 +2416,7 @@ TEST(RuntimeSettingTest, TestDefaultCtor) {
EXPECT_EQ(AudioProcessing::RuntimeSetting::Type::kNotSpecified, s.type());
}
-TEST(RuntimeSettingTest, TestCapturePreGain) {
+TEST(RuntimeSettingDeathTest, TestCapturePreGain) {
using Type = AudioProcessing::RuntimeSetting::Type;
{
auto s = AudioProcessing::RuntimeSetting::CreateCapturePreGain(1.25f);
@@ -2429,7 +2431,7 @@ TEST(RuntimeSettingTest, TestCapturePreGain) {
#endif
}
-TEST(RuntimeSettingTest, TestCaptureFixedPostGain) {
+TEST(RuntimeSettingDeathTest, TestCaptureFixedPostGain) {
using Type = AudioProcessing::RuntimeSetting::Type;
{
auto s = AudioProcessing::RuntimeSetting::CreateCaptureFixedPostGain(1.25f);
diff --git a/chromium/third_party/webrtc/modules/audio_processing/include/mock_audio_processing.h b/chromium/third_party/webrtc/modules/audio_processing/include/mock_audio_processing.h
index bdae99a91a2..562b23f7d5d 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/include/mock_audio_processing.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/include/mock_audio_processing.h
@@ -24,35 +24,47 @@ namespace test {
class MockCustomProcessing : public CustomProcessing {
public:
virtual ~MockCustomProcessing() {}
- MOCK_METHOD2(Initialize, void(int sample_rate_hz, int num_channels));
- MOCK_METHOD1(Process, void(AudioBuffer* audio));
- MOCK_METHOD1(SetRuntimeSetting,
- void(AudioProcessing::RuntimeSetting setting));
- MOCK_CONST_METHOD0(ToString, std::string());
+ MOCK_METHOD(void,
+ Initialize,
+ (int sample_rate_hz, int num_channels),
+ (override));
+ MOCK_METHOD(void, Process, (AudioBuffer * audio), (override));
+ MOCK_METHOD(void,
+ SetRuntimeSetting,
+ (AudioProcessing::RuntimeSetting setting),
+ (override));
+ MOCK_METHOD(std::string, ToString, (), (const, override));
};
class MockCustomAudioAnalyzer : public CustomAudioAnalyzer {
public:
virtual ~MockCustomAudioAnalyzer() {}
- MOCK_METHOD2(Initialize, void(int sample_rate_hz, int num_channels));
- MOCK_METHOD1(Analyze, void(const AudioBuffer* audio));
- MOCK_CONST_METHOD0(ToString, std::string());
+ MOCK_METHOD(void,
+ Initialize,
+ (int sample_rate_hz, int num_channels),
+ (override));
+ MOCK_METHOD(void, Analyze, (const AudioBuffer* audio), (override));
+ MOCK_METHOD(std::string, ToString, (), (const, override));
};
class MockEchoControl : public EchoControl {
public:
virtual ~MockEchoControl() {}
- MOCK_METHOD1(AnalyzeRender, void(AudioBuffer* render));
- MOCK_METHOD1(AnalyzeCapture, void(AudioBuffer* capture));
- MOCK_METHOD2(ProcessCapture,
- void(AudioBuffer* capture, bool echo_path_change));
- MOCK_METHOD3(ProcessCapture,
- void(AudioBuffer* capture,
- AudioBuffer* linear_output,
- bool echo_path_change));
- MOCK_CONST_METHOD0(GetMetrics, Metrics());
- MOCK_METHOD1(SetAudioBufferDelay, void(int delay_ms));
- MOCK_CONST_METHOD0(ActiveProcessing, bool());
+ MOCK_METHOD(void, AnalyzeRender, (AudioBuffer * render), (override));
+ MOCK_METHOD(void, AnalyzeCapture, (AudioBuffer * capture), (override));
+ MOCK_METHOD(void,
+ ProcessCapture,
+ (AudioBuffer * capture, bool echo_path_change),
+ (override));
+ MOCK_METHOD(void,
+ ProcessCapture,
+ (AudioBuffer * capture,
+ AudioBuffer* linear_output,
+ bool echo_path_change),
+ (override));
+ MOCK_METHOD(Metrics, GetMetrics, (), (const, override));
+ MOCK_METHOD(void, SetAudioBufferDelay, (int delay_ms), (override));
+ MOCK_METHOD(bool, ActiveProcessing, (), (const, override));
};
class MockAudioProcessing : public ::testing::NiceMock<AudioProcessing> {
@@ -61,87 +73,93 @@ class MockAudioProcessing : public ::testing::NiceMock<AudioProcessing> {
virtual ~MockAudioProcessing() {}
- MOCK_METHOD0(Initialize, int());
- MOCK_METHOD6(Initialize,
- int(int capture_input_sample_rate_hz,
- int capture_output_sample_rate_hz,
- int render_sample_rate_hz,
- ChannelLayout capture_input_layout,
- ChannelLayout capture_output_layout,
- ChannelLayout render_input_layout));
- MOCK_METHOD1(Initialize, int(const ProcessingConfig& processing_config));
- MOCK_METHOD1(ApplyConfig, void(const Config& config));
- MOCK_METHOD1(SetExtraOptions, void(const webrtc::Config& config));
- MOCK_CONST_METHOD0(proc_sample_rate_hz, int());
- MOCK_CONST_METHOD0(proc_split_sample_rate_hz, int());
- MOCK_CONST_METHOD0(num_input_channels, size_t());
- MOCK_CONST_METHOD0(num_proc_channels, size_t());
- MOCK_CONST_METHOD0(num_output_channels, size_t());
- MOCK_CONST_METHOD0(num_reverse_channels, size_t());
- MOCK_METHOD1(set_output_will_be_muted, void(bool muted));
- MOCK_METHOD1(SetRuntimeSetting, void(RuntimeSetting setting));
- MOCK_METHOD4(ProcessStream,
- int(const int16_t* const src,
- const StreamConfig& input_config,
- const StreamConfig& output_config,
- int16_t* const dest));
- MOCK_METHOD7(ProcessStream,
- int(const float* const* src,
- size_t samples_per_channel,
- int input_sample_rate_hz,
- ChannelLayout input_layout,
- int output_sample_rate_hz,
- ChannelLayout output_layout,
- float* const* dest));
- MOCK_METHOD4(ProcessStream,
- int(const float* const* src,
- const StreamConfig& input_config,
- const StreamConfig& output_config,
- float* const* dest));
- MOCK_METHOD4(ProcessReverseStream,
- int(const int16_t* const src,
- const StreamConfig& input_config,
- const StreamConfig& output_config,
- int16_t* const dest));
- MOCK_METHOD4(AnalyzeReverseStream,
- int(const float* const* data,
- size_t samples_per_channel,
- int sample_rate_hz,
- ChannelLayout layout));
- MOCK_METHOD2(AnalyzeReverseStream,
- int(const float* const* data,
- const StreamConfig& reverse_config));
- MOCK_METHOD4(ProcessReverseStream,
- int(const float* const* src,
- const StreamConfig& input_config,
- const StreamConfig& output_config,
- float* const* dest));
- MOCK_CONST_METHOD1(
- GetLinearAecOutput,
- bool(rtc::ArrayView<std::array<float, 160>> linear_output));
- MOCK_METHOD1(set_stream_delay_ms, int(int delay));
- MOCK_CONST_METHOD0(stream_delay_ms, int());
- MOCK_CONST_METHOD0(was_stream_delay_set, bool());
- MOCK_METHOD1(set_stream_key_pressed, void(bool key_pressed));
- MOCK_METHOD1(set_delay_offset_ms, void(int offset));
- MOCK_CONST_METHOD0(delay_offset_ms, int());
- MOCK_METHOD1(set_stream_analog_level, void(int));
- MOCK_CONST_METHOD0(recommended_stream_analog_level, int());
- MOCK_METHOD3(CreateAndAttachAecDump,
- bool(const std::string& file_name,
- int64_t max_log_size_bytes,
- rtc::TaskQueue* worker_queue));
- MOCK_METHOD3(CreateAndAttachAecDump,
- bool(FILE* handle,
- int64_t max_log_size_bytes,
- rtc::TaskQueue* worker_queue));
- MOCK_METHOD1(AttachAecDump, void(std::unique_ptr<AecDump>));
- MOCK_METHOD0(DetachAecDump, void());
+ MOCK_METHOD(int, Initialize, (), (override));
+ MOCK_METHOD(int,
+ Initialize,
+ (int capture_input_sample_rate_hz,
+ int capture_output_sample_rate_hz,
+ int render_sample_rate_hz,
+ ChannelLayout capture_input_layout,
+ ChannelLayout capture_output_layout,
+ ChannelLayout render_input_layout),
+ (override));
+ MOCK_METHOD(int,
+ Initialize,
+ (const ProcessingConfig& processing_config),
+ (override));
+ MOCK_METHOD(void, ApplyConfig, (const Config& config), (override));
+ MOCK_METHOD(void,
+ SetExtraOptions,
+ (const webrtc::Config& config),
+ (override));
+ MOCK_METHOD(int, proc_sample_rate_hz, (), (const, override));
+ MOCK_METHOD(int, proc_split_sample_rate_hz, (), (const, override));
+ MOCK_METHOD(size_t, num_input_channels, (), (const, override));
+ MOCK_METHOD(size_t, num_proc_channels, (), (const, override));
+ MOCK_METHOD(size_t, num_output_channels, (), (const, override));
+ MOCK_METHOD(size_t, num_reverse_channels, (), (const, override));
+ MOCK_METHOD(void, set_output_will_be_muted, (bool muted), (override));
+ MOCK_METHOD(void, SetRuntimeSetting, (RuntimeSetting setting), (override));
+ MOCK_METHOD(int,
+ ProcessStream,
+ (const int16_t* const src,
+ const StreamConfig& input_config,
+ const StreamConfig& output_config,
+ int16_t* const dest),
+ (override));
+ MOCK_METHOD(int,
+ ProcessStream,
+ (const float* const* src,
+ const StreamConfig& input_config,
+ const StreamConfig& output_config,
+ float* const* dest),
+ (override));
+ MOCK_METHOD(int,
+ ProcessReverseStream,
+ (const int16_t* const src,
+ const StreamConfig& input_config,
+ const StreamConfig& output_config,
+ int16_t* const dest),
+ (override));
+ MOCK_METHOD(int,
+ AnalyzeReverseStream,
+ (const float* const* data, const StreamConfig& reverse_config),
+ (override));
+ MOCK_METHOD(int,
+ ProcessReverseStream,
+ (const float* const* src,
+ const StreamConfig& input_config,
+ const StreamConfig& output_config,
+ float* const* dest),
+ (override));
+ MOCK_METHOD(bool,
+ GetLinearAecOutput,
+ ((rtc::ArrayView<std::array<float, 160>> linear_output)),
+ (const, override));
+ MOCK_METHOD(int, set_stream_delay_ms, (int delay), (override));
+ MOCK_METHOD(int, stream_delay_ms, (), (const, override));
+ MOCK_METHOD(void, set_stream_key_pressed, (bool key_pressed), (override));
+ MOCK_METHOD(void, set_stream_analog_level, (int), (override));
+ MOCK_METHOD(int, recommended_stream_analog_level, (), (const, override));
+ MOCK_METHOD(bool,
+ CreateAndAttachAecDump,
+ (const std::string& file_name,
+ int64_t max_log_size_bytes,
+ rtc::TaskQueue* worker_queue),
+ (override));
+ MOCK_METHOD(bool,
+ CreateAndAttachAecDump,
+ (FILE * handle,
+ int64_t max_log_size_bytes,
+ rtc::TaskQueue* worker_queue),
+ (override));
+ MOCK_METHOD(void, AttachAecDump, (std::unique_ptr<AecDump>), (override));
+ MOCK_METHOD(void, DetachAecDump, (), (override));
- MOCK_METHOD0(GetStatistics, AudioProcessingStats());
- MOCK_METHOD1(GetStatistics, AudioProcessingStats(bool));
+ MOCK_METHOD(AudioProcessingStats, GetStatistics, (), (override));
+ MOCK_METHOD(AudioProcessingStats, GetStatistics, (bool), (override));
- MOCK_CONST_METHOD0(GetConfig, AudioProcessing::Config());
+ MOCK_METHOD(AudioProcessing::Config, GetConfig, (), (const, override));
};
} // namespace test
diff --git a/chromium/third_party/webrtc/modules/audio_processing/ns/BUILD.gn b/chromium/third_party/webrtc/modules/audio_processing/ns/BUILD.gn
index 442a313e640..7197705c3d0 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/ns/BUILD.gn
+++ b/chromium/third_party/webrtc/modules/audio_processing/ns/BUILD.gn
@@ -64,8 +64,8 @@ rtc_static_library("ns") {
"../../../system_wrappers:field_trial",
"../../../system_wrappers:metrics",
"../utility:cascaded_biquad_filter",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
if (rtc_include_tests) {
@@ -90,8 +90,8 @@ if (rtc_include_tests) {
"../../../system_wrappers:cpu_features_api",
"../../../test:test_support",
"../utility:cascaded_biquad_filter",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
defines = []
diff --git a/chromium/third_party/webrtc/modules/audio_processing/test/aec_dump_based_simulator.cc b/chromium/third_party/webrtc/modules/audio_processing/test/aec_dump_based_simulator.cc
index b3b113da448..f5bd6452e30 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/test/aec_dump_based_simulator.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/test/aec_dump_based_simulator.cc
@@ -66,8 +66,11 @@ bool VerifyFloatBitExactness(const webrtc::audioproc::Stream& msg,
AecDumpBasedSimulator::AecDumpBasedSimulator(
const SimulationSettings& settings,
+ rtc::scoped_refptr<AudioProcessing> audio_processing,
std::unique_ptr<AudioProcessingBuilder> ap_builder)
- : AudioProcessingSimulator(settings, std::move(ap_builder)) {
+ : AudioProcessingSimulator(settings,
+ std::move(audio_processing),
+ std::move(ap_builder)) {
MaybeOpenCallOrderFile();
}
@@ -206,7 +209,8 @@ void AecDumpBasedSimulator::PrepareReverseProcessStreamCall(
}
void AecDumpBasedSimulator::Process() {
- CreateAudioProcessor();
+ ConfigureAudioProcessor();
+
if (settings_.artificial_nearend_filename) {
std::unique_ptr<WavReader> artificial_nearend_file(
new WavReader(settings_.artificial_nearend_filename->c_str()));
@@ -237,7 +241,7 @@ void AecDumpBasedSimulator::Process() {
fclose(dump_input_file_);
}
- DestroyAudioProcessor();
+ DetachAecDump();
}
void AecDumpBasedSimulator::HandleEvent(
diff --git a/chromium/third_party/webrtc/modules/audio_processing/test/aec_dump_based_simulator.h b/chromium/third_party/webrtc/modules/audio_processing/test/aec_dump_based_simulator.h
index ef032d03160..092b82bdbc6 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/test/aec_dump_based_simulator.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/test/aec_dump_based_simulator.h
@@ -33,6 +33,7 @@ namespace test {
class AecDumpBasedSimulator final : public AudioProcessingSimulator {
public:
AecDumpBasedSimulator(const SimulationSettings& settings,
+ rtc::scoped_refptr<AudioProcessing> audio_processing,
std::unique_ptr<AudioProcessingBuilder> ap_builder);
~AecDumpBasedSimulator() override;
diff --git a/chromium/third_party/webrtc/modules/audio_processing/test/audio_processing_simulator.cc b/chromium/third_party/webrtc/modules/audio_processing/test/audio_processing_simulator.cc
index a37a83f1e3c..adbc298e9eb 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/test/audio_processing_simulator.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/test/audio_processing_simulator.cc
@@ -113,10 +113,10 @@ SimulationSettings::~SimulationSettings() = default;
AudioProcessingSimulator::AudioProcessingSimulator(
const SimulationSettings& settings,
+ rtc::scoped_refptr<AudioProcessing> audio_processing,
std::unique_ptr<AudioProcessingBuilder> ap_builder)
: settings_(settings),
- ap_builder_(ap_builder ? std::move(ap_builder)
- : std::make_unique<AudioProcessingBuilder>()),
+ ap_(std::move(audio_processing)),
analog_mic_level_(settings.initial_mic_level),
fake_recording_device_(
settings.initial_mic_level,
@@ -139,6 +139,51 @@ AudioProcessingSimulator::AudioProcessingSimulator(
if (settings_.simulate_mic_gain)
RTC_LOG(LS_VERBOSE) << "Simulating analog mic gain";
+
+ // Create the audio processing object.
+ RTC_CHECK(!(ap_ && ap_builder))
+ << "The AudioProcessing and the AudioProcessingBuilder cannot both be "
+ "specified at the same time.";
+
+ if (ap_) {
+ RTC_CHECK(!settings_.aec_settings_filename);
+ RTC_CHECK(!settings_.print_aec_parameter_values);
+ } else {
+ // Use specied builder if such is provided, otherwise create a new builder.
+ std::unique_ptr<AudioProcessingBuilder> builder =
+ !!ap_builder ? std::move(ap_builder)
+ : std::make_unique<AudioProcessingBuilder>();
+
+ // Create and set an EchoCanceller3Factory if needed.
+ const bool use_aec = settings_.use_aec && *settings_.use_aec;
+ if (use_aec) {
+ EchoCanceller3Config cfg;
+ if (settings_.aec_settings_filename) {
+ if (settings_.use_verbose_logging) {
+ std::cout << "Reading AEC Parameters from JSON input." << std::endl;
+ }
+ cfg = ReadAec3ConfigFromJsonFile(*settings_.aec_settings_filename);
+ }
+
+ if (settings_.linear_aec_output_filename) {
+ cfg.filter.export_linear_aec_output = true;
+ }
+
+ if (settings_.print_aec_parameter_values) {
+ if (!settings_.use_quiet_output) {
+ std::cout << "AEC settings:" << std::endl;
+ }
+ std::cout << Aec3ConfigToJsonString(cfg) << std::endl;
+ }
+
+ auto echo_control_factory = std::make_unique<EchoCanceller3Factory>(cfg);
+ builder->SetEchoControlFactory(std::move(echo_control_factory));
+ }
+
+ // Create an audio processing object.
+ ap_ = builder->Create();
+ RTC_CHECK(ap_);
+ }
}
AudioProcessingSimulator::~AudioProcessingSimulator() {
@@ -369,16 +414,14 @@ void AudioProcessingSimulator::SetupOutput() {
++output_reset_counter_;
}
-void AudioProcessingSimulator::DestroyAudioProcessor() {
+void AudioProcessingSimulator::DetachAecDump() {
if (settings_.aec_dump_output_filename) {
ap_->DetachAecDump();
}
}
-void AudioProcessingSimulator::CreateAudioProcessor() {
- Config config;
+void AudioProcessingSimulator::ConfigureAudioProcessor() {
AudioProcessing::Config apm_config;
- std::unique_ptr<EchoControlFactory> echo_control_factory;
if (settings_.use_ts) {
apm_config.transient_suppression.enabled = *settings_.use_ts;
}
@@ -421,29 +464,6 @@ void AudioProcessingSimulator::CreateAudioProcessor() {
apm_config.echo_canceller.export_linear_aec_output =
!!settings_.linear_aec_output_filename;
- if (use_aec) {
- EchoCanceller3Config cfg;
- if (settings_.aec_settings_filename) {
- if (settings_.use_verbose_logging) {
- std::cout << "Reading AEC Parameters from JSON input." << std::endl;
- }
- cfg = ReadAec3ConfigFromJsonFile(*settings_.aec_settings_filename);
- }
-
- if (settings_.linear_aec_output_filename) {
- cfg.filter.export_linear_aec_output = true;
- }
-
- echo_control_factory.reset(new EchoCanceller3Factory(cfg));
-
- if (settings_.print_aec_parameter_values) {
- if (!settings_.use_quiet_output) {
- std::cout << "AEC settings:" << std::endl;
- }
- std::cout << Aec3ConfigToJsonString(cfg) << std::endl;
- }
- }
-
if (settings_.use_hpf) {
apm_config.high_pass_filter.enabled = *settings_.use_hpf;
}
@@ -512,14 +532,6 @@ void AudioProcessingSimulator::CreateAudioProcessor() {
*settings_.ns_analysis_on_linear_aec_output;
}
- RTC_CHECK(ap_builder_);
- if (echo_control_factory) {
- ap_builder_->SetEchoControlFactory(std::move(echo_control_factory));
- }
- ap_.reset((*ap_builder_).Create(config));
-
- RTC_CHECK(ap_);
-
ap_->ApplyConfig(apm_config);
if (settings_.use_ts) {
diff --git a/chromium/third_party/webrtc/modules/audio_processing/test/audio_processing_simulator.h b/chromium/third_party/webrtc/modules/audio_processing/test/audio_processing_simulator.h
index fa6efc2842c..8579f4b4d0c 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/test/audio_processing_simulator.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/test/audio_processing_simulator.h
@@ -150,8 +150,8 @@ struct SimulationSettings {
// Provides common functionality for performing audioprocessing simulations.
class AudioProcessingSimulator {
public:
-
AudioProcessingSimulator(const SimulationSettings& settings,
+ rtc::scoped_refptr<AudioProcessing> audio_processing,
std::unique_ptr<AudioProcessingBuilder> ap_builder);
virtual ~AudioProcessingSimulator();
@@ -174,8 +174,8 @@ class AudioProcessingSimulator {
protected:
void ProcessStream(bool fixed_interface);
void ProcessReverseStream(bool fixed_interface);
- void CreateAudioProcessor();
- void DestroyAudioProcessor();
+ void ConfigureAudioProcessor();
+ void DetachAecDump();
void SetupBuffersConfigsOutputs(int input_sample_rate_hz,
int output_sample_rate_hz,
int reverse_input_sample_rate_hz,
@@ -186,8 +186,7 @@ class AudioProcessingSimulator {
int reverse_output_num_channels);
const SimulationSettings settings_;
- std::unique_ptr<AudioProcessing> ap_;
- std::unique_ptr<AudioProcessingBuilder> ap_builder_;
+ rtc::scoped_refptr<AudioProcessing> ap_;
std::unique_ptr<ChannelBuffer<float>> in_buf_;
std::unique_ptr<ChannelBuffer<float>> out_buf_;
diff --git a/chromium/third_party/webrtc/modules/audio_processing/test/audioproc_float_impl.cc b/chromium/third_party/webrtc/modules/audio_processing/test/audioproc_float_impl.cc
index d9a4227eb72..ab395f1018c 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/test/audioproc_float_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/test/audioproc_float_impl.cc
@@ -457,7 +457,10 @@ void ReportConditionalErrorAndExit(bool condition, const std::string& message) {
}
}
-void PerformBasicParameterSanityChecks(const SimulationSettings& settings) {
+void PerformBasicParameterSanityChecks(
+ const SimulationSettings& settings,
+ bool pre_constructed_ap_provided,
+ bool pre_constructed_ap_builder_provided) {
if (settings.input_filename || settings.reverse_input_filename) {
ReportConditionalErrorAndExit(
!!settings.aec_dump_input_filename,
@@ -624,21 +627,41 @@ void PerformBasicParameterSanityChecks(const SimulationSettings& settings) {
settings.pre_amplifier_gain_factor.has_value(),
"Error: --pre_amplifier_gain_factor needs --pre_amplifier to be "
"specified and set.\n");
-}
-} // namespace
+ ReportConditionalErrorAndExit(
+ pre_constructed_ap_provided && pre_constructed_ap_builder_provided,
+ "Error: The AudioProcessing and the AudioProcessingBuilder cannot both "
+ "be specified at the same time.\n");
-int AudioprocFloatImpl(std::unique_ptr<AudioProcessingBuilder> ap_builder,
- int argc,
- char* argv[],
- absl::string_view input_aecdump,
- std::vector<float>* processed_capture_samples) {
+ ReportConditionalErrorAndExit(
+ settings.aec_settings_filename && pre_constructed_ap_provided,
+ "Error: The aec_settings_filename cannot be specified when a "
+ "pre-constructed audio processing object is provided.\n");
+
+ ReportConditionalErrorAndExit(
+      settings.print_aec_parameter_values && pre_constructed_ap_provided,
+ "Error: The print_aec_parameter_values cannot be set when a "
+ "pre-constructed audio processing object is provided.\n");
+
+ if (settings.linear_aec_output_filename && pre_constructed_ap_provided) {
+ std::cout << "Warning: For the linear AEC output to be stored, this must "
+ "be configured in the AEC that is part of the provided "
+ "AudioProcessing object."
+ << std::endl;
+ }
+}
+
+int RunSimulation(rtc::scoped_refptr<AudioProcessing> audio_processing,
+ std::unique_ptr<AudioProcessingBuilder> ap_builder,
+ int argc,
+ char* argv[],
+ absl::string_view input_aecdump,
+ std::vector<float>* processed_capture_samples) {
std::vector<char*> args = absl::ParseCommandLine(argc, argv);
if (args.size() != 1) {
printf("%s", kUsageDescription);
return 1;
}
-
// InitFieldTrialsFromString stores the char*, so the char array must
// outlive the application.
const std::string field_trials = absl::GetFlag(FLAGS_force_fieldtrials);
@@ -650,13 +673,15 @@ int AudioprocFloatImpl(std::unique_ptr<AudioProcessingBuilder> ap_builder,
settings.processed_capture_samples = processed_capture_samples;
RTC_CHECK(settings.processed_capture_samples);
}
- PerformBasicParameterSanityChecks(settings);
+ PerformBasicParameterSanityChecks(settings, !!audio_processing, !!ap_builder);
std::unique_ptr<AudioProcessingSimulator> processor;
if (settings.aec_dump_input_filename || settings.aec_dump_input_string) {
- processor.reset(new AecDumpBasedSimulator(settings, std::move(ap_builder)));
+ processor.reset(new AecDumpBasedSimulator(
+ settings, std::move(audio_processing), std::move(ap_builder)));
} else {
- processor.reset(new WavBasedSimulator(settings, std::move(ap_builder)));
+ processor.reset(new WavBasedSimulator(settings, std::move(audio_processing),
+ std::move(ap_builder)));
}
processor->Process();
@@ -680,5 +705,24 @@ int AudioprocFloatImpl(std::unique_ptr<AudioProcessingBuilder> ap_builder,
return 0;
}
+} // namespace
+
+int AudioprocFloatImpl(rtc::scoped_refptr<AudioProcessing> audio_processing,
+ int argc,
+ char* argv[]) {
+ return RunSimulation(
+ std::move(audio_processing), /*ap_builder=*/nullptr, argc, argv,
+ /*input_aecdump=*/"", /*processed_capture_samples=*/nullptr);
+}
+
+int AudioprocFloatImpl(std::unique_ptr<AudioProcessingBuilder> ap_builder,
+ int argc,
+ char* argv[],
+ absl::string_view input_aecdump,
+ std::vector<float>* processed_capture_samples) {
+ return RunSimulation(/*audio_processing=*/nullptr, std::move(ap_builder),
+ argc, argv, input_aecdump, processed_capture_samples);
+}
+
} // namespace test
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_processing/test/audioproc_float_impl.h b/chromium/third_party/webrtc/modules/audio_processing/test/audioproc_float_impl.h
index 9a9013c6445..0687c43a5d6 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/test/audioproc_float_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/test/audioproc_float_impl.h
@@ -24,6 +24,21 @@ namespace test {
// via the |argv| argument. Pass |processed_capture_samples| to write in it the
// samples processed on the capture side; if |processed_capture_samples| is not
// passed, the output file can optionally be specified via the |argv| argument.
+// Any audio_processing object specified in the input is used for the
+// simulation. Note that when the audio_processing object is specified all
+// functionality that relies on using the internal builder is deactivated,
+// since the AudioProcessing object is already created and the builder is not
+// used in the simulation.
+int AudioprocFloatImpl(rtc::scoped_refptr<AudioProcessing> audio_processing,
+ int argc,
+ char* argv[]);
+
+// This function implements the audio processing simulation utility. Pass
+// |input_aecdump| to provide the content of an AEC dump file as a string; if
+// |input_aecdump| is not passed, a WAV or AEC input dump file must be specified
+// via the |argv| argument. Pass |processed_capture_samples| to write in it the
+// samples processed on the capture side; if |processed_capture_samples| is not
+// passed, the output file can optionally be specified via the |argv| argument.
int AudioprocFloatImpl(std::unique_ptr<AudioProcessingBuilder> ap_builder,
int argc,
char* argv[],
diff --git a/chromium/third_party/webrtc/modules/audio_processing/test/conversational_speech/BUILD.gn b/chromium/third_party/webrtc/modules/audio_processing/test/conversational_speech/BUILD.gn
index fb532befb95..b311abdbd17 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/test/conversational_speech/BUILD.gn
+++ b/chromium/third_party/webrtc/modules/audio_processing/test/conversational_speech/BUILD.gn
@@ -68,6 +68,6 @@ rtc_library("unittest") {
"../../../../test:fileutils",
"../../../../test:test_support",
"//testing/gtest",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
diff --git a/chromium/third_party/webrtc/modules/audio_processing/test/conversational_speech/mock_wavreader.h b/chromium/third_party/webrtc/modules/audio_processing/test/conversational_speech/mock_wavreader.h
index 591299eefe5..94e20b9ec60 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/test/conversational_speech/mock_wavreader.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/test/conversational_speech/mock_wavreader.h
@@ -28,12 +28,12 @@ class MockWavReader : public WavReaderInterface {
~MockWavReader();
// TODO(alessiob): use ON_CALL to return random samples if needed.
- MOCK_METHOD1(ReadFloatSamples, size_t(rtc::ArrayView<float>));
- MOCK_METHOD1(ReadInt16Samples, size_t(rtc::ArrayView<int16_t>));
+ MOCK_METHOD(size_t, ReadFloatSamples, (rtc::ArrayView<float>), (override));
+ MOCK_METHOD(size_t, ReadInt16Samples, (rtc::ArrayView<int16_t>), (override));
- MOCK_CONST_METHOD0(SampleRate, int());
- MOCK_CONST_METHOD0(NumChannels, size_t());
- MOCK_CONST_METHOD0(NumSamples, size_t());
+ MOCK_METHOD(int, SampleRate, (), (const, override));
+ MOCK_METHOD(size_t, NumChannels, (), (const, override));
+ MOCK_METHOD(size_t, NumSamples, (), (const, override));
private:
const int sample_rate_;
diff --git a/chromium/third_party/webrtc/modules/audio_processing/test/conversational_speech/mock_wavreader_factory.h b/chromium/third_party/webrtc/modules/audio_processing/test/conversational_speech/mock_wavreader_factory.h
index e84f5f35ccb..c2db85f6f68 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/test/conversational_speech/mock_wavreader_factory.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/test/conversational_speech/mock_wavreader_factory.h
@@ -36,8 +36,10 @@ class MockWavReaderFactory : public WavReaderAbstractFactory {
explicit MockWavReaderFactory(const Params& default_params);
~MockWavReaderFactory();
- MOCK_CONST_METHOD1(Create,
- std::unique_ptr<WavReaderInterface>(const std::string&));
+ MOCK_METHOD(std::unique_ptr<WavReaderInterface>,
+ Create,
+ (const std::string&),
+ (const, override));
private:
// Creates a MockWavReader instance using the parameters in
diff --git a/chromium/third_party/webrtc/modules/audio_processing/test/echo_control_mock.h b/chromium/third_party/webrtc/modules/audio_processing/test/echo_control_mock.h
index 95d3be5cdfa..927de43ae09 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/test/echo_control_mock.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/test/echo_control_mock.h
@@ -20,17 +20,21 @@ class AudioBuffer;
class MockEchoControl : public EchoControl {
public:
- MOCK_METHOD1(AnalyzeRender, void(AudioBuffer* render));
- MOCK_METHOD1(AnalyzeCapture, void(AudioBuffer* capture));
- MOCK_METHOD2(ProcessCapture,
- void(AudioBuffer* capture, bool echo_path_change));
- MOCK_METHOD3(ProcessCapture,
- void(AudioBuffer* capture,
- AudioBuffer* linear_output,
- bool echo_path_change));
- MOCK_CONST_METHOD0(GetMetrics, EchoControl::Metrics());
- MOCK_METHOD1(SetAudioBufferDelay, void(int delay_ms));
- MOCK_CONST_METHOD0(ActiveProcessing, bool());
+ MOCK_METHOD(void, AnalyzeRender, (AudioBuffer * render), (override));
+ MOCK_METHOD(void, AnalyzeCapture, (AudioBuffer * capture), (override));
+ MOCK_METHOD(void,
+ ProcessCapture,
+ (AudioBuffer * capture, bool echo_path_change),
+ (override));
+ MOCK_METHOD(void,
+ ProcessCapture,
+ (AudioBuffer * capture,
+ AudioBuffer* linear_output,
+ bool echo_path_change),
+ (override));
+ MOCK_METHOD(EchoControl::Metrics, GetMetrics, (), (const, override));
+ MOCK_METHOD(void, SetAudioBufferDelay, (int delay_ms), (override));
+ MOCK_METHOD(bool, ActiveProcessing, (), (const, override));
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_processing/test/wav_based_simulator.cc b/chromium/third_party/webrtc/modules/audio_processing/test/wav_based_simulator.cc
index 7179fc3431f..75946fb3fa6 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/test/wav_based_simulator.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/test/wav_based_simulator.cc
@@ -56,8 +56,18 @@ WavBasedSimulator::GetCustomEventChain(const std::string& filename) {
WavBasedSimulator::WavBasedSimulator(
const SimulationSettings& settings,
+ rtc::scoped_refptr<AudioProcessing> audio_processing,
std::unique_ptr<AudioProcessingBuilder> ap_builder)
- : AudioProcessingSimulator(settings, std::move(ap_builder)) {}
+ : AudioProcessingSimulator(settings,
+ std::move(audio_processing),
+ std::move(ap_builder)) {
+ if (settings_.call_order_input_filename) {
+ call_chain_ = WavBasedSimulator::GetCustomEventChain(
+ *settings_.call_order_input_filename);
+ } else {
+ call_chain_ = WavBasedSimulator::GetDefaultEventChain();
+ }
+}
WavBasedSimulator::~WavBasedSimulator() = default;
@@ -89,13 +99,7 @@ void WavBasedSimulator::PrepareReverseProcessStreamCall() {
}
void WavBasedSimulator::Process() {
- if (settings_.call_order_input_filename) {
- call_chain_ = WavBasedSimulator::GetCustomEventChain(
- *settings_.call_order_input_filename);
- } else {
- call_chain_ = WavBasedSimulator::GetDefaultEventChain();
- }
- CreateAudioProcessor();
+ ConfigureAudioProcessor();
Initialize();
@@ -120,7 +124,7 @@ void WavBasedSimulator::Process() {
call_chain_index = (call_chain_index + 1) % call_chain_.size();
}
- DestroyAudioProcessor();
+ DetachAecDump();
}
bool WavBasedSimulator::HandleProcessStreamCall() {
diff --git a/chromium/third_party/webrtc/modules/audio_processing/test/wav_based_simulator.h b/chromium/third_party/webrtc/modules/audio_processing/test/wav_based_simulator.h
index 991f1dbaadf..3adbe7022c4 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/test/wav_based_simulator.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/test/wav_based_simulator.h
@@ -23,6 +23,7 @@ namespace test {
class WavBasedSimulator final : public AudioProcessingSimulator {
public:
WavBasedSimulator(const SimulationSettings& settings,
+ rtc::scoped_refptr<AudioProcessing> audio_processing,
std::unique_ptr<AudioProcessingBuilder> ap_builder);
~WavBasedSimulator() override;
diff --git a/chromium/third_party/webrtc/modules/audio_processing/utility/cascaded_biquad_filter_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/utility/cascaded_biquad_filter_unittest.cc
index 989e362a499..ff7022dba41 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/utility/cascaded_biquad_filter_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/utility/cascaded_biquad_filter_unittest.cc
@@ -103,7 +103,7 @@ TEST(CascadedBiquadFilter, TransparentConfiguration) {
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// Verifies that the check of the lengths for the input and output works for the
// non-in-place call.
-TEST(CascadedBiquadFilter, InputSizeCheckVerification) {
+TEST(CascadedBiquadFilterDeathTest, InputSizeCheckVerification) {
const std::vector<float> input = CreateInputWithIncreasingValues(10);
std::vector<float> output(input.size() - 1);
diff --git a/chromium/third_party/webrtc/modules/audio_processing/utility/pffft_wrapper_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/utility/pffft_wrapper_unittest.cc
index 9aed548934e..2ad6849cd47 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/utility/pffft_wrapper_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/utility/pffft_wrapper_unittest.cc
@@ -125,23 +125,24 @@ TEST(PffftTest, CreateWrapperWithValidSize) {
#if !defined(NDEBUG) && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-class PffftInvalidSizeTest : public ::testing::Test,
- public ::testing::WithParamInterface<size_t> {};
+class PffftInvalidSizeDeathTest : public ::testing::Test,
+ public ::testing::WithParamInterface<size_t> {
+};
-TEST_P(PffftInvalidSizeTest, DoNotCreateRealWrapper) {
+TEST_P(PffftInvalidSizeDeathTest, DoNotCreateRealWrapper) {
size_t fft_size = GetParam();
ASSERT_FALSE(Pffft::IsValidFftSize(fft_size, Pffft::FftType::kReal));
EXPECT_DEATH(CreatePffftWrapper(fft_size, Pffft::FftType::kReal), "");
}
-TEST_P(PffftInvalidSizeTest, DoNotCreateComplexWrapper) {
+TEST_P(PffftInvalidSizeDeathTest, DoNotCreateComplexWrapper) {
size_t fft_size = GetParam();
ASSERT_FALSE(Pffft::IsValidFftSize(fft_size, Pffft::FftType::kComplex));
EXPECT_DEATH(CreatePffftWrapper(fft_size, Pffft::FftType::kComplex), "");
}
INSTANTIATE_TEST_SUITE_P(PffftTest,
- PffftInvalidSizeTest,
+ PffftInvalidSizeDeathTest,
::testing::Values(17,
33,
65,
diff --git a/chromium/third_party/webrtc/modules/congestion_controller/goog_cc/BUILD.gn b/chromium/third_party/webrtc/modules/congestion_controller/goog_cc/BUILD.gn
index fa95bc186c8..52daad2bce8 100644
--- a/chromium/third_party/webrtc/modules/congestion_controller/goog_cc/BUILD.gn
+++ b/chromium/third_party/webrtc/modules/congestion_controller/goog_cc/BUILD.gn
@@ -51,6 +51,8 @@ rtc_library("goog_cc") {
"../../../rtc_base/experiments:rate_control_settings",
"../../../system_wrappers",
"../../remote_bitrate_estimator",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -64,8 +66,8 @@ rtc_library("link_capacity_estimator") {
deps = [
"../../../api/units:data_rate",
"../../../rtc_base:safe_minmax",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("pushback_controller") {
@@ -79,6 +81,8 @@ rtc_library("pushback_controller") {
"../../../api/units:data_size",
"../../../rtc_base:checks",
"../../../rtc_base/experiments:rate_control_settings",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -100,8 +104,8 @@ rtc_library("alr_detector") {
"../../../rtc_base/experiments:alr_experiment",
"../../../rtc_base/experiments:field_trial_parser",
"../../pacing:interval_budget",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("estimators") {
configs += [ ":bwe_test_logging" ]
@@ -137,6 +141,8 @@ rtc_library("estimators") {
"../../../rtc_base:safe_minmax",
"../../../rtc_base/experiments:field_trial_parser",
"../../remote_bitrate_estimator",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -163,6 +169,8 @@ rtc_library("loss_based_controller") {
"../../../system_wrappers:field_trial",
"../../../system_wrappers:metrics",
"../../remote_bitrate_estimator",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -189,6 +197,8 @@ rtc_library("delay_based_bwe") {
"../../../system_wrappers:metrics",
"../../pacing",
"../../remote_bitrate_estimator",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -216,6 +226,8 @@ rtc_library("probe_controller") {
"../../../rtc_base/experiments:field_trial_parser",
"../../../rtc_base/system:unused",
"../../../system_wrappers:metrics",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -240,8 +252,8 @@ if (rtc_include_tests) {
"../../../rtc_base:checks",
"../../../test/logging:log_writer",
"../../remote_bitrate_estimator",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("goog_cc_unittests") {
testonly = true
diff --git a/chromium/third_party/webrtc/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_unittest.cc b/chromium/third_party/webrtc/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_unittest.cc
index 9031c5d272e..e5b733b1196 100644
--- a/chromium/third_party/webrtc/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_unittest.cc
+++ b/chromium/third_party/webrtc/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_unittest.cc
@@ -35,10 +35,12 @@ constexpr size_t kPayloadSize = 10;
class MockBitrateEstimator : public BitrateEstimator {
public:
using BitrateEstimator::BitrateEstimator;
- MOCK_METHOD3(Update,
- void(Timestamp at_time, DataSize data_size, bool in_alr));
- MOCK_CONST_METHOD0(bitrate, absl::optional<DataRate>());
- MOCK_METHOD0(ExpectFastRateChange, void());
+ MOCK_METHOD(void,
+ Update,
+ (Timestamp at_time, DataSize data_size, bool in_alr),
+ (override));
+ MOCK_METHOD(absl::optional<DataRate>, bitrate, (), (const, override));
+ MOCK_METHOD(void, ExpectFastRateChange, (), (override));
};
struct AcknowledgedBitrateEstimatorTestStates {
diff --git a/chromium/third_party/webrtc/modules/congestion_controller/pcc/BUILD.gn b/chromium/third_party/webrtc/modules/congestion_controller/pcc/BUILD.gn
index d0111725d29..2f378769e7a 100644
--- a/chromium/third_party/webrtc/modules/congestion_controller/pcc/BUILD.gn
+++ b/chromium/third_party/webrtc/modules/congestion_controller/pcc/BUILD.gn
@@ -37,8 +37,8 @@ rtc_library("pcc_controller") {
"../../../api/units:timestamp",
"../../../rtc_base:checks",
"../../../rtc_base:rtc_base_approved",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("monitor_interval") {
@@ -94,8 +94,8 @@ rtc_library("bitrate_controller") {
"../../../api/transport:network_control",
"../../../api/units:data_rate",
"../../../rtc_base:rtc_base_approved",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
if (rtc_include_tests) {
diff --git a/chromium/third_party/webrtc/modules/congestion_controller/pcc/bitrate_controller_unittest.cc b/chromium/third_party/webrtc/modules/congestion_controller/pcc/bitrate_controller_unittest.cc
index 6693b7a833c..957d99b1ded 100644
--- a/chromium/third_party/webrtc/modules/congestion_controller/pcc/bitrate_controller_unittest.cc
+++ b/chromium/third_party/webrtc/modules/congestion_controller/pcc/bitrate_controller_unittest.cc
@@ -67,8 +67,10 @@ std::vector<PacketResult> CreatePacketResults(
class MockUtilityFunction : public PccUtilityFunctionInterface {
public:
- MOCK_CONST_METHOD1(Compute,
- double(const PccMonitorInterval& monitor_interval));
+ MOCK_METHOD(double,
+ Compute,
+ (const PccMonitorInterval& monitor_interval),
+ (const, override));
};
} // namespace
diff --git a/chromium/third_party/webrtc/modules/congestion_controller/receive_side_congestion_controller_unittest.cc b/chromium/third_party/webrtc/modules/congestion_controller/receive_side_congestion_controller_unittest.cc
index 95143f71752..b5846237eec 100644
--- a/chromium/third_party/webrtc/modules/congestion_controller/receive_side_congestion_controller_unittest.cc
+++ b/chromium/third_party/webrtc/modules/congestion_controller/receive_side_congestion_controller_unittest.cc
@@ -37,8 +37,10 @@ uint32_t AbsSendTime(int64_t t, int64_t denom) {
class MockPacketRouter : public PacketRouter {
public:
- MOCK_METHOD2(OnReceiveBitrateChanged,
- void(const std::vector<uint32_t>& ssrcs, uint32_t bitrate));
+ MOCK_METHOD(void,
+ OnReceiveBitrateChanged,
+ (const std::vector<uint32_t>& ssrcs, uint32_t bitrate),
+ (override));
};
const uint32_t kInitialBitrateBps = 60000;
diff --git a/chromium/third_party/webrtc/modules/congestion_controller/rtp/BUILD.gn b/chromium/third_party/webrtc/modules/congestion_controller/rtp/BUILD.gn
index b444f5495b9..45c53edcb64 100644
--- a/chromium/third_party/webrtc/modules/congestion_controller/rtp/BUILD.gn
+++ b/chromium/third_party/webrtc/modules/congestion_controller/rtp/BUILD.gn
@@ -33,8 +33,8 @@ rtc_library("control_handler") {
"../../../rtc_base/synchronization:sequence_checker",
"../../../system_wrappers:field_trial",
"../../pacing",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
if (!build_with_mozilla) {
deps += [ "../../../rtc_base" ]
@@ -61,6 +61,8 @@ rtc_library("transport_feedback") {
"../../../system_wrappers",
"../../../system_wrappers:field_trial",
"../../rtp_rtcp:rtp_rtcp_format",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/types:optional",
]
diff --git a/chromium/third_party/webrtc/modules/congestion_controller/rtp/transport_feedback_adapter_unittest.cc b/chromium/third_party/webrtc/modules/congestion_controller/rtp/transport_feedback_adapter_unittest.cc
index 1c74b196d83..3849cb37077 100644
--- a/chromium/third_party/webrtc/modules/congestion_controller/rtp/transport_feedback_adapter_unittest.cc
+++ b/chromium/third_party/webrtc/modules/congestion_controller/rtp/transport_feedback_adapter_unittest.cc
@@ -83,8 +83,10 @@ namespace test {
class MockStreamFeedbackObserver : public webrtc::StreamFeedbackObserver {
public:
- MOCK_METHOD1(OnPacketFeedbackVector,
- void(std::vector<StreamPacketInfo> packet_feedback_vector));
+ MOCK_METHOD(void,
+ OnPacketFeedbackVector,
+ (std::vector<StreamPacketInfo> packet_feedback_vector),
+ (override));
};
class TransportFeedbackAdapterTest : public ::testing::Test {
diff --git a/chromium/third_party/webrtc/modules/congestion_controller/rtp/transport_feedback_demuxer_unittest.cc b/chromium/third_party/webrtc/modules/congestion_controller/rtp/transport_feedback_demuxer_unittest.cc
index dce52de5574..6514a4eda7c 100644
--- a/chromium/third_party/webrtc/modules/congestion_controller/rtp/transport_feedback_demuxer_unittest.cc
+++ b/chromium/third_party/webrtc/modules/congestion_controller/rtp/transport_feedback_demuxer_unittest.cc
@@ -21,8 +21,10 @@ static constexpr uint32_t kSsrc = 8492;
class MockStreamFeedbackObserver : public webrtc::StreamFeedbackObserver {
public:
- MOCK_METHOD1(OnPacketFeedbackVector,
- void(std::vector<StreamPacketInfo> packet_feedback_vector));
+ MOCK_METHOD(void,
+ OnPacketFeedbackVector,
+ (std::vector<StreamPacketInfo> packet_feedback_vector),
+ (override));
};
RtpPacketSendInfo CreatePacket(uint32_t ssrc,
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/BUILD.gn b/chromium/third_party/webrtc/modules/desktop_capture/BUILD.gn
index 4f93c246fe6..e49e8381f6a 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/BUILD.gn
+++ b/chromium/third_party/webrtc/modules/desktop_capture/BUILD.gn
@@ -42,7 +42,8 @@ rtc_library("primitives") {
]
if (!build_with_mozilla) {
- deps += [ "../../rtc_base" ] # TODO(kjellander): Cleanup in bugs.webrtc.org/3806.
+ deps += [ "../../rtc_base" ] # TODO(kjellander): Cleanup in
+ # bugs.webrtc.org/3806.
}
}
@@ -224,7 +225,8 @@ if (is_linux) {
rtc_source_set("desktop_capture") {
visibility = [ "*" ]
- public_deps = [ ":desktop_capture_generic" ] # no-presubmit-check TODO(webrtc:8603)
+ public_deps = # no-presubmit-check TODO(webrtc:8603)
+ [ ":desktop_capture_generic" ]
if (is_mac) {
public_deps += [ ":desktop_capture_objc" ]
}
@@ -480,6 +482,8 @@ rtc_library("desktop_capture_generic") {
"../../system_wrappers",
"../../system_wrappers:cpu_features_api",
"../../system_wrappers:metrics",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings",
]
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mock_desktop_capturer_callback.h b/chromium/third_party/webrtc/modules/desktop_capture/mock_desktop_capturer_callback.h
index 659239ab9db..6530dc5542a 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/mock_desktop_capturer_callback.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/mock_desktop_capturer_callback.h
@@ -22,9 +22,10 @@ class MockDesktopCapturerCallback : public DesktopCapturer::Callback {
MockDesktopCapturerCallback();
~MockDesktopCapturerCallback() override;
- MOCK_METHOD2(OnCaptureResultPtr,
- void(DesktopCapturer::Result result,
- std::unique_ptr<DesktopFrame>* frame));
+ MOCK_METHOD(void,
+ OnCaptureResultPtr,
+ (DesktopCapturer::Result result,
+ std::unique_ptr<DesktopFrame>* frame));
void OnCaptureResult(DesktopCapturer::Result result,
std::unique_ptr<DesktopFrame> frame) final;
diff --git a/chromium/third_party/webrtc/modules/pacing/BUILD.gn b/chromium/third_party/webrtc/modules/pacing/BUILD.gn
index 6f65c33942e..b19c304e1f6 100644
--- a/chromium/third_party/webrtc/modules/pacing/BUILD.gn
+++ b/chromium/third_party/webrtc/modules/pacing/BUILD.gn
@@ -49,6 +49,7 @@ rtc_library("pacing") {
"../../rtc_base:rtc_base_approved",
"../../rtc_base:rtc_task_queue",
"../../rtc_base/experiments:field_trial_parser",
+ "../../rtc_base/synchronization:mutex",
"../../rtc_base/synchronization:sequence_checker",
"../../rtc_base/task_utils:to_queued_task",
"../../system_wrappers",
@@ -57,6 +58,8 @@ rtc_library("pacing") {
"../rtp_rtcp",
"../rtp_rtcp:rtp_rtcp_format",
"../utility",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
diff --git a/chromium/third_party/webrtc/modules/pacing/paced_sender.cc b/chromium/third_party/webrtc/modules/pacing/paced_sender.cc
index cd298f9b0bf..e38863031b3 100644
--- a/chromium/third_party/webrtc/modules/pacing/paced_sender.cc
+++ b/chromium/third_party/webrtc/modules/pacing/paced_sender.cc
@@ -22,13 +22,15 @@
#include "rtc_base/location.h"
#include "rtc_base/logging.h"
#include "rtc_base/time_utils.h"
+#include "rtc_base/trace_event.h"
#include "system_wrappers/include/clock.h"
namespace webrtc {
const int64_t PacedSender::kMaxQueueLengthMs = 2000;
const float PacedSender::kDefaultPaceMultiplier = 2.5f;
-PacedSender::PacedSender(Clock* clock, PacketRouter* packet_router,
+PacedSender::PacedSender(Clock* clock,
+ PacketRouter* packet_router,
RtcEventLog* event_log,
const WebRtcKeyValueConfig* field_trials,
ProcessThread* process_thread)
@@ -40,7 +42,9 @@ PacedSender::PacedSender(Clock* clock, PacketRouter* packet_router,
: PacingController::ProcessMode::kPeriodic),
pacing_controller_(clock,
static_cast<PacingController::PacketSender*>(this),
- event_log, field_trials, process_mode_),
+ event_log,
+ field_trials,
+ process_mode_),
clock_(clock),
packet_router_(packet_router),
process_thread_(process_thread) {
@@ -112,8 +116,15 @@ void PacedSender::SetPacingRates(DataRate pacing_rate, DataRate padding_rate) {
void PacedSender::EnqueuePackets(
std::vector<std::unique_ptr<RtpPacketToSend>> packets) {
{
+ TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("webrtc"),
+ "PacedSender::EnqueuePackets");
rtc::CritScope cs(&critsect_);
for (auto& packet : packets) {
+ TRACE_EVENT2(TRACE_DISABLED_BY_DEFAULT("webrtc"),
+ "PacedSender::EnqueuePackets::Loop", "sequence_number",
+ packet->SequenceNumber(), "rtp_timestamp",
+ packet->Timestamp());
+
pacing_controller_.EnqueuePacket(std::move(packet));
}
}
diff --git a/chromium/third_party/webrtc/modules/pacing/paced_sender_unittest.cc b/chromium/third_party/webrtc/modules/pacing/paced_sender_unittest.cc
index 26d2eac4132..dcbe7d56556 100644
--- a/chromium/third_party/webrtc/modules/pacing/paced_sender_unittest.cc
+++ b/chromium/third_party/webrtc/modules/pacing/paced_sender_unittest.cc
@@ -39,12 +39,15 @@ constexpr size_t kDefaultPacketSize = 234;
// Mock callback implementing the raw api.
class MockCallback : public PacketRouter {
public:
- MOCK_METHOD2(SendPacket,
- void(std::unique_ptr<RtpPacketToSend> packet,
- const PacedPacketInfo& cluster_info));
- MOCK_METHOD1(
- GeneratePadding,
- std::vector<std::unique_ptr<RtpPacketToSend>>(size_t target_size_bytes));
+ MOCK_METHOD(void,
+ SendPacket,
+ (std::unique_ptr<RtpPacketToSend> packet,
+ const PacedPacketInfo& cluster_info),
+ (override));
+ MOCK_METHOD(std::vector<std::unique_ptr<RtpPacketToSend>>,
+ GeneratePadding,
+ (size_t target_size_bytes),
+ (override));
};
class ProcessModeTrials : public WebRtcKeyValueConfig {
diff --git a/chromium/third_party/webrtc/modules/pacing/pacing_controller.cc b/chromium/third_party/webrtc/modules/pacing/pacing_controller.cc
index 4b4fb0bd260..7c523068438 100644
--- a/chromium/third_party/webrtc/modules/pacing/pacing_controller.cc
+++ b/chromium/third_party/webrtc/modules/pacing/pacing_controller.cc
@@ -193,6 +193,10 @@ bool PacingController::Congested() const {
return false;
}
+bool PacingController::IsProbing() const {
+ return prober_.is_probing();
+}
+
Timestamp PacingController::CurrentTime() const {
Timestamp time = clock_->CurrentTime();
if (time < last_timestamp_) {
diff --git a/chromium/third_party/webrtc/modules/pacing/pacing_controller.h b/chromium/third_party/webrtc/modules/pacing/pacing_controller.h
index 27f1614b08c..20d2539e452 100644
--- a/chromium/third_party/webrtc/modules/pacing/pacing_controller.h
+++ b/chromium/third_party/webrtc/modules/pacing/pacing_controller.h
@@ -146,6 +146,8 @@ class PacingController {
bool Congested() const;
+ bool IsProbing() const;
+
private:
void EnqueuePacketInternal(std::unique_ptr<RtpPacketToSend> packet,
int priority);
diff --git a/chromium/third_party/webrtc/modules/pacing/pacing_controller_unittest.cc b/chromium/third_party/webrtc/modules/pacing/pacing_controller_unittest.cc
index fa23da70a02..e7a61f75e47 100644
--- a/chromium/third_party/webrtc/modules/pacing/pacing_controller_unittest.cc
+++ b/chromium/third_party/webrtc/modules/pacing/pacing_controller_unittest.cc
@@ -90,24 +90,28 @@ class MockPacingControllerCallback : public PacingController::PacketSender {
return ret;
}
- MOCK_METHOD5(SendPacket,
- void(uint32_t ssrc,
- uint16_t sequence_number,
- int64_t capture_timestamp,
- bool retransmission,
- bool padding));
- MOCK_METHOD1(SendPadding, size_t(size_t target_size));
+ MOCK_METHOD(void,
+ SendPacket,
+ (uint32_t ssrc,
+ uint16_t sequence_number,
+ int64_t capture_timestamp,
+ bool retransmission,
+ bool padding));
+ MOCK_METHOD(size_t, SendPadding, (size_t target_size));
};
// Mock callback implementing the raw api.
class MockPacketSender : public PacingController::PacketSender {
public:
- MOCK_METHOD2(SendRtpPacket,
- void(std::unique_ptr<RtpPacketToSend> packet,
- const PacedPacketInfo& cluster_info));
- MOCK_METHOD1(
- GeneratePadding,
- std::vector<std::unique_ptr<RtpPacketToSend>>(DataSize target_size));
+ MOCK_METHOD(void,
+ SendRtpPacket,
+ (std::unique_ptr<RtpPacketToSend> packet,
+ const PacedPacketInfo& cluster_info),
+ (override));
+ MOCK_METHOD(std::vector<std::unique_ptr<RtpPacketToSend>>,
+ GeneratePadding,
+ (DataSize target_size),
+ (override));
};
class PacingControllerPadding : public PacingController::PacketSender {
diff --git a/chromium/third_party/webrtc/modules/pacing/packet_router.cc b/chromium/third_party/webrtc/modules/pacing/packet_router.cc
index fa643314934..3569738cdf1 100644
--- a/chromium/third_party/webrtc/modules/pacing/packet_router.cc
+++ b/chromium/third_party/webrtc/modules/pacing/packet_router.cc
@@ -17,13 +17,14 @@
#include <utility>
#include "absl/types/optional.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/rtp_rtcp/source/rtcp_packet.h"
#include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
#include "rtc_base/time_utils.h"
+#include "rtc_base/trace_event.h"
namespace webrtc {
namespace {
@@ -52,8 +53,9 @@ PacketRouter::~PacketRouter() {
RTC_DCHECK(active_remb_module_ == nullptr);
}
-void PacketRouter::AddSendRtpModule(RtpRtcp* rtp_module, bool remb_candidate) {
- rtc::CritScope cs(&modules_crit_);
+void PacketRouter::AddSendRtpModule(RtpRtcpInterface* rtp_module,
+ bool remb_candidate) {
+ MutexLock lock(&modules_mutex_);
AddSendRtpModuleToMap(rtp_module, rtp_module->SSRC());
if (absl::optional<uint32_t> rtx_ssrc = rtp_module->RtxSsrc()) {
@@ -72,7 +74,8 @@ void PacketRouter::AddSendRtpModule(RtpRtcp* rtp_module, bool remb_candidate) {
}
}
-void PacketRouter::AddSendRtpModuleToMap(RtpRtcp* rtp_module, uint32_t ssrc) {
+void PacketRouter::AddSendRtpModuleToMap(RtpRtcpInterface* rtp_module,
+ uint32_t ssrc) {
RTC_DCHECK(send_modules_map_.find(ssrc) == send_modules_map_.end());
// Always keep the audio modules at the back of the list, so that when we
// iterate over the modules in order to find one that can send padding we
@@ -93,8 +96,8 @@ void PacketRouter::RemoveSendRtpModuleFromMap(uint32_t ssrc) {
send_modules_map_.erase(kv);
}
-void PacketRouter::RemoveSendRtpModule(RtpRtcp* rtp_module) {
- rtc::CritScope cs(&modules_crit_);
+void PacketRouter::RemoveSendRtpModule(RtpRtcpInterface* rtp_module) {
+ MutexLock lock(&modules_mutex_);
MaybeRemoveRembModuleCandidate(rtp_module, /* media_sender = */ true);
RemoveSendRtpModuleFromMap(rtp_module->SSRC());
@@ -112,7 +115,7 @@ void PacketRouter::RemoveSendRtpModule(RtpRtcp* rtp_module) {
void PacketRouter::AddReceiveRtpModule(RtcpFeedbackSenderInterface* rtcp_sender,
bool remb_candidate) {
- rtc::CritScope cs(&modules_crit_);
+ MutexLock lock(&modules_mutex_);
RTC_DCHECK(std::find(rtcp_feedback_senders_.begin(),
rtcp_feedback_senders_.end(),
rtcp_sender) == rtcp_feedback_senders_.end());
@@ -126,7 +129,7 @@ void PacketRouter::AddReceiveRtpModule(RtcpFeedbackSenderInterface* rtcp_sender,
void PacketRouter::RemoveReceiveRtpModule(
RtcpFeedbackSenderInterface* rtcp_sender) {
- rtc::CritScope cs(&modules_crit_);
+ MutexLock lock(&modules_mutex_);
MaybeRemoveRembModuleCandidate(rtcp_sender, /* media_sender = */ false);
auto it = std::find(rtcp_feedback_senders_.begin(),
rtcp_feedback_senders_.end(), rtcp_sender);
@@ -136,7 +139,11 @@ void PacketRouter::RemoveReceiveRtpModule(
void PacketRouter::SendPacket(std::unique_ptr<RtpPacketToSend> packet,
const PacedPacketInfo& cluster_info) {
- rtc::CritScope cs(&modules_crit_);
+ TRACE_EVENT2(TRACE_DISABLED_BY_DEFAULT("webrtc"), "PacketRouter::SendPacket",
+ "sequence_number", packet->SequenceNumber(), "rtp_timestamp",
+ packet->Timestamp());
+
+ MutexLock lock(&modules_mutex_);
// With the new pacer code path, transport sequence numbers are only set here,
// on the pacer thread. Therefore we don't need atomics/synchronization.
if (packet->HasExtension<TransportSequenceNumber>()) {
@@ -153,7 +160,7 @@ void PacketRouter::SendPacket(std::unique_ptr<RtpPacketToSend> packet,
return;
}
- RtpRtcp* rtp_module = kv->second;
+ RtpRtcpInterface* rtp_module = kv->second;
if (!rtp_module->TrySendPacket(packet.get(), cluster_info)) {
RTC_LOG(LS_WARNING) << "Failed to send packet, rejected by RTP module.";
return;
@@ -168,7 +175,10 @@ void PacketRouter::SendPacket(std::unique_ptr<RtpPacketToSend> packet,
std::vector<std::unique_ptr<RtpPacketToSend>> PacketRouter::GeneratePadding(
size_t target_size_bytes) {
- rtc::CritScope cs(&modules_crit_);
+ TRACE_EVENT1(TRACE_DISABLED_BY_DEFAULT("webrtc"),
+ "PacketRouter::GeneratePadding", "bytes", target_size_bytes);
+
+ MutexLock lock(&modules_mutex_);
// First try on the last rtp module to have sent media. This increases the
// the chance that any payload based padding will be useful as it will be
// somewhat distributed over modules according the packet rate, even if it
@@ -179,29 +189,37 @@ std::vector<std::unique_ptr<RtpPacketToSend>> PacketRouter::GeneratePadding(
if (last_send_module_ != nullptr &&
last_send_module_->SupportsRtxPayloadPadding()) {
padding_packets = last_send_module_->GeneratePadding(target_size_bytes);
- if (!padding_packets.empty()) {
- return padding_packets;
- }
}
- // Iterate over all modules send module. Video modules will be at the front
- // and so will be prioritized. This is important since audio packets may not
- // be taken into account by the bandwidth estimator, e.g. in FF.
- for (RtpRtcp* rtp_module : send_modules_list_) {
- if (rtp_module->SupportsPadding()) {
- padding_packets = rtp_module->GeneratePadding(target_size_bytes);
- if (!padding_packets.empty()) {
- last_send_module_ = rtp_module;
- break;
+ if (padding_packets.empty()) {
+    // Iterate over all send modules. Video modules will be at the front
+ // and so will be prioritized. This is important since audio packets may not
+ // be taken into account by the bandwidth estimator, e.g. in FF.
+ for (RtpRtcpInterface* rtp_module : send_modules_list_) {
+ if (rtp_module->SupportsPadding()) {
+ padding_packets = rtp_module->GeneratePadding(target_size_bytes);
+ if (!padding_packets.empty()) {
+ last_send_module_ = rtp_module;
+ break;
+ }
}
}
}
+#if RTC_TRACE_EVENTS_ENABLED
+ for (auto& packet : padding_packets) {
+ TRACE_EVENT2(TRACE_DISABLED_BY_DEFAULT("webrtc"),
+ "PacketRouter::GeneratePadding::Loop", "sequence_number",
+ packet->SequenceNumber(), "rtp_timestamp",
+ packet->Timestamp());
+ }
+#endif
+
return padding_packets;
}
uint16_t PacketRouter::CurrentTransportSequenceNumber() const {
- rtc::CritScope lock(&modules_crit_);
+ MutexLock lock(&modules_mutex_);
return transport_seq_ & 0xFFFF;
}
@@ -215,7 +233,7 @@ void PacketRouter::OnReceiveBitrateChanged(const std::vector<uint32_t>& ssrcs,
int64_t now_ms = rtc::TimeMillis();
{
- rtc::CritScope lock(&remb_crit_);
+ MutexLock lock(&remb_mutex_);
// If we already have an estimate, check if the new total estimate is below
// kSendThresholdPercent of the previous estimate.
@@ -248,7 +266,7 @@ void PacketRouter::OnReceiveBitrateChanged(const std::vector<uint32_t>& ssrcs,
void PacketRouter::SetMaxDesiredReceiveBitrate(int64_t bitrate_bps) {
RTC_DCHECK_GE(bitrate_bps, 0);
{
- rtc::CritScope lock(&remb_crit_);
+ MutexLock lock(&remb_mutex_);
max_bitrate_bps_ = bitrate_bps;
if (rtc::TimeMillis() - last_remb_time_ms_ < kRembSendIntervalMs &&
last_send_bitrate_bps_ > 0 &&
@@ -262,7 +280,7 @@ void PacketRouter::SetMaxDesiredReceiveBitrate(int64_t bitrate_bps) {
bool PacketRouter::SendRemb(int64_t bitrate_bps,
const std::vector<uint32_t>& ssrcs) {
- rtc::CritScope lock(&modules_crit_);
+ MutexLock lock(&modules_mutex_);
if (!active_remb_module_) {
return false;
@@ -277,10 +295,10 @@ bool PacketRouter::SendRemb(int64_t bitrate_bps,
bool PacketRouter::SendCombinedRtcpPacket(
std::vector<std::unique_ptr<rtcp::RtcpPacket>> packets) {
- rtc::CritScope cs(&modules_crit_);
+ MutexLock lock(&modules_mutex_);
// Prefer send modules.
- for (RtpRtcp* rtp_module : send_modules_list_) {
+ for (RtpRtcpInterface* rtp_module : send_modules_list_) {
if (rtp_module->RTCP() == RtcpMode::kOff) {
continue;
}
diff --git a/chromium/third_party/webrtc/modules/pacing/packet_router.h b/chromium/third_party/webrtc/modules/pacing/packet_router.h
index 40b3ad14077..379ec20f200 100644
--- a/chromium/third_party/webrtc/modules/pacing/packet_router.h
+++ b/chromium/third_party/webrtc/modules/pacing/packet_router.h
@@ -27,11 +27,12 @@
#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
#include "rtc_base/constructor_magic.h"
#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
namespace webrtc {
-class RtpRtcp;
+class RtpRtcpInterface;
// PacketRouter keeps track of rtp send modules to support the pacer.
// In addition, it handles feedback messages, which are sent on a send
@@ -45,8 +46,8 @@ class PacketRouter : public RemoteBitrateObserver,
explicit PacketRouter(uint16_t start_transport_seq);
~PacketRouter() override;
- void AddSendRtpModule(RtpRtcp* rtp_module, bool remb_candidate);
- void RemoveSendRtpModule(RtpRtcp* rtp_module);
+ void AddSendRtpModule(RtpRtcpInterface* rtp_module, bool remb_candidate);
+ void RemoveSendRtpModule(RtpRtcpInterface* rtp_module);
void AddReceiveRtpModule(RtcpFeedbackSenderInterface* rtcp_sender,
bool remb_candidate);
@@ -82,48 +83,49 @@ class PacketRouter : public RemoteBitrateObserver,
private:
void AddRembModuleCandidate(RtcpFeedbackSenderInterface* candidate_module,
bool media_sender)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(modules_crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(modules_mutex_);
void MaybeRemoveRembModuleCandidate(
RtcpFeedbackSenderInterface* candidate_module,
- bool media_sender) RTC_EXCLUSIVE_LOCKS_REQUIRED(modules_crit_);
- void UnsetActiveRembModule() RTC_EXCLUSIVE_LOCKS_REQUIRED(modules_crit_);
- void DetermineActiveRembModule() RTC_EXCLUSIVE_LOCKS_REQUIRED(modules_crit_);
- void AddSendRtpModuleToMap(RtpRtcp* rtp_module, uint32_t ssrc)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(modules_crit_);
+ bool media_sender) RTC_EXCLUSIVE_LOCKS_REQUIRED(modules_mutex_);
+ void UnsetActiveRembModule() RTC_EXCLUSIVE_LOCKS_REQUIRED(modules_mutex_);
+ void DetermineActiveRembModule() RTC_EXCLUSIVE_LOCKS_REQUIRED(modules_mutex_);
+ void AddSendRtpModuleToMap(RtpRtcpInterface* rtp_module, uint32_t ssrc)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(modules_mutex_);
void RemoveSendRtpModuleFromMap(uint32_t ssrc)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(modules_crit_);
-
- rtc::CriticalSection modules_crit_;
- // Ssrc to RtpRtcp module;
- std::unordered_map<uint32_t, RtpRtcp*> send_modules_map_
- RTC_GUARDED_BY(modules_crit_);
- std::list<RtpRtcp*> send_modules_list_ RTC_GUARDED_BY(modules_crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(modules_mutex_);
+
+ mutable Mutex modules_mutex_;
+  // Ssrc to RtpRtcpInterface module.
+ std::unordered_map<uint32_t, RtpRtcpInterface*> send_modules_map_
+ RTC_GUARDED_BY(modules_mutex_);
+ std::list<RtpRtcpInterface*> send_modules_list_
+ RTC_GUARDED_BY(modules_mutex_);
// The last module used to send media.
- RtpRtcp* last_send_module_ RTC_GUARDED_BY(modules_crit_);
+ RtpRtcpInterface* last_send_module_ RTC_GUARDED_BY(modules_mutex_);
// Rtcp modules of the rtp receivers.
std::vector<RtcpFeedbackSenderInterface*> rtcp_feedback_senders_
- RTC_GUARDED_BY(modules_crit_);
+ RTC_GUARDED_BY(modules_mutex_);
- // TODO(eladalon): remb_crit_ only ever held from one function, and it's not
+ // TODO(eladalon): remb_mutex_ only ever held from one function, and it's not
// clear if that function can actually be called from more than one thread.
- rtc::CriticalSection remb_crit_;
+ Mutex remb_mutex_;
// The last time a REMB was sent.
- int64_t last_remb_time_ms_ RTC_GUARDED_BY(remb_crit_);
- int64_t last_send_bitrate_bps_ RTC_GUARDED_BY(remb_crit_);
+ int64_t last_remb_time_ms_ RTC_GUARDED_BY(remb_mutex_);
+ int64_t last_send_bitrate_bps_ RTC_GUARDED_BY(remb_mutex_);
// The last bitrate update.
- int64_t bitrate_bps_ RTC_GUARDED_BY(remb_crit_);
- int64_t max_bitrate_bps_ RTC_GUARDED_BY(remb_crit_);
+ int64_t bitrate_bps_ RTC_GUARDED_BY(remb_mutex_);
+ int64_t max_bitrate_bps_ RTC_GUARDED_BY(remb_mutex_);
// Candidates for the REMB module can be RTP sender/receiver modules, with
// the sender modules taking precedence.
std::vector<RtcpFeedbackSenderInterface*> sender_remb_candidates_
- RTC_GUARDED_BY(modules_crit_);
+ RTC_GUARDED_BY(modules_mutex_);
std::vector<RtcpFeedbackSenderInterface*> receiver_remb_candidates_
- RTC_GUARDED_BY(modules_crit_);
+ RTC_GUARDED_BY(modules_mutex_);
RtcpFeedbackSenderInterface* active_remb_module_
- RTC_GUARDED_BY(modules_crit_);
+ RTC_GUARDED_BY(modules_mutex_);
- uint64_t transport_seq_ RTC_GUARDED_BY(modules_crit_);
+ uint64_t transport_seq_ RTC_GUARDED_BY(modules_mutex_);
RTC_DISALLOW_COPY_AND_ASSIGN(PacketRouter);
};
diff --git a/chromium/third_party/webrtc/modules/pacing/packet_router_unittest.cc b/chromium/third_party/webrtc/modules/pacing/packet_router_unittest.cc
index b8f16cb9242..6af7529e861 100644
--- a/chromium/third_party/webrtc/modules/pacing/packet_router_unittest.cc
+++ b/chromium/third_party/webrtc/modules/pacing/packet_router_unittest.cc
@@ -101,12 +101,12 @@ TEST_F(PacketRouterTest, GeneratePaddingPrioritizesRtx) {
const uint16_t kSsrc1 = 1234;
const uint16_t kSsrc2 = 4567;
- NiceMock<MockRtpRtcp> rtp_1;
+ NiceMock<MockRtpRtcpInterface> rtp_1;
ON_CALL(rtp_1, RtxSendStatus()).WillByDefault(Return(kRtxRedundantPayloads));
ON_CALL(rtp_1, SSRC()).WillByDefault(Return(kSsrc1));
ON_CALL(rtp_1, SupportsPadding).WillByDefault(Return(false));
- NiceMock<MockRtpRtcp> rtp_2;
+ NiceMock<MockRtpRtcpInterface> rtp_2;
ON_CALL(rtp_2, RtxSendStatus()).WillByDefault(Return(kRtxOff));
ON_CALL(rtp_2, SSRC()).WillByDefault(Return(kSsrc2));
ON_CALL(rtp_2, SupportsPadding).WillByDefault(Return(true));
@@ -142,13 +142,13 @@ TEST_F(PacketRouterTest, GeneratePaddingPrioritizesVideo) {
kExpectedPaddingPackets);
};
- NiceMock<MockRtpRtcp> audio_module;
+ NiceMock<MockRtpRtcpInterface> audio_module;
ON_CALL(audio_module, RtxSendStatus()).WillByDefault(Return(kRtxOff));
ON_CALL(audio_module, SSRC()).WillByDefault(Return(kSsrc1));
ON_CALL(audio_module, SupportsPadding).WillByDefault(Return(true));
ON_CALL(audio_module, IsAudioConfigured).WillByDefault(Return(true));
- NiceMock<MockRtpRtcp> video_module;
+ NiceMock<MockRtpRtcpInterface> video_module;
ON_CALL(video_module, RtxSendStatus()).WillByDefault(Return(kRtxOff));
ON_CALL(video_module, SSRC()).WillByDefault(Return(kSsrc2));
ON_CALL(video_module, SupportsPadding).WillByDefault(Return(true));
@@ -194,7 +194,7 @@ TEST_F(PacketRouterTest, PadsOnLastActiveMediaStream) {
const uint16_t kSsrc3 = 8901;
// First two rtp modules send media and have rtx.
- NiceMock<MockRtpRtcp> rtp_1;
+ NiceMock<MockRtpRtcpInterface> rtp_1;
EXPECT_CALL(rtp_1, SSRC()).WillRepeatedly(Return(kSsrc1));
EXPECT_CALL(rtp_1, SupportsPadding).WillRepeatedly(Return(true));
EXPECT_CALL(rtp_1, SupportsRtxPayloadPadding).WillRepeatedly(Return(true));
@@ -205,7 +205,7 @@ TEST_F(PacketRouterTest, PadsOnLastActiveMediaStream) {
::testing::Pointee(Property(&RtpPacketToSend::Ssrc, kSsrc1)), _))
.WillRepeatedly(Return(true));
- NiceMock<MockRtpRtcp> rtp_2;
+ NiceMock<MockRtpRtcpInterface> rtp_2;
EXPECT_CALL(rtp_2, SSRC()).WillRepeatedly(Return(kSsrc2));
EXPECT_CALL(rtp_2, SupportsPadding).WillRepeatedly(Return(true));
EXPECT_CALL(rtp_2, SupportsRtxPayloadPadding).WillRepeatedly(Return(true));
@@ -217,7 +217,7 @@ TEST_F(PacketRouterTest, PadsOnLastActiveMediaStream) {
.WillRepeatedly(Return(true));
// Third module is sending media, but does not support rtx.
- NiceMock<MockRtpRtcp> rtp_3;
+ NiceMock<MockRtpRtcpInterface> rtp_3;
EXPECT_CALL(rtp_3, SSRC()).WillRepeatedly(Return(kSsrc3));
EXPECT_CALL(rtp_3, SupportsPadding).WillRepeatedly(Return(true));
EXPECT_CALL(rtp_3, SupportsRtxPayloadPadding).WillRepeatedly(Return(false));
@@ -265,7 +265,7 @@ TEST_F(PacketRouterTest, PadsOnLastActiveMediaStream) {
packet_router_.RemoveSendRtpModule(&rtp_2);
// Send on and then remove all remaining modules.
- RtpRtcp* last_send_module;
+ RtpRtcpInterface* last_send_module;
EXPECT_CALL(rtp_1, GeneratePadding(kPaddingBytes))
.Times(1)
.WillOnce([&](size_t target_size_bytes) {
@@ -297,7 +297,7 @@ TEST_F(PacketRouterTest, AllocatesTransportSequenceNumbers) {
const uint16_t kSsrc1 = 1234;
PacketRouter packet_router(kStartSeq - 1);
- NiceMock<MockRtpRtcp> rtp_1;
+ NiceMock<MockRtpRtcpInterface> rtp_1;
EXPECT_CALL(rtp_1, SSRC()).WillRepeatedly(Return(kSsrc1));
EXPECT_CALL(rtp_1, TrySendPacket).WillRepeatedly(Return(true));
packet_router.AddSendRtpModule(&rtp_1, false);
@@ -315,8 +315,8 @@ TEST_F(PacketRouterTest, AllocatesTransportSequenceNumbers) {
}
TEST_F(PacketRouterTest, SendTransportFeedback) {
- NiceMock<MockRtpRtcp> rtp_1;
- NiceMock<MockRtpRtcp> rtp_2;
+ NiceMock<MockRtpRtcpInterface> rtp_1;
+ NiceMock<MockRtpRtcpInterface> rtp_2;
ON_CALL(rtp_1, RTCP()).WillByDefault(Return(RtcpMode::kCompound));
ON_CALL(rtp_2, RTCP()).WillByDefault(Return(RtcpMode::kCompound));
@@ -338,7 +338,7 @@ TEST_F(PacketRouterTest, SendTransportFeedback) {
TEST_F(PacketRouterTest, SendPacketWithoutTransportSequenceNumbers) {
const uint16_t kSsrc1 = 1234;
- NiceMock<MockRtpRtcp> rtp_1;
+ NiceMock<MockRtpRtcpInterface> rtp_1;
ON_CALL(rtp_1, SendingMedia).WillByDefault(Return(true));
ON_CALL(rtp_1, SSRC).WillByDefault(Return(kSsrc1));
packet_router_.AddSendRtpModule(&rtp_1, false);
@@ -361,8 +361,8 @@ TEST_F(PacketRouterTest, SendPacketWithoutTransportSequenceNumbers) {
}
TEST_F(PacketRouterTest, SendPacketAssignsTransportSequenceNumbers) {
- NiceMock<MockRtpRtcp> rtp_1;
- NiceMock<MockRtpRtcp> rtp_2;
+ NiceMock<MockRtpRtcpInterface> rtp_1;
+ NiceMock<MockRtpRtcpInterface> rtp_2;
const uint16_t kSsrc1 = 1234;
const uint16_t kSsrc2 = 2345;
@@ -405,8 +405,9 @@ TEST_F(PacketRouterTest, SendPacketAssignsTransportSequenceNumbers) {
}
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST_F(PacketRouterTest, DoubleRegistrationOfSendModuleDisallowed) {
- NiceMock<MockRtpRtcp> module;
+using PacketRouterDeathTest = PacketRouterTest;
+TEST_F(PacketRouterDeathTest, DoubleRegistrationOfSendModuleDisallowed) {
+ NiceMock<MockRtpRtcpInterface> module;
constexpr bool remb_candidate = false; // Value irrelevant.
packet_router_.AddSendRtpModule(&module, remb_candidate);
@@ -416,8 +417,8 @@ TEST_F(PacketRouterTest, DoubleRegistrationOfSendModuleDisallowed) {
packet_router_.RemoveSendRtpModule(&module);
}
-TEST_F(PacketRouterTest, DoubleRegistrationOfReceiveModuleDisallowed) {
- NiceMock<MockRtpRtcp> module;
+TEST_F(PacketRouterDeathTest, DoubleRegistrationOfReceiveModuleDisallowed) {
+ NiceMock<MockRtpRtcpInterface> module;
constexpr bool remb_candidate = false; // Value irrelevant.
packet_router_.AddReceiveRtpModule(&module, remb_candidate);
@@ -427,14 +428,14 @@ TEST_F(PacketRouterTest, DoubleRegistrationOfReceiveModuleDisallowed) {
packet_router_.RemoveReceiveRtpModule(&module);
}
-TEST_F(PacketRouterTest, RemovalOfNeverAddedSendModuleDisallowed) {
- NiceMock<MockRtpRtcp> module;
+TEST_F(PacketRouterDeathTest, RemovalOfNeverAddedSendModuleDisallowed) {
+ NiceMock<MockRtpRtcpInterface> module;
EXPECT_DEATH(packet_router_.RemoveSendRtpModule(&module), "");
}
-TEST_F(PacketRouterTest, RemovalOfNeverAddedReceiveModuleDisallowed) {
- NiceMock<MockRtpRtcp> module;
+TEST_F(PacketRouterDeathTest, RemovalOfNeverAddedReceiveModuleDisallowed) {
+ NiceMock<MockRtpRtcpInterface> module;
EXPECT_DEATH(packet_router_.RemoveReceiveRtpModule(&module), "");
}
@@ -442,7 +443,7 @@ TEST_F(PacketRouterTest, RemovalOfNeverAddedReceiveModuleDisallowed) {
TEST(PacketRouterRembTest, LowerEstimateToSendRemb) {
rtc::ScopedFakeClock clock;
- NiceMock<MockRtpRtcp> rtp;
+ NiceMock<MockRtpRtcpInterface> rtp;
PacketRouter packet_router;
packet_router.AddSendRtpModule(&rtp, true);
@@ -468,7 +469,7 @@ TEST(PacketRouterRembTest, LowerEstimateToSendRemb) {
TEST(PacketRouterRembTest, VerifyIncreasingAndDecreasing) {
rtc::ScopedFakeClock clock;
- NiceMock<MockRtpRtcp> rtp;
+ NiceMock<MockRtpRtcpInterface> rtp;
PacketRouter packet_router;
packet_router.AddSendRtpModule(&rtp, true);
@@ -493,7 +494,7 @@ TEST(PacketRouterRembTest, VerifyIncreasingAndDecreasing) {
TEST(PacketRouterRembTest, NoRembForIncreasedBitrate) {
rtc::ScopedFakeClock clock;
- NiceMock<MockRtpRtcp> rtp;
+ NiceMock<MockRtpRtcpInterface> rtp;
PacketRouter packet_router;
packet_router.AddSendRtpModule(&rtp, true);
@@ -521,8 +522,8 @@ TEST(PacketRouterRembTest, NoRembForIncreasedBitrate) {
TEST(PacketRouterRembTest, ChangeSendRtpModule) {
rtc::ScopedFakeClock clock;
- NiceMock<MockRtpRtcp> rtp_send;
- NiceMock<MockRtpRtcp> rtp_recv;
+ NiceMock<MockRtpRtcpInterface> rtp_send;
+ NiceMock<MockRtpRtcpInterface> rtp_recv;
PacketRouter packet_router;
packet_router.AddSendRtpModule(&rtp_send, true);
packet_router.AddReceiveRtpModule(&rtp_recv, true);
@@ -556,7 +557,7 @@ TEST(PacketRouterRembTest, ChangeSendRtpModule) {
TEST(PacketRouterRembTest, OnlyOneRembForRepeatedOnReceiveBitrateChanged) {
rtc::ScopedFakeClock clock;
- NiceMock<MockRtpRtcp> rtp;
+ NiceMock<MockRtpRtcpInterface> rtp;
PacketRouter packet_router;
packet_router.AddSendRtpModule(&rtp, true);
@@ -585,7 +586,7 @@ TEST(PacketRouterRembTest, SetMaxDesiredReceiveBitrateLimitsSetRemb) {
rtc::ScopedFakeClock clock;
PacketRouter packet_router;
clock.AdvanceTime(TimeDelta::Millis(1000));
- NiceMock<MockRtpRtcp> remb_sender;
+ NiceMock<MockRtpRtcpInterface> remb_sender;
constexpr bool remb_candidate = true;
packet_router.AddSendRtpModule(&remb_sender, remb_candidate);
@@ -608,7 +609,7 @@ TEST(PacketRouterRembTest,
rtc::ScopedFakeClock clock;
PacketRouter packet_router;
clock.AdvanceTime(TimeDelta::Millis(1000));
- NiceMock<MockRtpRtcp> remb_sender;
+ NiceMock<MockRtpRtcpInterface> remb_sender;
constexpr bool remb_candidate = true;
packet_router.AddSendRtpModule(&remb_sender, remb_candidate);
@@ -630,7 +631,7 @@ TEST(PacketRouterRembTest,
rtc::ScopedFakeClock clock;
PacketRouter packet_router;
clock.AdvanceTime(TimeDelta::Millis(1000));
- NiceMock<MockRtpRtcp> remb_sender;
+ NiceMock<MockRtpRtcpInterface> remb_sender;
constexpr bool remb_candidate = true;
packet_router.AddSendRtpModule(&remb_sender, remb_candidate);
@@ -652,7 +653,7 @@ TEST(PacketRouterRembTest,
rtc::ScopedFakeClock clock;
PacketRouter packet_router;
clock.AdvanceTime(TimeDelta::Millis(1000));
- NiceMock<MockRtpRtcp> remb_sender;
+ NiceMock<MockRtpRtcpInterface> remb_sender;
constexpr bool remb_candidate = true;
packet_router.AddSendRtpModule(&remb_sender, remb_candidate);
@@ -674,7 +675,7 @@ TEST(PacketRouterRembTest,
rtc::ScopedFakeClock clock;
PacketRouter packet_router;
clock.AdvanceTime(TimeDelta::Millis(1000));
- NiceMock<MockRtpRtcp> remb_sender;
+ NiceMock<MockRtpRtcpInterface> remb_sender;
constexpr bool remb_candidate = true;
packet_router.AddSendRtpModule(&remb_sender, remb_candidate);
@@ -697,7 +698,7 @@ TEST(PacketRouterRembTest,
rtc::ScopedFakeClock clock;
PacketRouter packet_router;
clock.AdvanceTime(TimeDelta::Millis(1000));
- NiceMock<MockRtpRtcp> remb_sender;
+ NiceMock<MockRtpRtcpInterface> remb_sender;
constexpr bool remb_candidate = true;
packet_router.AddSendRtpModule(&remb_sender, remb_candidate);
@@ -719,7 +720,7 @@ TEST(PacketRouterRembTest,
// packet on this one.
TEST(PacketRouterRembTest, NoSendingRtpModule) {
rtc::ScopedFakeClock clock;
- NiceMock<MockRtpRtcp> rtp;
+ NiceMock<MockRtpRtcpInterface> rtp;
PacketRouter packet_router;
packet_router.AddReceiveRtpModule(&rtp, true);
@@ -745,7 +746,7 @@ TEST(PacketRouterRembTest, NoSendingRtpModule) {
TEST(PacketRouterRembTest, NonCandidateSendRtpModuleNotUsedForRemb) {
rtc::ScopedFakeClock clock;
PacketRouter packet_router;
- NiceMock<MockRtpRtcp> module;
+ NiceMock<MockRtpRtcpInterface> module;
constexpr bool remb_candidate = false;
@@ -764,7 +765,7 @@ TEST(PacketRouterRembTest, NonCandidateSendRtpModuleNotUsedForRemb) {
TEST(PacketRouterRembTest, CandidateSendRtpModuleUsedForRemb) {
rtc::ScopedFakeClock clock;
PacketRouter packet_router;
- NiceMock<MockRtpRtcp> module;
+ NiceMock<MockRtpRtcpInterface> module;
constexpr bool remb_candidate = true;
@@ -783,7 +784,7 @@ TEST(PacketRouterRembTest, CandidateSendRtpModuleUsedForRemb) {
TEST(PacketRouterRembTest, NonCandidateReceiveRtpModuleNotUsedForRemb) {
rtc::ScopedFakeClock clock;
PacketRouter packet_router;
- NiceMock<MockRtpRtcp> module;
+ NiceMock<MockRtpRtcpInterface> module;
constexpr bool remb_candidate = false;
@@ -802,7 +803,7 @@ TEST(PacketRouterRembTest, NonCandidateReceiveRtpModuleNotUsedForRemb) {
TEST(PacketRouterRembTest, CandidateReceiveRtpModuleUsedForRemb) {
rtc::ScopedFakeClock clock;
PacketRouter packet_router;
- NiceMock<MockRtpRtcp> module;
+ NiceMock<MockRtpRtcpInterface> module;
constexpr bool remb_candidate = true;
@@ -822,8 +823,8 @@ TEST(PacketRouterRembTest,
SendCandidatePreferredOverReceiveCandidate_SendModuleAddedFirst) {
rtc::ScopedFakeClock clock;
PacketRouter packet_router;
- NiceMock<MockRtpRtcp> send_module;
- NiceMock<MockRtpRtcp> receive_module;
+ NiceMock<MockRtpRtcpInterface> send_module;
+ NiceMock<MockRtpRtcpInterface> receive_module;
constexpr bool remb_candidate = true;
@@ -850,8 +851,8 @@ TEST(PacketRouterRembTest,
SendCandidatePreferredOverReceiveCandidate_ReceiveModuleAddedFirst) {
rtc::ScopedFakeClock clock;
PacketRouter packet_router;
- NiceMock<MockRtpRtcp> send_module;
- NiceMock<MockRtpRtcp> receive_module;
+ NiceMock<MockRtpRtcpInterface> send_module;
+ NiceMock<MockRtpRtcpInterface> receive_module;
constexpr bool remb_candidate = true;
@@ -877,8 +878,8 @@ TEST(PacketRouterRembTest,
TEST(PacketRouterRembTest, ReceiveModuleTakesOverWhenLastSendModuleRemoved) {
rtc::ScopedFakeClock clock;
PacketRouter packet_router;
- NiceMock<MockRtpRtcp> send_module;
- NiceMock<MockRtpRtcp> receive_module;
+ NiceMock<MockRtpRtcpInterface> send_module;
+ NiceMock<MockRtpRtcpInterface> receive_module;
constexpr bool remb_candidate = true;
diff --git a/chromium/third_party/webrtc/modules/pacing/task_queue_paced_sender.cc b/chromium/third_party/webrtc/modules/pacing/task_queue_paced_sender.cc
index 16d6df554c2..531e9d6ad32 100644
--- a/chromium/third_party/webrtc/modules/pacing/task_queue_paced_sender.cc
+++ b/chromium/third_party/webrtc/modules/pacing/task_queue_paced_sender.cc
@@ -17,6 +17,7 @@
#include "rtc_base/event.h"
#include "rtc_base/logging.h"
#include "rtc_base/task_utils/to_queued_task.h"
+#include "rtc_base/trace_event.h"
namespace webrtc {
namespace {
@@ -34,8 +35,10 @@ TaskQueuePacedSender::TaskQueuePacedSender(
PacketRouter* packet_router,
RtcEventLog* event_log,
const WebRtcKeyValueConfig* field_trials,
- TaskQueueFactory* task_queue_factory)
+ TaskQueueFactory* task_queue_factory,
+ TimeDelta hold_back_window)
: clock_(clock),
+ hold_back_window_(hold_back_window),
packet_router_(packet_router),
pacing_controller_(clock,
static_cast<PacingController::PacketSender*>(this),
@@ -120,6 +123,17 @@ void TaskQueuePacedSender::SetPacingRates(DataRate pacing_rate,
void TaskQueuePacedSender::EnqueuePackets(
std::vector<std::unique_ptr<RtpPacketToSend>> packets) {
+#if RTC_TRACE_EVENTS_ENABLED
+ TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("webrtc"),
+ "TaskQueuePacedSender::EnqueuePackets");
+ for (auto& packet : packets) {
+ TRACE_EVENT2(TRACE_DISABLED_BY_DEFAULT("webrtc"),
+ "TaskQueuePacedSender::EnqueuePackets::Loop",
+ "sequence_number", packet->SequenceNumber(), "rtp_timestamp",
+ packet->Timestamp());
+ }
+#endif
+
task_queue_.PostTask([this, packets_ = std::move(packets)]() mutable {
RTC_DCHECK_RUN_ON(&task_queue_);
for (auto& packet : packets_) {
@@ -175,7 +189,7 @@ TimeDelta TaskQueuePacedSender::OldestPacketWaitTime() const {
}
void TaskQueuePacedSender::OnStatsUpdated(const Stats& stats) {
- rtc::CritScope cs(&stats_crit_);
+ MutexLock lock(&stats_mutex_);
current_stats_ = stats;
}
@@ -205,8 +219,10 @@ void TaskQueuePacedSender::MaybeProcessPackets(
next_process_time = pacing_controller_.NextSendTime();
}
- next_process_time =
- std::max(now + PacingController::kMinSleepTime, next_process_time);
+ const TimeDelta min_sleep = pacing_controller_.IsProbing()
+ ? PacingController::kMinSleepTime
+ : hold_back_window_;
+ next_process_time = std::max(now + min_sleep, next_process_time);
TimeDelta sleep_time = next_process_time - now;
if (next_process_time_.IsMinusInfinity() ||
@@ -295,7 +311,7 @@ void TaskQueuePacedSender::MaybeUpdateStats(bool is_scheduled_call) {
}
TaskQueuePacedSender::Stats TaskQueuePacedSender::GetStats() const {
- rtc::CritScope cs(&stats_crit_);
+ MutexLock lock(&stats_mutex_);
return current_stats_;
}
diff --git a/chromium/third_party/webrtc/modules/pacing/task_queue_paced_sender.h b/chromium/third_party/webrtc/modules/pacing/task_queue_paced_sender.h
index 3f53f000970..71b3be27e6f 100644
--- a/chromium/third_party/webrtc/modules/pacing/task_queue_paced_sender.h
+++ b/chromium/third_party/webrtc/modules/pacing/task_queue_paced_sender.h
@@ -30,6 +30,7 @@
#include "modules/pacing/rtp_packet_pacer.h"
#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/synchronization/sequence_checker.h"
#include "rtc_base/task_queue.h"
#include "rtc_base/thread_annotations.h"
@@ -42,11 +43,18 @@ class TaskQueuePacedSender : public RtpPacketPacer,
public RtpPacketSender,
private PacingController::PacketSender {
public:
- TaskQueuePacedSender(Clock* clock,
- PacketRouter* packet_router,
- RtcEventLog* event_log,
- const WebRtcKeyValueConfig* field_trials,
- TaskQueueFactory* task_queue_factory);
+ // The |hold_back_window| parameter sets a lower bound on time to sleep if
+ // there is currently a pacer queue and packets can't immediately be
+ // processed. Increasing this reduces thread wakeups at the expense of higher
+ // latency.
+ // TODO(bugs.webrtc.org/10809): Remove default value for hold_back_window.
+ TaskQueuePacedSender(
+ Clock* clock,
+ PacketRouter* packet_router,
+ RtcEventLog* event_log,
+ const WebRtcKeyValueConfig* field_trials,
+ TaskQueueFactory* task_queue_factory,
+ TimeDelta hold_back_window = PacingController::kMinSleepTime);
~TaskQueuePacedSender() override;
@@ -134,6 +142,7 @@ class TaskQueuePacedSender : public RtpPacketPacer,
Stats GetStats() const;
Clock* const clock_;
+ const TimeDelta hold_back_window_;
PacketRouter* const packet_router_ RTC_GUARDED_BY(task_queue_);
PacingController pacing_controller_ RTC_GUARDED_BY(task_queue_);
@@ -159,8 +168,8 @@ class TaskQueuePacedSender : public RtpPacketPacer,
// never drain.
bool is_shutdown_ RTC_GUARDED_BY(task_queue_);
- rtc::CriticalSection stats_crit_;
- Stats current_stats_ RTC_GUARDED_BY(stats_crit_);
+ mutable Mutex stats_mutex_;
+ Stats current_stats_ RTC_GUARDED_BY(stats_mutex_);
rtc::TaskQueue task_queue_;
};
diff --git a/chromium/third_party/webrtc/modules/pacing/task_queue_paced_sender_unittest.cc b/chromium/third_party/webrtc/modules/pacing/task_queue_paced_sender_unittest.cc
index 83aa73e9aaa..ab6a24ba42b 100644
--- a/chromium/third_party/webrtc/modules/pacing/task_queue_paced_sender_unittest.cc
+++ b/chromium/third_party/webrtc/modules/pacing/task_queue_paced_sender_unittest.cc
@@ -24,6 +24,7 @@
#include "test/time_controller/simulated_time_controller.h"
using ::testing::_;
+using ::testing::AtLeast;
using ::testing::Return;
using ::testing::SaveArg;
@@ -37,26 +38,40 @@ constexpr size_t kDefaultPacketSize = 1234;
class MockPacketRouter : public PacketRouter {
public:
- MOCK_METHOD2(SendPacket,
- void(std::unique_ptr<RtpPacketToSend> packet,
- const PacedPacketInfo& cluster_info));
- MOCK_METHOD1(
- GeneratePadding,
- std::vector<std::unique_ptr<RtpPacketToSend>>(size_t target_size_bytes));
+ MOCK_METHOD(void,
+ SendPacket,
+ (std::unique_ptr<RtpPacketToSend> packet,
+ const PacedPacketInfo& cluster_info),
+ (override));
+ MOCK_METHOD(std::vector<std::unique_ptr<RtpPacketToSend>>,
+ GeneratePadding,
+ (size_t target_size_bytes),
+ (override));
+};
+
+class StatsUpdateObserver {
+ public:
+ StatsUpdateObserver() = default;
+ virtual ~StatsUpdateObserver() = default;
+
+ virtual void OnStatsUpdated() = 0;
};
class TaskQueuePacedSenderForTest : public TaskQueuePacedSender {
public:
- TaskQueuePacedSenderForTest(Clock* clock,
- PacketRouter* packet_router,
- RtcEventLog* event_log,
- const WebRtcKeyValueConfig* field_trials,
- TaskQueueFactory* task_queue_factory)
+ TaskQueuePacedSenderForTest(
+ Clock* clock,
+ PacketRouter* packet_router,
+ RtcEventLog* event_log,
+ const WebRtcKeyValueConfig* field_trials,
+ TaskQueueFactory* task_queue_factory,
+ TimeDelta hold_back_window = PacingController::kMinSleepTime)
: TaskQueuePacedSender(clock,
packet_router,
event_log,
field_trials,
- task_queue_factory) {}
+ task_queue_factory,
+ hold_back_window) {}
void OnStatsUpdated(const Stats& stats) override {
++num_stats_updates_;
@@ -65,250 +80,327 @@ class TaskQueuePacedSenderForTest : public TaskQueuePacedSender {
size_t num_stats_updates_ = 0;
};
+} // namespace
-std::unique_ptr<RtpPacketToSend> BuildRtpPacket(RtpPacketMediaType type) {
- auto packet = std::make_unique<RtpPacketToSend>(nullptr);
- packet->set_packet_type(type);
- switch (type) {
- case RtpPacketMediaType::kAudio:
- packet->SetSsrc(kAudioSsrc);
- break;
- case RtpPacketMediaType::kVideo:
- packet->SetSsrc(kVideoSsrc);
- break;
- case RtpPacketMediaType::kRetransmission:
- case RtpPacketMediaType::kPadding:
- packet->SetSsrc(kVideoRtxSsrc);
- break;
- case RtpPacketMediaType::kForwardErrorCorrection:
- packet->SetSsrc(kFlexFecSsrc);
- break;
+namespace test {
+
+ std::unique_ptr<RtpPacketToSend> BuildRtpPacket(RtpPacketMediaType type) {
+ auto packet = std::make_unique<RtpPacketToSend>(nullptr);
+ packet->set_packet_type(type);
+ switch (type) {
+ case RtpPacketMediaType::kAudio:
+ packet->SetSsrc(kAudioSsrc);
+ break;
+ case RtpPacketMediaType::kVideo:
+ packet->SetSsrc(kVideoSsrc);
+ break;
+ case RtpPacketMediaType::kRetransmission:
+ case RtpPacketMediaType::kPadding:
+ packet->SetSsrc(kVideoRtxSsrc);
+ break;
+ case RtpPacketMediaType::kForwardErrorCorrection:
+ packet->SetSsrc(kFlexFecSsrc);
+ break;
+ }
+
+ packet->SetPayloadSize(kDefaultPacketSize);
+ return packet;
}
- packet->SetPayloadSize(kDefaultPacketSize);
- return packet;
-}
+ std::vector<std::unique_ptr<RtpPacketToSend>> GeneratePackets(
+ RtpPacketMediaType type,
+ size_t num_packets) {
+ std::vector<std::unique_ptr<RtpPacketToSend>> packets;
+ for (size_t i = 0; i < num_packets; ++i) {
+ packets.push_back(BuildRtpPacket(type));
+ }
+ return packets;
+ }
-std::vector<std::unique_ptr<RtpPacketToSend>> GeneratePackets(
- RtpPacketMediaType type,
- size_t num_packets) {
- std::vector<std::unique_ptr<RtpPacketToSend>> packets;
- for (size_t i = 0; i < num_packets; ++i) {
- packets.push_back(BuildRtpPacket(type));
+ TEST(TaskQueuePacedSenderTest, PacesPackets) {
+ GlobalSimulatedTimeController time_controller(Timestamp::Millis(1234));
+ MockPacketRouter packet_router;
+ TaskQueuePacedSenderForTest pacer(
+ time_controller.GetClock(), &packet_router,
+ /*event_log=*/nullptr,
+ /*field_trials=*/nullptr, time_controller.GetTaskQueueFactory(),
+ PacingController::kMinSleepTime);
+
+ // Insert a number of packets, covering one second.
+ static constexpr size_t kPacketsToSend = 42;
+ pacer.SetPacingRates(
+ DataRate::BitsPerSec(kDefaultPacketSize * 8 * kPacketsToSend),
+ DataRate::Zero());
+ pacer.EnqueuePackets(
+ GeneratePackets(RtpPacketMediaType::kVideo, kPacketsToSend));
+
+ // Expect all of them to be sent.
+ size_t packets_sent = 0;
+ Timestamp end_time = Timestamp::PlusInfinity();
+ EXPECT_CALL(packet_router, SendPacket)
+ .WillRepeatedly([&](std::unique_ptr<RtpPacketToSend> packet,
+ const PacedPacketInfo& cluster_info) {
+ ++packets_sent;
+ if (packets_sent == kPacketsToSend) {
+ end_time = time_controller.GetClock()->CurrentTime();
+ }
+ });
+
+ const Timestamp start_time = time_controller.GetClock()->CurrentTime();
+
+ // Packets should be sent over a period of close to 1s. Expect a little
+ // lower than this since initial probing is a bit quicker.
+ time_controller.AdvanceTime(TimeDelta::Seconds(1));
+ EXPECT_EQ(packets_sent, kPacketsToSend);
+ ASSERT_TRUE(end_time.IsFinite());
+ EXPECT_NEAR((end_time - start_time).ms<double>(), 1000.0, 50.0);
}
- return packets;
-}
-} // namespace
-namespace test {
+ TEST(TaskQueuePacedSenderTest, ReschedulesProcessOnRateChange) {
+ GlobalSimulatedTimeController time_controller(Timestamp::Millis(1234));
+ MockPacketRouter packet_router;
+ TaskQueuePacedSenderForTest pacer(
+ time_controller.GetClock(), &packet_router,
+ /*event_log=*/nullptr,
+ /*field_trials=*/nullptr, time_controller.GetTaskQueueFactory(),
+ PacingController::kMinSleepTime);
+
+ // Insert a number of packets to be sent 200ms apart.
+ const size_t kPacketsPerSecond = 5;
+ const DataRate kPacingRate =
+ DataRate::BitsPerSec(kDefaultPacketSize * 8 * kPacketsPerSecond);
+ pacer.SetPacingRates(kPacingRate, DataRate::Zero());
+
+ // Send some initial packets to be rid of any probes.
+ EXPECT_CALL(packet_router, SendPacket).Times(kPacketsPerSecond);
+ pacer.EnqueuePackets(
+ GeneratePackets(RtpPacketMediaType::kVideo, kPacketsPerSecond));
+ time_controller.AdvanceTime(TimeDelta::Seconds(1));
+
+ // Insert three packets, and record send time of each of them.
+  // After the second packet is sent, double the send rate so we can
+  // check that the third packet is sent after half the wait time.
+ Timestamp first_packet_time = Timestamp::MinusInfinity();
+ Timestamp second_packet_time = Timestamp::MinusInfinity();
+ Timestamp third_packet_time = Timestamp::MinusInfinity();
+
+ EXPECT_CALL(packet_router, SendPacket)
+ .Times(3)
+ .WillRepeatedly([&](std::unique_ptr<RtpPacketToSend> packet,
+ const PacedPacketInfo& cluster_info) {
+ if (first_packet_time.IsInfinite()) {
+ first_packet_time = time_controller.GetClock()->CurrentTime();
+ } else if (second_packet_time.IsInfinite()) {
+ second_packet_time = time_controller.GetClock()->CurrentTime();
+ pacer.SetPacingRates(2 * kPacingRate, DataRate::Zero());
+ } else {
+ third_packet_time = time_controller.GetClock()->CurrentTime();
+ }
+ });
+
+ pacer.EnqueuePackets(GeneratePackets(RtpPacketMediaType::kVideo, 3));
+ time_controller.AdvanceTime(TimeDelta::Millis(500));
+ ASSERT_TRUE(third_packet_time.IsFinite());
+ EXPECT_NEAR((second_packet_time - first_packet_time).ms<double>(), 200.0,
+ 1.0);
+ EXPECT_NEAR((third_packet_time - second_packet_time).ms<double>(), 100.0,
+ 1.0);
+ }
-class TaskQueuePacedSenderTest : public ::testing::Test {
- public:
- TaskQueuePacedSenderTest()
- : time_controller_(Timestamp::Millis(1234)),
- pacer_(time_controller_.GetClock(),
- &packet_router_,
- /*event_log=*/nullptr,
- /*field_trials=*/nullptr,
- time_controller_.GetTaskQueueFactory()) {}
-
- protected:
- Timestamp CurrentTime() { return time_controller_.GetClock()->CurrentTime(); }
-
- GlobalSimulatedTimeController time_controller_;
- MockPacketRouter packet_router_;
- TaskQueuePacedSender pacer_;
-};
+ TEST(TaskQueuePacedSenderTest, SendsAudioImmediately) {
+ GlobalSimulatedTimeController time_controller(Timestamp::Millis(1234));
+ MockPacketRouter packet_router;
+ TaskQueuePacedSenderForTest pacer(
+ time_controller.GetClock(), &packet_router,
+ /*event_log=*/nullptr,
+ /*field_trials=*/nullptr, time_controller.GetTaskQueueFactory(),
+ PacingController::kMinSleepTime);
-TEST_F(TaskQueuePacedSenderTest, PacesPackets) {
- // Insert a number of packets, covering one second.
- static constexpr size_t kPacketsToSend = 42;
- pacer_.SetPacingRates(
- DataRate::BitsPerSec(kDefaultPacketSize * 8 * kPacketsToSend),
- DataRate::Zero());
- pacer_.EnqueuePackets(
- GeneratePackets(RtpPacketMediaType::kVideo, kPacketsToSend));
-
- // Expect all of them to be sent.
- size_t packets_sent = 0;
- Timestamp end_time = Timestamp::PlusInfinity();
- EXPECT_CALL(packet_router_, SendPacket)
- .WillRepeatedly([&](std::unique_ptr<RtpPacketToSend> packet,
- const PacedPacketInfo& cluster_info) {
- ++packets_sent;
- if (packets_sent == kPacketsToSend) {
- end_time = time_controller_.GetClock()->CurrentTime();
- }
- });
-
- const Timestamp start_time = time_controller_.GetClock()->CurrentTime();
-
- // Packets should be sent over a period of close to 1s. Expect a little lower
- // than this since initial probing is a bit quicker.
- time_controller_.AdvanceTime(TimeDelta::Seconds(1));
- EXPECT_EQ(packets_sent, kPacketsToSend);
- ASSERT_TRUE(end_time.IsFinite());
- EXPECT_NEAR((end_time - start_time).ms<double>(), 1000.0, 50.0);
-}
-
-TEST_F(TaskQueuePacedSenderTest, ReschedulesProcessOnRateChange) {
- // Insert a number of packets to be sent 200ms apart.
- const size_t kPacketsPerSecond = 5;
- const DataRate kPacingRate =
- DataRate::BitsPerSec(kDefaultPacketSize * 8 * kPacketsPerSecond);
- pacer_.SetPacingRates(kPacingRate, DataRate::Zero());
-
- // Send some initial packets to be rid of any probes.
- EXPECT_CALL(packet_router_, SendPacket).Times(kPacketsPerSecond);
- pacer_.EnqueuePackets(
- GeneratePackets(RtpPacketMediaType::kVideo, kPacketsPerSecond));
- time_controller_.AdvanceTime(TimeDelta::Seconds(1));
-
- // Insert three packets, and record send time of each of them.
- // After the second packet is sent, double the send rate so we can
- // check the third packets is sent after half the wait time.
- Timestamp first_packet_time = Timestamp::MinusInfinity();
- Timestamp second_packet_time = Timestamp::MinusInfinity();
- Timestamp third_packet_time = Timestamp::MinusInfinity();
-
- EXPECT_CALL(packet_router_, SendPacket)
- .Times(3)
- .WillRepeatedly([&](std::unique_ptr<RtpPacketToSend> packet,
- const PacedPacketInfo& cluster_info) {
- if (first_packet_time.IsInfinite()) {
- first_packet_time = CurrentTime();
- } else if (second_packet_time.IsInfinite()) {
- second_packet_time = CurrentTime();
- pacer_.SetPacingRates(2 * kPacingRate, DataRate::Zero());
- } else {
- third_packet_time = CurrentTime();
- }
- });
-
- pacer_.EnqueuePackets(GeneratePackets(RtpPacketMediaType::kVideo, 3));
- time_controller_.AdvanceTime(TimeDelta::Millis(500));
- ASSERT_TRUE(third_packet_time.IsFinite());
- EXPECT_NEAR((second_packet_time - first_packet_time).ms<double>(), 200.0,
- 1.0);
- EXPECT_NEAR((third_packet_time - second_packet_time).ms<double>(), 100.0,
- 1.0);
-}
-
-TEST_F(TaskQueuePacedSenderTest, SendsAudioImmediately) {
- const DataRate kPacingDataRate = DataRate::KilobitsPerSec(125);
- const DataSize kPacketSize = DataSize::Bytes(kDefaultPacketSize);
- const TimeDelta kPacketPacingTime = kPacketSize / kPacingDataRate;
-
- pacer_.SetPacingRates(kPacingDataRate, DataRate::Zero());
-
- // Add some initial video packets, only one should be sent.
- EXPECT_CALL(packet_router_, SendPacket);
- pacer_.EnqueuePackets(GeneratePackets(RtpPacketMediaType::kVideo, 10));
- time_controller_.AdvanceTime(TimeDelta::Zero());
- ::testing::Mock::VerifyAndClearExpectations(&packet_router_);
-
- // Advance time, but still before next packet should be sent.
- time_controller_.AdvanceTime(kPacketPacingTime / 2);
-
- // Insert an audio packet, it should be sent immediately.
- EXPECT_CALL(packet_router_, SendPacket);
- pacer_.EnqueuePackets(GeneratePackets(RtpPacketMediaType::kAudio, 1));
- time_controller_.AdvanceTime(TimeDelta::Zero());
- ::testing::Mock::VerifyAndClearExpectations(&packet_router_);
-}
-
-TEST(TaskQueuePacedSenderTestNew, RespectedMinTimeBetweenStatsUpdates) {
- GlobalSimulatedTimeController time_controller(Timestamp::Millis(1234));
- MockPacketRouter packet_router;
- TaskQueuePacedSenderForTest pacer(time_controller.GetClock(), &packet_router,
- /*event_log=*/nullptr,
- /*field_trials=*/nullptr,
- time_controller.GetTaskQueueFactory());
- const DataRate kPacingDataRate = DataRate::KilobitsPerSec(300);
- pacer.SetPacingRates(kPacingDataRate, DataRate::Zero());
-
- const TimeDelta kMinTimeBetweenStatsUpdates = TimeDelta::Millis(1);
-
- // Nothing inserted, no stats updates yet.
- EXPECT_EQ(pacer.num_stats_updates_, 0u);
-
- // Insert one packet, stats should be updated.
- pacer.EnqueuePackets(GeneratePackets(RtpPacketMediaType::kVideo, 1));
- time_controller.AdvanceTime(TimeDelta::Zero());
- EXPECT_EQ(pacer.num_stats_updates_, 1u);
-
- // Advance time half of the min stats update interval, and trigger a
- // refresh - stats should not be updated yet.
- time_controller.AdvanceTime(kMinTimeBetweenStatsUpdates / 2);
- pacer.EnqueuePackets({});
- time_controller.AdvanceTime(TimeDelta::Zero());
- EXPECT_EQ(pacer.num_stats_updates_, 1u);
-
- // Advance time the next half, now stats update is triggered.
- time_controller.AdvanceTime(kMinTimeBetweenStatsUpdates / 2);
- pacer.EnqueuePackets({});
- time_controller.AdvanceTime(TimeDelta::Zero());
- EXPECT_EQ(pacer.num_stats_updates_, 2u);
-}
-
-TEST(TaskQueuePacedSenderTestNew, ThrottlesStatsUpdates) {
- GlobalSimulatedTimeController time_controller(Timestamp::Millis(1234));
- MockPacketRouter packet_router;
- TaskQueuePacedSenderForTest pacer(time_controller.GetClock(), &packet_router,
- /*event_log=*/nullptr,
- /*field_trials=*/nullptr,
- time_controller.GetTaskQueueFactory());
-
- // Set rates so one packet adds 10ms of buffer level.
- const DataSize kPacketSize = DataSize::Bytes(kDefaultPacketSize);
- const TimeDelta kPacketPacingTime = TimeDelta::Millis(10);
- const DataRate kPacingDataRate = kPacketSize / kPacketPacingTime;
- const TimeDelta kMinTimeBetweenStatsUpdates = TimeDelta::Millis(1);
- const TimeDelta kMaxTimeBetweenStatsUpdates = TimeDelta::Millis(33);
-
- // Nothing inserted, no stats updates yet.
- size_t num_expected_stats_updates = 0;
- EXPECT_EQ(pacer.num_stats_updates_, num_expected_stats_updates);
- pacer.SetPacingRates(kPacingDataRate, DataRate::Zero());
- time_controller.AdvanceTime(kMinTimeBetweenStatsUpdates);
- // Updating pacing rates refreshes stats.
- EXPECT_EQ(pacer.num_stats_updates_, ++num_expected_stats_updates);
-
- // Record time when we insert first packet, this triggers the scheduled
- // stats updating.
- Clock* const clock = time_controller.GetClock();
- const Timestamp start_time = clock->CurrentTime();
-
- while (clock->CurrentTime() - start_time <=
- kMaxTimeBetweenStatsUpdates - kPacketPacingTime) {
- // Enqueue packet, expect stats update.
- pacer.EnqueuePackets(GeneratePackets(RtpPacketMediaType::kVideo, 1));
+ const DataRate kPacingDataRate = DataRate::KilobitsPerSec(125);
+ const DataSize kPacketSize = DataSize::Bytes(kDefaultPacketSize);
+ const TimeDelta kPacketPacingTime = kPacketSize / kPacingDataRate;
+
+ pacer.SetPacingRates(kPacingDataRate, DataRate::Zero());
+
+ // Add some initial video packets, only one should be sent.
+ EXPECT_CALL(packet_router, SendPacket);
+ pacer.EnqueuePackets(GeneratePackets(RtpPacketMediaType::kVideo, 10));
time_controller.AdvanceTime(TimeDelta::Zero());
- EXPECT_EQ(pacer.num_stats_updates_, ++num_expected_stats_updates);
+ ::testing::Mock::VerifyAndClearExpectations(&packet_router);
- // Advance time to halfway through pacing time, expect another stats
- // update.
+ // Advance time, but still before next packet should be sent.
time_controller.AdvanceTime(kPacketPacingTime / 2);
+
+ // Insert an audio packet, it should be sent immediately.
+ EXPECT_CALL(packet_router, SendPacket);
+ pacer.EnqueuePackets(GeneratePackets(RtpPacketMediaType::kAudio, 1));
+ time_controller.AdvanceTime(TimeDelta::Zero());
+ ::testing::Mock::VerifyAndClearExpectations(&packet_router);
+ }
+
+ TEST(TaskQueuePacedSenderTest, SleepsDuringCoalscingWindow) {
+ const TimeDelta kCoalescingWindow = TimeDelta::Millis(5);
+ GlobalSimulatedTimeController time_controller(Timestamp::Millis(1234));
+ MockPacketRouter packet_router;
+ TaskQueuePacedSenderForTest pacer(
+ time_controller.GetClock(), &packet_router,
+ /*event_log=*/nullptr,
+ /*field_trials=*/nullptr, time_controller.GetTaskQueueFactory(),
+ kCoalescingWindow);
+
+ // Set rates so one packet adds one ms of buffer level.
+ const DataSize kPacketSize = DataSize::Bytes(kDefaultPacketSize);
+ const TimeDelta kPacketPacingTime = TimeDelta::Millis(1);
+ const DataRate kPacingDataRate = kPacketSize / kPacketPacingTime;
+
+ pacer.SetPacingRates(kPacingDataRate, DataRate::Zero());
+
+ // Add 10 packets. The first should be sent immediately since the buffers
+ // are clear.
+ EXPECT_CALL(packet_router, SendPacket);
+ pacer.EnqueuePackets(GeneratePackets(RtpPacketMediaType::kVideo, 10));
+ time_controller.AdvanceTime(TimeDelta::Zero());
+ ::testing::Mock::VerifyAndClearExpectations(&packet_router);
+
+ // Advance time to 1ms before the coalescing window ends. No packets should
+ // be sent.
+ EXPECT_CALL(packet_router, SendPacket).Times(0);
+ time_controller.AdvanceTime(kCoalescingWindow - TimeDelta::Millis(1));
+
+  // Advance time to where coalescing window ends. All packets that should
+  // have been sent until now will be sent.
+ EXPECT_CALL(packet_router, SendPacket).Times(5);
+ time_controller.AdvanceTime(TimeDelta::Millis(1));
+ ::testing::Mock::VerifyAndClearExpectations(&packet_router);
+ }
+
+ TEST(TaskQueuePacedSenderTest, ProbingOverridesCoalescingWindow) {
+ const TimeDelta kCoalescingWindow = TimeDelta::Millis(5);
+ GlobalSimulatedTimeController time_controller(Timestamp::Millis(1234));
+ MockPacketRouter packet_router;
+ TaskQueuePacedSenderForTest pacer(
+ time_controller.GetClock(), &packet_router,
+ /*event_log=*/nullptr,
+ /*field_trials=*/nullptr, time_controller.GetTaskQueueFactory(),
+ kCoalescingWindow);
+
+ // Set rates so one packet adds one ms of buffer level.
+ const DataSize kPacketSize = DataSize::Bytes(kDefaultPacketSize);
+ const TimeDelta kPacketPacingTime = TimeDelta::Millis(1);
+ const DataRate kPacingDataRate = kPacketSize / kPacketPacingTime;
+
+ pacer.SetPacingRates(kPacingDataRate, DataRate::Zero());
+
+ // Add 10 packets. The first should be sent immediately since the buffers
+ // are clear. This will also trigger the probe to start.
+ EXPECT_CALL(packet_router, SendPacket).Times(AtLeast(1));
+ pacer.CreateProbeCluster(kPacingDataRate * 2, 17);
+ pacer.EnqueuePackets(GeneratePackets(RtpPacketMediaType::kVideo, 10));
+ time_controller.AdvanceTime(TimeDelta::Zero());
+ ::testing::Mock::VerifyAndClearExpectations(&packet_router);
+
+ // Advance time to 1ms before the coalescing window ends. Packets should be
+ // flying.
+ EXPECT_CALL(packet_router, SendPacket).Times(AtLeast(1));
+ time_controller.AdvanceTime(kCoalescingWindow - TimeDelta::Millis(1));
+ }
+
+ TEST(TaskQueuePacedSenderTest, RespectedMinTimeBetweenStatsUpdates) {
+ const TimeDelta kCoalescingWindow = TimeDelta::Millis(5);
+ GlobalSimulatedTimeController time_controller(Timestamp::Millis(1234));
+ MockPacketRouter packet_router;
+ TaskQueuePacedSenderForTest pacer(
+ time_controller.GetClock(), &packet_router,
+ /*event_log=*/nullptr,
+ /*field_trials=*/nullptr, time_controller.GetTaskQueueFactory(),
+ kCoalescingWindow);
+ const DataRate kPacingDataRate = DataRate::KilobitsPerSec(300);
+ pacer.SetPacingRates(kPacingDataRate, DataRate::Zero());
+
+ const TimeDelta kMinTimeBetweenStatsUpdates = TimeDelta::Millis(1);
+
+ // Nothing inserted, no stats updates yet.
+ EXPECT_EQ(pacer.num_stats_updates_, 0u);
+
+ // Insert one packet, stats should be updated.
+ pacer.EnqueuePackets(GeneratePackets(RtpPacketMediaType::kVideo, 1));
+ time_controller.AdvanceTime(TimeDelta::Zero());
+ EXPECT_EQ(pacer.num_stats_updates_, 1u);
+
+ // Advance time half of the min stats update interval, and trigger a
+ // refresh - stats should not be updated yet.
+ time_controller.AdvanceTime(kMinTimeBetweenStatsUpdates / 2);
pacer.EnqueuePackets({});
time_controller.AdvanceTime(TimeDelta::Zero());
- EXPECT_EQ(pacer.num_stats_updates_, ++num_expected_stats_updates);
+ EXPECT_EQ(pacer.num_stats_updates_, 1u);
- // Advance time the rest of the way.
- time_controller.AdvanceTime(kPacketPacingTime / 2);
+ // Advance time the next half, now stats update is triggered.
+ time_controller.AdvanceTime(kMinTimeBetweenStatsUpdates / 2);
+ pacer.EnqueuePackets({});
+ time_controller.AdvanceTime(TimeDelta::Zero());
+ EXPECT_EQ(pacer.num_stats_updates_, 2u);
}
- // At this point, the pace queue is drained so there is no more intersting
- // update to be made - but there is still as schduled task that should run
- // |kMaxTimeBetweenStatsUpdates| after the first update.
- time_controller.AdvanceTime(start_time + kMaxTimeBetweenStatsUpdates -
- clock->CurrentTime());
- EXPECT_EQ(pacer.num_stats_updates_, ++num_expected_stats_updates);
-
- // Advance time a significant time - don't expect any more calls as stats
- // updating does not happen when queue is drained.
- time_controller.AdvanceTime(TimeDelta::Millis(400));
- EXPECT_EQ(pacer.num_stats_updates_, num_expected_stats_updates);
-}
+ TEST(TaskQueuePacedSenderTest, ThrottlesStatsUpdates) {
+ const TimeDelta kCoalescingWindow = TimeDelta::Millis(5);
+ GlobalSimulatedTimeController time_controller(Timestamp::Millis(1234));
+ MockPacketRouter packet_router;
+ TaskQueuePacedSenderForTest pacer(
+ time_controller.GetClock(), &packet_router,
+ /*event_log=*/nullptr,
+ /*field_trials=*/nullptr, time_controller.GetTaskQueueFactory(),
+ kCoalescingWindow);
+
+ // Set rates so one packet adds 10ms of buffer level.
+ const DataSize kPacketSize = DataSize::Bytes(kDefaultPacketSize);
+ const TimeDelta kPacketPacingTime = TimeDelta::Millis(10);
+ const DataRate kPacingDataRate = kPacketSize / kPacketPacingTime;
+ const TimeDelta kMinTimeBetweenStatsUpdates = TimeDelta::Millis(1);
+ const TimeDelta kMaxTimeBetweenStatsUpdates = TimeDelta::Millis(33);
+
+ // Nothing inserted, no stats updates yet.
+ size_t num_expected_stats_updates = 0;
+ EXPECT_EQ(pacer.num_stats_updates_, num_expected_stats_updates);
+ pacer.SetPacingRates(kPacingDataRate, DataRate::Zero());
+ time_controller.AdvanceTime(kMinTimeBetweenStatsUpdates);
+ // Updating pacing rates refreshes stats.
+ EXPECT_EQ(pacer.num_stats_updates_, ++num_expected_stats_updates);
+
+ // Record time when we insert first packet, this triggers the scheduled
+ // stats updating.
+ Clock* const clock = time_controller.GetClock();
+ const Timestamp start_time = clock->CurrentTime();
+
+ while (clock->CurrentTime() - start_time <=
+ kMaxTimeBetweenStatsUpdates - kPacketPacingTime) {
+ // Enqueue packet, expect stats update.
+ pacer.EnqueuePackets(GeneratePackets(RtpPacketMediaType::kVideo, 1));
+ time_controller.AdvanceTime(TimeDelta::Zero());
+ EXPECT_EQ(pacer.num_stats_updates_, ++num_expected_stats_updates);
+
+ // Advance time to halfway through pacing time, expect another stats
+ // update.
+ time_controller.AdvanceTime(kPacketPacingTime / 2);
+ pacer.EnqueuePackets({});
+ time_controller.AdvanceTime(TimeDelta::Zero());
+ EXPECT_EQ(pacer.num_stats_updates_, ++num_expected_stats_updates);
+
+ // Advance time the rest of the way.
+ time_controller.AdvanceTime(kPacketPacingTime / 2);
+ }
+
+  // At this point, the pace queue is drained so there is no more interesting
+  // update to be made - but there is still a scheduled task that should run
+  // |kMaxTimeBetweenStatsUpdates| after the first update.
+ time_controller.AdvanceTime(start_time + kMaxTimeBetweenStatsUpdates -
+ clock->CurrentTime());
+ EXPECT_EQ(pacer.num_stats_updates_, ++num_expected_stats_updates);
+
+ // Advance time a significant time - don't expect any more calls as stats
+ // updating does not happen when queue is drained.
+ time_controller.AdvanceTime(TimeDelta::Millis(400));
+ EXPECT_EQ(pacer.num_stats_updates_, num_expected_stats_updates);
+ }
} // namespace test
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/BUILD.gn b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/BUILD.gn
index d7b0397ea5c..f5df205d7b6 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/BUILD.gn
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/BUILD.gn
@@ -59,6 +59,8 @@ rtc_library("remote_bitrate_estimator") {
"../../system_wrappers",
"../../system_wrappers:field_trial",
"../../system_wrappers:metrics",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -76,6 +78,8 @@ if (!build_with_chromium) {
"../../rtc_base:rtc_base_approved",
"../../test:rtp_test_utils",
"../rtp_rtcp",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/flags:flag",
"//third_party/abseil-cpp/absl/flags:parse",
]
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc
index f7e8ffc9fca..da995922d9d 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc
@@ -65,9 +65,10 @@ std::vector<int64_t> TimestampsMs(
class MockTransportFeedbackSender : public TransportFeedbackSenderInterface {
public:
- MOCK_METHOD1(
- SendCombinedRtcpPacket,
- bool(std::vector<std::unique_ptr<rtcp::RtcpPacket>> feedback_packets));
+ MOCK_METHOD(bool,
+ SendCombinedRtcpPacket,
+ (std::vector<std::unique_ptr<rtcp::RtcpPacket>> feedback_packets),
+ (override));
};
class RemoteEstimatorProxyTest : public ::testing::Test {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/BUILD.gn b/chromium/third_party/webrtc/modules/rtp_rtcp/BUILD.gn
index 0ac6900e656..0446799fb7b 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/BUILD.gn
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/BUILD.gn
@@ -118,6 +118,8 @@ rtc_library("rtp_rtcp_format") {
"../../rtc_base/system:unused",
"../../system_wrappers",
"../video_coding:codec_globals_headers",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
@@ -132,14 +134,18 @@ rtc_library("rtp_rtcp") {
"include/flexfec_sender.h",
"include/receive_statistics.h",
"include/remote_ntp_time_estimator.h",
- "include/rtp_rtcp.h",
+ "include/rtp_rtcp.h", # deprecated
"include/ulpfec_receiver.h",
"source/absolute_capture_time_receiver.cc",
"source/absolute_capture_time_receiver.h",
"source/absolute_capture_time_sender.cc",
"source/absolute_capture_time_sender.h",
+ "source/active_decode_targets_helper.cc",
+ "source/active_decode_targets_helper.h",
"source/create_video_rtp_depacketizer.cc",
"source/create_video_rtp_depacketizer.h",
+ "source/deprecated/deprecated_rtp_sender_egress.cc",
+ "source/deprecated/deprecated_rtp_sender_egress.h",
"source/dtmf_queue.cc",
"source/dtmf_queue.h",
"source/fec_private_tables_bursty.cc",
@@ -186,6 +192,9 @@ rtc_library("rtp_rtcp") {
"source/rtp_rtcp_config.h",
"source/rtp_rtcp_impl.cc",
"source/rtp_rtcp_impl.h",
+ "source/rtp_rtcp_impl2.cc",
+ "source/rtp_rtcp_impl2.h",
+ "source/rtp_rtcp_interface.h",
"source/rtp_sender.cc",
"source/rtp_sender.h",
"source/rtp_sender_audio.cc",
@@ -292,6 +301,8 @@ rtc_library("rtp_rtcp") {
"../../system_wrappers:metrics",
"../remote_bitrate_estimator",
"../video_coding:codec_globals_headers",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/base:core_headers",
"//third_party/abseil-cpp/absl/container:inlined_vector",
@@ -328,6 +339,8 @@ rtc_library("rtcp_transceiver") {
"../../rtc_base/task_utils:repeating_task",
"../../rtc_base/task_utils:to_queued_task",
"../../system_wrappers",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/types:optional",
@@ -347,6 +360,8 @@ rtc_library("rtp_video_header") {
"../../api/video:video_frame_type",
"../../api/video:video_rtp_headers",
"../../modules/video_coding:codec_globals_headers",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/container:inlined_vector",
"//third_party/abseil-cpp/absl/types:optional",
"//third_party/abseil-cpp/absl/types:variant",
@@ -384,8 +399,8 @@ rtc_library("mock_rtp_rtcp") {
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
"../../test:test_support",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
if (rtc_include_tests) {
@@ -425,6 +440,7 @@ if (rtc_include_tests) {
sources = [
"source/absolute_capture_time_receiver_unittest.cc",
"source/absolute_capture_time_sender_unittest.cc",
+ "source/active_decode_targets_helper_unittest.cc",
"source/byte_io_unittest.cc",
"source/fec_private_tables_bursty_unittest.cc",
"source/flexfec_header_reader_writer_unittest.cc",
@@ -463,6 +479,7 @@ if (rtc_include_tests) {
"source/rtcp_sender_unittest.cc",
"source/rtcp_transceiver_impl_unittest.cc",
"source/rtcp_transceiver_unittest.cc",
+ "source/rtp_dependency_descriptor_extension_unittest.cc",
"source/rtp_fec_unittest.cc",
"source/rtp_format_h264_unittest.cc",
"source/rtp_format_unittest.cc",
@@ -477,6 +494,7 @@ if (rtc_include_tests) {
"source/rtp_packet_history_unittest.cc",
"source/rtp_packet_unittest.cc",
"source/rtp_packetizer_av1_unittest.cc",
+ "source/rtp_rtcp_impl2_unittest.cc",
"source/rtp_rtcp_impl_unittest.cc",
"source/rtp_sender_audio_unittest.cc",
"source/rtp_sender_unittest.cc",
@@ -541,6 +559,8 @@ if (rtc_include_tests) {
"../../test:test_common",
"../../test:test_support",
"../video_coding:codec_globals_headers",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/base:core_headers",
"//third_party/abseil-cpp/absl/memory",
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_header_extension_map.h b/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_header_extension_map.h
index 360a619f82f..ff2d34d60d9 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_header_extension_map.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_header_extension_map.h
@@ -51,10 +51,6 @@ class RtpHeaderExtensionMap {
return ids_[type];
}
- // TODO(danilchap): Remove use of the functions below.
- RTC_DEPRECATED int32_t Register(RTPExtensionType type, int id) {
- return RegisterByType(id, type) ? 0 : -1;
- }
int32_t Deregister(RTPExtensionType type);
void Deregister(absl::string_view uri);
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_rtcp.h b/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_rtcp.h
index f91f0d13a33..8663296eba0 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_rtcp.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_rtcp.h
@@ -12,456 +12,70 @@
#define MODULES_RTP_RTCP_INCLUDE_RTP_RTCP_H_
#include <memory>
-#include <set>
#include <string>
-#include <utility>
#include <vector>
-#include "absl/strings/string_view.h"
-#include "absl/types/optional.h"
-#include "api/frame_transformer_interface.h"
-#include "api/scoped_refptr.h"
-#include "api/transport/webrtc_key_value_config.h"
-#include "api/video/video_bitrate_allocation.h"
#include "modules/include/module.h"
-#include "modules/rtp_rtcp/include/receive_statistics.h"
-#include "modules/rtp_rtcp/include/report_block_data.h"
-#include "modules/rtp_rtcp/include/rtp_packet_sender.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
-#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
-#include "modules/rtp_rtcp/source/rtp_sequence_number_map.h"
-#include "modules/rtp_rtcp/source/video_fec_generator.h"
-#include "rtc_base/constructor_magic.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h"
#include "rtc_base/deprecation.h"
namespace webrtc {
-// Forward declarations.
-class FrameEncryptorInterface;
-class RateLimiter;
-class ReceiveStatisticsProvider;
-class RemoteBitrateEstimator;
-class RtcEventLog;
-class RTPSender;
-class Transport;
-class VideoBitrateAllocationObserver;
-
-namespace rtcp {
-class TransportFeedback;
-}
-
-class RtpRtcp : public Module, public RtcpFeedbackSenderInterface {
+// DEPRECATED. Do not use.
+class RtpRtcp : public Module, public RtpRtcpInterface {
public:
- struct Configuration {
- Configuration();
- Configuration(Configuration&& rhs);
-
- // True for a audio version of the RTP/RTCP module object false will create
- // a video version.
- bool audio = false;
- bool receiver_only = false;
-
- // The clock to use to read time. If nullptr then system clock will be used.
- Clock* clock = nullptr;
-
- ReceiveStatisticsProvider* receive_statistics = nullptr;
-
- // Transport object that will be called when packets are ready to be sent
- // out on the network.
- Transport* outgoing_transport = nullptr;
-
- // Called when the receiver requests an intra frame.
- RtcpIntraFrameObserver* intra_frame_callback = nullptr;
-
- // Called when the receiver sends a loss notification.
- RtcpLossNotificationObserver* rtcp_loss_notification_observer = nullptr;
-
- // Called when we receive a changed estimate from the receiver of out
- // stream.
- RtcpBandwidthObserver* bandwidth_callback = nullptr;
-
- NetworkStateEstimateObserver* network_state_estimate_observer = nullptr;
- TransportFeedbackObserver* transport_feedback_callback = nullptr;
- VideoBitrateAllocationObserver* bitrate_allocation_observer = nullptr;
- RtcpRttStats* rtt_stats = nullptr;
- RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer = nullptr;
- // Called on receipt of RTCP report block from remote side.
- // TODO(bugs.webrtc.org/10678): Remove RtcpStatisticsCallback in
- // favor of ReportBlockDataObserver.
- // TODO(bugs.webrtc.org/10679): Consider whether we want to use
- // only getters or only callbacks. If we decide on getters, the
- // ReportBlockDataObserver should also be removed in favor of
- // GetLatestReportBlockData().
- RtcpStatisticsCallback* rtcp_statistics_callback = nullptr;
- RtcpCnameCallback* rtcp_cname_callback = nullptr;
- ReportBlockDataObserver* report_block_data_observer = nullptr;
-
- // Estimates the bandwidth available for a set of streams from the same
- // client.
- RemoteBitrateEstimator* remote_bitrate_estimator = nullptr;
-
- // Spread any bursts of packets into smaller bursts to minimize packet loss.
- RtpPacketSender* paced_sender = nullptr;
-
- // Generates FEC packets.
- // TODO(sprang): Wire up to RtpSenderEgress.
- VideoFecGenerator* fec_generator = nullptr;
-
- BitrateStatisticsObserver* send_bitrate_observer = nullptr;
- SendSideDelayObserver* send_side_delay_observer = nullptr;
- RtcEventLog* event_log = nullptr;
- SendPacketObserver* send_packet_observer = nullptr;
- RateLimiter* retransmission_rate_limiter = nullptr;
- StreamDataCountersCallback* rtp_stats_callback = nullptr;
-
- int rtcp_report_interval_ms = 0;
-
- // Update network2 instead of pacer_exit field of video timing extension.
- bool populate_network2_timestamp = false;
-
- rtc::scoped_refptr<FrameTransformerInterface> frame_transformer;
-
- // E2EE Custom Video Frame Encryption
- FrameEncryptorInterface* frame_encryptor = nullptr;
- // Require all outgoing frames to be encrypted with a FrameEncryptor.
- bool require_frame_encryption = false;
-
- // Corresponds to extmap-allow-mixed in SDP negotiation.
- bool extmap_allow_mixed = false;
-
- // If true, the RTP sender will always annotate outgoing packets with
- // MID and RID header extensions, if provided and negotiated.
- // If false, the RTP sender will stop sending MID and RID header extensions,
- // when it knows that the receiver is ready to demux based on SSRC. This is
- // done by RTCP RR acking.
- bool always_send_mid_and_rid = false;
-
- // If set, field trials are read from |field_trials|, otherwise
- // defaults to webrtc::FieldTrialBasedConfig.
- const WebRtcKeyValueConfig* field_trials = nullptr;
-
- // SSRCs for media and retransmission, respectively.
- // FlexFec SSRC is fetched from |flexfec_sender|.
- uint32_t local_media_ssrc = 0;
- absl::optional<uint32_t> rtx_send_ssrc;
-
- bool need_rtp_packet_infos = false;
-
- // If true, the RTP packet history will select RTX packets based on
- // heuristics such as send time, retransmission count etc, in order to
- // make padding potentially more useful.
- // If false, the last packet will always be picked. This may reduce CPU
- // overhead.
- bool enable_rtx_padding_prioritization = true;
-
- private:
- RTC_DISALLOW_COPY_AND_ASSIGN(Configuration);
- };
-
- // Creates an RTP/RTCP module object using provided |configuration|.
- static std::unique_ptr<RtpRtcp> Create(const Configuration& configuration);
+ // Instantiates a deprecated version of the RtpRtcp module.
+ static std::unique_ptr<RtpRtcp> RTC_DEPRECATED
+ Create(const Configuration& configuration) {
+ return DEPRECATED_Create(configuration);
+ }
- // **************************************************************************
- // Receiver functions
- // **************************************************************************
+ static std::unique_ptr<RtpRtcp> DEPRECATED_Create(
+ const Configuration& configuration);
- virtual void IncomingRtcpPacket(const uint8_t* incoming_packet,
- size_t incoming_packet_length) = 0;
-
- virtual void SetRemoteSSRC(uint32_t ssrc) = 0;
-
- // **************************************************************************
- // Sender
- // **************************************************************************
-
- // Sets the maximum size of an RTP packet, including RTP headers.
- virtual void SetMaxRtpPacketSize(size_t size) = 0;
+ // (TMMBR) Temporary Max Media Bit Rate
+ RTC_DEPRECATED virtual bool TMMBR() const = 0;
- // Returns max RTP packet size. Takes into account RTP headers and
- // FEC/ULP/RED overhead (when FEC is enabled).
- virtual size_t MaxRtpPacketSize() const = 0;
+ RTC_DEPRECATED virtual void SetTMMBRStatus(bool enable) = 0;
- virtual void RegisterSendPayloadFrequency(int payload_type,
- int payload_frequency) = 0;
+ // Returns -1 on failure else 0.
+ RTC_DEPRECATED virtual int32_t AddMixedCNAME(uint32_t ssrc,
+ const char* cname) = 0;
- // Unregisters a send payload.
- // |payload_type| - payload type of codec
// Returns -1 on failure else 0.
- virtual int32_t DeRegisterSendPayload(int8_t payload_type) = 0;
+ RTC_DEPRECATED virtual int32_t RemoveMixedCNAME(uint32_t ssrc) = 0;
- virtual void SetExtmapAllowMixed(bool extmap_allow_mixed) = 0;
+ // Returns remote CName.
+ // Returns -1 on failure else 0.
+ RTC_DEPRECATED virtual int32_t RemoteCNAME(
+ uint32_t remote_ssrc,
+ char cname[RTCP_CNAME_SIZE]) const = 0;
// (De)registers RTP header extension type and id.
// Returns -1 on failure else 0.
RTC_DEPRECATED virtual int32_t RegisterSendRtpHeaderExtension(
RTPExtensionType type,
uint8_t id) = 0;
- // Register extension by uri, triggers CHECK on falure.
- virtual void RegisterRtpHeaderExtension(absl::string_view uri, int id) = 0;
-
- virtual int32_t DeregisterSendRtpHeaderExtension(RTPExtensionType type) = 0;
- virtual void DeregisterSendRtpHeaderExtension(absl::string_view uri) = 0;
-
- // Returns true if RTP module is send media, and any of the extensions
- // required for bandwidth estimation is registered.
- virtual bool SupportsPadding() const = 0;
- // Same as SupportsPadding(), but additionally requires that
- // SetRtxSendStatus() has been called with the kRtxRedundantPayloads option
- // enabled.
- virtual bool SupportsRtxPayloadPadding() const = 0;
-
- // Returns start timestamp.
- virtual uint32_t StartTimestamp() const = 0;
-
- // Sets start timestamp. Start timestamp is set to a random value if this
- // function is never called.
- virtual void SetStartTimestamp(uint32_t timestamp) = 0;
-
- // Returns SequenceNumber.
- virtual uint16_t SequenceNumber() const = 0;
-
- // Sets SequenceNumber, default is a random number.
- virtual void SetSequenceNumber(uint16_t seq) = 0;
-
- virtual void SetRtpState(const RtpState& rtp_state) = 0;
- virtual void SetRtxState(const RtpState& rtp_state) = 0;
- virtual RtpState GetRtpState() const = 0;
- virtual RtpState GetRtxState() const = 0;
-
- // Returns SSRC.
- virtual uint32_t SSRC() const = 0;
-
- // Sets the value for sending in the RID (and Repaired) RTP header extension.
- // RIDs are used to identify an RTP stream if SSRCs are not negotiated.
- // If the RID and Repaired RID extensions are not registered, the RID will
- // not be sent.
- virtual void SetRid(const std::string& rid) = 0;
-
- // Sets the value for sending in the MID RTP header extension.
- // The MID RTP header extension should be registered for this to do anything.
- // Once set, this value can not be changed or removed.
- virtual void SetMid(const std::string& mid) = 0;
-
- // Sets CSRC.
- // |csrcs| - vector of CSRCs
- virtual void SetCsrcs(const std::vector<uint32_t>& csrcs) = 0;
-
- // Turns on/off sending RTX (RFC 4588). The modes can be set as a combination
- // of values of the enumerator RtxMode.
- virtual void SetRtxSendStatus(int modes) = 0;
-
- // Returns status of sending RTX (RFC 4588). The returned value can be
- // a combination of values of the enumerator RtxMode.
- virtual int RtxSendStatus() const = 0;
-
- // Returns the SSRC used for RTX if set, otherwise a nullopt.
- virtual absl::optional<uint32_t> RtxSsrc() const = 0;
-
- // Sets the payload type to use when sending RTX packets. Note that this
- // doesn't enable RTX, only the payload type is set.
- virtual void SetRtxSendPayloadType(int payload_type,
- int associated_payload_type) = 0;
-
- // Returns the FlexFEC SSRC, if there is one.
- virtual absl::optional<uint32_t> FlexfecSsrc() const = 0;
-
- // Sets sending status. Sends kRtcpByeCode when going from true to false.
- // Returns -1 on failure else 0.
- virtual int32_t SetSendingStatus(bool sending) = 0;
-
- // Returns current sending status.
- virtual bool Sending() const = 0;
-
- // Starts/Stops media packets. On by default.
- virtual void SetSendingMediaStatus(bool sending) = 0;
-
- // Returns current media sending status.
- virtual bool SendingMedia() const = 0;
-
- // Returns whether audio is configured (i.e. Configuration::audio = true).
- virtual bool IsAudioConfigured() const = 0;
-
- // Indicate that the packets sent by this module should be counted towards the
- // bitrate estimate since the stream participates in the bitrate allocation.
- virtual void SetAsPartOfAllocation(bool part_of_allocation) = 0;
-
- // TODO(sprang): Remove when all call sites have been moved to
- // GetSendRates(). Fetches the current send bitrates in bits/s.
- virtual void BitrateSent(uint32_t* total_rate,
- uint32_t* video_rate,
- uint32_t* fec_rate,
- uint32_t* nack_rate) const = 0;
-
- // Returns bitrate sent (post-pacing) per packet type.
- virtual RtpSendRates GetSendRates() const = 0;
-
- virtual RTPSender* RtpSender() = 0;
- virtual const RTPSender* RtpSender() const = 0;
-
- // Record that a frame is about to be sent. Returns true on success, and false
- // if the module isn't ready to send.
- virtual bool OnSendingRtpFrame(uint32_t timestamp,
- int64_t capture_time_ms,
- int payload_type,
- bool force_sender_report) = 0;
-
- // Try to send the provided packet. Returns true iff packet matches any of
- // the SSRCs for this module (media/rtx/fec etc) and was forwarded to the
- // transport.
- virtual bool TrySendPacket(RtpPacketToSend* packet,
- const PacedPacketInfo& pacing_info) = 0;
-
- virtual void OnPacketsAcknowledged(
- rtc::ArrayView<const uint16_t> sequence_numbers) = 0;
-
- virtual std::vector<std::unique_ptr<RtpPacketToSend>> GeneratePadding(
- size_t target_size_bytes) = 0;
-
- virtual std::vector<RtpSequenceNumberMap::Info> GetSentRtpPacketInfos(
- rtc::ArrayView<const uint16_t> sequence_numbers) const = 0;
-
- // Returns an expected per packet overhead representing the main RTP header,
- // any CSRCs, and the registered header extensions that are expected on all
- // packets (i.e. disregarding things like abs capture time which is only
- // populated on a subset of packets, but counting MID/RID type extensions
- // when we expect to send them).
- virtual size_t ExpectedPerPacketOverhead() const = 0;
-
- // **************************************************************************
- // RTCP
- // **************************************************************************
-
- // Returns RTCP status.
- virtual RtcpMode RTCP() const = 0;
-
- // Sets RTCP status i.e on(compound or non-compound)/off.
- // |method| - RTCP method to use.
- virtual void SetRTCPStatus(RtcpMode method) = 0;
-
- // Sets RTCP CName (i.e unique identifier).
- // Returns -1 on failure else 0.
- virtual int32_t SetCNAME(const char* cname) = 0;
-
- // Returns remote CName.
- // Returns -1 on failure else 0.
- virtual int32_t RemoteCNAME(uint32_t remote_ssrc,
- char cname[RTCP_CNAME_SIZE]) const = 0;
-
- // Returns remote NTP.
- // Returns -1 on failure else 0.
- virtual int32_t RemoteNTP(uint32_t* received_ntp_secs,
- uint32_t* received_ntp_frac,
- uint32_t* rtcp_arrival_time_secs,
- uint32_t* rtcp_arrival_time_frac,
- uint32_t* rtcp_timestamp) const = 0;
-
- // Returns -1 on failure else 0.
- virtual int32_t AddMixedCNAME(uint32_t ssrc, const char* cname) = 0;
-
- // Returns -1 on failure else 0.
- virtual int32_t RemoveMixedCNAME(uint32_t ssrc) = 0;
-
- // Returns current RTT (round-trip time) estimate.
- // Returns -1 on failure else 0.
- virtual int32_t RTT(uint32_t remote_ssrc,
- int64_t* rtt,
- int64_t* avg_rtt,
- int64_t* min_rtt,
- int64_t* max_rtt) const = 0;
-
- // Returns the estimated RTT, with fallback to a default value.
- virtual int64_t ExpectedRetransmissionTimeMs() const = 0;
-
- // Forces a send of a RTCP packet. Periodic SR and RR are triggered via the
- // process function.
- // Returns -1 on failure else 0.
- virtual int32_t SendRTCP(RTCPPacketType rtcp_packet_type) = 0;
-
- // Returns statistics of the amount of data sent.
- // Returns -1 on failure else 0.
- virtual int32_t DataCountersRTP(size_t* bytes_sent,
- uint32_t* packets_sent) const = 0;
-
- // Returns send statistics for the RTP and RTX stream.
- virtual void GetSendStreamDataCounters(
- StreamDataCounters* rtp_counters,
- StreamDataCounters* rtx_counters) const = 0;
-
- // Returns received RTCP report block.
- // Returns -1 on failure else 0.
- // TODO(https://crbug.com/webrtc/10678): Remove this in favor of
- // GetLatestReportBlockData().
- virtual int32_t RemoteRTCPStat(
- std::vector<RTCPReportBlock>* receive_blocks) const = 0;
- // A snapshot of Report Blocks with additional data of interest to statistics.
- // Within this list, the sender-source SSRC pair is unique and per-pair the
- // ReportBlockData represents the latest Report Block that was received for
- // that pair.
- virtual std::vector<ReportBlockData> GetLatestReportBlockData() const = 0;
// (APP) Sets application specific data.
// Returns -1 on failure else 0.
- virtual int32_t SetRTCPApplicationSpecificData(uint8_t sub_type,
- uint32_t name,
- const uint8_t* data,
- uint16_t length) = 0;
- // (XR) Sets Receiver Reference Time Report (RTTR) status.
- virtual void SetRtcpXrRrtrStatus(bool enable) = 0;
-
- // Returns current Receiver Reference Time Report (RTTR) status.
- virtual bool RtcpXrRrtrStatus() const = 0;
+ RTC_DEPRECATED virtual int32_t SetRTCPApplicationSpecificData(
+ uint8_t sub_type,
+ uint32_t name,
+ const uint8_t* data,
+ uint16_t length) = 0;
- // (REMB) Receiver Estimated Max Bitrate.
- // Schedules sending REMB on next and following sender/receiver reports.
- void SetRemb(int64_t bitrate_bps, std::vector<uint32_t> ssrcs) override = 0;
- // Stops sending REMB on next and following sender/receiver reports.
- void UnsetRemb() override = 0;
-
- // (TMMBR) Temporary Max Media Bit Rate
- virtual bool TMMBR() const = 0;
-
- virtual void SetTMMBRStatus(bool enable) = 0;
-
- // (NACK)
-
- // Sends a Negative acknowledgement packet.
+ // Returns statistics of the amount of data sent.
// Returns -1 on failure else 0.
- // TODO(philipel): Deprecate this and start using SendNack instead, mostly
- // because we want a function that actually send NACK for the specified
- // packets.
- virtual int32_t SendNACK(const uint16_t* nack_list, uint16_t size) = 0;
-
- // Sends NACK for the packets specified.
- // Note: This assumes the caller keeps track of timing and doesn't rely on
- // the RTP module to do this.
- virtual void SendNack(const std::vector<uint16_t>& sequence_numbers) = 0;
-
- // Store the sent packets, needed to answer to a Negative acknowledgment
- // requests.
- virtual void SetStorePacketsStatus(bool enable, uint16_t numberToStore) = 0;
-
- // Returns true if the module is configured to store packets.
- virtual bool StorePackets() const = 0;
-
- virtual void SetVideoBitrateAllocation(
- const VideoBitrateAllocation& bitrate) = 0;
-
- // **************************************************************************
- // Video
- // **************************************************************************
+ RTC_DEPRECATED virtual int32_t DataCountersRTP(
+ size_t* bytes_sent,
+ uint32_t* packets_sent) const = 0;
// Requests new key frame.
// using PLI, https://tools.ietf.org/html/rfc4585#section-6.3.1.1
void SendPictureLossIndication() { SendRTCP(kRtcpPli); }
// using FIR, https://tools.ietf.org/html/rfc5104#section-4.3.1.2
void SendFullIntraRequest() { SendRTCP(kRtcpFir); }
-
- // Sends a LossNotification RTCP message.
- // Returns -1 on failure else 0.
- virtual int32_t SendLossNotification(uint16_t last_decoded_seq_num,
- uint16_t last_received_seq_num,
- bool decodability_flag,
- bool buffering_allowed) = 0;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h b/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h
index 049ff5c506d..1b72236bbb3 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h
@@ -65,7 +65,6 @@ enum RTPExtensionType : int {
kRtpExtensionPlayoutDelay,
kRtpExtensionVideoContentType,
kRtpExtensionVideoTiming,
- kRtpExtensionFrameMarking,
kRtpExtensionRtpStreamId,
kRtpExtensionRepairedRtpStreamId,
kRtpExtensionMid,
@@ -91,7 +90,6 @@ enum RTCPPacketType : uint32_t {
kRtcpTmmbr = 0x0100,
kRtcpTmmbn = 0x0200,
kRtcpSrReq = 0x0400,
- kRtcpApp = 0x1000,
kRtcpLossNotification = 0x2000,
kRtcpRemb = 0x10000,
kRtcpTransmissionTimeOffset = 0x20000,
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/mocks/mock_rtcp_rtt_stats.h b/chromium/third_party/webrtc/modules/rtp_rtcp/mocks/mock_rtcp_rtt_stats.h
index 5b1585fa0fb..e9a7d526916 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/mocks/mock_rtcp_rtt_stats.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/mocks/mock_rtcp_rtt_stats.h
@@ -19,7 +19,7 @@ namespace webrtc {
class MockRtcpRttStats : public RtcpRttStats {
public:
MOCK_METHOD(void, OnRttUpdate, (int64_t rtt), (override));
- MOCK_METHOD(int64_t, LastProcessedRtt, (), (const override));
+ MOCK_METHOD(int64_t, LastProcessedRtt, (), (const, override));
};
} // namespace webrtc
#endif // MODULES_RTP_RTCP_MOCKS_MOCK_RTCP_RTT_STATS_H_
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h b/chromium/third_party/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
index 5a333fe8470..08b38eee7b6 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
@@ -20,14 +20,14 @@
#include "absl/types/optional.h"
#include "api/video/video_bitrate_allocation.h"
#include "modules/include/module.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h"
#include "test/gmock.h"
namespace webrtc {
-class MockRtpRtcp : public RtpRtcp {
+class MockRtpRtcpInterface : public RtpRtcpInterface {
public:
MOCK_METHOD(void,
IncomingRtcpPacket,
@@ -35,7 +35,7 @@ class MockRtpRtcp : public RtpRtcp {
(override));
MOCK_METHOD(void, SetRemoteSSRC, (uint32_t ssrc), (override));
MOCK_METHOD(void, SetMaxRtpPacketSize, (size_t size), (override));
- MOCK_METHOD(size_t, MaxRtpPacketSize, (), (const override));
+ MOCK_METHOD(size_t, MaxRtpPacketSize, (), (const, override));
MOCK_METHOD(void,
RegisterSendPayloadFrequency,
(int payload_type, int frequency),
@@ -45,10 +45,6 @@ class MockRtpRtcp : public RtpRtcp {
(int8_t payload_type),
(override));
MOCK_METHOD(void, SetExtmapAllowMixed, (bool extmap_allow_mixed), (override));
- MOCK_METHOD(int32_t,
- RegisterSendRtpHeaderExtension,
- (RTPExtensionType type, uint8_t id),
- (override));
MOCK_METHOD(void,
RegisterRtpHeaderExtension,
(absl::string_view uri, int id),
@@ -61,32 +57,30 @@ class MockRtpRtcp : public RtpRtcp {
DeregisterSendRtpHeaderExtension,
(absl::string_view uri),
(override));
- MOCK_METHOD(bool, SupportsPadding, (), (const override));
- MOCK_METHOD(bool, SupportsRtxPayloadPadding, (), (const override));
- MOCK_METHOD(uint32_t, StartTimestamp, (), (const override));
+ MOCK_METHOD(bool, SupportsPadding, (), (const, override));
+ MOCK_METHOD(bool, SupportsRtxPayloadPadding, (), (const, override));
+ MOCK_METHOD(uint32_t, StartTimestamp, (), (const, override));
MOCK_METHOD(void, SetStartTimestamp, (uint32_t timestamp), (override));
- MOCK_METHOD(uint16_t, SequenceNumber, (), (const override));
+ MOCK_METHOD(uint16_t, SequenceNumber, (), (const, override));
MOCK_METHOD(void, SetSequenceNumber, (uint16_t seq), (override));
MOCK_METHOD(void, SetRtpState, (const RtpState& rtp_state), (override));
MOCK_METHOD(void, SetRtxState, (const RtpState& rtp_state), (override));
- MOCK_METHOD(RtpState, GetRtpState, (), (const override));
- MOCK_METHOD(RtpState, GetRtxState, (), (const override));
- MOCK_METHOD(uint32_t, SSRC, (), (const override));
+ MOCK_METHOD(RtpState, GetRtpState, (), (const, override));
+ MOCK_METHOD(RtpState, GetRtxState, (), (const, override));
+ MOCK_METHOD(uint32_t, SSRC, (), (const, override));
MOCK_METHOD(void, SetRid, (const std::string& rid), (override));
MOCK_METHOD(void, SetMid, (const std::string& mid), (override));
- MOCK_METHOD(int32_t, CSRCs, (uint32_t csrcs[kRtpCsrcSize]), (const override));
MOCK_METHOD(void, SetCsrcs, (const std::vector<uint32_t>& csrcs), (override));
MOCK_METHOD(void, SetRtxSendStatus, (int modes), (override));
- MOCK_METHOD(int, RtxSendStatus, (), (const override));
- MOCK_METHOD(absl::optional<uint32_t>, RtxSsrc, (), (const override));
+ MOCK_METHOD(int, RtxSendStatus, (), (const, override));
+ MOCK_METHOD(absl::optional<uint32_t>, RtxSsrc, (), (const, override));
MOCK_METHOD(void, SetRtxSendPayloadType, (int, int), (override));
- MOCK_METHOD(absl::optional<uint32_t>, FlexfecSsrc, (), (const override));
- MOCK_METHOD((std::pair<int, int>), RtxSendPayloadType, (), (const override));
+ MOCK_METHOD(absl::optional<uint32_t>, FlexfecSsrc, (), (const, override));
MOCK_METHOD(int32_t, SetSendingStatus, (bool sending), (override));
- MOCK_METHOD(bool, Sending, (), (const override));
+ MOCK_METHOD(bool, Sending, (), (const, override));
MOCK_METHOD(void, SetSendingMediaStatus, (bool sending), (override));
- MOCK_METHOD(bool, SendingMedia, (), (const override));
- MOCK_METHOD(bool, IsAudioConfigured, (), (const override));
+ MOCK_METHOD(bool, SendingMedia, (), (const, override));
+ MOCK_METHOD(bool, IsAudioConfigured, (), (const, override));
MOCK_METHOD(void, SetAsPartOfAllocation, (bool), (override));
MOCK_METHOD(void,
BitrateSent,
@@ -94,12 +88,8 @@ class MockRtpRtcp : public RtpRtcp {
uint32_t* video_rate,
uint32_t* fec_rate,
uint32_t* nack_rate),
- (const override));
- MOCK_METHOD(RtpSendRates, GetSendRates, (), (const override));
- MOCK_METHOD(int,
- EstimatedReceiveBandwidth,
- (uint32_t * available_bandwidth),
- (const override));
+ (const, override));
+ MOCK_METHOD(RtpSendRates, GetSendRates, (), (const, override));
MOCK_METHOD(bool,
OnSendingRtpFrame,
(uint32_t, int64_t, int, bool),
@@ -119,31 +109,22 @@ class MockRtpRtcp : public RtpRtcp {
MOCK_METHOD(std::vector<RtpSequenceNumberMap::Info>,
GetSentRtpPacketInfos,
(rtc::ArrayView<const uint16_t> sequence_numbers),
- (const override));
- MOCK_METHOD(size_t, ExpectedPerPacketOverhead, (), (const override));
- MOCK_METHOD(RtcpMode, RTCP, (), (const override));
+ (const, override));
+ MOCK_METHOD(size_t, ExpectedPerPacketOverhead, (), (const, override));
+ MOCK_METHOD(RtcpMode, RTCP, (), (const, override));
MOCK_METHOD(void, SetRTCPStatus, (RtcpMode method), (override));
MOCK_METHOD(int32_t,
SetCNAME,
(const char cname[RTCP_CNAME_SIZE]),
(override));
MOCK_METHOD(int32_t,
- RemoteCNAME,
- (uint32_t remote_ssrc, char cname[RTCP_CNAME_SIZE]),
- (const override));
- MOCK_METHOD(int32_t,
RemoteNTP,
(uint32_t * received_ntp_secs,
uint32_t* received_ntp_frac,
uint32_t* rtcp_arrival_time_secs,
uint32_t* rtcp_arrival_time_frac,
uint32_t* rtcp_timestamp),
- (const override));
- MOCK_METHOD(int32_t,
- AddMixedCNAME,
- (uint32_t ssrc, const char cname[RTCP_CNAME_SIZE]),
- (override));
- MOCK_METHOD(int32_t, RemoveMixedCNAME, (uint32_t ssrc), (override));
+ (const, override));
MOCK_METHOD(int32_t,
RTT,
(uint32_t remote_ssrc,
@@ -151,39 +132,28 @@ class MockRtpRtcp : public RtpRtcp {
int64_t* avg_rtt,
int64_t* min_rtt,
int64_t* max_rtt),
- (const override));
- MOCK_METHOD(int64_t, ExpectedRetransmissionTimeMs, (), (const override));
+ (const, override));
+ MOCK_METHOD(int64_t, ExpectedRetransmissionTimeMs, (), (const, override));
MOCK_METHOD(int32_t, SendRTCP, (RTCPPacketType packet_type), (override));
- MOCK_METHOD(int32_t,
- DataCountersRTP,
- (size_t * bytes_sent, uint32_t* packets_sent),
- (const override));
MOCK_METHOD(void,
GetSendStreamDataCounters,
(StreamDataCounters*, StreamDataCounters*),
- (const override));
+ (const, override));
MOCK_METHOD(int32_t,
RemoteRTCPStat,
(std::vector<RTCPReportBlock> * receive_blocks),
- (const override));
+ (const, override));
MOCK_METHOD(std::vector<ReportBlockData>,
GetLatestReportBlockData,
(),
- (const override));
- MOCK_METHOD(
- int32_t,
- SetRTCPApplicationSpecificData,
- (uint8_t sub_type, uint32_t name, const uint8_t* data, uint16_t length),
- (override));
+ (const, override));
MOCK_METHOD(void, SetRtcpXrRrtrStatus, (bool enable), (override));
- MOCK_METHOD(bool, RtcpXrRrtrStatus, (), (const override));
+ MOCK_METHOD(bool, RtcpXrRrtrStatus, (), (const, override));
MOCK_METHOD(void,
SetRemb,
(int64_t bitrate, std::vector<uint32_t> ssrcs),
(override));
MOCK_METHOD(void, UnsetRemb, (), (override));
- MOCK_METHOD(bool, TMMBR, (), (const override));
- MOCK_METHOD(void, SetTMMBRStatus, (bool enable), (override));
MOCK_METHOD(int32_t,
SendNACK,
(const uint16_t* nack_list, uint16_t size),
@@ -196,7 +166,7 @@ class MockRtpRtcp : public RtpRtcp {
SetStorePacketsStatus,
(bool enable, uint16_t number_to_store),
(override));
- MOCK_METHOD(bool, StorePackets, (), (const override));
+ MOCK_METHOD(bool, StorePackets, (), (const, override));
MOCK_METHOD(void,
SendCombinedRtcpPacket,
(std::vector<std::unique_ptr<rtcp::RtcpPacket>> rtcp_packets),
@@ -208,20 +178,12 @@ class MockRtpRtcp : public RtpRtcp {
bool decodability_flag,
bool buffering_allowed),
(override));
- MOCK_METHOD(void, Process, (), (override));
MOCK_METHOD(void,
SetVideoBitrateAllocation,
(const VideoBitrateAllocation&),
(override));
MOCK_METHOD(RTPSender*, RtpSender, (), (override));
- MOCK_METHOD(const RTPSender*, RtpSender, (), (const override));
-
- private:
- // Mocking this method is currently not required and having a default
- // implementation like
- // MOCK_METHOD(int64_t, TimeUntilNextProcess, (), (override))
- // can be dangerous since it can cause a tight loop on a process thread.
- int64_t TimeUntilNextProcess() override { return 0xffffffff; }
+ MOCK_METHOD(const RTPSender*, RtpSender, (), (const, override));
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/active_decode_targets_helper.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/active_decode_targets_helper.cc
new file mode 100644
index 00000000000..a14426e1445
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/active_decode_targets_helper.cc
@@ -0,0 +1,127 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/rtp_rtcp/source/active_decode_targets_helper.h"
+
+#include <stdint.h>
+
+#include "api/array_view.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace {
+
+// Returns mask of ids of chains previous frame is part of.
+// Assumes for each chain frames are seen in order and no frame on any chain is
+// missing. That assumptions allows a simple detection when previous frame is
+// part of a chain.
+std::bitset<32> LastSendOnChain(int frame_diff,
+ rtc::ArrayView<const int> chain_diffs) {
+ std::bitset<32> bitmask = 0;
+ for (size_t i = 0; i < chain_diffs.size(); ++i) {
+ if (frame_diff == chain_diffs[i]) {
+ bitmask.set(i);
+ }
+ }
+ return bitmask;
+}
+
+// Returns bitmask with first `num` bits set to 1.
+std::bitset<32> AllActive(size_t num) {
+ RTC_DCHECK_LE(num, 32);
+ return (~uint32_t{0}) >> (32 - num);
+}
+
+// Returns bitmask of chains that protect at least one active decode target.
+std::bitset<32> ActiveChains(
+ rtc::ArrayView<const int> decode_target_protected_by_chain,
+ int num_chains,
+ std::bitset<32> active_decode_targets) {
+ std::bitset<32> active_chains = 0;
+ for (size_t dt = 0; dt < decode_target_protected_by_chain.size(); ++dt) {
+ if (dt < active_decode_targets.size() && !active_decode_targets[dt]) {
+ continue;
+ }
+ // chain_idx == num_chains is valid and means the decode target is
+ // not protected by any chain.
+ int chain_idx = decode_target_protected_by_chain[dt];
+ if (chain_idx < num_chains) {
+ active_chains.set(chain_idx);
+ }
+ }
+ return active_chains;
+}
+
+} // namespace
+
+void ActiveDecodeTargetsHelper::OnFrame(
+ rtc::ArrayView<const int> decode_target_protected_by_chain,
+ std::bitset<32> active_decode_targets,
+ bool is_keyframe,
+ int64_t frame_id,
+ rtc::ArrayView<const int> chain_diffs) {
+ const int num_chains = chain_diffs.size();
+ if (num_chains == 0) {
+ // Avoid printing the warning
+ // when already printed the warning for the same active decode targets, or
+ // when active_decode_targets are not changed from it's default value of
+ // all are active, including non-existent decode targets.
+ if (last_active_decode_targets_ != active_decode_targets &&
+ !active_decode_targets.all()) {
+ RTC_LOG(LS_WARNING) << "No chains are configured, but some decode "
+ "targets might be inactive. Unsupported.";
+ }
+ last_active_decode_targets_ = active_decode_targets;
+ return;
+ }
+ const size_t num_decode_targets = decode_target_protected_by_chain.size();
+ RTC_DCHECK_GT(num_decode_targets, 0);
+ std::bitset<32> all_decode_targets = AllActive(num_decode_targets);
+ // Default value for active_decode_targets is 'all are active', i.e. all bits
+ // are set. Default value is set before number of decode targets is known.
+ // It is up to this helper to make the value cleaner and unset unused bits.
+ active_decode_targets &= all_decode_targets;
+
+ if (is_keyframe) {
+ // Key frame resets the state.
+ last_active_decode_targets_ = all_decode_targets;
+ unsent_on_chain_.reset();
+ } else {
+ // Update state assuming previous frame was sent.
+ unsent_on_chain_ &=
+ ~LastSendOnChain(frame_id - last_frame_id_, chain_diffs);
+ }
+ // Save for the next call to OnFrame.
+ // Though usually `frame_id == last_frame_id_ + 1`, it might not be so when
+ // frame id space is shared by several simulcast rtp streams.
+ last_frame_id_ = frame_id;
+
+ if (active_decode_targets == last_active_decode_targets_) {
+ return;
+ }
+ last_active_decode_targets_ = active_decode_targets;
+
+ // Frames that are part of inactive chains might not be produced by the
+ // encoder. Thus stop sending `active_decode_target` bitmask when it is sent
+ // on all active chains rather than on all chains.
+ unsent_on_chain_ = ActiveChains(decode_target_protected_by_chain, num_chains,
+ active_decode_targets);
+ if (unsent_on_chain_.none()) {
+ // Active decode targets are not protected by any chains. To be on the
+ // safe side always send the active_decode_targets_bitmask from now on.
+ RTC_LOG(LS_WARNING)
+ << "Active decode targets protected by no chains. (In)active decode "
+ "targets information will be send overreliably.";
+ unsent_on_chain_.set(1);
+ }
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/active_decode_targets_helper.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/active_decode_targets_helper.h
new file mode 100644
index 00000000000..b51144d9cb1
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/active_decode_targets_helper.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_RTP_RTCP_SOURCE_ACTIVE_DECODE_TARGETS_HELPER_H_
+#define MODULES_RTP_RTCP_SOURCE_ACTIVE_DECODE_TARGETS_HELPER_H_
+
+#include <stdint.h>
+
+#include <bitset>
+
+#include "absl/types/optional.h"
+#include "api/array_view.h"
+
+namespace webrtc {
+
+// Helper class that decides when active_decode_target_bitmask should be written
+// into the dependency descriptor rtp header extension.
+// See: https://aomediacodec.github.io/av1-rtp-spec/#a44-switching
+// This class is thread-compatible.
+class ActiveDecodeTargetsHelper {
+ public:
+ ActiveDecodeTargetsHelper() = default;
+ ActiveDecodeTargetsHelper(const ActiveDecodeTargetsHelper&) = delete;
+ ActiveDecodeTargetsHelper& operator=(const ActiveDecodeTargetsHelper&) =
+ delete;
+ ~ActiveDecodeTargetsHelper() = default;
+
+ // Decides if active decode target bitmask should be attached to the frame
+ // that is about to be sent.
+ void OnFrame(rtc::ArrayView<const int> decode_target_protected_by_chain,
+ std::bitset<32> active_decode_targets,
+ bool is_keyframe,
+ int64_t frame_id,
+ rtc::ArrayView<const int> chain_diffs);
+
+ // Returns active decode target to attach to the dependency descriptor.
+ absl::optional<uint32_t> ActiveDecodeTargetsBitmask() const {
+ if (unsent_on_chain_.none())
+ return absl::nullopt;
+ return last_active_decode_targets_.to_ulong();
+ }
+
+ private:
+ // `unsent_on_chain_[i]` indicates last active decode
+ // target bitmask wasn't attached to a packet on the chain with id `i`.
+ std::bitset<32> unsent_on_chain_ = 0;
+ std::bitset<32> last_active_decode_targets_ = 0;
+ int64_t last_frame_id_ = 0;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_RTP_RTCP_SOURCE_ACTIVE_DECODE_TARGETS_HELPER_H_
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/active_decode_targets_helper_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/active_decode_targets_helper_unittest.cc
new file mode 100644
index 00000000000..651ab22e546
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/active_decode_targets_helper_unittest.cc
@@ -0,0 +1,295 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/rtp_rtcp/source/active_decode_targets_helper.h"
+
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace {
+constexpr std::bitset<32> kAll = ~uint32_t{0};
+} // namespace
+
+TEST(ActiveDecodeTargetsHelperTest,
+ ReturnsNulloptOnKeyFrameWhenAllDecodeTargetsAreActive) {
+ constexpr int kDecodeTargetProtectedByChain[] = {0, 0};
+ ActiveDecodeTargetsHelper helper;
+ int chain_diffs[] = {0};
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/0b11,
+ /*is_keyframe=*/true, /*frame_id=*/1, chain_diffs);
+
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt);
+}
+
+TEST(ActiveDecodeTargetsHelperTest,
+ ReturnsNulloptOnKeyFrameWhenAllDecodeTargetsAreActiveAfterDeltaFrame) {
+ constexpr int kDecodeTargetProtectedByChain[] = {0, 0};
+ ActiveDecodeTargetsHelper helper;
+ int chain_diffs_key[] = {0};
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/0b11,
+ /*is_keyframe=*/true, /*frame_id=*/1, chain_diffs_key);
+ int chain_diffs_delta[] = {1};
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/0b01,
+ /*is_keyframe=*/false, /*frame_id=*/2, chain_diffs_delta);
+
+ ASSERT_EQ(helper.ActiveDecodeTargetsBitmask(), 0b01u);
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/0b11,
+ /*is_keyframe=*/true, /*frame_id=*/3, chain_diffs_key);
+
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt);
+}
+
+TEST(ActiveDecodeTargetsHelperTest,
+ ReturnsBitmaskOnKeyFrameWhenSomeDecodeTargetsAreInactive) {
+ constexpr int kDecodeTargetProtectedByChain[] = {0, 0};
+ ActiveDecodeTargetsHelper helper;
+ int chain_diffs[] = {0};
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/0b01,
+ /*is_keyframe=*/true, /*frame_id=*/1, chain_diffs);
+
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), 0b01u);
+}
+
+TEST(ActiveDecodeTargetsHelperTest,
+ ReturnsBitmaskOnKeyFrameWhenSomeDecodeTargetsAreInactiveAfterDeltaFrame) {
+ constexpr int kDecodeTargetProtectedByChain[] = {0, 0};
+ ActiveDecodeTargetsHelper helper;
+ int chain_diffs_key[] = {0};
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/0b01,
+ /*is_keyframe=*/true, /*frame_id=*/1, chain_diffs_key);
+ int chain_diffs_delta[] = {1};
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/0b01,
+ /*is_keyframe=*/false, /*frame_id=*/2, chain_diffs_delta);
+
+ ASSERT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt);
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/0b01,
+ /*is_keyframe=*/true, /*frame_id=*/3, chain_diffs_key);
+
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), 0b01u);
+}
+
+TEST(ActiveDecodeTargetsHelperTest,
+ ReturnsNulloptWhenActiveDecodeTargetsAreUnused) {
+ constexpr int kDecodeTargetProtectedByChain[] = {0, 0};
+ ActiveDecodeTargetsHelper helper;
+ int chain_diffs[] = {0};
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/kAll,
+ /*is_keyframe=*/true, /*frame_id=*/1, chain_diffs);
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt);
+
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/kAll,
+ /*is_keyframe=*/false, /*frame_id=*/2, chain_diffs);
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt);
+}
+
+TEST(ActiveDecodeTargetsHelperTest,
+ ReturnsNulloptOnDeltaFrameAfterSentOnKeyFrame) {
+ constexpr int kDecodeTargetProtectedByChain[] = {0, 0};
+ ActiveDecodeTargetsHelper helper;
+ int chain_diffs_key[] = {0};
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/0b01,
+ /*is_keyframe=*/true, /*frame_id=*/1, chain_diffs_key);
+ int chain_diffs_delta[] = {1};
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/0b01,
+ /*is_keyframe=*/false, /*frame_id=*/2, chain_diffs_delta);
+
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt);
+}
+
+TEST(ActiveDecodeTargetsHelperTest, ReturnsNewBitmaskOnDeltaFrame) {
+ constexpr int kDecodeTargetProtectedByChain[] = {0, 0};
+ ActiveDecodeTargetsHelper helper;
+ int chain_diffs_key[] = {0};
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/0b11,
+ /*is_keyframe=*/true, /*frame_id=*/1, chain_diffs_key);
+ ASSERT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt);
+ int chain_diffs_delta[] = {1};
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/0b01,
+ /*is_keyframe=*/false, /*frame_id=*/2, chain_diffs_delta);
+
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), 0b01u);
+}
+
+TEST(ActiveDecodeTargetsHelperTest,
+ ReturnsBitmaskWhenAllDecodeTargetsReactivatedOnDeltaFrame) {
+ constexpr int kDecodeTargetProtectedByChain[] = {0, 0};
+ ActiveDecodeTargetsHelper helper;
+ int chain_diffs_key[] = {0};
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/0b01,
+ /*is_keyframe=*/true, /*frame_id=*/1, chain_diffs_key);
+ ASSERT_NE(helper.ActiveDecodeTargetsBitmask(), absl::nullopt);
+ int chain_diffs_delta[] = {1};
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/0b01,
+ /*is_keyframe=*/false, /*frame_id=*/2, chain_diffs_delta);
+ ASSERT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt);
+
+  // Reactivate all the decode targets.
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/kAll,
+ /*is_keyframe=*/false, /*frame_id=*/3, chain_diffs_delta);
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), 0b11u);
+}
+
+TEST(ActiveDecodeTargetsHelperTest, ReturnsNulloptAfterSentOnAllActiveChains) {
+ // Active decode targets (0 and 1) are protected by chains 1 and 2.
+ const std::bitset<32> kSome = 0b011;
+ constexpr int kDecodeTargetProtectedByChain[] = {2, 1, 0};
+
+ ActiveDecodeTargetsHelper helper;
+ int chain_diffs_key[] = {0, 0, 0};
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/0b111,
+ /*is_keyframe=*/true,
+ /*frame_id=*/0, chain_diffs_key);
+ ASSERT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt);
+
+ int chain_diffs_delta1[] = {1, 1, 1};
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/kSome,
+ /*is_keyframe=*/false,
+ /*frame_id=*/1, chain_diffs_delta1);
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), 0b011u);
+
+ int chain_diffs_delta2[] = {2, 2, 1}; // Previous frame was part of chain#2
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/kSome,
+ /*is_keyframe=*/false,
+ /*frame_id=*/2, chain_diffs_delta2);
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), 0b011u);
+
+  // active_decode_targets_bitmask was sent on chains 1 and 2. It was never sent
+ // on chain 0, but chain 0 only protects inactive decode target#2
+ int chain_diffs_delta3[] = {3, 1, 2}; // Previous frame was part of chain#1
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/kSome,
+ /*is_keyframe=*/false,
+ /*frame_id=*/3, chain_diffs_delta3);
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt);
+}
+
+TEST(ActiveDecodeTargetsHelperTest, ReturnsBitmaskWhenChanged) {
+ constexpr int kDecodeTargetProtectedByChain[] = {0, 1, 1};
+
+ ActiveDecodeTargetsHelper helper;
+ int chain_diffs_key[] = {0, 0};
+ helper.OnFrame(kDecodeTargetProtectedByChain, /*active_decode_targets=*/0b111,
+ /*is_keyframe=*/true,
+ /*frame_id=*/0, chain_diffs_key);
+ int chain_diffs_delta1[] = {1, 1};
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/0b011,
+ /*is_keyframe=*/false,
+ /*frame_id=*/1, chain_diffs_delta1);
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), 0b011u);
+
+ int chain_diffs_delta2[] = {1, 2};
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/0b101,
+ /*is_keyframe=*/false,
+ /*frame_id=*/2, chain_diffs_delta2);
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), 0b101u);
+
+  // active_decode_target_bitmask was sent on chain0, but it was an old one.
+ int chain_diffs_delta3[] = {2, 1};
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/0b101,
+ /*is_keyframe=*/false,
+ /*frame_id=*/3, chain_diffs_delta3);
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), 0b101u);
+}
+
+TEST(ActiveDecodeTargetsHelperTest, ReturnsNulloptWhenChainsAreNotUsed) {
+ const rtc::ArrayView<const int> kDecodeTargetProtectedByChain;
+ const rtc::ArrayView<const int> kNoChainDiffs;
+
+ ActiveDecodeTargetsHelper helper;
+ helper.OnFrame(kDecodeTargetProtectedByChain, /*active_decode_targets=*/kAll,
+ /*is_keyframe=*/true,
+ /*frame_id=*/0, kNoChainDiffs);
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt);
+
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/0b101,
+ /*is_keyframe=*/false,
+ /*frame_id=*/1, kNoChainDiffs);
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt);
+}
+
+TEST(ActiveDecodeTargetsHelperTest,
+ KeepReturningBitmaskWhenAllChainsAreInactive) {
+ // Two decode targets, but single chain.
+ // 2nd decode target is not protected by any chain.
+ constexpr int kDecodeTargetProtectedByChain[] = {0, 1};
+
+ ActiveDecodeTargetsHelper helper;
+ int chain_diffs_key[] = {0};
+ helper.OnFrame(kDecodeTargetProtectedByChain, /*active_decode_targets=*/0b10,
+ /*is_keyframe=*/true,
+ /*frame_id=*/0, chain_diffs_key);
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), 0b10u);
+
+ // Even though previous frame is part of the only chain, that inactive chain
+  // doesn't provide guaranteed delivery.
+ int chain_diffs_delta[] = {1};
+ helper.OnFrame(kDecodeTargetProtectedByChain,
+ /*active_decode_targets=*/0b10,
+ /*is_keyframe=*/false,
+ /*frame_id=*/1, chain_diffs_delta);
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), 0b10u);
+}
+
+TEST(ActiveDecodeTargetsHelperTest, Supports32DecodeTargets) {
+ std::bitset<32> some;
+ std::vector<int> decode_target_protected_by_chain(32);
+ for (int i = 0; i < 32; ++i) {
+ decode_target_protected_by_chain[i] = i;
+ some[i] = i % 2 == 0;
+ }
+
+ ActiveDecodeTargetsHelper helper;
+ std::vector<int> chain_diffs_key(32, 0);
+ helper.OnFrame(decode_target_protected_by_chain,
+ /*active_decode_targets=*/some,
+ /*is_keyframe=*/true,
+ /*frame_id=*/1, chain_diffs_key);
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), some.to_ulong());
+ std::vector<int> chain_diffs_delta(32, 1);
+ helper.OnFrame(decode_target_protected_by_chain,
+ /*active_decode_targets=*/some,
+ /*is_keyframe=*/false,
+ /*frame_id=*/2, chain_diffs_delta);
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt);
+ helper.OnFrame(decode_target_protected_by_chain,
+ /*active_decode_targets=*/kAll,
+ /*is_keyframe=*/false,
+ /*frame_id=*/2, chain_diffs_delta);
+ EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), kAll.to_ulong());
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc
new file mode 100644
index 00000000000..b13c85a1d78
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc
@@ -0,0 +1,472 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h"
+
+#include <limits>
+#include <memory>
+#include <utility>
+
+#include "absl/strings/match.h"
+#include "api/transport/field_trial_based_config.h"
+#include "logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h"
+#include "modules/remote_bitrate_estimator/test/bwe_test_logging.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace {
+constexpr uint32_t kTimestampTicksPerMs = 90;
+constexpr int kSendSideDelayWindowMs = 1000;
+constexpr int kBitrateStatisticsWindowMs = 1000;
+constexpr size_t kRtpSequenceNumberMapMaxEntries = 1 << 13;
+
+bool IsEnabled(absl::string_view name,
+ const WebRtcKeyValueConfig* field_trials) {
+ FieldTrialBasedConfig default_trials;
+ auto& trials = field_trials ? *field_trials : default_trials;
+ return absl::StartsWith(trials.Lookup(name), "Enabled");
+}
+} // namespace
+
+DEPRECATED_RtpSenderEgress::NonPacedPacketSender::NonPacedPacketSender(
+ DEPRECATED_RtpSenderEgress* sender)
+ : transport_sequence_number_(0), sender_(sender) {}
+DEPRECATED_RtpSenderEgress::NonPacedPacketSender::~NonPacedPacketSender() =
+ default;
+
+void DEPRECATED_RtpSenderEgress::NonPacedPacketSender::EnqueuePackets(
+ std::vector<std::unique_ptr<RtpPacketToSend>> packets) {
+ for (auto& packet : packets) {
+ if (!packet->SetExtension<TransportSequenceNumber>(
+ ++transport_sequence_number_)) {
+ --transport_sequence_number_;
+ }
+ packet->ReserveExtension<TransmissionOffset>();
+ packet->ReserveExtension<AbsoluteSendTime>();
+ sender_->SendPacket(packet.get(), PacedPacketInfo());
+ }
+}
+
+DEPRECATED_RtpSenderEgress::DEPRECATED_RtpSenderEgress(
+ const RtpRtcpInterface::Configuration& config,
+ RtpPacketHistory* packet_history)
+ : ssrc_(config.local_media_ssrc),
+ rtx_ssrc_(config.rtx_send_ssrc),
+ flexfec_ssrc_(config.fec_generator ? config.fec_generator->FecSsrc()
+ : absl::nullopt),
+ populate_network2_timestamp_(config.populate_network2_timestamp),
+ send_side_bwe_with_overhead_(
+ IsEnabled("WebRTC-SendSideBwe-WithOverhead", config.field_trials)),
+ clock_(config.clock),
+ packet_history_(packet_history),
+ transport_(config.outgoing_transport),
+ event_log_(config.event_log),
+ is_audio_(config.audio),
+ need_rtp_packet_infos_(config.need_rtp_packet_infos),
+ transport_feedback_observer_(config.transport_feedback_callback),
+ send_side_delay_observer_(config.send_side_delay_observer),
+ send_packet_observer_(config.send_packet_observer),
+ rtp_stats_callback_(config.rtp_stats_callback),
+ bitrate_callback_(config.send_bitrate_observer),
+ media_has_been_sent_(false),
+ force_part_of_allocation_(false),
+ timestamp_offset_(0),
+ max_delay_it_(send_delays_.end()),
+ sum_delays_ms_(0),
+ total_packet_send_delay_ms_(0),
+ send_rates_(kNumMediaTypes,
+ {kBitrateStatisticsWindowMs, RateStatistics::kBpsScale}),
+ rtp_sequence_number_map_(need_rtp_packet_infos_
+ ? std::make_unique<RtpSequenceNumberMap>(
+ kRtpSequenceNumberMapMaxEntries)
+ : nullptr) {}
+
+void DEPRECATED_RtpSenderEgress::SendPacket(
+ RtpPacketToSend* packet,
+ const PacedPacketInfo& pacing_info) {
+ RTC_DCHECK(packet);
+
+ const uint32_t packet_ssrc = packet->Ssrc();
+ RTC_DCHECK(packet->packet_type().has_value());
+ RTC_DCHECK(HasCorrectSsrc(*packet));
+ int64_t now_ms = clock_->TimeInMilliseconds();
+
+ if (is_audio_) {
+#if BWE_TEST_LOGGING_COMPILE_TIME_ENABLE
+ BWE_TEST_LOGGING_PLOT_WITH_SSRC(1, "AudioTotBitrate_kbps", now_ms,
+ GetSendRates().Sum().kbps(), packet_ssrc);
+ BWE_TEST_LOGGING_PLOT_WITH_SSRC(
+ 1, "AudioNackBitrate_kbps", now_ms,
+ GetSendRates()[RtpPacketMediaType::kRetransmission].kbps(),
+ packet_ssrc);
+#endif
+ } else {
+#if BWE_TEST_LOGGING_COMPILE_TIME_ENABLE
+ BWE_TEST_LOGGING_PLOT_WITH_SSRC(1, "VideoTotBitrate_kbps", now_ms,
+ GetSendRates().Sum().kbps(), packet_ssrc);
+ BWE_TEST_LOGGING_PLOT_WITH_SSRC(
+ 1, "VideoNackBitrate_kbps", now_ms,
+ GetSendRates()[RtpPacketMediaType::kRetransmission].kbps(),
+ packet_ssrc);
+#endif
+ }
+
+ PacketOptions options;
+ {
+ rtc::CritScope lock(&lock_);
+ options.included_in_allocation = force_part_of_allocation_;
+
+ if (need_rtp_packet_infos_ &&
+ packet->packet_type() == RtpPacketToSend::Type::kVideo) {
+ RTC_DCHECK(rtp_sequence_number_map_);
+ // Last packet of a frame, add it to sequence number info map.
+ const uint32_t timestamp = packet->Timestamp() - timestamp_offset_;
+ bool is_first_packet_of_frame = packet->is_first_packet_of_frame();
+ bool is_last_packet_of_frame = packet->Marker();
+
+ rtp_sequence_number_map_->InsertPacket(
+ packet->SequenceNumber(),
+ RtpSequenceNumberMap::Info(timestamp, is_first_packet_of_frame,
+ is_last_packet_of_frame));
+ }
+ }
+
+ // Bug webrtc:7859. While FEC is invoked from rtp_sender_video, and not after
+ // the pacer, these modifications of the header below are happening after the
+ // FEC protection packets are calculated. This will corrupt recovered packets
+ // at the same place. It's not an issue for extensions, which are present in
+ // all the packets (their content just may be incorrect on recovered packets).
+ // In case of VideoTimingExtension, since it's present not in every packet,
+ // data after rtp header may be corrupted if these packets are protected by
+ // the FEC.
+ int64_t diff_ms = now_ms - packet->capture_time_ms();
+ if (packet->HasExtension<TransmissionOffset>()) {
+ packet->SetExtension<TransmissionOffset>(kTimestampTicksPerMs * diff_ms);
+ }
+ if (packet->HasExtension<AbsoluteSendTime>()) {
+ packet->SetExtension<AbsoluteSendTime>(
+ AbsoluteSendTime::MsTo24Bits(now_ms));
+ }
+
+ if (packet->HasExtension<VideoTimingExtension>()) {
+ if (populate_network2_timestamp_) {
+ packet->set_network2_time_ms(now_ms);
+ } else {
+ packet->set_pacer_exit_time_ms(now_ms);
+ }
+ }
+
+ const bool is_media = packet->packet_type() == RtpPacketMediaType::kAudio ||
+ packet->packet_type() == RtpPacketMediaType::kVideo;
+
+ // Downstream code actually uses this flag to distinguish between media and
+ // everything else.
+ options.is_retransmit = !is_media;
+ if (auto packet_id = packet->GetExtension<TransportSequenceNumber>()) {
+ options.packet_id = *packet_id;
+ options.included_in_feedback = true;
+ options.included_in_allocation = true;
+ AddPacketToTransportFeedback(*packet_id, *packet, pacing_info);
+ }
+
+ options.application_data.assign(packet->application_data().begin(),
+ packet->application_data().end());
+
+ if (packet->packet_type() != RtpPacketMediaType::kPadding &&
+ packet->packet_type() != RtpPacketMediaType::kRetransmission) {
+ UpdateDelayStatistics(packet->capture_time_ms(), now_ms, packet_ssrc);
+ UpdateOnSendPacket(options.packet_id, packet->capture_time_ms(),
+ packet_ssrc);
+ }
+
+ const bool send_success = SendPacketToNetwork(*packet, options, pacing_info);
+
+ // Put packet in retransmission history or update pending status even if
+ // actual sending fails.
+ if (is_media && packet->allow_retransmission()) {
+ packet_history_->PutRtpPacket(std::make_unique<RtpPacketToSend>(*packet),
+ now_ms);
+ } else if (packet->retransmitted_sequence_number()) {
+ packet_history_->MarkPacketAsSent(*packet->retransmitted_sequence_number());
+ }
+
+ if (send_success) {
+ rtc::CritScope lock(&lock_);
+ UpdateRtpStats(*packet);
+ media_has_been_sent_ = true;
+ }
+}
+
+void DEPRECATED_RtpSenderEgress::ProcessBitrateAndNotifyObservers() {
+ if (!bitrate_callback_)
+ return;
+
+ rtc::CritScope lock(&lock_);
+ RtpSendRates send_rates = GetSendRatesLocked();
+ bitrate_callback_->Notify(
+ send_rates.Sum().bps(),
+ send_rates[RtpPacketMediaType::kRetransmission].bps(), ssrc_);
+}
+
+RtpSendRates DEPRECATED_RtpSenderEgress::GetSendRates() const {
+ rtc::CritScope lock(&lock_);
+ return GetSendRatesLocked();
+}
+
+RtpSendRates DEPRECATED_RtpSenderEgress::GetSendRatesLocked() const {
+ const int64_t now_ms = clock_->TimeInMilliseconds();
+ RtpSendRates current_rates;
+ for (size_t i = 0; i < kNumMediaTypes; ++i) {
+ RtpPacketMediaType type = static_cast<RtpPacketMediaType>(i);
+ current_rates[type] =
+ DataRate::BitsPerSec(send_rates_[i].Rate(now_ms).value_or(0));
+ }
+ return current_rates;
+}
+
+void DEPRECATED_RtpSenderEgress::GetDataCounters(
+ StreamDataCounters* rtp_stats,
+ StreamDataCounters* rtx_stats) const {
+ rtc::CritScope lock(&lock_);
+ *rtp_stats = rtp_stats_;
+ *rtx_stats = rtx_rtp_stats_;
+}
+
+void DEPRECATED_RtpSenderEgress::ForceIncludeSendPacketsInAllocation(
+ bool part_of_allocation) {
+ rtc::CritScope lock(&lock_);
+ force_part_of_allocation_ = part_of_allocation;
+}
+
+bool DEPRECATED_RtpSenderEgress::MediaHasBeenSent() const {
+ rtc::CritScope lock(&lock_);
+ return media_has_been_sent_;
+}
+
+void DEPRECATED_RtpSenderEgress::SetMediaHasBeenSent(bool media_sent) {
+ rtc::CritScope lock(&lock_);
+ media_has_been_sent_ = media_sent;
+}
+
+void DEPRECATED_RtpSenderEgress::SetTimestampOffset(uint32_t timestamp) {
+ rtc::CritScope lock(&lock_);
+ timestamp_offset_ = timestamp;
+}
+
+std::vector<RtpSequenceNumberMap::Info>
+DEPRECATED_RtpSenderEgress::GetSentRtpPacketInfos(
+ rtc::ArrayView<const uint16_t> sequence_numbers) const {
+ RTC_DCHECK(!sequence_numbers.empty());
+ if (!need_rtp_packet_infos_) {
+ return std::vector<RtpSequenceNumberMap::Info>();
+ }
+
+ std::vector<RtpSequenceNumberMap::Info> results;
+ results.reserve(sequence_numbers.size());
+
+ rtc::CritScope cs(&lock_);
+ for (uint16_t sequence_number : sequence_numbers) {
+ const auto& info = rtp_sequence_number_map_->Get(sequence_number);
+ if (!info) {
+ // The empty vector will be returned. We can delay the clearing
+ // of the vector until after we exit the critical section.
+ return std::vector<RtpSequenceNumberMap::Info>();
+ }
+ results.push_back(*info);
+ }
+
+ return results;
+}
+
+bool DEPRECATED_RtpSenderEgress::HasCorrectSsrc(
+ const RtpPacketToSend& packet) const {
+ switch (*packet.packet_type()) {
+ case RtpPacketMediaType::kAudio:
+ case RtpPacketMediaType::kVideo:
+ return packet.Ssrc() == ssrc_;
+ case RtpPacketMediaType::kRetransmission:
+ case RtpPacketMediaType::kPadding:
+ // Both padding and retransmission must be on either the media or the
+ // RTX stream.
+ return packet.Ssrc() == rtx_ssrc_ || packet.Ssrc() == ssrc_;
+ case RtpPacketMediaType::kForwardErrorCorrection:
+ // FlexFEC is on separate SSRC, ULPFEC uses media SSRC.
+ return packet.Ssrc() == ssrc_ || packet.Ssrc() == flexfec_ssrc_;
+ }
+ return false;
+}
+
+void DEPRECATED_RtpSenderEgress::AddPacketToTransportFeedback(
+ uint16_t packet_id,
+ const RtpPacketToSend& packet,
+ const PacedPacketInfo& pacing_info) {
+ if (transport_feedback_observer_) {
+ size_t packet_size = packet.payload_size() + packet.padding_size();
+ if (send_side_bwe_with_overhead_) {
+ packet_size = packet.size();
+ }
+
+ RtpPacketSendInfo packet_info;
+ packet_info.ssrc = ssrc_;
+ packet_info.transport_sequence_number = packet_id;
+ packet_info.rtp_sequence_number = packet.SequenceNumber();
+ packet_info.length = packet_size;
+ packet_info.pacing_info = pacing_info;
+ packet_info.packet_type = packet.packet_type();
+ transport_feedback_observer_->OnAddPacket(packet_info);
+ }
+}
+
+void DEPRECATED_RtpSenderEgress::UpdateDelayStatistics(int64_t capture_time_ms,
+ int64_t now_ms,
+ uint32_t ssrc) {
+ if (!send_side_delay_observer_ || capture_time_ms <= 0)
+ return;
+
+ int avg_delay_ms = 0;
+ int max_delay_ms = 0;
+ uint64_t total_packet_send_delay_ms = 0;
+ {
+ rtc::CritScope cs(&lock_);
+ // Compute the max and average of the recent capture-to-send delays.
+ // The time complexity of the current approach depends on the distribution
+ // of the delay values. This could be done more efficiently.
+
+ // Remove elements older than kSendSideDelayWindowMs.
+ auto lower_bound =
+ send_delays_.lower_bound(now_ms - kSendSideDelayWindowMs);
+ for (auto it = send_delays_.begin(); it != lower_bound; ++it) {
+ if (max_delay_it_ == it) {
+ max_delay_it_ = send_delays_.end();
+ }
+ sum_delays_ms_ -= it->second;
+ }
+ send_delays_.erase(send_delays_.begin(), lower_bound);
+ if (max_delay_it_ == send_delays_.end()) {
+ // Removed the previous max. Need to recompute.
+ RecomputeMaxSendDelay();
+ }
+
+ // Add the new element.
+ RTC_DCHECK_GE(now_ms, 0);
+ RTC_DCHECK_LE(now_ms, std::numeric_limits<int64_t>::max() / 2);
+ RTC_DCHECK_GE(capture_time_ms, 0);
+ RTC_DCHECK_LE(capture_time_ms, std::numeric_limits<int64_t>::max() / 2);
+ int64_t diff_ms = now_ms - capture_time_ms;
+ RTC_DCHECK_GE(diff_ms, static_cast<int64_t>(0));
+ RTC_DCHECK_LE(diff_ms, std::numeric_limits<int>::max());
+ int new_send_delay = rtc::dchecked_cast<int>(now_ms - capture_time_ms);
+ SendDelayMap::iterator it;
+ bool inserted;
+ std::tie(it, inserted) =
+ send_delays_.insert(std::make_pair(now_ms, new_send_delay));
+ if (!inserted) {
+ // TODO(terelius): If we have multiple delay measurements during the same
+ // millisecond then we keep the most recent one. It is not clear that this
+ // is the right decision, but it preserves an earlier behavior.
+ int previous_send_delay = it->second;
+ sum_delays_ms_ -= previous_send_delay;
+ it->second = new_send_delay;
+ if (max_delay_it_ == it && new_send_delay < previous_send_delay) {
+ RecomputeMaxSendDelay();
+ }
+ }
+ if (max_delay_it_ == send_delays_.end() ||
+ it->second >= max_delay_it_->second) {
+ max_delay_it_ = it;
+ }
+ sum_delays_ms_ += new_send_delay;
+ total_packet_send_delay_ms_ += new_send_delay;
+ total_packet_send_delay_ms = total_packet_send_delay_ms_;
+
+ size_t num_delays = send_delays_.size();
+ RTC_DCHECK(max_delay_it_ != send_delays_.end());
+ max_delay_ms = rtc::dchecked_cast<int>(max_delay_it_->second);
+ int64_t avg_ms = (sum_delays_ms_ + num_delays / 2) / num_delays;
+ RTC_DCHECK_GE(avg_ms, static_cast<int64_t>(0));
+ RTC_DCHECK_LE(avg_ms,
+ static_cast<int64_t>(std::numeric_limits<int>::max()));
+ avg_delay_ms =
+ rtc::dchecked_cast<int>((sum_delays_ms_ + num_delays / 2) / num_delays);
+ }
+ send_side_delay_observer_->SendSideDelayUpdated(
+ avg_delay_ms, max_delay_ms, total_packet_send_delay_ms, ssrc);
+}
+
+void DEPRECATED_RtpSenderEgress::RecomputeMaxSendDelay() {
+ max_delay_it_ = send_delays_.begin();
+ for (auto it = send_delays_.begin(); it != send_delays_.end(); ++it) {
+ if (it->second >= max_delay_it_->second) {
+ max_delay_it_ = it;
+ }
+ }
+}
+
+void DEPRECATED_RtpSenderEgress::UpdateOnSendPacket(int packet_id,
+ int64_t capture_time_ms,
+ uint32_t ssrc) {
+ if (!send_packet_observer_ || capture_time_ms <= 0 || packet_id == -1) {
+ return;
+ }
+
+ send_packet_observer_->OnSendPacket(packet_id, capture_time_ms, ssrc);
+}
+
+bool DEPRECATED_RtpSenderEgress::SendPacketToNetwork(
+ const RtpPacketToSend& packet,
+ const PacketOptions& options,
+ const PacedPacketInfo& pacing_info) {
+ int bytes_sent = -1;
+ if (transport_) {
+ bytes_sent = transport_->SendRtp(packet.data(), packet.size(), options)
+ ? static_cast<int>(packet.size())
+ : -1;
+ if (event_log_ && bytes_sent > 0) {
+ event_log_->Log(std::make_unique<RtcEventRtpPacketOutgoing>(
+ packet, pacing_info.probe_cluster_id));
+ }
+ }
+
+ if (bytes_sent <= 0) {
+ RTC_LOG(LS_WARNING) << "Transport failed to send packet.";
+ return false;
+ }
+ return true;
+}
+
+void DEPRECATED_RtpSenderEgress::UpdateRtpStats(const RtpPacketToSend& packet) {
+ int64_t now_ms = clock_->TimeInMilliseconds();
+
+ StreamDataCounters* counters =
+ packet.Ssrc() == rtx_ssrc_ ? &rtx_rtp_stats_ : &rtp_stats_;
+
+ if (counters->first_packet_time_ms == -1) {
+ counters->first_packet_time_ms = now_ms;
+ }
+
+ if (packet.packet_type() == RtpPacketMediaType::kForwardErrorCorrection) {
+ counters->fec.AddPacket(packet);
+ }
+
+ if (packet.packet_type() == RtpPacketMediaType::kRetransmission) {
+ counters->retransmitted.AddPacket(packet);
+ }
+ counters->transmitted.AddPacket(packet);
+
+ RTC_DCHECK(packet.packet_type().has_value());
+ send_rates_[static_cast<size_t>(*packet.packet_type())].Update(packet.size(),
+ now_ms);
+
+ if (rtp_stats_callback_) {
+ rtp_stats_callback_->DataCountersUpdated(*counters, packet.Ssrc());
+ }
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h
new file mode 100644
index 00000000000..9f1d7d6c837
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h
@@ -0,0 +1,149 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_RTP_RTCP_SOURCE_DEPRECATED_DEPRECATED_RTP_SENDER_EGRESS_H_
+#define MODULES_RTP_RTCP_SOURCE_DEPRECATED_DEPRECATED_RTP_SENDER_EGRESS_H_
+
+#include <map>
+#include <memory>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/call/transport.h"
+#include "api/rtc_event_log/rtc_event_log.h"
+#include "api/units/data_rate.h"
+#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "modules/rtp_rtcp/source/rtp_packet_history.h"
+#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h"
+#include "modules/rtp_rtcp/source/rtp_sequence_number_map.h"
+#include "rtc_base/critical_section.h"
+#include "rtc_base/rate_statistics.h"
+#include "rtc_base/thread_annotations.h"
+
+namespace webrtc {
+
+class DEPRECATED_RtpSenderEgress {
+ public:
+ // Helper class that redirects packets directly to the send part of this class
+ // without passing through an actual paced sender.
+ class NonPacedPacketSender : public RtpPacketSender {
+ public:
+ explicit NonPacedPacketSender(DEPRECATED_RtpSenderEgress* sender);
+ virtual ~NonPacedPacketSender();
+
+ void EnqueuePackets(
+ std::vector<std::unique_ptr<RtpPacketToSend>> packets) override;
+
+ private:
+ uint16_t transport_sequence_number_;
+ DEPRECATED_RtpSenderEgress* const sender_;
+ };
+
+ DEPRECATED_RtpSenderEgress(const RtpRtcpInterface::Configuration& config,
+ RtpPacketHistory* packet_history);
+ ~DEPRECATED_RtpSenderEgress() = default;
+
+ void SendPacket(RtpPacketToSend* packet, const PacedPacketInfo& pacing_info)
+ RTC_LOCKS_EXCLUDED(lock_);
+ uint32_t Ssrc() const { return ssrc_; }
+ absl::optional<uint32_t> RtxSsrc() const { return rtx_ssrc_; }
+ absl::optional<uint32_t> FlexFecSsrc() const { return flexfec_ssrc_; }
+
+ void ProcessBitrateAndNotifyObservers() RTC_LOCKS_EXCLUDED(lock_);
+ RtpSendRates GetSendRates() const RTC_LOCKS_EXCLUDED(lock_);
+ void GetDataCounters(StreamDataCounters* rtp_stats,
+ StreamDataCounters* rtx_stats) const
+ RTC_LOCKS_EXCLUDED(lock_);
+
+ void ForceIncludeSendPacketsInAllocation(bool part_of_allocation)
+ RTC_LOCKS_EXCLUDED(lock_);
+ bool MediaHasBeenSent() const RTC_LOCKS_EXCLUDED(lock_);
+ void SetMediaHasBeenSent(bool media_sent) RTC_LOCKS_EXCLUDED(lock_);
+ void SetTimestampOffset(uint32_t timestamp) RTC_LOCKS_EXCLUDED(lock_);
+
+  // For each sequence number in |sequence_numbers|, recall the last RTP packet
+ // which bore it - its timestamp and whether it was the first and/or last
+ // packet in that frame. If all of the given sequence numbers could be
+ // recalled, return a vector with all of them (in corresponding order).
+ // If any could not be recalled, return an empty vector.
+ std::vector<RtpSequenceNumberMap::Info> GetSentRtpPacketInfos(
+ rtc::ArrayView<const uint16_t> sequence_numbers) const
+ RTC_LOCKS_EXCLUDED(lock_);
+
+ private:
+ // Maps capture time in milliseconds to send-side delay in milliseconds.
+ // Send-side delay is the difference between transmission time and capture
+ // time.
+ typedef std::map<int64_t, int> SendDelayMap;
+
+ RtpSendRates GetSendRatesLocked() const RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_);
+ bool HasCorrectSsrc(const RtpPacketToSend& packet) const;
+ void AddPacketToTransportFeedback(uint16_t packet_id,
+ const RtpPacketToSend& packet,
+ const PacedPacketInfo& pacing_info);
+ void UpdateDelayStatistics(int64_t capture_time_ms,
+ int64_t now_ms,
+ uint32_t ssrc);
+ void RecomputeMaxSendDelay() RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_);
+ void UpdateOnSendPacket(int packet_id,
+ int64_t capture_time_ms,
+ uint32_t ssrc);
+ // Sends packet on to |transport_|, leaving the RTP module.
+ bool SendPacketToNetwork(const RtpPacketToSend& packet,
+ const PacketOptions& options,
+ const PacedPacketInfo& pacing_info);
+ void UpdateRtpStats(const RtpPacketToSend& packet)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_);
+
+ const uint32_t ssrc_;
+ const absl::optional<uint32_t> rtx_ssrc_;
+ const absl::optional<uint32_t> flexfec_ssrc_;
+ const bool populate_network2_timestamp_;
+ const bool send_side_bwe_with_overhead_;
+ Clock* const clock_;
+ RtpPacketHistory* const packet_history_;
+ Transport* const transport_;
+ RtcEventLog* const event_log_;
+ const bool is_audio_;
+ const bool need_rtp_packet_infos_;
+
+ TransportFeedbackObserver* const transport_feedback_observer_;
+ SendSideDelayObserver* const send_side_delay_observer_;
+ SendPacketObserver* const send_packet_observer_;
+ StreamDataCountersCallback* const rtp_stats_callback_;
+ BitrateStatisticsObserver* const bitrate_callback_;
+
+ rtc::CriticalSection lock_;
+ bool media_has_been_sent_ RTC_GUARDED_BY(lock_);
+ bool force_part_of_allocation_ RTC_GUARDED_BY(lock_);
+ uint32_t timestamp_offset_ RTC_GUARDED_BY(lock_);
+
+ SendDelayMap send_delays_ RTC_GUARDED_BY(lock_);
+ SendDelayMap::const_iterator max_delay_it_ RTC_GUARDED_BY(lock_);
+ // The sum of delays over a kSendSideDelayWindowMs sliding window.
+ int64_t sum_delays_ms_ RTC_GUARDED_BY(lock_);
+ uint64_t total_packet_send_delay_ms_ RTC_GUARDED_BY(lock_);
+ StreamDataCounters rtp_stats_ RTC_GUARDED_BY(lock_);
+ StreamDataCounters rtx_rtp_stats_ RTC_GUARDED_BY(lock_);
+ // One element per value in RtpPacketMediaType, with index matching value.
+ std::vector<RateStatistics> send_rates_ RTC_GUARDED_BY(lock_);
+
+ // Maps sent packets' sequence numbers to a tuple consisting of:
+ // 1. The timestamp, without the randomizing offset mandated by the RFC.
+ // 2. Whether the packet was the first in its frame.
+ // 3. Whether the packet was the last in its frame.
+ const std::unique_ptr<RtpSequenceNumberMap> rtp_sequence_number_map_
+ RTC_GUARDED_BY(lock_);
+};
+
+} // namespace webrtc
+
+#endif // MODULES_RTP_RTCP_SOURCE_DEPRECATED_DEPRECATED_RTP_SENDER_EGRESS_H_
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/nack_rtx_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/nack_rtx_unittest.cc
index 55e1e44ebe4..c30eb32a44a 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/nack_rtx_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/nack_rtx_unittest.cc
@@ -19,9 +19,9 @@
#include "call/rtp_stream_receiver_controller.h"
#include "call/rtx_receive_stream.h"
#include "modules/rtp_rtcp/include/receive_statistics.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "modules/rtp_rtcp/source/rtp_sender_video.h"
#include "rtc_base/rate_limiter.h"
#include "test/gtest.h"
@@ -63,7 +63,9 @@ class RtxLoopBackTransport : public webrtc::Transport {
count_rtx_ssrc_(0),
module_(NULL) {}
- void SetSendModule(RtpRtcp* rtpRtcpModule) { module_ = rtpRtcpModule; }
+ void SetSendModule(RtpRtcpInterface* rtpRtcpModule) {
+ module_ = rtpRtcpModule;
+ }
void DropEveryNthPacket(int n) { packet_loss_ = n; }
@@ -109,7 +111,7 @@ class RtxLoopBackTransport : public webrtc::Transport {
int consecutive_drop_end_;
uint32_t rtx_ssrc_;
int count_rtx_ssrc_;
- RtpRtcp* module_;
+ RtpRtcpInterface* module_;
RtpStreamReceiverController stream_receiver_controller_;
std::set<uint16_t> expected_sequence_numbers_;
};
@@ -125,7 +127,7 @@ class RtpRtcpRtxNackTest : public ::testing::Test {
~RtpRtcpRtxNackTest() override {}
void SetUp() override {
- RtpRtcp::Configuration configuration;
+ RtpRtcpInterface::Configuration configuration;
configuration.audio = false;
configuration.clock = &fake_clock;
receive_statistics_ = ReceiveStatistics::Create(&fake_clock);
@@ -134,7 +136,7 @@ class RtpRtcpRtxNackTest : public ::testing::Test {
configuration.retransmission_rate_limiter = &retransmission_rate_limiter_;
configuration.local_media_ssrc = kTestSsrc;
configuration.rtx_send_ssrc = kTestRtxSsrc;
- rtp_rtcp_module_ = RtpRtcp::Create(configuration);
+ rtp_rtcp_module_ = ModuleRtpRtcpImpl2::Create(configuration);
FieldTrialBasedConfig field_trials;
RTPSenderVideo::Config video_config;
video_config.clock = &fake_clock;
@@ -224,7 +226,7 @@ class RtpRtcpRtxNackTest : public ::testing::Test {
}
std::unique_ptr<ReceiveStatistics> receive_statistics_;
- std::unique_ptr<RtpRtcp> rtp_rtcp_module_;
+ std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp_module_;
std::unique_ptr<RTPSenderVideo> rtp_sender_video_;
RtxLoopBackTransport transport_;
const std::map<int, int> rtx_associated_payload_types_ = {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc
index bfe26676849..da51a501f26 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc
@@ -66,7 +66,8 @@ const size_t kMaxNumberOfStoredRrtrs = 300;
constexpr int32_t kDefaultVideoReportInterval = 1000;
constexpr int32_t kDefaultAudioReportInterval = 5000;
-std::set<uint32_t> GetRegisteredSsrcs(const RtpRtcp::Configuration& config) {
+std::set<uint32_t> GetRegisteredSsrcs(
+ const RtpRtcpInterface::Configuration& config) {
std::set<uint32_t> ssrcs;
ssrcs.insert(config.local_media_ssrc);
if (config.rtx_send_ssrc) {
@@ -136,7 +137,7 @@ struct RTCPReceiver::LastFirStatus {
uint8_t sequence_number;
};
-RTCPReceiver::RTCPReceiver(const RtpRtcp::Configuration& config,
+RTCPReceiver::RTCPReceiver(const RtpRtcpInterface::Configuration& config,
ModuleRtpRtcp* owner)
: clock_(config.clock),
receiver_only_(config.receiver_only),
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h
index ef41476903a..f7fb6075878 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h
@@ -20,10 +20,10 @@
#include "api/array_view.h"
#include "modules/rtp_rtcp/include/report_block_data.h"
#include "modules/rtp_rtcp/include/rtcp_statistics.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/rtp_rtcp/source/rtcp_nack_stats.h"
#include "modules/rtp_rtcp/source/rtcp_packet/dlrr.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h"
#include "rtc_base/critical_section.h"
#include "rtc_base/thread_annotations.h"
#include "system_wrappers/include/ntp_time.h"
@@ -53,7 +53,8 @@ class RTCPReceiver final {
virtual ~ModuleRtpRtcp() = default;
};
- RTCPReceiver(const RtpRtcp::Configuration& config, ModuleRtpRtcp* owner);
+ RTCPReceiver(const RtpRtcpInterface::Configuration& config,
+ ModuleRtpRtcp* owner);
~RTCPReceiver();
void IncomingPacket(const uint8_t* packet, size_t packet_size) {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc
index f95219674b1..a384d71913b 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc
@@ -161,8 +161,8 @@ struct ReceiverMocks {
StrictMock<MockModuleRtpRtcp> rtp_rtcp_impl;
};
-RtpRtcp::Configuration DefaultConfiguration(ReceiverMocks* mocks) {
- RtpRtcp::Configuration config;
+RtpRtcpInterface::Configuration DefaultConfiguration(ReceiverMocks* mocks) {
+ RtpRtcpInterface::Configuration config;
config.clock = &mocks->clock;
config.receiver_only = false;
config.rtcp_packet_type_counter_observer =
@@ -230,7 +230,7 @@ TEST(RtcpReceiverTest, InjectSrPacketFromUnknownSender) {
rtcp::SenderReport sr;
sr.SetSenderSsrc(kUnknownSenderSsrc);
- // The parser will handle report blocks in Sender Report from other than his
+ // The parser will handle report blocks in Sender Report from other than their
// expected peer.
EXPECT_CALL(mocks.rtp_rtcp_impl, OnReceivedRtcpReportBlocks);
EXPECT_CALL(mocks.bandwidth_observer,
@@ -636,7 +636,7 @@ TEST(RtcpReceiverTest, InjectApp) {
TEST(RtcpReceiverTest, InjectSdesWithOneChunk) {
ReceiverMocks mocks;
MockCnameCallbackImpl callback;
- RtpRtcp::Configuration config = DefaultConfiguration(&mocks);
+ RtpRtcpInterface::Configuration config = DefaultConfiguration(&mocks);
config.rtcp_cname_callback = &callback;
RTCPReceiver receiver(config, &mocks.rtp_rtcp_impl);
receiver.SetRemoteSSRC(kSenderSsrc);
@@ -1310,7 +1310,7 @@ TEST(RtcpReceiverTest, TmmbrThreeConstraintsTimeOut) {
TEST(RtcpReceiverTest, Callbacks) {
ReceiverMocks mocks;
MockRtcpCallbackImpl callback;
- RtpRtcp::Configuration config = DefaultConfiguration(&mocks);
+ RtpRtcpInterface::Configuration config = DefaultConfiguration(&mocks);
config.rtcp_statistics_callback = &callback;
RTCPReceiver receiver(config, &mocks.rtp_rtcp_impl);
receiver.SetRemoteSSRC(kSenderSsrc);
@@ -1348,7 +1348,7 @@ TEST(RtcpReceiverTest,
VerifyBlockAndTimestampObtainedFromReportBlockDataObserver) {
ReceiverMocks mocks;
MockReportBlockDataObserverImpl observer;
- RtpRtcp::Configuration config = DefaultConfiguration(&mocks);
+ RtpRtcpInterface::Configuration config = DefaultConfiguration(&mocks);
config.report_block_data_observer = &observer;
RTCPReceiver receiver(config, &mocks.rtp_rtcp_impl);
receiver.SetRemoteSSRC(kSenderSsrc);
@@ -1397,7 +1397,7 @@ TEST(RtcpReceiverTest,
TEST(RtcpReceiverTest, VerifyRttObtainedFromReportBlockDataObserver) {
ReceiverMocks mocks;
MockReportBlockDataObserverImpl observer;
- RtpRtcp::Configuration config = DefaultConfiguration(&mocks);
+ RtpRtcpInterface::Configuration config = DefaultConfiguration(&mocks);
config.report_block_data_observer = &observer;
RTCPReceiver receiver(config, &mocks.rtp_rtcp_impl);
receiver.SetRemoteSSRC(kSenderSsrc);
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc
index f06d429fb92..f3e04b17f3c 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc
@@ -33,7 +33,7 @@
#include "modules/rtp_rtcp/source/rtcp_packet/tmmbn.h"
#include "modules/rtp_rtcp/source/rtcp_packet/tmmbr.h"
#include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h"
-#include "modules/rtp_rtcp/source/rtp_rtcp_impl.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "modules/rtp_rtcp/source/time_util.h"
#include "modules/rtp_rtcp/source/tmmbr_help.h"
#include "rtc_base/checks.h"
@@ -123,7 +123,7 @@ RTCPSender::FeedbackState::FeedbackState()
last_rr_ntp_secs(0),
last_rr_ntp_frac(0),
remote_sr(0),
- module(nullptr) {}
+ receiver(nullptr) {}
RTCPSender::FeedbackState::FeedbackState(const FeedbackState&) = default;
@@ -148,7 +148,7 @@ class RTCPSender::RtcpContext {
const int64_t now_us_;
};
-RTCPSender::RTCPSender(const RtpRtcp::Configuration& config)
+RTCPSender::RTCPSender(const RtpRtcpInterface::Configuration& config)
: audio_(config.audio),
ssrc_(config.local_media_ssrc),
clock_(config.clock),
@@ -176,11 +176,6 @@ RTCPSender::RTCPSender(const RtpRtcp::Configuration& config)
packet_oh_send_(0),
max_packet_size_(IP_PACKET_SIZE - 28), // IPv4 + UDP by default.
- app_sub_type_(0),
- app_name_(0),
- app_data_(nullptr),
- app_length_(0),
-
xr_send_receiver_reference_time_enabled_(false),
packet_type_counter_observer_(config.rtcp_packet_type_counter_observer),
send_video_bitrate_allocation_(false),
@@ -194,7 +189,6 @@ RTCPSender::RTCPSender(const RtpRtcp::Configuration& config)
builders_[kRtcpFir] = &RTCPSender::BuildFIR;
builders_[kRtcpRemb] = &RTCPSender::BuildREMB;
builders_[kRtcpBye] = &RTCPSender::BuildBYE;
- builders_[kRtcpApp] = &RTCPSender::BuildAPP;
builders_[kRtcpLossNotification] = &RTCPSender::BuildLossNotification;
builders_[kRtcpTmmbr] = &RTCPSender::BuildTMMBR;
builders_[kRtcpTmmbn] = &RTCPSender::BuildTMMBN;
@@ -262,8 +256,8 @@ int32_t RTCPSender::SendLossNotification(const FeedbackState& feedback_state,
return 0;
}
- return SendCompoundRTCP(feedback_state,
- {RTCPPacketType::kRtcpLossNotification});
+ return SendCompoundRTCPLocked(
+ feedback_state, {RTCPPacketType::kRtcpLossNotification}, 0, nullptr);
}
void RTCPSender::SetRemb(int64_t bitrate_bps, std::vector<uint32_t> ssrcs) {
@@ -544,7 +538,7 @@ void RTCPSender::SetTargetBitrate(unsigned int target_bitrate) {
std::unique_ptr<rtcp::RtcpPacket> RTCPSender::BuildTMMBR(
const RtcpContext& ctx) {
- if (ctx.feedback_state_.module == nullptr)
+ if (ctx.feedback_state_.receiver == nullptr)
return nullptr;
// Before sending the TMMBR check the received TMMBN, only an owner is
// allowed to raise the bitrate:
@@ -558,7 +552,7 @@ std::unique_ptr<rtcp::RtcpPacket> RTCPSender::BuildTMMBR(
// will accuire criticalSectionRTCPReceiver_ is a potental deadlock but
// since RTCPreceiver is not doing the reverse we should be fine
std::vector<rtcp::TmmbItem> candidates =
- ctx.feedback_state_.module->BoundingSet(&tmmbr_owner);
+ ctx.feedback_state_.receiver->BoundingSet(&tmmbr_owner);
if (!candidates.empty()) {
for (const auto& candidate : candidates) {
@@ -614,9 +608,6 @@ std::unique_ptr<rtcp::RtcpPacket> RTCPSender::BuildTMMBN(
std::unique_ptr<rtcp::RtcpPacket> RTCPSender::BuildAPP(const RtcpContext& ctx) {
rtcp::App* app = new rtcp::App();
app->SetSenderSsrc(ssrc_);
- app->SetSubType(app_sub_type_);
- app->SetName(app_name_);
- app->SetData(app_data_.get(), app_length_);
return std::unique_ptr<rtcp::RtcpPacket>(app);
}
@@ -712,52 +703,86 @@ int32_t RTCPSender::SendCompoundRTCP(
{
rtc::CritScope lock(&critical_section_rtcp_sender_);
- if (method_ == RtcpMode::kOff) {
- RTC_LOG(LS_WARNING) << "Can't send rtcp if it is disabled.";
- return -1;
+ auto result = ComputeCompoundRTCPPacket(feedback_state, packet_types,
+ nack_size, nack_list, &container);
+ if (result) {
+ return *result;
}
- // Add all flags as volatile. Non volatile entries will not be overwritten.
- // All new volatile flags added will be consumed by the end of this call.
- SetFlags(packet_types, true);
-
- // Prevent sending streams to send SR before any media has been sent.
- const bool can_calculate_rtp_timestamp = (last_frame_capture_time_ms_ >= 0);
- if (!can_calculate_rtp_timestamp) {
- bool consumed_sr_flag = ConsumeFlag(kRtcpSr);
- bool consumed_report_flag = sending_ && ConsumeFlag(kRtcpReport);
- bool sender_report = consumed_report_flag || consumed_sr_flag;
- if (sender_report && AllVolatileFlagsConsumed()) {
- // This call was for Sender Report and nothing else.
- return 0;
- }
- if (sending_ && method_ == RtcpMode::kCompound) {
- // Not allowed to send any RTCP packet without sender report.
- return -1;
- }
+ max_packet_size = max_packet_size_;
+ }
+
+ size_t bytes_sent = container.SendPackets(max_packet_size);
+ return bytes_sent == 0 ? -1 : 0;
+}
+
+int32_t RTCPSender::SendCompoundRTCPLocked(
+ const FeedbackState& feedback_state,
+ const std::set<RTCPPacketType>& packet_types,
+ int32_t nack_size,
+ const uint16_t* nack_list) {
+ PacketContainer container(transport_, event_log_);
+ auto result = ComputeCompoundRTCPPacket(feedback_state, packet_types,
+ nack_size, nack_list, &container);
+ if (result) {
+ return *result;
+ }
+ size_t bytes_sent = container.SendPackets(max_packet_size_);
+ return bytes_sent == 0 ? -1 : 0;
+}
+
+absl::optional<int32_t> RTCPSender::ComputeCompoundRTCPPacket(
+ const FeedbackState& feedback_state,
+ const std::set<RTCPPacketType>& packet_types,
+ int32_t nack_size,
+ const uint16_t* nack_list,
+ rtcp::CompoundPacket* out_packet) {
+ if (method_ == RtcpMode::kOff) {
+ RTC_LOG(LS_WARNING) << "Can't send rtcp if it is disabled.";
+ return -1;
+ }
+ // Add all flags as volatile. Non volatile entries will not be overwritten.
+ // All new volatile flags added will be consumed by the end of this call.
+ SetFlags(packet_types, true);
+
+ // Prevent sending streams to send SR before any media has been sent.
+ const bool can_calculate_rtp_timestamp = (last_frame_capture_time_ms_ >= 0);
+ if (!can_calculate_rtp_timestamp) {
+ bool consumed_sr_flag = ConsumeFlag(kRtcpSr);
+ bool consumed_report_flag = sending_ && ConsumeFlag(kRtcpReport);
+ bool sender_report = consumed_report_flag || consumed_sr_flag;
+ if (sender_report && AllVolatileFlagsConsumed()) {
+ // This call was for Sender Report and nothing else.
+ return 0;
}
+ if (sending_ && method_ == RtcpMode::kCompound) {
+ // Not allowed to send any RTCP packet without sender report.
+ return -1;
+ }
+ }
- if (packet_type_counter_.first_packet_time_ms == -1)
- packet_type_counter_.first_packet_time_ms = clock_->TimeInMilliseconds();
+ if (packet_type_counter_.first_packet_time_ms == -1)
+ packet_type_counter_.first_packet_time_ms = clock_->TimeInMilliseconds();
- // We need to send our NTP even if we haven't received any reports.
- RtcpContext context(feedback_state, nack_size, nack_list,
- clock_->TimeInMicroseconds());
+ // We need to send our NTP even if we haven't received any reports.
+ RtcpContext context(feedback_state, nack_size, nack_list,
+ clock_->TimeInMicroseconds());
- PrepareReport(feedback_state);
+ PrepareReport(feedback_state);
- std::unique_ptr<rtcp::RtcpPacket> packet_bye;
+ std::unique_ptr<rtcp::RtcpPacket> packet_bye;
- auto it = report_flags_.begin();
- while (it != report_flags_.end()) {
- auto builder_it = builders_.find(it->type);
- RTC_DCHECK(builder_it != builders_.end())
- << "Could not find builder for packet type " << it->type;
- if (it->is_volatile) {
- report_flags_.erase(it++);
- } else {
- ++it;
- }
+ auto it = report_flags_.begin();
+ while (it != report_flags_.end()) {
+ auto builder_it = builders_.find(it->type);
+ if (it->is_volatile) {
+ report_flags_.erase(it++);
+ } else {
+ ++it;
+ }
+ if (builder_it == builders_.end()) {
+ RTC_NOTREACHED() << "Could not find builder for packet type " << it->type;
+ } else {
BuilderFunc func = builder_it->second;
std::unique_ptr<rtcp::RtcpPacket> packet = (this->*func)(context);
if (packet == nullptr)
@@ -767,26 +792,23 @@ int32_t RTCPSender::SendCompoundRTCP(
if (builder_it->first == kRtcpBye) {
packet_bye = std::move(packet);
} else {
- container.Append(packet.release());
+ out_packet->Append(packet.release());
}
}
+ }
- // Append the BYE now at the end
- if (packet_bye) {
- container.Append(packet_bye.release());
- }
-
- if (packet_type_counter_observer_ != nullptr) {
- packet_type_counter_observer_->RtcpPacketTypesCounterUpdated(
- remote_ssrc_, packet_type_counter_);
- }
+ // Append the BYE now at the end
+ if (packet_bye) {
+ out_packet->Append(packet_bye.release());
+ }
- RTC_DCHECK(AllVolatileFlagsConsumed());
- max_packet_size = max_packet_size_;
+ if (packet_type_counter_observer_ != nullptr) {
+ packet_type_counter_observer_->RtcpPacketTypesCounterUpdated(
+ remote_ssrc_, packet_type_counter_);
}
- size_t bytes_sent = container.SendPackets(max_packet_size);
- return bytes_sent == 0 ? -1 : 0;
+ RTC_DCHECK(AllVolatileFlagsConsumed());
+ return absl::nullopt;
}
void RTCPSender::PrepareReport(const FeedbackState& feedback_state) {
@@ -877,25 +899,6 @@ void RTCPSender::SetCsrcs(const std::vector<uint32_t>& csrcs) {
csrcs_ = csrcs;
}
-int32_t RTCPSender::SetApplicationSpecificData(uint8_t subType,
- uint32_t name,
- const uint8_t* data,
- uint16_t length) {
- if (length % 4 != 0) {
- RTC_LOG(LS_ERROR) << "Failed to SetApplicationSpecificData.";
- return -1;
- }
- rtc::CritScope lock(&critical_section_rtcp_sender_);
-
- SetFlag(kRtcpApp, true);
- app_sub_type_ = subType;
- app_name_ = name;
- app_data_.reset(new uint8_t[length]);
- app_length_ = length;
- memcpy(app_data_.get(), data, length);
- return 0;
-}
-
void RTCPSender::SendRtcpXrReceiverReferenceTime(bool enable) {
rtc::CritScope lock(&critical_section_rtcp_sender_);
xr_send_receiver_reference_time_enabled_ = enable;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.h
index 32c1e1dbc10..d9422ebe8e0 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.h
@@ -23,13 +23,14 @@
#include "modules/remote_bitrate_estimator/include/bwe_defines.h"
#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "modules/rtp_rtcp/include/receive_statistics.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/rtp_rtcp/source/rtcp_nack_stats.h"
#include "modules/rtp_rtcp/source/rtcp_packet.h"
+#include "modules/rtp_rtcp/source/rtcp_packet/compound_packet.h"
#include "modules/rtp_rtcp/source/rtcp_packet/dlrr.h"
#include "modules/rtp_rtcp/source/rtcp_packet/report_block.h"
#include "modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h"
#include "rtc_base/constructor_magic.h"
#include "rtc_base/critical_section.h"
#include "rtc_base/random.h"
@@ -37,10 +38,10 @@
namespace webrtc {
-class ModuleRtpRtcpImpl;
+class RTCPReceiver;
class RtcEventLog;
-class RTCPSender {
+class RTCPSender final {
public:
struct FeedbackState {
FeedbackState();
@@ -60,90 +61,124 @@ class RTCPSender {
std::vector<rtcp::ReceiveTimeInfo> last_xr_rtis;
// Used when generating TMMBR.
- ModuleRtpRtcpImpl* module;
+ RTCPReceiver* receiver;
};
- explicit RTCPSender(const RtpRtcp::Configuration& config);
+ explicit RTCPSender(const RtpRtcpInterface::Configuration& config);
virtual ~RTCPSender();
- RtcpMode Status() const;
- void SetRTCPStatus(RtcpMode method);
+ RtcpMode Status() const RTC_LOCKS_EXCLUDED(critical_section_rtcp_sender_);
+ void SetRTCPStatus(RtcpMode method)
+ RTC_LOCKS_EXCLUDED(critical_section_rtcp_sender_);
- bool Sending() const;
+ bool Sending() const RTC_LOCKS_EXCLUDED(critical_section_rtcp_sender_);
int32_t SetSendingStatus(const FeedbackState& feedback_state,
- bool enabled); // combine the functions
+ bool enabled)
+ RTC_LOCKS_EXCLUDED(
+ critical_section_rtcp_sender_); // combine the functions
- int32_t SetNackStatus(bool enable);
+ int32_t SetNackStatus(bool enable)
+ RTC_LOCKS_EXCLUDED(critical_section_rtcp_sender_);
- void SetTimestampOffset(uint32_t timestamp_offset);
+ void SetTimestampOffset(uint32_t timestamp_offset)
+ RTC_LOCKS_EXCLUDED(critical_section_rtcp_sender_);
// TODO(bugs.webrtc.org/6458): Remove default parameter value when all the
// depending projects are updated to correctly set payload type.
void SetLastRtpTime(uint32_t rtp_timestamp,
int64_t capture_time_ms,
- int8_t payload_type = -1);
+ int8_t payload_type = -1)
+ RTC_LOCKS_EXCLUDED(critical_section_rtcp_sender_);
- void SetRtpClockRate(int8_t payload_type, int rtp_clock_rate_hz);
+ void SetRtpClockRate(int8_t payload_type, int rtp_clock_rate_hz)
+ RTC_LOCKS_EXCLUDED(critical_section_rtcp_sender_);
uint32_t SSRC() const { return ssrc_; }
- void SetRemoteSSRC(uint32_t ssrc);
+ void SetRemoteSSRC(uint32_t ssrc)
+ RTC_LOCKS_EXCLUDED(critical_section_rtcp_sender_);
- int32_t SetCNAME(const char* cName);
+ int32_t SetCNAME(const char* cName)
+ RTC_LOCKS_EXCLUDED(critical_section_rtcp_sender_);
- int32_t AddMixedCNAME(uint32_t SSRC, const char* c_name);
+ int32_t AddMixedCNAME(uint32_t SSRC, const char* c_name)
+ RTC_LOCKS_EXCLUDED(critical_section_rtcp_sender_);
- int32_t RemoveMixedCNAME(uint32_t SSRC);
+ int32_t RemoveMixedCNAME(uint32_t SSRC)
+ RTC_LOCKS_EXCLUDED(critical_section_rtcp_sender_);
- bool TimeToSendRTCPReport(bool sendKeyframeBeforeRTP = false) const;
+ bool TimeToSendRTCPReport(bool sendKeyframeBeforeRTP = false) const
+ RTC_LOCKS_EXCLUDED(critical_section_rtcp_sender_);
int32_t SendRTCP(const FeedbackState& feedback_state,
RTCPPacketType packetType,
int32_t nackSize = 0,
- const uint16_t* nackList = 0);
+ const uint16_t* nackList = 0)
+ RTC_LOCKS_EXCLUDED(critical_section_rtcp_sender_);
int32_t SendCompoundRTCP(const FeedbackState& feedback_state,
const std::set<RTCPPacketType>& packetTypes,
int32_t nackSize = 0,
- const uint16_t* nackList = 0);
+ const uint16_t* nackList = nullptr)
+ RTC_LOCKS_EXCLUDED(critical_section_rtcp_sender_);
int32_t SendLossNotification(const FeedbackState& feedback_state,
uint16_t last_decoded_seq_num,
uint16_t last_received_seq_num,
bool decodability_flag,
- bool buffering_allowed);
+ bool buffering_allowed)
+ RTC_LOCKS_EXCLUDED(critical_section_rtcp_sender_);
- void SetRemb(int64_t bitrate_bps, std::vector<uint32_t> ssrcs);
+ void SetRemb(int64_t bitrate_bps, std::vector<uint32_t> ssrcs)
+ RTC_LOCKS_EXCLUDED(critical_section_rtcp_sender_);
- void UnsetRemb();
+ void UnsetRemb() RTC_LOCKS_EXCLUDED(critical_section_rtcp_sender_);
- bool TMMBR() const;
+ bool TMMBR() const RTC_LOCKS_EXCLUDED(critical_section_rtcp_sender_);
- void SetTMMBRStatus(bool enable);
+ void SetTMMBRStatus(bool enable)
+ RTC_LOCKS_EXCLUDED(critical_section_rtcp_sender_);
- void SetMaxRtpPacketSize(size_t max_packet_size);
+ void SetMaxRtpPacketSize(size_t max_packet_size)
+ RTC_LOCKS_EXCLUDED(critical_section_rtcp_sender_);
- void SetTmmbn(std::vector<rtcp::TmmbItem> bounding_set);
+ void SetTmmbn(std::vector<rtcp::TmmbItem> bounding_set)
+ RTC_LOCKS_EXCLUDED(critical_section_rtcp_sender_);
- int32_t SetApplicationSpecificData(uint8_t subType,
- uint32_t name,
- const uint8_t* data,
- uint16_t length);
+ void SendRtcpXrReceiverReferenceTime(bool enable)
+ RTC_LOCKS_EXCLUDED(critical_section_rtcp_sender_);
- void SendRtcpXrReceiverReferenceTime(bool enable);
+ bool RtcpXrReceiverReferenceTime() const
+ RTC_LOCKS_EXCLUDED(critical_section_rtcp_sender_);
- bool RtcpXrReceiverReferenceTime() const;
+ void SetCsrcs(const std::vector<uint32_t>& csrcs)
+ RTC_LOCKS_EXCLUDED(critical_section_rtcp_sender_);
- void SetCsrcs(const std::vector<uint32_t>& csrcs);
-
- void SetTargetBitrate(unsigned int target_bitrate);
- void SetVideoBitrateAllocation(const VideoBitrateAllocation& bitrate);
+ void SetTargetBitrate(unsigned int target_bitrate)
+ RTC_LOCKS_EXCLUDED(critical_section_rtcp_sender_);
+ void SetVideoBitrateAllocation(const VideoBitrateAllocation& bitrate)
+ RTC_LOCKS_EXCLUDED(critical_section_rtcp_sender_);
void SendCombinedRtcpPacket(
- std::vector<std::unique_ptr<rtcp::RtcpPacket>> rtcp_packets);
+ std::vector<std::unique_ptr<rtcp::RtcpPacket>> rtcp_packets)
+ RTC_LOCKS_EXCLUDED(critical_section_rtcp_sender_);
private:
class RtcpContext;
+ int32_t SendCompoundRTCPLocked(const FeedbackState& feedback_state,
+ const std::set<RTCPPacketType>& packet_types,
+ int32_t nack_size,
+ const uint16_t* nack_list)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+
+ absl::optional<int32_t> ComputeCompoundRTCPPacket(
+ const FeedbackState& feedback_state,
+ const std::set<RTCPPacketType>& packet_types,
+ int32_t nack_size,
+ const uint16_t* nack_list,
+ rtcp::CompoundPacket* out_packet)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
+
// Determine which RTCP messages should be sent and setup flags.
void PrepareReport(const FeedbackState& feedback_state)
RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_);
@@ -237,13 +272,6 @@ class RTCPSender {
uint32_t packet_oh_send_ RTC_GUARDED_BY(critical_section_rtcp_sender_);
size_t max_packet_size_ RTC_GUARDED_BY(critical_section_rtcp_sender_);
- // APP
- uint8_t app_sub_type_ RTC_GUARDED_BY(critical_section_rtcp_sender_);
- uint32_t app_name_ RTC_GUARDED_BY(critical_section_rtcp_sender_);
- std::unique_ptr<uint8_t[]> app_data_
- RTC_GUARDED_BY(critical_section_rtcp_sender_);
- uint16_t app_length_ RTC_GUARDED_BY(critical_section_rtcp_sender_);
-
// True if sending of XR Receiver reference time report is enabled.
bool xr_send_receiver_reference_time_enabled_
RTC_GUARDED_BY(critical_section_rtcp_sender_);
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender_unittest.cc
index 754ad893271..4b6d4a3da94 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender_unittest.cc
@@ -18,7 +18,7 @@
#include "modules/rtp_rtcp/source/rtcp_packet/bye.h"
#include "modules/rtp_rtcp/source/rtcp_packet/common_header.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
-#include "modules/rtp_rtcp/source/rtp_rtcp_impl.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "modules/rtp_rtcp/source/time_util.h"
#include "rtc_base/rate_limiter.h"
#include "test/gmock.h"
@@ -76,8 +76,8 @@ class RtcpSenderTest : public ::testing::Test {
: clock_(1335900000),
receive_statistics_(ReceiveStatistics::Create(&clock_)),
retransmission_rate_limiter_(&clock_, 1000) {
- RtpRtcp::Configuration configuration = GetDefaultConfig();
- rtp_rtcp_impl_.reset(new ModuleRtpRtcpImpl(configuration));
+ RtpRtcpInterface::Configuration configuration = GetDefaultConfig();
+ rtp_rtcp_impl_.reset(new ModuleRtpRtcpImpl2(configuration));
rtcp_sender_.reset(new RTCPSender(configuration));
rtcp_sender_->SetRemoteSSRC(kRemoteSsrc);
rtcp_sender_->SetTimestampOffset(kStartRtpTimestamp);
@@ -85,8 +85,8 @@ class RtcpSenderTest : public ::testing::Test {
/*payload_type=*/0);
}
- RtpRtcp::Configuration GetDefaultConfig() {
- RtpRtcp::Configuration configuration;
+ RtpRtcpInterface::Configuration GetDefaultConfig() {
+ RtpRtcpInterface::Configuration configuration;
configuration.audio = false;
configuration.clock = &clock_;
configuration.outgoing_transport = &test_transport_;
@@ -115,7 +115,7 @@ class RtcpSenderTest : public ::testing::Test {
SimulatedClock clock_;
TestTransport test_transport_;
std::unique_ptr<ReceiveStatistics> receive_statistics_;
- std::unique_ptr<ModuleRtpRtcpImpl> rtp_rtcp_impl_;
+ std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp_impl_;
std::unique_ptr<RTCPSender> rtcp_sender_;
RateLimiter retransmission_rate_limiter_;
};
@@ -191,7 +191,7 @@ TEST_F(RtcpSenderTest, SendConsecutiveSrWithExactSlope) {
}
TEST_F(RtcpSenderTest, DoNotSendSrBeforeRtp) {
- RtpRtcp::Configuration config;
+ RtpRtcpInterface::Configuration config;
config.clock = &clock_;
config.receive_statistics = receive_statistics_.get();
config.outgoing_transport = &test_transport_;
@@ -213,7 +213,7 @@ TEST_F(RtcpSenderTest, DoNotSendSrBeforeRtp) {
}
TEST_F(RtcpSenderTest, DoNotSendCompundBeforeRtp) {
- RtpRtcp::Configuration config;
+ RtpRtcpInterface::Configuration config;
config.clock = &clock_;
config.receive_statistics = receive_statistics_.get();
config.outgoing_transport = &test_transport_;
@@ -315,47 +315,6 @@ TEST_F(RtcpSenderTest, StopSendingTriggersBye) {
EXPECT_EQ(kSenderSsrc, parser()->bye()->sender_ssrc());
}
-TEST_F(RtcpSenderTest, SendApp) {
- const uint8_t kSubType = 30;
- uint32_t name = 'n' << 24;
- name += 'a' << 16;
- name += 'm' << 8;
- name += 'e';
- const uint8_t kData[] = {'t', 'e', 's', 't', 'd', 'a', 't', 'a'};
- EXPECT_EQ(0, rtcp_sender_->SetApplicationSpecificData(kSubType, name, kData,
- sizeof(kData)));
- rtcp_sender_->SetRTCPStatus(RtcpMode::kReducedSize);
- EXPECT_EQ(0, rtcp_sender_->SendRTCP(feedback_state(), kRtcpApp));
- EXPECT_EQ(1, parser()->app()->num_packets());
- EXPECT_EQ(kSubType, parser()->app()->sub_type());
- EXPECT_EQ(name, parser()->app()->name());
- EXPECT_EQ(sizeof(kData), parser()->app()->data_size());
- EXPECT_EQ(0, memcmp(kData, parser()->app()->data(), sizeof(kData)));
-}
-
-TEST_F(RtcpSenderTest, SendEmptyApp) {
- const uint8_t kSubType = 30;
- const uint32_t kName = 0x6E616D65;
-
- EXPECT_EQ(
- 0, rtcp_sender_->SetApplicationSpecificData(kSubType, kName, nullptr, 0));
-
- rtcp_sender_->SetRTCPStatus(RtcpMode::kReducedSize);
- EXPECT_EQ(0, rtcp_sender_->SendRTCP(feedback_state(), kRtcpApp));
- EXPECT_EQ(1, parser()->app()->num_packets());
- EXPECT_EQ(kSubType, parser()->app()->sub_type());
- EXPECT_EQ(kName, parser()->app()->name());
- EXPECT_EQ(0U, parser()->app()->data_size());
-}
-
-TEST_F(RtcpSenderTest, SetInvalidApplicationSpecificData) {
- const uint8_t kData[] = {'t', 'e', 's', 't', 'd', 'a', 't'};
- const uint16_t kInvalidDataLength = sizeof(kData) / sizeof(kData[0]);
- EXPECT_EQ(-1,
- rtcp_sender_->SetApplicationSpecificData(
- 0, 0, kData, kInvalidDataLength)); // Should by multiple of 4.
-}
-
TEST_F(RtcpSenderTest, SendFir) {
rtcp_sender_->SetRTCPStatus(RtcpMode::kReducedSize);
EXPECT_EQ(0, rtcp_sender_->SendRTCP(feedback_state(), kRtcpFir));
@@ -563,7 +522,7 @@ TEST_F(RtcpSenderTest, TestNoXrRrtrSentIfNotEnabled) {
TEST_F(RtcpSenderTest, TestRegisterRtcpPacketTypeObserver) {
RtcpPacketTypeCounterObserverImpl observer;
- RtpRtcp::Configuration config;
+ RtpRtcpInterface::Configuration config;
config.clock = &clock_;
config.receive_statistics = receive_statistics_.get();
config.outgoing_transport = &test_transport_;
@@ -691,7 +650,7 @@ TEST_F(RtcpSenderTest, ByeMustBeLast) {
}));
// Re-configure rtcp_sender_ with mock_transport_
- RtpRtcp::Configuration config;
+ RtpRtcpInterface::Configuration config;
config.clock = &clock_;
config.receive_statistics = receive_statistics_.get();
config.outgoing_transport = &mock_transport;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_config.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_config.h
index 2cbd1045d27..8a8fd6aed8f 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_config.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_config.h
@@ -28,8 +28,8 @@ class MediaReceiverRtcpObserver {
public:
virtual ~MediaReceiverRtcpObserver() = default;
- // All message handlers have default empty implementation. This way user needs
- // to implement only those she is interested in.
+ // All message handlers have default empty implementation. This way users only
+ // need to implement the ones they are interested in.
virtual void OnSenderReport(uint32_t sender_ssrc,
NtpTime ntp_time,
uint32_t rtp_time) {}
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_impl_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_impl_unittest.cc
index 727a9bca231..9c4c5adf79f 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_impl_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_impl_unittest.cc
@@ -55,15 +55,17 @@ using ::webrtc::test::RtcpPacketParser;
class MockReceiveStatisticsProvider : public webrtc::ReceiveStatisticsProvider {
public:
- MOCK_METHOD1(RtcpReportBlocks, std::vector<ReportBlock>(size_t));
+ MOCK_METHOD(std::vector<ReportBlock>, RtcpReportBlocks, (size_t), (override));
};
class MockMediaReceiverRtcpObserver : public webrtc::MediaReceiverRtcpObserver {
public:
- MOCK_METHOD3(OnSenderReport, void(uint32_t, NtpTime, uint32_t));
- MOCK_METHOD1(OnBye, void(uint32_t));
- MOCK_METHOD2(OnBitrateAllocation,
- void(uint32_t, const VideoBitrateAllocation&));
+ MOCK_METHOD(void, OnSenderReport, (uint32_t, NtpTime, uint32_t), (override));
+ MOCK_METHOD(void, OnBye, (uint32_t), (override));
+ MOCK_METHOD(void,
+ OnBitrateAllocation,
+ (uint32_t, const VideoBitrateAllocation&),
+ (override));
};
// Since some tests will need to wait for this period, make it small to avoid
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_unittest.cc
index 5fb2aa55eb2..9c181c65266 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_unittest.cc
@@ -42,7 +42,10 @@ using ::webrtc::test::RtcpPacketParser;
class MockMediaReceiverRtcpObserver : public webrtc::MediaReceiverRtcpObserver {
public:
- MOCK_METHOD3(OnSenderReport, void(uint32_t, webrtc::NtpTime, uint32_t));
+ MOCK_METHOD(void,
+ OnSenderReport,
+ (uint32_t, webrtc::NtpTime, uint32_t),
+ (override));
};
constexpr int kTimeoutMs = 1000;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.cc
index 30dedb192f2..3b098185766 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.cc
@@ -10,6 +10,7 @@
#include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h"
+#include <bitset>
#include <cstdint>
#include "api/array_view.h"
@@ -23,6 +24,7 @@ namespace webrtc {
constexpr RTPExtensionType RtpDependencyDescriptorExtension::kId;
constexpr char RtpDependencyDescriptorExtension::kUri[];
+constexpr std::bitset<32> RtpDependencyDescriptorExtension::kAllChainsAreActive;
bool RtpDependencyDescriptorExtension::Parse(
rtc::ArrayView<const uint8_t> data,
@@ -34,16 +36,20 @@ bool RtpDependencyDescriptorExtension::Parse(
size_t RtpDependencyDescriptorExtension::ValueSize(
const FrameDependencyStructure& structure,
+ std::bitset<32> active_chains,
const DependencyDescriptor& descriptor) {
- RtpDependencyDescriptorWriter writer(/*data=*/{}, structure, descriptor);
+ RtpDependencyDescriptorWriter writer(/*data=*/{}, structure, active_chains,
+ descriptor);
return DivideRoundUp(writer.ValueSizeBits(), 8);
}
bool RtpDependencyDescriptorExtension::Write(
rtc::ArrayView<uint8_t> data,
const FrameDependencyStructure& structure,
+ std::bitset<32> active_chains,
const DependencyDescriptor& descriptor) {
- RtpDependencyDescriptorWriter writer(data, structure, descriptor);
+ RtpDependencyDescriptorWriter writer(data, structure, active_chains,
+ descriptor);
return writer.Write();
}
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h
index b99230c56b3..de16eeab2a3 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h
@@ -10,6 +10,7 @@
#ifndef MODULES_RTP_RTCP_SOURCE_RTP_DEPENDENCY_DESCRIPTOR_EXTENSION_H_
#define MODULES_RTP_RTCP_SOURCE_RTP_DEPENDENCY_DESCRIPTOR_EXTENSION_H_
+#include <bitset>
#include <cstdint>
#include "api/array_view.h"
@@ -34,10 +35,24 @@ class RtpDependencyDescriptorExtension {
DependencyDescriptor* descriptor);
static size_t ValueSize(const FrameDependencyStructure& structure,
+ const DependencyDescriptor& descriptor) {
+ return ValueSize(structure, kAllChainsAreActive, descriptor);
+ }
+ static size_t ValueSize(const FrameDependencyStructure& structure,
+ std::bitset<32> active_chains,
const DependencyDescriptor& descriptor);
static bool Write(rtc::ArrayView<uint8_t> data,
const FrameDependencyStructure& structure,
+ const DependencyDescriptor& descriptor) {
+ return Write(data, structure, kAllChainsAreActive, descriptor);
+ }
+ static bool Write(rtc::ArrayView<uint8_t> data,
+ const FrameDependencyStructure& structure,
+ std::bitset<32> active_chains,
const DependencyDescriptor& descriptor);
+
+ private:
+ static constexpr std::bitset<32> kAllChainsAreActive = ~uint32_t{0};
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension_unittest.cc
new file mode 100644
index 00000000000..11d809693c7
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension_unittest.cc
@@ -0,0 +1,119 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h"
+
+#include "api/array_view.h"
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "common_video/generic_frame_descriptor/generic_frame_info.h"
+
+#include "test/gmock.h"
+
+namespace webrtc {
+namespace {
+
+using ::testing::Each;
+
+TEST(RtpDependencyDescriptorExtensionTest, Writer3BytesForPerfectTemplate) {
+ uint8_t buffer[3];
+ FrameDependencyStructure structure;
+ structure.num_decode_targets = 2;
+ structure.num_chains = 2;
+ structure.templates = {
+ FrameDependencyTemplate().Dtis("SR").FrameDiffs({1}).ChainDiffs({2, 2})};
+ DependencyDescriptor descriptor;
+ descriptor.frame_dependencies = structure.templates[0];
+
+ EXPECT_EQ(RtpDependencyDescriptorExtension::ValueSize(structure, descriptor),
+ 3u);
+ EXPECT_TRUE(
+ RtpDependencyDescriptorExtension::Write(buffer, structure, descriptor));
+}
+
+TEST(RtpDependencyDescriptorExtensionTest, WriteZeroInUnusedBits) {
+ uint8_t buffer[32];
+ std::memset(buffer, 0xff, sizeof(buffer));
+ FrameDependencyStructure structure;
+ structure.num_decode_targets = 2;
+ structure.num_chains = 2;
+ structure.templates = {
+ FrameDependencyTemplate().Dtis("SR").FrameDiffs({1}).ChainDiffs({1, 1})};
+ DependencyDescriptor descriptor;
+ descriptor.frame_dependencies = structure.templates[0];
+ descriptor.frame_dependencies.frame_diffs = {2};
+
+ // To test unused bytes are zeroed, need a buffer large enough.
+ size_t value_size =
+ RtpDependencyDescriptorExtension::ValueSize(structure, descriptor);
+ ASSERT_LT(value_size, sizeof(buffer));
+
+ ASSERT_TRUE(
+ RtpDependencyDescriptorExtension::Write(buffer, structure, descriptor));
+
+ const uint8_t* unused_bytes = buffer + value_size;
+ size_t num_unused_bytes = buffer + sizeof(buffer) - unused_bytes;
+ // Check remaining bytes are zeroed.
+ EXPECT_THAT(rtc::MakeArrayView(unused_bytes, num_unused_bytes), Each(0));
+}
+
+// In practice chain diff for inactive chain will grow uboundly because no
+// frames are produced for it, that shouldn't block writing the extension.
+TEST(RtpDependencyDescriptorExtensionTest,
+ TemplateMatchingSkipsInactiveChains) {
+ uint8_t buffer[3];
+ FrameDependencyStructure structure;
+ structure.num_decode_targets = 2;
+ structure.num_chains = 2;
+ structure.templates = {
+ FrameDependencyTemplate().Dtis("SR").ChainDiffs({2, 2})};
+ DependencyDescriptor descriptor;
+ descriptor.frame_dependencies = structure.templates[0];
+
+ // Set only 1st chain as active.
+ std::bitset<32> active_chains = 0b01;
+ descriptor.frame_dependencies.chain_diffs[1] = 1000;
+
+ // Expect perfect template match since the only difference is for an inactive
+ // chain. Pefect template match consumes 3 bytes.
+ EXPECT_EQ(RtpDependencyDescriptorExtension::ValueSize(
+ structure, active_chains, descriptor),
+ 3u);
+ EXPECT_TRUE(RtpDependencyDescriptorExtension::Write(
+ buffer, structure, active_chains, descriptor));
+}
+
+TEST(RtpDependencyDescriptorExtensionTest,
+ AcceptsInvalidChainDiffForInactiveChainWhenChainsAreCustom) {
+ uint8_t buffer[256];
+ FrameDependencyStructure structure;
+ structure.num_decode_targets = 2;
+ structure.num_chains = 2;
+ structure.templates = {
+ FrameDependencyTemplate().Dtis("SR").ChainDiffs({2, 2})};
+ DependencyDescriptor descriptor;
+ descriptor.frame_dependencies = structure.templates[0];
+
+ // Set only 1st chain as active.
+ std::bitset<32> active_chains = 0b01;
+ // Set chain_diff different to the template to make it custom.
+ descriptor.frame_dependencies.chain_diffs[0] = 1;
+ // Set chain diff for inactive chain beyound limit of 255 max chain diff.
+ descriptor.frame_dependencies.chain_diffs[1] = 1000;
+
+ // Because chains are custom, should use more than base 3 bytes.
+ EXPECT_GT(RtpDependencyDescriptorExtension::ValueSize(
+ structure, active_chains, descriptor),
+ 3u);
+ EXPECT_TRUE(RtpDependencyDescriptorExtension::Write(
+ buffer, structure, active_chains, descriptor));
+}
+
+} // namespace
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_dependency_descriptor_reader.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_dependency_descriptor_reader.cc
index 07b6a3b3c32..01b893a94e8 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_dependency_descriptor_reader.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_dependency_descriptor_reader.cc
@@ -18,13 +18,6 @@
#include "rtc_base/checks.h"
namespace webrtc {
-namespace {
-
-constexpr int kMaxTemporalId = 7;
-constexpr int kMaxSpatialId = 3;
-constexpr int kMaxTemplates = 64;
-
-} // namespace
RtpDependencyDescriptorReader::RtpDependencyDescriptorReader(
rtc::ArrayView<const uint8_t> raw_data,
@@ -95,7 +88,7 @@ void RtpDependencyDescriptorReader::ReadTemplateLayers() {
int spatial_id = 0;
NextLayerIdc next_layer_idc;
do {
- if (templates.size() == kMaxTemplates) {
+ if (templates.size() == DependencyDescriptor::kMaxTemplates) {
parsing_failed_ = true;
break;
}
@@ -107,14 +100,14 @@ void RtpDependencyDescriptorReader::ReadTemplateLayers() {
next_layer_idc = static_cast<NextLayerIdc>(ReadBits(2));
if (next_layer_idc == kNextTemporalLayer) {
temporal_id++;
- if (temporal_id > kMaxTemporalId) {
+ if (temporal_id >= DependencyDescriptor::kMaxTemporalIds) {
parsing_failed_ = true;
break;
}
} else if (next_layer_idc == kNextSpatialLayer) {
temporal_id = 0;
spatial_id++;
- if (spatial_id > kMaxSpatialId) {
+ if (spatial_id >= DependencyDescriptor::kMaxSpatialIds) {
parsing_failed_ = true;
break;
}
@@ -198,9 +191,10 @@ void RtpDependencyDescriptorReader::ReadExtendedFields() {
}
void RtpDependencyDescriptorReader::ReadFrameDependencyDefinition() {
- size_t template_index = (frame_dependency_template_id_ + kMaxTemplates -
- structure_->structure_id) %
- kMaxTemplates;
+ size_t template_index =
+ (frame_dependency_template_id_ + DependencyDescriptor::kMaxTemplates -
+ structure_->structure_id) %
+ DependencyDescriptor::kMaxTemplates;
if (template_index >= structure_->templates.size()) {
parsing_failed_ = true;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.cc
index 9e1a4256662..c5f229c59f2 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.cc
@@ -9,6 +9,7 @@
*/
#include "modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.h"
+#include <bitset>
#include <cstddef>
#include <cstdint>
#include <iterator>
@@ -23,8 +24,6 @@
namespace webrtc {
namespace {
-constexpr int kMaxTemplates = 64;
-
enum class NextLayerIdc : uint64_t {
kSameLayer = 0,
kNextTemporal = 1,
@@ -35,12 +34,8 @@ enum class NextLayerIdc : uint64_t {
NextLayerIdc GetNextLayerIdc(const FrameDependencyTemplate& previous,
const FrameDependencyTemplate& next) {
- // TODO(danilchap): Move these constants to header shared between reader and
- // writer.
- static constexpr int kMaxSpatialId = 3;
- static constexpr int kMaxTemporalId = 7;
- RTC_DCHECK_LE(next.spatial_id, kMaxSpatialId);
- RTC_DCHECK_LE(next.temporal_id, kMaxTemporalId);
+ RTC_DCHECK_LT(next.spatial_id, DependencyDescriptor::kMaxSpatialIds);
+ RTC_DCHECK_LT(next.temporal_id, DependencyDescriptor::kMaxTemporalIds);
if (next.spatial_id == previous.spatial_id &&
next.temporal_id == previous.temporal_id) {
@@ -61,9 +56,11 @@ NextLayerIdc GetNextLayerIdc(const FrameDependencyTemplate& previous,
RtpDependencyDescriptorWriter::RtpDependencyDescriptorWriter(
rtc::ArrayView<uint8_t> data,
const FrameDependencyStructure& structure,
+ std::bitset<32> active_chains,
const DependencyDescriptor& descriptor)
: descriptor_(descriptor),
structure_(structure),
+ active_chains_(active_chains),
bit_writer_(data.data(), data.size()) {
FindBestTemplate();
}
@@ -74,6 +71,14 @@ bool RtpDependencyDescriptorWriter::Write() {
WriteExtendedFields();
WriteFrameDependencyDefinition();
}
+ size_t remaining_bits = bit_writer_.RemainingBitCount();
+ // Zero remaining memory to avoid leaving it uninitialized.
+ if (remaining_bits % 64 != 0) {
+ WriteBits(/*val=*/0, remaining_bits % 64);
+ }
+ for (size_t i = 0; i < remaining_bits / 64; ++i) {
+ WriteBits(/*val=*/0, 64);
+ }
return !build_failed_;
}
@@ -126,8 +131,14 @@ RtpDependencyDescriptorWriter::CalculateMatch(
result.need_custom_dtis =
descriptor_.frame_dependencies.decode_target_indications !=
frame_template->decode_target_indications;
- result.need_custom_chains =
- descriptor_.frame_dependencies.chain_diffs != frame_template->chain_diffs;
+ result.need_custom_chains = false;
+ for (int i = 0; i < structure_.num_chains; ++i) {
+ if (active_chains_[i] && descriptor_.frame_dependencies.chain_diffs[i] !=
+ frame_template->chain_diffs[i]) {
+ result.need_custom_chains = true;
+ break;
+ }
+ }
result.extra_size_bits = 0;
if (result.need_custom_fdiffs) {
@@ -193,7 +204,7 @@ bool RtpDependencyDescriptorWriter::HasExtendedFields() const {
uint64_t RtpDependencyDescriptorWriter::TemplateId() const {
return (best_template_.template_position - structure_.templates.begin() +
structure_.structure_id) %
- kMaxTemplates;
+ DependencyDescriptor::kMaxTemplates;
}
void RtpDependencyDescriptorWriter::WriteBits(uint64_t val, size_t bit_count) {
@@ -209,9 +220,10 @@ void RtpDependencyDescriptorWriter::WriteNonSymmetric(uint32_t value,
void RtpDependencyDescriptorWriter::WriteTemplateDependencyStructure() {
RTC_DCHECK_GE(structure_.structure_id, 0);
- RTC_DCHECK_LT(structure_.structure_id, kMaxTemplates);
+ RTC_DCHECK_LT(structure_.structure_id, DependencyDescriptor::kMaxTemplates);
RTC_DCHECK_GT(structure_.num_decode_targets, 0);
- RTC_DCHECK_LE(structure_.num_decode_targets, 1 << 5);
+ RTC_DCHECK_LE(structure_.num_decode_targets,
+ DependencyDescriptor::kMaxDecodeTargets);
WriteBits(structure_.structure_id, 6);
WriteBits(structure_.num_decode_targets - 1, 5);
@@ -228,7 +240,7 @@ void RtpDependencyDescriptorWriter::WriteTemplateDependencyStructure() {
void RtpDependencyDescriptorWriter::WriteTemplateLayers() {
const auto& templates = structure_.templates;
RTC_DCHECK(!templates.empty());
- RTC_DCHECK_LE(templates.size(), kMaxTemplates);
+ RTC_DCHECK_LE(templates.size(), DependencyDescriptor::kMaxTemplates);
RTC_DCHECK_EQ(templates[0].spatial_id, 0);
RTC_DCHECK_EQ(templates[0].temporal_id, 0);
@@ -363,7 +375,9 @@ void RtpDependencyDescriptorWriter::WriteFrameFdiffs() {
void RtpDependencyDescriptorWriter::WriteFrameChains() {
RTC_DCHECK_EQ(descriptor_.frame_dependencies.chain_diffs.size(),
structure_.num_chains);
- for (int chain_diff : descriptor_.frame_dependencies.chain_diffs) {
+ for (int i = 0; i < structure_.num_chains; ++i) {
+ int chain_diff =
+ active_chains_[i] ? descriptor_.frame_dependencies.chain_diffs[i] : 0;
RTC_DCHECK_GE(chain_diff, 0);
RTC_DCHECK_LT(chain_diff, 1 << 8);
WriteBits(chain_diff, 8);
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.h
index 5a823b6e86a..99fefecea6e 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.h
@@ -10,6 +10,7 @@
#ifndef MODULES_RTP_RTCP_SOURCE_RTP_DEPENDENCY_DESCRIPTOR_WRITER_H_
#define MODULES_RTP_RTCP_SOURCE_RTP_DEPENDENCY_DESCRIPTOR_WRITER_H_
+#include <bitset>
#include <cstddef>
#include <cstdint>
#include <vector>
@@ -25,6 +26,7 @@ class RtpDependencyDescriptorWriter {
// |descriptor| matches the |structure|.
RtpDependencyDescriptorWriter(rtc::ArrayView<uint8_t> data,
const FrameDependencyStructure& structure,
+ std::bitset<32> active_chains,
const DependencyDescriptor& descriptor);
// Serializes DependencyDescriptor rtp header extension.
@@ -77,6 +79,7 @@ class RtpDependencyDescriptorWriter {
bool build_failed_ = false;
const DependencyDescriptor& descriptor_;
const FrameDependencyStructure& structure_;
+ std::bitset<32> active_chains_;
rtc::BitBufferWriter bit_writer_;
TemplateMatch best_template_;
};
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension_map.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension_map.cc
index f59f9c4ebbd..63562c5b890 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension_map.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension_map.cc
@@ -41,7 +41,6 @@ constexpr ExtensionInfo kExtensions[] = {
CreateExtensionInfo<PlayoutDelayLimits>(),
CreateExtensionInfo<VideoContentTypeExtension>(),
CreateExtensionInfo<VideoTimingExtension>(),
- CreateExtensionInfo<FrameMarkingExtension>(),
CreateExtensionInfo<RtpStreamId>(),
CreateExtensionInfo<RepairedRtpStreamId>(),
CreateExtensionInfo<RtpMid>(),
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc
index fefe6c618f6..527874d785c 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc
@@ -525,86 +525,6 @@ bool VideoTimingExtension::Write(rtc::ArrayView<uint8_t> data,
return true;
}
-// Frame Marking.
-//
-// Meta-information about an RTP stream outside the encrypted media payload,
-// useful for an RTP switch to do codec-agnostic selective forwarding
-// without decrypting the payload.
-//
-// For non-scalable streams:
-// 0 1
-// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | ID | L = 0 |S|E|I|D|0 0 0 0|
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-//
-// For scalable streams:
-// 0 1 2 3
-// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | ID | L = 2 |S|E|I|D|B| TID | LID | TL0PICIDX |
-// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-
-constexpr RTPExtensionType FrameMarkingExtension::kId;
-constexpr const char FrameMarkingExtension::kUri[];
-
-bool FrameMarkingExtension::IsScalable(uint8_t temporal_id, uint8_t layer_id) {
- return temporal_id != kNoTemporalIdx || layer_id != kNoSpatialIdx;
-}
-
-bool FrameMarkingExtension::Parse(rtc::ArrayView<const uint8_t> data,
- FrameMarking* frame_marking) {
- RTC_DCHECK(frame_marking);
-
- if (data.size() != 1 && data.size() != 3)
- return false;
-
- frame_marking->start_of_frame = (data[0] & 0x80) != 0;
- frame_marking->end_of_frame = (data[0] & 0x40) != 0;
- frame_marking->independent_frame = (data[0] & 0x20) != 0;
- frame_marking->discardable_frame = (data[0] & 0x10) != 0;
-
- if (data.size() == 3) {
- frame_marking->base_layer_sync = (data[0] & 0x08) != 0;
- frame_marking->temporal_id = data[0] & 0x7;
- frame_marking->layer_id = data[1];
- frame_marking->tl0_pic_idx = data[2];
- } else {
- // non-scalable
- frame_marking->base_layer_sync = false;
- frame_marking->temporal_id = kNoTemporalIdx;
- frame_marking->layer_id = kNoSpatialIdx;
- frame_marking->tl0_pic_idx = 0;
- }
- return true;
-}
-
-size_t FrameMarkingExtension::ValueSize(const FrameMarking& frame_marking) {
- if (IsScalable(frame_marking.temporal_id, frame_marking.layer_id))
- return 3;
- else
- return 1;
-}
-
-bool FrameMarkingExtension::Write(rtc::ArrayView<uint8_t> data,
- const FrameMarking& frame_marking) {
- RTC_DCHECK_GE(data.size(), 1);
- RTC_CHECK_LE(frame_marking.temporal_id, 0x07);
- data[0] = frame_marking.start_of_frame ? 0x80 : 0x00;
- data[0] |= frame_marking.end_of_frame ? 0x40 : 0x00;
- data[0] |= frame_marking.independent_frame ? 0x20 : 0x00;
- data[0] |= frame_marking.discardable_frame ? 0x10 : 0x00;
-
- if (IsScalable(frame_marking.temporal_id, frame_marking.layer_id)) {
- RTC_DCHECK_EQ(data.size(), 3);
- data[0] |= frame_marking.base_layer_sync ? 0x08 : 0x00;
- data[0] |= frame_marking.temporal_id & 0x07;
- data[1] = frame_marking.layer_id;
- data[2] = frame_marking.tl0_pic_idx;
- }
- return true;
-}
-
// Color space including HDR metadata as an optional field.
//
// RTP header extension to carry color space information and optionally HDR
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h
index f4517bb513e..8a81280f7b7 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h
@@ -19,7 +19,6 @@
#include "api/rtp_headers.h"
#include "api/video/color_space.h"
#include "api/video/video_content_type.h"
-#include "api/video/video_frame_marking.h"
#include "api/video/video_rotation.h"
#include "api/video/video_timing.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
@@ -217,23 +216,6 @@ class VideoTimingExtension {
uint8_t offset);
};
-class FrameMarkingExtension {
- public:
- using value_type = FrameMarking;
- static constexpr RTPExtensionType kId = kRtpExtensionFrameMarking;
- static constexpr const char kUri[] =
- "http://tools.ietf.org/html/draft-ietf-avtext-framemarking-07";
-
- static bool Parse(rtc::ArrayView<const uint8_t> data,
- FrameMarking* frame_marking);
- static size_t ValueSize(const FrameMarking& frame_marking);
- static bool Write(rtc::ArrayView<uint8_t> data,
- const FrameMarking& frame_marking);
-
- private:
- static bool IsScalable(uint8_t temporal_id, uint8_t layer_id);
-};
-
class ColorSpaceExtension {
public:
using value_type = ColorSpace;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet.cc
index e054bb8306a..767c9a06883 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet.cc
@@ -188,7 +188,6 @@ void RtpPacket::ZeroMutableExtensions() {
case RTPExtensionType::kRtpExtensionAudioLevel:
case RTPExtensionType::kRtpExtensionAbsoluteCaptureTime:
case RTPExtensionType::kRtpExtensionColorSpace:
- case RTPExtensionType::kRtpExtensionFrameMarking:
case RTPExtensionType::kRtpExtensionGenericFrameDescriptor00:
case RTPExtensionType::kRtpExtensionGenericFrameDescriptor02:
case RTPExtensionType::kRtpExtensionMid:
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_received.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_received.cc
index 56aea8eb5ec..feadee1db10 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_received.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_received.cc
@@ -69,8 +69,6 @@ void RtpPacketReceived::GetHeader(RTPHeader* header) const {
&header->extension.videoContentType);
header->extension.has_video_timing =
GetExtension<VideoTimingExtension>(&header->extension.video_timing);
- header->extension.has_frame_marking =
- GetExtension<FrameMarkingExtension>(&header->extension.frame_marking);
GetExtension<RtpStreamId>(&header->extension.stream_id);
GetExtension<RepairedRtpStreamId>(&header->extension.repaired_stream_id);
GetExtension<RtpMid>(&header->extension.mid);
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
index fb6f8a3f8f8..f372dbe0cdf 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
@@ -39,7 +39,7 @@ const int64_t kDefaultExpectedRetransmissionTimeMs = 125;
} // namespace
ModuleRtpRtcpImpl::RtpSenderContext::RtpSenderContext(
- const RtpRtcp::Configuration& config)
+ const RtpRtcpInterface::Configuration& config)
: packet_history(config.clock, config.enable_rtx_padding_prioritization),
packet_sender(config, &packet_history),
non_paced_sender(&packet_sender),
@@ -48,11 +48,11 @@ ModuleRtpRtcpImpl::RtpSenderContext::RtpSenderContext(
&packet_history,
config.paced_sender ? config.paced_sender : &non_paced_sender) {}
-RtpRtcp::Configuration::Configuration() = default;
-RtpRtcp::Configuration::Configuration(Configuration&& rhs) = default;
-
-std::unique_ptr<RtpRtcp> RtpRtcp::Create(const Configuration& configuration) {
+std::unique_ptr<RtpRtcp> RtpRtcp::DEPRECATED_Create(
+ const Configuration& configuration) {
RTC_DCHECK(configuration.clock);
+ RTC_LOG(LS_ERROR)
+ << "*********** USING WebRTC INTERNAL IMPLEMENTATION DETAILS ***********";
return std::make_unique<ModuleRtpRtcpImpl>(configuration);
}
@@ -96,23 +96,34 @@ int64_t ModuleRtpRtcpImpl::TimeUntilNextProcess() {
// Process any pending tasks such as timeouts (non time critical events).
void ModuleRtpRtcpImpl::Process() {
const int64_t now = clock_->TimeInMilliseconds();
+ // TODO(bugs.webrtc.org/11581): Figure out why we need to call Process() 200
+ // times a second.
next_process_time_ = now + kRtpRtcpMaxIdleTimeProcessMs;
if (rtp_sender_) {
if (now >= last_bitrate_process_time_ + kRtpRtcpBitrateProcessTimeMs) {
rtp_sender_->packet_sender.ProcessBitrateAndNotifyObservers();
last_bitrate_process_time_ = now;
+ // TODO(bugs.webrtc.org/11581): Is this a bug? At the top of the function,
+ // next_process_time_ is incremented by 5ms, here we effectively do a
+ // std::min() of (now + 5ms, now + 10ms). Seems like this is a no-op?
next_process_time_ =
std::min(next_process_time_, now + kRtpRtcpBitrateProcessTimeMs);
}
}
+ // TODO(bugs.webrtc.org/11581): We update the RTT once a second, whereas other
+ // things that run in this method are updated much more frequently. Move the
+ // RTT checking over to the worker thread, which matches better with where the
+ // stats are maintained.
bool process_rtt = now >= last_rtt_process_time_ + kRtpRtcpRttProcessTimeMs;
if (rtcp_sender_.Sending()) {
// Process RTT if we have received a report block and we haven't
// processed RTT for at least |kRtpRtcpRttProcessTimeMs| milliseconds.
- if (rtcp_receiver_.LastReceivedReportBlockMs() > last_rtt_process_time_ &&
- process_rtt) {
+ // Note that LastReceivedReportBlockMs() grabs a lock, so check
+ // |process_rtt| first.
+ if (process_rtt &&
+ rtcp_receiver_.LastReceivedReportBlockMs() > last_rtt_process_time_) {
std::vector<RTCPReportBlock> receive_blocks;
rtcp_receiver_.StatisticsReceived(&receive_blocks);
int64_t max_rtt = 0;
@@ -129,6 +140,12 @@ void ModuleRtpRtcpImpl::Process() {
// Verify receiver reports are delivered and the reported sequence number
// is increasing.
+ // TODO(bugs.webrtc.org/11581): The timeout value needs to be checked every
+ // few seconds (see internals of RtcpRrTimeout). Here, we may be polling it
+ // a couple of hundred times a second, which isn't great since it grabs a
+ // lock. Note also that LastReceivedReportBlockMs() (called above) and
+ // RtcpRrTimeout() both grab the same lock and check the same timer, so
+ // it should be possible to consolidate that work somehow.
if (rtcp_receiver_.RtcpRrTimeout()) {
RTC_LOG_F(LS_WARNING) << "Timeout: No RTCP RR received.";
} else if (rtcp_receiver_.RtcpRrSequenceNumberTimeout()) {
@@ -159,6 +176,9 @@ void ModuleRtpRtcpImpl::Process() {
// Get processed rtt.
if (process_rtt) {
last_rtt_process_time_ = now;
+ // TODO(bugs.webrtc.org/11581): Is this a bug? At the top of the function,
+ // next_process_time_ is incremented by 5ms, here we effectively do a
+ // std::min() of (now + 5ms, now + 1000ms). Seems like this is a no-op?
next_process_time_ = std::min(
next_process_time_, last_rtt_process_time_ + kRtpRtcpRttProcessTimeMs);
if (rtt_stats_) {
@@ -292,7 +312,7 @@ RTCPSender::FeedbackState ModuleRtpRtcpImpl::GetFeedbackState() {
state.send_bitrate =
rtp_sender_->packet_sender.GetSendRates().Sum().bps<uint32_t>();
}
- state.module = this;
+ state.receiver = &rtcp_receiver_;
LastReceivedNTP(&state.last_rr_ntp_secs, &state.last_rr_ntp_frac,
&state.remote_sr);
@@ -503,7 +523,8 @@ int32_t ModuleRtpRtcpImpl::SetRTCPApplicationSpecificData(
const uint32_t name,
const uint8_t* data,
const uint16_t length) {
- return rtcp_sender_.SetApplicationSpecificData(sub_type, name, data, length);
+ RTC_NOTREACHED() << "Not implemented";
+ return -1;
}
void ModuleRtpRtcpImpl::SetRtcpXrRrtrStatus(bool enable) {
@@ -773,11 +794,6 @@ bool ModuleRtpRtcpImpl::LastReceivedNTP(
return true;
}
-// Called from RTCPsender.
-std::vector<rtcp::TmmbItem> ModuleRtpRtcpImpl::BoundingSet(bool* tmmbr_owner) {
- return rtcp_receiver_.BoundingSet(tmmbr_owner);
-}
-
void ModuleRtpRtcpImpl::set_rtt_ms(int64_t rtt_ms) {
{
rtc::CritScope cs(&critical_section_rtt_);
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h
index debb433297f..989b8d37176 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h
@@ -26,13 +26,13 @@
#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "modules/rtp_rtcp/include/rtp_rtcp.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" // RTCPPacketType
+#include "modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h"
#include "modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h"
#include "modules/rtp_rtcp/source/rtcp_receiver.h"
#include "modules/rtp_rtcp/source/rtcp_sender.h"
#include "modules/rtp_rtcp/source/rtp_packet_history.h"
#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
#include "modules/rtp_rtcp/source/rtp_sender.h"
-#include "modules/rtp_rtcp/source/rtp_sender_egress.h"
#include "rtc_base/critical_section.h"
#include "rtc_base/gtest_prod_util.h"
@@ -42,9 +42,11 @@ class Clock;
struct PacedPacketInfo;
struct RTPVideoHeader;
+// DEPRECATED.
class ModuleRtpRtcpImpl : public RtpRtcp, public RTCPReceiver::ModuleRtpRtcp {
public:
- explicit ModuleRtpRtcpImpl(const RtpRtcp::Configuration& configuration);
+ explicit ModuleRtpRtcpImpl(
+ const RtpRtcpInterface::Configuration& configuration);
~ModuleRtpRtcpImpl() override;
// Returns the number of milliseconds until the module want a worker thread to
@@ -257,8 +259,6 @@ class ModuleRtpRtcpImpl : public RtpRtcp, public RTCPReceiver::ModuleRtpRtcp {
uint32_t* NTPfrac,
uint32_t* remote_sr) const;
- std::vector<rtcp::TmmbItem> BoundingSet(bool* tmmbr_owner);
-
void BitrateSent(uint32_t* total_rate,
uint32_t* video_rate,
uint32_t* fec_rate,
@@ -294,6 +294,10 @@ class ModuleRtpRtcpImpl : public RtpRtcp, public RTCPReceiver::ModuleRtpRtcp {
RTCPReceiver* rtcp_receiver() { return &rtcp_receiver_; }
const RTCPReceiver* rtcp_receiver() const { return &rtcp_receiver_; }
+ void SetMediaHasBeenSent(bool media_has_been_sent) {
+ rtp_sender_->packet_sender.SetMediaHasBeenSent(media_has_been_sent);
+ }
+
Clock* clock() const { return clock_; }
// TODO(sprang): Remove when usage is gone.
@@ -305,14 +309,14 @@ class ModuleRtpRtcpImpl : public RtpRtcp, public RTCPReceiver::ModuleRtpRtcp {
FRIEND_TEST_ALL_PREFIXES(RtpRtcpImplTest, RttForReceiverOnly);
struct RtpSenderContext {
- explicit RtpSenderContext(const RtpRtcp::Configuration& config);
+ explicit RtpSenderContext(const RtpRtcpInterface::Configuration& config);
// Storage of packets, for retransmissions and padding, if applicable.
RtpPacketHistory packet_history;
// Handles final time timestamping/stats/etc and handover to Transport.
- RtpSenderEgress packet_sender;
+ DEPRECATED_RtpSenderEgress packet_sender;
// If no paced sender configured, this class will be used to pass packets
// from |packet_generator_| to |packet_sender_|.
- RtpSenderEgress::NonPacedPacketSender non_paced_sender;
+ DEPRECATED_RtpSenderEgress::NonPacedPacketSender non_paced_sender;
// Handles creation of RTP packets to be sent.
RTPSender packet_generator;
};
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc
new file mode 100644
index 00000000000..e50f72bb292
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc
@@ -0,0 +1,769 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
+
+#include <string.h>
+
+#include <algorithm>
+#include <cstdint>
+#include <memory>
+#include <set>
+#include <string>
+#include <utility>
+
+#include "api/transport/field_trial_based_config.h"
+#include "modules/rtp_rtcp/source/rtcp_packet/dlrr.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_config.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+#ifdef _WIN32
+// Disable warning C4355: 'this' : used in base member initializer list.
+#pragma warning(disable : 4355)
+#endif
+
+namespace webrtc {
+namespace {
+const int64_t kRtpRtcpMaxIdleTimeProcessMs = 5;
+const int64_t kRtpRtcpRttProcessTimeMs = 1000;
+const int64_t kRtpRtcpBitrateProcessTimeMs = 10;
+const int64_t kDefaultExpectedRetransmissionTimeMs = 125;
+} // namespace
+
+ModuleRtpRtcpImpl2::RtpSenderContext::RtpSenderContext(
+ const RtpRtcpInterface::Configuration& config)
+ : packet_history(config.clock, config.enable_rtx_padding_prioritization),
+ packet_sender(config, &packet_history),
+ non_paced_sender(&packet_sender),
+ packet_generator(
+ config,
+ &packet_history,
+ config.paced_sender ? config.paced_sender : &non_paced_sender) {}
+
+ModuleRtpRtcpImpl2::ModuleRtpRtcpImpl2(const Configuration& configuration)
+ : rtcp_sender_(configuration),
+ rtcp_receiver_(configuration, this),
+ clock_(configuration.clock),
+ last_bitrate_process_time_(clock_->TimeInMilliseconds()),
+ last_rtt_process_time_(clock_->TimeInMilliseconds()),
+ next_process_time_(clock_->TimeInMilliseconds() +
+ kRtpRtcpMaxIdleTimeProcessMs),
+ packet_overhead_(28), // IPV4 UDP.
+ nack_last_time_sent_full_ms_(0),
+ nack_last_seq_number_sent_(0),
+ remote_bitrate_(configuration.remote_bitrate_estimator),
+ rtt_stats_(configuration.rtt_stats),
+ rtt_ms_(0) {
+ process_thread_checker_.Detach();
+ if (!configuration.receiver_only) {
+ rtp_sender_ = std::make_unique<RtpSenderContext>(configuration);
+ // Make sure rtcp sender use same timestamp offset as rtp sender.
+ rtcp_sender_.SetTimestampOffset(
+ rtp_sender_->packet_generator.TimestampOffset());
+ }
+
+ // Set default packet size limit.
+ // TODO(nisse): Kind-of duplicates
+ // webrtc::VideoSendStream::Config::Rtp::kDefaultMaxPacketSize.
+ const size_t kTcpOverIpv4HeaderSize = 40;
+ SetMaxRtpPacketSize(IP_PACKET_SIZE - kTcpOverIpv4HeaderSize);
+}
+
+ModuleRtpRtcpImpl2::~ModuleRtpRtcpImpl2() {
+ RTC_DCHECK_RUN_ON(&construction_thread_checker_);
+}
+
+// static
+std::unique_ptr<ModuleRtpRtcpImpl2> ModuleRtpRtcpImpl2::Create(
+ const Configuration& configuration) {
+ RTC_DCHECK(configuration.clock);
+ RTC_DCHECK(TaskQueueBase::Current());
+ return std::make_unique<ModuleRtpRtcpImpl2>(configuration);
+}
+
+// Returns the number of milliseconds until the module want a worker thread
+// to call Process.
+int64_t ModuleRtpRtcpImpl2::TimeUntilNextProcess() {
+ RTC_DCHECK_RUN_ON(&process_thread_checker_);
+ return std::max<int64_t>(0,
+ next_process_time_ - clock_->TimeInMilliseconds());
+}
+
+// Process any pending tasks such as timeouts (non time critical events).
+void ModuleRtpRtcpImpl2::Process() {
+ RTC_DCHECK_RUN_ON(&process_thread_checker_);
+ const int64_t now = clock_->TimeInMilliseconds();
+ // TODO(bugs.webrtc.org/11581): Figure out why we need to call Process() 200
+ // times a second.
+ next_process_time_ = now + kRtpRtcpMaxIdleTimeProcessMs;
+
+ if (rtp_sender_) {
+ if (now >= last_bitrate_process_time_ + kRtpRtcpBitrateProcessTimeMs) {
+ rtp_sender_->packet_sender.ProcessBitrateAndNotifyObservers();
+ last_bitrate_process_time_ = now;
+ // TODO(bugs.webrtc.org/11581): Is this a bug? At the top of the function,
+ // next_process_time_ is incremented by 5ms, here we effectively do a
+ // std::min() of (now + 5ms, now + 10ms). Seems like this is a no-op?
+ next_process_time_ =
+ std::min(next_process_time_, now + kRtpRtcpBitrateProcessTimeMs);
+ }
+ }
+
+ // TODO(bugs.webrtc.org/11581): We update the RTT once a second, whereas other
+ // things that run in this method are updated much more frequently. Move the
+ // RTT checking over to the worker thread, which matches better with where the
+ // stats are maintained.
+ bool process_rtt = now >= last_rtt_process_time_ + kRtpRtcpRttProcessTimeMs;
+ if (rtcp_sender_.Sending()) {
+ // Process RTT if we have received a report block and we haven't
+ // processed RTT for at least |kRtpRtcpRttProcessTimeMs| milliseconds.
+ // Note that LastReceivedReportBlockMs() grabs a lock, so check
+ // |process_rtt| first.
+ if (process_rtt &&
+ rtcp_receiver_.LastReceivedReportBlockMs() > last_rtt_process_time_) {
+ std::vector<RTCPReportBlock> receive_blocks;
+ rtcp_receiver_.StatisticsReceived(&receive_blocks);
+ int64_t max_rtt = 0;
+ for (std::vector<RTCPReportBlock>::iterator it = receive_blocks.begin();
+ it != receive_blocks.end(); ++it) {
+ int64_t rtt = 0;
+ rtcp_receiver_.RTT(it->sender_ssrc, &rtt, NULL, NULL, NULL);
+ max_rtt = (rtt > max_rtt) ? rtt : max_rtt;
+ }
+ // Report the rtt.
+ if (rtt_stats_ && max_rtt != 0)
+ rtt_stats_->OnRttUpdate(max_rtt);
+ }
+
+ // Verify receiver reports are delivered and the reported sequence number
+ // is increasing.
+ // TODO(bugs.webrtc.org/11581): The timeout value needs to be checked every
+ // few seconds (see internals of RtcpRrTimeout). Here, we may be polling it
+ // a couple of hundred times a second, which isn't great since it grabs a
+ // lock. Note also that LastReceivedReportBlockMs() (called above) and
+ // RtcpRrTimeout() both grab the same lock and check the same timer, so
+ // it should be possible to consolidate that work somehow.
+ if (rtcp_receiver_.RtcpRrTimeout()) {
+ RTC_LOG_F(LS_WARNING) << "Timeout: No RTCP RR received.";
+ } else if (rtcp_receiver_.RtcpRrSequenceNumberTimeout()) {
+ RTC_LOG_F(LS_WARNING) << "Timeout: No increase in RTCP RR extended "
+ "highest sequence number.";
+ }
+
+ if (remote_bitrate_ && rtcp_sender_.TMMBR()) {
+ unsigned int target_bitrate = 0;
+ std::vector<unsigned int> ssrcs;
+ if (remote_bitrate_->LatestEstimate(&ssrcs, &target_bitrate)) {
+ if (!ssrcs.empty()) {
+ target_bitrate = target_bitrate / ssrcs.size();
+ }
+ rtcp_sender_.SetTargetBitrate(target_bitrate);
+ }
+ }
+ } else {
+ // Report rtt from receiver.
+ if (process_rtt) {
+ int64_t rtt_ms;
+ if (rtt_stats_ && rtcp_receiver_.GetAndResetXrRrRtt(&rtt_ms)) {
+ rtt_stats_->OnRttUpdate(rtt_ms);
+ }
+ }
+ }
+
+ // Get processed rtt.
+ if (process_rtt) {
+ last_rtt_process_time_ = now;
+ // TODO(bugs.webrtc.org/11581): Is this a bug? At the top of the function,
+ // next_process_time_ is incremented by 5ms, here we effectively do a
+ // std::min() of (now + 5ms, now + 1000ms). Seems like this is a no-op?
+ next_process_time_ = std::min(
+ next_process_time_, last_rtt_process_time_ + kRtpRtcpRttProcessTimeMs);
+ if (rtt_stats_) {
+ // Make sure we have a valid RTT before setting.
+ int64_t last_rtt = rtt_stats_->LastProcessedRtt();
+ if (last_rtt >= 0)
+ set_rtt_ms(last_rtt);
+ }
+ }
+
+ if (rtcp_sender_.TimeToSendRTCPReport())
+ rtcp_sender_.SendRTCP(GetFeedbackState(), kRtcpReport);
+
+ if (rtcp_sender_.TMMBR() && rtcp_receiver_.UpdateTmmbrTimers()) {
+ rtcp_receiver_.NotifyTmmbrUpdated();
+ }
+}
+
+void ModuleRtpRtcpImpl2::SetRtxSendStatus(int mode) {
+ rtp_sender_->packet_generator.SetRtxStatus(mode);
+}
+
+int ModuleRtpRtcpImpl2::RtxSendStatus() const {
+ return rtp_sender_ ? rtp_sender_->packet_generator.RtxStatus() : kRtxOff;
+}
+
+void ModuleRtpRtcpImpl2::SetRtxSendPayloadType(int payload_type,
+ int associated_payload_type) {
+ rtp_sender_->packet_generator.SetRtxPayloadType(payload_type,
+ associated_payload_type);
+}
+
+absl::optional<uint32_t> ModuleRtpRtcpImpl2::RtxSsrc() const {
+ return rtp_sender_ ? rtp_sender_->packet_generator.RtxSsrc() : absl::nullopt;
+}
+
+absl::optional<uint32_t> ModuleRtpRtcpImpl2::FlexfecSsrc() const {
+ if (rtp_sender_) {
+ return rtp_sender_->packet_generator.FlexfecSsrc();
+ }
+ return absl::nullopt;
+}
+
+void ModuleRtpRtcpImpl2::IncomingRtcpPacket(const uint8_t* rtcp_packet,
+ const size_t length) {
+ rtcp_receiver_.IncomingPacket(rtcp_packet, length);
+}
+
+void ModuleRtpRtcpImpl2::RegisterSendPayloadFrequency(int payload_type,
+ int payload_frequency) {
+ rtcp_sender_.SetRtpClockRate(payload_type, payload_frequency);
+}
+
+int32_t ModuleRtpRtcpImpl2::DeRegisterSendPayload(const int8_t payload_type) {
+ return 0;
+}
+
+uint32_t ModuleRtpRtcpImpl2::StartTimestamp() const {
+ return rtp_sender_->packet_generator.TimestampOffset();
+}
+
+// Configure start timestamp, default is a random number.
+void ModuleRtpRtcpImpl2::SetStartTimestamp(const uint32_t timestamp) {
+ rtcp_sender_.SetTimestampOffset(timestamp);
+ rtp_sender_->packet_generator.SetTimestampOffset(timestamp);
+ rtp_sender_->packet_sender.SetTimestampOffset(timestamp);
+}
+
+uint16_t ModuleRtpRtcpImpl2::SequenceNumber() const {
+ return rtp_sender_->packet_generator.SequenceNumber();
+}
+
+// Set SequenceNumber, default is a random number.
+void ModuleRtpRtcpImpl2::SetSequenceNumber(const uint16_t seq_num) {
+ rtp_sender_->packet_generator.SetSequenceNumber(seq_num);
+}
+
+void ModuleRtpRtcpImpl2::SetRtpState(const RtpState& rtp_state) {
+ rtp_sender_->packet_generator.SetRtpState(rtp_state);
+ rtp_sender_->packet_sender.SetMediaHasBeenSent(rtp_state.media_has_been_sent);
+ rtcp_sender_.SetTimestampOffset(rtp_state.start_timestamp);
+}
+
+void ModuleRtpRtcpImpl2::SetRtxState(const RtpState& rtp_state) {
+ rtp_sender_->packet_generator.SetRtxRtpState(rtp_state);
+}
+
+RtpState ModuleRtpRtcpImpl2::GetRtpState() const {
+ RtpState state = rtp_sender_->packet_generator.GetRtpState();
+ state.media_has_been_sent = rtp_sender_->packet_sender.MediaHasBeenSent();
+ return state;
+}
+
+RtpState ModuleRtpRtcpImpl2::GetRtxState() const {
+ return rtp_sender_->packet_generator.GetRtxRtpState();
+}
+
+void ModuleRtpRtcpImpl2::SetRid(const std::string& rid) {
+ if (rtp_sender_) {
+ rtp_sender_->packet_generator.SetRid(rid);
+ }
+}
+
+void ModuleRtpRtcpImpl2::SetMid(const std::string& mid) {
+ if (rtp_sender_) {
+ rtp_sender_->packet_generator.SetMid(mid);
+ }
+ // TODO(bugs.webrtc.org/4050): If we end up supporting the MID SDES item for
+ // RTCP, this will need to be passed down to the RTCPSender also.
+}
+
+void ModuleRtpRtcpImpl2::SetCsrcs(const std::vector<uint32_t>& csrcs) {
+ rtcp_sender_.SetCsrcs(csrcs);
+ rtp_sender_->packet_generator.SetCsrcs(csrcs);
+}
+
+// TODO(pbos): Handle media and RTX streams separately (separate RTCP
+// feedbacks).
+RTCPSender::FeedbackState ModuleRtpRtcpImpl2::GetFeedbackState() {
+ RTCPSender::FeedbackState state;
+ // This is called also when receiver_only is true. Hence below
+ // checks that rtp_sender_ exists.
+ if (rtp_sender_) {
+ StreamDataCounters rtp_stats;
+ StreamDataCounters rtx_stats;
+ rtp_sender_->packet_sender.GetDataCounters(&rtp_stats, &rtx_stats);
+ state.packets_sent =
+ rtp_stats.transmitted.packets + rtx_stats.transmitted.packets;
+ state.media_bytes_sent = rtp_stats.transmitted.payload_bytes +
+ rtx_stats.transmitted.payload_bytes;
+ state.send_bitrate =
+ rtp_sender_->packet_sender.GetSendRates().Sum().bps<uint32_t>();
+ }
+ state.receiver = &rtcp_receiver_;
+
+ LastReceivedNTP(&state.last_rr_ntp_secs, &state.last_rr_ntp_frac,
+ &state.remote_sr);
+
+ state.last_xr_rtis = rtcp_receiver_.ConsumeReceivedXrReferenceTimeInfo();
+
+ return state;
+}
+
+// TODO(nisse): This method shouldn't be called for a receive-only
+// stream. Delete rtp_sender_ check as soon as all applications are
+// updated.
+int32_t ModuleRtpRtcpImpl2::SetSendingStatus(const bool sending) {
+ if (rtcp_sender_.Sending() != sending) {
+ // Sends RTCP BYE when going from true to false
+ if (rtcp_sender_.SetSendingStatus(GetFeedbackState(), sending) != 0) {
+ RTC_LOG(LS_WARNING) << "Failed to send RTCP BYE";
+ }
+ }
+ return 0;
+}
+
+bool ModuleRtpRtcpImpl2::Sending() const {
+ return rtcp_sender_.Sending();
+}
+
+// TODO(nisse): This method shouldn't be called for a receive-only
+// stream. Delete rtp_sender_ check as soon as all applications are
+// updated.
+void ModuleRtpRtcpImpl2::SetSendingMediaStatus(const bool sending) {
+ if (rtp_sender_) {
+ rtp_sender_->packet_generator.SetSendingMediaStatus(sending);
+ } else {
+ RTC_DCHECK(!sending);
+ }
+}
+
+bool ModuleRtpRtcpImpl2::SendingMedia() const {
+ return rtp_sender_ ? rtp_sender_->packet_generator.SendingMedia() : false;
+}
+
+bool ModuleRtpRtcpImpl2::IsAudioConfigured() const {
+ return rtp_sender_ ? rtp_sender_->packet_generator.IsAudioConfigured()
+ : false;
+}
+
+void ModuleRtpRtcpImpl2::SetAsPartOfAllocation(bool part_of_allocation) {
+ RTC_CHECK(rtp_sender_);
+ rtp_sender_->packet_sender.ForceIncludeSendPacketsInAllocation(
+ part_of_allocation);
+}
+
+bool ModuleRtpRtcpImpl2::OnSendingRtpFrame(uint32_t timestamp,
+ int64_t capture_time_ms,
+ int payload_type,
+ bool force_sender_report) {
+ if (!Sending())
+ return false;
+
+ rtcp_sender_.SetLastRtpTime(timestamp, capture_time_ms, payload_type);
+ // Make sure an RTCP report isn't queued behind a key frame.
+ if (rtcp_sender_.TimeToSendRTCPReport(force_sender_report))
+ rtcp_sender_.SendRTCP(GetFeedbackState(), kRtcpReport);
+
+ return true;
+}
+
+bool ModuleRtpRtcpImpl2::TrySendPacket(RtpPacketToSend* packet,
+ const PacedPacketInfo& pacing_info) {
+ RTC_DCHECK(rtp_sender_);
+ // TODO(sprang): Consider if we can remove this check.
+ if (!rtp_sender_->packet_generator.SendingMedia()) {
+ return false;
+ }
+ rtp_sender_->packet_sender.SendPacket(packet, pacing_info);
+ return true;
+}
+
+void ModuleRtpRtcpImpl2::OnPacketsAcknowledged(
+ rtc::ArrayView<const uint16_t> sequence_numbers) {
+ RTC_DCHECK(rtp_sender_);
+ rtp_sender_->packet_history.CullAcknowledgedPackets(sequence_numbers);
+}
+
+bool ModuleRtpRtcpImpl2::SupportsPadding() const {
+ RTC_DCHECK(rtp_sender_);
+ return rtp_sender_->packet_generator.SupportsPadding();
+}
+
+bool ModuleRtpRtcpImpl2::SupportsRtxPayloadPadding() const {
+ RTC_DCHECK(rtp_sender_);
+ return rtp_sender_->packet_generator.SupportsRtxPayloadPadding();
+}
+
+std::vector<std::unique_ptr<RtpPacketToSend>>
+ModuleRtpRtcpImpl2::GeneratePadding(size_t target_size_bytes) {
+ RTC_DCHECK(rtp_sender_);
+ return rtp_sender_->packet_generator.GeneratePadding(
+ target_size_bytes, rtp_sender_->packet_sender.MediaHasBeenSent());
+}
+
+std::vector<RtpSequenceNumberMap::Info>
+ModuleRtpRtcpImpl2::GetSentRtpPacketInfos(
+ rtc::ArrayView<const uint16_t> sequence_numbers) const {
+ RTC_DCHECK(rtp_sender_);
+ return rtp_sender_->packet_sender.GetSentRtpPacketInfos(sequence_numbers);
+}
+
+size_t ModuleRtpRtcpImpl2::ExpectedPerPacketOverhead() const {
+ if (!rtp_sender_) {
+ return 0;
+ }
+ return rtp_sender_->packet_generator.ExpectedPerPacketOverhead();
+}
+
+size_t ModuleRtpRtcpImpl2::MaxRtpPacketSize() const {
+ RTC_DCHECK(rtp_sender_);
+ return rtp_sender_->packet_generator.MaxRtpPacketSize();
+}
+
+void ModuleRtpRtcpImpl2::SetMaxRtpPacketSize(size_t rtp_packet_size) {
+ RTC_DCHECK_LE(rtp_packet_size, IP_PACKET_SIZE)
+ << "rtp packet size too large: " << rtp_packet_size;
+ RTC_DCHECK_GT(rtp_packet_size, packet_overhead_)
+ << "rtp packet size too small: " << rtp_packet_size;
+
+ rtcp_sender_.SetMaxRtpPacketSize(rtp_packet_size);
+ if (rtp_sender_) {
+ rtp_sender_->packet_generator.SetMaxRtpPacketSize(rtp_packet_size);
+ }
+}
+
+RtcpMode ModuleRtpRtcpImpl2::RTCP() const {
+ return rtcp_sender_.Status();
+}
+
+// Configure RTCP status i.e on/off.
+void ModuleRtpRtcpImpl2::SetRTCPStatus(const RtcpMode method) {
+ rtcp_sender_.SetRTCPStatus(method);
+}
+
+int32_t ModuleRtpRtcpImpl2::SetCNAME(const char* c_name) {
+ return rtcp_sender_.SetCNAME(c_name);
+}
+
+int32_t ModuleRtpRtcpImpl2::RemoteNTP(uint32_t* received_ntpsecs,
+ uint32_t* received_ntpfrac,
+ uint32_t* rtcp_arrival_time_secs,
+ uint32_t* rtcp_arrival_time_frac,
+ uint32_t* rtcp_timestamp) const {
+ return rtcp_receiver_.NTP(received_ntpsecs, received_ntpfrac,
+ rtcp_arrival_time_secs, rtcp_arrival_time_frac,
+ rtcp_timestamp)
+ ? 0
+ : -1;
+}
+
+// Get RoundTripTime.
+int32_t ModuleRtpRtcpImpl2::RTT(const uint32_t remote_ssrc,
+ int64_t* rtt,
+ int64_t* avg_rtt,
+ int64_t* min_rtt,
+ int64_t* max_rtt) const {
+ int32_t ret = rtcp_receiver_.RTT(remote_ssrc, rtt, avg_rtt, min_rtt, max_rtt);
+ if (rtt && *rtt == 0) {
+ // Try to get RTT from RtcpRttStats class.
+ *rtt = rtt_ms();
+ }
+ return ret;
+}
+
+int64_t ModuleRtpRtcpImpl2::ExpectedRetransmissionTimeMs() const {
+ int64_t expected_retransmission_time_ms = rtt_ms();
+ if (expected_retransmission_time_ms > 0) {
+ return expected_retransmission_time_ms;
+ }
+ // No rtt available (|kRtpRtcpRttProcessTimeMs| not yet passed?), so try to
+ // poll avg_rtt_ms directly from rtcp receiver.
+ if (rtcp_receiver_.RTT(rtcp_receiver_.RemoteSSRC(), nullptr,
+ &expected_retransmission_time_ms, nullptr,
+ nullptr) == 0) {
+ return expected_retransmission_time_ms;
+ }
+ return kDefaultExpectedRetransmissionTimeMs;
+}
+
+// Force a send of an RTCP packet.
+// Normal SR and RR are triggered via the process function.
+int32_t ModuleRtpRtcpImpl2::SendRTCP(RTCPPacketType packet_type) {
+ return rtcp_sender_.SendRTCP(GetFeedbackState(), packet_type);
+}
+
+void ModuleRtpRtcpImpl2::SetRtcpXrRrtrStatus(bool enable) {
+ rtcp_receiver_.SetRtcpXrRrtrStatus(enable);
+ rtcp_sender_.SendRtcpXrReceiverReferenceTime(enable);
+}
+
+bool ModuleRtpRtcpImpl2::RtcpXrRrtrStatus() const {
+ return rtcp_sender_.RtcpXrReceiverReferenceTime();
+}
+
+void ModuleRtpRtcpImpl2::GetSendStreamDataCounters(
+ StreamDataCounters* rtp_counters,
+ StreamDataCounters* rtx_counters) const {
+ rtp_sender_->packet_sender.GetDataCounters(rtp_counters, rtx_counters);
+}
+
+// Received RTCP report.
+int32_t ModuleRtpRtcpImpl2::RemoteRTCPStat(
+ std::vector<RTCPReportBlock>* receive_blocks) const {
+ return rtcp_receiver_.StatisticsReceived(receive_blocks);
+}
+
+std::vector<ReportBlockData> ModuleRtpRtcpImpl2::GetLatestReportBlockData()
+ const {
+ return rtcp_receiver_.GetLatestReportBlockData();
+}
+
+// (REMB) Receiver Estimated Max Bitrate.
+void ModuleRtpRtcpImpl2::SetRemb(int64_t bitrate_bps,
+ std::vector<uint32_t> ssrcs) {
+ rtcp_sender_.SetRemb(bitrate_bps, std::move(ssrcs));
+}
+
+void ModuleRtpRtcpImpl2::UnsetRemb() {
+ rtcp_sender_.UnsetRemb();
+}
+
+void ModuleRtpRtcpImpl2::SetExtmapAllowMixed(bool extmap_allow_mixed) {
+ rtp_sender_->packet_generator.SetExtmapAllowMixed(extmap_allow_mixed);
+}
+
+void ModuleRtpRtcpImpl2::RegisterRtpHeaderExtension(absl::string_view uri,
+ int id) {
+ bool registered =
+ rtp_sender_->packet_generator.RegisterRtpHeaderExtension(uri, id);
+ RTC_CHECK(registered);
+}
+
+int32_t ModuleRtpRtcpImpl2::DeregisterSendRtpHeaderExtension(
+ const RTPExtensionType type) {
+ return rtp_sender_->packet_generator.DeregisterRtpHeaderExtension(type);
+}
+void ModuleRtpRtcpImpl2::DeregisterSendRtpHeaderExtension(
+ absl::string_view uri) {
+ rtp_sender_->packet_generator.DeregisterRtpHeaderExtension(uri);
+}
+
+void ModuleRtpRtcpImpl2::SetTmmbn(std::vector<rtcp::TmmbItem> bounding_set) {
+ rtcp_sender_.SetTmmbn(std::move(bounding_set));
+}
+
+// Send a Negative acknowledgment packet.
+int32_t ModuleRtpRtcpImpl2::SendNACK(const uint16_t* nack_list,
+ const uint16_t size) {
+ uint16_t nack_length = size;
+ uint16_t start_id = 0;
+ int64_t now_ms = clock_->TimeInMilliseconds();
+ if (TimeToSendFullNackList(now_ms)) {
+ nack_last_time_sent_full_ms_ = now_ms;
+ } else {
+ // Only send extended list.
+ if (nack_last_seq_number_sent_ == nack_list[size - 1]) {
+ // Last sequence number is the same, do not send list.
+ return 0;
+ }
+ // Send new sequence numbers.
+ for (int i = 0; i < size; ++i) {
+ if (nack_last_seq_number_sent_ == nack_list[i]) {
+ start_id = i + 1;
+ break;
+ }
+ }
+ nack_length = size - start_id;
+ }
+
+ // Our RTCP NACK implementation is limited to kRtcpMaxNackFields sequence
+ // numbers per RTCP packet.
+ if (nack_length > kRtcpMaxNackFields) {
+ nack_length = kRtcpMaxNackFields;
+ }
+ nack_last_seq_number_sent_ = nack_list[start_id + nack_length - 1];
+
+ return rtcp_sender_.SendRTCP(GetFeedbackState(), kRtcpNack, nack_length,
+ &nack_list[start_id]);
+}
+
+void ModuleRtpRtcpImpl2::SendNack(
+ const std::vector<uint16_t>& sequence_numbers) {
+ rtcp_sender_.SendRTCP(GetFeedbackState(), kRtcpNack, sequence_numbers.size(),
+ sequence_numbers.data());
+}
+
+bool ModuleRtpRtcpImpl2::TimeToSendFullNackList(int64_t now) const {
+ // Use RTT from RtcpRttStats class if provided.
+ int64_t rtt = rtt_ms();
+ if (rtt == 0) {
+ rtcp_receiver_.RTT(rtcp_receiver_.RemoteSSRC(), NULL, &rtt, NULL, NULL);
+ }
+
+ const int64_t kStartUpRttMs = 100;
+ int64_t wait_time = 5 + ((rtt * 3) >> 1); // 5 + RTT * 1.5.
+ if (rtt == 0) {
+ wait_time = kStartUpRttMs;
+ }
+
+ // Send a full NACK list once within every |wait_time|.
+ return now - nack_last_time_sent_full_ms_ > wait_time;
+}
+
+// Store the sent packets, needed to answer to Negative acknowledgment requests.
+void ModuleRtpRtcpImpl2::SetStorePacketsStatus(const bool enable,
+ const uint16_t number_to_store) {
+ rtp_sender_->packet_history.SetStorePacketsStatus(
+ enable ? RtpPacketHistory::StorageMode::kStoreAndCull
+ : RtpPacketHistory::StorageMode::kDisabled,
+ number_to_store);
+}
+
+bool ModuleRtpRtcpImpl2::StorePackets() const {
+ return rtp_sender_->packet_history.GetStorageMode() !=
+ RtpPacketHistory::StorageMode::kDisabled;
+}
+
+void ModuleRtpRtcpImpl2::SendCombinedRtcpPacket(
+ std::vector<std::unique_ptr<rtcp::RtcpPacket>> rtcp_packets) {
+ rtcp_sender_.SendCombinedRtcpPacket(std::move(rtcp_packets));
+}
+
+int32_t ModuleRtpRtcpImpl2::SendLossNotification(uint16_t last_decoded_seq_num,
+ uint16_t last_received_seq_num,
+ bool decodability_flag,
+ bool buffering_allowed) {
+ return rtcp_sender_.SendLossNotification(
+ GetFeedbackState(), last_decoded_seq_num, last_received_seq_num,
+ decodability_flag, buffering_allowed);
+}
+
+void ModuleRtpRtcpImpl2::SetRemoteSSRC(const uint32_t ssrc) {
+ // Inform about the incoming SSRC.
+ rtcp_sender_.SetRemoteSSRC(ssrc);
+ rtcp_receiver_.SetRemoteSSRC(ssrc);
+}
+
+// TODO(nisse): Delete video_rate amd fec_rate arguments.
+void ModuleRtpRtcpImpl2::BitrateSent(uint32_t* total_rate,
+ uint32_t* video_rate,
+ uint32_t* fec_rate,
+ uint32_t* nack_rate) const {
+ RtpSendRates send_rates = rtp_sender_->packet_sender.GetSendRates();
+ *total_rate = send_rates.Sum().bps<uint32_t>();
+ if (video_rate)
+ *video_rate = 0;
+ if (fec_rate)
+ *fec_rate = 0;
+ *nack_rate = send_rates[RtpPacketMediaType::kRetransmission].bps<uint32_t>();
+}
+
+RtpSendRates ModuleRtpRtcpImpl2::GetSendRates() const {
+ return rtp_sender_->packet_sender.GetSendRates();
+}
+
+void ModuleRtpRtcpImpl2::OnRequestSendReport() {
+ SendRTCP(kRtcpSr);
+}
+
+void ModuleRtpRtcpImpl2::OnReceivedNack(
+ const std::vector<uint16_t>& nack_sequence_numbers) {
+ if (!rtp_sender_)
+ return;
+
+ if (!StorePackets() || nack_sequence_numbers.empty()) {
+ return;
+ }
+ // Use RTT from RtcpRttStats class if provided.
+ int64_t rtt = rtt_ms();
+ if (rtt == 0) {
+ rtcp_receiver_.RTT(rtcp_receiver_.RemoteSSRC(), NULL, &rtt, NULL, NULL);
+ }
+ rtp_sender_->packet_generator.OnReceivedNack(nack_sequence_numbers, rtt);
+}
+
+void ModuleRtpRtcpImpl2::OnReceivedRtcpReportBlocks(
+ const ReportBlockList& report_blocks) {
+ if (rtp_sender_) {
+ uint32_t ssrc = SSRC();
+ absl::optional<uint32_t> rtx_ssrc;
+ if (rtp_sender_->packet_generator.RtxStatus() != kRtxOff) {
+ rtx_ssrc = rtp_sender_->packet_generator.RtxSsrc();
+ }
+
+ for (const RTCPReportBlock& report_block : report_blocks) {
+ if (ssrc == report_block.source_ssrc) {
+ rtp_sender_->packet_generator.OnReceivedAckOnSsrc(
+ report_block.extended_highest_sequence_number);
+ } else if (rtx_ssrc && *rtx_ssrc == report_block.source_ssrc) {
+ rtp_sender_->packet_generator.OnReceivedAckOnRtxSsrc(
+ report_block.extended_highest_sequence_number);
+ }
+ }
+ }
+}
+
+bool ModuleRtpRtcpImpl2::LastReceivedNTP(
+ uint32_t* rtcp_arrival_time_secs, // When we got the last report.
+ uint32_t* rtcp_arrival_time_frac,
+ uint32_t* remote_sr) const {
+ // Remote SR: NTP inside the last received (mid 16 bits from sec and frac).
+ uint32_t ntp_secs = 0;
+ uint32_t ntp_frac = 0;
+
+ if (!rtcp_receiver_.NTP(&ntp_secs, &ntp_frac, rtcp_arrival_time_secs,
+ rtcp_arrival_time_frac, NULL)) {
+ return false;
+ }
+ *remote_sr =
+ ((ntp_secs & 0x0000ffff) << 16) + ((ntp_frac & 0xffff0000) >> 16);
+ return true;
+}
+
+void ModuleRtpRtcpImpl2::set_rtt_ms(int64_t rtt_ms) {
+ {
+ rtc::CritScope cs(&critical_section_rtt_);
+ rtt_ms_ = rtt_ms;
+ }
+ if (rtp_sender_) {
+ rtp_sender_->packet_history.SetRtt(rtt_ms);
+ }
+}
+
+int64_t ModuleRtpRtcpImpl2::rtt_ms() const {
+ rtc::CritScope cs(&critical_section_rtt_);
+ return rtt_ms_;
+}
+
+void ModuleRtpRtcpImpl2::SetVideoBitrateAllocation(
+ const VideoBitrateAllocation& bitrate) {
+ rtcp_sender_.SetVideoBitrateAllocation(bitrate);
+}
+
+RTPSender* ModuleRtpRtcpImpl2::RtpSender() {
+ return rtp_sender_ ? &rtp_sender_->packet_generator : nullptr;
+}
+
+const RTPSender* ModuleRtpRtcpImpl2::RtpSender() const {
+ return rtp_sender_ ? &rtp_sender_->packet_generator : nullptr;
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.h
new file mode 100644
index 00000000000..276f88a6b57
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.h
@@ -0,0 +1,316 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_RTP_RTCP_SOURCE_RTP_RTCP_IMPL2_H_
+#define MODULES_RTP_RTCP_SOURCE_RTP_RTCP_IMPL2_H_
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <memory>
+#include <set>
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/rtp_headers.h"
+#include "api/video/video_bitrate_allocation.h"
+#include "modules/include/module_fec_types.h"
+#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
+#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" // RTCPPacketType
+#include "modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h"
+#include "modules/rtp_rtcp/source/rtcp_receiver.h"
+#include "modules/rtp_rtcp/source/rtcp_sender.h"
+#include "modules/rtp_rtcp/source/rtp_packet_history.h"
+#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
+#include "modules/rtp_rtcp/source/rtp_sender.h"
+#include "modules/rtp_rtcp/source/rtp_sender_egress.h"
+#include "rtc_base/critical_section.h"
+#include "rtc_base/gtest_prod_util.h"
+#include "rtc_base/synchronization/sequence_checker.h"
+
+namespace webrtc {
+
+class Clock;
+struct PacedPacketInfo;
+struct RTPVideoHeader;
+
+class ModuleRtpRtcpImpl2 final : public RtpRtcpInterface,
+ public Module,
+ public RTCPReceiver::ModuleRtpRtcp {
+ public:
+ explicit ModuleRtpRtcpImpl2(
+ const RtpRtcpInterface::Configuration& configuration);
+ ~ModuleRtpRtcpImpl2() override;
+
+  // This method is provided to ease migration away from the
+  // RtpRtcp::Create factory method. Since this is an internal implementation
+  // detail though, creating an instance of ModuleRtpRtcpImpl2 directly should
+  // be fine.
+ static std::unique_ptr<ModuleRtpRtcpImpl2> Create(
+ const Configuration& configuration);
+
+  // Returns the number of milliseconds until the module wants a worker thread
+  // to call Process.
+ int64_t TimeUntilNextProcess() override;
+
+ // Process any pending tasks such as timeouts.
+ void Process() override;
+
+ // Receiver part.
+
+ // Called when we receive an RTCP packet.
+ void IncomingRtcpPacket(const uint8_t* incoming_packet,
+ size_t incoming_packet_length) override;
+
+ void SetRemoteSSRC(uint32_t ssrc) override;
+
+ // Sender part.
+ void RegisterSendPayloadFrequency(int payload_type,
+ int payload_frequency) override;
+
+ int32_t DeRegisterSendPayload(int8_t payload_type) override;
+
+ void SetExtmapAllowMixed(bool extmap_allow_mixed) override;
+
+ void RegisterRtpHeaderExtension(absl::string_view uri, int id) override;
+ int32_t DeregisterSendRtpHeaderExtension(RTPExtensionType type) override;
+ void DeregisterSendRtpHeaderExtension(absl::string_view uri) override;
+
+ bool SupportsPadding() const override;
+ bool SupportsRtxPayloadPadding() const override;
+
+ // Get start timestamp.
+ uint32_t StartTimestamp() const override;
+
+ // Configure start timestamp, default is a random number.
+ void SetStartTimestamp(uint32_t timestamp) override;
+
+ uint16_t SequenceNumber() const override;
+
+ // Set SequenceNumber, default is a random number.
+ void SetSequenceNumber(uint16_t seq) override;
+
+ void SetRtpState(const RtpState& rtp_state) override;
+ void SetRtxState(const RtpState& rtp_state) override;
+ RtpState GetRtpState() const override;
+ RtpState GetRtxState() const override;
+
+ uint32_t SSRC() const override { return rtcp_sender_.SSRC(); }
+
+ void SetRid(const std::string& rid) override;
+
+ void SetMid(const std::string& mid) override;
+
+ void SetCsrcs(const std::vector<uint32_t>& csrcs) override;
+
+ RTCPSender::FeedbackState GetFeedbackState();
+
+ void SetRtxSendStatus(int mode) override;
+ int RtxSendStatus() const override;
+ absl::optional<uint32_t> RtxSsrc() const override;
+
+ void SetRtxSendPayloadType(int payload_type,
+ int associated_payload_type) override;
+
+ absl::optional<uint32_t> FlexfecSsrc() const override;
+
+ // Sends kRtcpByeCode when going from true to false.
+ int32_t SetSendingStatus(bool sending) override;
+
+ bool Sending() const override;
+
+ // Drops or relays media packets.
+ void SetSendingMediaStatus(bool sending) override;
+
+ bool SendingMedia() const override;
+
+ bool IsAudioConfigured() const override;
+
+ void SetAsPartOfAllocation(bool part_of_allocation) override;
+
+ bool OnSendingRtpFrame(uint32_t timestamp,
+ int64_t capture_time_ms,
+ int payload_type,
+ bool force_sender_report) override;
+
+ bool TrySendPacket(RtpPacketToSend* packet,
+ const PacedPacketInfo& pacing_info) override;
+
+ void OnPacketsAcknowledged(
+ rtc::ArrayView<const uint16_t> sequence_numbers) override;
+
+ std::vector<std::unique_ptr<RtpPacketToSend>> GeneratePadding(
+ size_t target_size_bytes) override;
+
+ std::vector<RtpSequenceNumberMap::Info> GetSentRtpPacketInfos(
+ rtc::ArrayView<const uint16_t> sequence_numbers) const override;
+
+ size_t ExpectedPerPacketOverhead() const override;
+
+ // RTCP part.
+
+ // Get RTCP status.
+ RtcpMode RTCP() const override;
+
+ // Configure RTCP status i.e on/off.
+ void SetRTCPStatus(RtcpMode method) override;
+
+ // Set RTCP CName.
+ int32_t SetCNAME(const char* c_name) override;
+
+ // Get remote NTP.
+ int32_t RemoteNTP(uint32_t* received_ntp_secs,
+ uint32_t* received_ntp_frac,
+ uint32_t* rtcp_arrival_time_secs,
+ uint32_t* rtcp_arrival_time_frac,
+ uint32_t* rtcp_timestamp) const override;
+
+ // Get RoundTripTime.
+ int32_t RTT(uint32_t remote_ssrc,
+ int64_t* rtt,
+ int64_t* avg_rtt,
+ int64_t* min_rtt,
+ int64_t* max_rtt) const override;
+
+ int64_t ExpectedRetransmissionTimeMs() const override;
+
+ // Force a send of an RTCP packet.
+ // Normal SR and RR are triggered via the process function.
+ int32_t SendRTCP(RTCPPacketType rtcpPacketType) override;
+
+ void GetSendStreamDataCounters(
+ StreamDataCounters* rtp_counters,
+ StreamDataCounters* rtx_counters) const override;
+
+ // Get received RTCP report, report block.
+ int32_t RemoteRTCPStat(
+ std::vector<RTCPReportBlock>* receive_blocks) const override;
+ // A snapshot of the most recent Report Block with additional data of
+ // interest to statistics. Used to implement RTCRemoteInboundRtpStreamStats.
+ // Within this list, the ReportBlockData::RTCPReportBlock::source_ssrc(),
+ // which is the SSRC of the corresponding outbound RTP stream, is unique.
+ std::vector<ReportBlockData> GetLatestReportBlockData() const override;
+
+ // (REMB) Receiver Estimated Max Bitrate.
+ void SetRemb(int64_t bitrate_bps, std::vector<uint32_t> ssrcs) override;
+ void UnsetRemb() override;
+
+ void SetTmmbn(std::vector<rtcp::TmmbItem> bounding_set) override;
+
+ size_t MaxRtpPacketSize() const override;
+
+ void SetMaxRtpPacketSize(size_t max_packet_size) override;
+
+ // (NACK) Negative acknowledgment part.
+
+ // Send a Negative acknowledgment packet.
+ // TODO(philipel): Deprecate SendNACK and use SendNack instead.
+ int32_t SendNACK(const uint16_t* nack_list, uint16_t size) override;
+
+ void SendNack(const std::vector<uint16_t>& sequence_numbers) override;
+
+  // Store the sent packets, needed to answer negative acknowledgment
+  // (NACK) requests.
+ void SetStorePacketsStatus(bool enable, uint16_t number_to_store) override;
+
+ bool StorePackets() const override;
+
+ void SendCombinedRtcpPacket(
+ std::vector<std::unique_ptr<rtcp::RtcpPacket>> rtcp_packets) override;
+
+ // (XR) Receiver reference time report.
+ void SetRtcpXrRrtrStatus(bool enable) override;
+
+ bool RtcpXrRrtrStatus() const override;
+
+ // Video part.
+ int32_t SendLossNotification(uint16_t last_decoded_seq_num,
+ uint16_t last_received_seq_num,
+ bool decodability_flag,
+ bool buffering_allowed) override;
+
+ bool LastReceivedNTP(uint32_t* NTPsecs,
+ uint32_t* NTPfrac,
+ uint32_t* remote_sr) const;
+
+ void BitrateSent(uint32_t* total_rate,
+ uint32_t* video_rate,
+ uint32_t* fec_rate,
+ uint32_t* nackRate) const override;
+
+ RtpSendRates GetSendRates() const override;
+
+ void OnReceivedNack(
+ const std::vector<uint16_t>& nack_sequence_numbers) override;
+ void OnReceivedRtcpReportBlocks(
+ const ReportBlockList& report_blocks) override;
+ void OnRequestSendReport() override;
+
+ void SetVideoBitrateAllocation(
+ const VideoBitrateAllocation& bitrate) override;
+
+ RTPSender* RtpSender() override;
+ const RTPSender* RtpSender() const override;
+
+ private:
+ FRIEND_TEST_ALL_PREFIXES(RtpRtcpImpl2Test, Rtt);
+ FRIEND_TEST_ALL_PREFIXES(RtpRtcpImpl2Test, RttForReceiverOnly);
+
+ struct RtpSenderContext {
+ explicit RtpSenderContext(const RtpRtcpInterface::Configuration& config);
+ // Storage of packets, for retransmissions and padding, if applicable.
+ RtpPacketHistory packet_history;
+ // Handles final time timestamping/stats/etc and handover to Transport.
+ RtpSenderEgress packet_sender;
+    // If no paced sender configured, this class will be used to pass packets
+    // from |packet_generator| to |packet_sender|.
+ RtpSenderEgress::NonPacedPacketSender non_paced_sender;
+ // Handles creation of RTP packets to be sent.
+ RTPSender packet_generator;
+ };
+
+ void set_rtt_ms(int64_t rtt_ms);
+ int64_t rtt_ms() const;
+
+ bool TimeToSendFullNackList(int64_t now) const;
+
+ SequenceChecker construction_thread_checker_;
+ SequenceChecker process_thread_checker_;
+
+ std::unique_ptr<RtpSenderContext> rtp_sender_;
+
+ RTCPSender rtcp_sender_;
+ RTCPReceiver rtcp_receiver_;
+
+ Clock* const clock_;
+
+ int64_t last_bitrate_process_time_;
+ int64_t last_rtt_process_time_;
+ int64_t next_process_time_;
+ uint16_t packet_overhead_;
+
+ // Send side
+ int64_t nack_last_time_sent_full_ms_;
+ uint16_t nack_last_seq_number_sent_;
+
+ RemoteBitrateEstimator* const remote_bitrate_;
+
+ RtcpRttStats* const rtt_stats_;
+
+ // The processed RTT from RtcpRttStats.
+ rtc::CriticalSection critical_section_rtt_;
+ int64_t rtt_ms_;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_RTP_RTCP_SOURCE_RTP_RTCP_IMPL2_H_
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2_unittest.cc
new file mode 100644
index 00000000000..5861ae99caa
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2_unittest.cc
@@ -0,0 +1,630 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
+
+#include <map>
+#include <memory>
+#include <set>
+
+#include "api/transport/field_trial_based_config.h"
+#include "api/video_codecs/video_codec.h"
+#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "modules/rtp_rtcp/source/rtcp_packet.h"
+#include "modules/rtp_rtcp/source/rtcp_packet/nack.h"
+#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "modules/rtp_rtcp/source/rtp_sender_video.h"
+#include "rtc_base/rate_limiter.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/rtcp_packet_parser.h"
+#include "test/rtp_header_parser.h"
+
+using ::testing::ElementsAre;
+
+namespace webrtc {
+namespace {
+const uint32_t kSenderSsrc = 0x12345;
+const uint32_t kReceiverSsrc = 0x23456;
+const int64_t kOneWayNetworkDelayMs = 100;
+const uint8_t kBaseLayerTid = 0;
+const uint8_t kHigherLayerTid = 1;
+const uint16_t kSequenceNumber = 100;
+
+class RtcpRttStatsTestImpl : public RtcpRttStats {
+ public:
+ RtcpRttStatsTestImpl() : rtt_ms_(0) {}
+ ~RtcpRttStatsTestImpl() override = default;
+
+ void OnRttUpdate(int64_t rtt_ms) override { rtt_ms_ = rtt_ms; }
+ int64_t LastProcessedRtt() const override { return rtt_ms_; }
+ int64_t rtt_ms_;
+};
+
+class SendTransport : public Transport {
+ public:
+ SendTransport()
+ : receiver_(nullptr),
+ clock_(nullptr),
+ delay_ms_(0),
+ rtp_packets_sent_(0),
+ rtcp_packets_sent_(0) {}
+
+ void SetRtpRtcpModule(ModuleRtpRtcpImpl2* receiver) { receiver_ = receiver; }
+ void SimulateNetworkDelay(int64_t delay_ms, SimulatedClock* clock) {
+ clock_ = clock;
+ delay_ms_ = delay_ms;
+ }
+ bool SendRtp(const uint8_t* data,
+ size_t len,
+ const PacketOptions& options) override {
+ RTPHeader header;
+ std::unique_ptr<RtpHeaderParser> parser(RtpHeaderParser::CreateForTest());
+ EXPECT_TRUE(parser->Parse(static_cast<const uint8_t*>(data), len, &header));
+ ++rtp_packets_sent_;
+ last_rtp_header_ = header;
+ return true;
+ }
+ bool SendRtcp(const uint8_t* data, size_t len) override {
+ test::RtcpPacketParser parser;
+ parser.Parse(data, len);
+ last_nack_list_ = parser.nack()->packet_ids();
+
+ if (clock_) {
+ clock_->AdvanceTimeMilliseconds(delay_ms_);
+ }
+ EXPECT_TRUE(receiver_);
+ receiver_->IncomingRtcpPacket(data, len);
+ ++rtcp_packets_sent_;
+ return true;
+ }
+ size_t NumRtcpSent() { return rtcp_packets_sent_; }
+ ModuleRtpRtcpImpl2* receiver_;
+ SimulatedClock* clock_;
+ int64_t delay_ms_;
+ int rtp_packets_sent_;
+ size_t rtcp_packets_sent_;
+ RTPHeader last_rtp_header_;
+ std::vector<uint16_t> last_nack_list_;
+};
+
+class RtpRtcpModule : public RtcpPacketTypeCounterObserver {
+ public:
+ RtpRtcpModule(SimulatedClock* clock, bool is_sender)
+ : is_sender_(is_sender),
+ receive_statistics_(ReceiveStatistics::Create(clock)),
+ clock_(clock) {
+ CreateModuleImpl();
+ transport_.SimulateNetworkDelay(kOneWayNetworkDelayMs, clock);
+ }
+
+ const bool is_sender_;
+ RtcpPacketTypeCounter packets_sent_;
+ RtcpPacketTypeCounter packets_received_;
+ std::unique_ptr<ReceiveStatistics> receive_statistics_;
+ SendTransport transport_;
+ RtcpRttStatsTestImpl rtt_stats_;
+ std::unique_ptr<ModuleRtpRtcpImpl2> impl_;
+ int rtcp_report_interval_ms_ = 0;
+
+ void RtcpPacketTypesCounterUpdated(
+ uint32_t ssrc,
+ const RtcpPacketTypeCounter& packet_counter) override {
+ counter_map_[ssrc] = packet_counter;
+ }
+
+ RtcpPacketTypeCounter RtcpSent() {
+ // RTCP counters for remote SSRC.
+ return counter_map_[is_sender_ ? kReceiverSsrc : kSenderSsrc];
+ }
+
+ RtcpPacketTypeCounter RtcpReceived() {
+ // Received RTCP stats for (own) local SSRC.
+ return counter_map_[impl_->SSRC()];
+ }
+ int RtpSent() { return transport_.rtp_packets_sent_; }
+ uint16_t LastRtpSequenceNumber() {
+ return transport_.last_rtp_header_.sequenceNumber;
+ }
+ std::vector<uint16_t> LastNackListSent() {
+ return transport_.last_nack_list_;
+ }
+ void SetRtcpReportIntervalAndReset(int rtcp_report_interval_ms) {
+ rtcp_report_interval_ms_ = rtcp_report_interval_ms;
+ CreateModuleImpl();
+ }
+
+ private:
+ void CreateModuleImpl() {
+ RtpRtcpInterface::Configuration config;
+ config.audio = false;
+ config.clock = clock_;
+ config.outgoing_transport = &transport_;
+ config.receive_statistics = receive_statistics_.get();
+ config.rtcp_packet_type_counter_observer = this;
+ config.rtt_stats = &rtt_stats_;
+ config.rtcp_report_interval_ms = rtcp_report_interval_ms_;
+ config.local_media_ssrc = is_sender_ ? kSenderSsrc : kReceiverSsrc;
+ config.need_rtp_packet_infos = true;
+
+ impl_.reset(new ModuleRtpRtcpImpl2(config));
+ impl_->SetRemoteSSRC(is_sender_ ? kReceiverSsrc : kSenderSsrc);
+ impl_->SetRTCPStatus(RtcpMode::kCompound);
+ }
+
+ SimulatedClock* const clock_;
+ std::map<uint32_t, RtcpPacketTypeCounter> counter_map_;
+};
+} // namespace
+
+class RtpRtcpImpl2Test : public ::testing::Test {
+ protected:
+ RtpRtcpImpl2Test()
+ : clock_(133590000000000),
+ sender_(&clock_, /*is_sender=*/true),
+ receiver_(&clock_, /*is_sender=*/false) {}
+
+ void SetUp() override {
+ // Send module.
+ EXPECT_EQ(0, sender_.impl_->SetSendingStatus(true));
+ sender_.impl_->SetSendingMediaStatus(true);
+ sender_.impl_->SetSequenceNumber(kSequenceNumber);
+ sender_.impl_->SetStorePacketsStatus(true, 100);
+
+ FieldTrialBasedConfig field_trials;
+ RTPSenderVideo::Config video_config;
+ video_config.clock = &clock_;
+ video_config.rtp_sender = sender_.impl_->RtpSender();
+ video_config.field_trials = &field_trials;
+ sender_video_ = std::make_unique<RTPSenderVideo>(video_config);
+
+ memset(&codec_, 0, sizeof(VideoCodec));
+ codec_.plType = 100;
+ codec_.width = 320;
+ codec_.height = 180;
+
+ // Receive module.
+ EXPECT_EQ(0, receiver_.impl_->SetSendingStatus(false));
+ receiver_.impl_->SetSendingMediaStatus(false);
+ // Transport settings.
+ sender_.transport_.SetRtpRtcpModule(receiver_.impl_.get());
+ receiver_.transport_.SetRtpRtcpModule(sender_.impl_.get());
+ }
+
+ SimulatedClock clock_;
+ RtpRtcpModule sender_;
+ std::unique_ptr<RTPSenderVideo> sender_video_;
+ RtpRtcpModule receiver_;
+ VideoCodec codec_;
+
+ void SendFrame(const RtpRtcpModule* module,
+ RTPSenderVideo* sender,
+ uint8_t tid) {
+ RTPVideoHeaderVP8 vp8_header = {};
+ vp8_header.temporalIdx = tid;
+ RTPVideoHeader rtp_video_header;
+ rtp_video_header.frame_type = VideoFrameType::kVideoFrameKey;
+ rtp_video_header.width = codec_.width;
+ rtp_video_header.height = codec_.height;
+ rtp_video_header.rotation = kVideoRotation_0;
+ rtp_video_header.content_type = VideoContentType::UNSPECIFIED;
+ rtp_video_header.playout_delay = {-1, -1};
+ rtp_video_header.is_first_packet_in_frame = true;
+ rtp_video_header.simulcastIdx = 0;
+ rtp_video_header.codec = kVideoCodecVP8;
+ rtp_video_header.video_type_header = vp8_header;
+ rtp_video_header.video_timing = {0u, 0u, 0u, 0u, 0u, 0u, false};
+
+ const uint8_t payload[100] = {0};
+ EXPECT_TRUE(module->impl_->OnSendingRtpFrame(0, 0, codec_.plType, true));
+ EXPECT_TRUE(sender->SendVideo(codec_.plType, VideoCodecType::kVideoCodecVP8,
+ 0, 0, payload, nullptr, rtp_video_header, 0));
+ }
+
+ void IncomingRtcpNack(const RtpRtcpModule* module, uint16_t sequence_number) {
+ bool sender = module->impl_->SSRC() == kSenderSsrc;
+ rtcp::Nack nack;
+ uint16_t list[1];
+ list[0] = sequence_number;
+ const uint16_t kListLength = sizeof(list) / sizeof(list[0]);
+ nack.SetSenderSsrc(sender ? kReceiverSsrc : kSenderSsrc);
+ nack.SetMediaSsrc(sender ? kSenderSsrc : kReceiverSsrc);
+ nack.SetPacketIds(list, kListLength);
+ rtc::Buffer packet = nack.Build();
+ module->impl_->IncomingRtcpPacket(packet.data(), packet.size());
+ }
+};
+
+TEST_F(RtpRtcpImpl2Test, RetransmitsAllLayers) {
+ // Send frames.
+ EXPECT_EQ(0, sender_.RtpSent());
+ SendFrame(&sender_, sender_video_.get(), kBaseLayerTid); // kSequenceNumber
+ SendFrame(&sender_, sender_video_.get(),
+ kHigherLayerTid); // kSequenceNumber + 1
+ SendFrame(&sender_, sender_video_.get(),
+ kNoTemporalIdx); // kSequenceNumber + 2
+ EXPECT_EQ(3, sender_.RtpSent());
+ EXPECT_EQ(kSequenceNumber + 2, sender_.LastRtpSequenceNumber());
+
+ // Min required delay until retransmit = 5 + RTT ms (RTT = 0).
+ clock_.AdvanceTimeMilliseconds(5);
+
+ // Frame with kBaseLayerTid re-sent.
+ IncomingRtcpNack(&sender_, kSequenceNumber);
+ EXPECT_EQ(4, sender_.RtpSent());
+ EXPECT_EQ(kSequenceNumber, sender_.LastRtpSequenceNumber());
+ // Frame with kHigherLayerTid re-sent.
+ IncomingRtcpNack(&sender_, kSequenceNumber + 1);
+ EXPECT_EQ(5, sender_.RtpSent());
+ EXPECT_EQ(kSequenceNumber + 1, sender_.LastRtpSequenceNumber());
+ // Frame with kNoTemporalIdx re-sent.
+ IncomingRtcpNack(&sender_, kSequenceNumber + 2);
+ EXPECT_EQ(6, sender_.RtpSent());
+ EXPECT_EQ(kSequenceNumber + 2, sender_.LastRtpSequenceNumber());
+}
+
+TEST_F(RtpRtcpImpl2Test, Rtt) {
+ RtpPacketReceived packet;
+ packet.SetTimestamp(1);
+ packet.SetSequenceNumber(123);
+ packet.SetSsrc(kSenderSsrc);
+ packet.AllocatePayload(100 - 12);
+ receiver_.receive_statistics_->OnRtpPacket(packet);
+
+ // Send Frame before sending an SR.
+ SendFrame(&sender_, sender_video_.get(), kBaseLayerTid);
+ // Sender module should send an SR.
+ EXPECT_EQ(0, sender_.impl_->SendRTCP(kRtcpReport));
+
+ // Receiver module should send a RR with a response to the last received SR.
+ clock_.AdvanceTimeMilliseconds(1000);
+ EXPECT_EQ(0, receiver_.impl_->SendRTCP(kRtcpReport));
+
+ // Verify RTT.
+ int64_t rtt;
+ int64_t avg_rtt;
+ int64_t min_rtt;
+ int64_t max_rtt;
+ EXPECT_EQ(
+ 0, sender_.impl_->RTT(kReceiverSsrc, &rtt, &avg_rtt, &min_rtt, &max_rtt));
+ EXPECT_NEAR(2 * kOneWayNetworkDelayMs, rtt, 1);
+ EXPECT_NEAR(2 * kOneWayNetworkDelayMs, avg_rtt, 1);
+ EXPECT_NEAR(2 * kOneWayNetworkDelayMs, min_rtt, 1);
+ EXPECT_NEAR(2 * kOneWayNetworkDelayMs, max_rtt, 1);
+
+ // No RTT from other ssrc.
+ EXPECT_EQ(-1, sender_.impl_->RTT(kReceiverSsrc + 1, &rtt, &avg_rtt, &min_rtt,
+ &max_rtt));
+
+ // Verify RTT from rtt_stats config.
+ EXPECT_EQ(0, sender_.rtt_stats_.LastProcessedRtt());
+ EXPECT_EQ(0, sender_.impl_->rtt_ms());
+ sender_.impl_->Process();
+ EXPECT_NEAR(2 * kOneWayNetworkDelayMs, sender_.rtt_stats_.LastProcessedRtt(),
+ 1);
+ EXPECT_NEAR(2 * kOneWayNetworkDelayMs, sender_.impl_->rtt_ms(), 1);
+}
+
+TEST_F(RtpRtcpImpl2Test, SetRtcpXrRrtrStatus) {
+ EXPECT_FALSE(receiver_.impl_->RtcpXrRrtrStatus());
+ receiver_.impl_->SetRtcpXrRrtrStatus(true);
+ EXPECT_TRUE(receiver_.impl_->RtcpXrRrtrStatus());
+}
+
+TEST_F(RtpRtcpImpl2Test, RttForReceiverOnly) {
+ receiver_.impl_->SetRtcpXrRrtrStatus(true);
+
+  // Receiver module should send a receiver reference time report (RRTR).
+ EXPECT_EQ(0, receiver_.impl_->SendRTCP(kRtcpReport));
+
+ // Sender module should send a response to the last received RTRR (DLRR).
+ clock_.AdvanceTimeMilliseconds(1000);
+ // Send Frame before sending a SR.
+ SendFrame(&sender_, sender_video_.get(), kBaseLayerTid);
+ EXPECT_EQ(0, sender_.impl_->SendRTCP(kRtcpReport));
+
+ // Verify RTT.
+ EXPECT_EQ(0, receiver_.rtt_stats_.LastProcessedRtt());
+ EXPECT_EQ(0, receiver_.impl_->rtt_ms());
+ receiver_.impl_->Process();
+ EXPECT_NEAR(2 * kOneWayNetworkDelayMs,
+ receiver_.rtt_stats_.LastProcessedRtt(), 1);
+ EXPECT_NEAR(2 * kOneWayNetworkDelayMs, receiver_.impl_->rtt_ms(), 1);
+}
+
+TEST_F(RtpRtcpImpl2Test, NoSrBeforeMedia) {
+ // Ignore fake transport delays in this test.
+ sender_.transport_.SimulateNetworkDelay(0, &clock_);
+ receiver_.transport_.SimulateNetworkDelay(0, &clock_);
+
+ sender_.impl_->Process();
+ EXPECT_EQ(-1, sender_.RtcpSent().first_packet_time_ms);
+
+ // Verify no SR is sent before media has been sent, RR should still be sent
+ // from the receiving module though.
+ clock_.AdvanceTimeMilliseconds(2000);
+ int64_t current_time = clock_.TimeInMilliseconds();
+ sender_.impl_->Process();
+ receiver_.impl_->Process();
+ EXPECT_EQ(-1, sender_.RtcpSent().first_packet_time_ms);
+ EXPECT_EQ(receiver_.RtcpSent().first_packet_time_ms, current_time);
+
+ SendFrame(&sender_, sender_video_.get(), kBaseLayerTid);
+ EXPECT_EQ(sender_.RtcpSent().first_packet_time_ms, current_time);
+}
+
+TEST_F(RtpRtcpImpl2Test, RtcpPacketTypeCounter_Nack) {
+ EXPECT_EQ(-1, receiver_.RtcpSent().first_packet_time_ms);
+ EXPECT_EQ(-1, sender_.RtcpReceived().first_packet_time_ms);
+ EXPECT_EQ(0U, sender_.RtcpReceived().nack_packets);
+ EXPECT_EQ(0U, receiver_.RtcpSent().nack_packets);
+
+ // Receive module sends a NACK.
+ const uint16_t kNackLength = 1;
+ uint16_t nack_list[kNackLength] = {123};
+ EXPECT_EQ(0, receiver_.impl_->SendNACK(nack_list, kNackLength));
+ EXPECT_EQ(1U, receiver_.RtcpSent().nack_packets);
+ EXPECT_GT(receiver_.RtcpSent().first_packet_time_ms, -1);
+
+ // Send module receives the NACK.
+ EXPECT_EQ(1U, sender_.RtcpReceived().nack_packets);
+ EXPECT_GT(sender_.RtcpReceived().first_packet_time_ms, -1);
+}
+
+TEST_F(RtpRtcpImpl2Test, AddStreamDataCounters) {
+ StreamDataCounters rtp;
+ const int64_t kStartTimeMs = 1;
+ rtp.first_packet_time_ms = kStartTimeMs;
+ rtp.transmitted.packets = 1;
+ rtp.transmitted.payload_bytes = 1;
+ rtp.transmitted.header_bytes = 2;
+ rtp.transmitted.padding_bytes = 3;
+ EXPECT_EQ(rtp.transmitted.TotalBytes(), rtp.transmitted.payload_bytes +
+ rtp.transmitted.header_bytes +
+ rtp.transmitted.padding_bytes);
+
+ StreamDataCounters rtp2;
+ rtp2.first_packet_time_ms = -1;
+ rtp2.transmitted.packets = 10;
+ rtp2.transmitted.payload_bytes = 10;
+ rtp2.retransmitted.header_bytes = 4;
+ rtp2.retransmitted.payload_bytes = 5;
+ rtp2.retransmitted.padding_bytes = 6;
+ rtp2.retransmitted.packets = 7;
+ rtp2.fec.packets = 8;
+
+ StreamDataCounters sum = rtp;
+ sum.Add(rtp2);
+ EXPECT_EQ(kStartTimeMs, sum.first_packet_time_ms);
+ EXPECT_EQ(11U, sum.transmitted.packets);
+ EXPECT_EQ(11U, sum.transmitted.payload_bytes);
+ EXPECT_EQ(2U, sum.transmitted.header_bytes);
+ EXPECT_EQ(3U, sum.transmitted.padding_bytes);
+ EXPECT_EQ(4U, sum.retransmitted.header_bytes);
+ EXPECT_EQ(5U, sum.retransmitted.payload_bytes);
+ EXPECT_EQ(6U, sum.retransmitted.padding_bytes);
+ EXPECT_EQ(7U, sum.retransmitted.packets);
+ EXPECT_EQ(8U, sum.fec.packets);
+ EXPECT_EQ(sum.transmitted.TotalBytes(),
+ rtp.transmitted.TotalBytes() + rtp2.transmitted.TotalBytes());
+
+ StreamDataCounters rtp3;
+ rtp3.first_packet_time_ms = kStartTimeMs + 10;
+ sum.Add(rtp3);
+ EXPECT_EQ(kStartTimeMs, sum.first_packet_time_ms); // Holds oldest time.
+}
+
+TEST_F(RtpRtcpImpl2Test, SendsInitialNackList) {
+ // Send module sends a NACK.
+ const uint16_t kNackLength = 1;
+ uint16_t nack_list[kNackLength] = {123};
+ EXPECT_EQ(0U, sender_.RtcpSent().nack_packets);
+ // Send Frame before sending a compound RTCP that starts with SR.
+ SendFrame(&sender_, sender_video_.get(), kBaseLayerTid);
+ EXPECT_EQ(0, sender_.impl_->SendNACK(nack_list, kNackLength));
+ EXPECT_EQ(1U, sender_.RtcpSent().nack_packets);
+ EXPECT_THAT(sender_.LastNackListSent(), ElementsAre(123));
+}
+
+TEST_F(RtpRtcpImpl2Test, SendsExtendedNackList) {
+ // Send module sends a NACK.
+ const uint16_t kNackLength = 1;
+ uint16_t nack_list[kNackLength] = {123};
+ EXPECT_EQ(0U, sender_.RtcpSent().nack_packets);
+ // Send Frame before sending a compound RTCP that starts with SR.
+ SendFrame(&sender_, sender_video_.get(), kBaseLayerTid);
+ EXPECT_EQ(0, sender_.impl_->SendNACK(nack_list, kNackLength));
+ EXPECT_EQ(1U, sender_.RtcpSent().nack_packets);
+ EXPECT_THAT(sender_.LastNackListSent(), ElementsAre(123));
+
+ // Same list not re-send.
+ EXPECT_EQ(0, sender_.impl_->SendNACK(nack_list, kNackLength));
+ EXPECT_EQ(1U, sender_.RtcpSent().nack_packets);
+ EXPECT_THAT(sender_.LastNackListSent(), ElementsAre(123));
+
+ // Only extended list sent.
+ const uint16_t kNackExtLength = 2;
+ uint16_t nack_list_ext[kNackExtLength] = {123, 124};
+ EXPECT_EQ(0, sender_.impl_->SendNACK(nack_list_ext, kNackExtLength));
+ EXPECT_EQ(2U, sender_.RtcpSent().nack_packets);
+ EXPECT_THAT(sender_.LastNackListSent(), ElementsAre(124));
+}
+
+TEST_F(RtpRtcpImpl2Test, ReSendsNackListAfterRttMs) {
+ sender_.transport_.SimulateNetworkDelay(0, &clock_);
+ // Send module sends a NACK.
+ const uint16_t kNackLength = 2;
+ uint16_t nack_list[kNackLength] = {123, 125};
+ EXPECT_EQ(0U, sender_.RtcpSent().nack_packets);
+ // Send Frame before sending a compound RTCP that starts with SR.
+ SendFrame(&sender_, sender_video_.get(), kBaseLayerTid);
+ EXPECT_EQ(0, sender_.impl_->SendNACK(nack_list, kNackLength));
+ EXPECT_EQ(1U, sender_.RtcpSent().nack_packets);
+ EXPECT_THAT(sender_.LastNackListSent(), ElementsAre(123, 125));
+
+ // Same list not re-send, rtt interval has not passed.
+ const int kStartupRttMs = 100;
+ clock_.AdvanceTimeMilliseconds(kStartupRttMs);
+ EXPECT_EQ(0, sender_.impl_->SendNACK(nack_list, kNackLength));
+ EXPECT_EQ(1U, sender_.RtcpSent().nack_packets);
+
+ // Rtt interval passed, full list sent.
+ clock_.AdvanceTimeMilliseconds(1);
+ EXPECT_EQ(0, sender_.impl_->SendNACK(nack_list, kNackLength));
+ EXPECT_EQ(2U, sender_.RtcpSent().nack_packets);
+ EXPECT_THAT(sender_.LastNackListSent(), ElementsAre(123, 125));
+}
+
+TEST_F(RtpRtcpImpl2Test, UniqueNackRequests) {
+ receiver_.transport_.SimulateNetworkDelay(0, &clock_);
+ EXPECT_EQ(0U, receiver_.RtcpSent().nack_packets);
+ EXPECT_EQ(0U, receiver_.RtcpSent().nack_requests);
+ EXPECT_EQ(0U, receiver_.RtcpSent().unique_nack_requests);
+ EXPECT_EQ(0, receiver_.RtcpSent().UniqueNackRequestsInPercent());
+
+ // Receive module sends NACK request.
+ const uint16_t kNackLength = 4;
+ uint16_t nack_list[kNackLength] = {10, 11, 13, 18};
+ EXPECT_EQ(0, receiver_.impl_->SendNACK(nack_list, kNackLength));
+ EXPECT_EQ(1U, receiver_.RtcpSent().nack_packets);
+ EXPECT_EQ(4U, receiver_.RtcpSent().nack_requests);
+ EXPECT_EQ(4U, receiver_.RtcpSent().unique_nack_requests);
+ EXPECT_THAT(receiver_.LastNackListSent(), ElementsAre(10, 11, 13, 18));
+
+ // Send module receives the request.
+ EXPECT_EQ(1U, sender_.RtcpReceived().nack_packets);
+ EXPECT_EQ(4U, sender_.RtcpReceived().nack_requests);
+ EXPECT_EQ(4U, sender_.RtcpReceived().unique_nack_requests);
+ EXPECT_EQ(100, sender_.RtcpReceived().UniqueNackRequestsInPercent());
+
+ // Receive module sends new request with duplicated packets.
+ const int kStartupRttMs = 100;
+ clock_.AdvanceTimeMilliseconds(kStartupRttMs + 1);
+ const uint16_t kNackLength2 = 4;
+ uint16_t nack_list2[kNackLength2] = {11, 18, 20, 21};
+ EXPECT_EQ(0, receiver_.impl_->SendNACK(nack_list2, kNackLength2));
+ EXPECT_EQ(2U, receiver_.RtcpSent().nack_packets);
+ EXPECT_EQ(8U, receiver_.RtcpSent().nack_requests);
+ EXPECT_EQ(6U, receiver_.RtcpSent().unique_nack_requests);
+ EXPECT_THAT(receiver_.LastNackListSent(), ElementsAre(11, 18, 20, 21));
+
+ // Send module receives the request.
+ EXPECT_EQ(2U, sender_.RtcpReceived().nack_packets);
+ EXPECT_EQ(8U, sender_.RtcpReceived().nack_requests);
+ EXPECT_EQ(6U, sender_.RtcpReceived().unique_nack_requests);
+ EXPECT_EQ(75, sender_.RtcpReceived().UniqueNackRequestsInPercent());
+}
+
+TEST_F(RtpRtcpImpl2Test, ConfigurableRtcpReportInterval) {
+ const int kVideoReportInterval = 3000;
+
+ // Recreate sender impl with new configuration, and redo setup.
+ sender_.SetRtcpReportIntervalAndReset(kVideoReportInterval);
+ SetUp();
+
+ SendFrame(&sender_, sender_video_.get(), kBaseLayerTid);
+
+ // Initial state
+ sender_.impl_->Process();
+ EXPECT_EQ(sender_.RtcpSent().first_packet_time_ms, -1);
+ EXPECT_EQ(0u, sender_.transport_.NumRtcpSent());
+
+ // Move ahead to the last ms before a rtcp is expected, no action.
+ clock_.AdvanceTimeMilliseconds(kVideoReportInterval / 2 - 1);
+ sender_.impl_->Process();
+ EXPECT_EQ(sender_.RtcpSent().first_packet_time_ms, -1);
+ EXPECT_EQ(sender_.transport_.NumRtcpSent(), 0u);
+
+ // Move ahead to the first rtcp. Send RTCP.
+ clock_.AdvanceTimeMilliseconds(1);
+ sender_.impl_->Process();
+ EXPECT_GT(sender_.RtcpSent().first_packet_time_ms, -1);
+ EXPECT_EQ(sender_.transport_.NumRtcpSent(), 1u);
+
+ SendFrame(&sender_, sender_video_.get(), kBaseLayerTid);
+
+ // Move ahead to the last possible second before second rtcp is expected.
+ clock_.AdvanceTimeMilliseconds(kVideoReportInterval * 1 / 2 - 1);
+ sender_.impl_->Process();
+ EXPECT_EQ(sender_.transport_.NumRtcpSent(), 1u);
+
+ // Move ahead into the range of second rtcp, the second rtcp may be sent.
+ clock_.AdvanceTimeMilliseconds(1);
+ sender_.impl_->Process();
+ EXPECT_GE(sender_.transport_.NumRtcpSent(), 1u);
+
+ clock_.AdvanceTimeMilliseconds(kVideoReportInterval / 2);
+ sender_.impl_->Process();
+ EXPECT_GE(sender_.transport_.NumRtcpSent(), 1u);
+
+ // Move out the range of second rtcp, the second rtcp must have been sent.
+ clock_.AdvanceTimeMilliseconds(kVideoReportInterval / 2);
+ sender_.impl_->Process();
+ EXPECT_EQ(sender_.transport_.NumRtcpSent(), 2u);
+}
+
+TEST_F(RtpRtcpImpl2Test, StoresPacketInfoForSentPackets) {
+ const uint32_t kStartTimestamp = 1u;
+ SetUp();
+ sender_.impl_->SetStartTimestamp(kStartTimestamp);
+
+ PacedPacketInfo pacing_info;
+ RtpPacketToSend packet(nullptr);
+ packet.set_packet_type(RtpPacketToSend::Type::kVideo);
+ packet.SetSsrc(kSenderSsrc);
+
+ // Single-packet frame.
+ packet.SetTimestamp(1);
+ packet.SetSequenceNumber(1);
+ packet.set_first_packet_of_frame(true);
+ packet.SetMarker(true);
+ sender_.impl_->TrySendPacket(&packet, pacing_info);
+
+ std::vector<RtpSequenceNumberMap::Info> seqno_info =
+ sender_.impl_->GetSentRtpPacketInfos(std::vector<uint16_t>{1});
+
+ EXPECT_THAT(seqno_info, ElementsAre(RtpSequenceNumberMap::Info(
+ /*timestamp=*/1 - kStartTimestamp,
+ /*is_first=*/1,
+ /*is_last=*/1)));
+
+ // Three-packet frame.
+ packet.SetTimestamp(2);
+ packet.SetSequenceNumber(2);
+ packet.set_first_packet_of_frame(true);
+ packet.SetMarker(false);
+ sender_.impl_->TrySendPacket(&packet, pacing_info);
+
+ packet.SetSequenceNumber(3);
+ packet.set_first_packet_of_frame(false);
+ sender_.impl_->TrySendPacket(&packet, pacing_info);
+
+ packet.SetSequenceNumber(4);
+ packet.SetMarker(true);
+ sender_.impl_->TrySendPacket(&packet, pacing_info);
+
+ seqno_info =
+ sender_.impl_->GetSentRtpPacketInfos(std::vector<uint16_t>{2, 3, 4});
+
+ EXPECT_THAT(seqno_info, ElementsAre(RtpSequenceNumberMap::Info(
+ /*timestamp=*/2 - kStartTimestamp,
+ /*is_first=*/1,
+ /*is_last=*/0),
+ RtpSequenceNumberMap::Info(
+ /*timestamp=*/2 - kStartTimestamp,
+ /*is_first=*/0,
+ /*is_last=*/0),
+ RtpSequenceNumberMap::Info(
+ /*timestamp=*/2 - kStartTimestamp,
+ /*is_first=*/0,
+ /*is_last=*/1)));
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc
index e2595664f6e..dd7b512ff21 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc
@@ -143,7 +143,7 @@ class RtpRtcpModule : public RtcpPacketTypeCounterObserver {
private:
void CreateModuleImpl() {
- RtpRtcp::Configuration config;
+ RtpRtcpInterface::Configuration config;
config.audio = false;
config.clock = clock_;
config.outgoing_transport = &transport_;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_interface.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_interface.h
new file mode 100644
index 00000000000..440837fc5df
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_interface.h
@@ -0,0 +1,424 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_RTP_RTCP_SOURCE_RTP_RTCP_INTERFACE_H_
+#define MODULES_RTP_RTCP_SOURCE_RTP_RTCP_INTERFACE_H_
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/frame_transformer_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/transport/webrtc_key_value_config.h"
+#include "api/video/video_bitrate_allocation.h"
+#include "modules/rtp_rtcp/include/receive_statistics.h"
+#include "modules/rtp_rtcp/include/report_block_data.h"
+#include "modules/rtp_rtcp/include/rtp_packet_sender.h"
+#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
+#include "modules/rtp_rtcp/source/rtp_sequence_number_map.h"
+#include "modules/rtp_rtcp/source/video_fec_generator.h"
+#include "rtc_base/constructor_magic.h"
+
+namespace webrtc {
+
+// Forward declarations.
+class FrameEncryptorInterface;
+class RateLimiter;
+class RemoteBitrateEstimator;
+class RtcEventLog;
+class RTPSender;
+class Transport;
+class VideoBitrateAllocationObserver;
+
+class RtpRtcpInterface : public RtcpFeedbackSenderInterface {
+ public:
+ struct Configuration {
+ Configuration() = default;
+ Configuration(Configuration&& rhs) = default;
+
+ // True for an audio version of the RTP/RTCP module object; false will
+ // create a video version.
+ bool audio = false;
+ bool receiver_only = false;
+
+ // The clock to use to read time. If nullptr then system clock will be used.
+ Clock* clock = nullptr;
+
+ ReceiveStatisticsProvider* receive_statistics = nullptr;
+
+ // Transport object that will be called when packets are ready to be sent
+ // out on the network.
+ Transport* outgoing_transport = nullptr;
+
+ // Called when the receiver requests an intra frame.
+ RtcpIntraFrameObserver* intra_frame_callback = nullptr;
+
+ // Called when the receiver sends a loss notification.
+ RtcpLossNotificationObserver* rtcp_loss_notification_observer = nullptr;
+
+ // Called when we receive a changed estimate from the receiver of our
+ // stream.
+ RtcpBandwidthObserver* bandwidth_callback = nullptr;
+
+ NetworkStateEstimateObserver* network_state_estimate_observer = nullptr;
+ TransportFeedbackObserver* transport_feedback_callback = nullptr;
+ VideoBitrateAllocationObserver* bitrate_allocation_observer = nullptr;
+ RtcpRttStats* rtt_stats = nullptr;
+ RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer = nullptr;
+ // Called on receipt of RTCP report block from remote side.
+ // TODO(bugs.webrtc.org/10678): Remove RtcpStatisticsCallback in
+ // favor of ReportBlockDataObserver.
+ // TODO(bugs.webrtc.org/10679): Consider whether we want to use
+ // only getters or only callbacks. If we decide on getters, the
+ // ReportBlockDataObserver should also be removed in favor of
+ // GetLatestReportBlockData().
+ RtcpStatisticsCallback* rtcp_statistics_callback = nullptr;
+ RtcpCnameCallback* rtcp_cname_callback = nullptr;
+ ReportBlockDataObserver* report_block_data_observer = nullptr;
+
+ // Estimates the bandwidth available for a set of streams from the same
+ // client.
+ RemoteBitrateEstimator* remote_bitrate_estimator = nullptr;
+
+ // Spread any bursts of packets into smaller bursts to minimize packet loss.
+ RtpPacketSender* paced_sender = nullptr;
+
+ // Generates FEC packets.
+ // TODO(sprang): Wire up to RtpSenderEgress.
+ VideoFecGenerator* fec_generator = nullptr;
+
+ BitrateStatisticsObserver* send_bitrate_observer = nullptr;
+ SendSideDelayObserver* send_side_delay_observer = nullptr;
+ RtcEventLog* event_log = nullptr;
+ SendPacketObserver* send_packet_observer = nullptr;
+ RateLimiter* retransmission_rate_limiter = nullptr;
+ StreamDataCountersCallback* rtp_stats_callback = nullptr;
+
+ int rtcp_report_interval_ms = 0;
+
+ // Update network2 instead of pacer_exit field of video timing extension.
+ bool populate_network2_timestamp = false;
+
+ rtc::scoped_refptr<FrameTransformerInterface> frame_transformer;
+
+ // E2EE Custom Video Frame Encryption
+ FrameEncryptorInterface* frame_encryptor = nullptr;
+ // Require all outgoing frames to be encrypted with a FrameEncryptor.
+ bool require_frame_encryption = false;
+
+ // Corresponds to extmap-allow-mixed in SDP negotiation.
+ bool extmap_allow_mixed = false;
+
+ // If true, the RTP sender will always annotate outgoing packets with
+ // MID and RID header extensions, if provided and negotiated.
+ // If false, the RTP sender will stop sending MID and RID header extensions,
+ // when it knows that the receiver is ready to demux based on SSRC. This is
+ // done by RTCP RR acking.
+ bool always_send_mid_and_rid = false;
+
+ // If set, field trials are read from |field_trials|, otherwise
+ // defaults to webrtc::FieldTrialBasedConfig.
+ const WebRtcKeyValueConfig* field_trials = nullptr;
+
+ // SSRCs for media and retransmission, respectively.
+ // FlexFec SSRC is fetched from |flexfec_sender|.
+ uint32_t local_media_ssrc = 0;
+ absl::optional<uint32_t> rtx_send_ssrc;
+
+ bool need_rtp_packet_infos = false;
+
+ // If true, the RTP packet history will select RTX packets based on
+ // heuristics such as send time, retransmission count etc, in order to
+ // make padding potentially more useful.
+ // If false, the last packet will always be picked. This may reduce CPU
+ // overhead.
+ bool enable_rtx_padding_prioritization = true;
+
+ private:
+ RTC_DISALLOW_COPY_AND_ASSIGN(Configuration);
+ };
+
+ // **************************************************************************
+ // Receiver functions
+ // **************************************************************************
+
+ virtual void IncomingRtcpPacket(const uint8_t* incoming_packet,
+ size_t incoming_packet_length) = 0;
+
+ virtual void SetRemoteSSRC(uint32_t ssrc) = 0;
+
+ // **************************************************************************
+ // Sender
+ // **************************************************************************
+
+ // Sets the maximum size of an RTP packet, including RTP headers.
+ virtual void SetMaxRtpPacketSize(size_t size) = 0;
+
+ // Returns max RTP packet size. Takes into account RTP headers and
+ // FEC/ULP/RED overhead (when FEC is enabled).
+ virtual size_t MaxRtpPacketSize() const = 0;
+
+ virtual void RegisterSendPayloadFrequency(int payload_type,
+ int payload_frequency) = 0;
+
+ // Unregisters a send payload.
+ // |payload_type| - payload type of codec
+ // Returns -1 on failure else 0.
+ virtual int32_t DeRegisterSendPayload(int8_t payload_type) = 0;
+
+ virtual void SetExtmapAllowMixed(bool extmap_allow_mixed) = 0;
+
+ // Register extension by uri, triggers CHECK on failure.
+ virtual void RegisterRtpHeaderExtension(absl::string_view uri, int id) = 0;
+
+ virtual int32_t DeregisterSendRtpHeaderExtension(RTPExtensionType type) = 0;
+ virtual void DeregisterSendRtpHeaderExtension(absl::string_view uri) = 0;
+
+ // Returns true if RTP module is send media, and any of the extensions
+ // required for bandwidth estimation is registered.
+ virtual bool SupportsPadding() const = 0;
+ // Same as SupportsPadding(), but additionally requires that
+ // SetRtxSendStatus() has been called with the kRtxRedundantPayloads option
+ // enabled.
+ virtual bool SupportsRtxPayloadPadding() const = 0;
+
+ // Returns start timestamp.
+ virtual uint32_t StartTimestamp() const = 0;
+
+ // Sets start timestamp. Start timestamp is set to a random value if this
+ // function is never called.
+ virtual void SetStartTimestamp(uint32_t timestamp) = 0;
+
+ // Returns SequenceNumber.
+ virtual uint16_t SequenceNumber() const = 0;
+
+ // Sets SequenceNumber, default is a random number.
+ virtual void SetSequenceNumber(uint16_t seq) = 0;
+
+ virtual void SetRtpState(const RtpState& rtp_state) = 0;
+ virtual void SetRtxState(const RtpState& rtp_state) = 0;
+ virtual RtpState GetRtpState() const = 0;
+ virtual RtpState GetRtxState() const = 0;
+
+ // Returns SSRC.
+ virtual uint32_t SSRC() const = 0;
+
+ // Sets the value for sending in the RID (and Repaired) RTP header extension.
+ // RIDs are used to identify an RTP stream if SSRCs are not negotiated.
+ // If the RID and Repaired RID extensions are not registered, the RID will
+ // not be sent.
+ virtual void SetRid(const std::string& rid) = 0;
+
+ // Sets the value for sending in the MID RTP header extension.
+ // The MID RTP header extension should be registered for this to do anything.
+ // Once set, this value can not be changed or removed.
+ virtual void SetMid(const std::string& mid) = 0;
+
+ // Sets CSRC.
+ // |csrcs| - vector of CSRCs
+ virtual void SetCsrcs(const std::vector<uint32_t>& csrcs) = 0;
+
+ // Turns on/off sending RTX (RFC 4588). The modes can be set as a combination
+ // of values of the enumerator RtxMode.
+ virtual void SetRtxSendStatus(int modes) = 0;
+
+ // Returns status of sending RTX (RFC 4588). The returned value can be
+ // a combination of values of the enumerator RtxMode.
+ virtual int RtxSendStatus() const = 0;
+
+ // Returns the SSRC used for RTX if set, otherwise a nullopt.
+ virtual absl::optional<uint32_t> RtxSsrc() const = 0;
+
+ // Sets the payload type to use when sending RTX packets. Note that this
+ // doesn't enable RTX, only the payload type is set.
+ virtual void SetRtxSendPayloadType(int payload_type,
+ int associated_payload_type) = 0;
+
+ // Returns the FlexFEC SSRC, if there is one.
+ virtual absl::optional<uint32_t> FlexfecSsrc() const = 0;
+
+ // Sets sending status. Sends kRtcpByeCode when going from true to false.
+ // Returns -1 on failure else 0.
+ virtual int32_t SetSendingStatus(bool sending) = 0;
+
+ // Returns current sending status.
+ virtual bool Sending() const = 0;
+
+ // Starts/Stops media packets. On by default.
+ virtual void SetSendingMediaStatus(bool sending) = 0;
+
+ // Returns current media sending status.
+ virtual bool SendingMedia() const = 0;
+
+ // Returns whether audio is configured (i.e. Configuration::audio = true).
+ virtual bool IsAudioConfigured() const = 0;
+
+ // Indicate that the packets sent by this module should be counted towards the
+ // bitrate estimate since the stream participates in the bitrate allocation.
+ virtual void SetAsPartOfAllocation(bool part_of_allocation) = 0;
+
+ // TODO(sprang): Remove when all call sites have been moved to
+ // GetSendRates(). Fetches the current send bitrates in bits/s.
+ virtual void BitrateSent(uint32_t* total_rate,
+ uint32_t* video_rate,
+ uint32_t* fec_rate,
+ uint32_t* nack_rate) const = 0;
+
+ // Returns bitrate sent (post-pacing) per packet type.
+ virtual RtpSendRates GetSendRates() const = 0;
+
+ virtual RTPSender* RtpSender() = 0;
+ virtual const RTPSender* RtpSender() const = 0;
+
+ // Record that a frame is about to be sent. Returns true on success, and false
+ // if the module isn't ready to send.
+ virtual bool OnSendingRtpFrame(uint32_t timestamp,
+ int64_t capture_time_ms,
+ int payload_type,
+ bool force_sender_report) = 0;
+
+ // Try to send the provided packet. Returns true iff packet matches any of
+ // the SSRCs for this module (media/rtx/fec etc) and was forwarded to the
+ // transport.
+ virtual bool TrySendPacket(RtpPacketToSend* packet,
+ const PacedPacketInfo& pacing_info) = 0;
+
+ virtual void OnPacketsAcknowledged(
+ rtc::ArrayView<const uint16_t> sequence_numbers) = 0;
+
+ virtual std::vector<std::unique_ptr<RtpPacketToSend>> GeneratePadding(
+ size_t target_size_bytes) = 0;
+
+ virtual std::vector<RtpSequenceNumberMap::Info> GetSentRtpPacketInfos(
+ rtc::ArrayView<const uint16_t> sequence_numbers) const = 0;
+
+ // Returns an expected per packet overhead representing the main RTP header,
+ // any CSRCs, and the registered header extensions that are expected on all
+ // packets (i.e. disregarding things like abs capture time which is only
+ // populated on a subset of packets, but counting MID/RID type extensions
+ // when we expect to send them).
+ virtual size_t ExpectedPerPacketOverhead() const = 0;
+
+ // **************************************************************************
+ // RTCP
+ // **************************************************************************
+
+ // Returns RTCP status.
+ virtual RtcpMode RTCP() const = 0;
+
+ // Sets RTCP status, i.e. on (compound or non-compound) or off.
+ // |method| - RTCP method to use.
+ virtual void SetRTCPStatus(RtcpMode method) = 0;
+
+ // Sets RTCP CName (i.e. unique identifier).
+ // Returns -1 on failure else 0.
+ virtual int32_t SetCNAME(const char* cname) = 0;
+
+ // Returns remote NTP.
+ // Returns -1 on failure else 0.
+ virtual int32_t RemoteNTP(uint32_t* received_ntp_secs,
+ uint32_t* received_ntp_frac,
+ uint32_t* rtcp_arrival_time_secs,
+ uint32_t* rtcp_arrival_time_frac,
+ uint32_t* rtcp_timestamp) const = 0;
+
+ // Returns current RTT (round-trip time) estimate.
+ // Returns -1 on failure else 0.
+ virtual int32_t RTT(uint32_t remote_ssrc,
+ int64_t* rtt,
+ int64_t* avg_rtt,
+ int64_t* min_rtt,
+ int64_t* max_rtt) const = 0;
+
+ // Returns the estimated RTT, with fallback to a default value.
+ virtual int64_t ExpectedRetransmissionTimeMs() const = 0;
+
+ // Forces a send of a RTCP packet. Periodic SR and RR are triggered via the
+ // process function.
+ // Returns -1 on failure else 0.
+ virtual int32_t SendRTCP(RTCPPacketType rtcp_packet_type) = 0;
+
+ // Returns send statistics for the RTP and RTX stream.
+ virtual void GetSendStreamDataCounters(
+ StreamDataCounters* rtp_counters,
+ StreamDataCounters* rtx_counters) const = 0;
+
+ // Returns received RTCP report block.
+ // Returns -1 on failure else 0.
+ // TODO(https://crbug.com/webrtc/10678): Remove this in favor of
+ // GetLatestReportBlockData().
+ virtual int32_t RemoteRTCPStat(
+ std::vector<RTCPReportBlock>* receive_blocks) const = 0;
+ // A snapshot of Report Blocks with additional data of interest to statistics.
+ // Within this list, the sender-source SSRC pair is unique and per-pair the
+ // ReportBlockData represents the latest Report Block that was received for
+ // that pair.
+ virtual std::vector<ReportBlockData> GetLatestReportBlockData() const = 0;
+
+ // (XR) Sets Receiver Reference Time Report (RTTR) status.
+ virtual void SetRtcpXrRrtrStatus(bool enable) = 0;
+
+ // Returns current Receiver Reference Time Report (RTTR) status.
+ virtual bool RtcpXrRrtrStatus() const = 0;
+
+ // (REMB) Receiver Estimated Max Bitrate.
+ // Schedules sending REMB on next and following sender/receiver reports.
+ void SetRemb(int64_t bitrate_bps, std::vector<uint32_t> ssrcs) override = 0;
+ // Stops sending REMB on next and following sender/receiver reports.
+ void UnsetRemb() override = 0;
+
+ // (NACK)
+
+ // Sends a Negative acknowledgement packet.
+ // Returns -1 on failure else 0.
+ // TODO(philipel): Deprecate this and start using SendNack instead, mostly
+ // because we want a function that actually sends NACK for the specified
+ // packets.
+ virtual int32_t SendNACK(const uint16_t* nack_list, uint16_t size) = 0;
+
+ // Sends NACK for the packets specified.
+ // Note: This assumes the caller keeps track of timing and doesn't rely on
+ // the RTP module to do this.
+ virtual void SendNack(const std::vector<uint16_t>& sequence_numbers) = 0;
+
+ // Store the sent packets, needed to answer Negative acknowledgment
+ // requests.
+ virtual void SetStorePacketsStatus(bool enable, uint16_t numberToStore) = 0;
+
+ // Returns true if the module is configured to store packets.
+ virtual bool StorePackets() const = 0;
+
+ virtual void SetVideoBitrateAllocation(
+ const VideoBitrateAllocation& bitrate) = 0;
+
+ // **************************************************************************
+ // Video
+ // **************************************************************************
+
+ // Requests new key frame.
+ // using PLI, https://tools.ietf.org/html/rfc4585#section-6.3.1.1
+ void SendPictureLossIndication() { SendRTCP(kRtcpPli); }
+ // using FIR, https://tools.ietf.org/html/rfc5104#section-4.3.1.2
+ void SendFullIntraRequest() { SendRTCP(kRtcpFir); }
+
+ // Sends a LossNotification RTCP message.
+ // Returns -1 on failure else 0.
+ virtual int32_t SendLossNotification(uint16_t last_decoded_seq_num,
+ uint16_t last_received_seq_num,
+ bool decodability_flag,
+ bool buffering_allowed) = 0;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_RTP_RTCP_SOURCE_RTP_RTCP_INTERFACE_H_
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.cc
index 3023e595578..af2752d6e3c 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.cc
@@ -108,7 +108,6 @@ bool IsNonVolatile(RTPExtensionType type) {
case kRtpExtensionAbsoluteSendTime:
case kRtpExtensionTransportSequenceNumber:
case kRtpExtensionTransportSequenceNumber02:
- case kRtpExtensionFrameMarking:
case kRtpExtensionRtpStreamId:
case kRtpExtensionMid:
case kRtpExtensionGenericFrameDescriptor00:
@@ -154,7 +153,7 @@ double GetMaxPaddingSizeFactor(const WebRtcKeyValueConfig* field_trials) {
} // namespace
-RTPSender::RTPSender(const RtpRtcp::Configuration& config,
+RTPSender::RTPSender(const RtpRtcpInterface::Configuration& config,
RtpPacketHistory* packet_history,
RtpPacketSender* packet_sender)
: clock_(config.clock),
@@ -431,7 +430,7 @@ std::vector<std::unique_ptr<RtpPacketToSend>> RTPSender::GeneratePadding(
size_t padding_bytes_in_packet;
const size_t max_payload_size =
- max_packet_size_ - FecOrPaddingPacketMaxRtpHeaderLength();
+ max_packet_size_ - max_padding_fec_packet_header_;
if (audio_configured_) {
// Allow smaller padding packets for audio.
padding_bytes_in_packet = rtc::SafeClamp<size_t>(
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.h
index a14c3ae1a82..dd291f8d59a 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.h
@@ -25,10 +25,10 @@
#include "modules/rtp_rtcp/include/flexfec_sender.h"
#include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
#include "modules/rtp_rtcp/include/rtp_packet_sender.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/rtp_rtcp/source/rtp_packet_history.h"
#include "modules/rtp_rtcp/source/rtp_rtcp_config.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h"
#include "rtc_base/constructor_magic.h"
#include "rtc_base/critical_section.h"
#include "rtc_base/deprecation.h"
@@ -45,105 +45,133 @@ class RtpPacketToSend;
class RTPSender {
public:
- RTPSender(const RtpRtcp::Configuration& config,
+ RTPSender(const RtpRtcpInterface::Configuration& config,
RtpPacketHistory* packet_history,
RtpPacketSender* packet_sender);
~RTPSender();
- void SetSendingMediaStatus(bool enabled);
- bool SendingMedia() const;
- bool IsAudioConfigured() const;
+ void SetSendingMediaStatus(bool enabled) RTC_LOCKS_EXCLUDED(send_critsect_);
+ bool SendingMedia() const RTC_LOCKS_EXCLUDED(send_critsect_);
+ bool IsAudioConfigured() const RTC_LOCKS_EXCLUDED(send_critsect_);
- uint32_t TimestampOffset() const;
- void SetTimestampOffset(uint32_t timestamp);
+ uint32_t TimestampOffset() const RTC_LOCKS_EXCLUDED(send_critsect_);
+ void SetTimestampOffset(uint32_t timestamp)
+ RTC_LOCKS_EXCLUDED(send_critsect_);
- void SetRid(const std::string& rid);
+ void SetRid(const std::string& rid) RTC_LOCKS_EXCLUDED(send_critsect_);
- void SetMid(const std::string& mid);
+ void SetMid(const std::string& mid) RTC_LOCKS_EXCLUDED(send_critsect_);
- uint16_t SequenceNumber() const;
- void SetSequenceNumber(uint16_t seq);
+ uint16_t SequenceNumber() const RTC_LOCKS_EXCLUDED(send_critsect_);
+ void SetSequenceNumber(uint16_t seq) RTC_LOCKS_EXCLUDED(send_critsect_);
- void SetCsrcs(const std::vector<uint32_t>& csrcs);
+ void SetCsrcs(const std::vector<uint32_t>& csrcs)
+ RTC_LOCKS_EXCLUDED(send_critsect_);
- void SetMaxRtpPacketSize(size_t max_packet_size);
+ void SetMaxRtpPacketSize(size_t max_packet_size)
+ RTC_LOCKS_EXCLUDED(send_critsect_);
- void SetExtmapAllowMixed(bool extmap_allow_mixed);
+ void SetExtmapAllowMixed(bool extmap_allow_mixed)
+ RTC_LOCKS_EXCLUDED(send_critsect_);
// RTP header extension
- int32_t RegisterRtpHeaderExtension(RTPExtensionType type, uint8_t id);
- bool RegisterRtpHeaderExtension(absl::string_view uri, int id);
- bool IsRtpHeaderExtensionRegistered(RTPExtensionType type) const;
- int32_t DeregisterRtpHeaderExtension(RTPExtensionType type);
- void DeregisterRtpHeaderExtension(absl::string_view uri);
-
- bool SupportsPadding() const;
- bool SupportsRtxPayloadPadding() const;
+ int32_t RegisterRtpHeaderExtension(RTPExtensionType type, uint8_t id)
+ RTC_LOCKS_EXCLUDED(send_critsect_);
+ bool RegisterRtpHeaderExtension(absl::string_view uri, int id)
+ RTC_LOCKS_EXCLUDED(send_critsect_);
+ bool IsRtpHeaderExtensionRegistered(RTPExtensionType type) const
+ RTC_LOCKS_EXCLUDED(send_critsect_);
+ int32_t DeregisterRtpHeaderExtension(RTPExtensionType type)
+ RTC_LOCKS_EXCLUDED(send_critsect_);
+ void DeregisterRtpHeaderExtension(absl::string_view uri)
+ RTC_LOCKS_EXCLUDED(send_critsect_);
+
+ bool SupportsPadding() const RTC_LOCKS_EXCLUDED(send_critsect_);
+ bool SupportsRtxPayloadPadding() const RTC_LOCKS_EXCLUDED(send_critsect_);
std::vector<std::unique_ptr<RtpPacketToSend>> GeneratePadding(
size_t target_size_bytes,
- bool media_has_been_sent);
+ bool media_has_been_sent) RTC_LOCKS_EXCLUDED(send_critsect_);
// NACK.
void OnReceivedNack(const std::vector<uint16_t>& nack_sequence_numbers,
- int64_t avg_rtt);
+ int64_t avg_rtt) RTC_LOCKS_EXCLUDED(send_critsect_);
- int32_t ReSendPacket(uint16_t packet_id);
+ int32_t ReSendPacket(uint16_t packet_id) RTC_LOCKS_EXCLUDED(send_critsect_);
// ACK.
- void OnReceivedAckOnSsrc(int64_t extended_highest_sequence_number);
- void OnReceivedAckOnRtxSsrc(int64_t extended_highest_sequence_number);
+ void OnReceivedAckOnSsrc(int64_t extended_highest_sequence_number)
+ RTC_LOCKS_EXCLUDED(send_critsect_);
+ void OnReceivedAckOnRtxSsrc(int64_t extended_highest_sequence_number)
+ RTC_LOCKS_EXCLUDED(send_critsect_);
// RTX.
- void SetRtxStatus(int mode);
- int RtxStatus() const;
- absl::optional<uint32_t> RtxSsrc() const { return rtx_ssrc_; }
+ void SetRtxStatus(int mode) RTC_LOCKS_EXCLUDED(send_critsect_);
+ int RtxStatus() const RTC_LOCKS_EXCLUDED(send_critsect_);
+ absl::optional<uint32_t> RtxSsrc() const RTC_LOCKS_EXCLUDED(send_critsect_) {
+ return rtx_ssrc_;
+ }
- void SetRtxPayloadType(int payload_type, int associated_payload_type);
+ void SetRtxPayloadType(int payload_type, int associated_payload_type)
+ RTC_LOCKS_EXCLUDED(send_critsect_);
// Size info for header extensions used by FEC packets.
- static rtc::ArrayView<const RtpExtensionSize> FecExtensionSizes();
+ static rtc::ArrayView<const RtpExtensionSize> FecExtensionSizes()
+ RTC_LOCKS_EXCLUDED(send_critsect_);
// Size info for header extensions used by video packets.
- static rtc::ArrayView<const RtpExtensionSize> VideoExtensionSizes();
+ static rtc::ArrayView<const RtpExtensionSize> VideoExtensionSizes()
+ RTC_LOCKS_EXCLUDED(send_critsect_);
// Size info for header extensions used by audio packets.
- static rtc::ArrayView<const RtpExtensionSize> AudioExtensionSizes();
+ static rtc::ArrayView<const RtpExtensionSize> AudioExtensionSizes()
+ RTC_LOCKS_EXCLUDED(send_critsect_);
// Create empty packet, fills ssrc, csrcs and reserve place for header
// extensions RtpSender updates before sending.
- std::unique_ptr<RtpPacketToSend> AllocatePacket() const;
+ std::unique_ptr<RtpPacketToSend> AllocatePacket() const
+ RTC_LOCKS_EXCLUDED(send_critsect_);
// Allocate sequence number for provided packet.
// Save packet's fields to generate padding that doesn't break media stream.
// Return false if sending was turned off.
- bool AssignSequenceNumber(RtpPacketToSend* packet);
+ bool AssignSequenceNumber(RtpPacketToSend* packet)
+ RTC_LOCKS_EXCLUDED(send_critsect_);
// Maximum header overhead per fec/padding packet.
- size_t FecOrPaddingPacketMaxRtpHeaderLength() const;
+ size_t FecOrPaddingPacketMaxRtpHeaderLength() const
+ RTC_LOCKS_EXCLUDED(send_critsect_);
// Expected header overhead per media packet.
- size_t ExpectedPerPacketOverhead() const;
- uint16_t AllocateSequenceNumber(uint16_t packets_to_send);
+ size_t ExpectedPerPacketOverhead() const RTC_LOCKS_EXCLUDED(send_critsect_);
+ uint16_t AllocateSequenceNumber(uint16_t packets_to_send)
+ RTC_LOCKS_EXCLUDED(send_critsect_);
// Including RTP headers.
- size_t MaxRtpPacketSize() const;
+ size_t MaxRtpPacketSize() const RTC_LOCKS_EXCLUDED(send_critsect_);
- uint32_t SSRC() const { return ssrc_; }
+ uint32_t SSRC() const RTC_LOCKS_EXCLUDED(send_critsect_) { return ssrc_; }
- absl::optional<uint32_t> FlexfecSsrc() const { return flexfec_ssrc_; }
+ absl::optional<uint32_t> FlexfecSsrc() const
+ RTC_LOCKS_EXCLUDED(send_critsect_) {
+ return flexfec_ssrc_;
+ }
// Sends packet to |transport_| or to the pacer, depending on configuration.
// TODO(bugs.webrtc.org/XXX): Remove in favor of EnqueuePackets().
- bool SendToNetwork(std::unique_ptr<RtpPacketToSend> packet);
+ bool SendToNetwork(std::unique_ptr<RtpPacketToSend> packet)
+ RTC_LOCKS_EXCLUDED(send_critsect_);
// Pass a set of packets to RtpPacketSender instance, for paced or immediate
// sending to the network.
- void EnqueuePackets(std::vector<std::unique_ptr<RtpPacketToSend>> packets);
+ void EnqueuePackets(std::vector<std::unique_ptr<RtpPacketToSend>> packets)
+ RTC_LOCKS_EXCLUDED(send_critsect_);
- void SetRtpState(const RtpState& rtp_state);
- RtpState GetRtpState() const;
- void SetRtxRtpState(const RtpState& rtp_state);
- RtpState GetRtxRtpState() const;
+ void SetRtpState(const RtpState& rtp_state)
+ RTC_LOCKS_EXCLUDED(send_critsect_);
+ RtpState GetRtpState() const RTC_LOCKS_EXCLUDED(send_critsect_);
+ void SetRtxRtpState(const RtpState& rtp_state)
+ RTC_LOCKS_EXCLUDED(send_critsect_);
+ RtpState GetRtxRtpState() const RTC_LOCKS_EXCLUDED(send_critsect_);
- int64_t LastTimestampTimeMs() const;
+ int64_t LastTimestampTimeMs() const RTC_LOCKS_EXCLUDED(send_critsect_);
private:
std::unique_ptr<RtpPacketToSend> BuildRtxPacket(
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio_unittest.cc
index 3e35f42bffe..1583ab04c03 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio_unittest.cc
@@ -18,6 +18,7 @@
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "modules/rtp_rtcp/source/time_util.h"
#include "test/gmock.h"
#include "test/gtest.h"
@@ -67,8 +68,8 @@ class RtpSenderAudioTest : public ::testing::Test {
public:
RtpSenderAudioTest()
: fake_clock_(kStartTime),
- rtp_module_(RtpRtcp::Create([&] {
- RtpRtcp::Configuration config;
+ rtp_module_(ModuleRtpRtcpImpl2::Create([&] {
+ RtpRtcpInterface::Configuration config;
config.audio = true;
config.clock = &fake_clock_;
config.outgoing_transport = &transport_;
@@ -81,7 +82,7 @@ class RtpSenderAudioTest : public ::testing::Test {
SimulatedClock fake_clock_;
LoopbackTransportTest transport_;
- std::unique_ptr<RtpRtcp> rtp_module_;
+ std::unique_ptr<ModuleRtpRtcpImpl2> rtp_module_;
RTPSenderAudio rtp_sender_audio_;
};
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_egress.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_egress.cc
index 6d5477be213..c309fc3f0dd 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_egress.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_egress.cc
@@ -53,7 +53,7 @@ void RtpSenderEgress::NonPacedPacketSender::EnqueuePackets(
}
}
-RtpSenderEgress::RtpSenderEgress(const RtpRtcp::Configuration& config,
+RtpSenderEgress::RtpSenderEgress(const RtpRtcpInterface::Configuration& config,
RtpPacketHistory* packet_history)
: ssrc_(config.local_media_ssrc),
rtx_ssrc_(config.rtx_send_ssrc),
@@ -84,7 +84,9 @@ RtpSenderEgress::RtpSenderEgress(const RtpRtcp::Configuration& config,
rtp_sequence_number_map_(need_rtp_packet_infos_
? std::make_unique<RtpSequenceNumberMap>(
kRtpSequenceNumberMapMaxEntries)
- : nullptr) {}
+ : nullptr) {
+ RTC_DCHECK(TaskQueueBase::Current());
+}
void RtpSenderEgress::SendPacket(RtpPacketToSend* packet,
const PacedPacketInfo& pacing_info) {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_egress.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_egress.h
index c9ecde3be8b..a8e033c5bf1 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_egress.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_egress.h
@@ -19,10 +19,10 @@
#include "api/call/transport.h"
#include "api/rtc_event_log/rtc_event_log.h"
#include "api/units/data_rate.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/rtp_rtcp/source/rtp_packet_history.h"
#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h"
#include "modules/rtp_rtcp/source/rtp_sequence_number_map.h"
#include "rtc_base/critical_section.h"
#include "rtc_base/rate_statistics.h"
@@ -47,7 +47,7 @@ class RtpSenderEgress {
RtpSenderEgress* const sender_;
};
- RtpSenderEgress(const RtpRtcp::Configuration& config,
+ RtpSenderEgress(const RtpRtcpInterface::Configuration& config,
RtpPacketHistory* packet_history);
~RtpSenderEgress() = default;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc
index 65e2e04ef44..12055b5b1ca 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc
@@ -149,33 +149,32 @@ class MockRtpPacketPacer : public RtpPacketSender {
MockRtpPacketPacer() {}
virtual ~MockRtpPacketPacer() {}
- MOCK_METHOD1(EnqueuePackets,
- void(std::vector<std::unique_ptr<RtpPacketToSend>>));
-
- MOCK_METHOD2(CreateProbeCluster, void(int bitrate_bps, int cluster_id));
-
- MOCK_METHOD0(Pause, void());
- MOCK_METHOD0(Resume, void());
- MOCK_METHOD1(SetCongestionWindow,
- void(absl::optional<int64_t> congestion_window_bytes));
- MOCK_METHOD1(UpdateOutstandingData, void(int64_t outstanding_bytes));
- MOCK_METHOD1(SetAccountForAudioPackets, void(bool account_for_audio));
+ MOCK_METHOD(void,
+ EnqueuePackets,
+ (std::vector<std::unique_ptr<RtpPacketToSend>>),
+ (override));
};
class MockSendSideDelayObserver : public SendSideDelayObserver {
public:
- MOCK_METHOD4(SendSideDelayUpdated, void(int, int, uint64_t, uint32_t));
+ MOCK_METHOD(void,
+ SendSideDelayUpdated,
+ (int, int, uint64_t, uint32_t),
+ (override));
};
class MockSendPacketObserver : public SendPacketObserver {
public:
- MOCK_METHOD3(OnSendPacket, void(uint16_t, int64_t, uint32_t));
+ MOCK_METHOD(void, OnSendPacket, (uint16_t, int64_t, uint32_t), (override));
};
class MockTransportFeedbackObserver : public TransportFeedbackObserver {
public:
- MOCK_METHOD1(OnAddPacket, void(const RtpPacketSendInfo&));
- MOCK_METHOD1(OnTransportFeedback, void(const rtcp::TransportFeedback&));
+ MOCK_METHOD(void, OnAddPacket, (const RtpPacketSendInfo&), (override));
+ MOCK_METHOD(void,
+ OnTransportFeedback,
+ (const rtcp::TransportFeedback&),
+ (override));
};
class StreamDataTestCallback : public StreamDataCountersCallback {
@@ -213,7 +212,7 @@ class StreamDataTestCallback : public StreamDataCountersCallback {
// TODO(sprang): Split up unit tests and test these components individually
// wherever possible.
struct RtpSenderContext {
- explicit RtpSenderContext(const RtpRtcp::Configuration& config)
+ explicit RtpSenderContext(const RtpRtcpInterface::Configuration& config)
: packet_history_(config.clock, config.enable_rtx_padding_prioritization),
packet_sender_(config, &packet_history_),
non_paced_sender_(&packet_sender_),
@@ -286,7 +285,7 @@ class RtpSenderTest : public ::testing::TestWithParam<TestConfig> {
void SetUpRtpSender(bool pacer,
bool populate_network2,
bool always_send_mid_and_rid) {
- RtpRtcp::Configuration config;
+ RtpRtcpInterface::Configuration config;
config.clock = &fake_clock_;
config.outgoing_transport = &transport_;
config.local_media_ssrc = kSsrc;
@@ -482,7 +481,7 @@ TEST_P(RtpSenderTestWithoutPacer, AssignSequenceNumberMayAllowPaddingOnVideo) {
TEST_P(RtpSenderTest, AssignSequenceNumberAllowsPaddingOnAudio) {
MockTransport transport;
- RtpRtcp::Configuration config;
+ RtpRtcpInterface::Configuration config;
config.audio = true;
config.clock = &fake_clock_;
config.outgoing_transport = &transport;
@@ -532,7 +531,7 @@ TEST_P(RtpSenderTestWithoutPacer,
TransportFeedbackObserverGetsCorrectByteCount) {
constexpr size_t kRtpOverheadBytesPerPacket = 12 + 8;
- RtpRtcp::Configuration config;
+ RtpRtcpInterface::Configuration config;
config.clock = &fake_clock_;
config.outgoing_transport = &transport_;
config.local_media_ssrc = kSsrc;
@@ -567,7 +566,7 @@ TEST_P(RtpSenderTestWithoutPacer,
}
TEST_P(RtpSenderTestWithoutPacer, SendsPacketsWithTransportSequenceNumber) {
- RtpRtcp::Configuration config;
+ RtpRtcpInterface::Configuration config;
config.clock = &fake_clock_;
config.outgoing_transport = &transport_;
config.local_media_ssrc = kSsrc;
@@ -606,7 +605,7 @@ TEST_P(RtpSenderTestWithoutPacer, SendsPacketsWithTransportSequenceNumber) {
}
TEST_P(RtpSenderTestWithoutPacer, PacketOptionsNoRetransmission) {
- RtpRtcp::Configuration config;
+ RtpRtcpInterface::Configuration config;
config.clock = &fake_clock_;
config.outgoing_transport = &transport_;
config.local_media_ssrc = kSsrc;
@@ -661,7 +660,7 @@ TEST_P(RtpSenderTestWithoutPacer, DoesnSetIncludedInAllocationByDefault) {
TEST_P(RtpSenderTestWithoutPacer, OnSendSideDelayUpdated) {
StrictMock<MockSendSideDelayObserver> send_side_delay_observer_;
- RtpRtcp::Configuration config;
+ RtpRtcpInterface::Configuration config;
config.clock = &fake_clock_;
config.outgoing_transport = &transport_;
config.local_media_ssrc = kSsrc;
@@ -748,7 +747,7 @@ TEST_P(RtpSenderTestWithoutPacer, OnSendPacketUpdated) {
}
TEST_P(RtpSenderTest, SendsPacketsWithTransportSequenceNumber) {
- RtpRtcp::Configuration config;
+ RtpRtcpInterface::Configuration config;
config.clock = &fake_clock_;
config.outgoing_transport = &transport_;
config.paced_sender = &mock_paced_sender_;
@@ -1240,7 +1239,7 @@ TEST_P(RtpSenderTest, SendFlexfecPackets) {
nullptr /* rtp_state */, &fake_clock_);
// Reset |rtp_sender_| to use FlexFEC.
- RtpRtcp::Configuration config;
+ RtpRtcpInterface::Configuration config;
config.clock = &fake_clock_;
config.outgoing_transport = &transport_;
config.paced_sender = &mock_paced_sender_;
@@ -1329,7 +1328,7 @@ TEST_P(RtpSenderTestWithoutPacer, SendFlexfecPackets) {
nullptr /* rtp_state */, &fake_clock_);
// Reset |rtp_sender_| to use FlexFEC.
- RtpRtcp::Configuration config;
+ RtpRtcpInterface::Configuration config;
config.clock = &fake_clock_;
config.outgoing_transport = &transport_;
config.local_media_ssrc = kSsrc;
@@ -1662,7 +1661,7 @@ TEST_P(RtpSenderTest, FecOverheadRate) {
nullptr /* rtp_state */, &fake_clock_);
// Reset |rtp_sender_| to use FlexFEC.
- RtpRtcp::Configuration config;
+ RtpRtcpInterface::Configuration config;
config.clock = &fake_clock_;
config.outgoing_transport = &transport_;
config.paced_sender = &mock_paced_sender_;
@@ -1743,7 +1742,7 @@ TEST_P(RtpSenderTest, BitrateCallbacks) {
uint32_t retransmit_bitrate_;
} callback;
- RtpRtcp::Configuration config;
+ RtpRtcpInterface::Configuration config;
config.clock = &fake_clock_;
config.outgoing_transport = &transport_;
config.local_media_ssrc = kSsrc;
@@ -1971,7 +1970,7 @@ TEST_P(RtpSenderTestWithoutPacer, RespectsNackBitrateLimit) {
}
TEST_P(RtpSenderTest, UpdatingCsrcsUpdatedOverhead) {
- RtpRtcp::Configuration config;
+ RtpRtcpInterface::Configuration config;
config.clock = &fake_clock_;
config.outgoing_transport = &transport_;
config.local_media_ssrc = kSsrc;
@@ -1987,7 +1986,7 @@ TEST_P(RtpSenderTest, UpdatingCsrcsUpdatedOverhead) {
}
TEST_P(RtpSenderTest, OnOverheadChanged) {
- RtpRtcp::Configuration config;
+ RtpRtcpInterface::Configuration config;
config.clock = &fake_clock_;
config.outgoing_transport = &transport_;
config.local_media_ssrc = kSsrc;
@@ -2006,7 +2005,7 @@ TEST_P(RtpSenderTest, OnOverheadChanged) {
}
TEST_P(RtpSenderTest, CountMidOnlyUntilAcked) {
- RtpRtcp::Configuration config;
+ RtpRtcpInterface::Configuration config;
config.clock = &fake_clock_;
config.outgoing_transport = &transport_;
config.local_media_ssrc = kSsrc;
@@ -2033,7 +2032,7 @@ TEST_P(RtpSenderTest, CountMidOnlyUntilAcked) {
}
TEST_P(RtpSenderTest, DontCountVolatileExtensionsIntoOverhead) {
- RtpRtcp::Configuration config;
+ RtpRtcpInterface::Configuration config;
config.clock = &fake_clock_;
config.outgoing_transport = &transport_;
config.local_media_ssrc = kSsrc;
@@ -2251,7 +2250,7 @@ TEST_P(RtpSenderTest, SendPacketUpdatesStats) {
StrictMock<MockSendSideDelayObserver> send_side_delay_observer;
- RtpRtcp::Configuration config;
+ RtpRtcpInterface::Configuration config;
config.clock = &fake_clock_;
config.outgoing_transport = &transport_;
config.local_media_ssrc = kSsrc;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc
index 02c4eb8d286..58a86996886 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc
@@ -145,7 +145,7 @@ RTPSenderVideo::RTPSenderVideo(const Config& config)
this,
config.frame_transformer,
rtp_sender_->SSRC(),
- config.worker_queue)
+ config.send_transport_queue)
: nullptr) {
if (frame_transformer_delegate_)
frame_transformer_delegate_->Init();
@@ -252,8 +252,6 @@ void RTPSenderVideo::SetVideoStructureUnderLock(
video_structure_ =
std::make_unique<FrameDependencyStructure>(*video_structure);
video_structure_->structure_id = structure_id;
- // TODO(bugs.webrtc.org/10342): Support chains.
- video_structure_->num_chains = 0;
}
void RTPSenderVideo::AddRtpHeaderExtensions(
@@ -314,14 +312,6 @@ void RTPSenderVideo::AddRtpHeaderExtensions(
packet->SetExtension<AbsoluteCaptureTimeExtension>(*absolute_capture_time);
}
- if (video_header.codec == kVideoCodecH264 &&
- video_header.frame_marking.temporal_id != kNoTemporalIdx) {
- FrameMarking frame_marking = video_header.frame_marking;
- frame_marking.start_of_frame = first_packet;
- frame_marking.end_of_frame = last_packet;
- packet->SetExtension<FrameMarkingExtension>(frame_marking);
- }
-
if (video_header.generic) {
bool extension_is_set = false;
if (video_structure_ != nullptr) {
@@ -337,6 +327,8 @@ void RTPSenderVideo::AddRtpHeaderExtensions(
descriptor.frame_dependencies.frame_diffs.push_back(
video_header.generic->frame_id - dep);
}
+ descriptor.frame_dependencies.chain_diffs =
+ video_header.generic->chain_diffs;
descriptor.frame_dependencies.decode_target_indications =
video_header.generic->decode_target_indications;
RTC_DCHECK_EQ(
@@ -736,12 +728,7 @@ uint8_t RTPSenderVideo::GetTemporalId(const RTPVideoHeader& header) {
}
uint8_t operator()(const absl::monostate&) { return kNoTemporalIdx; }
};
- switch (header.codec) {
- case kVideoCodecH264:
- return header.frame_marking.temporal_id;
- default:
- return absl::visit(TemporalIdGetter(), header.video_type_header);
- }
+ return absl::visit(TemporalIdGetter(), header.video_type_header);
}
bool RTPSenderVideo::UpdateConditionalRetransmit(
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h
index 216f16faf6a..699734efa31 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h
@@ -82,7 +82,7 @@ class RTPSenderVideo {
absl::optional<int> red_payload_type;
const WebRtcKeyValueConfig* field_trials = nullptr;
rtc::scoped_refptr<FrameTransformerInterface> frame_transformer;
- TaskQueueBase* worker_queue = nullptr;
+ TaskQueueBase* send_transport_queue = nullptr;
};
explicit RTPSenderVideo(const Config& config);
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc
index 51fbdb01655..a4cacef520f 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc
@@ -43,6 +43,7 @@ class TransformableVideoSenderFrame : public TransformableVideoFrameInterface {
uint32_t ssrc)
: encoded_data_(encoded_image.GetEncodedData()),
header_(video_header),
+ metadata_(header_),
frame_type_(encoded_image._frameType),
payload_type_(payload_type),
codec_type_(codec_type),
@@ -75,6 +76,8 @@ class TransformableVideoSenderFrame : public TransformableVideoFrameInterface {
return RtpDescriptorAuthentication(header_);
}
+ const VideoFrameMetadata& GetMetadata() const override { return metadata_; }
+
const RTPVideoHeader& GetHeader() const { return header_; }
int GetPayloadType() const { return payload_type_; }
absl::optional<VideoCodecType> GetCodecType() const { return codec_type_; }
@@ -91,6 +94,7 @@ class TransformableVideoSenderFrame : public TransformableVideoFrameInterface {
private:
rtc::scoped_refptr<EncodedImageBufferInterface> encoded_data_;
const RTPVideoHeader header_;
+ const VideoFrameMetadata metadata_;
const VideoFrameType frame_type_;
const int payload_type_;
const absl::optional<VideoCodecType> codec_type_ = absl::nullopt;
@@ -106,11 +110,11 @@ RTPSenderVideoFrameTransformerDelegate::RTPSenderVideoFrameTransformerDelegate(
RTPSenderVideo* sender,
rtc::scoped_refptr<FrameTransformerInterface> frame_transformer,
uint32_t ssrc,
- TaskQueueBase* worker_queue)
+ TaskQueueBase* send_transport_queue)
: sender_(sender),
frame_transformer_(std::move(frame_transformer)),
ssrc_(ssrc),
- worker_queue_(worker_queue) {}
+ send_transport_queue_(send_transport_queue) {}
void RTPSenderVideoFrameTransformerDelegate::Init() {
frame_transformer_->RegisterTransformedFrameSinkCallback(
@@ -129,9 +133,9 @@ bool RTPSenderVideoFrameTransformerDelegate::TransformFrame(
// Save the current task queue to post the transformed frame for sending
// once it is transformed. When there is no current task queue, i.e.
// encoding is done on an external thread (for example in the case of
- // hardware encoders), use the worker queue instead.
+ // hardware encoders), use the send transport queue instead.
TaskQueueBase* current = TaskQueueBase::Current();
- encoder_queue_ = current ? current : worker_queue_;
+ encoder_queue_ = current ? current : send_transport_queue_;
}
frame_transformer_->Transform(std::make_unique<TransformableVideoSenderFrame>(
encoded_image, video_header, payload_type, codec_type, rtp_timestamp,
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h
index bea5ba7b65a..5beba7770b5 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h
@@ -31,7 +31,7 @@ class RTPSenderVideoFrameTransformerDelegate : public TransformedFrameCallback {
RTPSenderVideo* sender,
rtc::scoped_refptr<FrameTransformerInterface> frame_transformer,
uint32_t ssrc,
- TaskQueueBase* worker_queue);
+ TaskQueueBase* send_transport_queue);
void Init();
@@ -70,7 +70,7 @@ class RTPSenderVideoFrameTransformerDelegate : public TransformedFrameCallback {
rtc::scoped_refptr<FrameTransformerInterface> frame_transformer_;
const uint32_t ssrc_;
TaskQueueBase* encoder_queue_ = nullptr;
- TaskQueueBase* worker_queue_;
+ TaskQueueBase* send_transport_queue_;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video_unittest.cc
index 21c4da05abd..32e138f8405 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video_unittest.cc
@@ -15,6 +15,7 @@
#include <utility>
#include <vector>
+#include "absl/memory/memory.h"
#include "api/test/mock_frame_encryptor.h"
#include "api/transport/field_trial_based_config.h"
#include "api/transport/rtp/dependency_descriptor.h"
@@ -23,7 +24,6 @@
#include "common_video/generic_frame_descriptor/generic_frame_info.h"
#include "modules/rtp_rtcp/include/rtp_cvo.h"
#include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h"
#include "modules/rtp_rtcp/source/rtp_descriptor_authentication.h"
@@ -32,6 +32,7 @@
#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h"
#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "modules/rtp_rtcp/source/time_util.h"
#include "rtc_base/arraysize.h"
#include "rtc_base/rate_limiter.h"
@@ -45,6 +46,7 @@ namespace webrtc {
namespace {
using ::testing::_;
+using ::testing::ContainerEq;
using ::testing::ElementsAre;
using ::testing::ElementsAreArray;
using ::testing::IsEmpty;
@@ -57,7 +59,6 @@ using ::testing::WithArgs;
enum : int { // The first valid value is 1.
kAbsoluteSendTimeExtensionId = 1,
- kFrameMarkingExtensionId,
kGenericDescriptorId,
kDependencyDescriptorId,
kTransmissionTimeOffsetExtensionId,
@@ -93,8 +94,6 @@ class LoopbackTransportTest : public webrtc::Transport {
kGenericDescriptorId);
receivers_extensions_.Register<RtpDependencyDescriptorExtension>(
kDependencyDescriptorId);
- receivers_extensions_.Register<FrameMarkingExtension>(
- kFrameMarkingExtensionId);
receivers_extensions_.Register<AbsoluteCaptureTimeExtension>(
kAbsoluteCaptureTimeExtensionId);
receivers_extensions_.Register<PlayoutDelayLimits>(
@@ -167,8 +166,8 @@ class RtpSenderVideoTest : public ::testing::TestWithParam<bool> {
: field_trials_(GetParam()),
fake_clock_(kStartTime),
retransmission_rate_limiter_(&fake_clock_, 1000),
- rtp_module_(RtpRtcp::Create([&] {
- RtpRtcp::Configuration config;
+ rtp_module_(ModuleRtpRtcpImpl2::Create([&] {
+ RtpRtcpInterface::Configuration config;
config.clock = &fake_clock_;
config.outgoing_transport = &transport_;
config.retransmission_rate_limiter = &retransmission_rate_limiter_;
@@ -188,12 +187,12 @@ class RtpSenderVideoTest : public ::testing::TestWithParam<bool> {
int version);
protected:
- const RtpRtcp::Configuration config_;
+ const RtpRtcpInterface::Configuration config_;
FieldTrials field_trials_;
SimulatedClock fake_clock_;
LoopbackTransportTest transport_;
RateLimiter retransmission_rate_limiter_;
- std::unique_ptr<RtpRtcp> rtp_module_;
+ std::unique_ptr<ModuleRtpRtcpImpl2> rtp_module_;
TestRtpSenderVideo rtp_sender_video_;
};
@@ -289,43 +288,6 @@ TEST_P(RtpSenderVideoTest, DeltaFrameHasCVOWhenNonZero) {
EXPECT_EQ(kVideoRotation_90, rotation);
}
-TEST_P(RtpSenderVideoTest, CheckH264FrameMarking) {
- uint8_t kFrame[kMaxPacketLength];
- rtp_module_->RegisterRtpHeaderExtension(FrameMarkingExtension::kUri,
- kFrameMarkingExtensionId);
-
- RTPFragmentationHeader frag;
- frag.VerifyAndAllocateFragmentationHeader(1);
- frag.fragmentationOffset[0] = 0;
- frag.fragmentationLength[0] = sizeof(kFrame);
-
- RTPVideoHeader hdr;
- hdr.video_type_header.emplace<RTPVideoHeaderH264>().packetization_mode =
- H264PacketizationMode::NonInterleaved;
- hdr.codec = kVideoCodecH264;
- hdr.frame_marking.temporal_id = kNoTemporalIdx;
- hdr.frame_marking.tl0_pic_idx = 99;
- hdr.frame_marking.base_layer_sync = true;
- hdr.frame_type = VideoFrameType::kVideoFrameDelta;
- rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, &frag,
- hdr, kDefaultExpectedRetransmissionTimeMs);
-
- FrameMarking fm;
- EXPECT_FALSE(
- transport_.last_sent_packet().GetExtension<FrameMarkingExtension>(&fm));
-
- hdr.frame_marking.temporal_id = 0;
- hdr.frame_type = VideoFrameType::kVideoFrameDelta;
- rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp + 1, 0, kFrame, &frag,
- hdr, kDefaultExpectedRetransmissionTimeMs);
-
- EXPECT_TRUE(
- transport_.last_sent_packet().GetExtension<FrameMarkingExtension>(&fm));
- EXPECT_EQ(hdr.frame_marking.temporal_id, fm.temporal_id);
- EXPECT_EQ(hdr.frame_marking.tl0_pic_idx, fm.tl0_pic_idx);
- EXPECT_EQ(hdr.frame_marking.base_layer_sync, fm.base_layer_sync);
-}
-
// Make sure rotation is parsed correctly when the Camera (C) and Flip (F) bits
// are set in the CVO byte.
TEST_P(RtpSenderVideoTest, SendVideoWithCameraAndFlipCVO) {
@@ -367,7 +329,6 @@ TEST_P(RtpSenderVideoTest, RetransmissionTypesH264) {
header.video_type_header.emplace<RTPVideoHeaderH264>().packetization_mode =
H264PacketizationMode::NonInterleaved;
header.codec = kVideoCodecH264;
- header.frame_marking.temporal_id = kNoTemporalIdx;
EXPECT_FALSE(rtp_sender_video_.AllowRetransmission(
header, kRetransmitOff, kDefaultExpectedRetransmissionTimeMs));
@@ -378,14 +339,6 @@ TEST_P(RtpSenderVideoTest, RetransmissionTypesH264) {
EXPECT_TRUE(rtp_sender_video_.AllowRetransmission(
header, kConditionallyRetransmitHigherLayers,
kDefaultExpectedRetransmissionTimeMs));
-
- // Test higher level retransmit.
- for (int tid = 0; tid <= kMaxTemporalStreams; ++tid) {
- header.frame_marking.temporal_id = tid;
- EXPECT_TRUE(rtp_sender_video_.AllowRetransmission(
- header, kRetransmitHigherLayers | kRetransmitBaseLayer,
- kDefaultExpectedRetransmissionTimeMs));
- }
}
TEST_P(RtpSenderVideoTest, RetransmissionTypesVP8BaseLayer) {
@@ -546,9 +499,9 @@ TEST_P(RtpSenderVideoTest, SendsDependencyDescriptorWhenVideoStructureIsSet) {
FrameDependencyStructure video_structure;
video_structure.num_decode_targets = 2;
video_structure.templates = {
- GenericFrameInfo::Builder().S(0).T(0).Dtis("SS").Build(),
- GenericFrameInfo::Builder().S(1).T(0).Dtis("-S").Build(),
- GenericFrameInfo::Builder().S(1).T(1).Dtis("-D").Build(),
+ FrameDependencyTemplate().S(0).T(0).Dtis("SS"),
+ FrameDependencyTemplate().S(1).T(0).Dtis("-S"),
+ FrameDependencyTemplate().S(1).T(1).Dtis("-D"),
};
rtp_sender_video_.SetVideoStructure(&video_structure);
@@ -606,6 +559,40 @@ TEST_P(RtpSenderVideoTest, SendsDependencyDescriptorWhenVideoStructureIsSet) {
ElementsAre(1, 501));
}
+TEST_P(RtpSenderVideoTest, PropagatesChainDiffsIntoDependencyDescriptor) {
+ const int64_t kFrameId = 100000;
+ uint8_t kFrame[100];
+ rtp_module_->RegisterRtpHeaderExtension(
+ RtpDependencyDescriptorExtension::kUri, kDependencyDescriptorId);
+ FrameDependencyStructure video_structure;
+ video_structure.num_decode_targets = 2;
+ video_structure.num_chains = 1;
+ // First decode target is protected by the only chain, second one - is not.
+ video_structure.decode_target_protected_by_chain = {0, 1};
+ video_structure.templates = {
+ FrameDependencyTemplate().S(0).T(0).Dtis("SS").ChainDiffs({1}),
+ };
+ rtp_sender_video_.SetVideoStructure(&video_structure);
+
+ RTPVideoHeader hdr;
+ RTPVideoHeader::GenericDescriptorInfo& generic = hdr.generic.emplace();
+ generic.frame_id = kFrameId;
+ generic.decode_target_indications = {DecodeTargetIndication::kSwitch,
+ DecodeTargetIndication::kSwitch};
+ generic.chain_diffs = {2};
+ hdr.frame_type = VideoFrameType::kVideoFrameKey;
+ rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, nullptr,
+ hdr, kDefaultExpectedRetransmissionTimeMs);
+
+ ASSERT_EQ(transport_.packets_sent(), 1);
+ DependencyDescriptor descriptor_key;
+ ASSERT_TRUE(transport_.last_sent_packet()
+ .GetExtension<RtpDependencyDescriptorExtension>(
+ nullptr, &descriptor_key));
+ EXPECT_THAT(descriptor_key.frame_dependencies.chain_diffs,
+ ContainerEq(generic.chain_diffs));
+}
+
TEST_P(RtpSenderVideoTest,
SetDiffentVideoStructureAvoidsCollisionWithThePreviousStructure) {
const int64_t kFrameId = 100000;
@@ -615,14 +602,14 @@ TEST_P(RtpSenderVideoTest,
FrameDependencyStructure video_structure1;
video_structure1.num_decode_targets = 2;
video_structure1.templates = {
- GenericFrameInfo::Builder().S(0).T(0).Dtis("SS").Build(),
- GenericFrameInfo::Builder().S(0).T(1).Dtis("D-").Build(),
+ FrameDependencyTemplate().S(0).T(0).Dtis("SS"),
+ FrameDependencyTemplate().S(0).T(1).Dtis("D-"),
};
FrameDependencyStructure video_structure2;
video_structure2.num_decode_targets = 2;
video_structure2.templates = {
- GenericFrameInfo::Builder().S(0).T(0).Dtis("SS").Build(),
- GenericFrameInfo::Builder().S(0).T(1).Dtis("R-").Build(),
+ FrameDependencyTemplate().S(0).T(0).Dtis("SS"),
+ FrameDependencyTemplate().S(0).T(1).Dtis("R-"),
};
// Send 1st key frame.
@@ -705,7 +692,7 @@ TEST_P(RtpSenderVideoTest,
FrameDependencyStructure video_structure;
video_structure.num_decode_targets = 1;
- video_structure.templates = {GenericFrameInfo::Builder().Dtis("S").Build()};
+ video_structure.templates = {FrameDependencyTemplate().Dtis("S")};
rtp_sender_video.SetVideoStructure(&video_structure);
// Send key frame.
@@ -884,8 +871,8 @@ class RtpSenderVideoWithFrameTransformerTest : public ::testing::Test {
RtpSenderVideoWithFrameTransformerTest()
: fake_clock_(kStartTime),
retransmission_rate_limiter_(&fake_clock_, 1000),
- rtp_module_(RtpRtcp::Create([&] {
- RtpRtcp::Configuration config;
+ rtp_module_(ModuleRtpRtcpImpl2::Create([&] {
+ RtpRtcpInterface::Configuration config;
config.clock = &fake_clock_;
config.outgoing_transport = &transport_;
config.retransmission_rate_limiter = &retransmission_rate_limiter_;
@@ -912,7 +899,7 @@ class RtpSenderVideoWithFrameTransformerTest : public ::testing::Test {
SimulatedClock fake_clock_;
LoopbackTransportTest transport_;
RateLimiter retransmission_rate_limiter_;
- std::unique_ptr<RtpRtcp> rtp_module_;
+ std::unique_ptr<ModuleRtpRtcpImpl2> rtp_module_;
};
std::unique_ptr<EncodedImage> CreateDefaultEncodedImage() {
@@ -989,5 +976,47 @@ TEST_F(RtpSenderVideoWithFrameTransformerTest, OnTransformedFrameSendsVideo) {
EXPECT_EQ(transport_.packets_sent(), 1);
}
+TEST_F(RtpSenderVideoWithFrameTransformerTest,
+ TransformableFrameMetadataHasCorrectValue) {
+ rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer =
+ new rtc::RefCountedObject<NiceMock<MockFrameTransformer>>();
+ std::unique_ptr<RTPSenderVideo> rtp_sender_video =
+ CreateSenderWithFrameTransformer(mock_frame_transformer);
+ auto encoded_image = CreateDefaultEncodedImage();
+ RTPVideoHeader video_header;
+ video_header.width = 1280u;
+ video_header.height = 720u;
+ RTPVideoHeader::GenericDescriptorInfo& generic =
+ video_header.generic.emplace();
+ generic.frame_id = 10;
+ generic.temporal_index = 3;
+ generic.spatial_index = 2;
+ generic.decode_target_indications = {DecodeTargetIndication::kSwitch};
+ generic.dependencies = {5};
+
+ // Check that the transformable frame passed to the frame transformer has the
+ // correct metadata.
+ EXPECT_CALL(*mock_frame_transformer, Transform)
+ .WillOnce(
+ [](std::unique_ptr<TransformableFrameInterface> transformable_frame) {
+ auto frame =
+ absl::WrapUnique(static_cast<TransformableVideoFrameInterface*>(
+ transformable_frame.release()));
+ ASSERT_TRUE(frame);
+ auto metadata = frame->GetMetadata();
+ EXPECT_EQ(metadata.GetWidth(), 1280u);
+ EXPECT_EQ(metadata.GetHeight(), 720u);
+ EXPECT_EQ(metadata.GetFrameId(), 10);
+ EXPECT_EQ(metadata.GetTemporalIndex(), 3);
+ EXPECT_EQ(metadata.GetSpatialIndex(), 2);
+ EXPECT_THAT(metadata.GetFrameDependencies(), ElementsAre(5));
+ EXPECT_THAT(metadata.GetDecodeTargetIndications(),
+ ElementsAre(DecodeTargetIndication::kSwitch));
+ });
+ rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp,
+ *encoded_image, nullptr, video_header,
+ kDefaultExpectedRetransmissionTimeMs);
+}
+
} // namespace
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.cc
index f76d7d0f0b8..c25fd96fa5b 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.cc
@@ -17,7 +17,6 @@
#include "api/array_view.h"
#include "api/video/video_content_type.h"
-#include "api/video/video_frame_marking.h"
#include "api/video/video_rotation.h"
#include "api/video/video_timing.h"
#include "modules/rtp_rtcp/include/rtp_cvo.h"
@@ -245,10 +244,6 @@ bool RtpHeaderParser::Parse(RTPHeader* header,
header->extension.has_video_timing = false;
header->extension.video_timing = {0u, 0u, 0u, 0u, 0u, 0u, false};
- header->extension.has_frame_marking = false;
- header->extension.frame_marking = {false, false, false, false,
- false, kNoTemporalIdx, 0, 0};
-
if (X) {
/* RTP header extension, RFC 3550.
0 1 2 3
@@ -497,15 +492,6 @@ void RtpHeaderParser::ParseOneByteExtensionHeader(
&header->extension.video_timing);
break;
}
- case kRtpExtensionFrameMarking: {
- if (!FrameMarkingExtension::Parse(rtc::MakeArrayView(ptr, len + 1),
- &header->extension.frame_marking)) {
- RTC_LOG(LS_WARNING) << "Incorrect frame marking len: " << len;
- return;
- }
- header->extension.has_frame_marking = true;
- break;
- }
case kRtpExtensionRtpStreamId: {
std::string name(reinterpret_cast<const char*>(ptr), len + 1);
if (IsLegalRsidName(name)) {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_video_header.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_video_header.h
index 7071463be44..514340add63 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_video_header.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_video_header.h
@@ -19,11 +19,10 @@
#include "api/video/color_space.h"
#include "api/video/video_codec_type.h"
#include "api/video/video_content_type.h"
-#include "api/video/video_frame_marking.h"
#include "api/video/video_frame_type.h"
#include "api/video/video_rotation.h"
#include "api/video/video_timing.h"
-#include "common_types.h" // NOLINT(build/include)
+#include "common_types.h" // NOLINT(build/include_directory)
#include "modules/video_coding/codecs/h264/include/h264_globals.h"
#include "modules/video_coding/codecs/vp8/include/vp8_globals.h"
#include "modules/video_coding/codecs/vp9/include/vp9_globals.h"
@@ -53,6 +52,7 @@ struct RTPVideoHeader {
int temporal_index = 0;
absl::InlinedVector<DecodeTargetIndication, 10> decode_target_indications;
absl::InlinedVector<int64_t, 5> dependencies;
+ absl::InlinedVector<int, 4> chain_diffs;
};
RTPVideoHeader();
@@ -74,7 +74,6 @@ struct RTPVideoHeader {
PlayoutDelay playout_delay = {-1, -1};
VideoSendTiming video_timing;
- FrameMarking frame_marking = {false, false, false, false, false, 0xFF, 0, 0};
absl::optional<ColorSpace> color_space;
RTPVideoTypeHeader video_type_header;
};
diff --git a/chromium/third_party/webrtc/modules/utility/source/process_thread_impl_unittest.cc b/chromium/third_party/webrtc/modules/utility/source/process_thread_impl_unittest.cc
index 6f765369f4a..1fef0b67401 100644
--- a/chromium/third_party/webrtc/modules/utility/source/process_thread_impl_unittest.cc
+++ b/chromium/third_party/webrtc/modules/utility/source/process_thread_impl_unittest.cc
@@ -37,9 +37,9 @@ static const int kEventWaitTimeout = 500;
class MockModule : public Module {
public:
- MOCK_METHOD0(TimeUntilNextProcess, int64_t());
- MOCK_METHOD0(Process, void());
- MOCK_METHOD1(ProcessThreadAttached, void(ProcessThread*));
+ MOCK_METHOD(int64_t, TimeUntilNextProcess, (), (override));
+ MOCK_METHOD(void, Process, (), (override));
+ MOCK_METHOD(void, ProcessThreadAttached, (ProcessThread*), (override));
};
class RaiseEventTask : public QueuedTask {
diff --git a/chromium/third_party/webrtc/modules/video_capture/BUILD.gn b/chromium/third_party/webrtc/modules/video_capture/BUILD.gn
index 1c024122642..f73472617c4 100644
--- a/chromium/third_party/webrtc/modules/video_capture/BUILD.gn
+++ b/chromium/third_party/webrtc/modules/video_capture/BUILD.gn
@@ -38,9 +38,9 @@ rtc_library("video_capture_module") {
"../../rtc_base:stringutils",
"../../rtc_base/synchronization:rw_lock_wrapper",
"../../system_wrappers",
- "//third_party/abseil-cpp/absl/strings",
"//third_party/libyuv",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
if (!build_with_chromium) {
@@ -131,13 +131,13 @@ if (!build_with_chromium) {
"../../rtc_base:rtc_base_approved",
"../../system_wrappers",
"../../test:frame_utils",
+ "../../test:test_main",
"../../test:test_support",
"../../test:video_test_common",
"../utility",
"//testing/gtest",
"//third_party/abseil-cpp/absl/memory",
]
- deps += [ "../../test:test_main" ]
}
}
}
diff --git a/chromium/third_party/webrtc/modules/video_capture/linux/device_info_linux.cc b/chromium/third_party/webrtc/modules/video_capture/linux/device_info_linux.cc
index bac5d4078a8..3c8fdd20fad 100644
--- a/chromium/third_party/webrtc/modules/video_capture/linux/device_info_linux.cc
+++ b/chromium/third_party/webrtc/modules/video_capture/linux/device_info_linux.cc
@@ -47,11 +47,19 @@ uint32_t DeviceInfoLinux::NumberOfDevices() {
uint32_t count = 0;
char device[20];
int fd = -1;
+ struct v4l2_capability cap;
/* detect /dev/video [0-63]VideoCaptureModule entries */
for (int n = 0; n < 64; n++) {
sprintf(device, "/dev/video%d", n);
if ((fd = open(device, O_RDONLY)) != -1) {
+ // query device capabilities and make sure this is a video capture device
+ if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0 ||
+ !(cap.device_caps & V4L2_CAP_VIDEO_CAPTURE)) {
+ close(fd);
+ continue;
+ }
+
close(fd);
count++;
}
@@ -74,9 +82,16 @@ int32_t DeviceInfoLinux::GetDeviceName(uint32_t deviceNumber,
char device[20];
int fd = -1;
bool found = false;
+ struct v4l2_capability cap;
for (int n = 0; n < 64; n++) {
sprintf(device, "/dev/video%d", n);
if ((fd = open(device, O_RDONLY)) != -1) {
+ // query device capabilities and make sure this is a video capture device
+ if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0 ||
+ !(cap.device_caps & V4L2_CAP_VIDEO_CAPTURE)) {
+ close(fd);
+ continue;
+ }
if (count == deviceNumber) {
// Found the device
found = true;
@@ -92,7 +107,6 @@ int32_t DeviceInfoLinux::GetDeviceName(uint32_t deviceNumber,
return -1;
// query device capabilities
- struct v4l2_capability cap;
if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
RTC_LOG(LS_INFO) << "error in querying the device capability for device "
<< device << ". errno = " << errno;
@@ -153,6 +167,11 @@ int32_t DeviceInfoLinux::CreateCapabilityMap(const char* deviceUniqueIdUTF8) {
// query device capabilities
struct v4l2_capability cap;
if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0) {
+ // skip devices without video capture capability
+ if (!(cap.device_caps & V4L2_CAP_VIDEO_CAPTURE)) {
+ continue;
+ }
+
if (cap.bus_info[0] != 0) {
if (strncmp((const char*)cap.bus_info, (const char*)deviceUniqueIdUTF8,
strlen((const char*)deviceUniqueIdUTF8)) ==
diff --git a/chromium/third_party/webrtc/modules/video_coding/BUILD.gn b/chromium/third_party/webrtc/modules/video_coding/BUILD.gn
index 41c952694d7..e92649d4e6c 100644
--- a/chromium/third_party/webrtc/modules/video_coding/BUILD.gn
+++ b/chromium/third_party/webrtc/modules/video_coding/BUILD.gn
@@ -6,6 +6,7 @@
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
+import("//third_party/libaom/options.gni")
import("../../webrtc.gni")
rtc_library("encoded_frame") {
@@ -30,11 +31,29 @@ rtc_library("encoded_frame") {
"../../rtc_base/experiments:rtt_mult_experiment",
"../../rtc_base/system:rtc_export",
"../../system_wrappers",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/types:optional",
"//third_party/abseil-cpp/absl/types:variant",
]
}
+rtc_library("chain_diff_calculator") {
+ sources = [
+ "chain_diff_calculator.cc",
+ "chain_diff_calculator.h",
+ ]
+
+ deps = [
+ "../../rtc_base:checks",
+ "../../rtc_base:logging",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/container:inlined_vector",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
rtc_library("frame_dependencies_calculator") {
sources = [
"frame_dependencies_calculator.cc",
@@ -47,6 +66,8 @@ rtc_library("frame_dependencies_calculator") {
"../../common_video/generic_frame_descriptor",
"../../rtc_base:checks",
"../../rtc_base:logging",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/container:inlined_vector",
"//third_party/abseil-cpp/absl/types:optional",
@@ -54,12 +75,11 @@ rtc_library("frame_dependencies_calculator") {
}
rtc_library("nack_module") {
- visibility = [ "*" ]
sources = [
"histogram.cc",
"histogram.h",
- "nack_module.cc",
- "nack_module.h",
+ "nack_module2.cc",
+ "nack_module2.h",
]
deps = [
@@ -69,7 +89,11 @@ rtc_library("nack_module") {
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
"../../rtc_base:rtc_numerics",
+ "../../rtc_base:rtc_task_queue",
"../../rtc_base/experiments:field_trial_parser",
+ "../../rtc_base/synchronization:sequence_checker",
+ "../../rtc_base/task_utils:pending_task_safety_flag",
+ "../../rtc_base/task_utils:repeating_task",
"../../system_wrappers",
"../../system_wrappers:field_trial",
"../utility",
@@ -91,8 +115,13 @@ rtc_library("video_coding") {
"../../system_wrappers:field_trial",
"../../system_wrappers:metrics",
"../rtp_rtcp:rtp_video_header",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/base:core_headers",
+ "//third_party/abseil-cpp/absl/container:inlined_vector",
"//third_party/abseil-cpp/absl/memory",
+ "//third_party/abseil-cpp/absl/types:optional",
+ "//third_party/abseil-cpp/absl/types:variant",
]
sources = [
@@ -174,6 +203,7 @@ rtc_library("video_coding") {
"../../rtc_base/experiments:min_video_bitrate_experiment",
"../../rtc_base/experiments:rate_control_settings",
"../../rtc_base/experiments:rtt_mult_experiment",
+ "../../rtc_base/synchronization:mutex",
"../../rtc_base/synchronization:sequence_checker",
"../../rtc_base/task_utils:repeating_task",
"../../rtc_base/third_party/base64",
@@ -181,9 +211,6 @@ rtc_library("video_coding") {
"../../system_wrappers",
"../rtp_rtcp",
"../rtp_rtcp:rtp_rtcp_format",
- "//third_party/abseil-cpp/absl/container:inlined_vector",
- "//third_party/abseil-cpp/absl/types:optional",
- "//third_party/abseil-cpp/absl/types:variant",
]
}
@@ -205,8 +232,8 @@ rtc_library("video_codec_interface") {
"../../common_video",
"../../common_video/generic_frame_descriptor",
"../../rtc_base/system:rtc_export",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("video_coding_legacy") {
@@ -258,6 +285,8 @@ rtc_library("video_coding_legacy") {
"../rtp_rtcp:rtp_rtcp_format",
"../rtp_rtcp:rtp_video_header",
"../utility",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/types:optional",
"//third_party/abseil-cpp/absl/types:variant",
@@ -329,8 +358,8 @@ rtc_library("video_coding_utility") {
"../../rtc_base/task_utils:to_queued_task",
"../../system_wrappers:field_trial",
"../rtp_rtcp:rtp_rtcp_format",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("webrtc_h264") {
@@ -363,9 +392,11 @@ rtc_library("webrtc_h264") {
"../../rtc_base/system:rtc_export",
"../../system_wrappers:field_trial",
"../../system_wrappers:metrics",
+ "//third_party/libyuv",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
- "//third_party/libyuv",
]
if (rtc_use_h264) {
@@ -449,9 +480,9 @@ rtc_library("webrtc_vp8") {
"../../rtc_base/experiments:rate_control_settings",
"../../system_wrappers:field_trial",
"../../system_wrappers:metrics",
- "//third_party/abseil-cpp/absl/types:optional",
"//third_party/libyuv",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
if (rtc_build_libvpx) {
deps += [ rtc_libvpx_dir ]
}
@@ -482,8 +513,8 @@ rtc_library("webrtc_vp8_temporal_layers") {
"../../rtc_base:rtc_numerics",
"../../system_wrappers:field_trial",
"../../system_wrappers:metrics",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
# This target includes VP9 files that may be used for any VP9 codec, internal SW or external HW.
@@ -507,8 +538,8 @@ rtc_library("webrtc_vp9_helpers") {
"../../rtc_base:checks",
"../../rtc_base:logging",
"../../rtc_base/experiments:stable_target_rate_experiment",
- "//third_party/abseil-cpp/absl/container:inlined_vector",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/container:inlined_vector" ]
}
rtc_library("webrtc_vp9") {
@@ -543,8 +574,8 @@ rtc_library("webrtc_vp9") {
"../../rtc_base/experiments:rate_control_settings",
"../../system_wrappers:field_trial",
"../rtp_rtcp:rtp_rtcp_format",
- "//third_party/abseil-cpp/absl/memory",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
if (rtc_build_libvpx) {
deps += [ rtc_libvpx_dir ]
}
@@ -595,6 +626,25 @@ if (rtc_include_tests) {
}
}
+ rtc_library("encoded_video_frame_producer") {
+ testonly = true
+ sources = [
+ "codecs/test/encoded_video_frame_producer.cc",
+ "codecs/test/encoded_video_frame_producer.h",
+ ]
+ deps = [
+ ":video_codec_interface",
+ "../../api:create_frame_generator",
+ "../../api:frame_generator_api",
+ "../../api/transport/rtp:dependency_descriptor",
+ "../../api/video:encoded_image",
+ "../../api/video:video_frame",
+ "../../api/video:video_frame_type",
+ "../../api/video_codecs:video_codecs_api",
+ "../../rtc_base:checks",
+ ]
+ }
+
rtc_library("simulcast_test_fixture_impl") {
testonly = true
sources = [
@@ -663,9 +713,9 @@ if (rtc_include_tests) {
"../../test:video_test_common",
"../../test:video_test_support",
"../rtp_rtcp:rtp_rtcp_format",
- "//third_party/abseil-cpp/absl/types:optional",
"//third_party/libyuv",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
video_coding_modules_tests_resources = []
@@ -736,8 +786,8 @@ if (rtc_include_tests) {
"../../test:test_support",
"../../test:video_test_common",
"../../test:video_test_support",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("videocodec_test_stats_impl") {
@@ -769,11 +819,17 @@ if (rtc_include_tests) {
"codecs/vp8/test/vp8_impl_unittest.cc",
"codecs/vp9/test/vp9_impl_unittest.cc",
]
+
+ # TODO(jianj): Fix crash on iOS and re-enable
+ if (enable_libaom && !is_ios) {
+ sources += [ "codecs/test/videocodec_test_libaom.cc" ]
+ }
if (rtc_use_h264) {
sources += [ "codecs/test/videocodec_test_openh264.cc" ]
}
deps = [
+ ":encoded_video_frame_producer",
":video_codec_interface",
":video_codecs_test_framework",
":video_coding_utility",
@@ -812,9 +868,9 @@ if (rtc_include_tests) {
"../../test:test_support",
"../../test:video_test_common",
"../rtp_rtcp:rtp_rtcp_format",
- "//third_party/abseil-cpp/absl/types:optional",
"//third_party/libyuv",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
data = video_coding_modules_tests_resources
@@ -843,6 +899,7 @@ if (rtc_include_tests) {
testonly = true
sources = [
+ "chain_diff_calculator_unittest.cc",
"codecs/test/videocodec_test_fixture_config_unittest.cc",
"codecs/test/videocodec_test_stats_impl_unittest.cc",
"codecs/test/videoprocessor_unittest.cc",
@@ -862,6 +919,7 @@ if (rtc_include_tests) {
"jitter_buffer_unittest.cc",
"jitter_estimator_tests.cc",
"loss_notification_controller_unittest.cc",
+ "nack_module2_unittest.cc",
"nack_module_unittest.cc",
"packet_buffer_unittest.cc",
"receiver_unittest.cc",
@@ -889,6 +947,7 @@ if (rtc_include_tests) {
}
deps = [
+ ":chain_diff_calculator",
":codec_globals_headers",
":encoded_frame",
":frame_dependencies_calculator",
@@ -955,7 +1014,11 @@ if (rtc_include_tests) {
"../../test/time_controller:time_controller",
"../rtp_rtcp:rtp_rtcp_format",
"../rtp_rtcp:rtp_video_header",
+ "codecs/av1:scalability_structure_tests",
"codecs/av1:video_coding_codecs_av1_tests",
+ "deprecated:nack_module",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/types:optional",
"//third_party/abseil-cpp/absl/types:variant",
diff --git a/chromium/third_party/webrtc/modules/video_coding/chain_diff_calculator.cc b/chromium/third_party/webrtc/modules/video_coding/chain_diff_calculator.cc
new file mode 100644
index 00000000000..5f852717b5d
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/chain_diff_calculator.cc
@@ -0,0 +1,62 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "modules/video_coding/chain_diff_calculator.h"
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <algorithm>
+#include <vector>
+
+#include "absl/container/inlined_vector.h"
+#include "absl/types/optional.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+
+void ChainDiffCalculator::Reset(const std::vector<bool>& chains) {
+ last_frame_in_chain_.resize(chains.size());
+ for (size_t i = 0; i < chains.size(); ++i) {
+ if (chains[i]) {
+ last_frame_in_chain_[i] = absl::nullopt;
+ }
+ }
+}
+
+absl::InlinedVector<int, 4> ChainDiffCalculator::ChainDiffs(
+ int64_t frame_id) const {
+ absl::InlinedVector<int, 4> result;
+ result.reserve(last_frame_in_chain_.size());
+ for (const auto& frame_id_in_chain : last_frame_in_chain_) {
+ result.push_back(frame_id_in_chain ? (frame_id - *frame_id_in_chain) : 0);
+ }
+ return result;
+}
+
+absl::InlinedVector<int, 4> ChainDiffCalculator::From(
+ int64_t frame_id,
+ const std::vector<bool>& chains) {
+ auto result = ChainDiffs(frame_id);
+ if (chains.size() != last_frame_in_chain_.size()) {
+ RTC_LOG(LS_ERROR) << "Insconsistent chain configuration for frame#"
+ << frame_id << ": expected "
+ << last_frame_in_chain_.size() << " chains, found "
+ << chains.size();
+ }
+ size_t num_chains = std::min(last_frame_in_chain_.size(), chains.size());
+ for (size_t i = 0; i < num_chains; ++i) {
+ if (chains[i]) {
+ last_frame_in_chain_[i] = frame_id;
+ }
+ }
+ return result;
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/chain_diff_calculator.h b/chromium/third_party/webrtc/modules/video_coding/chain_diff_calculator.h
new file mode 100644
index 00000000000..bca7340c6f7
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/chain_diff_calculator.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_VIDEO_CODING_CHAIN_DIFF_CALCULATOR_H_
+#define MODULES_VIDEO_CODING_CHAIN_DIFF_CALCULATOR_H_
+
+#include <stdint.h>
+
+#include <vector>
+
+#include "absl/container/inlined_vector.h"
+#include "absl/types/optional.h"
+
+namespace webrtc {
+
+// This class is thread compatible.
+class ChainDiffCalculator {
+ public:
+ ChainDiffCalculator() = default;
+ ChainDiffCalculator(const ChainDiffCalculator&) = default;
+ ChainDiffCalculator& operator=(const ChainDiffCalculator&) = default;
+
+ // Restarts chains, i.e. for position where chains[i] == true next chain_diff
+ // will be 0. Saves chains.size() as number of chains in the stream.
+ void Reset(const std::vector<bool>& chains);
+
+ // Returns chain diffs based on flags if frame is part of the chain.
+ absl::InlinedVector<int, 4> From(int64_t frame_id,
+ const std::vector<bool>& chains);
+
+ private:
+ absl::InlinedVector<int, 4> ChainDiffs(int64_t frame_id) const;
+
+ absl::InlinedVector<absl::optional<int64_t>, 4> last_frame_in_chain_;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_VIDEO_CODING_CHAIN_DIFF_CALCULATOR_H_
diff --git a/chromium/third_party/webrtc/modules/video_coding/chain_diff_calculator_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/chain_diff_calculator_unittest.cc
new file mode 100644
index 00000000000..efd09bd8880
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/chain_diff_calculator_unittest.cc
@@ -0,0 +1,126 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_coding/chain_diff_calculator.h"
+
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace {
+
+using ::testing::ElementsAre;
+using ::testing::SizeIs;
+
+TEST(ChainDiffCalculatorTest, SingleChain) {
+  // Simulate a stream with 2 temporal layers where the chain
+  // protects temporal layer 0.
+ ChainDiffCalculator calculator;
+ // Key frame.
+ calculator.Reset({true});
+ EXPECT_THAT(calculator.From(1, {true}), ElementsAre(0));
+ // T1 delta frame.
+ EXPECT_THAT(calculator.From(2, {false}), ElementsAre(1));
+ // T0 delta frame.
+ EXPECT_THAT(calculator.From(3, {true}), ElementsAre(2));
+}
+
+TEST(ChainDiffCalculatorTest, TwoChainsFullSvc) {
+  // Simulate a full svc stream with 2 spatial and 2 temporal layers.
+  // Chains are protecting temporal layer 0.
+ ChainDiffCalculator calculator;
+ // S0 Key frame.
+ calculator.Reset({true, true});
+ EXPECT_THAT(calculator.From(1, {true, true}), ElementsAre(0, 0));
+ // S1 Key frame.
+ EXPECT_THAT(calculator.From(2, {false, true}), ElementsAre(1, 1));
+ // S0T1 delta frame.
+ EXPECT_THAT(calculator.From(3, {false, false}), ElementsAre(2, 1));
+ // S1T1 delta frame.
+ EXPECT_THAT(calculator.From(4, {false, false}), ElementsAre(3, 2));
+ // S0T0 delta frame.
+ EXPECT_THAT(calculator.From(5, {true, true}), ElementsAre(4, 3));
+ // S1T0 delta frame.
+ EXPECT_THAT(calculator.From(6, {false, true}), ElementsAre(1, 1));
+}
+
+TEST(ChainDiffCalculatorTest, TwoChainsKSvc) {
+  // Simulate a k-svc stream with 2 spatial and 2 temporal layers.
+  // Chains are protecting temporal layer 0.
+ ChainDiffCalculator calculator;
+ // S0 Key frame.
+ calculator.Reset({true, true});
+ EXPECT_THAT(calculator.From(1, {true, true}), ElementsAre(0, 0));
+ // S1 Key frame.
+ EXPECT_THAT(calculator.From(2, {false, true}), ElementsAre(1, 1));
+ // S0T1 delta frame.
+ EXPECT_THAT(calculator.From(3, {false, false}), ElementsAre(2, 1));
+ // S1T1 delta frame.
+ EXPECT_THAT(calculator.From(4, {false, false}), ElementsAre(3, 2));
+ // S0T0 delta frame.
+ EXPECT_THAT(calculator.From(5, {true, false}), ElementsAre(4, 3));
+ // S1T0 delta frame.
+ EXPECT_THAT(calculator.From(6, {false, true}), ElementsAre(1, 4));
+}
+
+TEST(ChainDiffCalculatorTest, TwoChainsSimulcast) {
+  // Simulate a simulcast stream with 2 spatial and 2 temporal layers.
+  // Chains are protecting temporal layer 0 of each stream.
+ ChainDiffCalculator calculator;
+ // S0 Key frame.
+ calculator.Reset({true, false});
+ EXPECT_THAT(calculator.From(1, {true, false}), ElementsAre(0, 0));
+ // S1 Key frame.
+ calculator.Reset({false, true});
+ EXPECT_THAT(calculator.From(2, {false, true}), ElementsAre(1, 0));
+ // S0T1 delta frame.
+ EXPECT_THAT(calculator.From(3, {false, false}), ElementsAre(2, 1));
+ // S1T1 delta frame.
+ EXPECT_THAT(calculator.From(4, {false, false}), ElementsAre(3, 2));
+ // S0T0 delta frame.
+ EXPECT_THAT(calculator.From(5, {true, false}), ElementsAre(4, 3));
+ // S1T0 delta frame.
+ EXPECT_THAT(calculator.From(6, {false, true}), ElementsAre(1, 4));
+}
+
+TEST(ChainDiffCalculatorTest, ResilentToAbsentChainConfig) {
+ ChainDiffCalculator calculator;
+ // Key frame.
+ calculator.Reset({true, false});
+ EXPECT_THAT(calculator.From(1, {true, false}), ElementsAre(0, 0));
+  // Forgot to set chains; should still return 2 chain_diffs.
+ EXPECT_THAT(calculator.From(2, {}), ElementsAre(1, 0));
+  // Chain diffs for the next frame(s) are undefined, but there should
+  // still be the correct number of them.
+ EXPECT_THAT(calculator.From(3, {true, false}), SizeIs(2));
+ EXPECT_THAT(calculator.From(4, {false, true}), SizeIs(2));
+ // Since previous two frames updated all the chains, can expect what
+ // chain_diffs would be.
+ EXPECT_THAT(calculator.From(5, {false, false}), ElementsAre(2, 1));
+}
+
+TEST(ChainDiffCalculatorTest, ResilentToTooMainChains) {
+ ChainDiffCalculator calculator;
+ // Key frame.
+ calculator.Reset({true, false});
+ EXPECT_THAT(calculator.From(1, {true, false}), ElementsAre(0, 0));
+ // Set wrong number of chains. Expect number of chain_diffs is not changed.
+ EXPECT_THAT(calculator.From(2, {true, true, true}), ElementsAre(1, 0));
+  // Chain diffs for the next frame(s) are undefined, but there should
+  // still be the correct number of them.
+ EXPECT_THAT(calculator.From(3, {true, false}), SizeIs(2));
+ EXPECT_THAT(calculator.From(4, {false, true}), SizeIs(2));
+ // Since previous two frames updated all the chains, can expect what
+ // chain_diffs would be.
+ EXPECT_THAT(calculator.From(5, {false, false}), ElementsAre(2, 1));
+}
+
+} // namespace
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/av1/BUILD.gn b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/BUILD.gn
index b2b82d49472..a927db293d4 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/av1/BUILD.gn
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/BUILD.gn
@@ -13,10 +13,8 @@ rtc_library("libaom_av1_decoder") {
visibility = [ "*" ]
poisonous = [ "software_video_codecs" ]
public = [ "libaom_av1_decoder.h" ]
- deps = [
- "../../../../api/video_codecs:video_codecs_api",
- "//third_party/abseil-cpp/absl/base:core_headers",
- ]
+ deps = [ "../../../../api/video_codecs:video_codecs_api" ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers" ]
if (enable_libaom) {
sources = [ "libaom_av1_decoder.cc" ]
@@ -27,21 +25,79 @@ rtc_library("libaom_av1_decoder") {
"../../../../api/video:video_frame_i420",
"../../../../common_video",
"../../../../rtc_base:logging",
- "//third_party/abseil-cpp/absl/types:optional",
"//third_party/libaom",
"//third_party/libyuv",
]
+ absl_deps += [ "//third_party/abseil-cpp/absl/types:optional" ]
} else {
sources = [ "libaom_av1_decoder_absent.cc" ]
}
}
+rtc_source_set("scalable_video_controller") {
+ sources = [
+ "scalable_video_controller.h",
+ "scalable_video_controller_no_layering.cc",
+ "scalable_video_controller_no_layering.h",
+ ]
+ deps = [
+ "../../../../api/transport/rtp:dependency_descriptor",
+ "../../../../api/video:video_bitrate_allocation",
+ "../../../../common_video/generic_frame_descriptor",
+ "../../../../rtc_base:checks",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/container:inlined_vector",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_source_set("scalability_structures") {
+ sources = [
+ "scalability_structure_l1t2.cc",
+ "scalability_structure_l1t2.h",
+ "scalability_structure_l1t3.cc",
+ "scalability_structure_l1t3.h",
+ "scalability_structure_l2t1.cc",
+ "scalability_structure_l2t1.h",
+ "scalability_structure_l2t1_key.cc",
+ "scalability_structure_l2t1_key.h",
+ "scalability_structure_l2t2.cc",
+ "scalability_structure_l2t2.h",
+ "scalability_structure_l2t2_key.cc",
+ "scalability_structure_l2t2_key.h",
+ "scalability_structure_l2t2_key_shift.cc",
+ "scalability_structure_l2t2_key_shift.h",
+ "scalability_structure_l3t1.cc",
+ "scalability_structure_l3t1.h",
+ "scalability_structure_l3t3.cc",
+ "scalability_structure_l3t3.h",
+ "scalability_structure_s2t1.cc",
+ "scalability_structure_s2t1.h",
+ ]
+ deps = [
+ ":scalable_video_controller",
+ "../../../../api/transport/rtp:dependency_descriptor",
+ "../../../../common_video/generic_frame_descriptor",
+ "../../../../rtc_base:checks",
+ "../../../../rtc_base:logging",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/base:core_headers",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
rtc_library("libaom_av1_encoder") {
visibility = [ "*" ]
poisonous = [ "software_video_codecs" ]
public = [ "libaom_av1_encoder.h" ]
deps = [
+ ":scalable_video_controller",
"../../../../api/video_codecs:video_codecs_api",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/base:core_headers",
]
@@ -56,7 +112,6 @@ rtc_library("libaom_av1_encoder") {
"../../../../common_video",
"../../../../rtc_base:checks",
"../../../../rtc_base:logging",
- "//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/libaom",
]
} else {
@@ -65,6 +120,21 @@ rtc_library("libaom_av1_encoder") {
}
if (rtc_include_tests) {
+ rtc_library("scalability_structure_tests") {
+ testonly = true
+ sources = [ "scalability_structure_unittest.cc" ]
+ deps = [
+ ":scalability_structures",
+ ":scalable_video_controller",
+ "../..:chain_diff_calculator",
+ "../..:frame_dependencies_calculator",
+ "../../../../api/transport/rtp:dependency_descriptor",
+ "../../../../api/video:video_frame_type",
+ "../../../../test:test_support",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+ }
+
rtc_library("video_coding_codecs_av1_tests") {
testonly = true
@@ -76,13 +146,16 @@ if (rtc_include_tests) {
deps = [
":libaom_av1_decoder",
":libaom_av1_encoder",
+ ":scalability_structures",
+ ":scalable_video_controller",
+ "../..:encoded_video_frame_producer",
"../..:video_codec_interface",
- "../../../../api:create_frame_generator",
- "../../../../api:frame_generator_api",
+ "../../../../api:mock_video_encoder",
+ "../../../../api/video:video_frame_i420",
"../../../../api/video_codecs:video_codecs_api",
"../../../../test:test_support",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
}
}
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/av1/libaom_av1_decoder.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/libaom_av1_decoder.cc
index 122f214a5c8..1a8a0c4775a 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/av1/libaom_av1_decoder.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/libaom_av1_decoder.cc
@@ -53,6 +53,8 @@ class LibaomAv1Decoder final : public VideoDecoder {
int32_t Release() override;
+ const char* ImplementationName() const override;
+
private:
aom_codec_ctx_t context_;
bool inited_;
@@ -127,7 +129,7 @@ int32_t LibaomAv1Decoder::Decode(const EncodedImage& encoded_image,
// Return decoded frame data.
int qp;
- ret = aom_codec_control_(&context_, AOMD_GET_LAST_QUANTIZER, &qp);
+ ret = aom_codec_control(&context_, AOMD_GET_LAST_QUANTIZER, &qp);
if (ret != AOM_CODEC_OK) {
RTC_LOG(LS_WARNING) << "LibaomAv1Decoder::Decode returned " << ret
<< " on control AOME_GET_LAST_QUANTIZER.";
@@ -180,6 +182,10 @@ int32_t LibaomAv1Decoder::Release() {
return WEBRTC_VIDEO_CODEC_OK;
}
+const char* LibaomAv1Decoder::ImplementationName() const {
+ return "libaom";
+}
+
} // namespace
const bool kIsLibaomAv1DecoderSupported = true;
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.cc
index 59ad127435e..0b2c2dacf79 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.cc
@@ -13,15 +13,19 @@
#include <stdint.h>
#include <memory>
+#include <utility>
#include <vector>
#include "absl/algorithm/container.h"
+#include "absl/base/macros.h"
#include "api/scoped_refptr.h"
#include "api/video/encoded_image.h"
#include "api/video/i420_buffer.h"
#include "api/video/video_frame.h"
#include "api/video_codecs/video_codec.h"
#include "api/video_codecs/video_encoder.h"
+#include "modules/video_coding/codecs/av1/scalable_video_controller.h"
+#include "modules/video_coding/codecs/av1/scalable_video_controller_no_layering.h"
#include "modules/video_coding/include/video_codec_interface.h"
#include "modules/video_coding/include/video_error_codes.h"
#include "rtc_base/checks.h"
@@ -34,9 +38,7 @@ namespace webrtc {
namespace {
// Encoder configuration parameters
-constexpr int kQpMax = 56;
constexpr int kQpMin = 10;
-constexpr int kDefaultEncSpeed = 7; // Use values 6, 7, or 8 for RTC.
constexpr int kUsageProfile = 1; // 0 = good quality; 1 = real-time.
constexpr int kMinQindex = 58; // Min qindex threshold for QP scaling.
constexpr int kMaxQindex = 180; // Max qindex threshold for QP scaling.
@@ -45,9 +47,23 @@ constexpr int kLagInFrames = 0; // No look ahead.
constexpr int kRtpTicksPerSecond = 90000;
constexpr float kMinimumFrameRate = 1.0;
+// Only positive speeds, range for real-time coding currently is: 6 - 8.
+// Lower means slower/better quality, higher means fastest/lower quality.
+int GetCpuSpeed(int width, int height, int number_of_cores) {
+ // For smaller resolutions, use lower speed setting (get some coding gain at
+ // the cost of increased encoding complexity).
+ if (number_of_cores > 2 && width * height <= 320 * 180)
+ return 6;
+ else if (width * height >= 1280 * 720)
+ return 8;
+ else
+ return 7;
+}
+
class LibaomAv1Encoder final : public VideoEncoder {
public:
- LibaomAv1Encoder();
+ explicit LibaomAv1Encoder(
+ std::unique_ptr<ScalableVideoController> svc_controller);
~LibaomAv1Encoder();
int InitEncode(const VideoCodec* codec_settings,
@@ -66,8 +82,18 @@ class LibaomAv1Encoder final : public VideoEncoder {
EncoderInfo GetEncoderInfo() const override;
private:
+ // Configures the encoder with scalability for the next coded video sequence.
+ bool SetSvcParams(ScalableVideoController::StreamLayersConfig svc_config);
+ // Configures the encoder with layer for the next frame.
+ void SetSvcLayerId(
+ const ScalableVideoController::LayerFrameConfig& layer_frame);
+ // Configures the encoder which buffers next frame updates and can reference.
+ void SetSvcRefFrameConfig(
+ const ScalableVideoController::LayerFrameConfig& layer_frame);
+
+ const std::unique_ptr<ScalableVideoController> svc_controller_;
bool inited_;
- bool keyframe_required_;
+ bool svc_enabled_;
VideoCodec encoder_settings_;
aom_image_t* frame_for_encode_;
aom_codec_ctx_t ctx_;
@@ -100,11 +126,15 @@ int32_t VerifyCodecSettings(const VideoCodec& codec_settings) {
return WEBRTC_VIDEO_CODEC_OK;
}
-LibaomAv1Encoder::LibaomAv1Encoder()
- : inited_(false),
- keyframe_required_(true),
+LibaomAv1Encoder::LibaomAv1Encoder(
+ std::unique_ptr<ScalableVideoController> svc_controller)
+ : svc_controller_(std::move(svc_controller)),
+ inited_(false),
+ svc_enabled_(false),
frame_for_encode_(nullptr),
- encoded_image_callback_(nullptr) {}
+ encoded_image_callback_(nullptr) {
+ RTC_DCHECK(svc_controller_);
+}
LibaomAv1Encoder::~LibaomAv1Encoder() {
Release();
@@ -153,9 +183,12 @@ int LibaomAv1Encoder::InitEncode(const VideoCodec* codec_settings,
cfg_.g_input_bit_depth = kBitDepth;
cfg_.kf_mode = AOM_KF_DISABLED;
cfg_.rc_min_quantizer = kQpMin;
- cfg_.rc_max_quantizer = kQpMax;
+ cfg_.rc_max_quantizer = encoder_settings_.qpMax;
cfg_.g_usage = kUsageProfile;
-
+ if (svc_controller_->StreamConfig().num_spatial_layers > 1 ||
+ svc_controller_->StreamConfig().num_temporal_layers > 1) {
+ cfg_.g_error_resilient = 1;
+ }
// Low-latency settings.
cfg_.rc_end_usage = AOM_CBR; // Constant Bit Rate (CBR) mode
cfg_.g_pass = AOM_RC_ONE_PASS; // One-pass rate control
@@ -180,7 +213,9 @@ int LibaomAv1Encoder::InitEncode(const VideoCodec* codec_settings,
inited_ = true;
// Set control parameters
- ret = aom_codec_control(&ctx_, AOME_SET_CPUUSED, kDefaultEncSpeed);
+ ret = aom_codec_control(
+ &ctx_, AOME_SET_CPUUSED,
+ GetCpuSpeed(cfg_.g_w, cfg_.g_h, settings.number_of_cores));
if (ret != AOM_CODEC_OK) {
RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret
<< " on control AV1E_SET_CPUUSED.";
@@ -198,16 +233,149 @@ int LibaomAv1Encoder::InitEncode(const VideoCodec* codec_settings,
<< " on control AV1E_SET_DELTAQ_MODE.";
return WEBRTC_VIDEO_CODEC_ERROR;
}
+ ret = aom_codec_control(&ctx_, AV1E_SET_ENABLE_ORDER_HINT, 0);
+ if (ret != AOM_CODEC_OK) {
+ RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret
+ << " on control AV1E_SET_ENABLE_ORDER_HINT.";
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
ret = aom_codec_control(&ctx_, AV1E_SET_AQ_MODE, 3);
if (ret != AOM_CODEC_OK) {
RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret
<< " on control AV1E_SET_AQ_MODE.";
return WEBRTC_VIDEO_CODEC_ERROR;
}
+ if (!SetSvcParams(svc_controller_->StreamConfig())) {
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ ret = aom_codec_control(&ctx_, AOME_SET_MAX_INTRA_BITRATE_PCT, 300);
+ if (ret != AOM_CODEC_OK) {
+ RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret
+ << " on control AV1E_SET_MAX_INTRA_BITRATE_PCT.";
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ ret = aom_codec_control(&ctx_, AV1E_SET_COEFF_COST_UPD_FREQ, 2);
+ if (ret != AOM_CODEC_OK) {
+ RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret
+ << " on control AV1E_SET_COEFF_COST_UPD_FREQ.";
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ ret = aom_codec_control(&ctx_, AV1E_SET_MODE_COST_UPD_FREQ, 2);
+ if (ret != AOM_CODEC_OK) {
+ RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret
+ << " on control AV1E_SET_MODE_COST_UPD_FREQ.";
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ ret = aom_codec_control(&ctx_, AV1E_SET_MV_COST_UPD_FREQ, 3);
+ if (ret != AOM_CODEC_OK) {
+ RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret
+ << " on control AV1E_SET_MV_COST_UPD_FREQ.";
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
return WEBRTC_VIDEO_CODEC_OK;
}
+bool LibaomAv1Encoder::SetSvcParams(
+ ScalableVideoController::StreamLayersConfig svc_config) {
+ svc_enabled_ =
+ svc_config.num_spatial_layers > 1 || svc_config.num_temporal_layers > 1;
+ if (!svc_enabled_) {
+ return true;
+ }
+ aom_svc_params_t svc_params = {};
+ if (svc_config.num_spatial_layers < 1 || svc_config.num_spatial_layers > 4) {
+ RTC_LOG(LS_WARNING) << "Av1 supports up to 4 spatial layers. "
+ << svc_config.num_spatial_layers << " configured.";
+ return false;
+ }
+ if (svc_config.num_temporal_layers < 1 ||
+ svc_config.num_temporal_layers > 8) {
+ RTC_LOG(LS_WARNING) << "Av1 supports up to 8 temporal layers. "
+ << svc_config.num_temporal_layers << " configured.";
+ return false;
+ }
+ svc_params.number_spatial_layers = svc_config.num_spatial_layers;
+ svc_params.number_temporal_layers = svc_config.num_temporal_layers;
+
+ int num_layers =
+ svc_config.num_spatial_layers * svc_config.num_temporal_layers;
+ for (int i = 0; i < num_layers; ++i) {
+ svc_params.min_quantizers[i] = kQpMin;
+ svc_params.max_quantizers[i] = encoder_settings_.qpMax;
+ }
+
+ // Assume each temporal layer doubles framerate.
+ for (int tid = 0; tid < svc_config.num_temporal_layers; ++tid) {
+ svc_params.framerate_factor[tid] =
+ 1 << (svc_config.num_temporal_layers - tid - 1);
+ }
+
+ // TODO(danilchap): Add support for custom resolution factor.
+ for (int sid = 0; sid < svc_config.num_spatial_layers; ++sid) {
+ svc_params.scaling_factor_num[sid] = 1;
+ svc_params.scaling_factor_den[sid] =
+ 1 << (svc_config.num_spatial_layers - sid - 1);
+ }
+
+ aom_codec_err_t ret =
+ aom_codec_control(&ctx_, AV1E_SET_SVC_PARAMS, &svc_params);
+ if (ret != AOM_CODEC_OK) {
+ RTC_LOG(LS_WARNING) << "LibaomAV1Encoder::EncodeInit returned " << ret
+ << " on control AV1E_SET_SVC_PARAMS.";
+ return false;
+ }
+ return true;
+}
+
+void LibaomAv1Encoder::SetSvcLayerId(
+ const ScalableVideoController::LayerFrameConfig& layer_frame) {
+ aom_svc_layer_id_t layer_id = {};
+ layer_id.spatial_layer_id = layer_frame.SpatialId();
+ layer_id.temporal_layer_id = layer_frame.TemporalId();
+ aom_codec_err_t ret =
+ aom_codec_control(&ctx_, AV1E_SET_SVC_LAYER_ID, &layer_id);
+ if (ret != AOM_CODEC_OK) {
+ RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::Encode returned " << ret
+ << " on control AV1E_SET_SVC_LAYER_ID.";
+ }
+}
+
+void LibaomAv1Encoder::SetSvcRefFrameConfig(
+ const ScalableVideoController::LayerFrameConfig& layer_frame) {
+ // Buffer name to use for each layer_frame.buffers position. In particular
+  // when 2 buffers are referenced, prefer naming them last and golden,
+ // because av1 bitstream format has dedicated fields for these two names.
+ // See last_frame_idx and golden_frame_idx in the av1 spec
+ // https://aomediacodec.github.io/av1-spec/av1-spec.pdf
+ static constexpr int kPreferedSlotName[] = {0, // Last
+ 3, // Golden
+ 1, 2, 4, 5, 6};
+ static constexpr int kAv1NumBuffers = 8;
+
+ aom_svc_ref_frame_config_t ref_frame_config = {};
+ RTC_CHECK_LE(layer_frame.Buffers().size(), ABSL_ARRAYSIZE(kPreferedSlotName));
+ for (size_t i = 0; i < layer_frame.Buffers().size(); ++i) {
+ const CodecBufferUsage& buffer = layer_frame.Buffers()[i];
+ int slot_name = kPreferedSlotName[i];
+ RTC_CHECK_GE(buffer.id, 0);
+ RTC_CHECK_LT(buffer.id, kAv1NumBuffers);
+ ref_frame_config.ref_idx[slot_name] = buffer.id;
+ if (buffer.referenced) {
+ ref_frame_config.reference[slot_name] = 1;
+ }
+ if (buffer.updated) {
+ ref_frame_config.refresh[buffer.id] = 1;
+ }
+ }
+ aom_codec_err_t ret = aom_codec_control(&ctx_, AV1E_SET_SVC_REF_FRAME_CONFIG,
+ &ref_frame_config);
+ if (ret != AOM_CODEC_OK) {
+ RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::Encode returned " << ret
+ << " on control AV1_SET_SVC_REF_FRAME_CONFIG.";
+ }
+}
+
int32_t LibaomAv1Encoder::RegisterEncodeCompleteCallback(
EncodedImageCallback* encoded_image_callback) {
encoded_image_callback_ = encoded_image_callback;
@@ -235,10 +403,18 @@ int32_t LibaomAv1Encoder::Encode(
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}
- keyframe_required_ =
+ bool keyframe_required =
frame_types != nullptr &&
absl::c_linear_search(*frame_types, VideoFrameType::kVideoFrameKey);
+ std::vector<ScalableVideoController::LayerFrameConfig> layer_frames =
+ svc_controller_->NextFrameConfig(keyframe_required);
+
+ if (layer_frames.empty()) {
+ RTC_LOG(LS_ERROR) << "SVCController returned no configuration for a frame.";
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+
// Convert input frame to I420, if needed.
VideoFrame prepped_input_frame = frame;
if (prepped_input_frame.video_frame_buffer()->type() !=
@@ -263,75 +439,84 @@ int32_t LibaomAv1Encoder::Encode(
const uint32_t duration =
kRtpTicksPerSecond / static_cast<float>(encoder_settings_.maxFramerate);
- aom_enc_frame_flags_t flags = (keyframe_required_) ? AOM_EFLAG_FORCE_KF : 0;
- // Encode a frame.
- aom_codec_err_t ret = aom_codec_encode(&ctx_, frame_for_encode_,
- frame.timestamp(), duration, flags);
- if (ret != AOM_CODEC_OK) {
- RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::Encode returned " << ret
- << " on aom_codec_encode.";
- return WEBRTC_VIDEO_CODEC_ERROR;
- }
+ for (ScalableVideoController::LayerFrameConfig& layer_frame : layer_frames) {
+ aom_enc_frame_flags_t flags =
+ layer_frame.IsKeyframe() ? AOM_EFLAG_FORCE_KF : 0;
- // Get encoded image data.
- EncodedImage encoded_image;
- encoded_image._completeFrame = true;
- aom_codec_iter_t iter = nullptr;
- int data_pkt_count = 0;
- while (const aom_codec_cx_pkt_t* pkt = aom_codec_get_cx_data(&ctx_, &iter)) {
- if (pkt->kind == AOM_CODEC_CX_FRAME_PKT && pkt->data.frame.sz > 0) {
- if (data_pkt_count > 0) {
- RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::Encoder returned more than "
- "one data packet for an input video frame.";
- Release();
- }
- // TODO(bugs.webrtc.org/11174): Remove this hack when
- // webrtc_pc_e2e::SingleProcessEncodedImageDataInjector not used or fixed
- // not to assume that encoded image transfered as is.
- const uint8_t* data = static_cast<const uint8_t*>(pkt->data.frame.buf);
- size_t size = pkt->data.frame.sz;
- if (size > 2 && data[0] == 0b0'0010'010 && data[1] == 0) {
- // Typically frame starts with a Temporal Delimter OBU of size 0 that is
- // not need by any component in webrtc and discarded during rtp
- // packetization. Before discarded it confuses test framework that
- // assumes received encoded frame is exactly same as sent frame.
- data += 2;
- size -= 2;
- }
- encoded_image.SetEncodedData(EncodedImageBuffer::Create(data, size));
-
- bool is_key_frame = ((pkt->data.frame.flags & AOM_EFLAG_FORCE_KF) != 0);
- encoded_image._frameType = is_key_frame
- ? VideoFrameType::kVideoFrameKey
- : VideoFrameType::kVideoFrameDelta;
- encoded_image.SetTimestamp(frame.timestamp());
- encoded_image.capture_time_ms_ = frame.render_time_ms();
- encoded_image.rotation_ = frame.rotation();
- encoded_image.content_type_ = VideoContentType::UNSPECIFIED;
- // If encoded image width/height info are added to aom_codec_cx_pkt_t,
- // use those values in lieu of the values in frame.
- encoded_image._encodedHeight = frame.height();
- encoded_image._encodedWidth = frame.width();
- encoded_image.timing_.flags = VideoSendTiming::kInvalid;
- int qp = -1;
- ret = aom_codec_control(&ctx_, AOME_GET_LAST_QUANTIZER, &qp);
- if (ret != AOM_CODEC_OK) {
- RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::Encode returned " << ret
- << " on control AOME_GET_LAST_QUANTIZER.";
- return WEBRTC_VIDEO_CODEC_ERROR;
+ if (svc_enabled_) {
+ SetSvcLayerId(layer_frame);
+ SetSvcRefFrameConfig(layer_frame);
+ }
+
+ // Encode a frame.
+ aom_codec_err_t ret = aom_codec_encode(&ctx_, frame_for_encode_,
+ frame.timestamp(), duration, flags);
+ if (ret != AOM_CODEC_OK) {
+ RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::Encode returned " << ret
+ << " on aom_codec_encode.";
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+
+ // Get encoded image data.
+ EncodedImage encoded_image;
+ encoded_image._completeFrame = true;
+ aom_codec_iter_t iter = nullptr;
+ int data_pkt_count = 0;
+ while (const aom_codec_cx_pkt_t* pkt =
+ aom_codec_get_cx_data(&ctx_, &iter)) {
+ if (pkt->kind == AOM_CODEC_CX_FRAME_PKT && pkt->data.frame.sz > 0) {
+ if (data_pkt_count > 0) {
+ RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::Encoder returned more than "
+ "one data packet for an input video frame.";
+ Release();
+ }
+ encoded_image.SetEncodedData(EncodedImageBuffer::Create(
+ /*data=*/static_cast<const uint8_t*>(pkt->data.frame.buf),
+ /*size=*/pkt->data.frame.sz));
+
+ if ((pkt->data.frame.flags & AOM_EFLAG_FORCE_KF) != 0) {
+ layer_frame.Keyframe();
+ }
+ encoded_image._frameType = layer_frame.IsKeyframe()
+ ? VideoFrameType::kVideoFrameKey
+ : VideoFrameType::kVideoFrameDelta;
+ encoded_image.SetTimestamp(frame.timestamp());
+ encoded_image.capture_time_ms_ = frame.render_time_ms();
+ encoded_image.rotation_ = frame.rotation();
+ encoded_image.content_type_ = VideoContentType::UNSPECIFIED;
+ // If encoded image width/height info are added to aom_codec_cx_pkt_t,
+ // use those values in lieu of the values in frame.
+ encoded_image._encodedHeight = frame.height();
+ encoded_image._encodedWidth = frame.width();
+ encoded_image.timing_.flags = VideoSendTiming::kInvalid;
+ int qp = -1;
+ ret = aom_codec_control(&ctx_, AOME_GET_LAST_QUANTIZER, &qp);
+ if (ret != AOM_CODEC_OK) {
+ RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::Encode returned " << ret
+ << " on control AOME_GET_LAST_QUANTIZER.";
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ encoded_image.qp_ = qp;
+ encoded_image.SetColorSpace(frame.color_space());
+ ++data_pkt_count;
}
- encoded_image.qp_ = qp;
- encoded_image.SetColorSpace(frame.color_space());
- ++data_pkt_count;
}
- }
- // Deliver encoded image data.
- if (encoded_image.size() > 0) {
- CodecSpecificInfo codec_specific_info;
- encoded_image_callback_->OnEncodedImage(encoded_image, &codec_specific_info,
- nullptr);
+ // Deliver encoded image data.
+ if (encoded_image.size() > 0) {
+ CodecSpecificInfo codec_specific_info;
+ codec_specific_info.codecType = kVideoCodecAV1;
+ bool is_keyframe = layer_frame.IsKeyframe();
+ codec_specific_info.generic_frame_info =
+ svc_controller_->OnEncodeDone(std::move(layer_frame));
+ if (is_keyframe && codec_specific_info.generic_frame_info) {
+ codec_specific_info.template_structure =
+ svc_controller_->DependencyStructure();
+ }
+ encoded_image_callback_->OnEncodedImage(encoded_image,
+ &codec_specific_info, nullptr);
+ }
}
return WEBRTC_VIDEO_CODEC_OK;
@@ -359,6 +544,7 @@ void LibaomAv1Encoder::SetRates(const RateControlParameters& parameters) {
RTC_DCHECK_LE(rc_target_bitrate_kbps, encoder_settings_.maxBitrate);
RTC_DCHECK_GE(rc_target_bitrate_kbps, encoder_settings_.minBitrate);
+ svc_controller_->OnRatesUpdated(parameters.bitrate);
// Set target bit rate.
cfg_.rc_target_bitrate = rc_target_bitrate_kbps;
@@ -389,7 +575,13 @@ VideoEncoder::EncoderInfo LibaomAv1Encoder::GetEncoderInfo() const {
const bool kIsLibaomAv1EncoderSupported = true;
std::unique_ptr<VideoEncoder> CreateLibaomAv1Encoder() {
- return std::make_unique<LibaomAv1Encoder>();
+ return std::make_unique<LibaomAv1Encoder>(
+ std::make_unique<ScalableVideoControllerNoLayering>());
+}
+
+std::unique_ptr<VideoEncoder> CreateLibaomAv1Encoder(
+ std::unique_ptr<ScalableVideoController> svc_controller) {
+ return std::make_unique<LibaomAv1Encoder>(std::move(svc_controller));
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.h b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.h
index 4b0ee28d402..c2f04e669cb 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.h
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.h
@@ -14,12 +14,15 @@
#include "absl/base/attributes.h"
#include "api/video_codecs/video_encoder.h"
+#include "modules/video_coding/codecs/av1/scalable_video_controller.h"
namespace webrtc {
ABSL_CONST_INIT extern const bool kIsLibaomAv1EncoderSupported;
std::unique_ptr<VideoEncoder> CreateLibaomAv1Encoder();
+std::unique_ptr<VideoEncoder> CreateLibaomAv1Encoder(
+ std::unique_ptr<ScalableVideoController> controller);
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder_unittest.cc
index 6d1d0bbb240..341a82774d5 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder_unittest.cc
@@ -11,15 +11,38 @@
#include "modules/video_coding/codecs/av1/libaom_av1_encoder.h"
#include <memory>
+#include <vector>
+#include "absl/types/optional.h"
#include "api/video_codecs/video_codec.h"
#include "api/video_codecs/video_encoder.h"
+#include "modules/video_coding/codecs/av1/scalability_structure_l1t2.h"
+#include "modules/video_coding/codecs/test/encoded_video_frame_producer.h"
#include "modules/video_coding/include/video_error_codes.h"
+#include "test/gmock.h"
#include "test/gtest.h"
namespace webrtc {
namespace {
+using ::testing::SizeIs;
+
+VideoCodec DefaultCodecSettings() {
+ VideoCodec codec_settings;
+ codec_settings.width = 320;
+ codec_settings.height = 180;
+ codec_settings.maxFramerate = 30;
+ codec_settings.maxBitrate = 1000;
+ codec_settings.qpMax = 63;
+ return codec_settings;
+}
+
+VideoEncoder::Settings DefaultEncoderSettings() {
+ return VideoEncoder::Settings(
+ VideoEncoder::Capabilities(/*loss_notification=*/false),
+ /*number_of_cores=*/1, /*max_payload_size=*/1200);
+}
+
TEST(LibaomAv1EncoderTest, CanCreate) {
std::unique_ptr<VideoEncoder> encoder = CreateLibaomAv1Encoder();
EXPECT_TRUE(encoder);
@@ -28,17 +51,37 @@ TEST(LibaomAv1EncoderTest, CanCreate) {
TEST(LibaomAv1EncoderTest, InitAndRelease) {
std::unique_ptr<VideoEncoder> encoder = CreateLibaomAv1Encoder();
ASSERT_TRUE(encoder);
- VideoCodec codec_settings;
- codec_settings.width = 1280;
- codec_settings.height = 720;
- codec_settings.maxFramerate = 30;
- VideoEncoder::Capabilities capabilities(/*loss_notification=*/false);
- VideoEncoder::Settings encoder_settings(capabilities, /*number_of_cores=*/1,
- /*max_payload_size=*/1200);
- EXPECT_EQ(encoder->InitEncode(&codec_settings, encoder_settings),
+ VideoCodec codec_settings = DefaultCodecSettings();
+ EXPECT_EQ(encoder->InitEncode(&codec_settings, DefaultEncoderSettings()),
WEBRTC_VIDEO_CODEC_OK);
EXPECT_EQ(encoder->Release(), WEBRTC_VIDEO_CODEC_OK);
}
+TEST(LibaomAv1EncoderTest, NoBitrateOnTopLayerRefecltedInActiveDecodeTargets) {
+ // Configure encoder with 2 temporal layers.
+ std::unique_ptr<VideoEncoder> encoder =
+ CreateLibaomAv1Encoder(std::make_unique<ScalabilityStructureL1T2>());
+ VideoCodec codec_settings = DefaultCodecSettings();
+ ASSERT_EQ(encoder->InitEncode(&codec_settings, DefaultEncoderSettings()),
+ WEBRTC_VIDEO_CODEC_OK);
+
+ VideoEncoder::RateControlParameters rate_parameters;
+ rate_parameters.framerate_fps = 30;
+ rate_parameters.bitrate.SetBitrate(0, /*temporal_index=*/0, 300'000);
+ rate_parameters.bitrate.SetBitrate(0, /*temporal_index=*/1, 0);
+ encoder->SetRates(rate_parameters);
+
+ std::vector<EncodedVideoFrameProducer::EncodedFrame> encoded_frames =
+ EncodedVideoFrameProducer(*encoder).SetNumInputFrames(1).Encode();
+ ASSERT_THAT(encoded_frames, SizeIs(1));
+ ASSERT_NE(encoded_frames[0].codec_specific_info.generic_frame_info,
+ absl::nullopt);
+ // Assuming L1T2 structure uses 1st decode target for T0 and 2nd decode target
+ // for T0+T1 frames, expect only 1st decode target is active.
+ EXPECT_EQ(encoded_frames[0]
+ .codec_specific_info.generic_frame_info->active_decode_targets,
+ 0b01);
+}
+
} // namespace
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/av1/libaom_av1_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/libaom_av1_unittest.cc
index 4a549ea453d..c47a3923843 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/av1/libaom_av1_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/libaom_av1_unittest.cc
@@ -15,12 +15,23 @@
#include <vector>
#include "absl/types/optional.h"
-#include "api/test/create_frame_generator.h"
-#include "api/test/frame_generator_interface.h"
#include "api/video_codecs/video_codec.h"
#include "api/video_codecs/video_encoder.h"
#include "modules/video_coding/codecs/av1/libaom_av1_decoder.h"
#include "modules/video_coding/codecs/av1/libaom_av1_encoder.h"
+#include "modules/video_coding/codecs/av1/scalability_structure_l1t2.h"
+#include "modules/video_coding/codecs/av1/scalability_structure_l1t3.h"
+#include "modules/video_coding/codecs/av1/scalability_structure_l2t1.h"
+#include "modules/video_coding/codecs/av1/scalability_structure_l2t1_key.h"
+#include "modules/video_coding/codecs/av1/scalability_structure_l2t2.h"
+#include "modules/video_coding/codecs/av1/scalability_structure_l2t2_key.h"
+#include "modules/video_coding/codecs/av1/scalability_structure_l2t2_key_shift.h"
+#include "modules/video_coding/codecs/av1/scalability_structure_l3t1.h"
+#include "modules/video_coding/codecs/av1/scalability_structure_l3t3.h"
+#include "modules/video_coding/codecs/av1/scalability_structure_s2t1.h"
+#include "modules/video_coding/codecs/av1/scalable_video_controller.h"
+#include "modules/video_coding/codecs/av1/scalable_video_controller_no_layering.h"
+#include "modules/video_coding/codecs/test/encoded_video_frame_producer.h"
#include "modules/video_coding/include/video_codec_interface.h"
#include "modules/video_coding/include/video_error_codes.h"
#include "test/gmock.h"
@@ -29,79 +40,43 @@
namespace webrtc {
namespace {
+using ::testing::ContainerEq;
+using ::testing::Each;
using ::testing::ElementsAreArray;
+using ::testing::Ge;
using ::testing::IsEmpty;
using ::testing::Not;
using ::testing::NotNull;
+using ::testing::SizeIs;
+using ::testing::Truly;
+using ::testing::Values;
// Use small resolution for this test to make it faster.
constexpr int kWidth = 320;
constexpr int kHeight = 180;
constexpr int kFramerate = 30;
-constexpr int kRtpTicksPerSecond = 90000;
-class TestAv1Encoder {
- public:
- struct Encoded {
- EncodedImage encoded_image;
- CodecSpecificInfo codec_specific_info;
- };
-
- TestAv1Encoder() : encoder_(CreateLibaomAv1Encoder()) {
- RTC_CHECK(encoder_);
- VideoCodec codec_settings;
- codec_settings.width = kWidth;
- codec_settings.height = kHeight;
- codec_settings.maxFramerate = kFramerate;
- VideoEncoder::Settings encoder_settings(
- VideoEncoder::Capabilities(/*loss_notification=*/false),
- /*number_of_cores=*/1, /*max_payload_size=*/1200);
- EXPECT_EQ(encoder_->InitEncode(&codec_settings, encoder_settings),
- WEBRTC_VIDEO_CODEC_OK);
- EXPECT_EQ(encoder_->RegisterEncodeCompleteCallback(&callback_),
- WEBRTC_VIDEO_CODEC_OK);
- }
- // This class requires pointer stability and thus not copyable nor movable.
- TestAv1Encoder(const TestAv1Encoder&) = delete;
- TestAv1Encoder& operator=(const TestAv1Encoder&) = delete;
-
- void EncodeAndAppend(const VideoFrame& frame, std::vector<Encoded>* encoded) {
- callback_.SetEncodeStorage(encoded);
- std::vector<VideoFrameType> frame_types = {
- VideoFrameType::kVideoFrameDelta};
- EXPECT_EQ(encoder_->Encode(frame, &frame_types), WEBRTC_VIDEO_CODEC_OK);
- // Prefer to crash checking nullptr rather than writing to random memory.
- callback_.SetEncodeStorage(nullptr);
- }
-
- private:
- class EncoderCallback : public EncodedImageCallback {
- public:
- void SetEncodeStorage(std::vector<Encoded>* storage) { storage_ = storage; }
-
- private:
- Result OnEncodedImage(
- const EncodedImage& encoded_image,
- const CodecSpecificInfo* codec_specific_info,
- const RTPFragmentationHeader* /*fragmentation*/) override {
- RTC_CHECK(storage_);
- storage_->push_back({encoded_image, *codec_specific_info});
- return Result(Result::Error::OK);
- }
-
- std::vector<Encoded>* storage_ = nullptr;
- };
-
- EncoderCallback callback_;
- std::unique_ptr<VideoEncoder> encoder_;
-};
+VideoCodec DefaultCodecSettings() {
+ VideoCodec codec_settings;
+ codec_settings.width = kWidth;
+ codec_settings.height = kHeight;
+ codec_settings.maxFramerate = kFramerate;
+ codec_settings.maxBitrate = 1000;
+ codec_settings.qpMax = 63;
+ return codec_settings;
+}
+VideoEncoder::Settings DefaultEncoderSettings() {
+ return VideoEncoder::Settings(
+ VideoEncoder::Capabilities(/*loss_notification=*/false),
+ /*number_of_cores=*/1, /*max_payload_size=*/1200);
+}
class TestAv1Decoder {
public:
- TestAv1Decoder() {
- decoder_ = CreateLibaomAv1Decoder();
+ explicit TestAv1Decoder(int decoder_id)
+ : decoder_id_(decoder_id), decoder_(CreateLibaomAv1Decoder()) {
if (decoder_ == nullptr) {
- ADD_FAILURE() << "Failed to create a decoder";
+ ADD_FAILURE() << "Failed to create a decoder#" << decoder_id_;
return;
}
EXPECT_EQ(decoder_->InitDecode(/*codec_settings=*/nullptr,
@@ -116,20 +91,17 @@ class TestAv1Decoder {
void Decode(int64_t frame_id, const EncodedImage& image) {
ASSERT_THAT(decoder_, NotNull());
- requested_ids_.push_back(frame_id);
int32_t error = decoder_->Decode(image, /*missing_frames=*/false,
/*render_time_ms=*/image.capture_time_ms_);
if (error != WEBRTC_VIDEO_CODEC_OK) {
ADD_FAILURE() << "Failed to decode frame id " << frame_id
- << " with error code " << error;
+ << " with error code " << error << " by decoder#"
+ << decoder_id_;
return;
}
decoded_ids_.push_back(frame_id);
}
- const std::vector<int64_t>& requested_frame_ids() const {
- return requested_ids_;
- }
const std::vector<int64_t>& decoded_frame_ids() const { return decoded_ids_; }
size_t num_output_frames() const { return callback_.num_called(); }
@@ -156,51 +128,116 @@ class TestAv1Decoder {
int num_called_ = 0;
};
- std::vector<int64_t> requested_ids_;
+ const int decoder_id_;
std::vector<int64_t> decoded_ids_;
DecoderCallback callback_;
- std::unique_ptr<VideoDecoder> decoder_;
+ const std::unique_ptr<VideoDecoder> decoder_;
};
-std::vector<VideoFrame> GenerateFrames(size_t num_frames) {
- std::vector<VideoFrame> frames;
- frames.reserve(num_frames);
-
- auto input_frame_generator = test::CreateSquareFrameGenerator(
- kWidth, kHeight, test::FrameGeneratorInterface::OutputType::kI420,
- absl::nullopt);
- uint32_t timestamp = 1000;
- for (size_t i = 0; i < num_frames; ++i) {
- frames.push_back(
- VideoFrame::Builder()
- .set_video_frame_buffer(input_frame_generator->NextFrame().buffer)
- .set_timestamp_rtp(timestamp += kRtpTicksPerSecond / kFramerate)
- .build());
- }
- return frames;
-}
-
TEST(LibaomAv1Test, EncodeDecode) {
- TestAv1Decoder decoder;
- TestAv1Encoder encoder;
-
- std::vector<TestAv1Encoder::Encoded> encoded_frames;
- for (const VideoFrame& frame : GenerateFrames(/*num_frames=*/4)) {
- encoder.EncodeAndAppend(frame, &encoded_frames);
- }
- for (size_t frame_idx = 0; frame_idx < encoded_frames.size(); ++frame_idx) {
- decoder.Decode(static_cast<int64_t>(frame_idx),
- encoded_frames[frame_idx].encoded_image);
+ TestAv1Decoder decoder(0);
+ std::unique_ptr<VideoEncoder> encoder = CreateLibaomAv1Encoder();
+ VideoCodec codec_settings = DefaultCodecSettings();
+ ASSERT_EQ(encoder->InitEncode(&codec_settings, DefaultEncoderSettings()),
+ WEBRTC_VIDEO_CODEC_OK);
+
+ std::vector<EncodedVideoFrameProducer::EncodedFrame> encoded_frames =
+ EncodedVideoFrameProducer(*encoder).SetNumInputFrames(4).Encode();
+ for (size_t frame_id = 0; frame_id < encoded_frames.size(); ++frame_id) {
+ decoder.Decode(static_cast<int64_t>(frame_id),
+ encoded_frames[frame_id].encoded_image);
}
// Check encoder produced some frames for decoder to decode.
ASSERT_THAT(encoded_frames, Not(IsEmpty()));
// Check decoder found all of them valid.
- EXPECT_THAT(decoder.decoded_frame_ids(),
- ElementsAreArray(decoder.requested_frame_ids()));
+ EXPECT_THAT(decoder.decoded_frame_ids(), SizeIs(encoded_frames.size()));
// Check each of them produced an output frame.
EXPECT_EQ(decoder.num_output_frames(), decoder.decoded_frame_ids().size());
}
+struct SvcTestParam {
+ std::function<std::unique_ptr<ScalableVideoController>()> svc_factory;
+ int num_frames_to_generate;
+};
+
+class LibaomAv1SvcTest : public ::testing::TestWithParam<SvcTestParam> {};
+
+TEST_P(LibaomAv1SvcTest, EncodeAndDecodeAllDecodeTargets) {
+ std::unique_ptr<ScalableVideoController> svc_controller =
+ GetParam().svc_factory();
+ size_t num_decode_targets =
+ svc_controller->DependencyStructure().num_decode_targets;
+
+ std::unique_ptr<VideoEncoder> encoder =
+ CreateLibaomAv1Encoder(std::move(svc_controller));
+ VideoCodec codec_settings = DefaultCodecSettings();
+ ASSERT_EQ(encoder->InitEncode(&codec_settings, DefaultEncoderSettings()),
+ WEBRTC_VIDEO_CODEC_OK);
+ std::vector<EncodedVideoFrameProducer::EncodedFrame> encoded_frames =
+ EncodedVideoFrameProducer(*encoder)
+ .SetNumInputFrames(GetParam().num_frames_to_generate)
+ .SetResolution({kWidth, kHeight})
+ .Encode();
+
+ ASSERT_THAT(
+ encoded_frames,
+ Each(Truly([&](const EncodedVideoFrameProducer::EncodedFrame& frame) {
+ return frame.codec_specific_info.generic_frame_info &&
+ frame.codec_specific_info.generic_frame_info
+ ->decode_target_indications.size() == num_decode_targets;
+ })));
+
+ for (size_t dt = 0; dt < num_decode_targets; ++dt) {
+ TestAv1Decoder decoder(dt);
+ std::vector<int64_t> requested_ids;
+ for (int64_t frame_id = 0;
+ frame_id < static_cast<int64_t>(encoded_frames.size()); ++frame_id) {
+ const EncodedVideoFrameProducer::EncodedFrame& frame =
+ encoded_frames[frame_id];
+ if (frame.codec_specific_info.generic_frame_info
+ ->decode_target_indications[dt] !=
+ DecodeTargetIndication::kNotPresent) {
+ requested_ids.push_back(frame_id);
+ decoder.Decode(frame_id, frame.encoded_image);
+ }
+ }
+
+ ASSERT_THAT(requested_ids, SizeIs(Ge(2u)));
+ // Check decoder found all of them valid.
+ EXPECT_THAT(decoder.decoded_frame_ids(), ContainerEq(requested_ids))
+ << "Decoder#" << dt;
+ // Check each of them produced an output frame.
+ EXPECT_EQ(decoder.num_output_frames(), decoder.decoded_frame_ids().size())
+ << "Decoder#" << dt;
+ }
+}
+
+INSTANTIATE_TEST_SUITE_P(
+ Svc,
+ LibaomAv1SvcTest,
+ Values(SvcTestParam{std::make_unique<ScalableVideoControllerNoLayering>,
+ /*num_frames_to_generate=*/4},
+ SvcTestParam{std::make_unique<ScalabilityStructureL1T2>,
+ /*num_frames_to_generate=*/4},
+ SvcTestParam{std::make_unique<ScalabilityStructureL1T3>,
+ /*num_frames_to_generate=*/8},
+ SvcTestParam{std::make_unique<ScalabilityStructureL2T1>,
+ /*num_frames_to_generate=*/3},
+ SvcTestParam{std::make_unique<ScalabilityStructureL2T1Key>,
+ /*num_frames_to_generate=*/3},
+ SvcTestParam{std::make_unique<ScalabilityStructureL3T1>,
+ /*num_frames_to_generate=*/3},
+ SvcTestParam{std::make_unique<ScalabilityStructureL3T3>,
+ /*num_frames_to_generate=*/8},
+ SvcTestParam{std::make_unique<ScalabilityStructureS2T1>,
+ /*num_frames_to_generate=*/3},
+ SvcTestParam{std::make_unique<ScalabilityStructureL2T2>,
+ /*num_frames_to_generate=*/4},
+ SvcTestParam{std::make_unique<ScalabilityStructureL2T2Key>,
+ /*num_frames_to_generate=*/4},
+ SvcTestParam{std::make_unique<ScalabilityStructureL2T2KeyShift>,
+ /*num_frames_to_generate=*/4}));
+
} // namespace
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l1t2.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l1t2.cc
new file mode 100644
index 00000000000..ae4c8792244
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l1t2.cc
@@ -0,0 +1,123 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "modules/video_coding/codecs/av1/scalability_structure_l1t2.h"
+
+#include <utility>
+#include <vector>
+
+#include "absl/base/macros.h"
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace {
+
+constexpr auto kNotPresent = DecodeTargetIndication::kNotPresent;
+constexpr auto kDiscardable = DecodeTargetIndication::kDiscardable;
+constexpr auto kSwitch = DecodeTargetIndication::kSwitch;
+
+constexpr DecodeTargetIndication kDtis[3][2] = {
+ {kSwitch, kSwitch}, // KeyFrame
+ {kNotPresent, kDiscardable}, // DeltaFrame T1
+ {kSwitch, kSwitch}, // DeltaFrame T0
+};
+
+} // namespace
+
+ScalabilityStructureL1T2::~ScalabilityStructureL1T2() = default;
+
+ScalableVideoController::StreamLayersConfig
+ScalabilityStructureL1T2::StreamConfig() const {
+ StreamLayersConfig result;
+ result.num_spatial_layers = 1;
+ result.num_temporal_layers = 2;
+ return result;
+}
+
+FrameDependencyStructure ScalabilityStructureL1T2::DependencyStructure() const {
+ FrameDependencyStructure structure;
+ structure.num_decode_targets = 2;
+ structure.num_chains = 1;
+ structure.decode_target_protected_by_chain = {0, 0};
+ structure.templates.resize(3);
+ structure.templates[0].T(0).Dtis("SS").ChainDiffs({0});
+ structure.templates[1].T(0).Dtis("SS").ChainDiffs({2}).FrameDiffs({2});
+ structure.templates[2].T(1).Dtis("-D").ChainDiffs({1}).FrameDiffs({1});
+ return structure;
+}
+
+std::vector<ScalableVideoController::LayerFrameConfig>
+ScalabilityStructureL1T2::NextFrameConfig(bool restart) {
+ if (!active_decode_targets_[0]) {
+ RTC_LOG(LS_WARNING) << "No bitrate allocated for temporal layer 0, yet "
+ "frame is requested. No frame will be encoded.";
+ return {};
+ }
+ if (restart) {
+ next_pattern_ = kKeyFrame;
+ } else if (!active_decode_targets_[1]) {
+ next_pattern_ = kDeltaFrameT0;
+ }
+ std::vector<LayerFrameConfig> result(1);
+
+ switch (next_pattern_) {
+ case kKeyFrame:
+ result[0].Id(0).T(0).Keyframe().Update(0);
+ next_pattern_ = kDeltaFrameT1;
+ break;
+ case kDeltaFrameT1:
+ result[0].Id(1).T(1).Reference(0);
+ next_pattern_ = kDeltaFrameT0;
+ break;
+ case kDeltaFrameT0:
+ result[0].Id(2).T(0).ReferenceAndUpdate(0);
+ next_pattern_ = kDeltaFrameT1;
+ break;
+ }
+ return result;
+}
+
+absl::optional<GenericFrameInfo> ScalabilityStructureL1T2::OnEncodeDone(
+ LayerFrameConfig config) {
+ // Encoder may have generated a keyframe even when not asked for it. Treat
+ // such frame same as requested keyframe, in particular restart the sequence.
+ if (config.IsKeyframe()) {
+ config = NextFrameConfig(/*restart=*/true).front();
+ }
+
+ absl::optional<GenericFrameInfo> frame_info;
+ if (config.Id() < 0 || config.Id() >= int{ABSL_ARRAYSIZE(kDtis)}) {
+ RTC_LOG(LS_ERROR) << "Unexpected config id " << config.Id();
+ return frame_info;
+ }
+ frame_info.emplace();
+ frame_info->temporal_id = config.TemporalId();
+ frame_info->encoder_buffers = config.Buffers();
+ frame_info->decode_target_indications.assign(std::begin(kDtis[config.Id()]),
+ std::end(kDtis[config.Id()]));
+ frame_info->part_of_chain = {config.TemporalId() == 0};
+ frame_info->active_decode_targets = active_decode_targets_;
+ return frame_info;
+}
+
+void ScalabilityStructureL1T2::OnRatesUpdated(
+ const VideoBitrateAllocation& bitrates) {
+ if (bitrates.GetBitrate(0, 0) == 0) {
+ // It is unclear what frame can be produced when base layer is disabled,
+ // so mark all decode targets as inactive to produce no frames.
+ active_decode_targets_.reset();
+ return;
+ }
+ active_decode_targets_.set(0, true);
+ active_decode_targets_.set(1, bitrates.GetBitrate(0, 1) > 0);
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l1t2.h b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l1t2.h
new file mode 100644
index 00000000000..55a9e8bbb0e
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l1t2.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L1T2_H_
+#define MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L1T2_H_
+
+#include <bitset>
+#include <vector>
+
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "common_video/generic_frame_descriptor/generic_frame_info.h"
+#include "modules/video_coding/codecs/av1/scalable_video_controller.h"
+
+namespace webrtc {
+
+class ScalabilityStructureL1T2 : public ScalableVideoController {
+ public:
+ ~ScalabilityStructureL1T2() override;
+
+ StreamLayersConfig StreamConfig() const override;
+ FrameDependencyStructure DependencyStructure() const override;
+
+ std::vector<LayerFrameConfig> NextFrameConfig(bool restart) override;
+ absl::optional<GenericFrameInfo> OnEncodeDone(
+ LayerFrameConfig config) override;
+
+ void OnRatesUpdated(const VideoBitrateAllocation& bitrates) override;
+
+ private:
+ enum FramePattern {
+ kKeyFrame,
+ kDeltaFrameT1,
+ kDeltaFrameT0,
+ };
+
+ FramePattern next_pattern_ = kKeyFrame;
+ std::bitset<32> active_decode_targets_ = 0b11;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L1T2_H_
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l1t3.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l1t3.cc
new file mode 100644
index 00000000000..a04a4262ed3
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l1t3.cc
@@ -0,0 +1,109 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "modules/video_coding/codecs/av1/scalability_structure_l1t3.h"
+
+#include <utility>
+#include <vector>
+
+#include "absl/base/macros.h"
+#include "absl/types/optional.h"
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace {
+
+constexpr auto kNotPresent = DecodeTargetIndication::kNotPresent;
+constexpr auto kDiscardable = DecodeTargetIndication::kDiscardable;
+constexpr auto kSwitch = DecodeTargetIndication::kSwitch;
+
+constexpr DecodeTargetIndication kDtis[3][3] = {
+ {kSwitch, kSwitch, kSwitch}, // T0
+ {kNotPresent, kDiscardable, kSwitch}, // T1
+ {kNotPresent, kNotPresent, kDiscardable}, // T2
+};
+
+} // namespace
+
+ScalabilityStructureL1T3::~ScalabilityStructureL1T3() = default;
+
+ScalableVideoController::StreamLayersConfig
+ScalabilityStructureL1T3::StreamConfig() const {
+ StreamLayersConfig result;
+ result.num_spatial_layers = 1;
+ result.num_temporal_layers = 3;
+ return result;
+}
+
+FrameDependencyStructure ScalabilityStructureL1T3::DependencyStructure() const {
+ FrameDependencyStructure structure;
+ structure.num_decode_targets = 3;
+ structure.num_chains = 1;
+ structure.decode_target_protected_by_chain = {0, 0, 0};
+ structure.templates.resize(5);
+ structure.templates[0].T(0).Dtis("SSS").ChainDiffs({0});
+ structure.templates[1].T(0).Dtis("SSS").ChainDiffs({4}).FrameDiffs({4});
+ structure.templates[2].T(1).Dtis("-DS").ChainDiffs({2}).FrameDiffs({2});
+ structure.templates[3].T(2).Dtis("--D").ChainDiffs({1}).FrameDiffs({1});
+ structure.templates[4].T(2).Dtis("--D").ChainDiffs({3}).FrameDiffs({1});
+ return structure;
+}
+
+std::vector<ScalableVideoController::LayerFrameConfig>
+ScalabilityStructureL1T3::NextFrameConfig(bool restart) {
+ if (restart) {
+ next_pattern_ = kKeyFrame;
+ }
+ std::vector<LayerFrameConfig> config(1);
+
+ switch (next_pattern_) {
+ case kKeyFrame:
+ config[0].T(0).Keyframe().Update(0);
+ next_pattern_ = kDeltaFrameT2A;
+ break;
+ case kDeltaFrameT2A:
+ config[0].T(2).Reference(0);
+ next_pattern_ = kDeltaFrameT1;
+ break;
+ case kDeltaFrameT1:
+ config[0].T(1).Reference(0).Update(1);
+ next_pattern_ = kDeltaFrameT2B;
+ break;
+ case kDeltaFrameT2B:
+ config[0].T(2).Reference(1);
+ next_pattern_ = kDeltaFrameT0;
+ break;
+ case kDeltaFrameT0:
+ config[0].T(0).ReferenceAndUpdate(0);
+ next_pattern_ = kDeltaFrameT2A;
+ break;
+ }
+ return config;
+}
+
+absl::optional<GenericFrameInfo> ScalabilityStructureL1T3::OnEncodeDone(
+ LayerFrameConfig config) {
+ absl::optional<GenericFrameInfo> frame_info;
+ if (config.TemporalId() < 0 ||
+ config.TemporalId() >= int{ABSL_ARRAYSIZE(kDtis)}) {
+ RTC_LOG(LS_ERROR) << "Unexpected temporal id " << config.TemporalId();
+ return frame_info;
+ }
+ frame_info.emplace();
+ frame_info->temporal_id = config.TemporalId();
+ frame_info->encoder_buffers = config.Buffers();
+ frame_info->decode_target_indications.assign(
+ std::begin(kDtis[config.TemporalId()]),
+ std::end(kDtis[config.TemporalId()]));
+ frame_info->part_of_chain = {config.TemporalId() == 0};
+ return frame_info;
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l1t3.h b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l1t3.h
new file mode 100644
index 00000000000..562d0f2a504
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l1t3.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L1T3_H_
+#define MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L1T3_H_
+
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "common_video/generic_frame_descriptor/generic_frame_info.h"
+#include "modules/video_coding/codecs/av1/scalable_video_controller.h"
+
+namespace webrtc {
+
+// T2 0 0 0 0
+// | / | /
+// T1 / 0 / 0 ...
+// |_/ |_/
+// T0 0-------0------
+// Time-> 0 1 2 3 4 5 6 7
+class ScalabilityStructureL1T3 : public ScalableVideoController {
+ public:
+ ~ScalabilityStructureL1T3() override;
+
+ StreamLayersConfig StreamConfig() const override;
+ FrameDependencyStructure DependencyStructure() const override;
+
+ std::vector<LayerFrameConfig> NextFrameConfig(bool restart) override;
+ absl::optional<GenericFrameInfo> OnEncodeDone(
+ LayerFrameConfig config) override;
+
+ private:
+ enum FramePattern {
+ kKeyFrame,
+ kDeltaFrameT2A,
+ kDeltaFrameT1,
+ kDeltaFrameT2B,
+ kDeltaFrameT0,
+ };
+
+ FramePattern next_pattern_ = kKeyFrame;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L1T3_H_
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t1.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t1.cc
new file mode 100644
index 00000000000..c3cee19a294
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t1.cc
@@ -0,0 +1,100 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "modules/video_coding/codecs/av1/scalability_structure_l2t1.h"
+
+#include <utility>
+#include <vector>
+
+#include "absl/base/macros.h"
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace {
+
+constexpr auto kNotPresent = DecodeTargetIndication::kNotPresent;
+constexpr auto kSwitch = DecodeTargetIndication::kSwitch;
+constexpr auto kRequired = DecodeTargetIndication::kRequired;
+
+constexpr DecodeTargetIndication kDtis[4][2] = {
+ {kSwitch, kSwitch}, // Key, S0
+ {kNotPresent, kSwitch}, // Key, S1
+ {kSwitch, kRequired}, // Delta, S0
+ {kNotPresent, kRequired}, // Delta, S1
+};
+
+} // namespace
+
+ScalabilityStructureL2T1::~ScalabilityStructureL2T1() = default;
+
+ScalableVideoController::StreamLayersConfig
+ScalabilityStructureL2T1::StreamConfig() const {
+ StreamLayersConfig result;
+ result.num_spatial_layers = 2;
+ result.num_temporal_layers = 1;
+ return result;
+}
+
+FrameDependencyStructure ScalabilityStructureL2T1::DependencyStructure() const {
+ FrameDependencyStructure structure;
+ structure.num_decode_targets = 2;
+ structure.num_chains = 2;
+ structure.decode_target_protected_by_chain = {0, 1};
+ structure.templates.resize(4);
+ structure.templates[0].S(0).Dtis("SR").ChainDiffs({2, 1}).FrameDiffs({2});
+ structure.templates[1].S(0).Dtis("SS").ChainDiffs({0, 0});
+ structure.templates[2].S(1).Dtis("-R").ChainDiffs({1, 1}).FrameDiffs({2, 1});
+ structure.templates[3].S(1).Dtis("-S").ChainDiffs({1, 1}).FrameDiffs({1});
+ return structure;
+}
+
+ScalableVideoController::LayerFrameConfig
+ScalabilityStructureL2T1::KeyFrameConfig() const {
+ return LayerFrameConfig().Id(0).S(0).Keyframe().Update(0);
+}
+
+std::vector<ScalableVideoController::LayerFrameConfig>
+ScalabilityStructureL2T1::NextFrameConfig(bool restart) {
+ std::vector<LayerFrameConfig> result(2);
+ // Buffer0 keeps latest S0 frame, Buffer1 keeps latest S1 frame.
+ if (restart || keyframe_) {
+ result[0] = KeyFrameConfig();
+ result[1].Id(1).S(1).Reference(0).Update(1);
+ keyframe_ = false;
+ } else {
+ result[0].Id(2).S(0).ReferenceAndUpdate(0);
+ result[1].Id(3).S(1).Reference(0).ReferenceAndUpdate(1);
+ }
+ return result;
+}
+
+absl::optional<GenericFrameInfo> ScalabilityStructureL2T1::OnEncodeDone(
+ LayerFrameConfig config) {
+ absl::optional<GenericFrameInfo> frame_info;
+ if (config.IsKeyframe()) {
+ config = KeyFrameConfig();
+ }
+
+ if (config.Id() < 0 || config.Id() >= int{ABSL_ARRAYSIZE(kDtis)}) {
+ RTC_LOG(LS_ERROR) << "Unexpected config id " << config.Id();
+ return frame_info;
+ }
+ frame_info.emplace();
+ frame_info->spatial_id = config.SpatialId();
+ frame_info->temporal_id = config.TemporalId();
+ frame_info->encoder_buffers = std::move(config.Buffers());
+ frame_info->decode_target_indications.assign(std::begin(kDtis[config.Id()]),
+ std::end(kDtis[config.Id()]));
+ frame_info->part_of_chain = {config.SpatialId() == 0, true};
+ return frame_info;
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t1.h b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t1.h
new file mode 100644
index 00000000000..0f536026046
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t1.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T1_H_
+#define MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T1_H_
+
+#include <vector>
+
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "common_video/generic_frame_descriptor/generic_frame_info.h"
+#include "modules/video_coding/codecs/av1/scalable_video_controller.h"
+
+namespace webrtc {
+
+// S1 0--0--0-
+// | | | ...
+// S0 0--0--0-
+class ScalabilityStructureL2T1 : public ScalableVideoController {
+ public:
+ ~ScalabilityStructureL2T1() override;
+
+ StreamLayersConfig StreamConfig() const override;
+ FrameDependencyStructure DependencyStructure() const override;
+
+ std::vector<LayerFrameConfig> NextFrameConfig(bool restart) override;
+ absl::optional<GenericFrameInfo> OnEncodeDone(
+ LayerFrameConfig config) override;
+
+ private:
+ LayerFrameConfig KeyFrameConfig() const;
+
+ bool keyframe_ = true;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T1_H_
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t1_key.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t1_key.cc
new file mode 100644
index 00000000000..7e273d1fc62
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t1_key.cc
@@ -0,0 +1,105 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "modules/video_coding/codecs/av1/scalability_structure_l2t1_key.h"
+
+#include <utility>
+#include <vector>
+
+#include "absl/base/macros.h"
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace {
+
+constexpr auto kNotPresent = DecodeTargetIndication::kNotPresent;
+constexpr auto kSwitch = DecodeTargetIndication::kSwitch;
+
+constexpr DecodeTargetIndication kDtis[3][2] = {
+ {kSwitch, kSwitch}, // Key, S0
+ {kSwitch, kNotPresent}, // Delta, S0
+ {kNotPresent, kSwitch}, // Key and Delta, S1
+};
+
+} // namespace
+
+ScalabilityStructureL2T1Key::~ScalabilityStructureL2T1Key() = default;
+
+ScalableVideoController::StreamLayersConfig
+ScalabilityStructureL2T1Key::StreamConfig() const {
+ StreamLayersConfig result;
+ result.num_spatial_layers = 2;
+ result.num_temporal_layers = 1;
+ return result;
+}
+
+FrameDependencyStructure ScalabilityStructureL2T1Key::DependencyStructure()
+ const {
+ FrameDependencyStructure structure;
+ structure.num_decode_targets = 2;
+ structure.num_chains = 2;
+ structure.decode_target_protected_by_chain = {0, 1};
+ structure.templates.resize(4);
+ structure.templates[0].S(0).Dtis("S-").ChainDiffs({2, 1}).FrameDiffs({2});
+ structure.templates[1].S(0).Dtis("SS").ChainDiffs({0, 0});
+ structure.templates[2].S(1).Dtis("-S").ChainDiffs({1, 2}).FrameDiffs({2});
+ structure.templates[3].S(1).Dtis("-S").ChainDiffs({1, 1}).FrameDiffs({1});
+ return structure;
+}
+
+ScalableVideoController::LayerFrameConfig
+ScalabilityStructureL2T1Key::KeyFrameConfig() const {
+ return LayerFrameConfig().Id(0).S(0).Keyframe().Update(0);
+}
+
+std::vector<ScalableVideoController::LayerFrameConfig>
+ScalabilityStructureL2T1Key::NextFrameConfig(bool restart) {
+ std::vector<LayerFrameConfig> result(2);
+
+ // Buffer0 keeps latest S0T0 frame, Buffer1 keeps latest S1T0 frame.
+ if (restart || keyframe_) {
+ result[0] = KeyFrameConfig();
+ result[1].Id(2).S(1).Reference(0).Update(1);
+ keyframe_ = false;
+ } else {
+ result[0].Id(1).S(0).ReferenceAndUpdate(0);
+ result[1].Id(2).S(1).ReferenceAndUpdate(1);
+ }
+ return result;
+}
+
+absl::optional<GenericFrameInfo> ScalabilityStructureL2T1Key::OnEncodeDone(
+ LayerFrameConfig config) {
+ absl::optional<GenericFrameInfo> frame_info;
+ if (config.IsKeyframe()) {
+ config = KeyFrameConfig();
+ }
+
+ if (config.Id() < 0 || config.Id() >= int{ABSL_ARRAYSIZE(kDtis)}) {
+ RTC_LOG(LS_ERROR) << "Unexpected config id " << config.Id();
+ return frame_info;
+ }
+ frame_info.emplace();
+ frame_info->spatial_id = config.SpatialId();
+ frame_info->temporal_id = config.TemporalId();
+ frame_info->encoder_buffers = std::move(config.Buffers());
+ frame_info->decode_target_indications.assign(std::begin(kDtis[config.Id()]),
+ std::end(kDtis[config.Id()]));
+ if (config.IsKeyframe()) {
+ frame_info->part_of_chain = {true, true};
+ } else {
+ frame_info->part_of_chain = {config.SpatialId() == 0,
+ config.SpatialId() == 1};
+ }
+ return frame_info;
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t1_key.h b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t1_key.h
new file mode 100644
index 00000000000..c1d8c8947fb
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t1_key.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T1_KEY_H_
+#define MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T1_KEY_H_
+
+#include <vector>
+
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "common_video/generic_frame_descriptor/generic_frame_info.h"
+#include "modules/video_coding/codecs/av1/scalable_video_controller.h"
+
+namespace webrtc {
+
+// S1 0--0--0-
+// | ...
+// S0 0--0--0-
+class ScalabilityStructureL2T1Key : public ScalableVideoController {
+ public:
+ ~ScalabilityStructureL2T1Key() override;
+
+ StreamLayersConfig StreamConfig() const override;
+ FrameDependencyStructure DependencyStructure() const override;
+
+ std::vector<LayerFrameConfig> NextFrameConfig(bool restart) override;
+ absl::optional<GenericFrameInfo> OnEncodeDone(
+ LayerFrameConfig config) override;
+
+ private:
+ LayerFrameConfig KeyFrameConfig() const;
+
+ bool keyframe_ = true;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T1_KEY_H_
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2.cc
new file mode 100644
index 00000000000..5db2fadb5f5
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2.cc
@@ -0,0 +1,126 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "modules/video_coding/codecs/av1/scalability_structure_l2t2.h"
+
+#include <utility>
+#include <vector>
+
+#include "absl/base/macros.h"
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace {
+
+constexpr auto kNotPresent = DecodeTargetIndication::kNotPresent;
+constexpr auto kDiscardable = DecodeTargetIndication::kDiscardable;
+constexpr auto kSwitch = DecodeTargetIndication::kSwitch;
+constexpr auto kRequired = DecodeTargetIndication::kRequired;
+
+// decode targets: S0T0, S0T1, S1T0, S1T1
+constexpr DecodeTargetIndication kDtis[6][4] = {
+ {kSwitch, kSwitch, kSwitch, kSwitch}, // kKey, S0
+ {kNotPresent, kNotPresent, kSwitch, kSwitch}, // kKey, S1
+ {kNotPresent, kDiscardable, kNotPresent, kRequired}, // kDeltaT1, S0
+ {kNotPresent, kNotPresent, kNotPresent, kDiscardable}, // kDeltaT1, S1
+ {kSwitch, kSwitch, kRequired, kRequired}, // kDeltaT0, S0
+ {kNotPresent, kNotPresent, kSwitch, kSwitch}, // kDeltaT0, S1
+};
+
+} // namespace
+
+ScalabilityStructureL2T2::~ScalabilityStructureL2T2() = default;
+
+ScalableVideoController::StreamLayersConfig
+ScalabilityStructureL2T2::StreamConfig() const {
+ StreamLayersConfig result;
+ result.num_spatial_layers = 2;
+ result.num_temporal_layers = 2;
+ return result;
+}
+
+FrameDependencyStructure ScalabilityStructureL2T2::DependencyStructure() const {
+ FrameDependencyStructure structure;
+ structure.num_decode_targets = 4;
+ structure.num_chains = 2;
+ structure.decode_target_protected_by_chain = {0, 0, 1, 1};
+ structure.templates.resize(6);
+ auto& templates = structure.templates;
+ templates[0].S(0).T(0).Dtis("SSSS").ChainDiffs({0, 0});
+ templates[1].S(0).T(0).Dtis("SSRR").ChainDiffs({4, 3}).FrameDiffs({4});
+ templates[2].S(0).T(1).Dtis("-D-R").ChainDiffs({2, 1}).FrameDiffs({2});
+ templates[3].S(1).T(0).Dtis("--SS").ChainDiffs({1, 1}).FrameDiffs({1});
+ templates[4].S(1).T(0).Dtis("--SS").ChainDiffs({1, 1}).FrameDiffs({4, 1});
+ templates[5].S(1).T(1).Dtis("---D").ChainDiffs({3, 2}).FrameDiffs({2, 1});
+ return structure;
+}
+
+ScalableVideoController::LayerFrameConfig
+ScalabilityStructureL2T2::KeyFrameConfig() const {
+ return LayerFrameConfig().Id(0).Keyframe().S(0).T(0).Update(0);
+}
+
+std::vector<ScalableVideoController::LayerFrameConfig>
+ScalabilityStructureL2T2::NextFrameConfig(bool restart) {
+ if (restart) {
+ next_pattern_ = kKey;
+ }
+ std::vector<LayerFrameConfig> result(2);
+
+ // Buffer0 keeps latest S0T0 frame,
+ // Buffer1 keeps latest S1T0 frame.
+ // Buffer2 keeps latest S0T1 frame.
+ switch (next_pattern_) {
+ case kKey:
+ result[0] = KeyFrameConfig();
+ result[1].Id(1).S(1).T(0).Reference(0).Update(1);
+ next_pattern_ = kDeltaT1;
+ break;
+ case kDeltaT1:
+ result[0].Id(2).S(0).T(1).Reference(0).Update(2);
+ result[1].Id(3).S(1).T(1).Reference(2).Reference(1);
+ next_pattern_ = kDeltaT0;
+ break;
+ case kDeltaT0:
+ result[0].Id(4).S(0).T(0).ReferenceAndUpdate(0);
+ result[1].Id(5).S(1).T(0).Reference(0).ReferenceAndUpdate(1);
+ next_pattern_ = kDeltaT1;
+ break;
+ }
+ return result;
+}
+
+absl::optional<GenericFrameInfo> ScalabilityStructureL2T2::OnEncodeDone(
+ LayerFrameConfig config) {
+ if (config.IsKeyframe()) {
+ config = KeyFrameConfig();
+ }
+
+ absl::optional<GenericFrameInfo> frame_info;
+ if (config.Id() < 0 || config.Id() >= int{ABSL_ARRAYSIZE(kDtis)}) {
+ RTC_LOG(LS_ERROR) << "Unexpected config id " << config.Id();
+ return frame_info;
+ }
+ frame_info.emplace();
+ frame_info->spatial_id = config.SpatialId();
+ frame_info->temporal_id = config.TemporalId();
+ frame_info->encoder_buffers = config.Buffers();
+ frame_info->decode_target_indications.assign(std::begin(kDtis[config.Id()]),
+ std::end(kDtis[config.Id()]));
+ if (config.TemporalId() == 0) {
+ frame_info->part_of_chain = {config.SpatialId() == 0, true};
+ } else {
+ frame_info->part_of_chain = {false, false};
+ }
+ return frame_info;
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2.h b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2.h
new file mode 100644
index 00000000000..dbf5036c1f6
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T2_H_
+#define MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T2_H_
+
+#include <vector>
+
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "common_video/generic_frame_descriptor/generic_frame_info.h"
+#include "modules/video_coding/codecs/av1/scalable_video_controller.h"
+
+namespace webrtc {
+
+// S1T1 0 0
+// /| /| /
+// S1T0 0-+-0-+-0
+// | | | | | ...
+// S0T1 | 0 | 0 |
+// |/ |/ |/
+// S0T0 0---0---0--
+// Time-> 0 1 2 3 4
+class ScalabilityStructureL2T2 : public ScalableVideoController {
+ public:
+ ~ScalabilityStructureL2T2() override;
+
+ StreamLayersConfig StreamConfig() const override;
+ FrameDependencyStructure DependencyStructure() const override;
+
+ std::vector<LayerFrameConfig> NextFrameConfig(bool restart) override;
+ absl::optional<GenericFrameInfo> OnEncodeDone(
+ LayerFrameConfig config) override;
+
+ private:
+ enum FramePattern {
+ kKey,
+ kDeltaT1,
+ kDeltaT0,
+ };
+ LayerFrameConfig KeyFrameConfig() const;
+
+ FramePattern next_pattern_ = kKey;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T2_H_
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2_key.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2_key.cc
new file mode 100644
index 00000000000..7409070d168
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2_key.cc
@@ -0,0 +1,128 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "modules/video_coding/codecs/av1/scalability_structure_l2t2_key.h"
+
+#include <utility>
+#include <vector>
+
+#include "absl/base/macros.h"
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace {
+
+constexpr auto kNotPresent = DecodeTargetIndication::kNotPresent;
+constexpr auto kDiscardable = DecodeTargetIndication::kDiscardable;
+constexpr auto kSwitch = DecodeTargetIndication::kSwitch;
+
+// decode targets: S0T0, S0T1, S1T0, S1T1
+constexpr DecodeTargetIndication kDtis[6][4] = {
+ {kSwitch, kSwitch, kSwitch, kSwitch}, // kKey, S0
+ {kNotPresent, kNotPresent, kSwitch, kSwitch}, // kKey, S1
+ {kNotPresent, kDiscardable, kNotPresent, kNotPresent}, // kDeltaT1, S0
+ {kNotPresent, kNotPresent, kNotPresent, kDiscardable}, // kDeltaT1, S1
+ {kSwitch, kSwitch, kNotPresent, kNotPresent}, // kDeltaT0, S0
+ {kNotPresent, kNotPresent, kSwitch, kSwitch}, // kDeltaT0, S1
+};
+
+} // namespace
+
+ScalabilityStructureL2T2Key::~ScalabilityStructureL2T2Key() = default;
+
+ScalableVideoController::StreamLayersConfig
+ScalabilityStructureL2T2Key::StreamConfig() const {
+ StreamLayersConfig result;
+ result.num_spatial_layers = 2;
+ result.num_temporal_layers = 2;
+ return result;
+}
+
+FrameDependencyStructure ScalabilityStructureL2T2Key::DependencyStructure()
+ const {
+ FrameDependencyStructure structure;
+ structure.num_decode_targets = 4;
+ structure.num_chains = 2;
+ structure.decode_target_protected_by_chain = {0, 0, 1, 1};
+ structure.templates.resize(6);
+ auto& templates = structure.templates;
+ templates[0].S(0).T(0).Dtis("SSSS").ChainDiffs({0, 0});
+ templates[1].S(0).T(0).Dtis("SS--").ChainDiffs({4, 3}).FrameDiffs({4});
+ templates[2].S(0).T(1).Dtis("-D--").ChainDiffs({2, 1}).FrameDiffs({2});
+ templates[3].S(1).T(0).Dtis("--SS").ChainDiffs({1, 1}).FrameDiffs({1});
+ templates[4].S(1).T(0).Dtis("--SS").ChainDiffs({1, 4}).FrameDiffs({4});
+ templates[5].S(1).T(1).Dtis("---D").ChainDiffs({3, 2}).FrameDiffs({2});
+ return structure;
+}
+
+ScalableVideoController::LayerFrameConfig
+ScalabilityStructureL2T2Key::KeyFrameConfig() const {
+ return LayerFrameConfig().Id(0).Keyframe().S(0).T(0).Update(0);
+}
+
+std::vector<ScalableVideoController::LayerFrameConfig>
+ScalabilityStructureL2T2Key::NextFrameConfig(bool restart) {
+ if (restart) {
+ next_pattern_ = kKey;
+ }
+ std::vector<LayerFrameConfig> result(2);
+
+ // Buffer0 keeps latest S0T0 frame,
+ // Buffer1 keeps latest S1T0 frame.
+ switch (next_pattern_) {
+ case kKey:
+ result[0] = KeyFrameConfig();
+ result[1].Id(1).S(1).T(0).Reference(0).Update(1);
+ next_pattern_ = kDeltaT1;
+ break;
+ case kDeltaT1:
+ result[0].Id(2).S(0).T(1).Reference(0);
+ result[1].Id(3).S(1).T(1).Reference(1);
+ next_pattern_ = kDeltaT0;
+ break;
+ case kDeltaT0:
+ result[0].Id(4).S(0).T(0).ReferenceAndUpdate(0);
+ result[1].Id(5).S(1).T(0).ReferenceAndUpdate(1);
+ next_pattern_ = kDeltaT1;
+ break;
+ }
+ return result;
+}
+
+absl::optional<GenericFrameInfo> ScalabilityStructureL2T2Key::OnEncodeDone(
+ LayerFrameConfig config) {
+ if (config.IsKeyframe()) {
+ config = KeyFrameConfig();
+ }
+
+ absl::optional<GenericFrameInfo> frame_info;
+ if (config.Id() < 0 || config.Id() >= int{ABSL_ARRAYSIZE(kDtis)}) {
+ RTC_LOG(LS_ERROR) << "Unexpected config id " << config.Id();
+ return frame_info;
+ }
+ frame_info.emplace();
+ frame_info->spatial_id = config.SpatialId();
+ frame_info->temporal_id = config.TemporalId();
+ frame_info->encoder_buffers = config.Buffers();
+ frame_info->decode_target_indications.assign(std::begin(kDtis[config.Id()]),
+ std::end(kDtis[config.Id()]));
+ if (config.IsKeyframe()) {
+ frame_info->part_of_chain = {true, true};
+ } else if (config.TemporalId() == 0) {
+ frame_info->part_of_chain = {config.SpatialId() == 0,
+ config.SpatialId() == 1};
+ } else {
+ frame_info->part_of_chain = {false, false};
+ }
+ return frame_info;
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2_key.h b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2_key.h
new file mode 100644
index 00000000000..9adfcbcd585
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2_key.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T2_KEY_H_
+#define MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T2_KEY_H_
+
+#include <vector>
+
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "common_video/generic_frame_descriptor/generic_frame_info.h"
+#include "modules/video_coding/codecs/av1/scalable_video_controller.h"
+
+namespace webrtc {
+
+// S1T1 0 0
+// / / /
+// S1T0 0---0---0
+// | ...
+// S0T1 | 0 0
+// |/ / /
+// S0T0 0---0---0
+// Time-> 0 1 2 3 4
+class ScalabilityStructureL2T2Key : public ScalableVideoController {
+ public:
+ ~ScalabilityStructureL2T2Key() override;
+
+ StreamLayersConfig StreamConfig() const override;
+ FrameDependencyStructure DependencyStructure() const override;
+
+ std::vector<LayerFrameConfig> NextFrameConfig(bool restart) override;
+ absl::optional<GenericFrameInfo> OnEncodeDone(
+ LayerFrameConfig config) override;
+
+ private:
+ enum FramePattern {
+ kKey,
+ kDeltaT1,
+ kDeltaT0,
+ };
+ LayerFrameConfig KeyFrameConfig() const;
+
+ FramePattern next_pattern_ = kKey;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T2_KEY_H_
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2_key_shift.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2_key_shift.cc
new file mode 100644
index 00000000000..6b79332de84
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2_key_shift.cc
@@ -0,0 +1,128 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "modules/video_coding/codecs/av1/scalability_structure_l2t2_key_shift.h"
+
+#include <utility>
+#include <vector>
+
+#include "absl/base/macros.h"
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace {
+
+constexpr auto kNotPresent = DecodeTargetIndication::kNotPresent;
+constexpr auto kDiscardable = DecodeTargetIndication::kDiscardable;
+constexpr auto kSwitch = DecodeTargetIndication::kSwitch;
+
+constexpr DecodeTargetIndication kDtis[6][4] = {
+ {kSwitch, kSwitch, kSwitch, kSwitch}, // kKey, S0T0
+ {kNotPresent, kNotPresent, kSwitch, kSwitch}, // kKey, S1T0
+ {kSwitch, kSwitch, kNotPresent, kNotPresent}, // kDelta0, S0T0
+ {kNotPresent, kNotPresent, kNotPresent, kDiscardable}, // kDelta0, S1T1
+ {kNotPresent, kDiscardable, kNotPresent, kNotPresent}, // kDelta1, S0T1
+ {kNotPresent, kNotPresent, kSwitch, kSwitch}, // kDelta1, S1T0
+};
+
+} // namespace
+
+ScalabilityStructureL2T2KeyShift::~ScalabilityStructureL2T2KeyShift() = default;
+
+ScalableVideoController::StreamLayersConfig
+ScalabilityStructureL2T2KeyShift::StreamConfig() const {
+ StreamLayersConfig result;
+ result.num_spatial_layers = 2;
+ result.num_temporal_layers = 2;
+ return result;
+}
+
+FrameDependencyStructure ScalabilityStructureL2T2KeyShift::DependencyStructure()
+ const {
+ FrameDependencyStructure structure;
+ structure.num_decode_targets = 4;
+ structure.num_chains = 2;
+ structure.decode_target_protected_by_chain = {0, 0, 1, 1};
+ structure.templates.resize(7);
+ auto& templates = structure.templates;
+ templates[0].S(0).T(0).Dtis("SSSS").ChainDiffs({0, 0});
+ templates[1].S(0).T(0).Dtis("SS--").ChainDiffs({2, 1}).FrameDiffs({2});
+ templates[2].S(0).T(0).Dtis("SS--").ChainDiffs({4, 1}).FrameDiffs({4});
+ templates[3].S(0).T(1).Dtis("-D--").ChainDiffs({2, 3}).FrameDiffs({2});
+ templates[4].S(1).T(0).Dtis("--SS").ChainDiffs({1, 1}).FrameDiffs({1});
+ templates[5].S(1).T(0).Dtis("--SS").ChainDiffs({3, 4}).FrameDiffs({4});
+ templates[6].S(1).T(1).Dtis("---D").ChainDiffs({1, 2}).FrameDiffs({2});
+ return structure;
+}
+
+ScalableVideoController::LayerFrameConfig
+ScalabilityStructureL2T2KeyShift::KeyFrameConfig() const {
+ return LayerFrameConfig().Id(0).Keyframe().S(0).T(0).Update(0);
+}
+
+std::vector<ScalableVideoController::LayerFrameConfig>
+ScalabilityStructureL2T2KeyShift::NextFrameConfig(bool restart) {
+ if (restart) {
+ next_pattern_ = kKey;
+ }
+ std::vector<LayerFrameConfig> result(2);
+
+ // Buffer0 keeps latest S0T0 frame,
+ // Buffer1 keeps latest S1T0 frame.
+ switch (next_pattern_) {
+ case kKey:
+ result[0] = KeyFrameConfig();
+ result[1].Id(1).S(1).T(0).Reference(0).Update(1);
+ next_pattern_ = kDelta0;
+ break;
+ case kDelta0:
+ result[0].Id(2).S(0).T(0).ReferenceAndUpdate(0);
+ result[1].Id(3).S(1).T(1).Reference(1);
+ next_pattern_ = kDelta1;
+ break;
+ case kDelta1:
+ result[0].Id(4).S(0).T(1).Reference(0);
+ result[1].Id(5).S(1).T(0).ReferenceAndUpdate(1);
+ next_pattern_ = kDelta0;
+ break;
+ }
+ return result;
+}
+
+absl::optional<GenericFrameInfo> ScalabilityStructureL2T2KeyShift::OnEncodeDone(
+ LayerFrameConfig config) {
+ if (config.IsKeyframe()) {
+ config = KeyFrameConfig();
+ }
+
+ absl::optional<GenericFrameInfo> frame_info;
+ if (config.Id() < 0 || config.Id() >= int{ABSL_ARRAYSIZE(kDtis)}) {
+ RTC_LOG(LS_ERROR) << "Unexpected config id " << config.Id();
+ return frame_info;
+ }
+ frame_info.emplace();
+ frame_info->spatial_id = config.SpatialId();
+ frame_info->temporal_id = config.TemporalId();
+ frame_info->encoder_buffers = config.Buffers();
+ frame_info->decode_target_indications.assign(std::begin(kDtis[config.Id()]),
+ std::end(kDtis[config.Id()]));
+ if (config.IsKeyframe()) {
+ frame_info->part_of_chain = {true, true};
+ } else if (config.TemporalId() == 0) {
+ frame_info->part_of_chain = {config.SpatialId() == 0,
+ config.SpatialId() == 1};
+ } else {
+ frame_info->part_of_chain = {false, false};
+ }
+ return frame_info;
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2_key_shift.h b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2_key_shift.h
new file mode 100644
index 00000000000..1b18bd7c173
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l2t2_key_shift.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T2_KEY_SHIFT_H_
+#define MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T2_KEY_SHIFT_H_
+
+#include <vector>
+
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "common_video/generic_frame_descriptor/generic_frame_info.h"
+#include "modules/video_coding/codecs/av1/scalable_video_controller.h"
+
+namespace webrtc {
+
+// S1T1 0 0
+// / / /
+// S1T0 0---0---0
+// | ...
+// S0T1 | 0 0
+// | / /
+// S0T0 0-0---0--
+// Time-> 0 1 2 3 4
+class ScalabilityStructureL2T2KeyShift : public ScalableVideoController {
+ public:
+ ~ScalabilityStructureL2T2KeyShift() override;
+
+ StreamLayersConfig StreamConfig() const override;
+ FrameDependencyStructure DependencyStructure() const override;
+
+ std::vector<LayerFrameConfig> NextFrameConfig(bool restart) override;
+ absl::optional<GenericFrameInfo> OnEncodeDone(
+ LayerFrameConfig config) override;
+
+ private:
+ enum FramePattern {
+ kKey,
+ kDelta0,
+ kDelta1,
+ };
+ LayerFrameConfig KeyFrameConfig() const;
+
+ FramePattern next_pattern_ = kKey;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L2T2_KEY_SHIFT_H_
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l3t1.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l3t1.cc
new file mode 100644
index 00000000000..1dd729c54dd
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l3t1.cc
@@ -0,0 +1,108 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "modules/video_coding/codecs/av1/scalability_structure_l3t1.h"
+
+#include <utility>
+#include <vector>
+
+#include "absl/base/macros.h"
+#include "absl/types/optional.h"
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace {
+
+constexpr auto kNotPresent = DecodeTargetIndication::kNotPresent;
+constexpr auto kSwitch = DecodeTargetIndication::kSwitch;
+constexpr auto kRequired = DecodeTargetIndication::kRequired;
+
+constexpr DecodeTargetIndication kDtis[5][3] = {
+ {kSwitch, kSwitch, kSwitch}, // Key, S0
+ {kNotPresent, kSwitch, kSwitch}, // Key, S1
+ {kNotPresent, kNotPresent, kSwitch}, // Key and Delta, S2
+ {kSwitch, kRequired, kRequired}, // Delta, S0
+ {kNotPresent, kSwitch, kRequired}, // Delta, S1
+};
+
+} // namespace
+
+ScalabilityStructureL3T1::~ScalabilityStructureL3T1() = default;
+
+ScalableVideoController::StreamLayersConfig
+ScalabilityStructureL3T1::StreamConfig() const {
+ StreamLayersConfig result;
+ result.num_spatial_layers = 3;
+ result.num_temporal_layers = 1;
+ return result;
+}
+
+FrameDependencyStructure ScalabilityStructureL3T1::DependencyStructure() const {
+ FrameDependencyStructure structure;
+ structure.num_decode_targets = 3;
+ structure.num_chains = 3;
+ structure.decode_target_protected_by_chain = {0, 1, 2};
+ auto& templates = structure.templates;
+ templates.resize(6);
+ templates[0].S(0).Dtis("SRR").ChainDiffs({3, 2, 1}).FrameDiffs({3});
+ templates[1].S(0).Dtis("SSS").ChainDiffs({0, 0, 0});
+ templates[2].S(1).Dtis("-SR").ChainDiffs({1, 1, 1}).FrameDiffs({3, 1});
+ templates[3].S(1).Dtis("-SS").ChainDiffs({1, 1, 1}).FrameDiffs({1});
+ templates[4].S(2).Dtis("--S").ChainDiffs({2, 1, 1}).FrameDiffs({3, 1});
+ templates[5].S(2).Dtis("--S").ChainDiffs({2, 1, 1}).FrameDiffs({1});
+ return structure;
+}
+
+std::vector<ScalableVideoController::LayerFrameConfig>
+ScalabilityStructureL3T1::NextFrameConfig(bool restart) {
+ std::vector<LayerFrameConfig> config(3);
+
+ // Buffer i keeps latest frame for spatial layer i
+ if (restart || keyframe_) {
+ config[0].Id(0).S(0).Keyframe().Update(0);
+ config[1].Id(1).S(1).Update(1).Reference(0);
+ config[2].Id(2).S(2).Update(2).Reference(1);
+ keyframe_ = false;
+ } else {
+ config[0].Id(3).S(0).ReferenceAndUpdate(0);
+ config[1].Id(4).S(1).ReferenceAndUpdate(1).Reference(0);
+ config[2].Id(2).S(2).ReferenceAndUpdate(2).Reference(1);
+ }
+ return config;
+}
+
+absl::optional<GenericFrameInfo> ScalabilityStructureL3T1::OnEncodeDone(
+ LayerFrameConfig config) {
+ absl::optional<GenericFrameInfo> frame_info;
+ if (config.IsKeyframe() && config.Id() != 0) {
+ // Encoder generated a key frame without asking to.
+ if (config.SpatialId() > 0) {
+ RTC_LOG(LS_WARNING) << "Unexpected spatial id " << config.SpatialId()
+ << " for key frame.";
+ }
+ config = LayerFrameConfig().Id(0).S(0).Keyframe().Update(0);
+ }
+
+ if (config.Id() < 0 || config.Id() >= int{ABSL_ARRAYSIZE(kDtis)}) {
+ RTC_LOG(LS_ERROR) << "Unexpected config id " << config.Id();
+ return frame_info;
+ }
+ frame_info.emplace();
+ frame_info->spatial_id = config.SpatialId();
+ frame_info->temporal_id = config.TemporalId();
+ frame_info->encoder_buffers = config.Buffers();
+ frame_info->decode_target_indications.assign(std::begin(kDtis[config.Id()]),
+ std::end(kDtis[config.Id()]));
+ frame_info->part_of_chain = {config.SpatialId() == 0, config.SpatialId() <= 1,
+ true};
+ return frame_info;
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l3t1.h b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l3t1.h
new file mode 100644
index 00000000000..404860d08fa
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l3t1.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L3T1_H_
+#define MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L3T1_H_
+
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "common_video/generic_frame_descriptor/generic_frame_info.h"
+#include "modules/video_coding/codecs/av1/scalable_video_controller.h"
+
+namespace webrtc {
+
+// S2 0-0-0-
+// | | |
+// S1 0-0-0-...
+// | | |
+// S0 0-0-0-
+// Time-> 0 1 2
+class ScalabilityStructureL3T1 : public ScalableVideoController {
+ public:
+ ~ScalabilityStructureL3T1() override;
+
+ StreamLayersConfig StreamConfig() const override;
+ FrameDependencyStructure DependencyStructure() const override;
+
+ std::vector<LayerFrameConfig> NextFrameConfig(bool restart) override;
+ absl::optional<GenericFrameInfo> OnEncodeDone(
+ LayerFrameConfig config) override;
+
+ private:
+ bool keyframe_ = true;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L3T1_H_
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l3t3.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l3t3.cc
new file mode 100644
index 00000000000..6ac75da4509
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l3t3.cc
@@ -0,0 +1,220 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "modules/video_coding/codecs/av1/scalability_structure_l3t3.h"
+
+#include <utility>
+#include <vector>
+
+#include "absl/base/macros.h"
+#include "absl/types/optional.h"
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace {
+
+constexpr auto kNotPresent = DecodeTargetIndication::kNotPresent;
+constexpr auto kDiscardable = DecodeTargetIndication::kDiscardable;
+constexpr auto kSwitch = DecodeTargetIndication::kSwitch;
+constexpr auto kRequired = DecodeTargetIndication::kRequired;
+
+constexpr DecodeTargetIndication kDtis[12][9] = {
+ // Key, S0
+ {kSwitch, kSwitch, kSwitch, // S0
+ kSwitch, kSwitch, kSwitch, // S1
+ kSwitch, kSwitch, kSwitch}, // S2
+ // Key, S1
+ {kNotPresent, kNotPresent, kNotPresent, // S0
+ kSwitch, kSwitch, kSwitch, // S1
+ kSwitch, kSwitch, kSwitch}, // S2
+ // Key, S2
+ {kNotPresent, kNotPresent, kNotPresent, // S0
+ kNotPresent, kNotPresent, kNotPresent, // S1
+ kSwitch, kSwitch, kSwitch}, // S2
+ // Delta, S0T2
+ {kNotPresent, kNotPresent, kDiscardable, // S0
+ kNotPresent, kNotPresent, kRequired, // S1
+ kNotPresent, kNotPresent, kRequired}, // S2
+ // Delta, S1T2
+ {kNotPresent, kNotPresent, kNotPresent, // S0
+ kNotPresent, kNotPresent, kDiscardable, // S1
+ kNotPresent, kNotPresent, kRequired}, // S2
+ // Delta, S2T2
+ {kNotPresent, kNotPresent, kNotPresent, // S0
+ kNotPresent, kNotPresent, kNotPresent, // S1
+ kNotPresent, kNotPresent, kDiscardable}, // S2
+ // Delta, S0T1
+ {kNotPresent, kDiscardable, kSwitch, // S0
+ kNotPresent, kRequired, kRequired, // S1
+ kNotPresent, kRequired, kRequired}, // S2
+ // Delta, S1T1
+ {kNotPresent, kNotPresent, kNotPresent, // S0
+ kNotPresent, kDiscardable, kSwitch, // S1
+ kNotPresent, kRequired, kRequired}, // S2
+ // Delta, S2T1
+ {kNotPresent, kNotPresent, kNotPresent, // S0
+ kNotPresent, kNotPresent, kNotPresent, // S1
+ kNotPresent, kDiscardable, kSwitch}, // S2
+ // Delta, S0T0
+ {kSwitch, kSwitch, kSwitch, // S0
+ kRequired, kRequired, kRequired, // S1
+ kRequired, kRequired, kRequired}, // S2
+ // Delta, S1T0
+ {kNotPresent, kNotPresent, kNotPresent, // S0
+ kSwitch, kSwitch, kSwitch, // S1
+ kRequired, kRequired, kRequired}, // S2
+ // Delta, S2T0
+ {kNotPresent, kNotPresent, kNotPresent, // S0
+ kNotPresent, kNotPresent, kNotPresent, // S1
+ kSwitch, kSwitch, kSwitch}, // S2
+};
+
+} // namespace
+
+ScalabilityStructureL3T3::~ScalabilityStructureL3T3() = default;
+
+ScalableVideoController::StreamLayersConfig
+ScalabilityStructureL3T3::StreamConfig() const {
+ StreamLayersConfig result;
+ result.num_spatial_layers = 3;
+ result.num_temporal_layers = 3;
+ return result;
+}
+
+FrameDependencyStructure ScalabilityStructureL3T3::DependencyStructure() const {
+ FrameDependencyStructure structure;
+ structure.num_decode_targets = 9;
+ structure.num_chains = 3;
+ structure.decode_target_protected_by_chain = {0, 0, 0, 1, 1, 1, 2, 2, 2};
+ auto& t = structure.templates;
+ t.resize(15);
+ // Templates are shown in the order frames following them appear in the
+ // stream, but in `structure.templates` array templates are sorted by
+ // (`spatial_id`, `temporal_id`) since that is a dependency descriptor
+ // requirement. Indexes are written in hex for nicer alignment.
+ t[0x1].S(0).T(0).Dtis("SSSSSSSSS").ChainDiffs({0, 0, 0});
+ t[0x6].S(1).T(0).Dtis("---SSSSSS").ChainDiffs({1, 1, 1}).FrameDiffs({1});
+ t[0xB].S(2).T(0).Dtis("------SSS").ChainDiffs({2, 1, 1}).FrameDiffs({1});
+ t[0x3].S(0).T(2).Dtis("--D--R--R").ChainDiffs({3, 2, 1}).FrameDiffs({3});
+ t[0x8].S(1).T(2).Dtis("-----D--R").ChainDiffs({4, 3, 2}).FrameDiffs({3, 1});
+ t[0xD].S(2).T(2).Dtis("--------D").ChainDiffs({5, 4, 3}).FrameDiffs({3, 1});
+ t[0x2].S(0).T(1).Dtis("-DS-RR-RR").ChainDiffs({6, 5, 4}).FrameDiffs({6});
+ t[0x7].S(1).T(1).Dtis("----DS-RR").ChainDiffs({7, 6, 5}).FrameDiffs({6, 1});
+ t[0xC].S(2).T(1).Dtis("-------DS").ChainDiffs({8, 7, 6}).FrameDiffs({6, 1});
+ t[0x4].S(0).T(2).Dtis("--D--R--R").ChainDiffs({9, 8, 7}).FrameDiffs({3});
+ t[0x9].S(1).T(2).Dtis("-----D--R").ChainDiffs({10, 9, 8}).FrameDiffs({3, 1});
+ t[0xE].S(2).T(2).Dtis("--------D").ChainDiffs({11, 10, 9}).FrameDiffs({3, 1});
+ t[0x0].S(0).T(0).Dtis("SSSRRRRRR").ChainDiffs({12, 11, 10}).FrameDiffs({12});
+ t[0x5].S(1).T(0).Dtis("---SSSRRR").ChainDiffs({1, 1, 1}).FrameDiffs({12, 1});
+ t[0xA].S(2).T(0).Dtis("------SSS").ChainDiffs({2, 1, 1}).FrameDiffs({12, 1});
+ return structure;
+}
+
+ScalableVideoController::LayerFrameConfig
+ScalabilityStructureL3T3::KeyFrameConfig() const {
+ return LayerFrameConfig().Id(0).S(0).T(0).Keyframe().Update(0);
+}
+
+std::vector<ScalableVideoController::LayerFrameConfig>
+ScalabilityStructureL3T3::NextFrameConfig(bool restart) {
+ if (restart) {
+ next_pattern_ = kKeyFrame;
+ }
+ std::vector<LayerFrameConfig> config(3);
+
+ // For this structure name each of 8 buffers after the layer of the frame that
+ // buffer keeps.
+ static constexpr int kS0T0 = 0;
+ static constexpr int kS1T0 = 1;
+ static constexpr int kS2T0 = 2;
+ static constexpr int kS0T1 = 3;
+ static constexpr int kS1T1 = 4;
+ static constexpr int kS2T1 = 5;
+ static constexpr int kS0T2 = 6;
+ static constexpr int kS1T2 = 7;
+ switch (next_pattern_) {
+ case kKeyFrame:
+ config[0].Id(0).S(0).T(0).Keyframe().Update(kS0T0);
+ config[1].Id(1).S(1).T(0).Update(kS1T0).Reference(kS0T0);
+ config[2].Id(2).S(2).T(0).Update(kS2T0).Reference(kS1T0);
+ next_pattern_ = kDeltaFrameT2A;
+ break;
+ case kDeltaFrameT2A:
+ config[0].Id(3).S(0).T(2).Reference(kS0T0).Update(kS0T2);
+ config[1].Id(4).S(1).T(2).Reference(kS1T0).Reference(kS0T2).Update(kS1T2);
+ config[2].Id(5).S(2).T(2).Reference(kS2T0).Reference(kS1T2);
+ next_pattern_ = kDeltaFrameT1;
+ break;
+ case kDeltaFrameT1:
+ config[0].Id(6).S(0).T(1).Reference(kS0T0).Update(kS0T1);
+ config[1].Id(7).S(1).T(1).Reference(kS1T0).Reference(kS0T1).Update(kS1T1);
+ config[2].Id(8).S(2).T(1).Reference(kS2T0).Reference(kS1T1).Update(kS2T1);
+ next_pattern_ = kDeltaFrameT2B;
+ break;
+ case kDeltaFrameT2B:
+ config[0].Id(3).S(0).T(2).Reference(kS0T1).Update(kS0T2);
+ config[1].Id(4).S(1).T(2).Reference(kS1T1).Reference(kS0T2).Update(kS1T2);
+ config[2].Id(5).S(2).T(2).Reference(kS2T1).Reference(kS1T2);
+ next_pattern_ = kDeltaFrameT0;
+ break;
+ case kDeltaFrameT0:
+ config[0].Id(9).S(0).T(0).ReferenceAndUpdate(kS0T0);
+ config[1].Id(10).S(1).T(0).ReferenceAndUpdate(kS1T0).Reference(kS0T0);
+ config[2].Id(11).S(2).T(0).ReferenceAndUpdate(kS2T0).Reference(kS1T0);
+ next_pattern_ = kDeltaFrameT2A;
+ break;
+ }
+ return config;
+}
+
+absl::optional<GenericFrameInfo> ScalabilityStructureL3T3::OnEncodeDone(
+ LayerFrameConfig config) {
+ if (config.IsKeyframe() && config.Id() != 0) {
+ // Encoder generated a key frame without asking to.
+ if (config.SpatialId() > 0) {
+ RTC_LOG(LS_WARNING) << "Unexpected spatial id " << config.SpatialId()
+ << " for key frame.";
+ }
+ config = LayerFrameConfig()
+ .Keyframe()
+ .Id(0)
+ .S(0)
+ .T(0)
+ .Update(0)
+ .Update(1)
+ .Update(2)
+ .Update(3)
+ .Update(4)
+ .Update(5)
+ .Update(6)
+ .Update(7);
+ }
+
+ absl::optional<GenericFrameInfo> frame_info;
+ if (config.Id() < 0 || config.Id() >= int{ABSL_ARRAYSIZE(kDtis)}) {
+ RTC_LOG(LS_ERROR) << "Unexpected config id " << config.Id();
+ return frame_info;
+ }
+ frame_info.emplace();
+ frame_info->spatial_id = config.SpatialId();
+ frame_info->temporal_id = config.TemporalId();
+ frame_info->encoder_buffers = config.Buffers();
+ frame_info->decode_target_indications.assign(std::begin(kDtis[config.Id()]),
+ std::end(kDtis[config.Id()]));
+ if (config.TemporalId() == 0) {
+ frame_info->part_of_chain = {config.SpatialId() == 0,
+ config.SpatialId() <= 1, true};
+ } else {
+ frame_info->part_of_chain = {false, false, false};
+ }
+ return frame_info;
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l3t3.h b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l3t3.h
new file mode 100644
index 00000000000..363f07e015a
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_l3t3.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L3T3_H_
+#define MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L3T3_H_
+
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "common_video/generic_frame_descriptor/generic_frame_info.h"
+#include "modules/video_coding/codecs/av1/scalable_video_controller.h"
+
+namespace webrtc {
+
+// https://aomediacodec.github.io/av1-rtp-spec/#a63-l3t3-full-svc
+class ScalabilityStructureL3T3 : public ScalableVideoController {
+ public:
+ ~ScalabilityStructureL3T3() override;
+
+ StreamLayersConfig StreamConfig() const override;
+ FrameDependencyStructure DependencyStructure() const override;
+
+ std::vector<LayerFrameConfig> NextFrameConfig(bool restart) override;
+ absl::optional<GenericFrameInfo> OnEncodeDone(
+ LayerFrameConfig config) override;
+
+ private:
+ enum FramePattern {
+ kKeyFrame,
+ kDeltaFrameT2A,
+ kDeltaFrameT1,
+ kDeltaFrameT2B,
+ kDeltaFrameT0,
+ };
+ LayerFrameConfig KeyFrameConfig() const;
+
+ FramePattern next_pattern_ = kKeyFrame;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_L3T3_H_
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_s2t1.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_s2t1.cc
new file mode 100644
index 00000000000..267363f0be4
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_s2t1.cc
@@ -0,0 +1,91 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "modules/video_coding/codecs/av1/scalability_structure_s2t1.h"
+
+#include <utility>
+#include <vector>
+
+#include "absl/base/macros.h"
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace {
+
+constexpr auto kNotPresent = DecodeTargetIndication::kNotPresent;
+constexpr auto kSwitch = DecodeTargetIndication::kSwitch;
+
+constexpr DecodeTargetIndication kDtis[2][2] = {
+ {kSwitch, kNotPresent}, // S0
+ {kNotPresent, kSwitch}, // S1
+};
+
+} // namespace
+
+ScalabilityStructureS2T1::~ScalabilityStructureS2T1() = default;
+
+ScalableVideoController::StreamLayersConfig
+ScalabilityStructureS2T1::StreamConfig() const {
+ StreamLayersConfig result;
+ result.num_spatial_layers = 2;
+ result.num_temporal_layers = 1;
+ return result;
+}
+
+FrameDependencyStructure ScalabilityStructureS2T1::DependencyStructure() const {
+ FrameDependencyStructure structure;
+ structure.num_decode_targets = 2;
+ structure.num_chains = 2;
+ structure.decode_target_protected_by_chain = {0, 1};
+ structure.templates.resize(4);
+ structure.templates[0].S(0).Dtis("S-").ChainDiffs({2, 1}).FrameDiffs({2});
+ structure.templates[1].S(0).Dtis("S-").ChainDiffs({0, 0});
+ structure.templates[2].S(1).Dtis("-S").ChainDiffs({1, 2}).FrameDiffs({2});
+ structure.templates[3].S(1).Dtis("-S").ChainDiffs({1, 0});
+ return structure;
+}
+
+std::vector<ScalableVideoController::LayerFrameConfig>
+ScalabilityStructureS2T1::NextFrameConfig(bool restart) {
+ std::vector<LayerFrameConfig> result(2);
+ // Buffer0 keeps latest S0T0 frame, Buffer1 keeps latest S1T0 frame.
+ if (restart || keyframe_) {
+ result[0].S(0).Keyframe().Update(0);
+ result[1].S(1).Keyframe().Update(1);
+ keyframe_ = false;
+ } else {
+ result[0].S(0).ReferenceAndUpdate(0);
+ result[1].S(1).ReferenceAndUpdate(1);
+ }
+ return result;
+}
+
+absl::optional<GenericFrameInfo> ScalabilityStructureS2T1::OnEncodeDone(
+ LayerFrameConfig config) {
+ absl::optional<GenericFrameInfo> frame_info;
+ if (config.SpatialId() < 0 ||
+ config.SpatialId() >= int{ABSL_ARRAYSIZE(kDtis)}) {
+ RTC_LOG(LS_ERROR) << "Unexpected spatial id " << config.SpatialId();
+ return frame_info;
+ }
+ frame_info.emplace();
+ frame_info->spatial_id = config.SpatialId();
+ frame_info->temporal_id = config.TemporalId();
+ frame_info->encoder_buffers = std::move(config.Buffers());
+ frame_info->decode_target_indications.assign(
+ std::begin(kDtis[config.SpatialId()]),
+ std::end(kDtis[config.SpatialId()]));
+ frame_info->part_of_chain = {config.SpatialId() == 0,
+ config.SpatialId() == 1};
+ return frame_info;
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_s2t1.h b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_s2t1.h
new file mode 100644
index 00000000000..06a99775c49
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_s2t1.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_S2T1_H_
+#define MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_S2T1_H_
+
+#include <vector>
+
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "common_video/generic_frame_descriptor/generic_frame_info.h"
+#include "modules/video_coding/codecs/av1/scalable_video_controller.h"
+
+namespace webrtc {
+
+// S1 0--0--0-
+// ...
+// S0 0--0--0-
+class ScalabilityStructureS2T1 : public ScalableVideoController {
+ public:
+ ~ScalabilityStructureS2T1() override;
+
+ StreamLayersConfig StreamConfig() const override;
+ FrameDependencyStructure DependencyStructure() const override;
+
+ std::vector<LayerFrameConfig> NextFrameConfig(bool restart) override;
+ absl::optional<GenericFrameInfo> OnEncodeDone(
+ LayerFrameConfig config) override;
+
+ private:
+ bool keyframe_ = true;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_VIDEO_CODING_CODECS_AV1_SCALABILITY_STRUCTURE_S2T1_H_
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_unittest.cc
new file mode 100644
index 00000000000..d2a0516567a
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalability_structure_unittest.cc
@@ -0,0 +1,319 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <functional>
+#include <memory>
+#include <ostream>
+#include <string>
+
+#include "absl/types/optional.h"
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "api/video/video_frame_type.h"
+#include "modules/video_coding/chain_diff_calculator.h"
+#include "modules/video_coding/codecs/av1/scalability_structure_l1t2.h"
+#include "modules/video_coding/codecs/av1/scalability_structure_l1t3.h"
+#include "modules/video_coding/codecs/av1/scalability_structure_l2t1.h"
+#include "modules/video_coding/codecs/av1/scalability_structure_l2t1_key.h"
+#include "modules/video_coding/codecs/av1/scalability_structure_l2t2.h"
+#include "modules/video_coding/codecs/av1/scalability_structure_l2t2_key.h"
+#include "modules/video_coding/codecs/av1/scalability_structure_l2t2_key_shift.h"
+#include "modules/video_coding/codecs/av1/scalability_structure_l3t1.h"
+#include "modules/video_coding/codecs/av1/scalability_structure_l3t3.h"
+#include "modules/video_coding/codecs/av1/scalability_structure_s2t1.h"
+#include "modules/video_coding/codecs/av1/scalable_video_controller.h"
+#include "modules/video_coding/frame_dependencies_calculator.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace {
+
+using ::testing::AllOf;
+using ::testing::Contains;
+using ::testing::Each;
+using ::testing::Field;
+using ::testing::Ge;
+using ::testing::IsEmpty;
+using ::testing::Le;
+using ::testing::Lt;
+using ::testing::Not;
+using ::testing::SizeIs;
+using ::testing::TestWithParam;
+using ::testing::Values;
+
+struct SvcTestParam {
+ friend std::ostream& operator<<(std::ostream& os, const SvcTestParam& param) {
+ return os << param.name;
+ }
+
+ std::string name;
+ std::function<std::unique_ptr<ScalableVideoController>()> svc_factory;
+ int num_temporal_units;
+};
+
+class ScalabilityStructureTest : public TestWithParam<SvcTestParam> {
+ public:
+ std::vector<GenericFrameInfo> GenerateAllFrames() {
+ std::vector<GenericFrameInfo> frames;
+
+ FrameDependenciesCalculator frame_deps_calculator;
+ ChainDiffCalculator chain_diff_calculator;
+ std::unique_ptr<ScalableVideoController> structure_controller =
+ GetParam().svc_factory();
+ FrameDependencyStructure structure =
+ structure_controller->DependencyStructure();
+ for (int i = 0; i < GetParam().num_temporal_units; ++i) {
+ for (auto& layer_frame :
+          structure_controller->NextFrameConfig(/*restart=*/false)) {
+ int64_t frame_id = static_cast<int64_t>(frames.size());
+ bool is_keyframe = layer_frame.IsKeyframe();
+ absl::optional<GenericFrameInfo> frame_info =
+ structure_controller->OnEncodeDone(std::move(layer_frame));
+ EXPECT_TRUE(frame_info.has_value());
+ if (is_keyframe) {
+ chain_diff_calculator.Reset(frame_info->part_of_chain);
+ }
+ frame_info->chain_diffs =
+ chain_diff_calculator.From(frame_id, frame_info->part_of_chain);
+ for (int64_t base_frame_id : frame_deps_calculator.FromBuffersUsage(
+ is_keyframe ? VideoFrameType::kVideoFrameKey
+ : VideoFrameType::kVideoFrameDelta,
+ frame_id, frame_info->encoder_buffers)) {
+ EXPECT_LT(base_frame_id, frame_id);
+ EXPECT_GE(base_frame_id, 0);
+ frame_info->frame_diffs.push_back(frame_id - base_frame_id);
+ }
+
+ frames.push_back(*std::move(frame_info));
+ }
+ }
+ return frames;
+ }
+};
+
+TEST_P(ScalabilityStructureTest,
+ NumberOfDecodeTargetsAndChainsAreInRangeAndConsistent) {
+ FrameDependencyStructure structure =
+ GetParam().svc_factory()->DependencyStructure();
+ EXPECT_GT(structure.num_decode_targets, 0);
+ EXPECT_LE(structure.num_decode_targets,
+ DependencyDescriptor::kMaxDecodeTargets);
+ EXPECT_GE(structure.num_chains, 0);
+ EXPECT_LE(structure.num_chains, structure.num_decode_targets);
+ if (structure.num_chains == 0) {
+ EXPECT_THAT(structure.decode_target_protected_by_chain, IsEmpty());
+ } else {
+ EXPECT_THAT(structure.decode_target_protected_by_chain,
+ AllOf(SizeIs(structure.num_decode_targets), Each(Ge(0)),
+ Each(Le(structure.num_chains))));
+ }
+ EXPECT_THAT(structure.templates,
+ SizeIs(Lt(size_t{DependencyDescriptor::kMaxTemplates})));
+}
+
+TEST_P(ScalabilityStructureTest, TemplatesAreSortedByLayerId) {
+ FrameDependencyStructure structure =
+ GetParam().svc_factory()->DependencyStructure();
+ ASSERT_THAT(structure.templates, Not(IsEmpty()));
+ const auto& first_templates = structure.templates.front();
+ EXPECT_EQ(first_templates.spatial_id, 0);
+ EXPECT_EQ(first_templates.temporal_id, 0);
+ for (size_t i = 1; i < structure.templates.size(); ++i) {
+ const auto& prev_template = structure.templates[i - 1];
+ const auto& next_template = structure.templates[i];
+ if (next_template.spatial_id == prev_template.spatial_id &&
+ next_template.temporal_id == prev_template.temporal_id) {
+ // Same layer, next_layer_idc == 0
+ } else if (next_template.spatial_id == prev_template.spatial_id &&
+ next_template.temporal_id == prev_template.temporal_id + 1) {
+ // Next temporal layer, next_layer_idc == 1
+ } else if (next_template.spatial_id == prev_template.spatial_id + 1 &&
+ next_template.temporal_id == 0) {
+ // Next spatial layer, next_layer_idc == 2
+ } else {
+ // everything else is invalid.
+ ADD_FAILURE() << "Invalid templates order. Template #" << i
+ << " with layer (" << next_template.spatial_id << ","
+ << next_template.temporal_id
+ << ") follows template with layer ("
+ << prev_template.spatial_id << ","
+ << prev_template.temporal_id << ").";
+ }
+ }
+}
+
+TEST_P(ScalabilityStructureTest, TemplatesMatchNumberOfDecodeTargetsAndChains) {
+ FrameDependencyStructure structure =
+ GetParam().svc_factory()->DependencyStructure();
+ EXPECT_THAT(
+ structure.templates,
+ Each(AllOf(Field(&FrameDependencyTemplate::decode_target_indications,
+ SizeIs(structure.num_decode_targets)),
+ Field(&FrameDependencyTemplate::chain_diffs,
+ SizeIs(structure.num_chains)))));
+}
+
+TEST_P(ScalabilityStructureTest, FrameInfoMatchesFrameDependencyStructure) {
+ FrameDependencyStructure structure =
+ GetParam().svc_factory()->DependencyStructure();
+ std::vector<GenericFrameInfo> frame_infos = GenerateAllFrames();
+ for (size_t frame_id = 0; frame_id < frame_infos.size(); ++frame_id) {
+ const auto& frame = frame_infos[frame_id];
+ EXPECT_GE(frame.spatial_id, 0) << " for frame " << frame_id;
+ EXPECT_GE(frame.temporal_id, 0) << " for frame " << frame_id;
+ EXPECT_THAT(frame.decode_target_indications,
+ SizeIs(structure.num_decode_targets))
+ << " for frame " << frame_id;
+ EXPECT_THAT(frame.part_of_chain, SizeIs(structure.num_chains))
+ << " for frame " << frame_id;
+ }
+}
+
+TEST_P(ScalabilityStructureTest, ThereIsAPerfectTemplateForEachFrame) {
+ FrameDependencyStructure structure =
+ GetParam().svc_factory()->DependencyStructure();
+ std::vector<GenericFrameInfo> frame_infos = GenerateAllFrames();
+ for (size_t frame_id = 0; frame_id < frame_infos.size(); ++frame_id) {
+ EXPECT_THAT(structure.templates, Contains(frame_infos[frame_id]))
+ << " for frame " << frame_id;
+ }
+}
+
+TEST_P(ScalabilityStructureTest, FrameDependsOnSameOrLowerLayer) {
+ std::vector<GenericFrameInfo> frame_infos = GenerateAllFrames();
+ int64_t num_frames = frame_infos.size();
+
+ for (int64_t frame_id = 0; frame_id < num_frames; ++frame_id) {
+ const auto& frame = frame_infos[frame_id];
+ for (int frame_diff : frame.frame_diffs) {
+ int64_t base_frame_id = frame_id - frame_diff;
+ const auto& base_frame = frame_infos[base_frame_id];
+ EXPECT_GE(frame.spatial_id, base_frame.spatial_id)
+ << "Frame " << frame_id << " depends on frame " << base_frame_id;
+ EXPECT_GE(frame.temporal_id, base_frame.temporal_id)
+ << "Frame " << frame_id << " depends on frame " << base_frame_id;
+ }
+ }
+}
+
+TEST_P(ScalabilityStructureTest, NoFrameDependsOnDiscardableOrNotPresent) {
+ std::vector<GenericFrameInfo> frame_infos = GenerateAllFrames();
+ int64_t num_frames = frame_infos.size();
+ FrameDependencyStructure structure =
+ GetParam().svc_factory()->DependencyStructure();
+
+ for (int dt = 0; dt < structure.num_decode_targets; ++dt) {
+ for (int64_t frame_id = 0; frame_id < num_frames; ++frame_id) {
+ const auto& frame = frame_infos[frame_id];
+ if (frame.decode_target_indications[dt] ==
+ DecodeTargetIndication::kNotPresent) {
+ continue;
+ }
+ for (int frame_diff : frame.frame_diffs) {
+ int64_t base_frame_id = frame_id - frame_diff;
+ const auto& base_frame = frame_infos[base_frame_id];
+ EXPECT_NE(base_frame.decode_target_indications[dt],
+ DecodeTargetIndication::kNotPresent)
+ << "Frame " << frame_id << " depends on frame " << base_frame_id
+ << " that is not part of decode target#" << dt;
+ EXPECT_NE(base_frame.decode_target_indications[dt],
+ DecodeTargetIndication::kDiscardable)
+ << "Frame " << frame_id << " depends on frame " << base_frame_id
+ << " that is discardable for decode target#" << dt;
+ }
+ }
+ }
+}
+
+TEST_P(ScalabilityStructureTest, NoFrameDependsThroughSwitchIndication) {
+ FrameDependencyStructure structure =
+ GetParam().svc_factory()->DependencyStructure();
+ std::vector<GenericFrameInfo> frame_infos = GenerateAllFrames();
+ int64_t num_frames = frame_infos.size();
+ std::vector<std::set<int64_t>> full_deps(num_frames);
+
+ // For each frame calculate set of all frames it depends on, both directly and
+ // indirectly.
+ for (int64_t frame_id = 0; frame_id < num_frames; ++frame_id) {
+ std::set<int64_t> all_base_frames;
+ for (int frame_diff : frame_infos[frame_id].frame_diffs) {
+ int64_t base_frame_id = frame_id - frame_diff;
+ all_base_frames.insert(base_frame_id);
+ const auto& indirect = full_deps[base_frame_id];
+ all_base_frames.insert(indirect.begin(), indirect.end());
+ }
+ full_deps[frame_id] = std::move(all_base_frames);
+ }
+
+  // Now check the switch indication: frames after the switch indication mustn't
+  // depend on any additional frames before the switch indication.
+ for (int dt = 0; dt < structure.num_decode_targets; ++dt) {
+ for (int64_t switch_frame_id = 0; switch_frame_id < num_frames;
+ ++switch_frame_id) {
+ if (frame_infos[switch_frame_id].decode_target_indications[dt] !=
+ DecodeTargetIndication::kSwitch) {
+ continue;
+ }
+ for (int64_t later_frame_id = switch_frame_id + 1;
+ later_frame_id < num_frames; ++later_frame_id) {
+ if (frame_infos[later_frame_id].decode_target_indications[dt] ==
+ DecodeTargetIndication::kNotPresent) {
+ continue;
+ }
+ for (int frame_diff : frame_infos[later_frame_id].frame_diffs) {
+ int64_t early_frame_id = later_frame_id - frame_diff;
+ if (early_frame_id < switch_frame_id) {
+ EXPECT_THAT(full_deps[switch_frame_id], Contains(early_frame_id))
+ << "For decode target #" << dt << " frame " << later_frame_id
+ << " depends on the frame " << early_frame_id
+ << " that switch indication frame " << switch_frame_id
+              << " doesn't directly or indirectly depend on.";
+ }
+ }
+ }
+ }
+ }
+}
+
+INSTANTIATE_TEST_SUITE_P(
+ Svc,
+ ScalabilityStructureTest,
+ Values(SvcTestParam{"L1T2", std::make_unique<ScalabilityStructureL1T2>,
+ /*num_temporal_units=*/4},
+ SvcTestParam{"L1T3", std::make_unique<ScalabilityStructureL1T3>,
+ /*num_temporal_units=*/8},
+ SvcTestParam{"L2T1", std::make_unique<ScalabilityStructureL2T1>,
+ /*num_temporal_units=*/3},
+ SvcTestParam{"L2T1Key",
+ std::make_unique<ScalabilityStructureL2T1Key>,
+ /*num_temporal_units=*/3},
+ SvcTestParam{"L3T1", std::make_unique<ScalabilityStructureL3T1>,
+ /*num_temporal_units=*/3},
+ SvcTestParam{"L3T3", std::make_unique<ScalabilityStructureL3T3>,
+ /*num_temporal_units=*/8},
+ SvcTestParam{"S2T1", std::make_unique<ScalabilityStructureS2T1>,
+ /*num_temporal_units=*/3},
+ SvcTestParam{"L2T2", std::make_unique<ScalabilityStructureL2T2>,
+ /*num_temporal_units=*/4},
+ SvcTestParam{"L2T2Key",
+ std::make_unique<ScalabilityStructureL2T2Key>,
+ /*num_temporal_units=*/4},
+ SvcTestParam{"L2T2KeyShift",
+ std::make_unique<ScalabilityStructureL2T2KeyShift>,
+ /*num_temporal_units=*/4}),
+ [](const testing::TestParamInfo<SvcTestParam>& info) {
+ return info.param.name;
+ });
+
+} // namespace
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalable_video_controller.h b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalable_video_controller.h
new file mode 100644
index 00000000000..d10aca2ce59
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalable_video_controller.h
@@ -0,0 +1,137 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef MODULES_VIDEO_CODING_CODECS_AV1_SCALABLE_VIDEO_CONTROLLER_H_
+#define MODULES_VIDEO_CODING_CODECS_AV1_SCALABLE_VIDEO_CONTROLLER_H_
+
+#include <vector>
+
+#include "absl/container/inlined_vector.h"
+#include "absl/types/optional.h"
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "api/video/video_bitrate_allocation.h"
+#include "common_video/generic_frame_descriptor/generic_frame_info.h"
+
+namespace webrtc {
+
+// Controls how video should be encoded to be scalable. Outputs results as
+// buffer usage configuration for encoder and enough details to communicate the
+// scalability structure via dependency descriptor rtp header extension.
+class ScalableVideoController {
+ public:
+ struct StreamLayersConfig {
+ int num_spatial_layers = 1;
+ int num_temporal_layers = 1;
+ };
+ class LayerFrameConfig {
+ public:
+ // Builders/setters.
+ LayerFrameConfig& Id(int value);
+ LayerFrameConfig& Keyframe();
+ LayerFrameConfig& S(int value);
+ LayerFrameConfig& T(int value);
+ LayerFrameConfig& Reference(int buffer_id);
+ LayerFrameConfig& Update(int buffer_id);
+ LayerFrameConfig& ReferenceAndUpdate(int buffer_id);
+
+ // Getters.
+ int Id() const { return id_; }
+ bool IsKeyframe() const { return is_keyframe_; }
+ int SpatialId() const { return spatial_id_; }
+ int TemporalId() const { return temporal_id_; }
+ const absl::InlinedVector<CodecBufferUsage, kMaxEncoderBuffers>& Buffers()
+ const {
+ return buffers_;
+ }
+
+ private:
+ // Id to match configuration returned by NextFrameConfig with
+    // (possibly modified) configuration passed back via OnEncodeDone.
+ // The meaning of the id is an implementation detail of
+ // the ScalableVideoController.
+ int id_ = 0;
+
+    // Indication that the frame should be encoded as a key frame. In particular when
+ // `is_keyframe=true` property `CodecBufferUsage::referenced` should be
+ // ignored and treated as false.
+ bool is_keyframe_ = false;
+
+ int spatial_id_ = 0;
+ int temporal_id_ = 0;
+    // Describes which buffers the encoder is allowed to reference and
+    // which buffers the encoder should update.
+ absl::InlinedVector<CodecBufferUsage, kMaxEncoderBuffers> buffers_;
+ };
+
+ virtual ~ScalableVideoController() = default;
+
+ // Returns video structure description for encoder to configure itself.
+ virtual StreamLayersConfig StreamConfig() const = 0;
+
+ // Returns video structure description in format compatible with
+ // dependency descriptor rtp header extension.
+ virtual FrameDependencyStructure DependencyStructure() const = 0;
+
+ // Notifies Controller with updated bitrates per layer. In particular notifies
+ // when certain layers should be disabled.
+ // Controller shouldn't produce LayerFrameConfig for disabled layers.
+ // TODO(bugs.webrtc.org/11404): Make pure virtual when implemented by all
+ // structures.
+ virtual void OnRatesUpdated(const VideoBitrateAllocation& bitrates) {}
+
+ // When `restart` is true, first `LayerFrameConfig` should have `is_keyframe`
+ // set to true.
+ // Returned vector shouldn't be empty.
+ virtual std::vector<LayerFrameConfig> NextFrameConfig(bool restart) = 0;
+
+ // Returns configuration to pass to EncoderCallback.
+ virtual absl::optional<GenericFrameInfo> OnEncodeDone(
+ LayerFrameConfig config) = 0;
+};
+
+// Below are implementation details.
+inline ScalableVideoController::LayerFrameConfig&
+ScalableVideoController::LayerFrameConfig::Id(int value) {
+ id_ = value;
+ return *this;
+}
+inline ScalableVideoController::LayerFrameConfig&
+ScalableVideoController::LayerFrameConfig::Keyframe() {
+ is_keyframe_ = true;
+ return *this;
+}
+inline ScalableVideoController::LayerFrameConfig&
+ScalableVideoController::LayerFrameConfig::S(int value) {
+ spatial_id_ = value;
+ return *this;
+}
+inline ScalableVideoController::LayerFrameConfig&
+ScalableVideoController::LayerFrameConfig::T(int value) {
+ temporal_id_ = value;
+ return *this;
+}
+inline ScalableVideoController::LayerFrameConfig&
+ScalableVideoController::LayerFrameConfig::Reference(int buffer_id) {
+ buffers_.emplace_back(buffer_id, /*referenced=*/true, /*updated=*/false);
+ return *this;
+}
+inline ScalableVideoController::LayerFrameConfig&
+ScalableVideoController::LayerFrameConfig::Update(int buffer_id) {
+ buffers_.emplace_back(buffer_id, /*referenced=*/false, /*updated=*/true);
+ return *this;
+}
+inline ScalableVideoController::LayerFrameConfig&
+ScalableVideoController::LayerFrameConfig::ReferenceAndUpdate(int buffer_id) {
+ buffers_.emplace_back(buffer_id, /*referenced=*/true, /*updated=*/true);
+ return *this;
+}
+
+} // namespace webrtc
+
+#endif // MODULES_VIDEO_CODING_CODECS_AV1_SCALABLE_VIDEO_CONTROLLER_H_
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalable_video_controller_no_layering.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalable_video_controller_no_layering.cc
new file mode 100644
index 00000000000..0d211fb9119
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalable_video_controller_no_layering.cc
@@ -0,0 +1,67 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "modules/video_coding/codecs/av1/scalable_video_controller_no_layering.h"
+
+#include <utility>
+#include <vector>
+
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+ScalableVideoControllerNoLayering::~ScalableVideoControllerNoLayering() =
+ default;
+
+ScalableVideoController::StreamLayersConfig
+ScalableVideoControllerNoLayering::StreamConfig() const {
+ StreamLayersConfig result;
+ result.num_spatial_layers = 1;
+ result.num_temporal_layers = 1;
+ return result;
+}
+
+FrameDependencyStructure
+ScalableVideoControllerNoLayering::DependencyStructure() const {
+ FrameDependencyStructure structure;
+ structure.num_decode_targets = 1;
+ FrameDependencyTemplate a_template;
+ a_template.decode_target_indications = {DecodeTargetIndication::kSwitch};
+ structure.templates.push_back(a_template);
+ return structure;
+}
+
+std::vector<ScalableVideoController::LayerFrameConfig>
+ScalableVideoControllerNoLayering::NextFrameConfig(bool restart) {
+ std::vector<LayerFrameConfig> result(1);
+ if (restart || start_) {
+ result[0].Id(0).Keyframe().Update(0);
+ } else {
+ result[0].Id(0).ReferenceAndUpdate(0);
+ }
+ start_ = false;
+ return result;
+}
+
+absl::optional<GenericFrameInfo>
+ScalableVideoControllerNoLayering::OnEncodeDone(LayerFrameConfig config) {
+ RTC_DCHECK_EQ(config.Id(), 0);
+ absl::optional<GenericFrameInfo> frame_info(absl::in_place);
+ frame_info->encoder_buffers = config.Buffers();
+ if (config.IsKeyframe()) {
+ for (auto& buffer : frame_info->encoder_buffers) {
+ buffer.referenced = false;
+ }
+ }
+ frame_info->decode_target_indications = {DecodeTargetIndication::kSwitch};
+ return frame_info;
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalable_video_controller_no_layering.h b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalable_video_controller_no_layering.h
new file mode 100644
index 00000000000..ad730989afb
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/av1/scalable_video_controller_no_layering.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef MODULES_VIDEO_CODING_CODECS_AV1_SCALABLE_VIDEO_CONTROLLER_NO_LAYERING_H_
+#define MODULES_VIDEO_CODING_CODECS_AV1_SCALABLE_VIDEO_CONTROLLER_NO_LAYERING_H_
+
+#include <vector>
+
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "common_video/generic_frame_descriptor/generic_frame_info.h"
+#include "modules/video_coding/codecs/av1/scalable_video_controller.h"
+
+namespace webrtc {
+
+class ScalableVideoControllerNoLayering : public ScalableVideoController {
+ public:
+ ~ScalableVideoControllerNoLayering() override;
+
+ StreamLayersConfig StreamConfig() const override;
+ FrameDependencyStructure DependencyStructure() const override;
+
+ std::vector<LayerFrameConfig> NextFrameConfig(bool restart) override;
+ absl::optional<GenericFrameInfo> OnEncodeDone(
+ LayerFrameConfig config) override;
+
+ private:
+ bool start_ = true;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_VIDEO_CODING_CODECS_AV1_SCALABLE_VIDEO_CONTROLLER_NO_LAYERING_H_
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/test/encoded_video_frame_producer.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/test/encoded_video_frame_producer.cc
new file mode 100644
index 00000000000..7dc387b8572
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/test/encoded_video_frame_producer.cc
@@ -0,0 +1,78 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_coding/codecs/test/encoded_video_frame_producer.h"
+
+#include <memory>
+#include <vector>
+
+#include "api/test/create_frame_generator.h"
+#include "api/test/frame_generator_interface.h"
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "api/video/video_frame.h"
+#include "api/video/video_frame_type.h"
+#include "api/video_codecs/video_encoder.h"
+#include "modules/video_coding/include/video_codec_interface.h"
+#include "modules/video_coding/include/video_error_codes.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+namespace {
+
+class EncoderCallback : public EncodedImageCallback {
+ public:
+ explicit EncoderCallback(
+ std::vector<EncodedVideoFrameProducer::EncodedFrame>& output_frames)
+ : output_frames_(output_frames) {}
+
+ private:
+ Result OnEncodedImage(
+ const EncodedImage& encoded_image,
+ const CodecSpecificInfo* codec_specific_info,
+ const RTPFragmentationHeader* /*fragmentation*/) override {
+ output_frames_.push_back({encoded_image, *codec_specific_info});
+ return Result(Result::Error::OK);
+ }
+
+ std::vector<EncodedVideoFrameProducer::EncodedFrame>& output_frames_;
+};
+
+} // namespace
+
+std::vector<EncodedVideoFrameProducer::EncodedFrame>
+EncodedVideoFrameProducer::Encode() {
+ std::unique_ptr<test::FrameGeneratorInterface> frame_buffer_generator =
+ test::CreateSquareFrameGenerator(
+ resolution_.Width(), resolution_.Height(),
+ test::FrameGeneratorInterface::OutputType::kI420, absl::nullopt);
+
+ std::vector<EncodedFrame> encoded_frames;
+ EncoderCallback encoder_callback(encoded_frames);
+ RTC_CHECK_EQ(encoder_.RegisterEncodeCompleteCallback(&encoder_callback),
+ WEBRTC_VIDEO_CODEC_OK);
+
+ uint32_t rtp_tick = 90000 / framerate_fps_;
+ std::vector<VideoFrameType> frame_types = {VideoFrameType::kVideoFrameDelta};
+ for (int i = 0; i < num_input_frames_; ++i) {
+ VideoFrame frame =
+ VideoFrame::Builder()
+ .set_video_frame_buffer(frame_buffer_generator->NextFrame().buffer)
+ .set_timestamp_rtp(rtp_timestamp_)
+ .build();
+ rtp_timestamp_ += rtp_tick;
+ RTC_CHECK_EQ(encoder_.Encode(frame, &frame_types), WEBRTC_VIDEO_CODEC_OK);
+ }
+
+ RTC_CHECK_EQ(encoder_.RegisterEncodeCompleteCallback(nullptr),
+ WEBRTC_VIDEO_CODEC_OK);
+ return encoded_frames;
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/test/encoded_video_frame_producer.h b/chromium/third_party/webrtc/modules/video_coding/codecs/test/encoded_video_frame_producer.h
new file mode 100644
index 00000000000..757da02422a
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/test/encoded_video_frame_producer.h
@@ -0,0 +1,74 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_VIDEO_CODING_CODECS_TEST_ENCODED_VIDEO_FRAME_PRODUCER_H_
+#define MODULES_VIDEO_CODING_CODECS_TEST_ENCODED_VIDEO_FRAME_PRODUCER_H_
+
+#include <stdint.h>
+
+#include <vector>
+
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "api/video/encoded_image.h"
+#include "api/video_codecs/video_encoder.h"
+#include "modules/video_coding/include/video_codec_interface.h"
+
+namespace webrtc {
+
+// Wrapper around VideoEncoder::Encode for convenient input (generates frames)
+// and output (returns encoded frames instead of passing them to callback)
+class EncodedVideoFrameProducer {
+ public:
+ struct EncodedFrame {
+ EncodedImage encoded_image;
+ CodecSpecificInfo codec_specific_info;
+ };
+
+ // `encoder` should be initialized, but shouldn't have `EncoderCallback` set.
+ explicit EncodedVideoFrameProducer(VideoEncoder& encoder)
+ : encoder_(encoder) {}
+ EncodedVideoFrameProducer(const EncodedVideoFrameProducer&) = delete;
+ EncodedVideoFrameProducer& operator=(const EncodedVideoFrameProducer&) =
+ delete;
+
+ // Number of the input frames to pass to the encoder.
+ EncodedVideoFrameProducer& SetNumInputFrames(int value);
+ // Resolution of the input frames.
+ EncodedVideoFrameProducer& SetResolution(RenderResolution value);
+
+ // Generates input video frames and encodes them with `encoder` provided in
+  // the constructor. Returns frames passed to `OnEncodedImage` by wrapping
+  // `EncodedImageCallback` underneath.
+ // `EncodedImageCallback` underneath.
+ std::vector<EncodedFrame> Encode();
+
+ private:
+ VideoEncoder& encoder_;
+
+ uint32_t rtp_timestamp_ = 1000;
+ int num_input_frames_ = 1;
+ int framerate_fps_ = 30;
+ RenderResolution resolution_ = {320, 180};
+};
+
+inline EncodedVideoFrameProducer& EncodedVideoFrameProducer::SetNumInputFrames(
+ int value) {
+ RTC_DCHECK_GT(value, 0);
+ num_input_frames_ = value;
+ return *this;
+}
+
+inline EncodedVideoFrameProducer& EncodedVideoFrameProducer::SetResolution(
+ RenderResolution value) {
+ resolution_ = value;
+ return *this;
+}
+
+} // namespace webrtc
+#endif // MODULES_VIDEO_CODING_CODECS_TEST_ENCODED_VIDEO_FRAME_PRODUCER_H_
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/test/videocodec_test_fixture_impl.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/test/videocodec_test_fixture_impl.cc
index 7e92b360bd0..990db54321e 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/test/videocodec_test_fixture_impl.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/test/videocodec_test_fixture_impl.cc
@@ -205,6 +205,9 @@ void VideoCodecTestFixtureImpl::Config::SetCodecSettings(
codec_settings.VP9()->numberOfSpatialLayers =
static_cast<uint8_t>(num_spatial_layers);
break;
+ case kVideoCodecAV1:
+ codec_settings.qpMax = 63;
+ break;
case kVideoCodecH264:
codec_settings.H264()->frameDroppingOn = frame_dropper_on;
codec_settings.H264()->keyFrameInterval = kBaseKeyFrameInterval;
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/test/videocodec_test_libaom.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/test/videocodec_test_libaom.cc
new file mode 100644
index 00000000000..45730aa09e4
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/test/videocodec_test_libaom.cc
@@ -0,0 +1,97 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+#include <vector>
+
+#include "api/test/create_videocodec_test_fixture.h"
+#include "api/test/video/function_video_encoder_factory.h"
+#include "api/video_codecs/sdp_video_format.h"
+#include "media/base/media_constants.h"
+#include "media/engine/internal_decoder_factory.h"
+#include "media/engine/internal_encoder_factory.h"
+#include "media/engine/simulcast_encoder_adapter.h"
+#include "test/gtest.h"
+#include "test/testsupport/file_utils.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+// Test clips settings.
+constexpr int kCifWidth = 352;
+constexpr int kCifHeight = 288;
+constexpr int kNumFramesLong = 300;
+
+VideoCodecTestFixture::Config CreateConfig(std::string filename) {
+ VideoCodecTestFixture::Config config;
+ config.filename = filename;
+ config.filepath = ResourcePath(config.filename, "yuv");
+ config.num_frames = kNumFramesLong;
+ config.use_single_core = true;
+ return config;
+}
+
+TEST(VideoCodecTestLibaom, HighBitrateAV1) {
+ auto config = CreateConfig("foreman_cif");
+ config.SetCodecSettings(cricket::kAv1CodecName, 1, 1, 1, false, true, true,
+ kCifWidth, kCifHeight);
+ config.num_frames = kNumFramesLong;
+ auto fixture = CreateVideoCodecTestFixture(config);
+
+ std::vector<RateProfile> rate_profiles = {{500, 30, 0}};
+
+ std::vector<RateControlThresholds> rc_thresholds = {
+ {12, 1, 0, 1, 0.3, 0.1, 0, 1}};
+
+ std::vector<QualityThresholds> quality_thresholds = {{37, 34, 0.94, 0.92}};
+
+ fixture->RunTest(rate_profiles, &rc_thresholds, &quality_thresholds, nullptr);
+}
+
+TEST(VideoCodecTestLibaom, VeryLowBitrateAV1) {
+ auto config = CreateConfig("foreman_cif");
+ config.SetCodecSettings(cricket::kAv1CodecName, 1, 1, 1, false, true, true,
+ kCifWidth, kCifHeight);
+ auto fixture = CreateVideoCodecTestFixture(config);
+
+ std::vector<RateProfile> rate_profiles = {{50, 30, 0}};
+
+ std::vector<RateControlThresholds> rc_thresholds = {
+ {15, 8, 75, 2, 2, 2, 2, 1}};
+
+ std::vector<QualityThresholds> quality_thresholds = {{28, 25, 0.70, 0.62}};
+
+ fixture->RunTest(rate_profiles, &rc_thresholds, &quality_thresholds, nullptr);
+}
+
+#if !defined(WEBRTC_ANDROID)
+constexpr int kHdWidth = 1280;
+constexpr int kHdHeight = 720;
+TEST(VideoCodecTestLibaom, HdAV1) {
+ auto config = CreateConfig("ConferenceMotion_1280_720_50");
+ config.SetCodecSettings(cricket::kAv1CodecName, 1, 1, 1, false, true, true,
+ kHdWidth, kHdHeight);
+ config.num_frames = kNumFramesLong;
+ auto fixture = CreateVideoCodecTestFixture(config);
+
+ std::vector<RateProfile> rate_profiles = {{1000, 50, 0}};
+
+ std::vector<RateControlThresholds> rc_thresholds = {
+ {13, 3, 0, 1, 0.3, 0.1, 0, 1}};
+
+ std::vector<QualityThresholds> quality_thresholds = {{36, 32, 0.93, 0.87}};
+
+ fixture->RunTest(rate_profiles, &rc_thresholds, &quality_thresholds, nullptr);
+}
+#endif
+
+} // namespace
+} // namespace test
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.cc
index 83ea450d886..b5652593ae2 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.cc
@@ -608,58 +608,52 @@ FrameDependencyStructure DefaultTemporalLayers::GetTemplateStructure(
FrameDependencyStructure template_structure;
template_structure.num_decode_targets = num_layers;
- using Builder = GenericFrameInfo::Builder;
switch (num_layers) {
case 1: {
- template_structure.templates = {
- Builder().T(0).Dtis("S").Build(),
- Builder().T(0).Dtis("S").Fdiffs({1}).Build(),
- };
+ template_structure.templates.resize(2);
+ template_structure.templates[0].T(0).Dtis("S");
+ template_structure.templates[1].T(0).Dtis("S").FrameDiffs({1});
return template_structure;
}
case 2: {
- template_structure.templates = {
- Builder().T(0).Dtis("SS").Build(),
- Builder().T(0).Dtis("SS").Fdiffs({2}).Build(),
- Builder().T(0).Dtis("SR").Fdiffs({2}).Build(),
- Builder().T(1).Dtis("-S").Fdiffs({1}).Build(),
- Builder().T(1).Dtis("-D").Fdiffs({1, 2}).Build(),
- };
+ template_structure.templates.resize(5);
+ template_structure.templates[0].T(0).Dtis("SS");
+ template_structure.templates[1].T(0).Dtis("SS").FrameDiffs({2});
+ template_structure.templates[2].T(0).Dtis("SR").FrameDiffs({2});
+ template_structure.templates[3].T(1).Dtis("-S").FrameDiffs({1});
+ template_structure.templates[4].T(1).Dtis("-D").FrameDiffs({2, 1});
return template_structure;
}
case 3: {
if (field_trial::IsEnabled("WebRTC-UseShortVP8TL3Pattern")) {
- template_structure.templates = {
- Builder().T(0).Dtis("SSS").Build(),
- Builder().T(0).Dtis("SSS").Fdiffs({4}).Build(),
- Builder().T(1).Dtis("-DR").Fdiffs({2}).Build(),
- Builder().T(2).Dtis("--S").Fdiffs({1}).Build(),
- Builder().T(2).Dtis("--D").Fdiffs({1, 2}).Build(),
- };
+ template_structure.templates.resize(5);
+ template_structure.templates[0].T(0).Dtis("SSS");
+ template_structure.templates[1].T(0).Dtis("SSS").FrameDiffs({4});
+ template_structure.templates[2].T(1).Dtis("-DR").FrameDiffs({2});
+ template_structure.templates[3].T(2).Dtis("--S").FrameDiffs({1});
+ template_structure.templates[4].T(2).Dtis("--D").FrameDiffs({2, 1});
} else {
- template_structure.templates = {
- Builder().T(0).Dtis("SSS").Build(),
- Builder().T(0).Dtis("SSS").Fdiffs({4}).Build(),
- Builder().T(0).Dtis("SRR").Fdiffs({4}).Build(),
- Builder().T(1).Dtis("-SS").Fdiffs({2}).Build(),
- Builder().T(1).Dtis("-DS").Fdiffs({2, 4}).Build(),
- Builder().T(2).Dtis("--D").Fdiffs({1}).Build(),
- Builder().T(2).Dtis("--D").Fdiffs({1, 3}).Build(),
- };
+ template_structure.templates.resize(7);
+ template_structure.templates[0].T(0).Dtis("SSS");
+ template_structure.templates[1].T(0).Dtis("SSS").FrameDiffs({4});
+ template_structure.templates[2].T(0).Dtis("SRR").FrameDiffs({4});
+ template_structure.templates[3].T(1).Dtis("-SS").FrameDiffs({2});
+ template_structure.templates[4].T(1).Dtis("-DS").FrameDiffs({4, 2});
+ template_structure.templates[5].T(2).Dtis("--D").FrameDiffs({1});
+ template_structure.templates[6].T(2).Dtis("--D").FrameDiffs({3, 1});
}
return template_structure;
}
case 4: {
- template_structure.templates = {
- Builder().T(0).Dtis("SSSS").Build(),
- Builder().T(0).Dtis("SSSS").Fdiffs({8}).Build(),
- Builder().T(1).Dtis("-SRR").Fdiffs({4}).Build(),
- Builder().T(1).Dtis("-SRR").Fdiffs({4, 8}).Build(),
- Builder().T(2).Dtis("--SR").Fdiffs({2}).Build(),
- Builder().T(2).Dtis("--SR").Fdiffs({2, 4}).Build(),
- Builder().T(3).Dtis("---D").Fdiffs({1}).Build(),
- Builder().T(3).Dtis("---D").Fdiffs({1, 3}).Build(),
- };
+ template_structure.templates.resize(8);
+ template_structure.templates[0].T(0).Dtis("SSSS");
+ template_structure.templates[1].T(0).Dtis("SSSS").FrameDiffs({8});
+ template_structure.templates[2].T(1).Dtis("-SRR").FrameDiffs({4});
+ template_structure.templates[3].T(1).Dtis("-SRR").FrameDiffs({4, 8});
+ template_structure.templates[4].T(2).Dtis("--SR").FrameDiffs({2});
+ template_structure.templates[5].T(2).Dtis("--SR").FrameDiffs({2, 4});
+ template_structure.templates[6].T(3).Dtis("---D").FrameDiffs({1});
+ template_structure.templates[7].T(3).Dtis("---D").FrameDiffs({1, 3});
return template_structure;
}
default:
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.h b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.h
index 29cfcf0489a..d127d8056d0 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.h
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.h
@@ -75,7 +75,7 @@ class DefaultTemporalLayers final : public Vp8FrameBufferController {
DependencyInfo(absl::string_view indication_symbols,
Vp8FrameConfig frame_config)
: decode_target_indications(
- GenericFrameInfo::DecodeTargetInfo(indication_symbols)),
+ webrtc_impl::StringToDecodeTargetIndications(indication_symbols)),
frame_config(frame_config) {}
absl::InlinedVector<DecodeTargetIndication, 10> decode_target_indications;
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc
index a3ee2c0c41d..d86d8767c5d 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc
@@ -44,26 +44,48 @@ constexpr int kVp8ErrorPropagationTh = 30;
constexpr long kDecodeDeadlineRealtime = 1; // NOLINT
const char kVp8PostProcArmFieldTrial[] = "WebRTC-VP8-Postproc-Config-Arm";
+const char kVp8PostProcFieldTrial[] = "WebRTC-VP8-Postproc-Config";
-void GetPostProcParamsFromFieldTrialGroup(
- LibvpxVp8Decoder::DeblockParams* deblock_params) {
- std::string group =
- webrtc::field_trial::FindFullName(kVp8PostProcArmFieldTrial);
- if (group.empty())
- return;
+#if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) || \
+ defined(WEBRTC_ANDROID)
+constexpr bool kIsArm = true;
+#else
+constexpr bool kIsArm = false;
+#endif
+
+absl::optional<LibvpxVp8Decoder::DeblockParams> DefaultDeblockParams() {
+ if (kIsArm) {
+ // For ARM, this is only called when deblocking is explicitly enabled, and
+ // the default strength is set by the ctor.
+ return LibvpxVp8Decoder::DeblockParams();
+ }
+ // For non-arm, don't use the explicit deblocking settings by default.
+ return absl::nullopt;
+}
+
+absl::optional<LibvpxVp8Decoder::DeblockParams>
+GetPostProcParamsFromFieldTrialGroup() {
+ std::string group = webrtc::field_trial::FindFullName(
+ kIsArm ? kVp8PostProcArmFieldTrial : kVp8PostProcFieldTrial);
+ if (group.empty()) {
+ return DefaultDeblockParams();
+ }
LibvpxVp8Decoder::DeblockParams params;
if (sscanf(group.c_str(), "Enabled-%d,%d,%d", &params.max_level,
- &params.min_qp, &params.degrade_qp) != 3)
- return;
+ &params.min_qp, &params.degrade_qp) != 3) {
+ return DefaultDeblockParams();
+ }
- if (params.max_level < 0 || params.max_level > 16)
- return;
+ if (params.max_level < 0 || params.max_level > 16) {
+ return DefaultDeblockParams();
+ }
- if (params.min_qp < 0 || params.degrade_qp <= params.min_qp)
- return;
+ if (params.min_qp < 0 || params.degrade_qp <= params.min_qp) {
+ return DefaultDeblockParams();
+ }
- *deblock_params = params;
+ return params;
}
} // namespace
@@ -97,8 +119,9 @@ class LibvpxVp8Decoder::QpSmoother {
};
LibvpxVp8Decoder::LibvpxVp8Decoder()
- : use_postproc_arm_(
- webrtc::field_trial::IsEnabled(kVp8PostProcArmFieldTrial)),
+ : use_postproc_(
+ kIsArm ? webrtc::field_trial::IsEnabled(kVp8PostProcArmFieldTrial)
+ : true),
buffer_pool_(false, 300 /* max_number_of_buffers*/),
decode_complete_callback_(NULL),
inited_(false),
@@ -107,10 +130,9 @@ LibvpxVp8Decoder::LibvpxVp8Decoder()
last_frame_width_(0),
last_frame_height_(0),
key_frame_required_(true),
- qp_smoother_(use_postproc_arm_ ? new QpSmoother() : nullptr) {
- if (use_postproc_arm_)
- GetPostProcParamsFromFieldTrialGroup(&deblock_);
-}
+ deblock_params_(use_postproc_ ? GetPostProcParamsFromFieldTrialGroup()
+ : absl::nullopt),
+ qp_smoother_(use_postproc_ ? new QpSmoother() : nullptr) {}
LibvpxVp8Decoder::~LibvpxVp8Decoder() {
inited_ = true; // in order to do the actual release
@@ -131,12 +153,7 @@ int LibvpxVp8Decoder::InitDecode(const VideoCodec* inst, int number_of_cores) {
cfg.threads = 1;
cfg.h = cfg.w = 0; // set after decode
-#if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) || \
- defined(WEBRTC_ANDROID)
- vpx_codec_flags_t flags = use_postproc_arm_ ? VPX_CODEC_USE_POSTPROC : 0;
-#else
- vpx_codec_flags_t flags = VPX_CODEC_USE_POSTPROC;
-#endif
+ vpx_codec_flags_t flags = use_postproc_ ? VPX_CODEC_USE_POSTPROC : 0;
if (vpx_codec_dec_init(decoder_, vpx_codec_vp8_dx(), &cfg, flags)) {
delete decoder_;
@@ -174,43 +191,47 @@ int LibvpxVp8Decoder::Decode(const EncodedImage& input_image,
}
// Post process configurations.
-#if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) || \
- defined(WEBRTC_ANDROID)
- if (use_postproc_arm_) {
+ if (use_postproc_) {
vp8_postproc_cfg_t ppcfg;
+ // MFQE enabled to reduce key frame popping.
ppcfg.post_proc_flag = VP8_MFQE;
- // For low resolutions, use stronger deblocking filter.
- int last_width_x_height = last_frame_width_ * last_frame_height_;
- if (last_width_x_height > 0 && last_width_x_height <= 320 * 240) {
- // Enable the deblock and demacroblocker based on qp thresholds.
- RTC_DCHECK(qp_smoother_);
- int qp = qp_smoother_->GetAvg();
- if (qp > deblock_.min_qp) {
- int level = deblock_.max_level;
- if (qp < deblock_.degrade_qp) {
- // Use lower level.
- level = deblock_.max_level * (qp - deblock_.min_qp) /
- (deblock_.degrade_qp - deblock_.min_qp);
+
+ if (kIsArm) {
+ RTC_DCHECK(deblock_params_.has_value());
+ }
+ if (deblock_params_.has_value()) {
+ // For low resolutions, use stronger deblocking filter.
+ int last_width_x_height = last_frame_width_ * last_frame_height_;
+ if (last_width_x_height > 0 && last_width_x_height <= 320 * 240) {
+ // Enable the deblock and demacroblocker based on qp thresholds.
+ RTC_DCHECK(qp_smoother_);
+ int qp = qp_smoother_->GetAvg();
+ if (qp > deblock_params_->min_qp) {
+ int level = deblock_params_->max_level;
+ if (qp < deblock_params_->degrade_qp) {
+ // Use lower level.
+ level = deblock_params_->max_level *
+ (qp - deblock_params_->min_qp) /
+ (deblock_params_->degrade_qp - deblock_params_->min_qp);
+ }
+ // Deblocking level only affects VP8_DEMACROBLOCK.
+ ppcfg.deblocking_level = std::max(level, 1);
+ ppcfg.post_proc_flag |= VP8_DEBLOCK | VP8_DEMACROBLOCK;
}
- // Deblocking level only affects VP8_DEMACROBLOCK.
- ppcfg.deblocking_level = std::max(level, 1);
- ppcfg.post_proc_flag |= VP8_DEBLOCK | VP8_DEMACROBLOCK;
}
+ } else {
+ // Non-arm with no explicit deblock params set.
+ ppcfg.post_proc_flag |= VP8_DEBLOCK;
+ // For VGA resolutions and lower, enable the demacroblocker postproc.
+ if (last_frame_width_ * last_frame_height_ <= 640 * 360) {
+ ppcfg.post_proc_flag |= VP8_DEMACROBLOCK;
+ }
+ // Strength of deblocking filter. Valid range:[0,16]
+ ppcfg.deblocking_level = 3;
}
+
vpx_codec_control(decoder_, VP8_SET_POSTPROC, &ppcfg);
}
-#else
- vp8_postproc_cfg_t ppcfg;
- // MFQE enabled to reduce key frame popping.
- ppcfg.post_proc_flag = VP8_MFQE | VP8_DEBLOCK;
- // For VGA resolutions and lower, enable the demacroblocker postproc.
- if (last_frame_width_ * last_frame_height_ <= 640 * 360) {
- ppcfg.post_proc_flag |= VP8_DEMACROBLOCK;
- }
- // Strength of deblocking filter. Valid range:[0,16]
- ppcfg.deblocking_level = 3;
- vpx_codec_control(decoder_, VP8_SET_POSTPROC, &ppcfg);
-#endif
// Always start with a complete key frame.
if (key_frame_required_) {
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h
index d9bfee81c1a..2a0c5f2c5ba 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h
@@ -13,6 +13,7 @@
#include <memory>
+#include "absl/types/optional.h"
#include "api/video/encoded_image.h"
#include "api/video_codecs/video_decoder.h"
#include "common_video/include/i420_buffer_pool.h"
@@ -51,7 +52,7 @@ class LibvpxVp8Decoder : public VideoDecoder {
uint32_t timeStamp,
int qp,
const webrtc::ColorSpace* explicit_color_space);
- const bool use_postproc_arm_;
+ const bool use_postproc_;
I420BufferPool buffer_pool_;
DecodedImageCallback* decode_complete_callback_;
@@ -61,7 +62,7 @@ class LibvpxVp8Decoder : public VideoDecoder {
int last_frame_width_;
int last_frame_height_;
bool key_frame_required_;
- DeblockParams deblock_;
+ const absl::optional<DeblockParams> deblock_params_;
const std::unique_ptr<QpSmoother> qp_smoother_;
};
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/screenshare_layers.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/screenshare_layers.cc
index 01858c6ee9d..caccb4246c1 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/screenshare_layers.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/screenshare_layers.cc
@@ -36,6 +36,7 @@ constexpr int kMinTimeBetweenSyncs = kOneSecond90Khz * 2;
constexpr int kMaxTimeBetweenSyncs = kOneSecond90Khz * 4;
constexpr int kQpDeltaThresholdForSync = 8;
constexpr int kMinBitrateKbpsForQpBoost = 500;
+constexpr auto kSwitch = DecodeTargetIndication::kSwitch;
} // namespace
const double ScreenshareLayers::kMaxTL0FpsReduction = 2.5;
@@ -319,8 +320,7 @@ void ScreenshareLayers::OnEncodeDone(size_t stream_index,
if (number_of_temporal_layers_ == 1) {
vp8_info.temporalIdx = kNoTemporalIdx;
vp8_info.layerSync = false;
- generic_frame_info.decode_target_indications =
- GenericFrameInfo::DecodeTargetInfo("S");
+ generic_frame_info.decode_target_indications = {kSwitch};
generic_frame_info.encoder_buffers.emplace_back(
0, /*referenced=*/!is_keyframe, /*updated=*/true);
} else {
@@ -344,8 +344,7 @@ void ScreenshareLayers::OnEncodeDone(size_t stream_index,
active_layer_ = 1;
info->template_structure =
GetTemplateStructure(number_of_temporal_layers_);
- generic_frame_info.decode_target_indications =
- GenericFrameInfo::DecodeTargetInfo("SS");
+ generic_frame_info.decode_target_indications = {kSwitch, kSwitch};
} else if (active_layer_ >= 0 && layers_[active_layer_].state ==
TemporalLayer::State::kKeyFrame) {
layers_[active_layer_].state = TemporalLayer::State::kNormal;
@@ -429,21 +428,18 @@ FrameDependencyStructure ScreenshareLayers::GetTemplateStructure(
FrameDependencyStructure template_structure;
template_structure.num_decode_targets = num_layers;
- using Builder = GenericFrameInfo::Builder;
switch (num_layers) {
case 1: {
- template_structure.templates = {
- Builder().T(0).Dtis("S").Build(),
- Builder().T(0).Dtis("S").Fdiffs({1}).Build(),
- };
+ template_structure.templates.resize(2);
+ template_structure.templates[0].T(0).Dtis("S");
+ template_structure.templates[1].T(0).Dtis("S").FrameDiffs({1});
return template_structure;
}
case 2: {
- template_structure.templates = {
- Builder().T(0).Dtis("SS").Build(),
- Builder().T(0).Dtis("SS").Fdiffs({1}).Build(),
- Builder().T(1).Dtis("-S").Fdiffs({1}).Build(),
- };
+ template_structure.templates.resize(3);
+ template_structure.templates[0].T(0).Dtis("SS");
+ template_structure.templates[1].T(0).Dtis("SS").FrameDiffs({1});
+ template_structure.templates[2].T(1).Dtis("-S").FrameDiffs({1});
return template_structure;
}
default:
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/screenshare_layers.h b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/screenshare_layers.h
index 5270ffe81cb..39477f12f17 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/screenshare_layers.h
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/screenshare_layers.h
@@ -78,7 +78,7 @@ class ScreenshareLayers final : public Vp8FrameBufferController {
DependencyInfo(absl::string_view indication_symbols,
Vp8FrameConfig frame_config)
: decode_target_indications(
- GenericFrameInfo::DecodeTargetInfo(indication_symbols)),
+ webrtc_impl::StringToDecodeTargetIndications(indication_symbols)),
frame_config(frame_config) {}
absl::InlinedVector<DecodeTargetIndication, 10> decode_target_indications;
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/test/mock_libvpx_interface.h b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/test/mock_libvpx_interface.h
index dcff1e6a188..1ac927d29f9 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/test/mock_libvpx_interface.h
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/test/mock_libvpx_interface.h
@@ -19,74 +19,86 @@ namespace webrtc {
class MockLibvpxVp8Interface : public LibvpxInterface {
public:
- MOCK_CONST_METHOD5(img_alloc,
- vpx_image_t*(vpx_image_t*,
- vpx_img_fmt_t,
- unsigned int,
- unsigned int,
- unsigned int));
- MOCK_CONST_METHOD6(img_wrap,
- vpx_image_t*(vpx_image_t*,
- vpx_img_fmt_t,
- unsigned int,
- unsigned int,
- unsigned int,
- unsigned char*));
- MOCK_CONST_METHOD1(img_free, void(vpx_image_t* img));
- MOCK_CONST_METHOD2(codec_enc_config_set,
- vpx_codec_err_t(vpx_codec_ctx_t*,
- const vpx_codec_enc_cfg_t*));
- MOCK_CONST_METHOD3(codec_enc_config_default,
- vpx_codec_err_t(vpx_codec_iface_t*,
- vpx_codec_enc_cfg_t*,
- unsigned int));
- MOCK_CONST_METHOD4(codec_enc_init,
- vpx_codec_err_t(vpx_codec_ctx_t*,
- vpx_codec_iface_t*,
- const vpx_codec_enc_cfg_t*,
- vpx_codec_flags_t));
- MOCK_CONST_METHOD6(codec_enc_init_multi,
- vpx_codec_err_t(vpx_codec_ctx_t*,
- vpx_codec_iface_t*,
- vpx_codec_enc_cfg_t*,
- int,
- vpx_codec_flags_t,
- vpx_rational_t*));
- MOCK_CONST_METHOD1(codec_destroy, vpx_codec_err_t(vpx_codec_ctx_t*));
- MOCK_CONST_METHOD3(codec_control,
- vpx_codec_err_t(vpx_codec_ctx_t*,
- vp8e_enc_control_id,
- uint32_t));
- MOCK_CONST_METHOD3(codec_control,
- vpx_codec_err_t(vpx_codec_ctx_t*,
- vp8e_enc_control_id,
- int));
- MOCK_CONST_METHOD3(codec_control,
- vpx_codec_err_t(vpx_codec_ctx_t*,
- vp8e_enc_control_id,
- int*));
- MOCK_CONST_METHOD3(codec_control,
- vpx_codec_err_t(vpx_codec_ctx_t*,
- vp8e_enc_control_id,
- vpx_roi_map*));
- MOCK_CONST_METHOD3(codec_control,
- vpx_codec_err_t(vpx_codec_ctx_t*,
- vp8e_enc_control_id,
- vpx_active_map*));
- MOCK_CONST_METHOD3(codec_control,
- vpx_codec_err_t(vpx_codec_ctx_t*,
- vp8e_enc_control_id,
- vpx_scaling_mode*));
- MOCK_CONST_METHOD6(codec_encode,
- vpx_codec_err_t(vpx_codec_ctx_t*,
- const vpx_image_t*,
- vpx_codec_pts_t,
- uint64_t,
- vpx_enc_frame_flags_t,
- uint64_t));
- MOCK_CONST_METHOD2(codec_get_cx_data,
- const vpx_codec_cx_pkt_t*(vpx_codec_ctx_t*,
- vpx_codec_iter_t*));
+ MOCK_METHOD(
+ vpx_image_t*,
+ img_alloc,
+ (vpx_image_t*, vpx_img_fmt_t, unsigned int, unsigned int, unsigned int),
+ (const, override));
+ MOCK_METHOD(vpx_image_t*,
+ img_wrap,
+ (vpx_image_t*,
+ vpx_img_fmt_t,
+ unsigned int,
+ unsigned int,
+ unsigned int,
+ unsigned char*),
+ (const, override));
+ MOCK_METHOD(void, img_free, (vpx_image_t * img), (const, override));
+ MOCK_METHOD(vpx_codec_err_t,
+ codec_enc_config_set,
+ (vpx_codec_ctx_t*, const vpx_codec_enc_cfg_t*),
+ (const, override));
+ MOCK_METHOD(vpx_codec_err_t,
+ codec_enc_config_default,
+ (vpx_codec_iface_t*, vpx_codec_enc_cfg_t*, unsigned int),
+ (const, override));
+ MOCK_METHOD(vpx_codec_err_t,
+ codec_enc_init,
+ (vpx_codec_ctx_t*,
+ vpx_codec_iface_t*,
+ const vpx_codec_enc_cfg_t*,
+ vpx_codec_flags_t),
+ (const, override));
+ MOCK_METHOD(vpx_codec_err_t,
+ codec_enc_init_multi,
+ (vpx_codec_ctx_t*,
+ vpx_codec_iface_t*,
+ vpx_codec_enc_cfg_t*,
+ int,
+ vpx_codec_flags_t,
+ vpx_rational_t*),
+ (const, override));
+ MOCK_METHOD(vpx_codec_err_t,
+ codec_destroy,
+ (vpx_codec_ctx_t*),
+ (const, override));
+ MOCK_METHOD(vpx_codec_err_t,
+ codec_control,
+ (vpx_codec_ctx_t*, vp8e_enc_control_id, uint32_t),
+ (const, override));
+ MOCK_METHOD(vpx_codec_err_t,
+ codec_control,
+ (vpx_codec_ctx_t*, vp8e_enc_control_id, int),
+ (const, override));
+ MOCK_METHOD(vpx_codec_err_t,
+ codec_control,
+ (vpx_codec_ctx_t*, vp8e_enc_control_id, int*),
+ (const, override));
+ MOCK_METHOD(vpx_codec_err_t,
+ codec_control,
+ (vpx_codec_ctx_t*, vp8e_enc_control_id, vpx_roi_map*),
+ (const, override));
+ MOCK_METHOD(vpx_codec_err_t,
+ codec_control,
+ (vpx_codec_ctx_t*, vp8e_enc_control_id, vpx_active_map*),
+ (const, override));
+ MOCK_METHOD(vpx_codec_err_t,
+ codec_control,
+ (vpx_codec_ctx_t*, vp8e_enc_control_id, vpx_scaling_mode*),
+ (const, override));
+ MOCK_METHOD(vpx_codec_err_t,
+ codec_encode,
+ (vpx_codec_ctx_t*,
+ const vpx_image_t*,
+ vpx_codec_pts_t,
+ uint64_t,
+ vpx_enc_frame_flags_t,
+ uint64_t),
+ (const, override));
+ MOCK_METHOD(const vpx_codec_cx_pkt_t*,
+ codec_get_cx_data,
+ (vpx_codec_ctx_t*, vpx_codec_iter_t*),
+ (const, override));
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/include/vp9.h b/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/include/vp9.h
index 8091cacec90..7cf1c2ebd1b 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/include/vp9.h
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/include/vp9.h
@@ -25,6 +25,10 @@ namespace webrtc {
// negotiate in SDP, in order of preference.
std::vector<SdpVideoFormat> SupportedVP9Codecs();
+// Returns a vector with all supported internal VP9 decode profiles in order of
+// preference. These will be available for receive-only connections.
+std::vector<SdpVideoFormat> SupportedVP9DecoderCodecs();
+
class VP9Encoder : public VideoEncoder {
public:
// Deprecated. Returns default implementation using VP9 Profile 0.
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/test/vp9_impl_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/test/vp9_impl_unittest.cc
index d40cf23257f..4d5b8497d1a 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/test/vp9_impl_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/test/vp9_impl_unittest.cc
@@ -16,6 +16,7 @@
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "media/base/vp9_profile.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "modules/video_coding/codecs/test/encoded_video_frame_producer.h"
#include "modules/video_coding/codecs/test/video_codec_unittest.h"
#include "modules/video_coding/codecs/vp9/include/vp9.h"
#include "modules/video_coding/codecs/vp9/svc_config.h"
@@ -25,20 +26,33 @@
#include "test/video_codec_settings.h"
namespace webrtc {
+namespace {
using ::testing::ElementsAreArray;
+using ::testing::SizeIs;
+using ::testing::UnorderedElementsAreArray;
using EncoderInfo = webrtc::VideoEncoder::EncoderInfo;
using FramerateFractions =
absl::InlinedVector<uint8_t, webrtc::kMaxTemporalStreams>;
-namespace {
-const size_t kWidth = 1280;
-const size_t kHeight = 720;
+constexpr size_t kWidth = 1280;
+constexpr size_t kHeight = 720;
const VideoEncoder::Capabilities kCapabilities(false);
const VideoEncoder::Settings kSettings(kCapabilities,
/*number_of_cores=*/1,
/*max_payload_size=*/0);
+
+VideoCodec DefaultCodecSettings() {
+ VideoCodec codec_settings;
+ webrtc::test::CodecSettings(kVideoCodecVP9, &codec_settings);
+ codec_settings.width = kWidth;
+ codec_settings.height = kHeight;
+ codec_settings.VP9()->numberOfTemporalLayers = 1;
+ codec_settings.VP9()->numberOfSpatialLayers = 1;
+ return codec_settings;
+}
+
} // namespace
class TestVp9Impl : public VideoCodecUnitTest {
@@ -59,53 +73,6 @@ class TestVp9Impl : public VideoCodecUnitTest {
codec_settings->VP9()->numberOfSpatialLayers = 1;
}
- void ExpectFrameWith(uint8_t temporal_idx) {
- EncodedImage encoded_frame;
- CodecSpecificInfo codec_specific_info;
- ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
- EXPECT_EQ(temporal_idx, codec_specific_info.codecSpecific.VP9.temporal_idx);
- }
-
- void ExpectFrameWith(size_t num_spatial_layers,
- uint8_t temporal_idx,
- bool temporal_up_switch,
- uint8_t num_ref_pics,
- const std::vector<uint8_t>& p_diff) {
- std::vector<EncodedImage> encoded_frame;
- std::vector<CodecSpecificInfo> codec_specific;
- ASSERT_TRUE(WaitForEncodedFrames(&encoded_frame, &codec_specific));
- for (size_t spatial_idx = 0; spatial_idx < num_spatial_layers;
- ++spatial_idx) {
- const CodecSpecificInfoVP9& vp9 =
- codec_specific[spatial_idx].codecSpecific.VP9;
- if (vp9.temporal_idx == kNoTemporalIdx) {
- EXPECT_EQ(temporal_idx, 0);
- } else {
- EXPECT_EQ(vp9.temporal_idx, temporal_idx);
- }
- if (num_spatial_layers == 1) {
- EXPECT_FALSE(encoded_frame[spatial_idx].SpatialIndex());
- } else {
- EXPECT_EQ(encoded_frame[spatial_idx].SpatialIndex(),
- static_cast<int>(spatial_idx));
- }
- EXPECT_EQ(vp9.temporal_up_switch, temporal_up_switch);
-
- // Ensure there are no duplicates in reference list.
- std::vector<uint8_t> vp9_p_diff(vp9.p_diff,
- vp9.p_diff + vp9.num_ref_pics);
- std::sort(vp9_p_diff.begin(), vp9_p_diff.end());
- EXPECT_EQ(std::unique(vp9_p_diff.begin(), vp9_p_diff.end()),
- vp9_p_diff.end());
-
- for (size_t ref_pic_num = 0; ref_pic_num < num_ref_pics; ++ref_pic_num) {
- EXPECT_NE(
- std::find(p_diff.begin(), p_diff.end(), vp9.p_diff[ref_pic_num]),
- p_diff.end());
- }
- }
- }
-
void ConfigureSvc(size_t num_spatial_layers, size_t num_temporal_layers = 1) {
codec_settings_.VP9()->numberOfSpatialLayers =
static_cast<unsigned char>(num_spatial_layers);
@@ -187,57 +154,61 @@ TEST_F(TestVp9Impl, DecodedQpEqualsEncodedQp) {
EXPECT_EQ(encoded_frame.qp_, *decoded_qp);
}
-TEST_F(TestVp9Impl, ParserQpEqualsEncodedQp) {
- EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(NextInputFrame(), nullptr));
- EncodedImage encoded_frame;
- CodecSpecificInfo codec_specific_info;
- ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
-
+TEST(Vp9ImplTest, ParserQpEqualsEncodedQp) {
+ std::unique_ptr<VideoEncoder> encoder = VP9Encoder::Create();
+ VideoCodec codec_settings = DefaultCodecSettings();
+ encoder->InitEncode(&codec_settings, kSettings);
+
+ std::vector<EncodedVideoFrameProducer::EncodedFrame> frames =
+ EncodedVideoFrameProducer(*encoder)
+ .SetNumInputFrames(1)
+ .SetResolution({kWidth, kHeight})
+ .Encode();
+ ASSERT_THAT(frames, SizeIs(1));
+ const auto& encoded_frame = frames.front().encoded_image;
int qp = 0;
ASSERT_TRUE(vp9::GetQp(encoded_frame.data(), encoded_frame.size(), &qp));
EXPECT_EQ(encoded_frame.qp_, qp);
}
-TEST_F(TestVp9Impl, EncoderWith2TemporalLayers) {
- // Override default settings.
- codec_settings_.VP9()->numberOfTemporalLayers = 2;
+TEST(Vp9ImplTest, EncoderWith2TemporalLayers) {
+ std::unique_ptr<VideoEncoder> encoder = VP9Encoder::Create();
+ VideoCodec codec_settings = DefaultCodecSettings();
+ codec_settings.VP9()->numberOfTemporalLayers = 2;
// Tl0PidIdx is only used in non-flexible mode.
- codec_settings_.VP9()->flexibleMode = false;
- EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
- encoder_->InitEncode(&codec_settings_, kSettings));
-
- // Temporal layer 0.
- EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(NextInputFrame(), nullptr));
- EncodedImage encoded_frame;
- CodecSpecificInfo codec_specific_info;
- ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
- EXPECT_EQ(0, codec_specific_info.codecSpecific.VP9.temporal_idx);
-
- // Temporal layer 1.
- EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(NextInputFrame(), nullptr));
- ExpectFrameWith(1);
-
- // Temporal layer 0.
- EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(NextInputFrame(), nullptr));
- ExpectFrameWith(0);
-
- // Temporal layer 1.
- EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(NextInputFrame(), nullptr));
- ExpectFrameWith(1);
+ codec_settings.VP9()->flexibleMode = false;
+ EXPECT_EQ(encoder->InitEncode(&codec_settings, kSettings),
+ WEBRTC_VIDEO_CODEC_OK);
+
+ std::vector<EncodedVideoFrameProducer::EncodedFrame> frames =
+ EncodedVideoFrameProducer(*encoder)
+ .SetNumInputFrames(4)
+ .SetResolution({kWidth, kHeight})
+ .Encode();
+
+ ASSERT_THAT(frames, SizeIs(4));
+ EXPECT_EQ(frames[0].codec_specific_info.codecSpecific.VP9.temporal_idx, 0);
+ EXPECT_EQ(frames[1].codec_specific_info.codecSpecific.VP9.temporal_idx, 1);
+ EXPECT_EQ(frames[2].codec_specific_info.codecSpecific.VP9.temporal_idx, 0);
+ EXPECT_EQ(frames[3].codec_specific_info.codecSpecific.VP9.temporal_idx, 1);
}
-TEST_F(TestVp9Impl, EncoderWith2SpatialLayers) {
- codec_settings_.VP9()->numberOfSpatialLayers = 2;
- EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
- encoder_->InitEncode(&codec_settings_, kSettings));
-
- SetWaitForEncodedFramesThreshold(2);
- EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(NextInputFrame(), nullptr));
- std::vector<EncodedImage> encoded_frame;
- std::vector<CodecSpecificInfo> codec_info;
- ASSERT_TRUE(WaitForEncodedFrames(&encoded_frame, &codec_info));
- EXPECT_EQ(encoded_frame[0].SpatialIndex(), 0);
- EXPECT_EQ(encoded_frame[1].SpatialIndex(), 1);
+TEST(Vp9ImplTest, EncoderWith2SpatialLayers) {
+ std::unique_ptr<VideoEncoder> encoder = VP9Encoder::Create();
+ VideoCodec codec_settings = DefaultCodecSettings();
+ codec_settings.VP9()->numberOfSpatialLayers = 2;
+ EXPECT_EQ(encoder->InitEncode(&codec_settings, kSettings),
+ WEBRTC_VIDEO_CODEC_OK);
+
+ std::vector<EncodedVideoFrameProducer::EncodedFrame> frames =
+ EncodedVideoFrameProducer(*encoder)
+ .SetNumInputFrames(1)
+ .SetResolution({kWidth, kHeight})
+ .Encode();
+
+ ASSERT_THAT(frames, SizeIs(2));
+ EXPECT_EQ(frames[0].encoded_image.SpatialIndex(), 0);
+ EXPECT_EQ(frames[1].encoded_image.SpatialIndex(), 1);
}
TEST_F(TestVp9Impl, EncoderExplicitLayering) {
@@ -1421,29 +1392,34 @@ TEST_F(TestVp9Impl, EncoderInfoFpsAllocationFlexibleMode) {
::testing::ElementsAreArray(expected_fps_allocation));
}
-class TestVp9ImplWithLayering
- : public TestVp9Impl,
- public ::testing::WithParamInterface<::testing::tuple<uint8_t, uint8_t>> {
+class Vp9ImplWithLayeringTest
+ : public ::testing::TestWithParam<std::tuple<int, int, bool>> {
protected:
- TestVp9ImplWithLayering()
- : num_spatial_layers_(::testing::get<0>(GetParam())),
- num_temporal_layers_(::testing::get<1>(GetParam())) {}
+ Vp9ImplWithLayeringTest()
+ : num_spatial_layers_(std::get<0>(GetParam())),
+ num_temporal_layers_(std::get<1>(GetParam())),
+ override_field_trials_(std::get<2>(GetParam())
+ ? "WebRTC-Vp9ExternalRefCtrl/Enabled/"
+ : "") {}
const uint8_t num_spatial_layers_;
const uint8_t num_temporal_layers_;
+ const test::ScopedFieldTrials override_field_trials_;
};
-TEST_P(TestVp9ImplWithLayering, FlexibleMode) {
+TEST_P(Vp9ImplWithLayeringTest, FlexibleMode) {
// In flexible mode encoder wrapper obtains actual list of references from
// encoder and writes it into RTP payload descriptor. Check that reference
// list in payload descriptor matches the predefined one, which is used
// in non-flexible mode.
- codec_settings_.VP9()->flexibleMode = true;
- codec_settings_.VP9()->frameDroppingOn = false;
- codec_settings_.VP9()->numberOfSpatialLayers = num_spatial_layers_;
- codec_settings_.VP9()->numberOfTemporalLayers = num_temporal_layers_;
- EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
- encoder_->InitEncode(&codec_settings_, kSettings));
+ std::unique_ptr<VideoEncoder> encoder = VP9Encoder::Create();
+ VideoCodec codec_settings = DefaultCodecSettings();
+ codec_settings.VP9()->flexibleMode = true;
+ codec_settings.VP9()->frameDroppingOn = false;
+ codec_settings.VP9()->numberOfSpatialLayers = num_spatial_layers_;
+ codec_settings.VP9()->numberOfTemporalLayers = num_temporal_layers_;
+ EXPECT_EQ(encoder->InitEncode(&codec_settings, kSettings),
+ WEBRTC_VIDEO_CODEC_OK);
GofInfoVP9 gof;
if (num_temporal_layers_ == 1) {
@@ -1456,65 +1432,48 @@ TEST_P(TestVp9ImplWithLayering, FlexibleMode) {
// Encode at least (num_frames_in_gof + 1) frames to verify references
// of non-key frame with gof_idx = 0.
- for (size_t frame_num = 0; frame_num < gof.num_frames_in_gof + 1;
- ++frame_num) {
- SetWaitForEncodedFramesThreshold(num_spatial_layers_);
- EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
- encoder_->Encode(NextInputFrame(), nullptr));
-
- const bool is_key_frame = frame_num == 0;
- const size_t gof_idx = frame_num % gof.num_frames_in_gof;
- const std::vector<uint8_t> p_diff(std::begin(gof.pid_diff[gof_idx]),
- std::end(gof.pid_diff[gof_idx]));
-
- ExpectFrameWith(num_spatial_layers_, gof.temporal_idx[gof_idx],
- gof.temporal_up_switch[gof_idx],
- is_key_frame ? 0 : gof.num_ref_pics[gof_idx], p_diff);
- }
-}
-
-TEST_P(TestVp9ImplWithLayering, ExternalRefControl) {
- test::ScopedFieldTrials override_field_trials(
- "WebRTC-Vp9ExternalRefCtrl/Enabled/");
- codec_settings_.VP9()->flexibleMode = true;
- codec_settings_.VP9()->frameDroppingOn = false;
- codec_settings_.VP9()->numberOfSpatialLayers = num_spatial_layers_;
- codec_settings_.VP9()->numberOfTemporalLayers = num_temporal_layers_;
- EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
- encoder_->InitEncode(&codec_settings_, kSettings));
-
- GofInfoVP9 gof;
- if (num_temporal_layers_ == 1) {
- gof.SetGofInfoVP9(kTemporalStructureMode1);
- } else if (num_temporal_layers_ == 2) {
- gof.SetGofInfoVP9(kTemporalStructureMode2);
- } else if (num_temporal_layers_ == 3) {
- gof.SetGofInfoVP9(kTemporalStructureMode3);
- }
-
- // Encode at least (num_frames_in_gof + 1) frames to verify references
- // of non-key frame with gof_idx = 0.
- for (size_t frame_num = 0; frame_num < gof.num_frames_in_gof + 1;
- ++frame_num) {
- SetWaitForEncodedFramesThreshold(num_spatial_layers_);
- EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
- encoder_->Encode(NextInputFrame(), nullptr));
-
- const bool is_key_frame = frame_num == 0;
- const size_t gof_idx = frame_num % gof.num_frames_in_gof;
- const std::vector<uint8_t> p_diff(std::begin(gof.pid_diff[gof_idx]),
- std::end(gof.pid_diff[gof_idx]));
-
- ExpectFrameWith(num_spatial_layers_, gof.temporal_idx[gof_idx],
- gof.temporal_up_switch[gof_idx],
- is_key_frame ? 0 : gof.num_ref_pics[gof_idx], p_diff);
+ int num_input_frames = gof.num_frames_in_gof + 1;
+ std::vector<EncodedVideoFrameProducer::EncodedFrame> frames =
+ EncodedVideoFrameProducer(*encoder)
+ .SetNumInputFrames(num_input_frames)
+ .SetResolution({kWidth, kHeight})
+ .Encode();
+ ASSERT_THAT(frames, SizeIs(num_input_frames * num_spatial_layers_));
+
+ for (size_t i = 0; i < frames.size(); ++i) {
+ const EncodedVideoFrameProducer::EncodedFrame& frame = frames[i];
+ const size_t picture_idx = i / num_spatial_layers_;
+ const size_t gof_idx = picture_idx % gof.num_frames_in_gof;
+
+ const CodecSpecificInfoVP9& vp9 =
+ frame.codec_specific_info.codecSpecific.VP9;
+ EXPECT_EQ(frame.encoded_image.SpatialIndex(),
+ num_spatial_layers_ == 1
+ ? absl::nullopt
+ : absl::optional<int>(i % num_spatial_layers_))
+ << "Frame " << i;
+ EXPECT_EQ(vp9.temporal_idx, num_temporal_layers_ == 1
+ ? kNoTemporalIdx
+ : gof.temporal_idx[gof_idx])
+ << "Frame " << i;
+ EXPECT_EQ(vp9.temporal_up_switch, gof.temporal_up_switch[gof_idx])
+ << "Frame " << i;
+ if (picture_idx == 0) {
+ EXPECT_EQ(vp9.num_ref_pics, 0) << "Frame " << i;
+ } else {
+ EXPECT_THAT(rtc::MakeArrayView(vp9.p_diff, vp9.num_ref_pics),
+ UnorderedElementsAreArray(gof.pid_diff[gof_idx],
+ gof.num_ref_pics[gof_idx]))
+ << "Frame " << i;
+ }
}
}
INSTANTIATE_TEST_SUITE_P(All,
- TestVp9ImplWithLayering,
+ Vp9ImplWithLayeringTest,
::testing::Combine(::testing::Values(1, 2, 3),
- ::testing::Values(1, 2, 3)));
+ ::testing::Values(1, 2, 3),
+ ::testing::Bool()));
class TestVp9ImplFrameDropping : public TestVp9Impl {
protected:
@@ -1774,4 +1733,12 @@ TEST_F(TestVp9Impl, ReenablingUpperLayerAfterKFWithInterlayerPredIsEnabled) {
EXPECT_EQ(encoded_frames[0]._frameType, VideoFrameType::kVideoFrameDelta);
}
+TEST_F(TestVp9Impl, HandlesEmptyInitDecode) {
+ std::unique_ptr<VideoDecoder> decoder = CreateDecoder();
+ // Check that nullptr settings are ok for decoder.
+ EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+ decoder->InitDecode(/*codec_settings=*/nullptr, 1));
+ EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder->Release());
+}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9.cc
index 527bce7729d..9b0585c0597 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9.cc
@@ -39,6 +39,22 @@ std::vector<SdpVideoFormat> SupportedVP9Codecs() {
cricket::kVp9CodecName,
{{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile2)}}));
}
+
+ return supported_formats;
+#else
+ return std::vector<SdpVideoFormat>();
+#endif
+}
+
+std::vector<SdpVideoFormat> SupportedVP9DecoderCodecs() {
+#ifdef RTC_ENABLE_VP9
+ std::vector<SdpVideoFormat> supported_formats = SupportedVP9Codecs();
+ // The WebRTC internal decoder supports VP9 profile 1. However, there's
+ // currently no way of sending VP9 profile 1 using the internal encoder.
+ // It would require extended support for I444, I422, and I440 buffers.
+ supported_formats.push_back(SdpVideoFormat(
+ cricket::kVp9CodecName,
+ {{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile1)}}));
return supported_formats;
#else
return std::vector<SdpVideoFormat>();
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
index 028d3ab8f77..d29c19dc8c9 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
@@ -25,6 +25,7 @@
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/video_coding/codecs/vp9/svc_rate_allocator.h"
+#include "modules/video_coding/utility/vp9_uncompressed_header_parser.h"
#include "rtc_base/checks.h"
#include "rtc_base/experiments/rate_control_settings.h"
#include "rtc_base/keep_ref_until_done.h"
@@ -45,8 +46,6 @@ namespace {
uint8_t kRefBufIdx[4] = {0, 0, 0, 1};
uint8_t kUpdBufIdx[4] = {0, 0, 1, 0};
-int kMaxNumTiles4kVideo = 8;
-
// Maximum allowed PID difference for differnet per-layer frame-rate case.
const int kMaxAllowedPidDiff = 30;
@@ -527,6 +526,11 @@ int VP9EncoderImpl::InitEncode(const VideoCodec* inst,
config_->g_profile = 0;
config_->g_input_bit_depth = 8;
break;
+ case VP9Profile::kProfile1:
+ // Encoding of profile 1 is not implemented. It would require extended
+ // support for I444, I422, and I440 buffers.
+ RTC_NOTREACHED();
+ break;
case VP9Profile::kProfile2:
img_fmt = VPX_IMG_FMT_I42016;
bits_for_storage = 16;
@@ -603,13 +607,6 @@ int VP9EncoderImpl::InitEncode(const VideoCodec* inst,
(num_spatial_layers_ > 1 &&
codec_.mode == VideoCodecMode::kScreensharing) ||
inter_layer_pred_ == InterLayerPredMode::kOn;
- // TODO(ilnik): Remove this workaround once external reference control works
- // nicely with simulcast SVC mode.
- // Simlucast SVC mode is currently only used in some tests and is impossible
- // to trigger for users without using some field trials.
- if (inter_layer_pred_ == InterLayerPredMode::kOff) {
- external_ref_control_ = false;
- }
if (num_temporal_layers_ == 1) {
gof_.SetGofInfoVP9(kTemporalStructureMode1);
@@ -997,6 +994,10 @@ int VP9EncoderImpl::Encode(const VideoFrame& input_image,
raw_->stride[VPX_PLANE_V] = i420_buffer->StrideV();
break;
}
+ case VP9Profile::kProfile1: {
+ RTC_NOTREACHED();
+ break;
+ }
case VP9Profile::kProfile2: {
// We can inject kI010 frames directly for encode. All other formats
// should be converted to it.
@@ -1705,14 +1706,32 @@ int VP9DecoderImpl::InitDecode(const VideoCodec* inst, int number_of_cores) {
// errors earlier than the multi-threads version.
// - Make peak CPU usage under control (not depending on input)
cfg.threads = 1;
- (void)kMaxNumTiles4kVideo; // unused
#else
- // We want to use multithreading when decoding high resolution videos. But,
- // since we don't know resolution of input stream at this stage, we always
- // enable it.
- cfg.threads = std::min(number_of_cores, kMaxNumTiles4kVideo);
+ if (!inst) {
+ // No config provided - don't know resolution to decode yet.
+ // Set thread count to one in the meantime.
+ cfg.threads = 1;
+ } else {
+ // We want to use multithreading when decoding high resolution videos. But
+ // not too many in order to avoid overhead when many streams are decoded
+ // concurrently.
+ // Set 2 threads as target for 1280x720 pixel count, and then scale up
+ // linearly from there - but cap at physical core count.
+ // For common resolutions this results in:
+ // 1 for 360p
+ // 2 for 720p
+ // 4 for 1080p
+ // 8 for 1440p
+ // 18 for 4K
+ int num_threads =
+ std::max(1, 2 * (inst->width * inst->height) / (1280 * 720));
+ cfg.threads = std::min(number_of_cores, num_threads);
+ current_codec_ = *inst;
+ }
#endif
+ num_cores_ = number_of_cores;
+
vpx_codec_flags_t flags = 0;
if (vpx_codec_dec_init(decoder_, vpx_codec_vp9_dx(), &cfg, flags)) {
return WEBRTC_VIDEO_CODEC_MEMORY;
@@ -1730,6 +1749,15 @@ int VP9DecoderImpl::InitDecode(const VideoCodec* inst, int number_of_cores) {
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}
}
+
+ vpx_codec_err_t status =
+ vpx_codec_control(decoder_, VP9D_SET_LOOP_FILTER_OPT, 1);
+ if (status != VPX_CODEC_OK) {
+ RTC_LOG(LS_ERROR) << "Failed to enable VP9D_SET_LOOP_FILTER_OPT. "
+ << vpx_codec_error(decoder_);
+ return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+ }
+
return WEBRTC_VIDEO_CODEC_OK;
}
@@ -1742,6 +1770,29 @@ int VP9DecoderImpl::Decode(const EncodedImage& input_image,
if (decode_complete_callback_ == nullptr) {
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}
+
+ if (input_image._frameType == VideoFrameType::kVideoFrameKey) {
+ absl::optional<vp9::FrameInfo> frame_info =
+ vp9::ParseIntraFrameInfo(input_image.data(), input_image.size());
+ if (frame_info) {
+ if (frame_info->frame_width != current_codec_.width ||
+ frame_info->frame_height != current_codec_.height) {
+ // Resolution has changed, tear down and re-init a new decoder in
+ // order to get correct sizing.
+ Release();
+ current_codec_.width = frame_info->frame_width;
+ current_codec_.height = frame_info->frame_height;
+ int reinit_status = InitDecode(&current_codec_, num_cores_);
+ if (reinit_status != WEBRTC_VIDEO_CODEC_OK) {
+ RTC_LOG(LS_WARNING) << "Failed to re-init decoder.";
+ return reinit_status;
+ }
+ }
+ } else {
+ RTC_LOG(LS_WARNING) << "Failed to parse VP9 header from key-frame.";
+ }
+ }
+
// Always start with a complete key frame.
if (key_frame_required_) {
if (input_image._frameType != VideoFrameType::kVideoFrameKey)
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9_impl.h b/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9_impl.h
index f6d8318d7d5..fae94c752b9 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9_impl.h
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9_impl.h
@@ -219,6 +219,8 @@ class VP9DecoderImpl : public VP9Decoder {
bool inited_;
vpx_codec_ctx_t* decoder_;
bool key_frame_required_;
+ VideoCodec current_codec_;
+ int num_cores_;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/deprecated/BUILD.gn b/chromium/third_party/webrtc/modules/video_coding/deprecated/BUILD.gn
new file mode 100644
index 00000000000..f333b3f5e23
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/deprecated/BUILD.gn
@@ -0,0 +1,33 @@
+# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("../../../webrtc.gni")
+
+rtc_library("nack_module") {
+ sources = [
+ "nack_module.cc",
+ "nack_module.h",
+ ]
+
+ deps = [
+ "..:nack_module",
+ "../..:module_api",
+ "../../../api/units:time_delta",
+ "../../../api/units:timestamp",
+ "../../../rtc_base:checks",
+ "../../../rtc_base:criticalsection",
+ "../../../rtc_base:deprecation",
+ "../../../rtc_base:logging",
+ "../../../rtc_base:macromagic",
+ "../../../rtc_base:rtc_numerics",
+ "../../../rtc_base/experiments:field_trial_parser",
+ "../../../system_wrappers",
+ "../../../system_wrappers:field_trial",
+ "../../utility",
+ ]
+}
diff --git a/chromium/third_party/webrtc/modules/video_coding/nack_module.cc b/chromium/third_party/webrtc/modules/video_coding/deprecated/nack_module.cc
index 838af1548b0..8658729e99b 100644
--- a/chromium/third_party/webrtc/modules/video_coding/nack_module.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/deprecated/nack_module.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "modules/video_coding/nack_module.h"
+#include "modules/video_coding/deprecated/nack_module.h"
#include <algorithm>
#include <limits>
@@ -45,25 +45,25 @@ int64_t GetSendNackDelay() {
}
} // namespace
-NackModule::NackInfo::NackInfo()
+DEPRECATED_NackModule::NackInfo::NackInfo()
: seq_num(0), send_at_seq_num(0), sent_at_time(-1), retries(0) {}
-NackModule::NackInfo::NackInfo(uint16_t seq_num,
- uint16_t send_at_seq_num,
- int64_t created_at_time)
+DEPRECATED_NackModule::NackInfo::NackInfo(uint16_t seq_num,
+ uint16_t send_at_seq_num,
+ int64_t created_at_time)
: seq_num(seq_num),
send_at_seq_num(send_at_seq_num),
created_at_time(created_at_time),
sent_at_time(-1),
retries(0) {}
-NackModule::BackoffSettings::BackoffSettings(TimeDelta min_retry,
- TimeDelta max_rtt,
- double base)
+DEPRECATED_NackModule::BackoffSettings::BackoffSettings(TimeDelta min_retry,
+ TimeDelta max_rtt,
+ double base)
: min_retry_interval(min_retry), max_rtt(max_rtt), base(base) {}
-absl::optional<NackModule::BackoffSettings>
-NackModule::BackoffSettings::ParseFromFieldTrials() {
+absl::optional<DEPRECATED_NackModule::BackoffSettings>
+DEPRECATED_NackModule::BackoffSettings::ParseFromFieldTrials() {
// Matches magic number in RTPSender::OnReceivedNack().
const TimeDelta kDefaultMinRetryInterval = TimeDelta::Millis(5);
// Upper bound on link-delay considered for exponential backoff.
@@ -82,15 +82,16 @@ NackModule::BackoffSettings::ParseFromFieldTrials() {
field_trial::FindFullName("WebRTC-ExponentialNackBackoff"));
if (enabled) {
- return NackModule::BackoffSettings(min_retry.Get(), max_rtt.Get(),
- base.Get());
+ return DEPRECATED_NackModule::BackoffSettings(min_retry.Get(),
+ max_rtt.Get(), base.Get());
}
return absl::nullopt;
}
-NackModule::NackModule(Clock* clock,
- NackSender* nack_sender,
- KeyFrameRequestSender* keyframe_request_sender)
+DEPRECATED_NackModule::DEPRECATED_NackModule(
+ Clock* clock,
+ NackSender* nack_sender,
+ KeyFrameRequestSender* keyframe_request_sender)
: clock_(clock),
nack_sender_(nack_sender),
keyframe_request_sender_(keyframe_request_sender),
@@ -106,13 +107,14 @@ NackModule::NackModule(Clock* clock,
RTC_DCHECK(keyframe_request_sender_);
}
-int NackModule::OnReceivedPacket(uint16_t seq_num, bool is_keyframe) {
+int DEPRECATED_NackModule::OnReceivedPacket(uint16_t seq_num,
+ bool is_keyframe) {
return OnReceivedPacket(seq_num, is_keyframe, false);
}
-int NackModule::OnReceivedPacket(uint16_t seq_num,
- bool is_keyframe,
- bool is_recovered) {
+int DEPRECATED_NackModule::OnReceivedPacket(uint16_t seq_num,
+ bool is_keyframe,
+ bool is_recovered) {
rtc::CritScope lock(&crit_);
// TODO(philipel): When the packet includes information whether it is
// retransmitted or not, use that value instead. For
@@ -181,7 +183,7 @@ int NackModule::OnReceivedPacket(uint16_t seq_num,
return 0;
}
-void NackModule::ClearUpTo(uint16_t seq_num) {
+void DEPRECATED_NackModule::ClearUpTo(uint16_t seq_num) {
rtc::CritScope lock(&crit_);
nack_list_.erase(nack_list_.begin(), nack_list_.lower_bound(seq_num));
keyframe_list_.erase(keyframe_list_.begin(),
@@ -190,24 +192,24 @@ void NackModule::ClearUpTo(uint16_t seq_num) {
recovered_list_.lower_bound(seq_num));
}
-void NackModule::UpdateRtt(int64_t rtt_ms) {
+void DEPRECATED_NackModule::UpdateRtt(int64_t rtt_ms) {
rtc::CritScope lock(&crit_);
rtt_ms_ = rtt_ms;
}
-void NackModule::Clear() {
+void DEPRECATED_NackModule::Clear() {
rtc::CritScope lock(&crit_);
nack_list_.clear();
keyframe_list_.clear();
recovered_list_.clear();
}
-int64_t NackModule::TimeUntilNextProcess() {
+int64_t DEPRECATED_NackModule::TimeUntilNextProcess() {
return std::max<int64_t>(next_process_time_ms_ - clock_->TimeInMilliseconds(),
0);
}
-void NackModule::Process() {
+void DEPRECATED_NackModule::Process() {
if (nack_sender_) {
std::vector<uint16_t> nack_batch;
{
@@ -236,7 +238,7 @@ void NackModule::Process() {
}
}
-bool NackModule::RemovePacketsUntilKeyFrame() {
+bool DEPRECATED_NackModule::RemovePacketsUntilKeyFrame() {
while (!keyframe_list_.empty()) {
auto it = nack_list_.lower_bound(*keyframe_list_.begin());
@@ -254,8 +256,8 @@ bool NackModule::RemovePacketsUntilKeyFrame() {
return false;
}
-void NackModule::AddPacketsToNack(uint16_t seq_num_start,
- uint16_t seq_num_end) {
+void DEPRECATED_NackModule::AddPacketsToNack(uint16_t seq_num_start,
+ uint16_t seq_num_end) {
// Remove old packets.
auto it = nack_list_.lower_bound(seq_num_end - kMaxPacketAge);
nack_list_.erase(nack_list_.begin(), it);
@@ -289,7 +291,8 @@ void NackModule::AddPacketsToNack(uint16_t seq_num_start,
}
}
-std::vector<uint16_t> NackModule::GetNackBatch(NackFilterOptions options) {
+std::vector<uint16_t> DEPRECATED_NackModule::GetNackBatch(
+ NackFilterOptions options) {
bool consider_seq_num = options != kTimeOnly;
bool consider_timestamp = options != kSeqNumOnly;
Timestamp now = clock_->CurrentTime();
@@ -334,13 +337,13 @@ std::vector<uint16_t> NackModule::GetNackBatch(NackFilterOptions options) {
return nack_batch;
}
-void NackModule::UpdateReorderingStatistics(uint16_t seq_num) {
+void DEPRECATED_NackModule::UpdateReorderingStatistics(uint16_t seq_num) {
RTC_DCHECK(AheadOf(newest_seq_num_, seq_num));
uint16_t diff = ReverseDiff(newest_seq_num_, seq_num);
reordering_histogram_.Add(diff);
}
-int NackModule::WaitNumberOfPackets(float probability) const {
+int DEPRECATED_NackModule::WaitNumberOfPackets(float probability) const {
if (reordering_histogram_.NumValues() == 0)
return 0;
return reordering_histogram_.InverseCdf(probability);
diff --git a/chromium/third_party/webrtc/modules/video_coding/nack_module.h b/chromium/third_party/webrtc/modules/video_coding/deprecated/nack_module.h
index d4f705b3510..d704a05c11d 100644
--- a/chromium/third_party/webrtc/modules/video_coding/nack_module.h
+++ b/chromium/third_party/webrtc/modules/video_coding/deprecated/nack_module.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef MODULES_VIDEO_CODING_NACK_MODULE_H_
-#define MODULES_VIDEO_CODING_NACK_MODULE_H_
+#ifndef MODULES_VIDEO_CODING_DEPRECATED_NACK_MODULE_H_
+#define MODULES_VIDEO_CODING_DEPRECATED_NACK_MODULE_H_
#include <stdint.h>
@@ -22,17 +22,18 @@
#include "modules/include/module_common_types.h"
#include "modules/video_coding/histogram.h"
#include "rtc_base/critical_section.h"
+#include "rtc_base/deprecation.h"
#include "rtc_base/numerics/sequence_number_util.h"
#include "rtc_base/thread_annotations.h"
#include "system_wrappers/include/clock.h"
namespace webrtc {
-class NackModule : public Module {
+class DEPRECATED_NackModule : public Module {
public:
- NackModule(Clock* clock,
- NackSender* nack_sender,
- KeyFrameRequestSender* keyframe_request_sender);
+ DEPRECATED_NackModule(Clock* clock,
+ NackSender* nack_sender,
+ KeyFrameRequestSender* keyframe_request_sender);
int OnReceivedPacket(uint16_t seq_num, bool is_keyframe);
int OnReceivedPacket(uint16_t seq_num, bool is_keyframe, bool is_recovered);
@@ -124,6 +125,8 @@ class NackModule : public Module {
const absl::optional<BackoffSettings> backoff_settings_;
};
+using NackModule = RTC_DEPRECATED DEPRECATED_NackModule;
+
} // namespace webrtc
-#endif // MODULES_VIDEO_CODING_NACK_MODULE_H_
+#endif // MODULES_VIDEO_CODING_DEPRECATED_NACK_MODULE_H_
diff --git a/chromium/third_party/webrtc/modules/video_coding/encoded_frame.cc b/chromium/third_party/webrtc/modules/video_coding/encoded_frame.cc
index 1e9e374c644..3de62da9f5e 100644
--- a/chromium/third_party/webrtc/modules/video_coding/encoded_frame.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/encoded_frame.cc
@@ -135,20 +135,6 @@ void VCMEncodedFrame::CopyCodecSpecific(const RTPVideoHeader* header) {
}
case kVideoCodecH264: {
_codecSpecificInfo.codecType = kVideoCodecH264;
-
- // The following H264 codec specific data are not used elsewhere.
- // Instead they are read directly from the frame marking extension.
- // These codec specific data structures should be removed
- // when frame marking is used.
- _codecSpecificInfo.codecSpecific.H264.temporal_idx = kNoTemporalIdx;
- if (header->frame_marking.temporal_id != kNoTemporalIdx) {
- _codecSpecificInfo.codecSpecific.H264.temporal_idx =
- header->frame_marking.temporal_id;
- _codecSpecificInfo.codecSpecific.H264.base_layer_sync =
- header->frame_marking.base_layer_sync;
- _codecSpecificInfo.codecSpecific.H264.idr_frame =
- header->frame_marking.independent_frame;
- }
break;
}
default: {
diff --git a/chromium/third_party/webrtc/modules/video_coding/frame_buffer2.cc b/chromium/third_party/webrtc/modules/video_coding/frame_buffer2.cc
index 944f97bf871..64d3699e013 100644
--- a/chromium/third_party/webrtc/modules/video_coding/frame_buffer2.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/frame_buffer2.cc
@@ -63,19 +63,25 @@ FrameBuffer::FrameBuffer(Clock* clock,
last_log_non_decoded_ms_(-kLogNonDecodedIntervalMs),
add_rtt_to_playout_delay_(
webrtc::field_trial::IsEnabled("WebRTC-AddRttToPlayoutDelay")),
- rtt_mult_settings_(RttMultExperiment::GetRttMultValue()) {}
+ rtt_mult_settings_(RttMultExperiment::GetRttMultValue()) {
+ callback_checker_.Detach();
+}
-FrameBuffer::~FrameBuffer() {}
+FrameBuffer::~FrameBuffer() {
+ RTC_DCHECK_RUN_ON(&construction_checker_);
+}
void FrameBuffer::NextFrame(
int64_t max_wait_time_ms,
bool keyframe_required,
rtc::TaskQueue* callback_queue,
std::function<void(std::unique_ptr<EncodedFrame>, ReturnReason)> handler) {
- RTC_DCHECK_RUN_ON(callback_queue);
+ RTC_DCHECK_RUN_ON(&callback_checker_);
+ RTC_DCHECK(callback_queue->IsCurrent());
TRACE_EVENT0("webrtc", "FrameBuffer::NextFrame");
int64_t latest_return_time_ms =
clock_->TimeInMilliseconds() + max_wait_time_ms;
+
rtc::CritScope lock(&crit_);
if (stopped_) {
return;
@@ -93,6 +99,7 @@ void FrameBuffer::StartWaitForNextFrameOnQueue() {
int64_t wait_ms = FindNextFrame(clock_->TimeInMilliseconds());
callback_task_ = RepeatingTaskHandle::DelayedStart(
callback_queue_->Get(), TimeDelta::Millis(wait_ms), [this] {
+ RTC_DCHECK_RUN_ON(&callback_checker_);
// If this task has not been cancelled, we did not get any new frames
// while waiting. Continue with frame delivery.
rtc::CritScope lock(&crit_);
@@ -211,6 +218,7 @@ int64_t FrameBuffer::FindNextFrame(int64_t now_ms) {
}
EncodedFrame* FrameBuffer::GetNextFrame() {
+ RTC_DCHECK_RUN_ON(&callback_checker_);
int64_t now_ms = clock_->TimeInMilliseconds();
// TODO(ilnik): remove |frames_out| use frames_to_decode_ directly.
std::vector<EncodedFrame*> frames_out;
@@ -334,7 +342,10 @@ void FrameBuffer::Start() {
void FrameBuffer::Stop() {
TRACE_EVENT0("webrtc", "FrameBuffer::Stop");
rtc::CritScope lock(&crit_);
+ if (stopped_)
+ return;
stopped_ = true;
+
CancelCallback();
}
@@ -366,9 +377,11 @@ bool FrameBuffer::ValidReferences(const EncodedFrame& frame) const {
}
void FrameBuffer::CancelCallback() {
+ // Called from the callback queue or from within Stop().
frame_handler_ = {};
callback_task_.Stop();
callback_queue_ = nullptr;
+ callback_checker_.Detach();
}
bool FrameBuffer::IsCompleteSuperFrame(const EncodedFrame& frame) {
diff --git a/chromium/third_party/webrtc/modules/video_coding/frame_buffer2.h b/chromium/third_party/webrtc/modules/video_coding/frame_buffer2.h
index 51f3820d317..d824ddf4d02 100644
--- a/chromium/third_party/webrtc/modules/video_coding/frame_buffer2.h
+++ b/chromium/third_party/webrtc/modules/video_coding/frame_buffer2.h
@@ -28,6 +28,7 @@
#include "rtc_base/event.h"
#include "rtc_base/experiments/rtt_mult_experiment.h"
#include "rtc_base/numerics/sequence_number_util.h"
+#include "rtc_base/synchronization/sequence_checker.h"
#include "rtc_base/task_queue.h"
#include "rtc_base/task_utils/repeating_task.h"
#include "rtc_base/thread_annotations.h"
@@ -159,6 +160,9 @@ class FrameBuffer {
EncodedFrame* CombineAndDeleteFrames(
const std::vector<EncodedFrame*>& frames) const;
+ SequenceChecker construction_checker_;
+ SequenceChecker callback_checker_;
+
// Stores only undecoded frames.
FrameMap frames_ RTC_GUARDED_BY(crit_);
DecodedFramesHistory decoded_frames_history_ RTC_GUARDED_BY(crit_);
diff --git a/chromium/third_party/webrtc/modules/video_coding/frame_buffer2_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/frame_buffer2_unittest.cc
index b4d663ee063..2de3f3362b0 100644
--- a/chromium/third_party/webrtc/modules/video_coding/frame_buffer2_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/frame_buffer2_unittest.cc
@@ -108,21 +108,26 @@ class FrameObjectFake : public EncodedFrame {
class VCMReceiveStatisticsCallbackMock : public VCMReceiveStatisticsCallback {
public:
- MOCK_METHOD3(OnCompleteFrame,
- void(bool is_keyframe,
- size_t size_bytes,
- VideoContentType content_type));
- MOCK_METHOD1(OnDroppedFrames, void(uint32_t frames_dropped));
- MOCK_METHOD1(OnDiscardedPacketsUpdated, void(int discarded_packets));
- MOCK_METHOD1(OnFrameCountsUpdated, void(const FrameCounts& frame_counts));
- MOCK_METHOD6(OnFrameBufferTimingsUpdated,
- void(int max_decode_ms,
- int current_delay_ms,
- int target_delay_ms,
- int jitter_buffer_ms,
- int min_playout_delay_ms,
- int render_delay_ms));
- MOCK_METHOD1(OnTimingFrameInfoUpdated, void(const TimingFrameInfo& info));
+ MOCK_METHOD(void,
+ OnCompleteFrame,
+ (bool is_keyframe,
+ size_t size_bytes,
+ VideoContentType content_type),
+ (override));
+ MOCK_METHOD(void, OnDroppedFrames, (uint32_t frames_dropped), (override));
+ MOCK_METHOD(void,
+ OnFrameBufferTimingsUpdated,
+ (int max_decode_ms,
+ int current_delay_ms,
+ int target_delay_ms,
+ int jitter_buffer_ms,
+ int min_playout_delay_ms,
+ int render_delay_ms),
+ (override));
+ MOCK_METHOD(void,
+ OnTimingFrameInfoUpdated,
+ (const TimingFrameInfo& info),
+ (override));
};
class TestFrameBuffer2 : public ::testing::Test {
diff --git a/chromium/third_party/webrtc/modules/video_coding/frame_object.cc b/chromium/third_party/webrtc/modules/video_coding/frame_object.cc
index cb83999c942..2399e8f060f 100644
--- a/chromium/third_party/webrtc/modules/video_coding/frame_object.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/frame_object.cc
@@ -129,9 +129,5 @@ const RTPVideoHeader& RtpFrameObject::GetRtpVideoHeader() const {
return rtp_video_header_;
}
-const FrameMarking& RtpFrameObject::GetFrameMarking() const {
- return rtp_video_header_.frame_marking;
-}
-
} // namespace video_coding
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/frame_object.h b/chromium/third_party/webrtc/modules/video_coding/frame_object.h
index f7988763d38..831b444df42 100644
--- a/chromium/third_party/webrtc/modules/video_coding/frame_object.h
+++ b/chromium/third_party/webrtc/modules/video_coding/frame_object.h
@@ -47,7 +47,6 @@ class RtpFrameObject : public EncodedFrame {
int64_t RenderTime() const override;
bool delayed_by_retransmission() const override;
const RTPVideoHeader& GetRtpVideoHeader() const;
- const FrameMarking& GetFrameMarking() const;
private:
RTPVideoHeader rtp_video_header_;
diff --git a/chromium/third_party/webrtc/modules/video_coding/h264_sps_pps_tracker.cc b/chromium/third_party/webrtc/modules/video_coding/h264_sps_pps_tracker.cc
index 3965b28e8ec..4becdb76085 100644
--- a/chromium/third_party/webrtc/modules/video_coding/h264_sps_pps_tracker.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/h264_sps_pps_tracker.cc
@@ -49,6 +49,7 @@ H264SpsPpsTracker::FixedBitstream H264SpsPpsTracker::CopyAndFixBitstream(
RTPVideoHeader* video_header) {
RTC_DCHECK(video_header);
RTC_DCHECK(video_header->codec == kVideoCodecH264);
+ RTC_DCHECK_GT(bitstream.size(), 0);
auto& h264_header =
absl::get<RTPVideoHeaderH264>(video_header->video_type_header);
@@ -128,7 +129,7 @@ H264SpsPpsTracker::FixedBitstream H264SpsPpsTracker::CopyAndFixBitstream(
if (h264_header.packetization_type == kH264StapA) {
const uint8_t* nalu_ptr = bitstream.data() + 1;
- while (nalu_ptr < bitstream.data() + bitstream.size()) {
+ while (nalu_ptr < bitstream.data() + bitstream.size() - 1) {
RTC_DCHECK(video_header->is_first_packet_in_frame);
required_size += sizeof(start_code_h264);
@@ -180,7 +181,7 @@ H264SpsPpsTracker::FixedBitstream H264SpsPpsTracker::CopyAndFixBitstream(
// Copy the rest of the bitstream and insert start codes.
if (h264_header.packetization_type == kH264StapA) {
const uint8_t* nalu_ptr = bitstream.data() + 1;
- while (nalu_ptr < bitstream.data() + bitstream.size()) {
+ while (nalu_ptr < bitstream.data() + bitstream.size() - 1) {
fixed.bitstream.AppendData(start_code_h264);
// The first two bytes describe the length of a segment.
diff --git a/chromium/third_party/webrtc/modules/video_coding/jitter_estimator.cc b/chromium/third_party/webrtc/modules/video_coding/jitter_estimator.cc
index cd505835d11..44e2a9811e7 100644
--- a/chromium/third_party/webrtc/modules/video_coding/jitter_estimator.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/jitter_estimator.cc
@@ -23,6 +23,7 @@
#include "rtc_base/experiments/jitter_upper_bound_experiment.h"
#include "rtc_base/numerics/safe_conversions.h"
#include "system_wrappers/include/clock.h"
+#include "system_wrappers/include/field_trial.h"
namespace webrtc {
namespace {
@@ -50,6 +51,8 @@ VCMJitterEstimator::VCMJitterEstimator(Clock* clock)
time_deviation_upper_bound_(
JitterUpperBoundExperiment::GetUpperBoundSigmas().value_or(
kDefaultMaxTimestampDeviationInSigmas)),
+ enable_reduced_delay_(
+ !field_trial::IsEnabled("WebRTC-ReducedJitterDelayKillSwitch")),
clock_(clock) {
Reset();
}
@@ -395,22 +398,25 @@ int VCMJitterEstimator::GetJitterEstimate(
}
}
- static const double kJitterScaleLowThreshold = 5.0;
- static const double kJitterScaleHighThreshold = 10.0;
- double fps = GetFrameRate();
- // Ignore jitter for very low fps streams.
- if (fps < kJitterScaleLowThreshold) {
- if (fps == 0.0) {
- return rtc::checked_cast<int>(std::max(0.0, jitterMS) + 0.5);
+ if (enable_reduced_delay_) {
+ static const double kJitterScaleLowThreshold = 5.0;
+ static const double kJitterScaleHighThreshold = 10.0;
+ double fps = GetFrameRate();
+ // Ignore jitter for very low fps streams.
+ if (fps < kJitterScaleLowThreshold) {
+ if (fps == 0.0) {
+ return rtc::checked_cast<int>(std::max(0.0, jitterMS) + 0.5);
+ }
+ return 0;
}
- return 0;
- }
- // Semi-low frame rate; scale by factor linearly interpolated from 0.0 at
- // kJitterScaleLowThreshold to 1.0 at kJitterScaleHighThreshold.
- if (fps < kJitterScaleHighThreshold) {
- jitterMS = (1.0 / (kJitterScaleHighThreshold - kJitterScaleLowThreshold)) *
- (fps - kJitterScaleLowThreshold) * jitterMS;
+ // Semi-low frame rate; scale by factor linearly interpolated from 0.0 at
+ // kJitterScaleLowThreshold to 1.0 at kJitterScaleHighThreshold.
+ if (fps < kJitterScaleHighThreshold) {
+ jitterMS =
+ (1.0 / (kJitterScaleHighThreshold - kJitterScaleLowThreshold)) *
+ (fps - kJitterScaleLowThreshold) * jitterMS;
+ }
}
return rtc::checked_cast<int>(std::max(0.0, jitterMS) + 0.5);
diff --git a/chromium/third_party/webrtc/modules/video_coding/jitter_estimator.h b/chromium/third_party/webrtc/modules/video_coding/jitter_estimator.h
index d9798b40a19..1d69b957694 100644
--- a/chromium/third_party/webrtc/modules/video_coding/jitter_estimator.h
+++ b/chromium/third_party/webrtc/modules/video_coding/jitter_estimator.h
@@ -150,6 +150,7 @@ class VCMJitterEstimator {
rtc::RollingAccumulator<uint64_t> fps_counter_;
const double time_deviation_upper_bound_;
+ const bool enable_reduced_delay_;
Clock* clock_;
};
diff --git a/chromium/third_party/webrtc/modules/video_coding/jitter_estimator_tests.cc b/chromium/third_party/webrtc/modules/video_coding/jitter_estimator_tests.cc
index 1ad9abb56f6..14baae7e816 100644
--- a/chromium/third_party/webrtc/modules/video_coding/jitter_estimator_tests.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/jitter_estimator_tests.cc
@@ -72,6 +72,22 @@ TEST_F(TestVCMJitterEstimator, TestLowRate) {
}
}
+TEST_F(TestVCMJitterEstimator, TestLowRateDisabled) {
+ test::ScopedFieldTrials field_trials(
+ "WebRTC-ReducedJitterDelayKillSwitch/Enabled/");
+ SetUp();
+
+ ValueGenerator gen(10);
+ uint64_t time_delta_us = rtc::kNumMicrosecsPerSec / 5;
+ for (int i = 0; i < 60; ++i) {
+ estimator_->UpdateEstimate(gen.Delay(), gen.FrameSize());
+ AdvanceClock(time_delta_us);
+ if (i > 2)
+ EXPECT_GT(estimator_->GetJitterEstimate(0, absl::nullopt), 0);
+ gen.Advance();
+ }
+}
+
TEST_F(TestVCMJitterEstimator, TestUpperBound) {
struct TestContext {
TestContext()
diff --git a/chromium/third_party/webrtc/modules/video_coding/nack_module2.cc b/chromium/third_party/webrtc/modules/video_coding/nack_module2.cc
new file mode 100644
index 00000000000..8a3a731ed0c
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/nack_module2.cc
@@ -0,0 +1,343 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_coding/nack_module2.h"
+
+#include <algorithm>
+#include <limits>
+
+#include "api/units/timestamp.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/experiments/field_trial_parser.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/task_queue.h"
+#include "system_wrappers/include/field_trial.h"
+
+namespace webrtc {
+
+namespace {
+const int kMaxPacketAge = 10000;
+const int kMaxNackPackets = 1000;
+const int kDefaultRttMs = 100;
+const int kMaxNackRetries = 10;
+const int kMaxReorderedPackets = 128;
+const int kNumReorderingBuckets = 10;
+const int kDefaultSendNackDelayMs = 0;
+
+int64_t GetSendNackDelay() {
+ int64_t delay_ms = strtol(
+ webrtc::field_trial::FindFullName("WebRTC-SendNackDelayMs").c_str(),
+ nullptr, 10);
+ if (delay_ms > 0 && delay_ms <= 20) {
+ RTC_LOG(LS_INFO) << "SendNackDelay is set to " << delay_ms;
+ return delay_ms;
+ }
+ return kDefaultSendNackDelayMs;
+}
+} // namespace
+
+constexpr TimeDelta NackModule2::kUpdateInterval;
+
+NackModule2::NackInfo::NackInfo()
+ : seq_num(0), send_at_seq_num(0), sent_at_time(-1), retries(0) {}
+
+NackModule2::NackInfo::NackInfo(uint16_t seq_num,
+ uint16_t send_at_seq_num,
+ int64_t created_at_time)
+ : seq_num(seq_num),
+ send_at_seq_num(send_at_seq_num),
+ created_at_time(created_at_time),
+ sent_at_time(-1),
+ retries(0) {}
+
+NackModule2::BackoffSettings::BackoffSettings(TimeDelta min_retry,
+ TimeDelta max_rtt,
+ double base)
+ : min_retry_interval(min_retry), max_rtt(max_rtt), base(base) {}
+
+absl::optional<NackModule2::BackoffSettings>
+NackModule2::BackoffSettings::ParseFromFieldTrials() {
+ // Matches magic number in RTPSender::OnReceivedNack().
+ const TimeDelta kDefaultMinRetryInterval = TimeDelta::Millis(5);
+ // Upper bound on link-delay considered for exponential backoff.
+ // Selected so that cumulative delay with 1.25 base and 10 retries ends up
+ // below 3s, since above that there will be a FIR generated instead.
+ const TimeDelta kDefaultMaxRtt = TimeDelta::Millis(160);
+ // Default base for exponential backoff, adds 25% RTT delay for each retry.
+ const double kDefaultBase = 1.25;
+
+ FieldTrialParameter<bool> enabled("enabled", false);
+ FieldTrialParameter<TimeDelta> min_retry("min_retry",
+ kDefaultMinRetryInterval);
+ FieldTrialParameter<TimeDelta> max_rtt("max_rtt", kDefaultMaxRtt);
+ FieldTrialParameter<double> base("base", kDefaultBase);
+ ParseFieldTrial({&enabled, &min_retry, &max_rtt, &base},
+ field_trial::FindFullName("WebRTC-ExponentialNackBackoff"));
+
+ if (enabled) {
+ return NackModule2::BackoffSettings(min_retry.Get(), max_rtt.Get(),
+ base.Get());
+ }
+ return absl::nullopt;
+}
+
+NackModule2::NackModule2(TaskQueueBase* current_queue,
+ Clock* clock,
+ NackSender* nack_sender,
+ KeyFrameRequestSender* keyframe_request_sender,
+ TimeDelta update_interval /*= kUpdateInterval*/)
+ : worker_thread_(current_queue),
+ update_interval_(update_interval),
+ clock_(clock),
+ nack_sender_(nack_sender),
+ keyframe_request_sender_(keyframe_request_sender),
+ reordering_histogram_(kNumReorderingBuckets, kMaxReorderedPackets),
+ initialized_(false),
+ rtt_ms_(kDefaultRttMs),
+ newest_seq_num_(0),
+ send_nack_delay_ms_(GetSendNackDelay()),
+ backoff_settings_(BackoffSettings::ParseFromFieldTrials()) {
+ RTC_DCHECK(clock_);
+ RTC_DCHECK(nack_sender_);
+ RTC_DCHECK(keyframe_request_sender_);
+ RTC_DCHECK_GT(update_interval.ms(), 0);
+ RTC_DCHECK(worker_thread_);
+ RTC_DCHECK(worker_thread_->IsCurrent());
+
+ repeating_task_ = RepeatingTaskHandle::DelayedStart(
+ TaskQueueBase::Current(), update_interval_,
+ [this]() {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ std::vector<uint16_t> nack_batch = GetNackBatch(kTimeOnly);
+ if (!nack_batch.empty()) {
+ // This batch of NACKs is triggered externally; there is no external
+ // initiator who can batch them with other feedback messages.
+ nack_sender_->SendNack(nack_batch, /*buffering_allowed=*/false);
+ }
+ return update_interval_;
+ },
+ clock_);
+}
+
+NackModule2::~NackModule2() {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ repeating_task_.Stop();
+}
+
+int NackModule2::OnReceivedPacket(uint16_t seq_num, bool is_keyframe) {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ return OnReceivedPacket(seq_num, is_keyframe, false);
+}
+
+int NackModule2::OnReceivedPacket(uint16_t seq_num,
+ bool is_keyframe,
+ bool is_recovered) {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ // TODO(philipel): When the packet includes information whether it is
+ // retransmitted or not, use that value instead. For
+ // now set it to true, which will cause the reordering
+ // statistics to never be updated.
+ bool is_retransmitted = true;
+
+ if (!initialized_) {
+ newest_seq_num_ = seq_num;
+ if (is_keyframe)
+ keyframe_list_.insert(seq_num);
+ initialized_ = true;
+ return 0;
+ }
+
+ // Since the |newest_seq_num_| is a packet we have actually received we know
+ // that packet has never been Nacked.
+ if (seq_num == newest_seq_num_)
+ return 0;
+
+ if (AheadOf(newest_seq_num_, seq_num)) {
+ // An out of order packet has been received.
+ auto nack_list_it = nack_list_.find(seq_num);
+ int nacks_sent_for_packet = 0;
+ if (nack_list_it != nack_list_.end()) {
+ nacks_sent_for_packet = nack_list_it->second.retries;
+ nack_list_.erase(nack_list_it);
+ }
+ if (!is_retransmitted)
+ UpdateReorderingStatistics(seq_num);
+ return nacks_sent_for_packet;
+ }
+
+ // Keep track of new keyframes.
+ if (is_keyframe)
+ keyframe_list_.insert(seq_num);
+
+ // And remove old ones so we don't accumulate keyframes.
+ auto it = keyframe_list_.lower_bound(seq_num - kMaxPacketAge);
+ if (it != keyframe_list_.begin())
+ keyframe_list_.erase(keyframe_list_.begin(), it);
+
+ if (is_recovered) {
+ recovered_list_.insert(seq_num);
+
+ // Remove old ones so we don't accumulate recovered packets.
+ auto it = recovered_list_.lower_bound(seq_num - kMaxPacketAge);
+ if (it != recovered_list_.begin())
+ recovered_list_.erase(recovered_list_.begin(), it);
+
+ // Do not send nack for packets recovered by FEC or RTX.
+ return 0;
+ }
+
+ AddPacketsToNack(newest_seq_num_ + 1, seq_num);
+ newest_seq_num_ = seq_num;
+
+ // Are there any nacks that are waiting for this seq_num.
+ std::vector<uint16_t> nack_batch = GetNackBatch(kSeqNumOnly);
+ if (!nack_batch.empty()) {
+ // This batch of NACKs is triggered externally; the initiator can
+ // batch them with other feedback messages.
+ nack_sender_->SendNack(nack_batch, /*buffering_allowed=*/true);
+ }
+
+ return 0;
+}
+
+void NackModule2::ClearUpTo(uint16_t seq_num) {
+ // Called via RtpVideoStreamReceiver2::FrameContinuous on the network thread.
+ worker_thread_->PostTask(ToQueuedTask(task_safety_, [seq_num, this]() {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ nack_list_.erase(nack_list_.begin(), nack_list_.lower_bound(seq_num));
+ keyframe_list_.erase(keyframe_list_.begin(),
+ keyframe_list_.lower_bound(seq_num));
+ recovered_list_.erase(recovered_list_.begin(),
+ recovered_list_.lower_bound(seq_num));
+ }));
+}
+
+void NackModule2::UpdateRtt(int64_t rtt_ms) {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ rtt_ms_ = rtt_ms;
+}
+
+bool NackModule2::RemovePacketsUntilKeyFrame() {
+ // Called on worker_thread_.
+ while (!keyframe_list_.empty()) {
+ auto it = nack_list_.lower_bound(*keyframe_list_.begin());
+
+ if (it != nack_list_.begin()) {
+ // We have found a keyframe that actually is newer than at least one
+ // packet in the nack list.
+ nack_list_.erase(nack_list_.begin(), it);
+ return true;
+ }
+
+ // If this keyframe is so old it does not remove any packets from the list,
+ // remove it from the list of keyframes and try the next keyframe.
+ keyframe_list_.erase(keyframe_list_.begin());
+ }
+ return false;
+}
+
+void NackModule2::AddPacketsToNack(uint16_t seq_num_start,
+ uint16_t seq_num_end) {
+ // Called on worker_thread_.
+ // Remove old packets.
+ auto it = nack_list_.lower_bound(seq_num_end - kMaxPacketAge);
+ nack_list_.erase(nack_list_.begin(), it);
+
+ // If the nack list is too large, remove packets from the nack list until
+ // the latest first packet of a keyframe. If the list is still too large,
+ // clear it and request a keyframe.
+ uint16_t num_new_nacks = ForwardDiff(seq_num_start, seq_num_end);
+ if (nack_list_.size() + num_new_nacks > kMaxNackPackets) {
+ while (RemovePacketsUntilKeyFrame() &&
+ nack_list_.size() + num_new_nacks > kMaxNackPackets) {
+ }
+
+ if (nack_list_.size() + num_new_nacks > kMaxNackPackets) {
+ nack_list_.clear();
+ RTC_LOG(LS_WARNING) << "NACK list full, clearing NACK"
+ " list and requesting keyframe.";
+ keyframe_request_sender_->RequestKeyFrame();
+ return;
+ }
+ }
+
+ for (uint16_t seq_num = seq_num_start; seq_num != seq_num_end; ++seq_num) {
+ // Do not send nack for packets that are already recovered by FEC or RTX
+ if (recovered_list_.find(seq_num) != recovered_list_.end())
+ continue;
+ NackInfo nack_info(seq_num, seq_num + WaitNumberOfPackets(0.5),
+ clock_->TimeInMilliseconds());
+ RTC_DCHECK(nack_list_.find(seq_num) == nack_list_.end());
+ nack_list_[seq_num] = nack_info;
+ }
+}
+
+std::vector<uint16_t> NackModule2::GetNackBatch(NackFilterOptions options) {
+ // Called on worker_thread_.
+
+ bool consider_seq_num = options != kTimeOnly;
+ bool consider_timestamp = options != kSeqNumOnly;
+ Timestamp now = clock_->CurrentTime();
+ std::vector<uint16_t> nack_batch;
+ auto it = nack_list_.begin();
+ while (it != nack_list_.end()) {
+ TimeDelta resend_delay = TimeDelta::Millis(rtt_ms_);
+ if (backoff_settings_) {
+ resend_delay =
+ std::max(resend_delay, backoff_settings_->min_retry_interval);
+ if (it->second.retries > 1) {
+ TimeDelta exponential_backoff =
+ std::min(TimeDelta::Millis(rtt_ms_), backoff_settings_->max_rtt) *
+ std::pow(backoff_settings_->base, it->second.retries - 1);
+ resend_delay = std::max(resend_delay, exponential_backoff);
+ }
+ }
+
+ bool delay_timed_out =
+ now.ms() - it->second.created_at_time >= send_nack_delay_ms_;
+ bool nack_on_rtt_passed =
+ now.ms() - it->second.sent_at_time >= resend_delay.ms();
+ bool nack_on_seq_num_passed =
+ it->second.sent_at_time == -1 &&
+ AheadOrAt(newest_seq_num_, it->second.send_at_seq_num);
+ if (delay_timed_out && ((consider_seq_num && nack_on_seq_num_passed) ||
+ (consider_timestamp && nack_on_rtt_passed))) {
+ nack_batch.emplace_back(it->second.seq_num);
+ ++it->second.retries;
+ it->second.sent_at_time = now.ms();
+ if (it->second.retries >= kMaxNackRetries) {
+ RTC_LOG(LS_WARNING) << "Sequence number " << it->second.seq_num
+ << " removed from NACK list due to max retries.";
+ it = nack_list_.erase(it);
+ } else {
+ ++it;
+ }
+ continue;
+ }
+ ++it;
+ }
+ return nack_batch;
+}
+
+void NackModule2::UpdateReorderingStatistics(uint16_t seq_num) {
+ // Running on worker_thread_.
+ RTC_DCHECK(AheadOf(newest_seq_num_, seq_num));
+ uint16_t diff = ReverseDiff(newest_seq_num_, seq_num);
+ reordering_histogram_.Add(diff);
+}
+
+int NackModule2::WaitNumberOfPackets(float probability) const {
+ // Called on worker_thread_;
+ if (reordering_histogram_.NumValues() == 0)
+ return 0;
+ return reordering_histogram_.InverseCdf(probability);
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/nack_module2.h b/chromium/third_party/webrtc/modules/video_coding/nack_module2.h
new file mode 100644
index 00000000000..89dd0821922
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/nack_module2.h
@@ -0,0 +1,140 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_VIDEO_CODING_NACK_MODULE2_H_
+#define MODULES_VIDEO_CODING_NACK_MODULE2_H_
+
+#include <stdint.h>
+
+#include <map>
+#include <set>
+#include <vector>
+
+#include "api/units/time_delta.h"
+#include "modules/include/module_common_types.h"
+#include "modules/video_coding/histogram.h"
+#include "rtc_base/numerics/sequence_number_util.h"
+#include "rtc_base/synchronization/sequence_checker.h"
+#include "rtc_base/task_queue.h"
+#include "rtc_base/task_utils/pending_task_safety_flag.h"
+#include "rtc_base/task_utils/repeating_task.h"
+#include "rtc_base/thread_annotations.h"
+#include "system_wrappers/include/clock.h"
+
+namespace webrtc {
+
+// TODO(bugs.webrtc.org/11594): This class no longer implements the Module
+// interface and therefore "NackModule" may not be a descriptive name anymore.
+// Consider renaming to e.g. NackTracker or NackRequester.
+class NackModule2 final {
+ public:
+ static constexpr TimeDelta kUpdateInterval = TimeDelta::Millis(20);
+
+ NackModule2(TaskQueueBase* current_queue,
+ Clock* clock,
+ NackSender* nack_sender,
+ KeyFrameRequestSender* keyframe_request_sender,
+ TimeDelta update_interval = kUpdateInterval);
+ ~NackModule2();
+
+ int OnReceivedPacket(uint16_t seq_num, bool is_keyframe);
+ int OnReceivedPacket(uint16_t seq_num, bool is_keyframe, bool is_recovered);
+
+ void ClearUpTo(uint16_t seq_num);
+ void UpdateRtt(int64_t rtt_ms);
+
+ private:
+ // Which fields to consider when deciding which packet to nack in
+ // GetNackBatch.
+ enum NackFilterOptions { kSeqNumOnly, kTimeOnly, kSeqNumAndTime };
+
+ // This class holds the sequence number of the packet that is in the nack list
+ // as well as the meta data about when it should be nacked and how many times
+ // we have tried to nack this packet.
+ struct NackInfo {
+ NackInfo();
+ NackInfo(uint16_t seq_num,
+ uint16_t send_at_seq_num,
+ int64_t created_at_time);
+
+ uint16_t seq_num;
+ uint16_t send_at_seq_num;
+ int64_t created_at_time;
+ int64_t sent_at_time;
+ int retries;
+ };
+
+ struct BackoffSettings {
+ BackoffSettings(TimeDelta min_retry, TimeDelta max_rtt, double base);
+ static absl::optional<BackoffSettings> ParseFromFieldTrials();
+
+ // Min time between nacks.
+ const TimeDelta min_retry_interval;
+ // Upper bound on link-delay considered for exponential backoff.
+ const TimeDelta max_rtt;
+ // Base for the exponential backoff.
+ const double base;
+ };
+
+ void AddPacketsToNack(uint16_t seq_num_start, uint16_t seq_num_end)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_);
+
+ // Removes packets from the nack list until the next keyframe. Returns true
+ // if packets were removed.
+ bool RemovePacketsUntilKeyFrame()
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_);
+ std::vector<uint16_t> GetNackBatch(NackFilterOptions options)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_);
+
+ // Update the reordering distribution.
+ void UpdateReorderingStatistics(uint16_t seq_num)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_);
+
+ // Returns how many packets we have to wait in order to receive the packet
+ // with probability |probabilty| or higher.
+ int WaitNumberOfPackets(float probability) const
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_);
+
+ TaskQueueBase* const worker_thread_;
+
+ // Used to regularly call SendNack if needed.
+ RepeatingTaskHandle repeating_task_ RTC_GUARDED_BY(worker_thread_);
+ const TimeDelta update_interval_;
+
+ Clock* const clock_;
+ NackSender* const nack_sender_;
+ KeyFrameRequestSender* const keyframe_request_sender_;
+
+ // TODO(philipel): Some of the variables below are consistently used on a
+ // known thread (e.g. see |initialized_|). Those probably do not need
+ // synchronized access.
+ std::map<uint16_t, NackInfo, DescendingSeqNumComp<uint16_t>> nack_list_
+ RTC_GUARDED_BY(worker_thread_);
+ std::set<uint16_t, DescendingSeqNumComp<uint16_t>> keyframe_list_
+ RTC_GUARDED_BY(worker_thread_);
+ std::set<uint16_t, DescendingSeqNumComp<uint16_t>> recovered_list_
+ RTC_GUARDED_BY(worker_thread_);
+ video_coding::Histogram reordering_histogram_ RTC_GUARDED_BY(worker_thread_);
+ bool initialized_ RTC_GUARDED_BY(worker_thread_);
+ int64_t rtt_ms_ RTC_GUARDED_BY(worker_thread_);
+ uint16_t newest_seq_num_ RTC_GUARDED_BY(worker_thread_);
+
+ // Adds a delay before send nack on packet received.
+ const int64_t send_nack_delay_ms_;
+
+ const absl::optional<BackoffSettings> backoff_settings_;
+
+ // Used to signal destruction to potentially pending tasks.
+ ScopedTaskSafety task_safety_;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_VIDEO_CODING_NACK_MODULE2_H_
diff --git a/chromium/third_party/webrtc/modules/video_coding/nack_module2_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/nack_module2_unittest.cc
new file mode 100644
index 00000000000..acd1eead012
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/nack_module2_unittest.cc
@@ -0,0 +1,411 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_coding/nack_module2.h"
+
+#include <algorithm>
+#include <cstdint>
+#include <cstring>
+#include <memory>
+
+#include "system_wrappers/include/clock.h"
+#include "test/field_trial.h"
+#include "test/gtest.h"
+#include "test/run_loop.h"
+
+namespace webrtc {
+// TODO(bugs.webrtc.org/11594): Use the use the GlobalSimulatedTimeController
+// instead of RunLoop. At the moment we mix use of the Clock and the underlying
+// implementation of RunLoop, which is realtime.
+class TestNackModule2 : public ::testing::TestWithParam<bool>,
+ public NackSender,
+ public KeyFrameRequestSender {
+ protected:
+ TestNackModule2()
+ : clock_(new SimulatedClock(0)),
+ field_trial_(GetParam()
+ ? "WebRTC-ExponentialNackBackoff/enabled:true/"
+ : "WebRTC-ExponentialNackBackoff/enabled:false/"),
+ keyframes_requested_(0) {}
+
+ void SetUp() override {}
+
+ void SendNack(const std::vector<uint16_t>& sequence_numbers,
+ bool buffering_allowed) override {
+ sent_nacks_.insert(sent_nacks_.end(), sequence_numbers.begin(),
+ sequence_numbers.end());
+ if (waiting_for_send_nack_) {
+ waiting_for_send_nack_ = false;
+ loop_.Quit();
+ }
+ }
+
+ void RequestKeyFrame() override { ++keyframes_requested_; }
+
+ void Flush() {
+ // nack_module.Process();
+ loop_.Flush();
+ }
+
+ bool WaitForSendNack() {
+ if (timed_out_) {
+ RTC_NOTREACHED();
+ return false;
+ }
+
+ RTC_DCHECK(!waiting_for_send_nack_);
+
+ waiting_for_send_nack_ = true;
+ loop_.PostDelayedTask(
+ [this]() {
+ timed_out_ = true;
+ loop_.Quit();
+ },
+ 1000);
+
+ loop_.Run();
+
+ if (timed_out_)
+ return false;
+
+ RTC_DCHECK(!waiting_for_send_nack_);
+ return true;
+ }
+
+ NackModule2& CreateNackModule(
+ TimeDelta interval = NackModule2::kUpdateInterval) {
+ RTC_DCHECK(!nack_module_.get());
+ nack_module_ = std::make_unique<NackModule2>(
+ TaskQueueBase::Current(), clock_.get(), this, this, interval);
+ nack_module_->UpdateRtt(kDefaultRttMs);
+ return *nack_module_.get();
+ }
+
+ static constexpr int64_t kDefaultRttMs = 20;
+ test::RunLoop loop_;
+ std::unique_ptr<SimulatedClock> clock_;
+ test::ScopedFieldTrials field_trial_;
+ std::unique_ptr<NackModule2> nack_module_;
+ std::vector<uint16_t> sent_nacks_;
+ int keyframes_requested_;
+ bool waiting_for_send_nack_ = false;
+ bool timed_out_ = false;
+};
+
+TEST_P(TestNackModule2, NackOnePacket) {
+ NackModule2& nack_module = CreateNackModule();
+ nack_module.OnReceivedPacket(1, false, false);
+ nack_module.OnReceivedPacket(3, false, false);
+ ASSERT_EQ(1u, sent_nacks_.size());
+ EXPECT_EQ(2, sent_nacks_[0]);
+}
+
+TEST_P(TestNackModule2, WrappingSeqNum) {
+ NackModule2& nack_module = CreateNackModule();
+ nack_module.OnReceivedPacket(0xfffe, false, false);
+ nack_module.OnReceivedPacket(1, false, false);
+ ASSERT_EQ(2u, sent_nacks_.size());
+ EXPECT_EQ(0xffff, sent_nacks_[0]);
+ EXPECT_EQ(0, sent_nacks_[1]);
+}
+
+TEST_P(TestNackModule2, WrappingSeqNumClearToKeyframe) {
+ NackModule2& nack_module = CreateNackModule(TimeDelta::Millis(10));
+ nack_module.OnReceivedPacket(0xfffe, false, false);
+ nack_module.OnReceivedPacket(1, false, false);
+ ASSERT_EQ(2u, sent_nacks_.size());
+ EXPECT_EQ(0xffff, sent_nacks_[0]);
+ EXPECT_EQ(0, sent_nacks_[1]);
+
+ sent_nacks_.clear();
+ nack_module.OnReceivedPacket(2, true, false);
+ ASSERT_EQ(0u, sent_nacks_.size());
+
+ nack_module.OnReceivedPacket(501, true, false);
+ ASSERT_EQ(498u, sent_nacks_.size());
+ for (int seq_num = 3; seq_num < 501; ++seq_num)
+ EXPECT_EQ(seq_num, sent_nacks_[seq_num - 3]);
+
+ sent_nacks_.clear();
+ nack_module.OnReceivedPacket(1001, false, false);
+ EXPECT_EQ(499u, sent_nacks_.size());
+ for (int seq_num = 502; seq_num < 1001; ++seq_num)
+ EXPECT_EQ(seq_num, sent_nacks_[seq_num - 502]);
+
+ sent_nacks_.clear();
+ clock_->AdvanceTimeMilliseconds(100);
+ ASSERT_TRUE(WaitForSendNack());
+ ASSERT_EQ(999u, sent_nacks_.size());
+ EXPECT_EQ(0xffff, sent_nacks_[0]);
+ EXPECT_EQ(0, sent_nacks_[1]);
+ for (int seq_num = 3; seq_num < 501; ++seq_num)
+ EXPECT_EQ(seq_num, sent_nacks_[seq_num - 1]);
+ for (int seq_num = 502; seq_num < 1001; ++seq_num)
+ EXPECT_EQ(seq_num, sent_nacks_[seq_num - 2]);
+
+ // Adding packet 1004 will cause the nack list to reach it's max limit.
+ // It will then clear all nacks up to the next keyframe (seq num 2),
+ // thus removing 0xffff and 0 from the nack list.
+ sent_nacks_.clear();
+ nack_module.OnReceivedPacket(1004, false, false);
+ ASSERT_EQ(2u, sent_nacks_.size());
+ EXPECT_EQ(1002, sent_nacks_[0]);
+ EXPECT_EQ(1003, sent_nacks_[1]);
+
+ sent_nacks_.clear();
+ clock_->AdvanceTimeMilliseconds(100);
+ ASSERT_TRUE(WaitForSendNack());
+ ASSERT_EQ(999u, sent_nacks_.size());
+ for (int seq_num = 3; seq_num < 501; ++seq_num)
+ EXPECT_EQ(seq_num, sent_nacks_[seq_num - 3]);
+ for (int seq_num = 502; seq_num < 1001; ++seq_num)
+ EXPECT_EQ(seq_num, sent_nacks_[seq_num - 4]);
+
+ // Adding packet 1007 will cause the nack module to overflow again, thus
+ // clearing everything up to 501 which is the next keyframe.
+ nack_module.OnReceivedPacket(1007, false, false);
+ sent_nacks_.clear();
+ clock_->AdvanceTimeMilliseconds(100);
+ ASSERT_TRUE(WaitForSendNack());
+ ASSERT_EQ(503u, sent_nacks_.size());
+ for (int seq_num = 502; seq_num < 1001; ++seq_num)
+ EXPECT_EQ(seq_num, sent_nacks_[seq_num - 502]);
+ EXPECT_EQ(1005, sent_nacks_[501]);
+ EXPECT_EQ(1006, sent_nacks_[502]);
+}
+
+TEST_P(TestNackModule2, ResendNack) {
+ NackModule2& nack_module = CreateNackModule(TimeDelta::Millis(1));
+ nack_module.OnReceivedPacket(1, false, false);
+ nack_module.OnReceivedPacket(3, false, false);
+ size_t expected_nacks_sent = 1;
+ ASSERT_EQ(expected_nacks_sent, sent_nacks_.size());
+ EXPECT_EQ(2, sent_nacks_[0]);
+
+ if (GetParam()) {
+ // Retry has to wait at least 5ms by default.
+ nack_module.UpdateRtt(1);
+ clock_->AdvanceTimeMilliseconds(4);
+ Flush(); // Too early.
+ EXPECT_EQ(expected_nacks_sent, sent_nacks_.size());
+
+ clock_->AdvanceTimeMilliseconds(1);
+ WaitForSendNack(); // Now allowed.
+ EXPECT_EQ(++expected_nacks_sent, sent_nacks_.size());
+ } else {
+ nack_module.UpdateRtt(1);
+ clock_->AdvanceTimeMilliseconds(1);
+ WaitForSendNack(); // Fast retransmit allowed.
+ EXPECT_EQ(++expected_nacks_sent, sent_nacks_.size());
+ }
+
+ // N:th try has to wait b^(N-1) * rtt by default.
+ const double b = GetParam() ? 1.25 : 1.0;
+ for (int i = 2; i < 10; ++i) {
+ // Change RTT, above the 40ms max for exponential backoff.
+ TimeDelta rtt = TimeDelta::Millis(160); // + (i * 10 - 40)
+ nack_module.UpdateRtt(rtt.ms());
+
+ // RTT gets capped at 160ms in backoff calculations.
+ TimeDelta expected_backoff_delay =
+ std::pow(b, i - 1) * std::min(rtt, TimeDelta::Millis(160));
+
+ // Move to one millisecond before next allowed NACK.
+ clock_->AdvanceTimeMilliseconds(expected_backoff_delay.ms() - 1);
+ Flush();
+ EXPECT_EQ(expected_nacks_sent, sent_nacks_.size());
+
+ // Move to one millisecond after next allowed NACK.
+ // After rather than on to avoid rounding errors.
+ clock_->AdvanceTimeMilliseconds(2);
+ WaitForSendNack(); // Now allowed.
+ EXPECT_EQ(++expected_nacks_sent, sent_nacks_.size());
+ }
+
+ // Giving up after 10 tries.
+ clock_->AdvanceTimeMilliseconds(3000);
+ Flush();
+ EXPECT_EQ(expected_nacks_sent, sent_nacks_.size());
+}
+
+TEST_P(TestNackModule2, ResendPacketMaxRetries) {
+ NackModule2& nack_module = CreateNackModule(TimeDelta::Millis(1));
+ nack_module.OnReceivedPacket(1, false, false);
+ nack_module.OnReceivedPacket(3, false, false);
+ ASSERT_EQ(1u, sent_nacks_.size());
+ EXPECT_EQ(2, sent_nacks_[0]);
+
+ int backoff_factor = 1;
+ for (size_t retries = 1; retries < 10; ++retries) {
+ // Exponential backoff, so that we don't reject NACK because of time.
+ clock_->AdvanceTimeMilliseconds(backoff_factor * kDefaultRttMs);
+ backoff_factor *= 2;
+ WaitForSendNack();
+ EXPECT_EQ(retries + 1, sent_nacks_.size());
+ }
+
+ clock_->AdvanceTimeMilliseconds(backoff_factor * kDefaultRttMs);
+ Flush();
+ EXPECT_EQ(10u, sent_nacks_.size());
+}
+
+TEST_P(TestNackModule2, TooLargeNackList) {
+ NackModule2& nack_module = CreateNackModule();
+ nack_module.OnReceivedPacket(0, false, false);
+ nack_module.OnReceivedPacket(1001, false, false);
+ EXPECT_EQ(1000u, sent_nacks_.size());
+ EXPECT_EQ(0, keyframes_requested_);
+ nack_module.OnReceivedPacket(1003, false, false);
+ EXPECT_EQ(1000u, sent_nacks_.size());
+ EXPECT_EQ(1, keyframes_requested_);
+ nack_module.OnReceivedPacket(1004, false, false);
+ EXPECT_EQ(1000u, sent_nacks_.size());
+ EXPECT_EQ(1, keyframes_requested_);
+}
+
+TEST_P(TestNackModule2, TooLargeNackListWithKeyFrame) {
+ NackModule2& nack_module = CreateNackModule();
+ nack_module.OnReceivedPacket(0, false, false);
+ nack_module.OnReceivedPacket(1, true, false);
+ nack_module.OnReceivedPacket(1001, false, false);
+ EXPECT_EQ(999u, sent_nacks_.size());
+ EXPECT_EQ(0, keyframes_requested_);
+ nack_module.OnReceivedPacket(1003, false, false);
+ EXPECT_EQ(1000u, sent_nacks_.size());
+ EXPECT_EQ(0, keyframes_requested_);
+ nack_module.OnReceivedPacket(1005, false, false);
+ EXPECT_EQ(1000u, sent_nacks_.size());
+ EXPECT_EQ(1, keyframes_requested_);
+}
+
+TEST_P(TestNackModule2, ClearUpTo) {
+ NackModule2& nack_module = CreateNackModule(TimeDelta::Millis(1));
+ nack_module.OnReceivedPacket(0, false, false);
+ nack_module.OnReceivedPacket(100, false, false);
+ EXPECT_EQ(99u, sent_nacks_.size());
+
+ sent_nacks_.clear();
+ clock_->AdvanceTimeMilliseconds(100);
+ nack_module.ClearUpTo(50);
+ WaitForSendNack();
+ ASSERT_EQ(50u, sent_nacks_.size());
+ EXPECT_EQ(50, sent_nacks_[0]);
+}
+
+TEST_P(TestNackModule2, ClearUpToWrap) {
+ NackModule2& nack_module = CreateNackModule();
+ nack_module.OnReceivedPacket(0xfff0, false, false);
+ nack_module.OnReceivedPacket(0xf, false, false);
+ EXPECT_EQ(30u, sent_nacks_.size());
+
+ sent_nacks_.clear();
+ clock_->AdvanceTimeMilliseconds(100);
+ nack_module.ClearUpTo(0);
+ WaitForSendNack();
+ ASSERT_EQ(15u, sent_nacks_.size());
+ EXPECT_EQ(0, sent_nacks_[0]);
+}
+
+TEST_P(TestNackModule2, PacketNackCount) {
+ NackModule2& nack_module = CreateNackModule(TimeDelta::Millis(1));
+ EXPECT_EQ(0, nack_module.OnReceivedPacket(0, false, false));
+ EXPECT_EQ(0, nack_module.OnReceivedPacket(2, false, false));
+ EXPECT_EQ(1, nack_module.OnReceivedPacket(1, false, false));
+
+ sent_nacks_.clear();
+ nack_module.UpdateRtt(100);
+ EXPECT_EQ(0, nack_module.OnReceivedPacket(5, false, false));
+ clock_->AdvanceTimeMilliseconds(100);
+ WaitForSendNack();
+ EXPECT_EQ(4u, sent_nacks_.size());
+
+ clock_->AdvanceTimeMilliseconds(125);
+ WaitForSendNack();
+
+ EXPECT_EQ(6u, sent_nacks_.size());
+
+ EXPECT_EQ(3, nack_module.OnReceivedPacket(3, false, false));
+ EXPECT_EQ(3, nack_module.OnReceivedPacket(4, false, false));
+ EXPECT_EQ(0, nack_module.OnReceivedPacket(4, false, false));
+}
+
+TEST_P(TestNackModule2, NackListFullAndNoOverlapWithKeyframes) {
+ NackModule2& nack_module = CreateNackModule();
+ const int kMaxNackPackets = 1000;
+ const unsigned int kFirstGap = kMaxNackPackets - 20;
+ const unsigned int kSecondGap = 200;
+ uint16_t seq_num = 0;
+ nack_module.OnReceivedPacket(seq_num++, true, false);
+ seq_num += kFirstGap;
+ nack_module.OnReceivedPacket(seq_num++, true, false);
+ EXPECT_EQ(kFirstGap, sent_nacks_.size());
+ sent_nacks_.clear();
+ seq_num += kSecondGap;
+ nack_module.OnReceivedPacket(seq_num, true, false);
+ EXPECT_EQ(kSecondGap, sent_nacks_.size());
+}
+
+TEST_P(TestNackModule2, HandleFecRecoveredPacket) {
+ NackModule2& nack_module = CreateNackModule();
+ nack_module.OnReceivedPacket(1, false, false);
+ nack_module.OnReceivedPacket(4, false, true);
+ EXPECT_EQ(0u, sent_nacks_.size());
+ nack_module.OnReceivedPacket(5, false, false);
+ EXPECT_EQ(2u, sent_nacks_.size());
+}
+
+TEST_P(TestNackModule2, SendNackWithoutDelay) {
+ NackModule2& nack_module = CreateNackModule();
+ nack_module.OnReceivedPacket(0, false, false);
+ nack_module.OnReceivedPacket(100, false, false);
+ EXPECT_EQ(99u, sent_nacks_.size());
+}
+
+INSTANTIATE_TEST_SUITE_P(WithAndWithoutBackoff,
+ TestNackModule2,
+ ::testing::Values(true, false));
+
+class TestNackModule2WithFieldTrial : public ::testing::Test,
+ public NackSender,
+ public KeyFrameRequestSender {
+ protected:
+ TestNackModule2WithFieldTrial()
+ : nack_delay_field_trial_("WebRTC-SendNackDelayMs/10/"),
+ clock_(new SimulatedClock(0)),
+ nack_module_(TaskQueueBase::Current(), clock_.get(), this, this),
+ keyframes_requested_(0) {}
+
+ void SendNack(const std::vector<uint16_t>& sequence_numbers,
+ bool buffering_allowed) override {
+ sent_nacks_.insert(sent_nacks_.end(), sequence_numbers.begin(),
+ sequence_numbers.end());
+ }
+
+ void RequestKeyFrame() override { ++keyframes_requested_; }
+
+ test::ScopedFieldTrials nack_delay_field_trial_;
+ std::unique_ptr<SimulatedClock> clock_;
+ NackModule2 nack_module_;
+ std::vector<uint16_t> sent_nacks_;
+ int keyframes_requested_;
+};
+
+TEST_F(TestNackModule2WithFieldTrial, SendNackWithDelay) {
+ nack_module_.OnReceivedPacket(0, false, false);
+ nack_module_.OnReceivedPacket(100, false, false);
+ EXPECT_EQ(0u, sent_nacks_.size());
+ clock_->AdvanceTimeMilliseconds(10);
+ nack_module_.OnReceivedPacket(106, false, false);
+ EXPECT_EQ(99u, sent_nacks_.size());
+ clock_->AdvanceTimeMilliseconds(10);
+ nack_module_.OnReceivedPacket(109, false, false);
+ EXPECT_EQ(104u, sent_nacks_.size());
+}
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/nack_module_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/nack_module_unittest.cc
index ab1c76f1b5c..f91eb750f02 100644
--- a/chromium/third_party/webrtc/modules/video_coding/nack_module_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/nack_module_unittest.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "modules/video_coding/nack_module.h"
+#include "modules/video_coding/deprecated/nack_module.h"
#include <algorithm>
#include <cstdint>
@@ -45,7 +45,7 @@ class TestNackModule : public ::testing::TestWithParam<bool>,
static constexpr int64_t kDefaultRttMs = 20;
std::unique_ptr<SimulatedClock> clock_;
test::ScopedFieldTrials field_trial_;
- NackModule nack_module_;
+ DEPRECATED_NackModule nack_module_;
std::vector<uint16_t> sent_nacks_;
int keyframes_requested_;
};
@@ -352,7 +352,7 @@ class TestNackModuleWithFieldTrial : public ::testing::Test,
test::ScopedFieldTrials nack_delay_field_trial_;
std::unique_ptr<SimulatedClock> clock_;
- NackModule nack_module_;
+ DEPRECATED_NackModule nack_module_;
std::vector<uint16_t> sent_nacks_;
int keyframes_requested_;
};
diff --git a/chromium/third_party/webrtc/modules/video_coding/packet_buffer.cc b/chromium/third_party/webrtc/modules/video_coding/packet_buffer.cc
index 5db3c0f670a..7da8a1c3016 100644
--- a/chromium/third_party/webrtc/modules/video_coding/packet_buffer.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/packet_buffer.cc
@@ -78,7 +78,7 @@ PacketBuffer::~PacketBuffer() {
PacketBuffer::InsertResult PacketBuffer::InsertPacket(
std::unique_ptr<PacketBuffer::Packet> packet) {
PacketBuffer::InsertResult result;
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
uint16_t seq_num = packet->seq_num;
size_t index = seq_num % buffer_.size();
@@ -112,7 +112,7 @@ PacketBuffer::InsertResult PacketBuffer::InsertPacket(
// Clear the buffer, delete payload, and return false to signal that a
// new keyframe is needed.
RTC_LOG(LS_WARNING) << "Clear PacketBuffer and request key frame.";
- Clear();
+ ClearInternal();
result.buffer_cleared = true;
return result;
}
@@ -136,7 +136,7 @@ PacketBuffer::InsertResult PacketBuffer::InsertPacket(
}
void PacketBuffer::ClearTo(uint16_t seq_num) {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
// We have already cleared past this sequence number, no need to do anything.
if (is_cleared_to_first_seq_num_ &&
AheadOf<uint16_t>(first_seq_num_, seq_num)) {
@@ -173,37 +173,41 @@ void PacketBuffer::ClearTo(uint16_t seq_num) {
}
void PacketBuffer::Clear() {
- rtc::CritScope lock(&crit_);
- for (auto& entry : buffer_) {
- entry = nullptr;
- }
-
- first_packet_received_ = false;
- is_cleared_to_first_seq_num_ = false;
- last_received_packet_ms_.reset();
- last_received_keyframe_packet_ms_.reset();
- newest_inserted_seq_num_.reset();
- missing_packets_.clear();
+ MutexLock lock(&mutex_);
+ ClearInternal();
}
PacketBuffer::InsertResult PacketBuffer::InsertPadding(uint16_t seq_num) {
PacketBuffer::InsertResult result;
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
UpdateMissingPackets(seq_num);
result.packets = FindFrames(static_cast<uint16_t>(seq_num + 1));
return result;
}
absl::optional<int64_t> PacketBuffer::LastReceivedPacketMs() const {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
return last_received_packet_ms_;
}
absl::optional<int64_t> PacketBuffer::LastReceivedKeyframePacketMs() const {
- rtc::CritScope lock(&crit_);
+ MutexLock lock(&mutex_);
return last_received_keyframe_packet_ms_;
}
+void PacketBuffer::ClearInternal() {
+ for (auto& entry : buffer_) {
+ entry = nullptr;
+ }
+
+ first_packet_received_ = false;
+ is_cleared_to_first_seq_num_ = false;
+ last_received_packet_ms_.reset();
+ last_received_keyframe_packet_ms_.reset();
+ newest_inserted_seq_num_.reset();
+ missing_packets_.clear();
+}
+
bool PacketBuffer::ExpandBufferSize() {
if (buffer_.size() == max_size_) {
RTC_LOG(LS_WARNING) << "PacketBuffer is already at max size (" << max_size_
@@ -359,15 +363,10 @@ std::vector<std::unique_ptr<PacketBuffer::Packet>> PacketBuffer::FindFrames(
VideoFrameType::kVideoFrameDelta;
}
- // With IPPP, if this is not a keyframe, make sure there are no gaps
- // in the packet sequence numbers up until this point.
- const uint8_t h264tid =
- buffer_[start_index] != nullptr
- ? buffer_[start_index]->video_header.frame_marking.temporal_id
- : kNoTemporalIdx;
- if (h264tid == kNoTemporalIdx && !is_h264_keyframe &&
- missing_packets_.upper_bound(start_seq_num) !=
- missing_packets_.begin()) {
+ // If this is not a keyframe, make sure there are no gaps in the packet
+ // sequence numbers up until this point.
+ if (!is_h264_keyframe && missing_packets_.upper_bound(start_seq_num) !=
+ missing_packets_.begin()) {
return found_frames;
}
}
diff --git a/chromium/third_party/webrtc/modules/video_coding/packet_buffer.h b/chromium/third_party/webrtc/modules/video_coding/packet_buffer.h
index c480e372395..508fa8395f3 100644
--- a/chromium/third_party/webrtc/modules/video_coding/packet_buffer.h
+++ b/chromium/third_party/webrtc/modules/video_coding/packet_buffer.h
@@ -22,8 +22,8 @@
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
#include "modules/rtp_rtcp/source/rtp_video_header.h"
#include "rtc_base/copy_on_write_buffer.h"
-#include "rtc_base/critical_section.h"
#include "rtc_base/numerics/sequence_number_util.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
#include "system_wrappers/include/clock.h"
@@ -82,62 +82,68 @@ class PacketBuffer {
PacketBuffer(Clock* clock, size_t start_buffer_size, size_t max_buffer_size);
~PacketBuffer();
- InsertResult InsertPacket(std::unique_ptr<Packet> packet)
- ABSL_MUST_USE_RESULT;
- InsertResult InsertPadding(uint16_t seq_num) ABSL_MUST_USE_RESULT;
- void ClearTo(uint16_t seq_num);
- void Clear();
+ InsertResult InsertPacket(std::unique_ptr<Packet> packet) ABSL_MUST_USE_RESULT
+ RTC_LOCKS_EXCLUDED(mutex_);
+ InsertResult InsertPadding(uint16_t seq_num) ABSL_MUST_USE_RESULT
+ RTC_LOCKS_EXCLUDED(mutex_);
+ void ClearTo(uint16_t seq_num) RTC_LOCKS_EXCLUDED(mutex_);
+ void Clear() RTC_LOCKS_EXCLUDED(mutex_);
// Timestamp (not RTP timestamp) of the last received packet/keyframe packet.
- absl::optional<int64_t> LastReceivedPacketMs() const;
- absl::optional<int64_t> LastReceivedKeyframePacketMs() const;
+ absl::optional<int64_t> LastReceivedPacketMs() const
+ RTC_LOCKS_EXCLUDED(mutex_);
+ absl::optional<int64_t> LastReceivedKeyframePacketMs() const
+ RTC_LOCKS_EXCLUDED(mutex_);
private:
Clock* const clock_;
+ // Clears with |mutex_| taken.
+ void ClearInternal() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+
// Tries to expand the buffer.
- bool ExpandBufferSize() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ bool ExpandBufferSize() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Test if all previous packets has arrived for the given sequence number.
bool PotentialNewFrame(uint16_t seq_num) const
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Test if all packets of a frame has arrived, and if so, returns packets to
// create frames.
std::vector<std::unique_ptr<Packet>> FindFrames(uint16_t seq_num)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
void UpdateMissingPackets(uint16_t seq_num)
- RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
- rtc::CriticalSection crit_;
+ mutable Mutex mutex_;
// buffer_.size() and max_size_ must always be a power of two.
const size_t max_size_;
// The fist sequence number currently in the buffer.
- uint16_t first_seq_num_ RTC_GUARDED_BY(crit_);
+ uint16_t first_seq_num_ RTC_GUARDED_BY(mutex_);
// If the packet buffer has received its first packet.
- bool first_packet_received_ RTC_GUARDED_BY(crit_);
+ bool first_packet_received_ RTC_GUARDED_BY(mutex_);
// If the buffer is cleared to |first_seq_num_|.
- bool is_cleared_to_first_seq_num_ RTC_GUARDED_BY(crit_);
+ bool is_cleared_to_first_seq_num_ RTC_GUARDED_BY(mutex_);
// Buffer that holds the the inserted packets and information needed to
// determine continuity between them.
- std::vector<std::unique_ptr<Packet>> buffer_ RTC_GUARDED_BY(crit_);
+ std::vector<std::unique_ptr<Packet>> buffer_ RTC_GUARDED_BY(mutex_);
// Timestamp of the last received packet/keyframe packet.
- absl::optional<int64_t> last_received_packet_ms_ RTC_GUARDED_BY(crit_);
+ absl::optional<int64_t> last_received_packet_ms_ RTC_GUARDED_BY(mutex_);
absl::optional<int64_t> last_received_keyframe_packet_ms_
- RTC_GUARDED_BY(crit_);
+ RTC_GUARDED_BY(mutex_);
absl::optional<uint32_t> last_received_keyframe_rtp_timestamp_
- RTC_GUARDED_BY(crit_);
+ RTC_GUARDED_BY(mutex_);
- absl::optional<uint16_t> newest_inserted_seq_num_ RTC_GUARDED_BY(crit_);
+ absl::optional<uint16_t> newest_inserted_seq_num_ RTC_GUARDED_BY(mutex_);
std::set<uint16_t, DescendingSeqNumComp<uint16_t>> missing_packets_
- RTC_GUARDED_BY(crit_);
+ RTC_GUARDED_BY(mutex_);
// Indicates if we should require SPS, PPS, and IDR for a particular
// RTP timestamp to treat the corresponding frame as a keyframe.
diff --git a/chromium/third_party/webrtc/modules/video_coding/rtp_frame_reference_finder.cc b/chromium/third_party/webrtc/modules/video_coding/rtp_frame_reference_finder.cc
index bdef991b8b4..2a43c275d6d 100644
--- a/chromium/third_party/webrtc/modules/video_coding/rtp_frame_reference_finder.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/rtp_frame_reference_finder.cc
@@ -108,8 +108,6 @@ RtpFrameReferenceFinder::ManageFrameInternal(RtpFrameObject* frame) {
return ManageFrameVp8(frame);
case kVideoCodecVP9:
return ManageFrameVp9(frame);
- case kVideoCodecH264:
- return ManageFrameH264(frame);
case kVideoCodecGeneric:
if (auto* generic_header = absl::get_if<RTPVideoHeaderLegacyGeneric>(
&frame->GetRtpVideoHeader().video_type_header)) {
@@ -715,130 +713,6 @@ void RtpFrameReferenceFinder::UnwrapPictureIds(RtpFrameObject* frame) {
frame->id.picture_id = unwrapper_.Unwrap(frame->id.picture_id);
}
-RtpFrameReferenceFinder::FrameDecision RtpFrameReferenceFinder::ManageFrameH264(
- RtpFrameObject* frame) {
- const FrameMarking& rtp_frame_marking = frame->GetFrameMarking();
-
- uint8_t tid = rtp_frame_marking.temporal_id;
- bool blSync = rtp_frame_marking.base_layer_sync;
-
- if (tid == kNoTemporalIdx)
- return ManageFramePidOrSeqNum(std::move(frame), kNoPictureId);
-
- // Protect against corrupted packets with arbitrary large temporal idx.
- if (tid >= kMaxTemporalLayers)
- return kDrop;
-
- frame->id.picture_id = frame->last_seq_num();
-
- if (frame->frame_type() == VideoFrameType::kVideoFrameKey) {
- // For H264, use last_seq_num_gop_ to simply store last picture id
- // as a pair of unpadded and padded sequence numbers.
- if (last_seq_num_gop_.empty()) {
- last_seq_num_gop_.insert(std::make_pair(
- 0, std::make_pair(frame->id.picture_id, frame->id.picture_id)));
- }
- }
-
- // Stash if we have no keyframe yet.
- if (last_seq_num_gop_.empty())
- return kStash;
-
- // Check for gap in sequence numbers. Store in |not_yet_received_seq_num_|.
- if (frame->frame_type() == VideoFrameType::kVideoFrameDelta) {
- uint16_t last_pic_id_padded = last_seq_num_gop_.begin()->second.second;
- if (AheadOf<uint16_t>(frame->id.picture_id, last_pic_id_padded)) {
- do {
- last_pic_id_padded = last_pic_id_padded + 1;
- not_yet_received_seq_num_.insert(last_pic_id_padded);
- } while (last_pic_id_padded != frame->id.picture_id);
- }
- }
-
- int64_t unwrapped_tl0 = tl0_unwrapper_.Unwrap(rtp_frame_marking.tl0_pic_idx);
-
- // Clean up info for base layers that are too old.
- int64_t old_tl0_pic_idx = unwrapped_tl0 - kMaxLayerInfo;
- auto clean_layer_info_to = layer_info_.lower_bound(old_tl0_pic_idx);
- layer_info_.erase(layer_info_.begin(), clean_layer_info_to);
-
- // Clean up info about not yet received frames that are too old.
- uint16_t old_picture_id = frame->id.picture_id - kMaxNotYetReceivedFrames * 2;
- auto clean_frames_to = not_yet_received_seq_num_.lower_bound(old_picture_id);
- not_yet_received_seq_num_.erase(not_yet_received_seq_num_.begin(),
- clean_frames_to);
-
- if (frame->frame_type() == VideoFrameType::kVideoFrameKey) {
- frame->num_references = 0;
- layer_info_[unwrapped_tl0].fill(-1);
- UpdateDataH264(frame, unwrapped_tl0, tid);
- return kHandOff;
- }
-
- auto layer_info_it =
- layer_info_.find(tid == 0 ? unwrapped_tl0 - 1 : unwrapped_tl0);
-
- // Stash if we have no base layer frame yet.
- if (layer_info_it == layer_info_.end())
- return kStash;
-
- // Base layer frame. Copy layer info from previous base layer frame.
- if (tid == 0) {
- layer_info_it =
- layer_info_.insert(std::make_pair(unwrapped_tl0, layer_info_it->second))
- .first;
- frame->num_references = 1;
- frame->references[0] = layer_info_it->second[0];
- UpdateDataH264(frame, unwrapped_tl0, tid);
- return kHandOff;
- }
-
- // This frame only references its base layer frame.
- if (blSync) {
- frame->num_references = 1;
- frame->references[0] = layer_info_it->second[0];
- UpdateDataH264(frame, unwrapped_tl0, tid);
- return kHandOff;
- }
-
- // Find all references for general frame.
- frame->num_references = 0;
- for (uint8_t layer = 0; layer <= tid; ++layer) {
- // Stash if we have not yet received frames on this temporal layer.
- if (layer_info_it->second[layer] == -1)
- return kStash;
-
- // Drop if the last frame on this layer is ahead of this frame. A layer sync
- // frame was received after this frame for the same base layer frame.
- uint16_t last_frame_in_layer = layer_info_it->second[layer];
- if (AheadOf<uint16_t>(last_frame_in_layer, frame->id.picture_id))
- return kDrop;
-
- // Stash and wait for missing frame between this frame and the reference
- auto not_received_seq_num_it =
- not_yet_received_seq_num_.upper_bound(last_frame_in_layer);
- if (not_received_seq_num_it != not_yet_received_seq_num_.end() &&
- AheadOf<uint16_t>(frame->id.picture_id, *not_received_seq_num_it)) {
- return kStash;
- }
-
- if (!(AheadOf<uint16_t>(frame->id.picture_id, last_frame_in_layer))) {
- RTC_LOG(LS_WARNING) << "Frame with picture id " << frame->id.picture_id
- << " and packet range [" << frame->first_seq_num()
- << ", " << frame->last_seq_num()
- << "] already received, "
- " dropping frame.";
- return kDrop;
- }
-
- ++frame->num_references;
- frame->references[layer] = last_frame_in_layer;
- }
-
- UpdateDataH264(frame, unwrapped_tl0, tid);
- return kHandOff;
-}
-
void RtpFrameReferenceFinder::UpdateLastPictureIdWithPaddingH264() {
auto seq_num_it = last_seq_num_gop_.begin();
diff --git a/chromium/third_party/webrtc/modules/video_coding/rtp_frame_reference_finder_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/rtp_frame_reference_finder_unittest.cc
index 9ded6bcb964..0c08ddd3027 100644
--- a/chromium/third_party/webrtc/modules/video_coding/rtp_frame_reference_finder_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/rtp_frame_reference_finder_unittest.cc
@@ -32,13 +32,11 @@ std::unique_ptr<RtpFrameObject> CreateFrame(
uint16_t seq_num_end,
bool keyframe,
VideoCodecType codec,
- const RTPVideoTypeHeader& video_type_header,
- const FrameMarking& frame_markings) {
+ const RTPVideoTypeHeader& video_type_header) {
RTPVideoHeader video_header;
video_header.frame_type = keyframe ? VideoFrameType::kVideoFrameKey
: VideoFrameType::kVideoFrameDelta;
video_header.video_type_header = video_type_header;
- video_header.frame_marking = frame_markings;
// clang-format off
return std::make_unique<RtpFrameObject>(
@@ -92,7 +90,7 @@ class TestRtpFrameReferenceFinder : public ::testing::Test,
bool keyframe) {
std::unique_ptr<RtpFrameObject> frame =
CreateFrame(seq_num_start, seq_num_end, keyframe, kVideoCodecGeneric,
- RTPVideoTypeHeader(), FrameMarking());
+ RTPVideoTypeHeader());
reference_finder_->ManageFrame(std::move(frame));
}
@@ -110,9 +108,8 @@ class TestRtpFrameReferenceFinder : public ::testing::Test,
vp8_header.tl0PicIdx = tl0;
vp8_header.layerSync = sync;
- std::unique_ptr<RtpFrameObject> frame =
- CreateFrame(seq_num_start, seq_num_end, keyframe, kVideoCodecVP8,
- vp8_header, FrameMarking());
+ std::unique_ptr<RtpFrameObject> frame = CreateFrame(
+ seq_num_start, seq_num_end, keyframe, kVideoCodecVP8, vp8_header);
reference_finder_->ManageFrame(std::move(frame));
}
@@ -140,9 +137,8 @@ class TestRtpFrameReferenceFinder : public ::testing::Test,
vp9_header.gof = *ss;
}
- std::unique_ptr<RtpFrameObject> frame =
- CreateFrame(seq_num_start, seq_num_end, keyframe, kVideoCodecVP9,
- vp9_header, FrameMarking());
+ std::unique_ptr<RtpFrameObject> frame = CreateFrame(
+ seq_num_start, seq_num_end, keyframe, kVideoCodecVP9, vp9_header);
reference_finder_->ManageFrame(std::move(frame));
}
@@ -166,26 +162,15 @@ class TestRtpFrameReferenceFinder : public ::testing::Test,
for (size_t i = 0; i < refs.size(); ++i)
vp9_header.pid_diff[i] = refs[i];
- std::unique_ptr<RtpFrameObject> frame =
- CreateFrame(seq_num_start, seq_num_end, keyframe, kVideoCodecVP9,
- vp9_header, FrameMarking());
+ std::unique_ptr<RtpFrameObject> frame = CreateFrame(
+ seq_num_start, seq_num_end, keyframe, kVideoCodecVP9, vp9_header);
reference_finder_->ManageFrame(std::move(frame));
}
- void InsertH264(uint16_t seq_num_start,
- uint16_t seq_num_end,
- bool keyframe,
- uint8_t tid = kNoTemporalIdx,
- int32_t tl0 = kNoTl0PicIdx,
- bool sync = false) {
- FrameMarking frame_marking{};
- frame_marking.temporal_id = tid;
- frame_marking.tl0_pic_idx = tl0;
- frame_marking.base_layer_sync = sync;
-
+ void InsertH264(uint16_t seq_num_start, uint16_t seq_num_end, bool keyframe) {
std::unique_ptr<RtpFrameObject> frame =
CreateFrame(seq_num_start, seq_num_end, keyframe, kVideoCodecH264,
- RTPVideoTypeHeader(), frame_marking);
+ RTPVideoTypeHeader());
reference_finder_->ManageFrame(std::move(frame));
}
@@ -1440,53 +1425,46 @@ TEST_F(TestRtpFrameReferenceFinder, H264KeyFrameReferences) {
CheckReferencesH264(sn);
}
-// Test with 1 temporal layer.
-TEST_F(TestRtpFrameReferenceFinder, H264TemporalLayers_0) {
- uint16_t sn = Rand();
+TEST_F(TestRtpFrameReferenceFinder, H264SequenceNumberWrap) {
+ uint16_t sn = 0xFFFF;
- InsertH264(sn, sn, true, 0, 1);
- InsertH264(sn + 1, sn + 1, false, 0, 2);
- InsertH264(sn + 2, sn + 2, false, 0, 3);
- InsertH264(sn + 3, sn + 3, false, 0, 4);
+ InsertH264(sn - 1, sn - 1, true);
+ InsertH264(sn, sn, false);
+ InsertH264(sn + 1, sn + 1, false);
+ InsertH264(sn + 2, sn + 2, false);
ASSERT_EQ(4UL, frames_from_callback_.size());
- CheckReferencesH264(sn);
+ CheckReferencesH264(sn - 1);
+ CheckReferencesH264(sn, sn - 1);
CheckReferencesH264(sn + 1, sn);
CheckReferencesH264(sn + 2, sn + 1);
- CheckReferencesH264(sn + 3, sn + 2);
}
-TEST_F(TestRtpFrameReferenceFinder, H264DuplicateTl1Frames) {
+TEST_F(TestRtpFrameReferenceFinder, H264Frames) {
uint16_t sn = Rand();
- InsertH264(sn, sn, true, 0, 0);
- InsertH264(sn + 1, sn + 1, false, 1, 0, true);
- InsertH264(sn + 2, sn + 2, false, 0, 1);
- InsertH264(sn + 3, sn + 3, false, 1, 1);
- InsertH264(sn + 3, sn + 3, false, 1, 1);
- InsertH264(sn + 4, sn + 4, false, 0, 2);
- InsertH264(sn + 5, sn + 5, false, 1, 2);
+ InsertH264(sn, sn, true);
+ InsertH264(sn + 1, sn + 1, false);
+ InsertH264(sn + 2, sn + 2, false);
+ InsertH264(sn + 3, sn + 3, false);
- ASSERT_EQ(6UL, frames_from_callback_.size());
+ ASSERT_EQ(4UL, frames_from_callback_.size());
CheckReferencesH264(sn);
CheckReferencesH264(sn + 1, sn);
- CheckReferencesH264(sn + 2, sn);
- CheckReferencesH264(sn + 3, sn + 1, sn + 2);
- CheckReferencesH264(sn + 4, sn + 2);
- CheckReferencesH264(sn + 5, sn + 3, sn + 4);
+ CheckReferencesH264(sn + 2, sn + 1);
+ CheckReferencesH264(sn + 3, sn + 2);
}
-// Test with 1 temporal layer.
-TEST_F(TestRtpFrameReferenceFinder, H264TemporalLayersReordering_0) {
+TEST_F(TestRtpFrameReferenceFinder, H264Reordering) {
uint16_t sn = Rand();
- InsertH264(sn, sn, true, 0, 1);
- InsertH264(sn + 1, sn + 1, false, 0, 2);
- InsertH264(sn + 3, sn + 3, false, 0, 4);
- InsertH264(sn + 2, sn + 2, false, 0, 3);
- InsertH264(sn + 5, sn + 5, false, 0, 6);
- InsertH264(sn + 6, sn + 6, false, 0, 7);
- InsertH264(sn + 4, sn + 4, false, 0, 5);
+ InsertH264(sn, sn, true);
+ InsertH264(sn + 1, sn + 1, false);
+ InsertH264(sn + 3, sn + 3, false);
+ InsertH264(sn + 2, sn + 2, false);
+ InsertH264(sn + 5, sn + 5, false);
+ InsertH264(sn + 6, sn + 6, false);
+ InsertH264(sn + 4, sn + 4, false);
ASSERT_EQ(7UL, frames_from_callback_.size());
CheckReferencesH264(sn);
@@ -1498,258 +1476,13 @@ TEST_F(TestRtpFrameReferenceFinder, H264TemporalLayersReordering_0) {
CheckReferencesH264(sn + 6, sn + 5);
}
-// Test with 2 temporal layers in a 01 pattern.
-TEST_F(TestRtpFrameReferenceFinder, H264TemporalLayers_01) {
- uint16_t sn = Rand();
-
- InsertH264(sn, sn, true, 0, 255);
- InsertH264(sn + 1, sn + 1, false, 1, 255, true);
- InsertH264(sn + 2, sn + 2, false, 0, 0);
- InsertH264(sn + 3, sn + 3, false, 1, 0);
-
- ASSERT_EQ(4UL, frames_from_callback_.size());
- CheckReferencesH264(sn);
- CheckReferencesH264(sn + 1, sn);
- CheckReferencesH264(sn + 2, sn);
- CheckReferencesH264(sn + 3, sn + 1, sn + 2);
-}
-
-TEST_F(TestRtpFrameReferenceFinder, H264TemporalLayersMultiSn_01) {
- uint16_t sn = Rand();
-
- InsertH264(sn, sn + 3, true, 0, 255);
- InsertH264(sn + 4, sn + 5, false, 1, 255, true);
- InsertH264(sn + 6, sn + 8, false, 0, 0);
- InsertH264(sn + 9, sn + 9, false, 1, 0);
-
- ASSERT_EQ(4UL, frames_from_callback_.size());
- CheckReferencesH264(sn + 3);
- CheckReferencesH264(sn + 5, sn + 3);
- CheckReferencesH264(sn + 8, sn + 3);
- CheckReferencesH264(sn + 9, sn + 5, sn + 8);
-}
-
-// Test with 2 temporal layers in a 01 pattern.
-TEST_F(TestRtpFrameReferenceFinder, H264TemporalLayersReordering_01) {
- uint16_t sn = Rand();
-
- InsertH264(sn + 1, sn + 1, false, 1, 255, true);
- InsertH264(sn, sn, true, 0, 255);
- InsertH264(sn + 3, sn + 3, false, 1, 0);
- InsertH264(sn + 5, sn + 5, false, 1, 1);
- InsertH264(sn + 2, sn + 2, false, 0, 0);
- InsertH264(sn + 4, sn + 4, false, 0, 1);
- InsertH264(sn + 6, sn + 6, false, 0, 2);
- InsertH264(sn + 7, sn + 7, false, 1, 2);
-
- ASSERT_EQ(8UL, frames_from_callback_.size());
- CheckReferencesH264(sn);
- CheckReferencesH264(sn + 1, sn);
- CheckReferencesH264(sn + 2, sn);
- CheckReferencesH264(sn + 3, sn + 1, sn + 2);
- CheckReferencesH264(sn + 4, sn + 2);
- CheckReferencesH264(sn + 5, sn + 3, sn + 4);
- CheckReferencesH264(sn + 6, sn + 4);
- CheckReferencesH264(sn + 7, sn + 5, sn + 6);
-}
-
-// Test with 3 temporal layers in a 0212 pattern.
-TEST_F(TestRtpFrameReferenceFinder, H264TemporalLayers_0212) {
- uint16_t sn = Rand();
-
- InsertH264(sn, sn, true, 0, 55);
- InsertH264(sn + 1, sn + 1, false, 2, 55, true);
- InsertH264(sn + 2, sn + 2, false, 1, 55, true);
- InsertH264(sn + 3, sn + 3, false, 2, 55);
- InsertH264(sn + 4, sn + 4, false, 0, 56);
- InsertH264(sn + 5, sn + 5, false, 2, 56, true);
- InsertH264(sn + 6, sn + 6, false, 1, 56, true);
- InsertH264(sn + 7, sn + 7, false, 2, 56);
- InsertH264(sn + 8, sn + 8, false, 0, 57);
- InsertH264(sn + 9, sn + 9, false, 2, 57, true);
- InsertH264(sn + 10, sn + 10, false, 1, 57, true);
- InsertH264(sn + 11, sn + 11, false, 2, 57);
-
- ASSERT_EQ(12UL, frames_from_callback_.size());
- CheckReferencesH264(sn);
- CheckReferencesH264(sn + 1, sn);
- CheckReferencesH264(sn + 2, sn);
- CheckReferencesH264(sn + 3, sn, sn + 1, sn + 2);
- CheckReferencesH264(sn + 4, sn);
- CheckReferencesH264(sn + 5, sn + 4);
- CheckReferencesH264(sn + 6, sn + 4);
- CheckReferencesH264(sn + 7, sn + 4, sn + 5, sn + 6);
- CheckReferencesH264(sn + 8, sn + 4);
- CheckReferencesH264(sn + 9, sn + 8);
- CheckReferencesH264(sn + 10, sn + 8);
- CheckReferencesH264(sn + 11, sn + 8, sn + 9, sn + 10);
-}
-
-// Test with 3 temporal layers in a 0212 pattern.
-TEST_F(TestRtpFrameReferenceFinder, H264TemporalLayersMissingFrame_0212) {
- uint16_t sn = Rand();
-
- InsertH264(sn, sn, true, 0, 55, false);
- InsertH264(sn + 2, sn + 2, false, 1, 55, true);
- InsertH264(sn + 3, sn + 3, false, 2, 55, false);
-
- ASSERT_EQ(2UL, frames_from_callback_.size());
- CheckReferencesH264(sn);
- CheckReferencesH264(sn + 2, sn);
-}
-
-// Test with 3 temporal layers in a 0212 pattern.
-TEST_F(TestRtpFrameReferenceFinder, H264TemporalLayersReordering_0212) {
- uint16_t sn = Rand();
-
- InsertH264(sn + 1, sn + 1, false, 2, 55, true);
- InsertH264(sn, sn, true, 0, 55, false);
- InsertH264(sn + 2, sn + 2, false, 1, 55, true);
- InsertH264(sn + 4, sn + 4, false, 0, 56, false);
- InsertH264(sn + 5, sn + 5, false, 2, 56, false);
- InsertH264(sn + 3, sn + 3, false, 2, 55, false);
- InsertH264(sn + 7, sn + 7, false, 2, 56, false);
- InsertH264(sn + 9, sn + 9, false, 2, 57, true);
- InsertH264(sn + 6, sn + 6, false, 1, 56, false);
- InsertH264(sn + 8, sn + 8, false, 0, 57, false);
- InsertH264(sn + 11, sn + 11, false, 2, 57, false);
- InsertH264(sn + 10, sn + 10, false, 1, 57, true);
-
- ASSERT_EQ(12UL, frames_from_callback_.size());
- CheckReferencesH264(sn);
- CheckReferencesH264(sn + 1, sn);
- CheckReferencesH264(sn + 2, sn);
- CheckReferencesH264(sn + 3, sn, sn + 1, sn + 2);
- CheckReferencesH264(sn + 4, sn);
- CheckReferencesH264(sn + 5, sn + 2, sn + 3, sn + 4);
- CheckReferencesH264(sn + 6, sn + 2, sn + 4);
- CheckReferencesH264(sn + 7, sn + 4, sn + 5, sn + 6);
- CheckReferencesH264(sn + 8, sn + 4);
- CheckReferencesH264(sn + 9, sn + 8);
- CheckReferencesH264(sn + 10, sn + 8);
- CheckReferencesH264(sn + 11, sn + 8, sn + 9, sn + 10);
-}
-
-TEST_F(TestRtpFrameReferenceFinder, H264InsertManyFrames_0212) {
- uint16_t sn = Rand();
-
- const int keyframes_to_insert = 50;
- const int frames_per_keyframe = 120; // Should be a multiple of 4.
- uint8_t tl0 = 128;
-
- for (int k = 0; k < keyframes_to_insert; ++k) {
- InsertH264(sn, sn, true, 0, tl0, false);
- InsertH264(sn + 1, sn + 1, false, 2, tl0, true);
- InsertH264(sn + 2, sn + 2, false, 1, tl0, true);
- InsertH264(sn + 3, sn + 3, false, 2, tl0, false);
- CheckReferencesH264(sn);
- CheckReferencesH264(sn + 1, sn);
- CheckReferencesH264(sn + 2, sn);
- CheckReferencesH264(sn + 3, sn, sn + 1, sn + 2);
- frames_from_callback_.clear();
- ++tl0;
-
- for (int f = 4; f < frames_per_keyframe; f += 4) {
- uint16_t sf = sn + f;
-
- InsertH264(sf, sf, false, 0, tl0, false);
- InsertH264(sf + 1, sf + 1, false, 2, tl0, false);
- InsertH264(sf + 2, sf + 2, false, 1, tl0, false);
- InsertH264(sf + 3, sf + 3, false, 2, tl0, false);
- CheckReferencesH264(sf, sf - 4);
- CheckReferencesH264(sf + 1, sf, sf - 1, sf - 2);
- CheckReferencesH264(sf + 2, sf, sf - 2);
- CheckReferencesH264(sf + 3, sf, sf + 1, sf + 2);
- frames_from_callback_.clear();
- ++tl0;
- }
-
- sn += frames_per_keyframe;
- }
-}
-
-TEST_F(TestRtpFrameReferenceFinder, H264LayerSync) {
- uint16_t sn = Rand();
-
- InsertH264(sn, sn, true, 0, 0, false);
- InsertH264(sn + 1, sn + 1, false, 1, 0, true);
- InsertH264(sn + 2, sn + 2, false, 0, 1, false);
- ASSERT_EQ(3UL, frames_from_callback_.size());
-
- InsertH264(sn + 4, sn + 4, false, 0, 2, false);
- InsertH264(sn + 5, sn + 5, false, 1, 2, true);
- InsertH264(sn + 6, sn + 6, false, 0, 3, false);
- InsertH264(sn + 7, sn + 7, false, 1, 3, false);
-
- ASSERT_EQ(7UL, frames_from_callback_.size());
- CheckReferencesH264(sn);
- CheckReferencesH264(sn + 1, sn);
- CheckReferencesH264(sn + 2, sn);
- CheckReferencesH264(sn + 4, sn + 2);
- CheckReferencesH264(sn + 5, sn + 4);
- CheckReferencesH264(sn + 6, sn + 4);
- CheckReferencesH264(sn + 7, sn + 6, sn + 5);
-}
-
-TEST_F(TestRtpFrameReferenceFinder, H264Tl1SyncFrameAfterTl1Frame) {
- InsertH264(1000, 1000, true, 0, 247, true);
- InsertH264(1001, 1001, false, 0, 248, false);
- InsertH264(1002, 1002, false, 1, 248, false); // Will be dropped
- InsertH264(1003, 1003, false, 1, 248, true); // due to this frame.
-
- ASSERT_EQ(3UL, frames_from_callback_.size());
- CheckReferencesH264(1000);
- CheckReferencesH264(1001, 1000);
- CheckReferencesH264(1003, 1001);
-}
-
-TEST_F(TestRtpFrameReferenceFinder, H264DetectMissingFrame_0212) {
- InsertH264(1, 1, true, 0, 1, false);
- InsertH264(2, 2, false, 2, 1, true);
- InsertH264(3, 3, false, 1, 1, true);
- InsertH264(4, 4, false, 2, 1, false);
-
- InsertH264(6, 6, false, 2, 2, false);
- InsertH264(7, 7, false, 1, 2, false);
- InsertH264(8, 8, false, 2, 2, false);
- ASSERT_EQ(4UL, frames_from_callback_.size());
-
- InsertH264(5, 5, false, 0, 2, false);
- ASSERT_EQ(8UL, frames_from_callback_.size());
-
- CheckReferencesH264(1);
- CheckReferencesH264(2, 1);
- CheckReferencesH264(3, 1);
- CheckReferencesH264(4, 3, 2, 1);
-
- CheckReferencesH264(5, 1);
- CheckReferencesH264(6, 5, 4, 3);
- CheckReferencesH264(7, 5, 3);
- CheckReferencesH264(8, 7, 6, 5);
-}
-
-TEST_F(TestRtpFrameReferenceFinder, H264SequenceNumberWrap) {
- uint16_t sn = 0xFFFF;
-
- InsertH264(sn - 1, sn - 1, true, 0, 1);
- InsertH264(sn, sn, false, 0, 2);
- InsertH264(sn + 1, sn + 1, false, 0, 3);
- InsertH264(sn + 2, sn + 2, false, 0, 4);
-
- ASSERT_EQ(4UL, frames_from_callback_.size());
- CheckReferencesH264(sn - 1);
- CheckReferencesH264(sn, sn - 1);
- CheckReferencesH264(sn + 1, sn);
- CheckReferencesH264(sn + 2, sn + 1);
-}
-
TEST_F(TestRtpFrameReferenceFinder, H264SequenceNumberWrapMulti) {
uint16_t sn = 0xFFFF;
- InsertH264(sn - 3, sn - 2, true, 0, 1);
- InsertH264(sn - 1, sn + 1, false, 0, 2);
- InsertH264(sn + 2, sn + 3, false, 0, 3);
- InsertH264(sn + 4, sn + 7, false, 0, 4);
+ InsertH264(sn - 3, sn - 2, true);
+ InsertH264(sn - 1, sn + 1, false);
+ InsertH264(sn + 2, sn + 3, false);
+ InsertH264(sn + 4, sn + 7, false);
ASSERT_EQ(4UL, frames_from_callback_.size());
CheckReferencesH264(sn - 2);
@@ -1758,35 +1491,5 @@ TEST_F(TestRtpFrameReferenceFinder, H264SequenceNumberWrapMulti) {
CheckReferencesH264(sn + 7, sn + 3);
}
-TEST_F(TestRtpFrameReferenceFinder, H264Tl0PicIdxWrap) {
- int numTl0Wraps = 1000;
- int64_t sn = Rand();
-
- for (int i = 0; i < numTl0Wraps; i++) {
- for (int tl0 = 0; tl0 < 256; tl0 += 16, sn += 16) {
- InsertH264(sn, sn, true, 0, tl0);
- reference_finder_->ClearTo(sn); // Too many stashed frames cause errors.
-
- for (int k = 1; k < 8; k++) {
- InsertH264(sn + k, sn + k, false, 0, tl0 + k);
- }
-
- // Skip a TL0 index.
- for (int k = 9; k < 16; k++) {
- InsertH264(sn + k, sn + k, false, 0, tl0 + k);
- }
-
- ASSERT_EQ(8UL, frames_from_callback_.size());
-
- CheckReferencesH264(sn);
- for (int k = 1; k < 8; k++) {
- CheckReferencesH264(sn + k, sn + k - 1);
- }
-
- frames_from_callback_.clear();
- }
- }
-}
-
} // namespace video_coding
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/session_info.cc b/chromium/third_party/webrtc/modules/video_coding/session_info.cc
index e51d2936077..07b9a9d6b57 100644
--- a/chromium/third_party/webrtc/modules/video_coding/session_info.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/session_info.cc
@@ -95,8 +95,6 @@ int VCMSessionInfo::TemporalId() const {
return absl::get<RTPVideoHeaderVP9>(
packets_.front().video_header.video_type_header)
.temporal_idx;
- } else if (packets_.front().video_header.codec == kVideoCodecH264) {
- return packets_.front().video_header.frame_marking.temporal_id;
} else {
return kNoTemporalIdx;
}
@@ -113,8 +111,6 @@ bool VCMSessionInfo::LayerSync() const {
return absl::get<RTPVideoHeaderVP9>(
packets_.front().video_header.video_type_header)
.temporal_up_switch;
- } else if (packets_.front().video_header.codec == kVideoCodecH264) {
- return packets_.front().video_header.frame_marking.base_layer_sync;
} else {
return false;
}
@@ -131,8 +127,6 @@ int VCMSessionInfo::Tl0PicId() const {
return absl::get<RTPVideoHeaderVP9>(
packets_.front().video_header.video_type_header)
.tl0_pic_idx;
- } else if (packets_.front().video_header.codec == kVideoCodecH264) {
- return packets_.front().video_header.frame_marking.tl0_pic_idx;
} else {
return kNoTl0PicIdx;
}
diff --git a/chromium/third_party/webrtc/modules/video_coding/utility/simulcast_rate_allocator_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/utility/simulcast_rate_allocator_unittest.cc
index db104c49d16..871e5a16924 100644
--- a/chromium/third_party/webrtc/modules/video_coding/utility/simulcast_rate_allocator_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/utility/simulcast_rate_allocator_unittest.cc
@@ -38,14 +38,16 @@ constexpr uint32_t kSimulcastScreenshareMaxBitrateKbps = 1250;
class MockTemporalLayers : public Vp8FrameBufferController {
public:
- MOCK_METHOD2(NextFrameConfig, Vp8FrameConfig(size_t, uint32_t));
- MOCK_METHOD3(OnRatesUpdated, void(size_t, const std::vector<uint32_t>&, int));
- MOCK_METHOD1(UpdateConfiguration, Vp8EncoderConfig(size_t));
- MOCK_METHOD6(OnEncodeDone,
- void(size_t, uint32_t, size_t, bool, int, CodecSpecificInfo*));
- MOCK_METHOD4(FrameEncoded, void(size_t, uint32_t, size_t, int));
- MOCK_CONST_METHOD0(Tl0PicIdx, uint8_t());
- MOCK_CONST_METHOD1(GetTemporalLayerId, int(const Vp8FrameConfig&));
+ MOCK_METHOD(Vp8FrameConfig, NextFrameConfig, (size_t, uint32_t), (override));
+ MOCK_METHOD(void,
+ OnRatesUpdated,
+ (size_t, const std::vector<uint32_t>&, int),
+ (override));
+ MOCK_METHOD(Vp8EncoderConfig, UpdateConfiguration, (size_t), (override));
+ MOCK_METHOD(void,
+ OnEncodeDone,
+ (size_t, uint32_t, size_t, bool, int, CodecSpecificInfo*),
+ (override));
};
} // namespace
diff --git a/chromium/third_party/webrtc/modules/video_coding/utility/vp9_uncompressed_header_parser.cc b/chromium/third_party/webrtc/modules/video_coding/utility/vp9_uncompressed_header_parser.cc
index 9c89235fe21..f8ddd4db41c 100644
--- a/chromium/third_party/webrtc/modules/video_coding/utility/vp9_uncompressed_header_parser.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/utility/vp9_uncompressed_header_parser.cc
@@ -52,40 +52,65 @@ bool Vp9ReadSyncCode(rtc::BitBuffer* br) {
return true;
}
-bool Vp9ReadColorConfig(rtc::BitBuffer* br, uint8_t profile) {
- if (profile == 2 || profile == 3) {
- // Bitdepth.
- RETURN_FALSE_IF_ERROR(br->ConsumeBits(1));
+bool Vp9ReadColorConfig(rtc::BitBuffer* br,
+ uint8_t profile,
+ FrameInfo* frame_info) {
+ if (profile == 0 || profile == 1) {
+ frame_info->bit_detph = BitDept::k8Bit;
+ } else if (profile == 2 || profile == 3) {
+ uint32_t ten_or_twelve_bits;
+ RETURN_FALSE_IF_ERROR(br->ReadBits(&ten_or_twelve_bits, 1));
+ frame_info->bit_detph =
+ ten_or_twelve_bits ? BitDept::k12Bit : BitDept::k10Bit;
}
uint32_t color_space;
RETURN_FALSE_IF_ERROR(br->ReadBits(&color_space, 3));
+ frame_info->color_space = static_cast<ColorSpace>(color_space);
// SRGB is 7.
if (color_space != 7) {
- // YUV range flag.
- RETURN_FALSE_IF_ERROR(br->ConsumeBits(1));
+ uint32_t color_range;
+ RETURN_FALSE_IF_ERROR(br->ReadBits(&color_range, 1));
+ frame_info->color_range =
+ color_range ? ColorRange::kFull : ColorRange::kStudio;
+
if (profile == 1 || profile == 3) {
- // 1 bit: subsampling x.
- // 1 bit: subsampling y.
- RETURN_FALSE_IF_ERROR(br->ConsumeBits(2));
+ uint32_t subsampling_x;
+ uint32_t subsampling_y;
+ RETURN_FALSE_IF_ERROR(br->ReadBits(&subsampling_x, 1));
+ RETURN_FALSE_IF_ERROR(br->ReadBits(&subsampling_y, 1));
+ if (subsampling_x) {
+ frame_info->sub_sampling =
+ subsampling_y ? YuvSubsampling::k420 : YuvSubsampling::k422;
+ } else {
+ frame_info->sub_sampling =
+ subsampling_y ? YuvSubsampling::k440 : YuvSubsampling::k444;
+ }
+
uint32_t reserved_bit;
RETURN_FALSE_IF_ERROR(br->ReadBits(&reserved_bit, 1));
if (reserved_bit) {
- RTC_LOG(LS_WARNING) << "Failed to get QP. Reserved bit set.";
+ RTC_LOG(LS_WARNING) << "Failed to parse header. Reserved bit set.";
return false;
}
+ } else {
+ // Profile 0 or 2.
+ frame_info->sub_sampling = YuvSubsampling::k420;
}
} else {
+ // SRGB
+ frame_info->color_range = ColorRange::kFull;
if (profile == 1 || profile == 3) {
+ frame_info->sub_sampling = YuvSubsampling::k444;
uint32_t reserved_bit;
RETURN_FALSE_IF_ERROR(br->ReadBits(&reserved_bit, 1));
if (reserved_bit) {
- RTC_LOG(LS_WARNING) << "Failed to get QP. Reserved bit set.";
+ RTC_LOG(LS_WARNING) << "Failed to parse header. Reserved bit set.";
return false;
}
} else {
- RTC_LOG(LS_WARNING) << "Failed to get QP. 4:4:4 color not supported in "
- "profile 0 or 2.";
+ RTC_LOG(LS_WARNING) << "Failed to parse header. 4:4:4 color not supported"
+ " in profile 0 or 2.";
return false;
}
}
@@ -93,24 +118,38 @@ bool Vp9ReadColorConfig(rtc::BitBuffer* br, uint8_t profile) {
return true;
}
-bool Vp9ReadFrameSize(rtc::BitBuffer* br) {
- // 2 bytes: frame width.
- // 2 bytes: frame height.
- return br->ConsumeBytes(4);
+bool Vp9ReadFrameSize(rtc::BitBuffer* br, FrameInfo* frame_info) {
+ // 16 bits: frame width - 1.
+ uint16_t frame_width_minus_one;
+ RETURN_FALSE_IF_ERROR(br->ReadUInt16(&frame_width_minus_one));
+ // 16 bits: frame height - 1.
+ uint16_t frame_height_minus_one;
+ RETURN_FALSE_IF_ERROR(br->ReadUInt16(&frame_height_minus_one));
+ frame_info->frame_width = frame_width_minus_one + 1;
+ frame_info->frame_height = frame_height_minus_one + 1;
+ return true;
}
-bool Vp9ReadRenderSize(rtc::BitBuffer* br) {
- uint32_t bit;
- RETURN_FALSE_IF_ERROR(br->ReadBits(&bit, 1));
- if (bit) {
- // 2 bytes: render width.
- // 2 bytes: render height.
- RETURN_FALSE_IF_ERROR(br->ConsumeBytes(4));
+bool Vp9ReadRenderSize(rtc::BitBuffer* br, FrameInfo* frame_info) {
+ uint32_t render_and_frame_size_different;
+ RETURN_FALSE_IF_ERROR(br->ReadBits(&render_and_frame_size_different, 1));
+ if (render_and_frame_size_different) {
+ // 16 bits: render width - 1.
+ uint16_t render_width_minus_one;
+ RETURN_FALSE_IF_ERROR(br->ReadUInt16(&render_width_minus_one));
+ // 16 bits: render height - 1.
+ uint16_t render_height_minus_one;
+ RETURN_FALSE_IF_ERROR(br->ReadUInt16(&render_height_minus_one));
+ frame_info->render_width = render_width_minus_one + 1;
+ frame_info->render_height = render_height_minus_one + 1;
+ } else {
+ frame_info->render_width = frame_info->frame_width;
+ frame_info->render_height = frame_info->frame_height;
}
return true;
}
-bool Vp9ReadFrameSizeFromRefs(rtc::BitBuffer* br) {
+bool Vp9ReadFrameSizeFromRefs(rtc::BitBuffer* br, FrameInfo* frame_info) {
uint32_t found_ref = 0;
for (size_t i = 0; i < kVp9NumRefsPerFrame; i++) {
// Size in refs.
@@ -120,11 +159,11 @@ bool Vp9ReadFrameSizeFromRefs(rtc::BitBuffer* br) {
}
if (!found_ref) {
- if (!Vp9ReadFrameSize(br)) {
+ if (!Vp9ReadFrameSize(br, frame_info)) {
return false;
}
}
- return Vp9ReadRenderSize(br);
+ return Vp9ReadRenderSize(br, frame_info);
}
bool Vp9ReadInterpolationFilter(rtc::BitBuffer* br) {
@@ -166,14 +205,14 @@ bool Vp9ReadLoopfilter(rtc::BitBuffer* br) {
}
} // namespace
-bool GetQp(const uint8_t* buf, size_t length, int* qp) {
+bool Parse(const uint8_t* buf, size_t length, int* qp, FrameInfo* frame_info) {
rtc::BitBuffer br(buf, length);
// Frame marker.
uint32_t frame_marker;
RETURN_FALSE_IF_ERROR(br.ReadBits(&frame_marker, 2));
if (frame_marker != 0x2) {
- RTC_LOG(LS_WARNING) << "Failed to get QP. Frame marker should be 2.";
+ RTC_LOG(LS_WARNING) << "Failed to parse header. Frame marker should be 2.";
return false;
}
@@ -181,6 +220,7 @@ bool GetQp(const uint8_t* buf, size_t length, int* qp) {
uint8_t profile;
if (!Vp9ReadProfile(&br, &profile))
return false;
+ frame_info->profile = profile;
// Show existing frame.
uint32_t show_existing_frame;
@@ -195,18 +235,21 @@ bool GetQp(const uint8_t* buf, size_t length, int* qp) {
RETURN_FALSE_IF_ERROR(br.ReadBits(&frame_type, 1));
RETURN_FALSE_IF_ERROR(br.ReadBits(&show_frame, 1));
RETURN_FALSE_IF_ERROR(br.ReadBits(&error_resilient, 1));
+ frame_info->show_frame = show_frame;
+ frame_info->error_resilient = error_resilient;
- if (!frame_type) {
+ if (frame_type == 0) {
+ // Key-frame.
if (!Vp9ReadSyncCode(&br))
return false;
- if (!Vp9ReadColorConfig(&br, profile))
+ if (!Vp9ReadColorConfig(&br, profile, frame_info))
return false;
- if (!Vp9ReadFrameSize(&br))
+ if (!Vp9ReadFrameSize(&br, frame_info))
return false;
- if (!Vp9ReadRenderSize(&br))
+ if (!Vp9ReadRenderSize(&br, frame_info))
return false;
-
} else {
+ // Non-keyframe.
uint32_t intra_only = 0;
if (!show_frame)
RETURN_FALSE_IF_ERROR(br.ReadBits(&intra_only, 1));
@@ -218,14 +261,14 @@ bool GetQp(const uint8_t* buf, size_t length, int* qp) {
return false;
if (profile > 0) {
- if (!Vp9ReadColorConfig(&br, profile))
+ if (!Vp9ReadColorConfig(&br, profile, frame_info))
return false;
}
// Refresh frame flags.
RETURN_FALSE_IF_ERROR(br.ConsumeBits(8));
- if (!Vp9ReadFrameSize(&br))
+ if (!Vp9ReadFrameSize(&br, frame_info))
return false;
- if (!Vp9ReadRenderSize(&br))
+ if (!Vp9ReadRenderSize(&br, frame_info))
return false;
} else {
// Refresh frame flags.
@@ -237,7 +280,7 @@ bool GetQp(const uint8_t* buf, size_t length, int* qp) {
RETURN_FALSE_IF_ERROR(br.ConsumeBits(4));
}
- if (!Vp9ReadFrameSizeFromRefs(&br))
+ if (!Vp9ReadFrameSizeFromRefs(&br, frame_info))
return false;
// Allow high precision mv.
@@ -267,6 +310,20 @@ bool GetQp(const uint8_t* buf, size_t length, int* qp) {
return true;
}
-} // namespace vp9
+bool GetQp(const uint8_t* buf, size_t length, int* qp) {
+ FrameInfo frame_info;
+ return Parse(buf, length, qp, &frame_info);
+}
+absl::optional<FrameInfo> ParseIntraFrameInfo(const uint8_t* buf,
+ size_t length) {
+ int qp = 0;
+ FrameInfo frame_info;
+ if (Parse(buf, length, &qp, &frame_info) && frame_info.frame_width > 0) {
+ return frame_info;
+ }
+ return absl::nullopt;
+}
+
+} // namespace vp9
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/utility/vp9_uncompressed_header_parser.h b/chromium/third_party/webrtc/modules/video_coding/utility/vp9_uncompressed_header_parser.h
index 69e8de87df0..a7f04670d22 100644
--- a/chromium/third_party/webrtc/modules/video_coding/utility/vp9_uncompressed_header_parser.h
+++ b/chromium/third_party/webrtc/modules/video_coding/utility/vp9_uncompressed_header_parser.h
@@ -13,6 +13,7 @@
#include <stddef.h>
#include <stdint.h>
+#include "absl/types/optional.h"
namespace webrtc {
@@ -22,6 +23,65 @@ namespace vp9 {
// Returns true on success, false otherwise.
bool GetQp(const uint8_t* buf, size_t length, int* qp);
+// Bit depth per channel. Support varies by profile.
+enum class BitDept : uint8_t {
+ k8Bit = 8,
+ k10Bit = 10,
+ k12Bit = 12,
+};
+
+enum class ColorSpace : uint8_t {
+ CS_UNKNOWN = 0, // Unknown (in this case the color space must be signaled
+ // outside the VP9 bitstream).
+ CS_BT_601 = 1, // Rec. ITU-R BT.601-7
+ CS_BT_709 = 2, // Rec. ITU-R BT.709-6
+ CS_SMPTE_170 = 3, // SMPTE-170
+ CS_SMPTE_240 = 4, // SMPTE-240
+ CS_BT_2020 = 5, // Rec. ITU-R BT.2020-2
+ CS_RESERVED = 6, // Reserved
+ CS_RGB = 7, // sRGB (IEC 61966-2-1)
+};
+
+enum class ColorRange {
+ kStudio, // Studio swing:
+ // For BitDepth equals 8:
+ // Y is between 16 and 235 inclusive.
+ // U and V are between 16 and 240 inclusive.
+ // For BitDepth equals 10:
+ // Y is between 64 and 940 inclusive.
+ // U and V are between 64 and 960 inclusive.
+ // For BitDepth equals 12:
+ // Y is between 256 and 3760.
+ // U and V are between 256 and 3840 inclusive.
+ kFull // Full swing; no restriction on Y, U, V values.
+};
+
+enum class YuvSubsampling {
+ k444,
+ k440,
+ k422,
+ k420,
+};
+
+struct FrameInfo {
+ int profile = 0; // Profile 0-3 are valid.
+ bool show_frame = false;
+ bool error_resilient = false;
+ BitDept bit_detph = BitDept::k8Bit;
+ ColorSpace color_space = ColorSpace::CS_UNKNOWN;
+ ColorRange color_range;
+ YuvSubsampling sub_sampling;
+ int frame_width = 0;
+ int frame_height = 0;
+ int render_width = 0;
+ int render_height = 0;
+};
+
+// Parses frame information for a VP9 key-frame or all-intra frame from a
+// bitstream. Returns nullopt on failure or if not a key-frame.
+absl::optional<FrameInfo> ParseIntraFrameInfo(const uint8_t* buf,
+ size_t length);
+
} // namespace vp9
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/video_receiver_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/video_receiver_unittest.cc
index 363838b846e..2872c8d2a9c 100644
--- a/chromium/third_party/webrtc/modules/video_coding/video_receiver_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/video_receiver_unittest.cc
@@ -26,8 +26,10 @@ namespace {
class MockPacketRequestCallback : public VCMPacketRequestCallback {
public:
- MOCK_METHOD2(ResendPackets,
- int32_t(const uint16_t* sequenceNumbers, uint16_t length));
+ MOCK_METHOD(int32_t,
+ ResendPackets,
+ (const uint16_t* sequenceNumbers, uint16_t length),
+ (override));
};
class MockVCMReceiveCallback : public VCMReceiveCallback {
@@ -35,11 +37,12 @@ class MockVCMReceiveCallback : public VCMReceiveCallback {
MockVCMReceiveCallback() {}
virtual ~MockVCMReceiveCallback() {}
- MOCK_METHOD4(
- FrameToRender,
- int32_t(VideoFrame&, absl::optional<uint8_t>, int32_t, VideoContentType));
- MOCK_METHOD1(OnIncomingPayloadType, void(int));
- MOCK_METHOD1(OnDecoderImplementationName, void(const char*));
+ MOCK_METHOD(int32_t,
+ FrameToRender,
+ (VideoFrame&, absl::optional<uint8_t>, int32_t, VideoContentType),
+ (override));
+ MOCK_METHOD(void, OnIncomingPayloadType, (int), (override));
+ MOCK_METHOD(void, OnDecoderImplementationName, (const char*), (override));
};
class TestVideoReceiver : public ::testing::Test {
diff --git a/chromium/third_party/webrtc/p2p/BUILD.gn b/chromium/third_party/webrtc/p2p/BUILD.gn
index ae49deb264d..98680f62d2b 100644
--- a/chromium/third_party/webrtc/p2p/BUILD.gn
+++ b/chromium/third_party/webrtc/p2p/BUILD.gn
@@ -99,7 +99,6 @@ rtc_library("rtc_p2p") {
"../rtc_base:checks",
"../rtc_base:rtc_numerics",
"../rtc_base/experiments:field_trial_parser",
- "//third_party/abseil-cpp/absl/memory",
# Needed by pseudo_tcp, which should move to a separate target.
"../rtc_base:safe_minmax",
@@ -107,11 +106,15 @@ rtc_library("rtc_p2p") {
"../rtc_base/memory:fifo_buffer",
"../rtc_base/network:sent_packet",
"../rtc_base/system:rtc_export",
+ "../rtc_base/task_utils:to_queued_task",
"../rtc_base/third_party/base64",
"../rtc_base/third_party/sigslot",
"../system_wrappers:field_trial",
"../system_wrappers:metrics",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -127,6 +130,8 @@ if (rtc_include_tests) {
"../api:libjingle_peerconnection_api",
"../rtc_base",
"../rtc_base:rtc_base_approved",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -169,6 +174,8 @@ if (rtc_include_tests) {
"../rtc_base:rtc_base_tests_utils",
"../rtc_base/third_party/sigslot",
"../test:test_support",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -221,8 +228,11 @@ if (rtc_include_tests) {
"../test:field_trial",
"../test:test_support",
"//testing/gtest",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/memory",
+ "//third_party/abseil-cpp/absl/types:optional",
]
}
}
@@ -243,8 +253,8 @@ rtc_library("p2p_server_utils") {
"../rtc_base:checks",
"../rtc_base:rtc_base_tests_utils",
"../rtc_base/third_party/sigslot",
- "//third_party/abseil-cpp/absl/algorithm:container",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container" ]
}
rtc_library("libstunprober") {
diff --git a/chromium/third_party/webrtc/p2p/base/basic_async_resolver_factory_unittest.cc b/chromium/third_party/webrtc/p2p/base/basic_async_resolver_factory_unittest.cc
index 0c21c682fbe..8242146bae6 100644
--- a/chromium/third_party/webrtc/p2p/base/basic_async_resolver_factory_unittest.cc
+++ b/chromium/third_party/webrtc/p2p/base/basic_async_resolver_factory_unittest.cc
@@ -30,6 +30,7 @@ class BasicAsyncResolverFactoryTest : public ::testing::Test,
rtc::SocketAddress address("", 0);
resolver->Start(address);
ASSERT_TRUE_WAIT(address_resolved_, 10000 /*ms*/);
+ resolver->Destroy(false);
}
void SetAddressResolved(rtc::AsyncResolverInterface* resolver) {
diff --git a/chromium/third_party/webrtc/p2p/base/connection.h b/chromium/third_party/webrtc/p2p/base/connection.h
index 4b71a7da556..7c468bcff8e 100644
--- a/chromium/third_party/webrtc/p2p/base/connection.h
+++ b/chromium/third_party/webrtc/p2p/base/connection.h
@@ -71,7 +71,7 @@ class ConnectionRequest : public StunRequest {
// Represents a communication link between a port on the local client and a
// port on the remote client.
class Connection : public CandidatePairInterface,
- public rtc::MessageHandler,
+ public rtc::MessageHandlerAutoCleanup,
public sigslot::has_slots<> {
public:
struct SentPing {
diff --git a/chromium/third_party/webrtc/p2p/base/ice_controller_interface.h b/chromium/third_party/webrtc/p2p/base/ice_controller_interface.h
index cc4cf4d0d78..d5dc29e7826 100644
--- a/chromium/third_party/webrtc/p2p/base/ice_controller_interface.h
+++ b/chromium/third_party/webrtc/p2p/base/ice_controller_interface.h
@@ -51,12 +51,20 @@ struct IceControllerEvent {
// - which connection to ping
// - which connection to use
// - which connection to prune
+// - which connection to forget learned state on
//
-// P2PTransportChannel creates a |Connection| and adds a const pointer
-// to the IceController using |AddConnection|, i.e the IceController
-// should not call any non-const methods on a Connection.
+// The P2PTransportChannel owns (creates and destroys) Connections,
+// but P2PTransportChannel gives const pointers to the IceController using
+// |AddConnection|, i.e the IceController should not call any non-const methods
+// on a Connection but signal back in the interface if any mutable function
+// shall be called.
//
-// The IceController shall keeps track of all connections added
+// Currently these are limited to:
+// Connection::Ping - returned in PingResult
+// Connection::Prune - returned in PruneConnections
+// Connection::ForgetLearnedState - returned in SwitchResult
+//
+// The IceController shall keep track of all connections added
// (and not destroyed) and give them back using the connections()-function-
//
// When a Connection gets destroyed
@@ -71,6 +79,9 @@ class IceControllerInterface {
// An optional recheck event for when a Switch() should be attempted again.
absl::optional<IceControllerEvent> recheck_event;
+
+ // A vector with connections to run ForgetLearnedState on.
+ std::vector<const Connection*> connections_to_forget_state_on;
};
// This represents the result of a call to SelectConnectionToPing.
diff --git a/chromium/third_party/webrtc/p2p/base/mock_async_resolver.h b/chromium/third_party/webrtc/p2p/base/mock_async_resolver.h
index 7d3be5b0b04..8bc0eb9cff4 100644
--- a/chromium/third_party/webrtc/p2p/base/mock_async_resolver.h
+++ b/chromium/third_party/webrtc/p2p/base/mock_async_resolver.h
@@ -29,14 +29,17 @@ class MockAsyncResolver : public AsyncResolverInterface {
}
~MockAsyncResolver() = default;
- MOCK_METHOD1(Start, void(const rtc::SocketAddress&));
- MOCK_CONST_METHOD2(GetResolvedAddress, bool(int family, SocketAddress* addr));
- MOCK_CONST_METHOD0(GetError, int());
+ MOCK_METHOD(void, Start, (const rtc::SocketAddress&), (override));
+ MOCK_METHOD(bool,
+ GetResolvedAddress,
+ (int family, SocketAddress* addr),
+ (const, override));
+ MOCK_METHOD(int, GetError, (), (const, override));
// Note that this won't delete the object like AsyncResolverInterface says in
// order to avoid sanitizer failures caused by this being a synchronous
// implementation. The test code should delete the object instead.
- MOCK_METHOD1(Destroy, void(bool));
+ MOCK_METHOD(void, Destroy, (bool), (override));
};
} // namespace rtc
@@ -45,7 +48,7 @@ namespace webrtc {
class MockAsyncResolverFactory : public AsyncResolverFactory {
public:
- MOCK_METHOD0(Create, rtc::AsyncResolverInterface*());
+ MOCK_METHOD(rtc::AsyncResolverInterface*, Create, (), (override));
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/p2p/base/mock_ice_transport.h b/chromium/third_party/webrtc/p2p/base/mock_ice_transport.h
index 1436cacb50d..ef9f1b18ea1 100644
--- a/chromium/third_party/webrtc/p2p/base/mock_ice_transport.h
+++ b/chromium/third_party/webrtc/p2p/base/mock_ice_transport.h
@@ -32,15 +32,20 @@ class MockIceTransport : public IceTransportInternal {
SignalWritableState(this);
}
- MOCK_METHOD4(SendPacket,
- int(const char* data,
- size_t len,
- const rtc::PacketOptions& options,
- int flags));
- MOCK_METHOD2(SetOption, int(rtc::Socket::Option opt, int value));
- MOCK_METHOD0(GetError, int());
- MOCK_CONST_METHOD0(GetIceRole, cricket::IceRole());
- MOCK_METHOD1(GetStats, bool(cricket::IceTransportStats* ice_transport_stats));
+ MOCK_METHOD(int,
+ SendPacket,
+ (const char* data,
+ size_t len,
+ const rtc::PacketOptions& options,
+ int flags),
+ (override));
+ MOCK_METHOD(int, SetOption, (rtc::Socket::Option opt, int value), (override));
+ MOCK_METHOD(int, GetError, (), (override));
+ MOCK_METHOD(cricket::IceRole, GetIceRole, (), (const, override));
+ MOCK_METHOD(bool,
+ GetStats,
+ (cricket::IceTransportStats * ice_transport_stats),
+ (override));
IceTransportState GetState() const override {
return IceTransportState::STATE_INIT;
diff --git a/chromium/third_party/webrtc/p2p/base/p2p_transport_channel.cc b/chromium/third_party/webrtc/p2p/base/p2p_transport_channel.cc
index 73d12c77415..6f0df04150d 100644
--- a/chromium/third_party/webrtc/p2p/base/p2p_transport_channel.cc
+++ b/chromium/third_party/webrtc/p2p/base/p2p_transport_channel.cc
@@ -30,6 +30,7 @@
#include "rtc_base/net_helper.h"
#include "rtc_base/net_helpers.h"
#include "rtc_base/string_encode.h"
+#include "rtc_base/task_utils/to_queued_task.h"
#include "rtc_base/time_utils.h"
#include "system_wrappers/include/field_trial.h"
#include "system_wrappers/include/metrics.h"
@@ -274,8 +275,7 @@ bool P2PTransportChannel::MaybeSwitchSelectedConnection(
if (result.connection.has_value()) {
RTC_LOG(LS_INFO) << "Switching selected connection due to: "
<< reason.ToString();
- SwitchSelectedConnection(const_cast<Connection*>(*result.connection),
- reason);
+ SwitchSelectedConnection(FromIceController(*result.connection), reason);
}
if (result.recheck_event.has_value()) {
@@ -290,6 +290,10 @@ bool P2PTransportChannel::MaybeSwitchSelectedConnection(
result.recheck_event->recheck_delay_ms);
}
+ for (const auto* con : result.connections_to_forget_state_on) {
+ FromIceController(con)->ForgetLearnedState();
+ }
+
return result.connection.has_value();
}
@@ -1194,7 +1198,12 @@ void P2PTransportChannel::AddRemoteCandidate(const Candidate& candidate) {
}
if (new_remote_candidate.address().IsUnresolvedIP()) {
- ResolveHostnameCandidate(new_remote_candidate);
+ // Don't do DNS lookups if the IceTransportPolicy is "none" or "relay".
+ bool sharing_host = ((allocator_->candidate_filter() & CF_HOST) != 0);
+ bool sharing_stun = ((allocator_->candidate_filter() & CF_REFLEXIVE) != 0);
+ if (sharing_host || sharing_stun) {
+ ResolveHostnameCandidate(new_remote_candidate);
+ }
return;
}
@@ -1223,9 +1232,8 @@ void P2PTransportChannel::OnCandidateResolved(
Candidate candidate = p->candidate_;
resolvers_.erase(p);
AddRemoteCandidateWithResolver(candidate, resolver);
- invoker_.AsyncInvoke<void>(
- RTC_FROM_HERE, thread(),
- rtc::Bind(&rtc::AsyncResolverInterface::Destroy, resolver, false));
+ thread()->PostTask(
+ webrtc::ToQueuedTask([] {}, [resolver] { resolver->Destroy(false); }));
}
void P2PTransportChannel::AddRemoteCandidateWithResolver(
@@ -1398,7 +1406,7 @@ bool P2PTransportChannel::CreateConnection(PortInterface* port,
return false;
}
-bool P2PTransportChannel::FindConnection(Connection* connection) const {
+bool P2PTransportChannel::FindConnection(const Connection* connection) const {
RTC_DCHECK_RUN_ON(network_thread_);
return absl::c_linear_search(connections(), connection);
}
@@ -1704,7 +1712,7 @@ void P2PTransportChannel::PruneConnections() {
std::vector<const Connection*> connections_to_prune =
ice_controller_->PruneConnections();
for (const Connection* conn : connections_to_prune) {
- const_cast<Connection*>(conn)->Prune();
+ FromIceController(conn)->Prune();
}
}
@@ -1907,11 +1915,10 @@ void P2PTransportChannel::CheckAndPing() {
UpdateConnectionStates();
auto result = ice_controller_->SelectConnectionToPing(last_ping_sent_ms_);
- Connection* conn =
- const_cast<Connection*>(result.connection.value_or(nullptr));
int delay = result.recheck_delay_ms;
- if (conn) {
+ if (result.connection.value_or(nullptr)) {
+ Connection* conn = FromIceController(*result.connection);
PingConnection(conn);
MarkConnectionPinged(conn);
}
@@ -1924,7 +1931,12 @@ void P2PTransportChannel::CheckAndPing() {
// This method is only for unit testing.
Connection* P2PTransportChannel::FindNextPingableConnection() {
RTC_DCHECK_RUN_ON(network_thread_);
- return const_cast<Connection*>(ice_controller_->FindNextPingableConnection());
+ auto* conn = ice_controller_->FindNextPingableConnection();
+ if (conn) {
+ return FromIceController(conn);
+ } else {
+ return nullptr;
+ }
}
// A connection is considered a backup connection if the channel state
diff --git a/chromium/third_party/webrtc/p2p/base/p2p_transport_channel.h b/chromium/third_party/webrtc/p2p/base/p2p_transport_channel.h
index 3d6c86f0319..4f891beb1eb 100644
--- a/chromium/third_party/webrtc/p2p/base/p2p_transport_channel.h
+++ b/chromium/third_party/webrtc/p2p/base/p2p_transport_channel.h
@@ -245,7 +245,7 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal {
bool CreateConnection(PortInterface* port,
const Candidate& remote_candidate,
PortInterface* origin_port);
- bool FindConnection(Connection* connection) const;
+ bool FindConnection(const Connection* connection) const;
uint32_t GetRemoteCandidateGeneration(const Candidate& candidate);
bool IsDuplicateRemoteCandidate(const Candidate& candidate);
@@ -348,6 +348,16 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal {
// 2. Peer-reflexive remote candidates.
Candidate SanitizeRemoteCandidate(const Candidate& c) const;
+ // Cast a Connection returned from IceController and verify that it exists.
+ // (P2P owns all Connections, and only gives const pointers to IceController,
+ // see IceControllerInterface).
+ Connection* FromIceController(const Connection* conn) {
+ // Verify that IceController does not return a connection
+ // that we have destroyed.
+ RTC_DCHECK(FindConnection(conn));
+ return const_cast<Connection*>(conn);
+ }
+
std::string transport_name_ RTC_GUARDED_BY(network_thread_);
int component_ RTC_GUARDED_BY(network_thread_);
PortAllocator* allocator_ RTC_GUARDED_BY(network_thread_);
diff --git a/chromium/third_party/webrtc/p2p/base/p2p_transport_channel_unittest.cc b/chromium/third_party/webrtc/p2p/base/p2p_transport_channel_unittest.cc
index ce78335fd94..721ee1589ce 100644
--- a/chromium/third_party/webrtc/p2p/base/p2p_transport_channel_unittest.cc
+++ b/chromium/third_party/webrtc/p2p/base/p2p_transport_channel_unittest.cc
@@ -177,14 +177,14 @@ cricket::BasicPortAllocator* CreateBasicPortAllocator(
class MockIceControllerFactory : public cricket::IceControllerFactoryInterface {
public:
- ~MockIceControllerFactory() = default;
+ ~MockIceControllerFactory() override = default;
std::unique_ptr<cricket::IceControllerInterface> Create(
- const cricket::IceControllerFactoryArgs& args) {
+ const cricket::IceControllerFactoryArgs& args) override {
RecordIceControllerCreated();
return std::make_unique<cricket::BasicIceController>(args);
}
- MOCK_METHOD0(RecordIceControllerCreated, void());
+ MOCK_METHOD(void, RecordIceControllerCreated, ());
};
} // namespace
@@ -207,7 +207,7 @@ namespace cricket {
// Note that this class is a base class for use by other tests, who will provide
// specialized test behavior.
class P2PTransportChannelTestBase : public ::testing::Test,
- public rtc::MessageHandler,
+ public rtc::MessageHandlerAutoCleanup,
public sigslot::has_slots<> {
public:
P2PTransportChannelTestBase()
@@ -1480,7 +1480,7 @@ TEST_F(P2PTransportChannelTest, PeerReflexiveCandidateBeforeSignaling) {
PauseCandidates(1);
// Wait until the callee becomes writable to make sure that a ping request is
- // received by the caller before his remote ICE credentials are set.
+ // received by the caller before their remote ICE credentials are set.
ASSERT_TRUE_WAIT(ep2_ch1()->selected_connection() != nullptr, kMediumTimeout);
// Add two sets of remote ICE credentials, so that the ones used by the
// candidate will be generation 1 instead of 0.
@@ -1588,7 +1588,7 @@ TEST_F(P2PTransportChannelTest, PeerReflexiveCandidateBeforeSignalingWithNAT) {
PauseCandidates(1);
// Wait until the callee becomes writable to make sure that a ping request is
- // received by the caller before his remote ICE credentials are set.
+ // received by the caller before their remote ICE credentials are set.
ASSERT_TRUE_WAIT(ep2_ch1()->selected_connection() != nullptr, kMediumTimeout);
// Add two sets of remote ICE credentials, so that the ones used by the
// candidate will be generation 1 instead of 0.
@@ -4844,10 +4844,13 @@ TEST_F(P2PTransportChannelTest,
// address after the resolution completes.
TEST_F(P2PTransportChannelTest,
PeerReflexiveCandidateDuringResolvingHostCandidateWithMdnsName) {
- NiceMock<rtc::MockAsyncResolver> mock_async_resolver;
+ auto mock_async_resolver = new NiceMock<rtc::MockAsyncResolver>();
+ ON_CALL(*mock_async_resolver, Destroy).WillByDefault([mock_async_resolver] {
+ delete mock_async_resolver;
+ });
webrtc::MockAsyncResolverFactory mock_async_resolver_factory;
EXPECT_CALL(mock_async_resolver_factory, Create())
- .WillOnce(Return(&mock_async_resolver));
+ .WillOnce(Return(mock_async_resolver));
// ep1 and ep2 will only gather host candidates with addresses
// kPublicAddrs[0] and kPublicAddrs[1], respectively.
@@ -4874,7 +4877,7 @@ TEST_F(P2PTransportChannelTest,
bool mock_async_resolver_started = false;
// Not signaling done yet, and only make sure we are in the process of
// resolution.
- EXPECT_CALL(mock_async_resolver, Start(_))
+ EXPECT_CALL(*mock_async_resolver, Start(_))
.WillOnce(InvokeWithoutArgs([&mock_async_resolver_started]() {
mock_async_resolver_started = true;
}));
@@ -4887,7 +4890,7 @@ TEST_F(P2PTransportChannelTest,
ResumeCandidates(1);
ASSERT_TRUE_WAIT(ep1_ch1()->selected_connection() != nullptr, kMediumTimeout);
// Let the mock resolver of ep2 receives the correct resolution.
- EXPECT_CALL(mock_async_resolver, GetResolvedAddress(_, _))
+ EXPECT_CALL(*mock_async_resolver, GetResolvedAddress(_, _))
.WillOnce(DoAll(SetArgPointee<1>(local_address), Return(true)));
// Upon receiving a ping from ep1, ep2 adds a prflx candidate from the
// unknown address and establishes a connection.
@@ -4899,7 +4902,7 @@ TEST_F(P2PTransportChannelTest,
ep2_ch1()->selected_connection()->remote_candidate().type());
// ep2 should also be able resolve the hostname candidate. The resolved remote
// host candidate should be merged with the prflx remote candidate.
- mock_async_resolver.SignalDone(&mock_async_resolver);
+ mock_async_resolver->SignalDone(mock_async_resolver);
EXPECT_EQ_WAIT(LOCAL_PORT_TYPE,
ep2_ch1()->selected_connection()->remote_candidate().type(),
kMediumTimeout);
@@ -5255,10 +5258,14 @@ TEST_F(P2PTransportChannelTest,
class MockMdnsResponder : public webrtc::MdnsResponderInterface {
public:
- MOCK_METHOD2(CreateNameForAddress,
- void(const rtc::IPAddress&, NameCreatedCallback));
- MOCK_METHOD2(RemoveNameForAddress,
- void(const rtc::IPAddress&, NameRemovedCallback));
+ MOCK_METHOD(void,
+ CreateNameForAddress,
+ (const rtc::IPAddress&, NameCreatedCallback),
+ (override));
+ MOCK_METHOD(void,
+ RemoveNameForAddress,
+ (const rtc::IPAddress&, NameRemovedCallback),
+ (override));
};
TEST_F(P2PTransportChannelTest,
@@ -5539,6 +5546,76 @@ TEST_F(P2PTransportChannelTest,
DestroyChannels();
}
+// Verify that things break unless
+// - both parties use the surface_ice_candidates_on_ice_transport_type_changed
+// - both parties loosen candidate filter at the same time (approx.).
+//
+// i.e surface_ice_candidates_on_ice_transport_type_changed requires
+// coordination outside of webrtc to function properly.
+TEST_F(P2PTransportChannelTest, SurfaceRequiresCoordination) {
+ webrtc::test::ScopedFieldTrials field_trials(
+ "WebRTC-IceFieldTrials/skip_relay_to_non_relay_connections:true/");
+ rtc::ScopedFakeClock clock;
+
+ ConfigureEndpoints(
+ OPEN, OPEN,
+ kDefaultPortAllocatorFlags | PORTALLOCATOR_ENABLE_SHARED_SOCKET,
+ kDefaultPortAllocatorFlags | PORTALLOCATOR_ENABLE_SHARED_SOCKET);
+ auto* ep1 = GetEndpoint(0);
+ auto* ep2 = GetEndpoint(1);
+ ep1->allocator_->SetCandidateFilter(CF_RELAY);
+ ep2->allocator_->SetCandidateFilter(CF_ALL);
+ // Enable continual gathering and also resurfacing gathered candidates upon
+ // the candidate filter changed in the ICE configuration.
+ IceConfig ice_config = CreateIceConfig(1000, GATHER_CONTINUALLY);
+ ice_config.surface_ice_candidates_on_ice_transport_type_changed = true;
+ // Pause candidates gathering so we can gather all types of candidates. See
+ // P2PTransportChannel::OnConnectionStateChange, where we would stop the
+ // gathering when we have a strongly connected candidate pair.
+ PauseCandidates(0);
+ PauseCandidates(1);
+ CreateChannels(ice_config, ice_config);
+
+ // On the caller we only have relay,
+ // on the callee we have host, srflx and relay.
+ EXPECT_TRUE_SIMULATED_WAIT(ep1->saved_candidates_.size() == 1u,
+ kDefaultTimeout, clock);
+ EXPECT_TRUE_SIMULATED_WAIT(ep2->saved_candidates_.size() == 3u,
+ kDefaultTimeout, clock);
+
+ ResumeCandidates(0);
+ ResumeCandidates(1);
+ ASSERT_TRUE_SIMULATED_WAIT(
+ ep1_ch1()->selected_connection() != nullptr &&
+ RELAY_PORT_TYPE ==
+ ep1_ch1()->selected_connection()->local_candidate().type() &&
+ ep2_ch1()->selected_connection() != nullptr &&
+ RELAY_PORT_TYPE ==
+ ep1_ch1()->selected_connection()->remote_candidate().type(),
+ kDefaultTimeout, clock);
+ ASSERT_TRUE_SIMULATED_WAIT(ep2_ch1()->selected_connection() != nullptr,
+ kDefaultTimeout, clock);
+
+ // Wait until the callee discards it's candidates
+ // since they don't manage to connect.
+ SIMULATED_WAIT(false, 300000, clock);
+
+ // And then loosen caller candidate filter.
+ ep1->allocator_->SetCandidateFilter(CF_ALL);
+
+ SIMULATED_WAIT(false, kDefaultTimeout, clock);
+
+ // No p2p connection will be made, it will remain on relay.
+ EXPECT_TRUE(ep1_ch1()->selected_connection() != nullptr &&
+ RELAY_PORT_TYPE ==
+ ep1_ch1()->selected_connection()->local_candidate().type() &&
+ ep2_ch1()->selected_connection() != nullptr &&
+ RELAY_PORT_TYPE ==
+ ep1_ch1()->selected_connection()->remote_candidate().type());
+
+ DestroyChannels();
+}
+
TEST_F(P2PTransportChannelPingTest, TestInitialSelectDampening0) {
webrtc::test::ScopedFieldTrials field_trials(
"WebRTC-IceFieldTrials/initial_select_dampening:0/");
@@ -5651,4 +5728,159 @@ TEST(P2PTransportChannel, InjectIceController) {
/* event_log = */ nullptr, &factory);
}
+class ForgetLearnedStateController : public cricket::BasicIceController {
+ public:
+ explicit ForgetLearnedStateController(
+ const cricket::IceControllerFactoryArgs& args)
+ : cricket::BasicIceController(args) {}
+
+ SwitchResult SortAndSwitchConnection(IceControllerEvent reason) override {
+ auto result = cricket::BasicIceController::SortAndSwitchConnection(reason);
+ if (forget_connnection_) {
+ result.connections_to_forget_state_on.push_back(forget_connnection_);
+ forget_connnection_ = nullptr;
+ }
+ result.recheck_event =
+ IceControllerEvent(IceControllerEvent::ICE_CONTROLLER_RECHECK);
+ result.recheck_event->recheck_delay_ms = 100;
+ return result;
+ }
+
+ void ForgetThisConnectionNextTimeSortAndSwitchConnectionIsCalled(
+ Connection* con) {
+ forget_connnection_ = con;
+ }
+
+ private:
+ Connection* forget_connnection_ = nullptr;
+};
+
+class ForgetLearnedStateControllerFactory
+ : public cricket::IceControllerFactoryInterface {
+ public:
+ std::unique_ptr<cricket::IceControllerInterface> Create(
+ const cricket::IceControllerFactoryArgs& args) override {
+ auto controller = std::make_unique<ForgetLearnedStateController>(args);
+ // Keep a pointer to allow modifying calls.
+ // Must not be used after the p2ptransportchannel has been destructed.
+ controller_ = controller.get();
+ return controller;
+ }
+ virtual ~ForgetLearnedStateControllerFactory() = default;
+
+ ForgetLearnedStateController* controller_;
+};
+
+TEST_F(P2PTransportChannelPingTest, TestForgetLearnedState) {
+ ForgetLearnedStateControllerFactory factory;
+ FakePortAllocator pa(rtc::Thread::Current(), nullptr);
+ P2PTransportChannel ch("ping sufficiently", 1, &pa, nullptr, nullptr,
+ &factory);
+ PrepareChannel(&ch);
+ ch.MaybeStartGathering();
+ ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 1));
+ ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "2.2.2.2", 2, 2));
+
+ Connection* conn1 = WaitForConnectionTo(&ch, "1.1.1.1", 1);
+ Connection* conn2 = WaitForConnectionTo(&ch, "2.2.2.2", 2);
+ ASSERT_TRUE(conn1 != nullptr);
+ ASSERT_TRUE(conn2 != nullptr);
+
+ // Wait for conn1 to be selected.
+ conn1->ReceivedPingResponse(LOW_RTT, "id");
+ EXPECT_EQ_WAIT(conn1, ch.selected_connection(), kMediumTimeout);
+
+ conn2->ReceivedPingResponse(LOW_RTT, "id");
+ EXPECT_TRUE(conn2->writable());
+
+ // Now let the ice controller signal to P2PTransportChannel that it
+ // should Forget conn2.
+ factory.controller_
+ ->ForgetThisConnectionNextTimeSortAndSwitchConnectionIsCalled(conn2);
+
+ // We don't have a mock Connection, so verify this by checking that it
+ // is no longer writable.
+ EXPECT_EQ_WAIT(false, conn2->writable(), kMediumTimeout);
+}
+
+TEST_F(P2PTransportChannelTest, DisableDnsLookupsWithTransportPolicyRelay) {
+ ConfigureEndpoints(OPEN, OPEN, kDefaultPortAllocatorFlags,
+ kDefaultPortAllocatorFlags);
+ auto* ep1 = GetEndpoint(0);
+ ep1->allocator_->SetCandidateFilter(CF_RELAY);
+
+ rtc::MockAsyncResolver mock_async_resolver;
+ webrtc::MockAsyncResolverFactory mock_async_resolver_factory;
+ ON_CALL(mock_async_resolver_factory, Create())
+ .WillByDefault(Return(&mock_async_resolver));
+ ep1->async_resolver_factory_ = &mock_async_resolver_factory;
+
+ bool lookup_started = false;
+ ON_CALL(mock_async_resolver, Start(_))
+ .WillByDefault(Assign(&lookup_started, true));
+
+ CreateChannels();
+
+ ep1_ch1()->AddRemoteCandidate(
+ CreateUdpCandidate(LOCAL_PORT_TYPE, "hostname.test", 1, 100));
+
+ EXPECT_FALSE(lookup_started);
+
+ DestroyChannels();
+}
+
+TEST_F(P2PTransportChannelTest, DisableDnsLookupsWithTransportPolicyNone) {
+ ConfigureEndpoints(OPEN, OPEN, kDefaultPortAllocatorFlags,
+ kDefaultPortAllocatorFlags);
+ auto* ep1 = GetEndpoint(0);
+ ep1->allocator_->SetCandidateFilter(CF_NONE);
+
+ rtc::MockAsyncResolver mock_async_resolver;
+ webrtc::MockAsyncResolverFactory mock_async_resolver_factory;
+ ON_CALL(mock_async_resolver_factory, Create())
+ .WillByDefault(Return(&mock_async_resolver));
+ ep1->async_resolver_factory_ = &mock_async_resolver_factory;
+
+ bool lookup_started = false;
+ ON_CALL(mock_async_resolver, Start(_))
+ .WillByDefault(Assign(&lookup_started, true));
+
+ CreateChannels();
+
+ ep1_ch1()->AddRemoteCandidate(
+ CreateUdpCandidate(LOCAL_PORT_TYPE, "hostname.test", 1, 100));
+
+ EXPECT_FALSE(lookup_started);
+
+ DestroyChannels();
+}
+
+TEST_F(P2PTransportChannelTest, EnableDnsLookupsWithTransportPolicyNoHost) {
+ ConfigureEndpoints(OPEN, OPEN, kDefaultPortAllocatorFlags,
+ kDefaultPortAllocatorFlags);
+ auto* ep1 = GetEndpoint(0);
+ ep1->allocator_->SetCandidateFilter(CF_ALL & ~CF_HOST);
+
+ rtc::MockAsyncResolver mock_async_resolver;
+ webrtc::MockAsyncResolverFactory mock_async_resolver_factory;
+ EXPECT_CALL(mock_async_resolver_factory, Create())
+ .WillOnce(Return(&mock_async_resolver));
+ EXPECT_CALL(mock_async_resolver, Destroy(_));
+
+ ep1->async_resolver_factory_ = &mock_async_resolver_factory;
+
+ bool lookup_started = false;
+ EXPECT_CALL(mock_async_resolver, Start(_))
+ .WillOnce(Assign(&lookup_started, true));
+
+ CreateChannels();
+
+ ep1_ch1()->AddRemoteCandidate(
+ CreateUdpCandidate(LOCAL_PORT_TYPE, "hostname.test", 1, 100));
+
+ EXPECT_TRUE(lookup_started);
+
+ DestroyChannels();
+}
+
} // namespace cricket
diff --git a/chromium/third_party/webrtc/p2p/base/port.h b/chromium/third_party/webrtc/p2p/base/port.h
index 893e80b20f4..7418a972659 100644
--- a/chromium/third_party/webrtc/p2p/base/port.h
+++ b/chromium/third_party/webrtc/p2p/base/port.h
@@ -158,7 +158,7 @@ typedef std::set<rtc::SocketAddress> ServerAddresses;
// connections to similar mechanisms of the other client. Subclasses of this
// one add support for specific mechanisms like local UDP ports.
class Port : public PortInterface,
- public rtc::MessageHandler,
+ public rtc::MessageHandlerAutoCleanup,
public sigslot::has_slots<> {
public:
// INIT: The state when a port is just created.
diff --git a/chromium/third_party/webrtc/p2p/base/pseudo_tcp_unittest.cc b/chromium/third_party/webrtc/p2p/base/pseudo_tcp_unittest.cc
index a7fc9b3e698..ecafec9fb64 100644
--- a/chromium/third_party/webrtc/p2p/base/pseudo_tcp_unittest.cc
+++ b/chromium/third_party/webrtc/p2p/base/pseudo_tcp_unittest.cc
@@ -44,7 +44,7 @@ class PseudoTcpForTest : public cricket::PseudoTcp {
};
class PseudoTcpTestBase : public ::testing::Test,
- public rtc::MessageHandler,
+ public rtc::MessageHandlerAutoCleanup,
public cricket::IPseudoTcpNotify {
public:
PseudoTcpTestBase()
diff --git a/chromium/third_party/webrtc/p2p/base/stun_port_unittest.cc b/chromium/third_party/webrtc/p2p/base/stun_port_unittest.cc
index dfc72362ce2..2804ac03aff 100644
--- a/chromium/third_party/webrtc/p2p/base/stun_port_unittest.cc
+++ b/chromium/third_party/webrtc/p2p/base/stun_port_unittest.cc
@@ -412,24 +412,29 @@ class MockAsyncPacketSocket : public rtc::AsyncPacketSocket {
public:
~MockAsyncPacketSocket() = default;
- MOCK_CONST_METHOD0(GetLocalAddress, SocketAddress());
- MOCK_CONST_METHOD0(GetRemoteAddress, SocketAddress());
- MOCK_METHOD3(Send,
- int(const void* pv,
- size_t cb,
- const rtc::PacketOptions& options));
-
- MOCK_METHOD4(SendTo,
- int(const void* pv,
- size_t cb,
- const SocketAddress& addr,
- const rtc::PacketOptions& options));
- MOCK_METHOD0(Close, int());
- MOCK_CONST_METHOD0(GetState, State());
- MOCK_METHOD2(GetOption, int(rtc::Socket::Option opt, int* value));
- MOCK_METHOD2(SetOption, int(rtc::Socket::Option opt, int value));
- MOCK_CONST_METHOD0(GetError, int());
- MOCK_METHOD1(SetError, void(int error));
+ MOCK_METHOD(SocketAddress, GetLocalAddress, (), (const, override));
+ MOCK_METHOD(SocketAddress, GetRemoteAddress, (), (const, override));
+ MOCK_METHOD(int,
+ Send,
+ (const void* pv, size_t cb, const rtc::PacketOptions& options),
+ (override));
+
+ MOCK_METHOD(int,
+ SendTo,
+ (const void* pv,
+ size_t cb,
+ const SocketAddress& addr,
+ const rtc::PacketOptions& options),
+ (override));
+ MOCK_METHOD(int, Close, (), (override));
+ MOCK_METHOD(State, GetState, (), (const, override));
+ MOCK_METHOD(int,
+ GetOption,
+ (rtc::Socket::Option opt, int* value),
+ (override));
+ MOCK_METHOD(int, SetOption, (rtc::Socket::Option opt, int value), (override));
+ MOCK_METHOD(int, GetError, (), (const, override));
+ MOCK_METHOD(void, SetError, (int error), (override));
};
// Test that outbound packets inherit the dscp value assigned to the socket.
diff --git a/chromium/third_party/webrtc/p2p/base/stun_request.h b/chromium/third_party/webrtc/p2p/base/stun_request.h
index d45376ea550..17569046151 100644
--- a/chromium/third_party/webrtc/p2p/base/stun_request.h
+++ b/chromium/third_party/webrtc/p2p/base/stun_request.h
@@ -85,7 +85,7 @@ class StunRequestManager {
// Represents an individual request to be sent. The STUN message can either be
// constructed beforehand or built on demand.
-class StunRequest : public rtc::MessageHandler {
+class StunRequest : public rtc::MessageHandlerAutoCleanup {
public:
StunRequest();
explicit StunRequest(StunMessage* request);
diff --git a/chromium/third_party/webrtc/p2p/base/transport_description.cc b/chromium/third_party/webrtc/p2p/base/transport_description.cc
index 729b4ae8c3d..96fb9597e04 100644
--- a/chromium/third_party/webrtc/p2p/base/transport_description.cc
+++ b/chromium/third_party/webrtc/p2p/base/transport_description.cc
@@ -172,8 +172,7 @@ TransportDescription::TransportDescription(const TransportDescription& from)
ice_pwd(from.ice_pwd),
ice_mode(from.ice_mode),
connection_role(from.connection_role),
- identity_fingerprint(CopyFingerprint(from.identity_fingerprint.get())),
- opaque_parameters(from.opaque_parameters) {}
+ identity_fingerprint(CopyFingerprint(from.identity_fingerprint.get())) {}
TransportDescription::~TransportDescription() = default;
@@ -190,7 +189,6 @@ TransportDescription& TransportDescription::operator=(
connection_role = from.connection_role;
identity_fingerprint.reset(CopyFingerprint(from.identity_fingerprint.get()));
- opaque_parameters = from.opaque_parameters;
return *this;
}
diff --git a/chromium/third_party/webrtc/p2p/base/transport_description.h b/chromium/third_party/webrtc/p2p/base/transport_description.h
index 1a458c95714..32fdb5c9b32 100644
--- a/chromium/third_party/webrtc/p2p/base/transport_description.h
+++ b/chromium/third_party/webrtc/p2p/base/transport_description.h
@@ -100,28 +100,6 @@ constexpr auto* ICE_OPTION_RENOMINATION = "renomination";
bool StringToConnectionRole(const std::string& role_str, ConnectionRole* role);
bool ConnectionRoleToString(const ConnectionRole& role, std::string* role_str);
-// Parameters for an opaque transport protocol which may be plugged into WebRTC.
-struct OpaqueTransportParameters {
- // Protocol used by this opaque transport. Two endpoints that support the
- // same protocol are expected to be able to understand the contents of each
- // others' |parameters| fields. If those parameters are compatible, the
- // endpoints are expected to use this transport protocol.
- std::string protocol;
-
- // Opaque parameters for this transport. These parameters are serialized in a
- // manner determined by the |protocol|. They can be parsed and understood by
- // the plugin that supports |protocol|.
- std::string parameters;
-
- bool operator==(const OpaqueTransportParameters& other) const {
- return protocol == other.protocol && parameters == other.parameters;
- }
-
- bool operator!=(const OpaqueTransportParameters& other) const {
- return !(*this == other);
- }
-};
-
struct TransportDescription {
TransportDescription();
TransportDescription(const std::vector<std::string>& transport_options,
@@ -168,7 +146,6 @@ struct TransportDescription {
ConnectionRole connection_role;
std::unique_ptr<rtc::SSLFingerprint> identity_fingerprint;
- absl::optional<OpaqueTransportParameters> opaque_parameters;
};
} // namespace cricket
diff --git a/chromium/third_party/webrtc/p2p/base/transport_description_factory.cc b/chromium/third_party/webrtc/p2p/base/transport_description_factory.cc
index 17152d1a047..5cce2ac09d6 100644
--- a/chromium/third_party/webrtc/p2p/base/transport_description_factory.cc
+++ b/chromium/third_party/webrtc/p2p/base/transport_description_factory.cc
@@ -55,8 +55,6 @@ std::unique_ptr<TransportDescription> TransportDescriptionFactory::CreateOffer(
}
}
- desc->opaque_parameters = options.opaque_parameters;
-
return desc;
}
@@ -110,13 +108,6 @@ std::unique_ptr<TransportDescription> TransportDescriptionFactory::CreateAnswer(
return NULL;
}
- // Answers may only attach opaque parameters if the offer contained them as
- // well. The answer's parameters may differ, and it's up to the opaque
- // transport implementation to decide if the difference is acceptable.
- if (offer->opaque_parameters && options.opaque_parameters) {
- desc->opaque_parameters = options.opaque_parameters;
- }
-
return desc;
}
diff --git a/chromium/third_party/webrtc/p2p/base/transport_description_factory.h b/chromium/third_party/webrtc/p2p/base/transport_description_factory.h
index d0813dc541e..c1656a0fac2 100644
--- a/chromium/third_party/webrtc/p2p/base/transport_description_factory.h
+++ b/chromium/third_party/webrtc/p2p/base/transport_description_factory.h
@@ -29,9 +29,6 @@ struct TransportOptions {
// If true, ICE renomination is supported and will be used if it is also
// supported by the remote side.
bool enable_ice_renomination = false;
-
- // Opaque parameters for plug-in transports.
- absl::optional<OpaqueTransportParameters> opaque_parameters;
};
// Creates transport descriptions according to the supplied configuration.
diff --git a/chromium/third_party/webrtc/p2p/base/transport_description_factory_unittest.cc b/chromium/third_party/webrtc/p2p/base/transport_description_factory_unittest.cc
index 8359ffc1c9d..f7675ae643f 100644
--- a/chromium/third_party/webrtc/p2p/base/transport_description_factory_unittest.cc
+++ b/chromium/third_party/webrtc/p2p/base/transport_description_factory_unittest.cc
@@ -26,7 +26,6 @@
#include "test/gmock.h"
#include "test/gtest.h"
-using cricket::OpaqueTransportParameters;
using cricket::TransportDescription;
using cricket::TransportDescriptionFactory;
using cricket::TransportOptions;
@@ -210,73 +209,6 @@ TEST_F(TransportDescriptionFactoryTest, TestOfferDtlsReofferDtls) {
CheckDesc(desc.get(), "", old_desc->ice_ufrag, old_desc->ice_pwd, digest_alg);
}
-TEST_F(TransportDescriptionFactoryTest, TestOfferOpaqueTransportParameters) {
- OpaqueTransportParameters params;
- params.protocol = "fake";
- params.parameters = "foobar";
-
- TransportOptions options;
- options.opaque_parameters = params;
-
- std::unique_ptr<TransportDescription> desc =
- f1_.CreateOffer(options, NULL, &ice_credentials_);
-
- CheckDesc(desc.get(), "", "", "", "");
- EXPECT_EQ(desc->opaque_parameters, params);
-}
-
-TEST_F(TransportDescriptionFactoryTest, TestAnswerOpaqueTransportParameters) {
- OpaqueTransportParameters params;
- params.protocol = "fake";
- params.parameters = "foobar";
-
- TransportOptions options;
- options.opaque_parameters = params;
-
- std::unique_ptr<TransportDescription> offer =
- f1_.CreateOffer(options, NULL, &ice_credentials_);
- std::unique_ptr<TransportDescription> answer =
- f2_.CreateAnswer(offer.get(), options, true, NULL, &ice_credentials_);
-
- CheckDesc(answer.get(), "", "", "", "");
- EXPECT_EQ(answer->opaque_parameters, params);
-}
-
-TEST_F(TransportDescriptionFactoryTest, TestAnswerNoOpaqueTransportParameters) {
- OpaqueTransportParameters params;
- params.protocol = "fake";
- params.parameters = "foobar";
-
- TransportOptions options;
- options.opaque_parameters = params;
-
- std::unique_ptr<TransportDescription> offer =
- f1_.CreateOffer(options, NULL, &ice_credentials_);
- std::unique_ptr<TransportDescription> answer = f2_.CreateAnswer(
- offer.get(), TransportOptions(), true, NULL, &ice_credentials_);
-
- CheckDesc(answer.get(), "", "", "", "");
- EXPECT_EQ(answer->opaque_parameters, absl::nullopt);
-}
-
-TEST_F(TransportDescriptionFactoryTest,
- TestAnswerNoOpaqueTransportParametersInOffer) {
- std::unique_ptr<TransportDescription> offer =
- f1_.CreateOffer(TransportOptions(), NULL, &ice_credentials_);
-
- OpaqueTransportParameters params;
- params.protocol = "fake";
- params.parameters = "foobar";
-
- TransportOptions options;
- options.opaque_parameters = params;
- std::unique_ptr<TransportDescription> answer =
- f2_.CreateAnswer(offer.get(), options, true, NULL, &ice_credentials_);
-
- CheckDesc(answer.get(), "", "", "", "");
- EXPECT_EQ(answer->opaque_parameters, absl::nullopt);
-}
-
TEST_F(TransportDescriptionFactoryTest, TestAnswerDefault) {
std::unique_ptr<TransportDescription> offer =
f1_.CreateOffer(TransportOptions(), NULL, &ice_credentials_);
diff --git a/chromium/third_party/webrtc/p2p/base/turn_port.cc b/chromium/third_party/webrtc/p2p/base/turn_port.cc
index 2e8024dcb61..4d39f207b46 100644
--- a/chromium/third_party/webrtc/p2p/base/turn_port.cc
+++ b/chromium/third_party/webrtc/p2p/base/turn_port.cc
@@ -367,7 +367,7 @@ void TurnPort::PrepareAddress() {
<< server_address_.address.ToSensitiveString();
if (!CreateTurnClientSocket()) {
RTC_LOG(LS_ERROR) << "Failed to create TURN client socket";
- OnAllocateError(STUN_ERROR_GLOBAL_FAILURE,
+ OnAllocateError(SERVER_NOT_REACHABLE_ERROR,
"Failed to create TURN client socket.");
return;
}
@@ -883,12 +883,17 @@ void TurnPort::OnAllocateError(int error_code, const std::string& reason) {
// port initialization. This way it will not be blocking other port
// creation.
thread()->Post(RTC_FROM_HERE, this, MSG_ALLOCATE_ERROR);
+ std::string address = GetLocalAddress().HostAsSensitiveURIString();
+ int port = GetLocalAddress().port();
+ if (server_address_.proto == PROTO_TCP &&
+ server_address_.address.IsPrivateIP()) {
+ address.clear();
+ port = 0;
+ }
SignalCandidateError(
- this,
- IceCandidateErrorEvent(GetLocalAddress().HostAsSensitiveURIString(),
- GetLocalAddress().port(),
- ReconstructedServerUrl(true /* use_hostname */),
- error_code, reason));
+ this, IceCandidateErrorEvent(
+ address, port, ReconstructedServerUrl(true /* use_hostname */),
+ error_code, reason));
}
void TurnPort::OnRefreshError() {
diff --git a/chromium/third_party/webrtc/p2p/base/turn_port_unittest.cc b/chromium/third_party/webrtc/p2p/base/turn_port_unittest.cc
index 1f5a7bf49ed..ce86fe4a3a5 100644
--- a/chromium/third_party/webrtc/p2p/base/turn_port_unittest.cc
+++ b/chromium/third_party/webrtc/p2p/base/turn_port_unittest.cc
@@ -148,7 +148,7 @@ class TestConnectionWrapper : public sigslot::has_slots<> {
// (between local port and TURN server) of kSimulatedRtt.
class TurnPortTest : public ::testing::Test,
public sigslot::has_slots<>,
- public rtc::MessageHandler {
+ public rtc::MessageHandlerAutoCleanup {
public:
TurnPortTest()
: ss_(new TurnPortTestVirtualSocketServer()),
diff --git a/chromium/third_party/webrtc/p2p/base/turn_server.cc b/chromium/third_party/webrtc/p2p/base/turn_server.cc
index 3a4784ac528..17a49e403de 100644
--- a/chromium/third_party/webrtc/p2p/base/turn_server.cc
+++ b/chromium/third_party/webrtc/p2p/base/turn_server.cc
@@ -59,7 +59,7 @@ enum {
// Encapsulates a TURN permission.
// The object is created when a create permission request is received by an
// allocation, and self-deletes when its lifetime timer expires.
-class TurnServerAllocation::Permission : public rtc::MessageHandler {
+class TurnServerAllocation::Permission : public rtc::MessageHandlerAutoCleanup {
public:
Permission(rtc::Thread* thread, const rtc::IPAddress& peer);
~Permission() override;
@@ -79,7 +79,7 @@ class TurnServerAllocation::Permission : public rtc::MessageHandler {
// Encapsulates a TURN channel binding.
// The object is created when a channel bind request is received by an
// allocation, and self-deletes when its lifetime timer expires.
-class TurnServerAllocation::Channel : public rtc::MessageHandler {
+class TurnServerAllocation::Channel : public rtc::MessageHandlerAutoCleanup {
public:
Channel(rtc::Thread* thread, int id, const rtc::SocketAddress& peer);
~Channel() override;
diff --git a/chromium/third_party/webrtc/p2p/base/turn_server.h b/chromium/third_party/webrtc/p2p/base/turn_server.h
index 0f4fefea840..ca856448b3f 100644
--- a/chromium/third_party/webrtc/p2p/base/turn_server.h
+++ b/chromium/third_party/webrtc/p2p/base/turn_server.h
@@ -66,7 +66,7 @@ class TurnServerConnection {
// handles TURN messages (via HandleTurnMessage) and channel data messages
// (via HandleChannelData) for this allocation when received by the server.
// The object self-deletes and informs the server if its lifetime timer expires.
-class TurnServerAllocation : public rtc::MessageHandler,
+class TurnServerAllocation : public rtc::MessageHandlerAutoCleanup,
public sigslot::has_slots<> {
public:
TurnServerAllocation(TurnServer* server_,
diff --git a/chromium/third_party/webrtc/p2p/client/basic_port_allocator.cc b/chromium/third_party/webrtc/p2p/client/basic_port_allocator.cc
index 8aeef9361dd..bb640d9498d 100644
--- a/chromium/third_party/webrtc/p2p/client/basic_port_allocator.cc
+++ b/chromium/third_party/webrtc/p2p/client/basic_port_allocator.cc
@@ -979,8 +979,11 @@ void BasicPortAllocatorSession::OnCandidateError(
const IceCandidateErrorEvent& event) {
RTC_DCHECK_RUN_ON(network_thread_);
RTC_DCHECK(FindPort(port));
-
- SignalCandidateError(this, event);
+ if (event.address.empty()) {
+ candidate_error_events_.push_back(event);
+ } else {
+ SignalCandidateError(this, event);
+ }
}
Port* BasicPortAllocatorSession::GetBestTurnPortForNetwork(
@@ -1140,6 +1143,10 @@ void BasicPortAllocatorSession::MaybeSignalCandidatesAllocationDone() {
RTC_LOG(LS_INFO) << "All candidates gathered for " << content_name()
<< ":" << component() << ":" << generation();
}
+ for (const auto& event : candidate_error_events_) {
+ SignalCandidateError(this, event);
+ }
+ candidate_error_events_.clear();
SignalCandidatesAllocationDone(this);
}
}
diff --git a/chromium/third_party/webrtc/p2p/client/basic_port_allocator.h b/chromium/third_party/webrtc/p2p/client/basic_port_allocator.h
index b9f2b2ebd25..26b181807a7 100644
--- a/chromium/third_party/webrtc/p2p/client/basic_port_allocator.h
+++ b/chromium/third_party/webrtc/p2p/client/basic_port_allocator.h
@@ -106,8 +106,9 @@ enum class SessionState {
// process will be started.
};
-class RTC_EXPORT BasicPortAllocatorSession : public PortAllocatorSession,
- public rtc::MessageHandler {
+class RTC_EXPORT BasicPortAllocatorSession
+ : public PortAllocatorSession,
+ public rtc::MessageHandlerAutoCleanup {
public:
BasicPortAllocatorSession(BasicPortAllocator* allocator,
const std::string& content_name,
@@ -269,6 +270,7 @@ class RTC_EXPORT BasicPortAllocatorSession : public PortAllocatorSession,
std::vector<PortConfiguration*> configs_;
std::vector<AllocationSequence*> sequences_;
std::vector<PortData> ports_;
+ std::vector<IceCandidateErrorEvent> candidate_error_events_;
uint32_t candidate_filter_ = CF_ALL;
// Policy on how to prune turn ports, taken from the port allocator.
webrtc::PortPrunePolicy turn_port_prune_policy_;
@@ -322,7 +324,7 @@ class TurnPort;
// Performs the allocation of ports, in a sequenced (timed) manner, for a given
// network and IP address.
-class AllocationSequence : public rtc::MessageHandler,
+class AllocationSequence : public rtc::MessageHandlerAutoCleanup,
public sigslot::has_slots<> {
public:
enum State {
diff --git a/chromium/third_party/webrtc/pc/BUILD.gn b/chromium/third_party/webrtc/pc/BUILD.gn
index a48a0469d91..3ab816d0616 100644
--- a/chromium/third_party/webrtc/pc/BUILD.gn
+++ b/chromium/third_party/webrtc/pc/BUILD.gn
@@ -36,8 +36,6 @@ rtc_library("rtc_pc_base") {
"composite_data_channel_transport.h",
"composite_rtp_transport.cc",
"composite_rtp_transport.h",
- "datagram_rtp_transport.cc",
- "datagram_rtp_transport.h",
"dtls_srtp_transport.cc",
"dtls_srtp_transport.h",
"dtls_transport.cc",
@@ -88,6 +86,7 @@ rtc_library("rtc_pc_base") {
"../api:function_view",
"../api:ice_transport_factory",
"../api:libjingle_peerconnection_api",
+ "../api:priority",
"../api:rtc_error",
"../api:rtp_headers",
"../api:rtp_parameters",
@@ -96,7 +95,6 @@ rtc_library("rtc_pc_base") {
"../api/crypto:options",
"../api/rtc_event_log",
"../api/transport:datagram_transport_interface",
- "../api/transport/media:media_transport_interface",
"../api/video:builtin_video_bitrate_allocator_factory",
"../api/video:video_frame",
"../api/video:video_rtp_headers",
@@ -124,6 +122,8 @@ rtc_library("rtc_pc_base") {
"../rtc_base/third_party/sigslot",
"../system_wrappers:field_trial",
"../system_wrappers:metrics",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/base:core_headers",
"//third_party/abseil-cpp/absl/memory",
@@ -236,6 +236,7 @@ rtc_library("peerconnection") {
"../api:libjingle_peerconnection_api",
"../api:media_stream_interface",
"../api:network_state_predictor_api",
+ "../api:priority",
"../api:rtc_error",
"../api:rtc_event_log_output_file",
"../api:rtc_stats_api",
@@ -246,7 +247,6 @@ rtc_library("peerconnection") {
"../api/task_queue",
"../api/transport:datagram_transport_interface",
"../api/transport:field_trial_based_config",
- "../api/transport/media:media_transport_interface",
"../api/units:data_rate",
"../api/video:builtin_video_bitrate_allocator_factory",
"../api/video:video_frame",
@@ -274,6 +274,8 @@ rtc_library("peerconnection") {
"../system_wrappers",
"../system_wrappers:field_trial",
"../system_wrappers:metrics",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
@@ -329,14 +331,11 @@ if (rtc_include_tests) {
":rtc_pc_base",
"../api:array_view",
"../api:audio_options_api",
- "../api:fake_media_transport",
"../api:ice_transport_factory",
"../api:libjingle_peerconnection_api",
- "../api:loopback_media_transport",
"../api:rtc_error",
"../api:rtp_headers",
"../api:rtp_parameters",
- "../api/transport/media:media_transport_interface",
"../api/video:builtin_video_bitrate_allocator_factory",
"../api/video/test:mock_recordable_encoded_frame",
"../call:rtp_interfaces",
@@ -404,8 +403,8 @@ if (rtc_include_tests) {
"../system_wrappers",
"../test:perf_test",
"../test:test_support",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("peerconnection_wrapper") {
@@ -497,12 +496,13 @@ if (rtc_include_tests) {
"../rtc_base:rtc_base_approved",
"../rtc_base:rtc_task_queue",
"../rtc_base:task_queue_for_test",
+ "../rtc_base/synchronization:sequence_checker",
"../rtc_base/task_utils:repeating_task",
"../rtc_base/third_party/sigslot",
"../test:test_support",
"../test:video_test_common",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_test("peerconnection_unittests") {
@@ -515,6 +515,7 @@ if (rtc_include_tests) {
"jsep_session_description_unittest.cc",
"local_audio_source_unittest.cc",
"media_stream_unittest.cc",
+ "peer_connection_adaptation_integrationtest.cc",
"peer_connection_bundle_unittest.cc",
"peer_connection_crypto_unittest.cc",
"peer_connection_data_channel_unittest.cc",
@@ -566,7 +567,6 @@ if (rtc_include_tests) {
"../api:function_view",
"../api:libjingle_logging_api",
"../api:libjingle_peerconnection_api",
- "../api:loopback_media_transport",
"../api:media_stream_interface",
"../api:mock_rtp",
"../api:rtc_error",
@@ -578,10 +578,10 @@ if (rtc_include_tests) {
"../api/rtc_event_log",
"../api/rtc_event_log:rtc_event_log_factory",
"../api/task_queue:default_task_queue_factory",
- "../api/transport/media:media_transport_interface",
"../api/transport/rtp:rtp_source",
"../api/units:time_delta",
"../api/video:builtin_video_bitrate_allocator_factory",
+ "../call/adaptation:resource_adaptation_test_utilities",
"../logging:fake_rtc_event_log",
"../media:rtc_media_config",
"../media:rtc_media_engine_defaults",
@@ -605,6 +605,7 @@ if (rtc_include_tests) {
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
]
if (is_android) {
deps += [ ":android_black_magic" ]
@@ -646,7 +647,6 @@ if (rtc_include_tests) {
"../test:audio_codec_mocks",
"../test:test_main",
"../test:test_support",
- "//third_party/abseil-cpp/absl/types:optional",
]
if (is_android) {
diff --git a/chromium/third_party/webrtc/pc/channel.cc b/chromium/third_party/webrtc/pc/channel.cc
index f83f5cdd9a3..e7f62c6aa6d 100644
--- a/chromium/third_party/webrtc/pc/channel.cc
+++ b/chromium/third_party/webrtc/pc/channel.cc
@@ -16,7 +16,6 @@
#include "absl/algorithm/container.h"
#include "absl/memory/memory.h"
#include "api/call/audio_sink.h"
-#include "api/transport/media/media_transport_config.h"
#include "media/base/media_constants.h"
#include "media/base/rtp_utils.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
@@ -142,7 +141,7 @@ BaseChannel::BaseChannel(rtc::Thread* worker_thread,
RTC_DCHECK_RUN_ON(worker_thread_);
RTC_DCHECK(ssrc_generator_);
demuxer_criteria_.mid = content_name;
- RTC_LOG(LS_INFO) << "Created channel for " << content_name;
+ RTC_LOG(LS_INFO) << "Created channel: " << ToString();
}
BaseChannel::~BaseChannel() {
@@ -156,12 +155,23 @@ BaseChannel::~BaseChannel() {
// the media channel may try to send on the dead transport channel. NULLing
// is not an effective strategy since the sends will come on another thread.
media_channel_.reset();
- RTC_LOG(LS_INFO) << "Destroyed channel: " << content_name_;
+ RTC_LOG(LS_INFO) << "Destroyed channel: " << ToString();
+}
+
+std::string BaseChannel::ToString() const {
+ rtc::StringBuilder sb;
+ sb << "{mid: " << content_name_;
+ if (media_channel_) {
+ sb << ", media_type: " << MediaTypeToString(media_channel_->media_type());
+ }
+ sb << "}";
+ return sb.Release();
}
bool BaseChannel::ConnectToRtpTransport() {
RTC_DCHECK(rtp_transport_);
if (!RegisterRtpDemuxerSink()) {
+ RTC_LOG(LS_ERROR) << "Failed to set up demuxing for " << ToString();
return false;
}
rtp_transport_->SignalReadyToSend.connect(
@@ -184,24 +194,20 @@ void BaseChannel::DisconnectFromRtpTransport() {
rtp_transport_->SignalSentPacket.disconnect(this);
}
-void BaseChannel::Init_w(
- webrtc::RtpTransportInternal* rtp_transport,
- const webrtc::MediaTransportConfig& media_transport_config) {
+void BaseChannel::Init_w(webrtc::RtpTransportInternal* rtp_transport) {
RTC_DCHECK_RUN_ON(worker_thread_);
- media_transport_config_ = media_transport_config;
network_thread_->Invoke<void>(
RTC_FROM_HERE, [this, rtp_transport] { SetRtpTransport(rtp_transport); });
// Both RTP and RTCP channels should be set, we can call SetInterface on
// the media channel and it can set network options.
- media_channel_->SetInterface(this, media_transport_config);
+ media_channel_->SetInterface(this);
}
void BaseChannel::Deinit() {
RTC_DCHECK(worker_thread_->IsCurrent());
- media_channel_->SetInterface(/*iface=*/nullptr,
- webrtc::MediaTransportConfig());
+ media_channel_->SetInterface(/*iface=*/nullptr);
// Packets arrive on the network thread, processing packets calls virtual
// functions, so need to stop this process in Deinit that is called in
// derived classes destructor.
@@ -237,7 +243,8 @@ bool BaseChannel::SetRtpTransport(webrtc::RtpTransportInternal* rtp_transport) {
transport_name_ = rtp_transport_->transport_name();
if (!ConnectToRtpTransport()) {
- RTC_LOG(LS_ERROR) << "Failed to connect to the new RtpTransport.";
+ RTC_LOG(LS_ERROR) << "Failed to connect to the new RtpTransport for "
+ << ToString() << ".";
return false;
}
OnTransportReadyToSend(rtp_transport_->IsReadyToSend());
@@ -349,7 +356,7 @@ void BaseChannel::OnWritableState(bool writable) {
void BaseChannel::OnNetworkRouteChanged(
absl::optional<rtc::NetworkRoute> network_route) {
- RTC_LOG(LS_INFO) << "Network route was changed.";
+ RTC_LOG(LS_INFO) << "Network route for " << ToString() << " was changed.";
RTC_DCHECK(network_thread_->IsCurrent());
rtc::NetworkRoute new_route;
@@ -404,7 +411,7 @@ bool BaseChannel::SendPacket(bool rtcp,
// Protect ourselves against crazy data.
if (!IsValidRtpPacketSize(packet_type, packet->size())) {
- RTC_LOG(LS_ERROR) << "Dropping outgoing " << content_name_ << " "
+ RTC_LOG(LS_ERROR) << "Dropping outgoing " << ToString() << " "
<< RtpPacketTypeToString(packet_type)
<< " packet: wrong size=" << packet->size();
return false;
@@ -420,16 +427,16 @@ bool BaseChannel::SendPacket(bool rtcp,
}
// However, there shouldn't be any RTP packets sent before SRTP is set up
// (and SetSend(true) is called).
- RTC_LOG(LS_ERROR)
- << "Can't send outgoing RTP packet when SRTP is inactive"
- " and crypto is required";
+ RTC_LOG(LS_ERROR) << "Can't send outgoing RTP packet for " << ToString()
+ << " when SRTP is inactive and crypto is required";
RTC_NOTREACHED();
return false;
}
std::string packet_type = rtcp ? "RTCP" : "RTP";
RTC_LOG(LS_WARNING) << "Sending an " << packet_type
- << " packet without encryption.";
+ << " packet without encryption for " << ToString()
+ << ".";
}
// Bon voyage.
@@ -463,7 +470,8 @@ void BaseChannel::OnRtpPacket(const webrtc::RtpPacketReceived& parsed_packet) {
// for us to just eat packets here. This is all sidestepped if RTCP mux
// is used anyway.
RTC_LOG(LS_WARNING) << "Can't process incoming RTP packet when "
- "SRTP is inactive and crypto is required";
+ "SRTP is inactive and crypto is required "
+ << ToString();
return;
}
@@ -504,7 +512,7 @@ void BaseChannel::EnableMedia_w() {
if (enabled_)
return;
- RTC_LOG(LS_INFO) << "Channel enabled";
+ RTC_LOG(LS_INFO) << "Channel enabled: " << ToString();
enabled_ = true;
UpdateMediaSendRecvState_w();
}
@@ -514,7 +522,7 @@ void BaseChannel::DisableMedia_w() {
if (!enabled_)
return;
- RTC_LOG(LS_INFO) << "Channel disabled";
+ RTC_LOG(LS_INFO) << "Channel disabled: " << ToString();
enabled_ = false;
UpdateMediaSendRecvState_w();
}
@@ -534,7 +542,7 @@ void BaseChannel::ChannelWritable_n() {
return;
}
- RTC_LOG(LS_INFO) << "Channel writable (" << content_name_ << ")"
+ RTC_LOG(LS_INFO) << "Channel writable (" << ToString() << ")"
<< (was_ever_writable_ ? "" : " for the first time");
was_ever_writable_ = true;
@@ -547,7 +555,7 @@ void BaseChannel::ChannelNotWritable_n() {
if (!writable_)
return;
- RTC_LOG(LS_INFO) << "Channel not writable (" << content_name_ << ")";
+ RTC_LOG(LS_INFO) << "Channel not writable (" << ToString() << ")";
writable_ = false;
UpdateMediaSendRecvState();
}
@@ -591,7 +599,8 @@ bool BaseChannel::UpdateLocalStreams_w(const std::vector<StreamParams>& streams,
if (!media_channel()->RemoveSendStream(old_stream.first_ssrc())) {
rtc::StringBuilder desc;
desc << "Failed to remove send stream with ssrc "
- << old_stream.first_ssrc() << ".";
+ << old_stream.first_ssrc() << " from m-section with mid='"
+ << content_name() << "'.";
SafeSetError(desc.str(), error_desc);
ret = false;
}
@@ -617,7 +626,8 @@ bool BaseChannel::UpdateLocalStreams_w(const std::vector<StreamParams>& streams,
if (new_stream.has_ssrcs() && new_stream.has_rids()) {
rtc::StringBuilder desc;
desc << "Failed to add send stream: " << new_stream.first_ssrc()
- << ". Stream has both SSRCs and RIDs.";
+ << " into m-section with mid='" << content_name()
+ << "'. Stream has both SSRCs and RIDs.";
SafeSetError(desc.str(), error_desc);
ret = false;
continue;
@@ -632,10 +642,12 @@ bool BaseChannel::UpdateLocalStreams_w(const std::vector<StreamParams>& streams,
}
if (media_channel()->AddSendStream(new_stream)) {
- RTC_LOG(LS_INFO) << "Add send stream ssrc: " << new_stream.ssrcs[0];
+ RTC_LOG(LS_INFO) << "Add send stream ssrc: " << new_stream.ssrcs[0]
+ << " into " << ToString();
} else {
rtc::StringBuilder desc;
- desc << "Failed to add send stream ssrc: " << new_stream.first_ssrc();
+ desc << "Failed to add send stream ssrc: " << new_stream.first_ssrc()
+ << " into m-section with mid='" << content_name() << "'";
SafeSetError(desc.str(), error_desc);
ret = false;
}
@@ -655,15 +667,18 @@ bool BaseChannel::UpdateRemoteStreams_w(
// the unsignaled stream params that are cached.
if (!old_stream.has_ssrcs() && !HasStreamWithNoSsrcs(streams)) {
ResetUnsignaledRecvStream_w();
- RTC_LOG(LS_INFO) << "Reset unsignaled remote stream.";
+ RTC_LOG(LS_INFO) << "Reset unsignaled remote stream for " << ToString()
+ << ".";
} else if (old_stream.has_ssrcs() &&
!GetStreamBySsrc(streams, old_stream.first_ssrc())) {
if (RemoveRecvStream_w(old_stream.first_ssrc())) {
- RTC_LOG(LS_INFO) << "Remove remote ssrc: " << old_stream.first_ssrc();
+ RTC_LOG(LS_INFO) << "Remove remote ssrc: " << old_stream.first_ssrc()
+ << " from " << ToString() << ".";
} else {
rtc::StringBuilder desc;
desc << "Failed to remove remote stream with ssrc "
- << old_stream.first_ssrc() << ".";
+ << old_stream.first_ssrc() << " from m-section with mid='"
+ << content_name() << "'.";
SafeSetError(desc.str(), error_desc);
ret = false;
}
@@ -681,13 +696,15 @@ bool BaseChannel::UpdateRemoteStreams_w(
RTC_LOG(LS_INFO) << "Add remote ssrc: "
<< (new_stream.has_ssrcs()
? std::to_string(new_stream.first_ssrc())
- : "unsignaled");
+ : "unsignaled")
+ << " to " << ToString();
} else {
rtc::StringBuilder desc;
desc << "Failed to add remote stream ssrc: "
<< (new_stream.has_ssrcs()
? std::to_string(new_stream.first_ssrc())
- : "unsignaled");
+ : "unsignaled")
+ << " to " << ToString();
SafeSetError(desc.str(), error_desc);
ret = false;
}
@@ -697,7 +714,9 @@ bool BaseChannel::UpdateRemoteStreams_w(
new_stream.ssrcs.end());
}
// Re-register the sink to update the receiving ssrcs.
- RegisterRtpDemuxerSink();
+ if (!RegisterRtpDemuxerSink()) {
+ RTC_LOG(LS_ERROR) << "Failed to set up demuxing for " << ToString();
+ }
remote_streams_ = streams;
return ret;
}
@@ -796,10 +815,8 @@ void BaseChannel::UpdateMediaSendRecvState() {
[this] { UpdateMediaSendRecvState_w(); });
}
-void VoiceChannel::Init_w(
- webrtc::RtpTransportInternal* rtp_transport,
- const webrtc::MediaTransportConfig& media_transport_config) {
- BaseChannel::Init_w(rtp_transport, media_transport_config);
+void VoiceChannel::Init_w(webrtc::RtpTransportInternal* rtp_transport) {
+ BaseChannel::Init_w(rtp_transport);
}
void VoiceChannel::UpdateMediaSendRecvState_w() {
@@ -813,7 +830,8 @@ void VoiceChannel::UpdateMediaSendRecvState_w() {
bool send = IsReadyToSendMedia_w();
media_channel()->SetSend(send);
- RTC_LOG(LS_INFO) << "Changing voice state, recv=" << recv << " send=" << send;
+ RTC_LOG(LS_INFO) << "Changing voice state, recv=" << recv << " send=" << send
+ << " for " << ToString();
}
bool VoiceChannel::SetLocalContent_w(const MediaContentDescription* content,
@@ -821,7 +839,7 @@ bool VoiceChannel::SetLocalContent_w(const MediaContentDescription* content,
std::string* error_desc) {
TRACE_EVENT0("webrtc", "VoiceChannel::SetLocalContent_w");
RTC_DCHECK_RUN_ON(worker_thread());
- RTC_LOG(LS_INFO) << "Setting local voice description";
+ RTC_LOG(LS_INFO) << "Setting local voice description for " << ToString();
RTC_DCHECK(content);
if (!content) {
@@ -841,8 +859,11 @@ bool VoiceChannel::SetLocalContent_w(const MediaContentDescription* content,
audio, rtp_header_extensions,
webrtc::RtpTransceiverDirectionHasRecv(audio->direction()), &recv_params);
if (!media_channel()->SetRecvParameters(recv_params)) {
- SafeSetError("Failed to set local audio description recv parameters.",
- error_desc);
+ SafeSetError(
+ "Failed to set local audio description recv parameters for m-section "
+ "with mid='" +
+ content_name() + "'.",
+ error_desc);
return false;
}
@@ -852,7 +873,7 @@ bool VoiceChannel::SetLocalContent_w(const MediaContentDescription* content,
}
// Need to re-register the sink to update the handled payload.
if (!RegisterRtpDemuxerSink()) {
- RTC_LOG(LS_ERROR) << "Failed to set up audio demuxing.";
+ RTC_LOG(LS_ERROR) << "Failed to set up audio demuxing for " << ToString();
return false;
}
}
@@ -864,7 +885,11 @@ bool VoiceChannel::SetLocalContent_w(const MediaContentDescription* content,
// description too (without a remote description, we won't be able
// to send them anyway).
if (!UpdateLocalStreams_w(audio->streams(), type, error_desc)) {
- SafeSetError("Failed to set local audio description streams.", error_desc);
+ SafeSetError(
+ "Failed to set local audio description streams for m-section with "
+ "mid='" +
+ content_name() + "'.",
+ error_desc);
return false;
}
@@ -878,7 +903,7 @@ bool VoiceChannel::SetRemoteContent_w(const MediaContentDescription* content,
std::string* error_desc) {
TRACE_EVENT0("webrtc", "VoiceChannel::SetRemoteContent_w");
RTC_DCHECK_RUN_ON(worker_thread());
- RTC_LOG(LS_INFO) << "Setting remote voice description";
+ RTC_LOG(LS_INFO) << "Setting remote voice description for " << ToString();
RTC_DCHECK(content);
if (!content) {
@@ -899,18 +924,22 @@ bool VoiceChannel::SetRemoteContent_w(const MediaContentDescription* content,
bool parameters_applied = media_channel()->SetSendParameters(send_params);
if (!parameters_applied) {
- SafeSetError("Failed to set remote audio description send parameters.",
- error_desc);
+ SafeSetError(
+ "Failed to set remote audio description send parameters for m-section "
+ "with mid='" +
+ content_name() + "'.",
+ error_desc);
return false;
}
last_send_params_ = send_params;
if (!webrtc::RtpTransceiverDirectionHasSend(content->direction())) {
RTC_DLOG(LS_VERBOSE) << "SetRemoteContent_w: remote side will not send - "
- "disable payload type demuxing";
+ "disable payload type demuxing for "
+ << ToString();
ClearHandledPayloadTypes();
if (!RegisterRtpDemuxerSink()) {
- RTC_LOG(LS_ERROR) << "Failed to update audio demuxing.";
+ RTC_LOG(LS_ERROR) << "Failed to update audio demuxing for " << ToString();
return false;
}
}
@@ -920,7 +949,11 @@ bool VoiceChannel::SetRemoteContent_w(const MediaContentDescription* content,
// description too (without a local description, we won't be able to
// recv them anyway).
if (!UpdateRemoteStreams_w(audio->streams(), type, error_desc)) {
- SafeSetError("Failed to set remote audio description streams.", error_desc);
+ SafeSetError(
+ "Failed to set remote audio description streams for m-section with "
+ "mid='" +
+ content_name() + "'.",
+ error_desc);
return false;
}
@@ -958,11 +991,12 @@ void VideoChannel::UpdateMediaSendRecvState_w() {
// and we have had some form of connectivity.
bool send = IsReadyToSendMedia_w();
if (!media_channel()->SetSend(send)) {
- RTC_LOG(LS_ERROR) << "Failed to SetSend on video channel";
+ RTC_LOG(LS_ERROR) << "Failed to SetSend on video channel: " + ToString();
// TODO(gangji): Report error back to server.
}
- RTC_LOG(LS_INFO) << "Changing video state, send=" << send;
+ RTC_LOG(LS_INFO) << "Changing video state, send=" << send << " for "
+ << ToString();
}
void VideoChannel::FillBitrateInfo(BandwidthEstimationInfo* bwe_info) {
@@ -975,7 +1009,7 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content,
std::string* error_desc) {
TRACE_EVENT0("webrtc", "VideoChannel::SetLocalContent_w");
RTC_DCHECK_RUN_ON(worker_thread());
- RTC_LOG(LS_INFO) << "Setting local video description";
+ RTC_LOG(LS_INFO) << "Setting local video description for " << ToString();
RTC_DCHECK(content);
if (!content) {
@@ -1007,7 +1041,9 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content,
needs_send_params_update = true;
} else if (recv_codec->packetization != send_codec.packetization) {
SafeSetError(
- "Failed to set local answer due to invalid codec packetization.",
+ "Failed to set local answer due to invalid codec packetization "
+ "specified in m-section with mid='" +
+ content_name() + "'.",
error_desc);
return false;
}
@@ -1016,8 +1052,11 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content,
}
if (!media_channel()->SetRecvParameters(recv_params)) {
- SafeSetError("Failed to set local video description recv parameters.",
- error_desc);
+ SafeSetError(
+ "Failed to set local video description recv parameters for m-section "
+ "with mid='" +
+ content_name() + "'.",
+ error_desc);
return false;
}
@@ -1027,7 +1066,7 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content,
}
// Need to re-register the sink to update the handled payload.
if (!RegisterRtpDemuxerSink()) {
- RTC_LOG(LS_ERROR) << "Failed to set up video demuxing.";
+ RTC_LOG(LS_ERROR) << "Failed to set up video demuxing for " << ToString();
return false;
}
}
@@ -1036,7 +1075,9 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content,
if (needs_send_params_update) {
if (!media_channel()->SetSendParameters(send_params)) {
- SafeSetError("Failed to set send parameters.", error_desc);
+ SafeSetError("Failed to set send parameters for m-section with mid='" +
+ content_name() + "'.",
+ error_desc);
return false;
}
last_send_params_ = send_params;
@@ -1047,7 +1088,11 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content,
// description too (without a remote description, we won't be able
// to send them anyway).
if (!UpdateLocalStreams_w(video->streams(), type, error_desc)) {
- SafeSetError("Failed to set local video description streams.", error_desc);
+ SafeSetError(
+ "Failed to set local video description streams for m-section with "
+ "mid='" +
+ content_name() + "'.",
+ error_desc);
return false;
}
@@ -1061,7 +1106,7 @@ bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content,
std::string* error_desc) {
TRACE_EVENT0("webrtc", "VideoChannel::SetRemoteContent_w");
RTC_DCHECK_RUN_ON(worker_thread());
- RTC_LOG(LS_INFO) << "Setting remote video description";
+ RTC_LOG(LS_INFO) << "Setting remote video description for " << ToString();
RTC_DCHECK(content);
if (!content) {
@@ -1095,7 +1140,9 @@ bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content,
needs_recv_params_update = true;
} else if (send_codec->packetization != recv_codec.packetization) {
SafeSetError(
- "Failed to set remote answer due to invalid codec packetization.",
+ "Failed to set remote answer due to invalid codec packetization "
            "specified in m-section with mid='" +
+ content_name() + "'.",
error_desc);
return false;
}
@@ -1104,15 +1151,20 @@ bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content,
}
if (!media_channel()->SetSendParameters(send_params)) {
- SafeSetError("Failed to set remote video description send parameters.",
- error_desc);
+ SafeSetError(
+ "Failed to set remote video description send parameters for m-section "
+ "with mid='" +
+ content_name() + "'.",
+ error_desc);
return false;
}
last_send_params_ = send_params;
if (needs_recv_params_update) {
if (!media_channel()->SetRecvParameters(recv_params)) {
- SafeSetError("Failed to set recv parameters.", error_desc);
+ SafeSetError("Failed to set recv parameters for m-section with mid='" +
+ content_name() + "'.",
+ error_desc);
return false;
}
last_recv_params_ = recv_params;
@@ -1120,10 +1172,11 @@ bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content,
if (!webrtc::RtpTransceiverDirectionHasSend(content->direction())) {
RTC_DLOG(LS_VERBOSE) << "SetRemoteContent_w: remote side will not send - "
- "disable payload type demuxing";
+ "disable payload type demuxing for "
+ << ToString();
ClearHandledPayloadTypes();
if (!RegisterRtpDemuxerSink()) {
- RTC_LOG(LS_ERROR) << "Failed to update video demuxing.";
+ RTC_LOG(LS_ERROR) << "Failed to update video demuxing for " << ToString();
return false;
}
}
@@ -1133,7 +1186,11 @@ bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content,
// description too (without a local description, we won't be able to
// recv them anyway).
if (!UpdateRemoteStreams_w(video->streams(), type, error_desc)) {
- SafeSetError("Failed to set remote video description streams.", error_desc);
+ SafeSetError(
+ "Failed to set remote video description streams for m-section with "
+ "mid='" +
+ content_name() + "'.",
+ error_desc);
return false;
}
set_remote_content_direction(content->direction());
@@ -1165,10 +1222,8 @@ RtpDataChannel::~RtpDataChannel() {
Deinit();
}
-void RtpDataChannel::Init_w(
- webrtc::RtpTransportInternal* rtp_transport,
- const webrtc::MediaTransportConfig& media_transport_config) {
- BaseChannel::Init_w(rtp_transport, media_transport_config);
+void RtpDataChannel::Init_w(webrtc::RtpTransportInternal* rtp_transport) {
+ BaseChannel::Init_w(rtp_transport);
media_channel()->SignalDataReceived.connect(this,
&RtpDataChannel::OnDataReceived);
media_channel()->SignalReadyToSend.connect(
@@ -1203,7 +1258,7 @@ bool RtpDataChannel::SetLocalContent_w(const MediaContentDescription* content,
std::string* error_desc) {
TRACE_EVENT0("webrtc", "RtpDataChannel::SetLocalContent_w");
RTC_DCHECK_RUN_ON(worker_thread());
- RTC_LOG(LS_INFO) << "Setting local data description";
+ RTC_LOG(LS_INFO) << "Setting local data description for " << ToString();
RTC_DCHECK(content);
if (!content) {
@@ -1224,8 +1279,11 @@ bool RtpDataChannel::SetLocalContent_w(const MediaContentDescription* content,
data, rtp_header_extensions,
webrtc::RtpTransceiverDirectionHasRecv(data->direction()), &recv_params);
if (!media_channel()->SetRecvParameters(recv_params)) {
- SafeSetError("Failed to set remote data description recv parameters.",
- error_desc);
+ SafeSetError(
+ "Failed to set remote data description recv parameters for m-section "
+ "with mid='" +
+ content_name() + "'.",
+ error_desc);
return false;
}
for (const DataCodec& codec : data->codecs()) {
@@ -1233,7 +1291,7 @@ bool RtpDataChannel::SetLocalContent_w(const MediaContentDescription* content,
}
// Need to re-register the sink to update the handled payload.
if (!RegisterRtpDemuxerSink()) {
- RTC_LOG(LS_ERROR) << "Failed to set up data demuxing.";
+ RTC_LOG(LS_ERROR) << "Failed to set up data demuxing for " << ToString();
return false;
}
@@ -1244,7 +1302,11 @@ bool RtpDataChannel::SetLocalContent_w(const MediaContentDescription* content,
// description too (without a remote description, we won't be able
// to send them anyway).
if (!UpdateLocalStreams_w(data->streams(), type, error_desc)) {
- SafeSetError("Failed to set local data description streams.", error_desc);
+ SafeSetError(
+ "Failed to set local data description streams for m-section with "
+ "mid='" +
+ content_name() + "'.",
+ error_desc);
return false;
}
@@ -1258,7 +1320,7 @@ bool RtpDataChannel::SetRemoteContent_w(const MediaContentDescription* content,
std::string* error_desc) {
TRACE_EVENT0("webrtc", "RtpDataChannel::SetRemoteContent_w");
RTC_DCHECK_RUN_ON(worker_thread());
- RTC_LOG(LS_INFO) << "Setting remote data description";
+ RTC_LOG(LS_INFO) << "Setting remote data description for " << ToString();
RTC_DCHECK(content);
if (!content) {
@@ -1280,14 +1342,17 @@ bool RtpDataChannel::SetRemoteContent_w(const MediaContentDescription* content,
RtpHeaderExtensions rtp_header_extensions =
GetFilteredRtpHeaderExtensions(data->rtp_header_extensions());
- RTC_LOG(LS_INFO) << "Setting remote data description";
+ RTC_LOG(LS_INFO) << "Setting remote data description for " << ToString();
DataSendParameters send_params = last_send_params_;
RtpSendParametersFromMediaDescription<DataCodec>(
data, rtp_header_extensions,
webrtc::RtpTransceiverDirectionHasRecv(data->direction()), &send_params);
if (!media_channel()->SetSendParameters(send_params)) {
- SafeSetError("Failed to set remote data description send parameters.",
- error_desc);
+ SafeSetError(
+ "Failed to set remote data description send parameters for m-section "
+ "with mid='" +
+ content_name() + "'.",
+ error_desc);
return false;
}
last_send_params_ = send_params;
@@ -1297,7 +1362,11 @@ bool RtpDataChannel::SetRemoteContent_w(const MediaContentDescription* content,
// description too (without a local description, we won't be able to
// recv them anyway).
if (!UpdateRemoteStreams_w(data->streams(), type, error_desc)) {
- SafeSetError("Failed to set remote data description streams.", error_desc);
+ SafeSetError(
+ "Failed to set remote data description streams for m-section with "
+ "mid='" +
+ content_name() + "'.",
+ error_desc);
return false;
}
@@ -1311,20 +1380,21 @@ void RtpDataChannel::UpdateMediaSendRecvState_w() {
// content. We receive data on the default channel and multiplexed streams.
bool recv = IsReadyToReceiveMedia_w();
if (!media_channel()->SetReceive(recv)) {
- RTC_LOG(LS_ERROR) << "Failed to SetReceive on data channel";
+ RTC_LOG(LS_ERROR) << "Failed to SetReceive on data channel: " << ToString();
}
// Send outgoing data if we're the active call, we have the remote content,
// and we have had some form of connectivity.
bool send = IsReadyToSendMedia_w();
if (!media_channel()->SetSend(send)) {
- RTC_LOG(LS_ERROR) << "Failed to SetSend on data channel";
+ RTC_LOG(LS_ERROR) << "Failed to SetSend on data channel: " << ToString();
}
// Trigger SignalReadyToSendData asynchronously.
OnDataChannelReadyToSend(send);
- RTC_LOG(LS_INFO) << "Changing data state, recv=" << recv << " send=" << send;
+ RTC_LOG(LS_INFO) << "Changing data state, recv=" << recv << " send=" << send
+ << " for " << ToString();
}
void RtpDataChannel::OnMessage(rtc::Message* pmsg) {
diff --git a/chromium/third_party/webrtc/pc/channel.h b/chromium/third_party/webrtc/pc/channel.h
index 238a8e20fee..406058ed4f2 100644
--- a/chromium/third_party/webrtc/pc/channel.h
+++ b/chromium/third_party/webrtc/pc/channel.h
@@ -22,7 +22,6 @@
#include "api/function_view.h"
#include "api/jsep.h"
#include "api/rtp_receiver_interface.h"
-#include "api/transport/media/media_transport_config.h"
#include "api/video/video_sink_interface.h"
#include "api/video/video_source_interface.h"
#include "call/rtp_packet_sink_interface.h"
@@ -46,7 +45,6 @@
namespace webrtc {
class AudioSinkInterface;
-class MediaTransportInterface;
} // namespace webrtc
namespace cricket {
@@ -72,7 +70,7 @@ struct CryptoParams;
// NetworkInterface.
class BaseChannel : public ChannelInterface,
- public rtc::MessageHandler,
+ public rtc::MessageHandlerAutoCleanup,
public sigslot::has_slots<>,
public MediaChannel::NetworkInterface,
public webrtc::RtpPacketSinkInterface {
@@ -92,9 +90,7 @@ class BaseChannel : public ChannelInterface,
webrtc::CryptoOptions crypto_options,
rtc::UniqueRandomIdGenerator* ssrc_generator);
virtual ~BaseChannel();
- virtual void Init_w(
- webrtc::RtpTransportInternal* rtp_transport,
- const webrtc::MediaTransportConfig& media_transport_config);
+ virtual void Init_w(webrtc::RtpTransportInternal* rtp_transport);
// Deinit may be called multiple times and is simply ignored if it's already
// done.
@@ -275,6 +271,9 @@ class BaseChannel : public ChannelInterface,
bool RegisterRtpDemuxerSink();
+ // Return description of media channel to facilitate logging
+ std::string ToString() const;
+
bool has_received_packet_ = false;
private:
@@ -296,9 +295,6 @@ class BaseChannel : public ChannelInterface,
webrtc::RtpTransportInternal* rtp_transport_ = nullptr;
- // Optional media transport configuration (experimental).
- webrtc::MediaTransportConfig media_transport_config_;
-
std::vector<std::pair<rtc::Socket::Option, int> > socket_options_;
std::vector<std::pair<rtc::Socket::Option, int> > rtcp_socket_options_;
bool writable_ = false;
@@ -350,9 +346,7 @@ class VoiceChannel : public BaseChannel {
cricket::MediaType media_type() const override {
return cricket::MEDIA_TYPE_AUDIO;
}
- void Init_w(
- webrtc::RtpTransportInternal* rtp_transport,
- const webrtc::MediaTransportConfig& media_transport_config) override;
+ void Init_w(webrtc::RtpTransportInternal* rtp_transport) override;
private:
// overrides from BaseChannel
@@ -432,9 +426,7 @@ class RtpDataChannel : public BaseChannel {
DtlsTransportInternal* rtcp_dtls_transport,
rtc::PacketTransportInternal* rtp_packet_transport,
rtc::PacketTransportInternal* rtcp_packet_transport);
- void Init_w(
- webrtc::RtpTransportInternal* rtp_transport,
- const webrtc::MediaTransportConfig& media_transport_config) override;
+ void Init_w(webrtc::RtpTransportInternal* rtp_transport) override;
virtual bool SendData(const SendDataParams& params,
const rtc::CopyOnWriteBuffer& payload,
diff --git a/chromium/third_party/webrtc/pc/channel_manager.cc b/chromium/third_party/webrtc/pc/channel_manager.cc
index f5f3dd4a7bb..84d74678b5b 100644
--- a/chromium/third_party/webrtc/pc/channel_manager.cc
+++ b/chromium/third_party/webrtc/pc/channel_manager.cc
@@ -187,7 +187,6 @@ VoiceChannel* ChannelManager::CreateVoiceChannel(
webrtc::Call* call,
const cricket::MediaConfig& media_config,
webrtc::RtpTransportInternal* rtp_transport,
- const webrtc::MediaTransportConfig& media_transport_config,
rtc::Thread* signaling_thread,
const std::string& content_name,
bool srtp_required,
@@ -197,9 +196,8 @@ VoiceChannel* ChannelManager::CreateVoiceChannel(
if (!worker_thread_->IsCurrent()) {
return worker_thread_->Invoke<VoiceChannel*>(RTC_FROM_HERE, [&] {
return CreateVoiceChannel(call, media_config, rtp_transport,
- media_transport_config, signaling_thread,
- content_name, srtp_required, crypto_options,
- ssrc_generator, options);
+ signaling_thread, content_name, srtp_required,
+ crypto_options, ssrc_generator, options);
});
}
@@ -221,7 +219,7 @@ VoiceChannel* ChannelManager::CreateVoiceChannel(
absl::WrapUnique(media_channel), content_name, srtp_required,
crypto_options, ssrc_generator);
- voice_channel->Init_w(rtp_transport, media_transport_config);
+ voice_channel->Init_w(rtp_transport);
VoiceChannel* voice_channel_ptr = voice_channel.get();
voice_channels_.push_back(std::move(voice_channel));
@@ -257,7 +255,6 @@ VideoChannel* ChannelManager::CreateVideoChannel(
webrtc::Call* call,
const cricket::MediaConfig& media_config,
webrtc::RtpTransportInternal* rtp_transport,
- const webrtc::MediaTransportConfig& media_transport_config,
rtc::Thread* signaling_thread,
const std::string& content_name,
bool srtp_required,
@@ -267,10 +264,10 @@ VideoChannel* ChannelManager::CreateVideoChannel(
webrtc::VideoBitrateAllocatorFactory* video_bitrate_allocator_factory) {
if (!worker_thread_->IsCurrent()) {
return worker_thread_->Invoke<VideoChannel*>(RTC_FROM_HERE, [&] {
- return CreateVideoChannel(
- call, media_config, rtp_transport, media_transport_config,
- signaling_thread, content_name, srtp_required, crypto_options,
- ssrc_generator, options, video_bitrate_allocator_factory);
+ return CreateVideoChannel(call, media_config, rtp_transport,
+ signaling_thread, content_name, srtp_required,
+ crypto_options, ssrc_generator, options,
+ video_bitrate_allocator_factory);
});
}
@@ -293,7 +290,7 @@ VideoChannel* ChannelManager::CreateVideoChannel(
absl::WrapUnique(media_channel), content_name, srtp_required,
crypto_options, ssrc_generator);
- video_channel->Init_w(rtp_transport, media_transport_config);
+ video_channel->Init_w(rtp_transport);
VideoChannel* video_channel_ptr = video_channel.get();
video_channels_.push_back(std::move(video_channel));
@@ -355,7 +352,7 @@ RtpDataChannel* ChannelManager::CreateRtpDataChannel(
crypto_options, ssrc_generator);
// Media Transports are not supported with Rtp Data Channel.
- data_channel->Init_w(rtp_transport, webrtc::MediaTransportConfig());
+ data_channel->Init_w(rtp_transport);
RtpDataChannel* data_channel_ptr = data_channel.get();
data_channels_.push_back(std::move(data_channel));
diff --git a/chromium/third_party/webrtc/pc/channel_manager.h b/chromium/third_party/webrtc/pc/channel_manager.h
index 415e476a90a..8d5fc0aa5b9 100644
--- a/chromium/third_party/webrtc/pc/channel_manager.h
+++ b/chromium/third_party/webrtc/pc/channel_manager.h
@@ -19,7 +19,6 @@
#include "api/audio_options.h"
#include "api/crypto/crypto_options.h"
-#include "api/transport/media/media_transport_config.h"
#include "call/call.h"
#include "media/base/codec.h"
#include "media/base/media_channel.h"
@@ -101,7 +100,6 @@ class ChannelManager final {
webrtc::Call* call,
const cricket::MediaConfig& media_config,
webrtc::RtpTransportInternal* rtp_transport,
- const webrtc::MediaTransportConfig& media_transport_config,
rtc::Thread* signaling_thread,
const std::string& content_name,
bool srtp_required,
@@ -118,7 +116,6 @@ class ChannelManager final {
webrtc::Call* call,
const cricket::MediaConfig& media_config,
webrtc::RtpTransportInternal* rtp_transport,
- const webrtc::MediaTransportConfig& media_transport_config,
rtc::Thread* signaling_thread,
const std::string& content_name,
bool srtp_required,
diff --git a/chromium/third_party/webrtc/pc/channel_manager_unittest.cc b/chromium/third_party/webrtc/pc/channel_manager_unittest.cc
index 6f3128ebde1..610d7979ab4 100644
--- a/chromium/third_party/webrtc/pc/channel_manager_unittest.cc
+++ b/chromium/third_party/webrtc/pc/channel_manager_unittest.cc
@@ -13,7 +13,6 @@
#include <memory>
#include "api/rtc_error.h"
-#include "api/transport/media/media_transport_config.h"
#include "api/video/builtin_video_bitrate_allocator_factory.h"
#include "media/base/fake_media_engine.h"
#include "media/base/test_utils.h"
@@ -73,20 +72,17 @@ class ChannelManagerTest : public ::testing::Test {
return dtls_srtp_transport;
}
- void TestCreateDestroyChannels(
- webrtc::RtpTransportInternal* rtp_transport,
- webrtc::MediaTransportConfig media_transport_config) {
+ void TestCreateDestroyChannels(webrtc::RtpTransportInternal* rtp_transport) {
cricket::VoiceChannel* voice_channel = cm_->CreateVoiceChannel(
&fake_call_, cricket::MediaConfig(), rtp_transport,
- media_transport_config, rtc::Thread::Current(), cricket::CN_AUDIO,
- kDefaultSrtpRequired, webrtc::CryptoOptions(), &ssrc_generator_,
- AudioOptions());
+ rtc::Thread::Current(), cricket::CN_AUDIO, kDefaultSrtpRequired,
+ webrtc::CryptoOptions(), &ssrc_generator_, AudioOptions());
EXPECT_TRUE(voice_channel != nullptr);
cricket::VideoChannel* video_channel = cm_->CreateVideoChannel(
&fake_call_, cricket::MediaConfig(), rtp_transport,
- media_transport_config, rtc::Thread::Current(), cricket::CN_VIDEO,
- kDefaultSrtpRequired, webrtc::CryptoOptions(), &ssrc_generator_,
- VideoOptions(), video_bitrate_allocator_factory_.get());
+ rtc::Thread::Current(), cricket::CN_VIDEO, kDefaultSrtpRequired,
+ webrtc::CryptoOptions(), &ssrc_generator_, VideoOptions(),
+ video_bitrate_allocator_factory_.get());
EXPECT_TRUE(video_channel != nullptr);
cricket::RtpDataChannel* rtp_data_channel = cm_->CreateRtpDataChannel(
cricket::MediaConfig(), rtp_transport, rtc::Thread::Current(),
@@ -183,8 +179,7 @@ TEST_F(ChannelManagerTest, SetVideoRtxEnabled) {
TEST_F(ChannelManagerTest, CreateDestroyChannels) {
EXPECT_TRUE(cm_->Init());
auto rtp_transport = CreateDtlsSrtpTransport();
- TestCreateDestroyChannels(rtp_transport.get(),
- webrtc::MediaTransportConfig());
+ TestCreateDestroyChannels(rtp_transport.get());
}
TEST_F(ChannelManagerTest, CreateDestroyChannelsOnThread) {
@@ -194,8 +189,7 @@ TEST_F(ChannelManagerTest, CreateDestroyChannelsOnThread) {
EXPECT_TRUE(cm_->set_network_thread(network_.get()));
EXPECT_TRUE(cm_->Init());
auto rtp_transport = CreateDtlsSrtpTransport();
- TestCreateDestroyChannels(rtp_transport.get(),
- webrtc::MediaTransportConfig());
+ TestCreateDestroyChannels(rtp_transport.get());
}
} // namespace cricket
diff --git a/chromium/third_party/webrtc/pc/channel_unittest.cc b/chromium/third_party/webrtc/pc/channel_unittest.cc
index a3fe3f68de4..479340c5202 100644
--- a/chromium/third_party/webrtc/pc/channel_unittest.cc
+++ b/chromium/third_party/webrtc/pc/channel_unittest.cc
@@ -17,7 +17,6 @@
#include "api/array_view.h"
#include "api/audio_options.h"
#include "api/rtp_parameters.h"
-#include "api/transport/media/media_transport_config.h"
#include "media/base/codec.h"
#include "media/base/fake_media_engine.h"
#include "media/base/fake_rtp.h"
@@ -1431,7 +1430,7 @@ std::unique_ptr<cricket::VoiceChannel> ChannelTest<VoiceTraits>::CreateChannel(
worker_thread, network_thread, signaling_thread, std::move(ch),
cricket::CN_AUDIO, (flags & DTLS) != 0, webrtc::CryptoOptions(),
&ssrc_generator_);
- channel->Init_w(rtp_transport, webrtc::MediaTransportConfig());
+ channel->Init_w(rtp_transport);
return channel;
}
@@ -1514,7 +1513,7 @@ std::unique_ptr<cricket::VideoChannel> ChannelTest<VideoTraits>::CreateChannel(
worker_thread, network_thread, signaling_thread, std::move(ch),
cricket::CN_VIDEO, (flags & DTLS) != 0, webrtc::CryptoOptions(),
&ssrc_generator_);
- channel->Init_w(rtp_transport, webrtc::MediaTransportConfig());
+ channel->Init_w(rtp_transport);
return channel;
}
@@ -2301,7 +2300,7 @@ std::unique_ptr<cricket::RtpDataChannel> ChannelTest<DataTraits>::CreateChannel(
worker_thread, network_thread, signaling_thread, std::move(ch),
cricket::CN_DATA, (flags & DTLS) != 0, webrtc::CryptoOptions(),
&ssrc_generator_);
- channel->Init_w(rtp_transport, webrtc::MediaTransportConfig());
+ channel->Init_w(rtp_transport);
return channel;
}
diff --git a/chromium/third_party/webrtc/pc/data_channel.cc b/chromium/third_party/webrtc/pc/data_channel.cc
index 4f871b4d504..ca6b6145cb1 100644
--- a/chromium/third_party/webrtc/pc/data_channel.cc
+++ b/chromium/third_party/webrtc/pc/data_channel.cc
@@ -14,6 +14,7 @@
#include <string>
#include <utility>
+#include "api/proxy.h"
#include "media/sctp/sctp_transport_internal.h"
#include "pc/sctp_utils.h"
#include "rtc_base/checks.h"
@@ -137,26 +138,75 @@ rtc::scoped_refptr<DataChannel> DataChannel::Create(
DataChannelProviderInterface* provider,
cricket::DataChannelType dct,
const std::string& label,
- const InternalDataChannelInit& config) {
+ const InternalDataChannelInit& config,
+ rtc::Thread* signaling_thread,
+ rtc::Thread* network_thread) {
rtc::scoped_refptr<DataChannel> channel(
- new rtc::RefCountedObject<DataChannel>(provider, dct, label));
- if (!channel->Init(config)) {
- return NULL;
+ new rtc::RefCountedObject<DataChannel>(config, provider, dct, label,
+ signaling_thread, network_thread));
+ if (!channel->Init()) {
+ return nullptr;
}
return channel;
}
+// Define proxy for DataChannelInterface.
+BEGIN_SIGNALING_PROXY_MAP(DataChannel)
+PROXY_SIGNALING_THREAD_DESTRUCTOR()
+PROXY_METHOD1(void, RegisterObserver, DataChannelObserver*)
+PROXY_METHOD0(void, UnregisterObserver)
+BYPASS_PROXY_CONSTMETHOD0(std::string, label)
+BYPASS_PROXY_CONSTMETHOD0(bool, reliable)
+BYPASS_PROXY_CONSTMETHOD0(bool, ordered)
+BYPASS_PROXY_CONSTMETHOD0(uint16_t, maxRetransmitTime)
+BYPASS_PROXY_CONSTMETHOD0(uint16_t, maxRetransmits)
+BYPASS_PROXY_CONSTMETHOD0(absl::optional<int>, maxRetransmitsOpt)
+BYPASS_PROXY_CONSTMETHOD0(absl::optional<int>, maxPacketLifeTime)
+BYPASS_PROXY_CONSTMETHOD0(std::string, protocol)
+BYPASS_PROXY_CONSTMETHOD0(bool, negotiated)
+// Can't bypass the proxy since the id may change.
+PROXY_CONSTMETHOD0(int, id)
+BYPASS_PROXY_CONSTMETHOD0(Priority, priority)
+PROXY_CONSTMETHOD0(DataState, state)
+PROXY_CONSTMETHOD0(RTCError, error)
+PROXY_CONSTMETHOD0(uint32_t, messages_sent)
+PROXY_CONSTMETHOD0(uint64_t, bytes_sent)
+PROXY_CONSTMETHOD0(uint32_t, messages_received)
+PROXY_CONSTMETHOD0(uint64_t, bytes_received)
+PROXY_CONSTMETHOD0(uint64_t, buffered_amount)
+PROXY_METHOD0(void, Close)
+// TODO(bugs.webrtc.org/11547): Change to run on the network thread.
+PROXY_METHOD1(bool, Send, const DataBuffer&)
+END_PROXY_MAP()
+
+// static
+rtc::scoped_refptr<DataChannelInterface> DataChannel::CreateProxy(
+ rtc::scoped_refptr<DataChannel> channel) {
+ // TODO(bugs.webrtc.org/11547): incorporate the network thread in the proxy.
+ // Also, consider allowing the proxy object to own the reference (std::move).
+ // As is, the proxy has a raw pointer and no reference to the channel object
+ // and trusting that the lifetime management aligns with the
+ // sctp_data_channels_ array in DataChannelController.
+ return DataChannelProxy::Create(channel->signaling_thread_, channel.get());
+}
+
bool DataChannel::IsSctpLike(cricket::DataChannelType type) {
return type == cricket::DCT_SCTP || type == cricket::DCT_MEDIA_TRANSPORT ||
type == cricket::DCT_DATA_CHANNEL_TRANSPORT ||
type == cricket::DCT_DATA_CHANNEL_TRANSPORT_SCTP;
}
-DataChannel::DataChannel(DataChannelProviderInterface* provider,
+DataChannel::DataChannel(const InternalDataChannelInit& config,
+ DataChannelProviderInterface* provider,
cricket::DataChannelType dct,
- const std::string& label)
- : internal_id_(GenerateUniqueId()),
+ const std::string& label,
+ rtc::Thread* signaling_thread,
+ rtc::Thread* network_thread)
+ : signaling_thread_(signaling_thread),
+ network_thread_(network_thread),
+ internal_id_(GenerateUniqueId()),
label_(label),
+ config_(config),
observer_(nullptr),
state_(kConnecting),
messages_sent_(0),
@@ -172,31 +222,33 @@ DataChannel::DataChannel(DataChannelProviderInterface* provider,
receive_ssrc_set_(false),
writable_(false),
send_ssrc_(0),
- receive_ssrc_(0) {}
+ receive_ssrc_(0) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+}
-bool DataChannel::Init(const InternalDataChannelInit& config) {
+bool DataChannel::Init() {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
if (data_channel_type_ == cricket::DCT_RTP) {
- if (config.reliable || config.id != -1 || config.maxRetransmits ||
- config.maxRetransmitTime) {
+ if (config_.reliable || config_.id != -1 || config_.maxRetransmits ||
+ config_.maxRetransmitTime) {
RTC_LOG(LS_ERROR) << "Failed to initialize the RTP data channel due to "
"invalid DataChannelInit.";
return false;
}
handshake_state_ = kHandshakeReady;
} else if (IsSctpLike(data_channel_type_)) {
- if (config.id < -1 ||
- (config.maxRetransmits && *config.maxRetransmits < 0) ||
- (config.maxRetransmitTime && *config.maxRetransmitTime < 0)) {
+ if (config_.id < -1 ||
+ (config_.maxRetransmits && *config_.maxRetransmits < 0) ||
+ (config_.maxRetransmitTime && *config_.maxRetransmitTime < 0)) {
RTC_LOG(LS_ERROR) << "Failed to initialize the SCTP data channel due to "
"invalid DataChannelInit.";
return false;
}
- if (config.maxRetransmits && config.maxRetransmitTime) {
+ if (config_.maxRetransmits && config_.maxRetransmitTime) {
RTC_LOG(LS_ERROR)
<< "maxRetransmits and maxRetransmitTime should not be both set.";
return false;
}
- config_ = config;
switch (config_.open_handshake_role) {
case webrtc::InternalDataChannelInit::kNone: // pre-negotiated
@@ -228,18 +280,23 @@ bool DataChannel::Init(const InternalDataChannelInit& config) {
return true;
}
-DataChannel::~DataChannel() {}
+DataChannel::~DataChannel() {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+}
void DataChannel::RegisterObserver(DataChannelObserver* observer) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
observer_ = observer;
DeliverQueuedReceivedData();
}
void DataChannel::UnregisterObserver() {
- observer_ = NULL;
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ observer_ = nullptr;
}
bool DataChannel::reliable() const {
+ // May be called on any thread.
if (data_channel_type_ == cricket::DCT_RTP) {
return false;
} else {
@@ -248,10 +305,12 @@ bool DataChannel::reliable() const {
}
uint64_t DataChannel::buffered_amount() const {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
return buffered_amount_;
}
void DataChannel::Close() {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
if (state_ == kClosed)
return;
send_ssrc_ = 0;
@@ -261,12 +320,42 @@ void DataChannel::Close() {
UpdateState();
}
+DataChannel::DataState DataChannel::state() const {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ return state_;
+}
+
RTCError DataChannel::error() const {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
return error_;
}
+uint32_t DataChannel::messages_sent() const {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ return messages_sent_;
+}
+
+uint64_t DataChannel::bytes_sent() const {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ return bytes_sent_;
+}
+
+uint32_t DataChannel::messages_received() const {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ return messages_received_;
+}
+
+uint64_t DataChannel::bytes_received() const {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ return bytes_received_;
+}
+
bool DataChannel::Send(const DataBuffer& buffer) {
- buffered_amount_ += buffer.size();
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ // TODO(bugs.webrtc.org/11547): Expect this method to be called on the network
+ // thread. Bring buffer management etc to the network thread and keep the
+ // operational state management on the signaling thread.
+
if (state_ != kOpen) {
return false;
}
@@ -278,6 +367,8 @@ bool DataChannel::Send(const DataBuffer& buffer) {
return true;
}
+ buffered_amount_ += buffer.size();
+
// If the queue is non-empty, we're waiting for SignalReadyToSend,
// so just add to the end of the queue and keep waiting.
if (!queued_send_data_.Empty()) {
@@ -305,6 +396,7 @@ bool DataChannel::Send(const DataBuffer& buffer) {
}
void DataChannel::SetReceiveSsrc(uint32_t receive_ssrc) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
RTC_DCHECK(data_channel_type_ == cricket::DCT_RTP);
if (receive_ssrc_set_) {
@@ -323,11 +415,12 @@ void DataChannel::SetSctpSid(int sid) {
return;
}
- config_.id = sid;
+ const_cast<InternalDataChannelInit&>(config_).id = sid;
provider_->AddSctpDataStream(sid);
}
void DataChannel::OnClosingProcedureStartedRemotely(int sid) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
if (IsSctpLike(data_channel_type_) && sid == config_.id &&
state_ != kClosing && state_ != kClosed) {
// Don't bother sending queued data since the side that initiated the
@@ -344,6 +437,7 @@ void DataChannel::OnClosingProcedureStartedRemotely(int sid) {
}
void DataChannel::OnClosingProcedureComplete(int sid) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
if (IsSctpLike(data_channel_type_) && sid == config_.id) {
// If the closing procedure is complete, we should have finished sending
// all pending data and transitioned to kClosing already.
@@ -355,6 +449,7 @@ void DataChannel::OnClosingProcedureComplete(int sid) {
}
void DataChannel::OnTransportChannelCreated() {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
RTC_DCHECK(IsSctpLike(data_channel_type_));
if (!connected_to_provider_) {
connected_to_provider_ = provider_->ConnectDataChannel(this);
@@ -370,10 +465,18 @@ void DataChannel::OnTransportChannelClosed() {
// The SctpTransport is unusable (for example, because the SCTP m= section
// was rejected, or because the DTLS transport closed), so we need to close
// abruptly.
- // Note: this needs to differentiate between normal close and error close.
- // https://w3c.github.io/webrtc-pc/#announcing-a-data-channel-as-closed
- CloseAbruptlyWithError(
- RTCError(RTCErrorType::NETWORK_ERROR, "Transport channel closed"));
+ RTCError error = RTCError(RTCErrorType::OPERATION_ERROR_WITH_DATA,
+ "Transport channel closed");
+ error.set_error_detail(RTCErrorDetailType::SCTP_FAILURE);
+ CloseAbruptlyWithError(std::move(error));
+}
+
+DataChannel::Stats DataChannel::GetStats() const {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ Stats stats{internal_id_, id(), label(),
+ protocol(), state(), messages_sent(),
+ messages_received(), bytes_sent(), bytes_received()};
+ return stats;
}
// The remote peer request that this channel shall be closed.
@@ -384,6 +487,7 @@ void DataChannel::RemotePeerRequestClose() {
}
void DataChannel::SetSendSsrc(uint32_t send_ssrc) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
RTC_DCHECK(data_channel_type_ == cricket::DCT_RTP);
if (send_ssrc_set_) {
return;
@@ -395,6 +499,7 @@ void DataChannel::SetSendSsrc(uint32_t send_ssrc) {
void DataChannel::OnDataReceived(const cricket::ReceiveDataParams& params,
const rtc::CopyOnWriteBuffer& payload) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
if (data_channel_type_ == cricket::DCT_RTP && params.ssrc != receive_ssrc_) {
return;
}
@@ -461,6 +566,8 @@ void DataChannel::OnDataReceived(const cricket::ReceiveDataParams& params,
}
void DataChannel::OnChannelReady(bool writable) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+
writable_ = writable;
if (!writable) {
return;
@@ -472,6 +579,8 @@ void DataChannel::OnChannelReady(bool writable) {
}
void DataChannel::CloseAbruptlyWithError(RTCError error) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+
if (state_ == kClosed) {
return;
}
@@ -500,6 +609,7 @@ void DataChannel::CloseAbruptlyWithDataChannelFailure(
}
void DataChannel::UpdateState() {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
// UpdateState determines what to do from a few state variables. Include
// all conditions required for each state transition here for
// clarity. OnChannelReady(true) will send any queued data and then invoke
@@ -567,6 +677,7 @@ void DataChannel::UpdateState() {
}
void DataChannel::SetState(DataState state) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
if (state_ == state) {
return;
}
@@ -583,6 +694,7 @@ void DataChannel::SetState(DataState state) {
}
void DataChannel::DisconnectFromProvider() {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
if (!connected_to_provider_)
return;
@@ -591,6 +703,7 @@ void DataChannel::DisconnectFromProvider() {
}
void DataChannel::DeliverQueuedReceivedData() {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
if (!observer_) {
return;
}
@@ -604,6 +717,7 @@ void DataChannel::DeliverQueuedReceivedData() {
}
void DataChannel::SendQueuedDataMessages() {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
if (queued_send_data_.Empty()) {
return;
}
@@ -622,6 +736,7 @@ void DataChannel::SendQueuedDataMessages() {
bool DataChannel::SendDataMessage(const DataBuffer& buffer,
bool queue_if_blocked) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
cricket::SendDataParams send_params;
if (IsSctpLike(data_channel_type_)) {
@@ -680,6 +795,7 @@ bool DataChannel::SendDataMessage(const DataBuffer& buffer,
}
bool DataChannel::QueueSendDataMessage(const DataBuffer& buffer) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
size_t start_buffered_amount = queued_send_data_.byte_count();
if (start_buffered_amount + buffer.size() > kMaxQueuedSendDataBytes) {
RTC_LOG(LS_ERROR) << "Can't buffer any more data for the data channel.";
@@ -690,6 +806,7 @@ bool DataChannel::QueueSendDataMessage(const DataBuffer& buffer) {
}
void DataChannel::SendQueuedControlMessages() {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
PacketQueue control_packets;
control_packets.Swap(&queued_control_data_);
@@ -700,10 +817,12 @@ void DataChannel::SendQueuedControlMessages() {
}
void DataChannel::QueueControlMessage(const rtc::CopyOnWriteBuffer& buffer) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
queued_control_data_.PushBack(std::make_unique<DataBuffer>(buffer, true));
}
bool DataChannel::SendControlMessage(const rtc::CopyOnWriteBuffer& buffer) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
bool is_open_message = handshake_state_ == kHandshakeShouldSendOpen;
RTC_DCHECK(IsSctpLike(data_channel_type_));
diff --git a/chromium/third_party/webrtc/pc/data_channel.h b/chromium/third_party/webrtc/pc/data_channel.h
index c1de7c7a7ae..09b6692f02d 100644
--- a/chromium/third_party/webrtc/pc/data_channel.h
+++ b/chromium/third_party/webrtc/pc/data_channel.h
@@ -17,8 +17,9 @@
#include <string>
#include "api/data_channel_interface.h"
-#include "api/proxy.h"
+#include "api/priority.h"
#include "api/scoped_refptr.h"
+#include "api/transport/data_channel_transport_interface.h"
#include "media/base/media_channel.h"
#include "pc/channel.h"
#include "rtc_base/async_invoker.h"
@@ -53,6 +54,8 @@ class DataChannelProviderInterface {
virtual ~DataChannelProviderInterface() {}
};
+// TODO(tommi): Change to not inherit from DataChannelInit but to have it as
+// a const member. Block access to the 'id' member since it cannot be const.
struct InternalDataChannelInit : public DataChannelInit {
enum OpenHandshakeRole { kOpener, kAcker, kNone };
// The default role is kOpener because the default |negotiated| is false.
@@ -83,7 +86,7 @@ class SctpSidAllocator {
std::set<int> used_sids_;
};
-// DataChannel is a an implementation of the DataChannelInterface based on
+// DataChannel is an implementation of the DataChannelInterface based on
// libjingle's data engine. It provides an implementation of unreliable or
// reliabledata channels. Currently this class is specifically designed to use
// both RtpDataChannel and SctpTransport.
@@ -110,48 +113,72 @@ class SctpSidAllocator {
// callback and transition to kClosed.
class DataChannel : public DataChannelInterface, public sigslot::has_slots<> {
public:
+ struct Stats {
+ int internal_id;
+ int id;
+ std::string label;
+ std::string protocol;
+ DataState state;
+ uint32_t messages_sent;
+ uint32_t messages_received;
+ uint64_t bytes_sent;
+ uint64_t bytes_received;
+ };
+
static rtc::scoped_refptr<DataChannel> Create(
DataChannelProviderInterface* provider,
cricket::DataChannelType dct,
const std::string& label,
- const InternalDataChannelInit& config);
+ const InternalDataChannelInit& config,
+ rtc::Thread* signaling_thread,
+ rtc::Thread* network_thread);
+
+ // Instantiates an API proxy for a DataChannel instance that will be handed
+ // out to external callers.
+ static rtc::scoped_refptr<DataChannelInterface> CreateProxy(
+ rtc::scoped_refptr<DataChannel> channel);
static bool IsSctpLike(cricket::DataChannelType type);
- virtual void RegisterObserver(DataChannelObserver* observer);
- virtual void UnregisterObserver();
+ void RegisterObserver(DataChannelObserver* observer) override;
+ void UnregisterObserver() override;
- virtual std::string label() const { return label_; }
- virtual bool reliable() const;
- virtual bool ordered() const { return config_.ordered; }
+ std::string label() const override { return label_; }
+ bool reliable() const override;
+ bool ordered() const override { return config_.ordered; }
// Backwards compatible accessors
- virtual uint16_t maxRetransmitTime() const {
+ uint16_t maxRetransmitTime() const override {
return config_.maxRetransmitTime ? *config_.maxRetransmitTime
: static_cast<uint16_t>(-1);
}
- virtual uint16_t maxRetransmits() const {
+ uint16_t maxRetransmits() const override {
return config_.maxRetransmits ? *config_.maxRetransmits
: static_cast<uint16_t>(-1);
}
- virtual absl::optional<int> maxPacketLifeTime() const {
+ absl::optional<int> maxPacketLifeTime() const override {
return config_.maxRetransmitTime;
}
- virtual absl::optional<int> maxRetransmitsOpt() const {
+ absl::optional<int> maxRetransmitsOpt() const override {
return config_.maxRetransmits;
}
- virtual std::string protocol() const { return config_.protocol; }
- virtual bool negotiated() const { return config_.negotiated; }
- virtual int id() const { return config_.id; }
+ std::string protocol() const override { return config_.protocol; }
+ bool negotiated() const override { return config_.negotiated; }
+ int id() const override { return config_.id; }
+ Priority priority() const override {
+ return config_.priority ? *config_.priority : Priority::kLow;
+ }
+
virtual int internal_id() const { return internal_id_; }
- virtual uint64_t buffered_amount() const;
- virtual void Close();
- virtual DataState state() const { return state_; }
- virtual RTCError error() const;
- virtual uint32_t messages_sent() const { return messages_sent_; }
- virtual uint64_t bytes_sent() const { return bytes_sent_; }
- virtual uint32_t messages_received() const { return messages_received_; }
- virtual uint64_t bytes_received() const { return bytes_received_; }
- virtual bool Send(const DataBuffer& buffer);
+
+ uint64_t buffered_amount() const override;
+ void Close() override;
+ DataState state() const override;
+ RTCError error() const override;
+ uint32_t messages_sent() const override;
+ uint64_t bytes_sent() const override;
+ uint32_t messages_received() const override;
+ uint64_t bytes_received() const override;
+ bool Send(const DataBuffer& buffer) override;
// Close immediately, ignoring any queued data or closing procedure.
// This is called for RTP data channels when SDP indicates a channel should
@@ -195,6 +222,8 @@ class DataChannel : public DataChannelInterface, public sigslot::has_slots<> {
// to kClosed.
void OnTransportChannelClosed();
+ Stats GetStats() const;
+
/*******************************************
* The following methods are for RTP only. *
*******************************************/
@@ -225,10 +254,13 @@ class DataChannel : public DataChannelInterface, public sigslot::has_slots<> {
static void ResetInternalIdAllocatorForTesting(int new_value);
protected:
- DataChannel(DataChannelProviderInterface* client,
+ DataChannel(const InternalDataChannelInit& config,
+ DataChannelProviderInterface* client,
cricket::DataChannelType dct,
- const std::string& label);
- virtual ~DataChannel();
+ const std::string& label,
+ rtc::Thread* signaling_thread,
+ rtc::Thread* network_thread);
+ ~DataChannel() override;
private:
// A packet queue which tracks the total queued bytes. Queued packets are
@@ -262,7 +294,7 @@ class DataChannel : public DataChannelInterface, public sigslot::has_slots<> {
kHandshakeReady
};
- bool Init(const InternalDataChannelInit& config);
+ bool Init();
void UpdateState();
void SetState(DataState state);
void DisconnectFromProvider();
@@ -277,64 +309,40 @@ class DataChannel : public DataChannelInterface, public sigslot::has_slots<> {
void QueueControlMessage(const rtc::CopyOnWriteBuffer& buffer);
bool SendControlMessage(const rtc::CopyOnWriteBuffer& buffer);
+ rtc::Thread* const signaling_thread_;
+ rtc::Thread* const network_thread_;
const int internal_id_;
- std::string label_;
- InternalDataChannelInit config_;
- DataChannelObserver* observer_;
- DataState state_;
- RTCError error_;
- uint32_t messages_sent_;
- uint64_t bytes_sent_;
- uint32_t messages_received_;
- uint64_t bytes_received_;
+ const std::string label_;
+ const InternalDataChannelInit config_;
+ DataChannelObserver* observer_ RTC_GUARDED_BY(signaling_thread_);
+ DataState state_ RTC_GUARDED_BY(signaling_thread_);
+ RTCError error_ RTC_GUARDED_BY(signaling_thread_);
+ uint32_t messages_sent_ RTC_GUARDED_BY(signaling_thread_);
+ uint64_t bytes_sent_ RTC_GUARDED_BY(signaling_thread_);
+ uint32_t messages_received_ RTC_GUARDED_BY(signaling_thread_);
+ uint64_t bytes_received_ RTC_GUARDED_BY(signaling_thread_);
// Number of bytes of data that have been queued using Send(). Increased
// before each transport send and decreased after each successful send.
- uint64_t buffered_amount_;
- cricket::DataChannelType data_channel_type_;
- DataChannelProviderInterface* provider_;
- HandshakeState handshake_state_;
- bool connected_to_provider_;
- bool send_ssrc_set_;
- bool receive_ssrc_set_;
- bool writable_;
+ uint64_t buffered_amount_ RTC_GUARDED_BY(signaling_thread_);
+ const cricket::DataChannelType data_channel_type_;
+ DataChannelProviderInterface* const provider_;
+ HandshakeState handshake_state_ RTC_GUARDED_BY(signaling_thread_);
+ bool connected_to_provider_ RTC_GUARDED_BY(signaling_thread_);
+ bool send_ssrc_set_ RTC_GUARDED_BY(signaling_thread_);
+ bool receive_ssrc_set_ RTC_GUARDED_BY(signaling_thread_);
+ bool writable_ RTC_GUARDED_BY(signaling_thread_);
// Did we already start the graceful SCTP closing procedure?
- bool started_closing_procedure_ = false;
- uint32_t send_ssrc_;
- uint32_t receive_ssrc_;
+ bool started_closing_procedure_ RTC_GUARDED_BY(signaling_thread_) = false;
+ uint32_t send_ssrc_ RTC_GUARDED_BY(signaling_thread_);
+ uint32_t receive_ssrc_ RTC_GUARDED_BY(signaling_thread_);
// Control messages that always have to get sent out before any queued
// data.
- PacketQueue queued_control_data_;
- PacketQueue queued_received_data_;
- PacketQueue queued_send_data_;
- rtc::AsyncInvoker invoker_;
+ PacketQueue queued_control_data_ RTC_GUARDED_BY(signaling_thread_);
+ PacketQueue queued_received_data_ RTC_GUARDED_BY(signaling_thread_);
+ PacketQueue queued_send_data_ RTC_GUARDED_BY(signaling_thread_);
+ rtc::AsyncInvoker invoker_ RTC_GUARDED_BY(signaling_thread_);
};
-// Define proxy for DataChannelInterface.
-BEGIN_SIGNALING_PROXY_MAP(DataChannel)
-PROXY_SIGNALING_THREAD_DESTRUCTOR()
-PROXY_METHOD1(void, RegisterObserver, DataChannelObserver*)
-PROXY_METHOD0(void, UnregisterObserver)
-PROXY_CONSTMETHOD0(std::string, label)
-PROXY_CONSTMETHOD0(bool, reliable)
-PROXY_CONSTMETHOD0(bool, ordered)
-PROXY_CONSTMETHOD0(uint16_t, maxRetransmitTime)
-PROXY_CONSTMETHOD0(uint16_t, maxRetransmits)
-PROXY_CONSTMETHOD0(absl::optional<int>, maxRetransmitsOpt)
-PROXY_CONSTMETHOD0(absl::optional<int>, maxPacketLifeTime)
-PROXY_CONSTMETHOD0(std::string, protocol)
-PROXY_CONSTMETHOD0(bool, negotiated)
-PROXY_CONSTMETHOD0(int, id)
-PROXY_CONSTMETHOD0(DataState, state)
-PROXY_CONSTMETHOD0(RTCError, error)
-PROXY_CONSTMETHOD0(uint32_t, messages_sent)
-PROXY_CONSTMETHOD0(uint64_t, bytes_sent)
-PROXY_CONSTMETHOD0(uint32_t, messages_received)
-PROXY_CONSTMETHOD0(uint64_t, bytes_received)
-PROXY_CONSTMETHOD0(uint64_t, buffered_amount)
-PROXY_METHOD0(void, Close)
-PROXY_METHOD1(bool, Send, const DataBuffer&)
-END_PROXY_MAP()
-
} // namespace webrtc
#endif // PC_DATA_CHANNEL_H_
diff --git a/chromium/third_party/webrtc/pc/data_channel_controller.cc b/chromium/third_party/webrtc/pc/data_channel_controller.cc
index e9ea742c44f..710ca8e5317 100644
--- a/chromium/third_party/webrtc/pc/data_channel_controller.cc
+++ b/chromium/third_party/webrtc/pc/data_channel_controller.cc
@@ -25,37 +25,10 @@ bool DataChannelController::HasDataChannels() const {
bool DataChannelController::SendData(const cricket::SendDataParams& params,
const rtc::CopyOnWriteBuffer& payload,
cricket::SendDataResult* result) {
- // RTC_DCHECK_RUN_ON(signaling_thread());
- if (data_channel_transport()) {
- SendDataParams send_params;
- send_params.type = ToWebrtcDataMessageType(params.type);
- send_params.ordered = params.ordered;
- if (params.max_rtx_count >= 0) {
- send_params.max_rtx_count = params.max_rtx_count;
- } else if (params.max_rtx_ms >= 0) {
- send_params.max_rtx_ms = params.max_rtx_ms;
- }
-
- RTCError error = network_thread()->Invoke<RTCError>(
- RTC_FROM_HERE, [this, params, send_params, payload] {
- return data_channel_transport()->SendData(params.sid, send_params,
- payload);
- });
-
- if (error.ok()) {
- *result = cricket::SendDataResult::SDR_SUCCESS;
- return true;
- } else if (error.type() == RTCErrorType::RESOURCE_EXHAUSTED) {
- // SCTP transport uses RESOURCE_EXHAUSTED when it's blocked.
- // TODO(mellem): Stop using RTCError here and get rid of the mapping.
- *result = cricket::SendDataResult::SDR_BLOCK;
- return false;
- }
- *result = cricket::SendDataResult::SDR_ERROR;
- return false;
- } else if (rtp_data_channel()) {
+ if (data_channel_transport())
+ return DataChannelSendData(params, payload, result);
+ if (rtp_data_channel())
return rtp_data_channel()->SendData(params, payload, result);
- }
RTC_LOG(LS_ERROR) << "SendData called before transport is ready";
return false;
}
@@ -146,6 +119,14 @@ void DataChannelController::OnDataReceived(
data_channel_transport_invoker_->AsyncInvoke<void>(
RTC_FROM_HERE, signaling_thread(), [this, params, buffer] {
RTC_DCHECK_RUN_ON(signaling_thread());
+ // TODO(bugs.webrtc.org/11547): The data being received should be
+ // delivered on the network thread. The way HandleOpenMessage_s works
+ // right now is that it's called for all types of buffers and operates
+ // as a selector function. Change this so that it's only called for
+ // buffers that it should be able to handle. Once we do that, we can
+ // deliver all other buffers on the network thread (change
+ // SignalDataChannelTransportReceivedData_s to
+ // SignalDataChannelTransportReceivedData_n).
if (!HandleOpenMessage_s(params, buffer)) {
SignalDataChannelTransportReceivedData_s(params, buffer);
}
@@ -193,6 +174,11 @@ void DataChannelController::OnTransportClosed() {
void DataChannelController::SetupDataChannelTransport_n() {
RTC_DCHECK_RUN_ON(network_thread());
data_channel_transport_invoker_ = std::make_unique<rtc::AsyncInvoker>();
+
+ // There's a new data channel transport. This needs to be signaled to the
+ // |sctp_data_channels_| so that they can reopen and reconnect. This is
+ // necessary when bundling is applied.
+ NotifyDataChannelsOfTransportCreated();
}
void DataChannelController::TeardownDataChannelTransport_n() {
@@ -219,17 +205,21 @@ void DataChannelController::OnTransportChanged(
// There's a new data channel transport. This needs to be signaled to the
// |sctp_data_channels_| so that they can reopen and reconnect. This is
// necessary when bundling is applied.
- data_channel_transport_invoker_->AsyncInvoke<void>(
- RTC_FROM_HERE, signaling_thread(), [this] {
- RTC_DCHECK_RUN_ON(signaling_thread());
- for (const auto& channel : sctp_data_channels_) {
- channel->OnTransportChannelCreated();
- }
- });
+ NotifyDataChannelsOfTransportCreated();
}
}
}
+std::vector<DataChannel::Stats> DataChannelController::GetDataChannelStats()
+ const {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ std::vector<DataChannel::Stats> stats;
+ stats.reserve(sctp_data_channels_.size());
+ for (const auto& channel : sctp_data_channels_)
+ stats.push_back(channel->GetStats());
+ return stats;
+}
+
bool DataChannelController::HandleOpenMessage_s(
const cricket::ReceiveDataParams& params,
const rtc::CopyOnWriteBuffer& buffer) {
@@ -262,7 +252,7 @@ void DataChannelController::OnDataChannelOpenMessage(
}
rtc::scoped_refptr<DataChannelInterface> proxy_channel =
- DataChannelProxy::Create(signaling_thread(), channel);
+ DataChannel::CreateProxy(std::move(channel));
pc_->Observer()->OnDataChannel(std::move(proxy_channel));
pc_->NoteDataAddedEvent();
}
@@ -299,7 +289,8 @@ DataChannelController::InternalCreateDataChannel(
}
rtc::scoped_refptr<DataChannel> channel(
- DataChannel::Create(this, data_channel_type(), label, new_config));
+ DataChannel::Create(this, data_channel_type(), label, new_config,
+ signaling_thread(), network_thread()));
if (!channel) {
sid_allocator_.ReleaseSid(new_config.id);
return nullptr;
@@ -424,9 +415,10 @@ void DataChannelController::UpdateLocalRtpDataChannels(
void DataChannelController::UpdateRemoteRtpDataChannels(
const cricket::StreamParamsVec& streams) {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+
std::vector<std::string> existing_channels;
- RTC_DCHECK_RUN_ON(signaling_thread());
// Find new and active data channels.
for (const cricket::StreamParams& params : streams) {
// The data channel label is either the mslabel or the SSRC if the mslabel
@@ -447,6 +439,38 @@ void DataChannelController::UpdateRemoteRtpDataChannels(
UpdateClosingRtpDataChannels(existing_channels, false);
}
+cricket::DataChannelType DataChannelController::data_channel_type() const {
+ // TODO(bugs.webrtc.org/9987): Should be restricted to the signaling thread.
+ // RTC_DCHECK_RUN_ON(signaling_thread());
+ return data_channel_type_;
+}
+
+void DataChannelController::set_data_channel_type(
+ cricket::DataChannelType type) {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ data_channel_type_ = type;
+}
+
+DataChannelTransportInterface* DataChannelController::data_channel_transport()
+ const {
+ // TODO(bugs.webrtc.org/11547): Only allow this accessor to be called on the
+ // network thread.
+ // RTC_DCHECK_RUN_ON(network_thread());
+ return data_channel_transport_;
+}
+
+void DataChannelController::set_data_channel_transport(
+ DataChannelTransportInterface* transport) {
+ RTC_DCHECK_RUN_ON(network_thread());
+ data_channel_transport_ = transport;
+}
+
+const std::map<std::string, rtc::scoped_refptr<DataChannel>>*
+DataChannelController::rtp_data_channels() const {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ return &rtp_data_channels_;
+}
+
void DataChannelController::UpdateClosingRtpDataChannels(
const std::vector<std::string>& active_channels,
bool is_local_update) {
@@ -484,10 +508,59 @@ void DataChannelController::CreateRemoteRtpDataChannel(const std::string& label,
}
channel->SetReceiveSsrc(remote_ssrc);
rtc::scoped_refptr<DataChannelInterface> proxy_channel =
- DataChannelProxy::Create(signaling_thread(), channel);
+ DataChannel::CreateProxy(std::move(channel));
pc_->Observer()->OnDataChannel(std::move(proxy_channel));
}
+bool DataChannelController::DataChannelSendData(
+ const cricket::SendDataParams& params,
+ const rtc::CopyOnWriteBuffer& payload,
+ cricket::SendDataResult* result) {
+ // TODO(bugs.webrtc.org/11547): Expect method to be called on the network
+ // thread instead. Remove the Invoke() below and move assocated state to
+ // the network thread.
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ RTC_DCHECK(data_channel_transport());
+
+ SendDataParams send_params;
+ send_params.type = ToWebrtcDataMessageType(params.type);
+ send_params.ordered = params.ordered;
+ if (params.max_rtx_count >= 0) {
+ send_params.max_rtx_count = params.max_rtx_count;
+ } else if (params.max_rtx_ms >= 0) {
+ send_params.max_rtx_ms = params.max_rtx_ms;
+ }
+
+ RTCError error = network_thread()->Invoke<RTCError>(
+ RTC_FROM_HERE, [this, params, send_params, payload] {
+ return data_channel_transport()->SendData(params.sid, send_params,
+ payload);
+ });
+
+ if (error.ok()) {
+ *result = cricket::SendDataResult::SDR_SUCCESS;
+ return true;
+ } else if (error.type() == RTCErrorType::RESOURCE_EXHAUSTED) {
+ // SCTP transport uses RESOURCE_EXHAUSTED when it's blocked.
+ // TODO(mellem): Stop using RTCError here and get rid of the mapping.
+ *result = cricket::SendDataResult::SDR_BLOCK;
+ return false;
+ }
+ *result = cricket::SendDataResult::SDR_ERROR;
+ return false;
+}
+
+void DataChannelController::NotifyDataChannelsOfTransportCreated() {
+ RTC_DCHECK_RUN_ON(network_thread());
+ data_channel_transport_invoker_->AsyncInvoke<void>(
+ RTC_FROM_HERE, signaling_thread(), [this] {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ for (const auto& channel : sctp_data_channels_) {
+ channel->OnTransportChannelCreated();
+ }
+ });
+}
+
rtc::Thread* DataChannelController::network_thread() const {
return pc_->network_thread();
}
diff --git a/chromium/third_party/webrtc/pc/data_channel_controller.h b/chromium/third_party/webrtc/pc/data_channel_controller.h
index 60bcbb32a87..c3e64aba953 100644
--- a/chromium/third_party/webrtc/pc/data_channel_controller.h
+++ b/chromium/third_party/webrtc/pc/data_channel_controller.h
@@ -64,6 +64,9 @@ class DataChannelController : public DataChannelProviderInterface,
void OnTransportChanged(
DataChannelTransportInterface* data_channel_transport);
+ // Called from PeerConnection::GetDataChannelStats on the signaling thread.
+ std::vector<DataChannel::Stats> GetDataChannelStats() const;
+
// Creates channel and adds it to the collection of DataChannels that will
// be offered in a SessionDescription.
rtc::scoped_refptr<DataChannel> InternalCreateDataChannel(
@@ -89,34 +92,18 @@ class DataChannelController : public DataChannelProviderInterface,
void UpdateRemoteRtpDataChannels(const cricket::StreamParamsVec& streams);
// Accessors
- cricket::DataChannelType data_channel_type() const {
- return data_channel_type_;
- }
- void set_data_channel_type(cricket::DataChannelType type) {
- data_channel_type_ = type;
- }
+ cricket::DataChannelType data_channel_type() const;
+ void set_data_channel_type(cricket::DataChannelType type);
cricket::RtpDataChannel* rtp_data_channel() const {
return rtp_data_channel_;
}
void set_rtp_data_channel(cricket::RtpDataChannel* channel) {
rtp_data_channel_ = channel;
}
- DataChannelTransportInterface* data_channel_transport() const {
- return data_channel_transport_;
- }
- void set_data_channel_transport(DataChannelTransportInterface* transport) {
- data_channel_transport_ = transport;
- }
+ DataChannelTransportInterface* data_channel_transport() const;
+ void set_data_channel_transport(DataChannelTransportInterface* transport);
const std::map<std::string, rtc::scoped_refptr<DataChannel>>*
- rtp_data_channels() const {
- RTC_DCHECK_RUN_ON(signaling_thread());
- return &rtp_data_channels_;
- }
- const std::vector<rtc::scoped_refptr<DataChannel>>* sctp_data_channels()
- const {
- RTC_DCHECK_RUN_ON(signaling_thread());
- return &sctp_data_channels_;
- }
+ rtp_data_channels() const;
sigslot::signal1<DataChannel*>& SignalDataChannelCreated() {
RTC_DCHECK_RUN_ON(signaling_thread());
@@ -146,6 +133,15 @@ class DataChannelController : public DataChannelProviderInterface,
const std::vector<std::string>& active_channels,
bool is_local_update) RTC_RUN_ON(signaling_thread());
+ // Called from SendData when data_channel_transport() is true.
+ bool DataChannelSendData(const cricket::SendDataParams& params,
+ const rtc::CopyOnWriteBuffer& payload,
+ cricket::SendDataResult* result);
+
+ // Called when all data channels need to be notified of a transport channel
+ // (calls OnTransportChannelCreated on the signaling thread).
+ void NotifyDataChannelsOfTransportCreated();
+
rtc::Thread* network_thread() const;
rtc::Thread* signaling_thread() const;
@@ -189,6 +185,8 @@ class DataChannelController : public DataChannelProviderInterface,
// Signals from |data_channel_transport_|. These are invoked on the
// signaling thread.
+ // TODO(bugs.webrtc.org/11547): These '_s' signals likely all belong on the
+ // network thread.
sigslot::signal1<bool> SignalDataChannelTransportWritable_s
RTC_GUARDED_BY(signaling_thread());
sigslot::signal2<const cricket::ReceiveDataParams&,
diff --git a/chromium/third_party/webrtc/pc/data_channel_unittest.cc b/chromium/third_party/webrtc/pc/data_channel_unittest.cc
index 6bb8f7e5c70..11dfcc4aeed 100644
--- a/chromium/third_party/webrtc/pc/data_channel_unittest.cc
+++ b/chromium/third_party/webrtc/pc/data_channel_unittest.cc
@@ -64,6 +64,7 @@ class FakeDataChannelObserver : public webrtc::DataChannelObserver {
// TODO(deadbeef): The fact that these tests use a fake provider makes them not
// too valuable. Should rewrite using the
// peerconnection_datachannel_unittest.cc infrastructure.
+// TODO(bugs.webrtc.org/11547): Incorporate a dedicated network thread.
class SctpDataChannelTest : public ::testing::Test {
protected:
SctpDataChannelTest()
@@ -71,7 +72,9 @@ class SctpDataChannelTest : public ::testing::Test {
webrtc_data_channel_(DataChannel::Create(provider_.get(),
cricket::DCT_SCTP,
"test",
- init_)) {}
+ init_,
+ rtc::Thread::Current(),
+ rtc::Thread::Current())) {}
void SetChannelReady() {
provider_->set_transport_available(true);
@@ -111,7 +114,8 @@ class StateSignalsListener : public sigslot::has_slots<> {
TEST_F(SctpDataChannelTest, ConnectedToTransportOnCreated) {
provider_->set_transport_available(true);
rtc::scoped_refptr<DataChannel> dc =
- DataChannel::Create(provider_.get(), cricket::DCT_SCTP, "test1", init_);
+ DataChannel::Create(provider_.get(), cricket::DCT_SCTP, "test1", init_,
+ rtc::Thread::Current(), rtc::Thread::Current());
EXPECT_TRUE(provider_->IsConnected(dc.get()));
// The sid is not set yet, so it should not have added the streams.
@@ -305,7 +309,8 @@ TEST_F(SctpDataChannelTest, LateCreatedChannelTransitionToOpen) {
webrtc::InternalDataChannelInit init;
init.id = 1;
rtc::scoped_refptr<DataChannel> dc =
- DataChannel::Create(provider_.get(), cricket::DCT_SCTP, "test1", init);
+ DataChannel::Create(provider_.get(), cricket::DCT_SCTP, "test1", init,
+ rtc::Thread::Current(), rtc::Thread::Current());
EXPECT_EQ(webrtc::DataChannelInterface::kConnecting, dc->state());
EXPECT_TRUE_WAIT(webrtc::DataChannelInterface::kOpen == dc->state(), 1000);
}
@@ -318,7 +323,8 @@ TEST_F(SctpDataChannelTest, SendUnorderedAfterReceivesOpenAck) {
init.id = 1;
init.ordered = false;
rtc::scoped_refptr<DataChannel> dc =
- DataChannel::Create(provider_.get(), cricket::DCT_SCTP, "test1", init);
+ DataChannel::Create(provider_.get(), cricket::DCT_SCTP, "test1", init,
+ rtc::Thread::Current(), rtc::Thread::Current());
EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kOpen, dc->state(), 1000);
@@ -348,7 +354,8 @@ TEST_F(SctpDataChannelTest, SendUnorderedAfterReceiveData) {
init.id = 1;
init.ordered = false;
rtc::scoped_refptr<DataChannel> dc =
- DataChannel::Create(provider_.get(), cricket::DCT_SCTP, "test1", init);
+ DataChannel::Create(provider_.get(), cricket::DCT_SCTP, "test1", init,
+ rtc::Thread::Current(), rtc::Thread::Current());
EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kOpen, dc->state(), 1000);
@@ -449,7 +456,8 @@ TEST_F(SctpDataChannelTest, NoMsgSentIfNegotiatedAndNotFromOpenMsg) {
SetChannelReady();
rtc::scoped_refptr<DataChannel> dc =
- DataChannel::Create(provider_.get(), cricket::DCT_SCTP, "test1", config);
+ DataChannel::Create(provider_.get(), cricket::DCT_SCTP, "test1", config,
+ rtc::Thread::Current(), rtc::Thread::Current());
EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kOpen, dc->state(), 1000);
EXPECT_EQ(0U, provider_->last_send_data_params().ssrc);
@@ -512,7 +520,8 @@ TEST_F(SctpDataChannelTest, OpenAckSentIfCreatedFromOpenMessage) {
SetChannelReady();
rtc::scoped_refptr<DataChannel> dc =
- DataChannel::Create(provider_.get(), cricket::DCT_SCTP, "test1", config);
+ DataChannel::Create(provider_.get(), cricket::DCT_SCTP, "test1", config,
+ rtc::Thread::Current(), rtc::Thread::Current());
EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kOpen, dc->state(), 1000);
@@ -630,9 +639,9 @@ TEST_F(SctpDataChannelTest, TransportDestroyedWhileDataBuffered) {
EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kClosed,
webrtc_data_channel_->state(), kDefaultTimeout);
EXPECT_FALSE(webrtc_data_channel_->error().ok());
- EXPECT_EQ(webrtc::RTCErrorType::NETWORK_ERROR,
+ EXPECT_EQ(webrtc::RTCErrorType::OPERATION_ERROR_WITH_DATA,
webrtc_data_channel_->error().type());
- EXPECT_EQ(webrtc::RTCErrorDetailType::NONE,
+ EXPECT_EQ(webrtc::RTCErrorDetailType::SCTP_FAILURE,
webrtc_data_channel_->error().error_detail());
}
diff --git a/chromium/third_party/webrtc/pc/datagram_rtp_transport.cc b/chromium/third_party/webrtc/pc/datagram_rtp_transport.cc
deleted file mode 100644
index ad1e6dc995b..00000000000
--- a/chromium/third_party/webrtc/pc/datagram_rtp_transport.cc
+++ /dev/null
@@ -1,380 +0,0 @@
-/*
- * Copyright 2018 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "pc/datagram_rtp_transport.h"
-
-#include <algorithm>
-#include <memory>
-#include <utility>
-
-#include "absl/memory/memory.h"
-#include "absl/strings/string_view.h"
-#include "absl/types/optional.h"
-#include "api/array_view.h"
-#include "api/rtc_error.h"
-#include "media/base/rtp_utils.h"
-#include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h"
-#include "modules/rtp_rtcp/source/rtp_packet.h"
-#include "modules/rtp_rtcp/source/rtp_packet_received.h"
-#include "p2p/base/dtls_transport_internal.h"
-#include "p2p/base/packet_transport_internal.h"
-#include "rtc_base/buffer.h"
-#include "rtc_base/checks.h"
-#include "rtc_base/dscp.h"
-#include "rtc_base/logging.h"
-#include "rtc_base/rtc_certificate.h"
-#include "rtc_base/ssl_stream_adapter.h"
-#include "rtc_base/stream.h"
-#include "rtc_base/thread.h"
-#include "system_wrappers/include/field_trial.h"
-
-namespace webrtc {
-
-namespace {
-
-// Field trials.
-// Disable datagram to RTCP feedback translation and enable RTCP feedback loop
-// on top of datagram feedback loop. Note that two
-// feedback loops add unneccesary overhead, so it's preferable to use feedback
-// loop provided by datagram transport and convert datagram ACKs to RTCP ACKs,
-// but enabling RTCP feedback loop may be useful in tests and experiments.
-const char kDisableDatagramToRtcpFeebackTranslationFieldTrial[] =
- "WebRTC-kDisableDatagramToRtcpFeebackTranslation";
-
-} // namespace
-
-// Maximum packet size of RTCP feedback packet for allocation. We re-create RTCP
-// feedback packets when we get ACK notifications from datagram transport. Our
-// rtcp feedback packets contain only 1 ACK, so they are much smaller than 1250.
-constexpr size_t kMaxRtcpFeedbackPacketSize = 1250;
-
-DatagramRtpTransport::DatagramRtpTransport(
- const std::vector<RtpExtension>& rtp_header_extensions,
- cricket::IceTransportInternal* ice_transport,
- DatagramTransportInterface* datagram_transport)
- : ice_transport_(ice_transport),
- datagram_transport_(datagram_transport),
- disable_datagram_to_rtcp_feeback_translation_(field_trial::IsEnabled(
- kDisableDatagramToRtcpFeebackTranslationFieldTrial)) {
- // Save extension map for parsing RTP packets (we only need transport
- // sequence numbers).
- const RtpExtension* transport_sequence_number_extension =
- RtpExtension::FindHeaderExtensionByUri(rtp_header_extensions,
- TransportSequenceNumber::kUri);
-
- if (transport_sequence_number_extension != nullptr) {
- rtp_header_extension_map_.Register<TransportSequenceNumber>(
- transport_sequence_number_extension->id);
- } else {
- RTC_LOG(LS_ERROR) << "Transport sequence numbers are not supported in "
- "datagram transport connection";
- }
-
- RTC_DCHECK(ice_transport_);
- RTC_DCHECK(datagram_transport_);
-
- ice_transport_->SignalNetworkRouteChanged.connect(
- this, &DatagramRtpTransport::OnNetworkRouteChanged);
- // Subscribe to DatagramTransport to read incoming packets.
- datagram_transport_->SetDatagramSink(this);
- datagram_transport_->SetTransportStateCallback(this);
-}
-
-DatagramRtpTransport::~DatagramRtpTransport() {
- // Unsubscribe from DatagramTransport sinks.
- datagram_transport_->SetDatagramSink(nullptr);
- datagram_transport_->SetTransportStateCallback(nullptr);
-}
-
-bool DatagramRtpTransport::SendRtpPacket(rtc::CopyOnWriteBuffer* packet,
- const rtc::PacketOptions& options,
- int flags) {
- RTC_DCHECK_RUN_ON(&thread_checker_);
-
- // Assign and increment datagram_id.
- const DatagramId datagram_id = current_datagram_id_++;
-
- // Send as is (without extracting transport sequence number) for
- // RTP packets if we are not doing datagram => RTCP feedback translation.
- if (disable_datagram_to_rtcp_feeback_translation_) {
- // Even if we are not extracting transport sequence number we need to
- // propagate "Sent" notification for both RTP and RTCP packets. For this
- // reason we need save options.packet_id in packet map.
- sent_rtp_packet_map_[datagram_id] = SentPacketInfo(options.packet_id);
-
- return SendDatagram(*packet, datagram_id);
- }
-
- // Parse RTP packet.
- RtpPacket rtp_packet(&rtp_header_extension_map_);
- // TODO(mellem): Verify that this doesn't mangle something (it shouldn't).
- if (!rtp_packet.Parse(*packet)) {
- RTC_NOTREACHED() << "Failed to parse outgoing RtpPacket, len="
- << packet->size()
- << ", options.packet_id=" << options.packet_id;
- return -1;
- }
-
- // Try to get transport sequence number.
- uint16_t transport_senquence_number;
- if (!rtp_packet.GetExtension<TransportSequenceNumber>(
- &transport_senquence_number)) {
- // Save packet info without transport sequence number.
- sent_rtp_packet_map_[datagram_id] = SentPacketInfo(options.packet_id);
-
- RTC_LOG(LS_VERBOSE)
- << "Sending rtp packet without transport sequence number, packet="
- << rtp_packet.ToString();
-
- return SendDatagram(*packet, datagram_id);
- }
-
- // Save packet info with sequence number and ssrc so we could reconstruct
- // RTCP feedback packet when we receive datagram ACK.
- sent_rtp_packet_map_[datagram_id] = SentPacketInfo(
- options.packet_id, rtp_packet.Ssrc(), transport_senquence_number);
-
- // Since datagram transport provides feedback and timestamps, we do not need
- // to send transport sequence number, so we remove it from RTP packet. Later
- // when we get Ack for sent datagram, we will re-create RTCP feedback packet.
- if (!rtp_packet.RemoveExtension(TransportSequenceNumber::kId)) {
- RTC_NOTREACHED() << "Failed to remove transport sequence number, packet="
- << rtp_packet.ToString();
- return -1;
- }
-
- RTC_LOG(LS_VERBOSE) << "Removed transport_senquence_number="
- << transport_senquence_number
- << " from packet=" << rtp_packet.ToString()
- << ", saved bytes=" << packet->size() - rtp_packet.size();
-
- return SendDatagram(
- rtc::ArrayView<const uint8_t>(rtp_packet.data(), rtp_packet.size()),
- datagram_id);
-}
-
-bool DatagramRtpTransport::SendRtcpPacket(rtc::CopyOnWriteBuffer* packet,
- const rtc::PacketOptions& options,
- int flags) {
- RTC_DCHECK_RUN_ON(&thread_checker_);
-
- // Assign and increment datagram_id.
- const DatagramId datagram_id = current_datagram_id_++;
-
- // Even if we are not extracting transport sequence number we need to
- // propagate "Sent" notification for both RTP and RTCP packets. For this
- // reason we need save options.packet_id in packet map.
- sent_rtp_packet_map_[datagram_id] = SentPacketInfo(options.packet_id);
- return SendDatagram(*packet, datagram_id);
-}
-
-bool DatagramRtpTransport::SendDatagram(rtc::ArrayView<const uint8_t> data,
- DatagramId datagram_id) {
- return datagram_transport_->SendDatagram(data, datagram_id).ok();
-}
-
-void DatagramRtpTransport::OnDatagramReceived(
- rtc::ArrayView<const uint8_t> data) {
- RTC_DCHECK_RUN_ON(&thread_checker_);
-
- rtc::ArrayView<const char> cdata(reinterpret_cast<const char*>(data.data()),
- data.size());
- if (cricket::InferRtpPacketType(cdata) == cricket::RtpPacketType::kRtcp) {
- rtc::CopyOnWriteBuffer buffer(data.data(), data.size());
- SignalRtcpPacketReceived(&buffer, /*packet_time_us=*/-1);
- return;
- }
-
- // TODO(sukhanov): I am not filling out time, but on my video quality
- // test in WebRTC the time was not set either and higher layers of the stack
- // overwrite -1 with current current rtc time. Leaveing comment for now to
- // make sure it works as expected.
- RtpPacketReceived parsed_packet(&rtp_header_extension_map_);
- if (!parsed_packet.Parse(data)) {
- RTC_LOG(LS_ERROR) << "Failed to parse incoming RTP packet";
- return;
- }
- if (!rtp_demuxer_.OnRtpPacket(parsed_packet)) {
- RTC_LOG(LS_WARNING) << "Failed to demux RTP packet: "
- << RtpDemuxer::DescribePacket(parsed_packet);
- }
-}
-
-void DatagramRtpTransport::OnDatagramSent(DatagramId datagram_id) {
- RTC_DCHECK_RUN_ON(&thread_checker_);
-
- // Find packet_id and propagate OnPacketSent notification.
- const auto& it = sent_rtp_packet_map_.find(datagram_id);
- if (it == sent_rtp_packet_map_.end()) {
- RTC_NOTREACHED() << "Did not find sent packet info for sent datagram_id="
- << datagram_id;
- return;
- }
-
- // Also see how DatagramRtpTransport::OnSentPacket handles OnSentPacket
- // notification from ICE in bypass mode.
- rtc::SentPacket sent_packet(/*packet_id=*/it->second.packet_id,
- rtc::TimeMillis());
-
- SignalSentPacket(sent_packet);
-}
-
-bool DatagramRtpTransport::GetAndRemoveSentPacketInfo(
- DatagramId datagram_id,
- SentPacketInfo* sent_packet_info) {
- RTC_CHECK(sent_packet_info != nullptr);
-
- const auto& it = sent_rtp_packet_map_.find(datagram_id);
- if (it == sent_rtp_packet_map_.end()) {
- return false;
- }
-
- *sent_packet_info = it->second;
- sent_rtp_packet_map_.erase(it);
- return true;
-}
-
-void DatagramRtpTransport::OnDatagramAcked(const DatagramAck& ack) {
- RTC_DCHECK_RUN_ON(&thread_checker_);
-
- SentPacketInfo sent_packet_info;
- if (!GetAndRemoveSentPacketInfo(ack.datagram_id, &sent_packet_info)) {
- // TODO(sukhanov): If OnDatagramAck() can come after OnDatagramLost(),
- // datagram_id is already deleted and we may need to relax the CHECK below.
- // It's probably OK to ignore such datagrams, because it's been a few RTTs
- // anyway since they were sent.
- RTC_NOTREACHED() << "Did not find sent packet info for datagram_id="
- << ack.datagram_id;
- return;
- }
-
- RTC_LOG(LS_VERBOSE) << "Datagram acked, ack.datagram_id=" << ack.datagram_id
- << ", sent_packet_info.packet_id="
- << sent_packet_info.packet_id
- << ", sent_packet_info.transport_sequence_number="
- << sent_packet_info.transport_sequence_number.value_or(-1)
- << ", sent_packet_info.ssrc="
- << sent_packet_info.ssrc.value_or(-1)
- << ", receive_timestamp_ms="
- << ack.receive_timestamp.ms();
-
- // If transport sequence number was not present in RTP packet, we do not need
- // to propagate RTCP feedback.
- if (!sent_packet_info.transport_sequence_number) {
- return;
- }
-
- // TODO(sukhanov): We noticed that datagram transport implementations can
- // return zero timestamps in the middle of the call. This is workaround to
- // avoid propagating zero timestamps, but we need to understand why we have
- // them in the first place.
- int64_t receive_timestamp_us = ack.receive_timestamp.us();
-
- if (receive_timestamp_us == 0) {
- receive_timestamp_us = previous_nonzero_timestamp_us_;
- } else {
- previous_nonzero_timestamp_us_ = receive_timestamp_us;
- }
-
- // Ssrc must be provided in packet info if transport sequence number is set,
- // which is guaranteed by SentPacketInfo constructor.
- RTC_CHECK(sent_packet_info.ssrc);
-
- // Recreate RTCP feedback packet.
- rtcp::TransportFeedback feedback_packet;
- feedback_packet.SetMediaSsrc(*sent_packet_info.ssrc);
-
- const uint16_t transport_sequence_number =
- sent_packet_info.transport_sequence_number.value();
-
- feedback_packet.SetBase(transport_sequence_number, receive_timestamp_us);
- feedback_packet.AddReceivedPacket(transport_sequence_number,
- receive_timestamp_us);
-
- rtc::CopyOnWriteBuffer buffer(kMaxRtcpFeedbackPacketSize);
- size_t index = 0;
- if (!feedback_packet.Create(buffer.data(), &index, buffer.capacity(),
- nullptr)) {
- RTC_NOTREACHED() << "Failed to create RTCP feedback packet";
- return;
- }
-
- RTC_CHECK_GT(index, 0);
- RTC_CHECK_LE(index, kMaxRtcpFeedbackPacketSize);
-
- // Propagage created RTCP packet as normal incoming packet.
- buffer.SetSize(index);
- SignalRtcpPacketReceived(&buffer, /*packet_time_us=*/-1);
-}
-
-void DatagramRtpTransport::OnDatagramLost(DatagramId datagram_id) {
- RTC_DCHECK_RUN_ON(&thread_checker_);
-
- RTC_LOG(LS_INFO) << "Datagram lost, datagram_id=" << datagram_id;
-
- SentPacketInfo sent_packet_info;
- if (!GetAndRemoveSentPacketInfo(datagram_id, &sent_packet_info)) {
- RTC_NOTREACHED() << "Did not find sent packet info for lost datagram_id="
- << datagram_id;
- }
-}
-
-void DatagramRtpTransport::OnStateChanged(MediaTransportState state) {
- state_ = state;
- SignalWritableState(state_ == MediaTransportState::kWritable);
- if (state_ == MediaTransportState::kWritable) {
- SignalReadyToSend(true);
- }
-}
-
-const std::string& DatagramRtpTransport::transport_name() const {
- return ice_transport_->transport_name();
-}
-
-int DatagramRtpTransport::SetRtpOption(rtc::Socket::Option opt, int value) {
- return ice_transport_->SetOption(opt, value);
-}
-
-int DatagramRtpTransport::SetRtcpOption(rtc::Socket::Option opt, int value) {
- return -1;
-}
-
-bool DatagramRtpTransport::IsReadyToSend() const {
- return state_ == MediaTransportState::kWritable;
-}
-
-bool DatagramRtpTransport::IsWritable(bool /*rtcp*/) const {
- return state_ == MediaTransportState::kWritable;
-}
-
-void DatagramRtpTransport::UpdateRtpHeaderExtensionMap(
- const cricket::RtpHeaderExtensions& header_extensions) {
- rtp_header_extension_map_ = RtpHeaderExtensionMap(header_extensions);
-}
-
-bool DatagramRtpTransport::RegisterRtpDemuxerSink(
- const RtpDemuxerCriteria& criteria,
- RtpPacketSinkInterface* sink) {
- rtp_demuxer_.RemoveSink(sink);
- return rtp_demuxer_.AddSink(criteria, sink);
-}
-
-bool DatagramRtpTransport::UnregisterRtpDemuxerSink(
- RtpPacketSinkInterface* sink) {
- return rtp_demuxer_.RemoveSink(sink);
-}
-
-void DatagramRtpTransport::OnNetworkRouteChanged(
- absl::optional<rtc::NetworkRoute> network_route) {
- RTC_DCHECK_RUN_ON(&thread_checker_);
- SignalNetworkRouteChanged(network_route);
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/pc/datagram_rtp_transport.h b/chromium/third_party/webrtc/pc/datagram_rtp_transport.h
deleted file mode 100644
index f9684c69c03..00000000000
--- a/chromium/third_party/webrtc/pc/datagram_rtp_transport.h
+++ /dev/null
@@ -1,173 +0,0 @@
-/*
- * Copyright 2019 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef PC_DATAGRAM_RTP_TRANSPORT_H_
-#define PC_DATAGRAM_RTP_TRANSPORT_H_
-
-#include <map>
-#include <memory>
-#include <string>
-#include <vector>
-
-#include "api/crypto/crypto_options.h"
-#include "api/transport/datagram_transport_interface.h"
-#include "api/transport/media/media_transport_interface.h"
-#include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
-#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
-#include "p2p/base/ice_transport_internal.h"
-#include "p2p/base/packet_transport_internal.h"
-#include "pc/rtp_transport_internal.h"
-#include "rtc_base/buffer.h"
-#include "rtc_base/buffer_queue.h"
-#include "rtc_base/constructor_magic.h"
-#include "rtc_base/ssl_stream_adapter.h"
-#include "rtc_base/stream.h"
-#include "rtc_base/strings/string_builder.h"
-#include "rtc_base/thread_checker.h"
-
-namespace webrtc {
-
-constexpr int kDatagramDtlsAdaptorComponent = -1;
-
-// RTP transport which uses the DatagramTransportInterface to send and receive
-// packets.
-class DatagramRtpTransport : public RtpTransportInternal,
- public webrtc::DatagramSinkInterface,
- public webrtc::MediaTransportStateCallback {
- public:
- DatagramRtpTransport(
- const std::vector<webrtc::RtpExtension>& rtp_header_extensions,
- cricket::IceTransportInternal* ice_transport,
- DatagramTransportInterface* datagram_transport);
-
- ~DatagramRtpTransport() override;
-
- // =====================================================
- // Overrides for webrtc::DatagramTransportSinkInterface
- // and MediaTransportStateCallback
- // =====================================================
- void OnDatagramReceived(rtc::ArrayView<const uint8_t> data) override;
-
- void OnDatagramSent(webrtc::DatagramId datagram_id) override;
-
- void OnDatagramAcked(const webrtc::DatagramAck& ack) override;
-
- void OnDatagramLost(webrtc::DatagramId datagram_id) override;
-
- void OnStateChanged(webrtc::MediaTransportState state) override;
-
- // =====================================================
- // RtpTransportInternal overrides
- // =====================================================
- bool SendRtpPacket(rtc::CopyOnWriteBuffer* packet,
- const rtc::PacketOptions& options,
- int flags) override;
-
- bool SendRtcpPacket(rtc::CopyOnWriteBuffer* packet,
- const rtc::PacketOptions& options,
- int flags) override;
-
- const std::string& transport_name() const override;
-
- // Datagram transport always muxes RTCP.
- bool rtcp_mux_enabled() const override { return true; }
- void SetRtcpMuxEnabled(bool enable) override {}
-
- int SetRtpOption(rtc::Socket::Option opt, int value) override;
- int SetRtcpOption(rtc::Socket::Option opt, int value) override;
-
- bool IsReadyToSend() const override;
-
- bool IsWritable(bool rtcp) const override;
-
- bool IsSrtpActive() const override { return false; }
-
- void UpdateRtpHeaderExtensionMap(
- const cricket::RtpHeaderExtensions& header_extensions) override;
-
- bool RegisterRtpDemuxerSink(const RtpDemuxerCriteria& criteria,
- RtpPacketSinkInterface* sink) override;
-
- bool UnregisterRtpDemuxerSink(RtpPacketSinkInterface* sink) override;
-
- private:
- // RTP/RTCP packet info stored for each sent packet.
- struct SentPacketInfo {
- // RTP packet info with ssrc and transport sequence number.
- SentPacketInfo(int64_t packet_id,
- uint32_t ssrc,
- uint16_t transport_sequence_number)
- : ssrc(ssrc),
- transport_sequence_number(transport_sequence_number),
- packet_id(packet_id) {}
-
- // Packet info without SSRC and transport sequence number used for RTCP
- // packets, RTP packets when transport sequence number is not provided or
- // when feedback translation is disabled.
- explicit SentPacketInfo(int64_t packet_id) : packet_id(packet_id) {}
-
- SentPacketInfo() = default;
-
- absl::optional<uint32_t> ssrc;
-
- // Transport sequence number (if it was provided in outgoing RTP packet).
- // It is used to re-create RTCP feedback packets from datagram ACKs.
- absl::optional<uint16_t> transport_sequence_number;
-
- // Packet id from rtc::PacketOptions. It is required to propagage sent
- // notification up the stack (SignalSentPacket).
- int64_t packet_id = 0;
- };
-
- // Finds SentPacketInfo for given |datagram_id| and removes map entry.
- // Returns false if entry was not found.
- bool GetAndRemoveSentPacketInfo(webrtc::DatagramId datagram_id,
- SentPacketInfo* sent_packet_info);
-
- // Sends datagram to datagram_transport.
- bool SendDatagram(rtc::ArrayView<const uint8_t> data,
- webrtc::DatagramId datagram_id);
-
- // Propagates network route changes from ICE.
- void OnNetworkRouteChanged(absl::optional<rtc::NetworkRoute> network_route);
-
- rtc::ThreadChecker thread_checker_;
- cricket::IceTransportInternal* ice_transport_;
- webrtc::DatagramTransportInterface* datagram_transport_;
-
- RtpDemuxer rtp_demuxer_;
-
- MediaTransportState state_ = MediaTransportState::kPending;
-
- // Extension map for parsing transport sequence numbers.
- webrtc::RtpHeaderExtensionMap rtp_header_extension_map_;
-
- // Keeps information about sent RTP packet until they are Acked or Lost.
- std::map<webrtc::DatagramId, SentPacketInfo> sent_rtp_packet_map_;
-
- // Current datagram_id, incremented after each sent RTP packets.
- // Datagram id is passed to datagram transport when we send datagram and we
- // get it back in notifications about Sent, Acked and Lost datagrams.
- int64_t current_datagram_id_ = 0;
-
- // TODO(sukhanov): Previous nonzero timestamp is required for workaround for
- // zero timestamps received, which sometimes are received from datagram
- // transport. Investigate if we can eliminate zero timestamps.
- int64_t previous_nonzero_timestamp_us_ = 0;
-
- // Disable datagram to RTCP feedback translation and enable RTCP feedback
- // loop (note that having both RTCP and datagram feedback loops is
- // inefficient, but can be useful in tests and experiments).
- const bool disable_datagram_to_rtcp_feeback_translation_;
-};
-
-} // namespace webrtc
-
-#endif // PC_DATAGRAM_RTP_TRANSPORT_H_
diff --git a/chromium/third_party/webrtc/pc/jsep_transport.cc b/chromium/third_party/webrtc/pc/jsep_transport.cc
index 5bf74f1e87a..6864dfc45db 100644
--- a/chromium/third_party/webrtc/pc/jsep_transport.cc
+++ b/chromium/third_party/webrtc/pc/jsep_transport.cc
@@ -38,16 +38,12 @@ JsepTransportDescription::JsepTransportDescription(
const std::vector<CryptoParams>& cryptos,
const std::vector<int>& encrypted_header_extension_ids,
int rtp_abs_sendtime_extn_id,
- const TransportDescription& transport_desc,
- absl::optional<std::string> media_alt_protocol,
- absl::optional<std::string> data_alt_protocol)
+ const TransportDescription& transport_desc)
: rtcp_mux_enabled(rtcp_mux_enabled),
cryptos(cryptos),
encrypted_header_extension_ids(encrypted_header_extension_ids),
rtp_abs_sendtime_extn_id(rtp_abs_sendtime_extn_id),
- transport_desc(transport_desc),
- media_alt_protocol(media_alt_protocol),
- data_alt_protocol(data_alt_protocol) {}
+ transport_desc(transport_desc) {}
JsepTransportDescription::JsepTransportDescription(
const JsepTransportDescription& from)
@@ -55,9 +51,7 @@ JsepTransportDescription::JsepTransportDescription(
cryptos(from.cryptos),
encrypted_header_extension_ids(from.encrypted_header_extension_ids),
rtp_abs_sendtime_extn_id(from.rtp_abs_sendtime_extn_id),
- transport_desc(from.transport_desc),
- media_alt_protocol(from.media_alt_protocol),
- data_alt_protocol(from.data_alt_protocol) {}
+ transport_desc(from.transport_desc) {}
JsepTransportDescription::~JsepTransportDescription() = default;
@@ -71,8 +65,6 @@ JsepTransportDescription& JsepTransportDescription::operator=(
encrypted_header_extension_ids = from.encrypted_header_extension_ids;
rtp_abs_sendtime_extn_id = from.rtp_abs_sendtime_extn_id;
transport_desc = from.transport_desc;
- media_alt_protocol = from.media_alt_protocol;
- data_alt_protocol = from.data_alt_protocol;
return *this;
}
@@ -88,9 +80,7 @@ JsepTransport::JsepTransport(
std::unique_ptr<webrtc::RtpTransportInternal> datagram_rtp_transport,
std::unique_ptr<DtlsTransportInternal> rtp_dtls_transport,
std::unique_ptr<DtlsTransportInternal> rtcp_dtls_transport,
- std::unique_ptr<SctpTransportInternal> sctp_transport,
- std::unique_ptr<webrtc::DatagramTransportInterface> datagram_transport,
- webrtc::DataChannelTransportInterface* data_channel_transport)
+ std::unique_ptr<SctpTransportInternal> sctp_transport)
: network_thread_(rtc::Thread::Current()),
mid_(mid),
local_certificate_(local_certificate),
@@ -115,10 +105,7 @@ JsepTransport::JsepTransport(
sctp_transport_(sctp_transport
? new rtc::RefCountedObject<webrtc::SctpTransport>(
std::move(sctp_transport))
- : nullptr),
- datagram_transport_(std::move(datagram_transport)),
- datagram_rtp_transport_(std::move(datagram_rtp_transport)),
- data_channel_transport_(data_channel_transport) {
+ : nullptr) {
RTC_DCHECK(ice_transport_);
RTC_DCHECK(rtp_dtls_transport_);
// |rtcp_ice_transport_| must be present iff |rtcp_dtls_transport_| is
@@ -383,18 +370,6 @@ absl::optional<rtc::SSLRole> JsepTransport::GetDtlsRole() const {
return absl::optional<rtc::SSLRole>(dtls_role);
}
-absl::optional<OpaqueTransportParameters>
-JsepTransport::GetTransportParameters() const {
- rtc::CritScope scope(&accessor_lock_);
- if (!datagram_transport()) {
- return absl::nullopt;
- }
-
- OpaqueTransportParameters params;
- params.parameters = datagram_transport()->GetTransportParameters();
- return params;
-}
-
bool JsepTransport::GetStats(TransportStats* stats) {
RTC_DCHECK_RUN_ON(network_thread_);
rtc::CritScope scope(&accessor_lock_);
@@ -462,7 +437,6 @@ webrtc::RTCError JsepTransport::SetNegotiatedDtlsParameters(
DtlsTransportInternal* dtls_transport,
absl::optional<rtc::SSLRole> dtls_role,
rtc::SSLFingerprint* remote_fingerprint) {
- RTC_DCHECK_RUN_ON(network_thread_);
RTC_DCHECK(dtls_transport);
// Set SSL role. Role must be set before fingerprint is applied, which
// initiates DTLS setup.
@@ -535,7 +509,7 @@ void JsepTransport::ActivateRtcpMux() {
RTC_DCHECK(dtls_srtp_transport_);
RTC_DCHECK(!unencrypted_rtp_transport_);
RTC_DCHECK(!sdes_transport_);
- dtls_srtp_transport_->SetDtlsTransports(rtp_dtls_transport(),
+ dtls_srtp_transport_->SetDtlsTransports(rtp_dtls_transport_locked(),
/*rtcp_dtls_transport=*/nullptr);
}
rtcp_dtls_transport_ = nullptr; // Destroy this reference.
@@ -549,7 +523,6 @@ bool JsepTransport::SetSdes(const std::vector<CryptoParams>& cryptos,
webrtc::SdpType type,
ContentSource source) {
RTC_DCHECK_RUN_ON(network_thread_);
- rtc::CritScope scope(&accessor_lock_);
bool ret = false;
ret = sdes_negotiator_.Process(cryptos, type, source);
if (!ret) {
@@ -734,7 +707,6 @@ webrtc::RTCError JsepTransport::NegotiateDtlsRole(
bool JsepTransport::GetTransportStats(DtlsTransportInternal* dtls_transport,
TransportStats* stats) {
RTC_DCHECK_RUN_ON(network_thread_);
- rtc::CritScope scope(&accessor_lock_);
RTC_DCHECK(dtls_transport);
TransportChannelStats substats;
if (rtcp_dtls_transport_) {
@@ -756,106 +728,10 @@ bool JsepTransport::GetTransportStats(DtlsTransportInternal* dtls_transport,
return true;
}
+// TODO(nisse): Delete.
void JsepTransport::NegotiateDatagramTransport(SdpType type) {
RTC_DCHECK(type == SdpType::kAnswer || type == SdpType::kPrAnswer);
- rtc::CritScope lock(&accessor_lock_);
- if (!datagram_transport_) {
- return; // No need to negotiate the use of datagram transport.
- }
-
- bool compatible_datagram_transport = false;
- if (datagram_transport_ &&
- local_description_->transport_desc.opaque_parameters &&
- remote_description_->transport_desc.opaque_parameters) {
- // If both descriptions have datagram transport parameters, and the remote
- // parameters are accepted by the datagram transport, then use the datagram
- // transport. Otherwise, fall back to RTP.
- compatible_datagram_transport =
- datagram_transport_
- ->SetRemoteTransportParameters(remote_description_->transport_desc
- .opaque_parameters->parameters)
- .ok();
- }
-
- bool use_datagram_transport_for_media =
- compatible_datagram_transport &&
- remote_description_->media_alt_protocol ==
- remote_description_->transport_desc.opaque_parameters->protocol &&
- remote_description_->media_alt_protocol ==
- local_description_->media_alt_protocol;
-
- bool use_datagram_transport_for_data =
- compatible_datagram_transport &&
- remote_description_->data_alt_protocol ==
- remote_description_->transport_desc.opaque_parameters->protocol &&
- remote_description_->data_alt_protocol ==
- local_description_->data_alt_protocol;
-
- RTC_LOG(LS_INFO)
- << "Negotiating datagram transport, use_datagram_transport_for_media="
- << use_datagram_transport_for_media
- << ", use_datagram_transport_for_data=" << use_datagram_transport_for_data
- << " answer type=" << (type == SdpType::kAnswer ? "answer" : "pr_answer");
-
- // A provisional or full or answer lets the peer start sending on one of the
- // transports.
- if (composite_rtp_transport_) {
- composite_rtp_transport_->SetSendTransport(
- use_datagram_transport_for_media ? datagram_rtp_transport_.get()
- : default_rtp_transport());
- }
- if (composite_data_channel_transport_) {
- composite_data_channel_transport_->SetSendTransport(
- use_datagram_transport_for_data ? data_channel_transport_
- : sctp_data_channel_transport_.get());
- }
-
- if (type != SdpType::kAnswer) {
- return;
- }
-
- if (composite_rtp_transport_) {
- if (use_datagram_transport_for_media) {
- // Negotiated use of datagram transport for RTP, so remove the
- // non-datagram RTP transport.
- composite_rtp_transport_->RemoveTransport(default_rtp_transport());
- if (unencrypted_rtp_transport_) {
- unencrypted_rtp_transport_ = nullptr;
- } else if (sdes_transport_) {
- sdes_transport_ = nullptr;
- } else {
- dtls_srtp_transport_ = nullptr;
- }
- } else {
- composite_rtp_transport_->RemoveTransport(datagram_rtp_transport_.get());
- datagram_rtp_transport_ = nullptr;
- }
- }
-
- if (composite_data_channel_transport_) {
- if (use_datagram_transport_for_data) {
- // Negotiated use of datagram transport for data channels, so remove the
- // non-datagram data channel transport.
- composite_data_channel_transport_->RemoveTransport(
- sctp_data_channel_transport_.get());
- sctp_data_channel_transport_ = nullptr;
- sctp_transport_ = nullptr;
- } else {
- composite_data_channel_transport_->RemoveTransport(
- data_channel_transport_);
- data_channel_transport_ = nullptr;
- }
- } else if (data_channel_transport_ && !use_datagram_transport_for_data) {
- // The datagram transport has been rejected without a fallback. We still
- // need to inform the application and delete it.
- SignalDataChannelTransportNegotiated(this, nullptr);
- data_channel_transport_ = nullptr;
- }
-
- if (!use_datagram_transport_for_media && !use_datagram_transport_for_data) {
- // Datagram transport is not being used for anything, so clean it up.
- datagram_transport_ = nullptr;
- }
+ return; // No need to negotiate the use of datagram transport.
}
} // namespace cricket
diff --git a/chromium/third_party/webrtc/pc/jsep_transport.h b/chromium/third_party/webrtc/pc/jsep_transport.h
index 6d88deff076..27c30a5202c 100644
--- a/chromium/third_party/webrtc/pc/jsep_transport.h
+++ b/chromium/third_party/webrtc/pc/jsep_transport.h
@@ -20,7 +20,6 @@
#include "api/candidate.h"
#include "api/ice_transport_interface.h"
#include "api/jsep.h"
-#include "api/transport/datagram_transport_interface.h"
#include "media/sctp/sctp_transport_internal.h"
#include "p2p/base/dtls_transport.h"
#include "p2p/base/p2p_constants.h"
@@ -54,9 +53,7 @@ struct JsepTransportDescription {
const std::vector<CryptoParams>& cryptos,
const std::vector<int>& encrypted_header_extension_ids,
int rtp_abs_sendtime_extn_id,
- const TransportDescription& transport_description,
- absl::optional<std::string> media_alt_protocol,
- absl::optional<std::string> data_alt_protocol);
+ const TransportDescription& transport_description);
JsepTransportDescription(const JsepTransportDescription& from);
~JsepTransportDescription();
@@ -69,14 +66,6 @@ struct JsepTransportDescription {
// TODO(zhihuang): Add the ICE and DTLS related variables and methods from
// TransportDescription and remove this extra layer of abstraction.
TransportDescription transport_desc;
-
- // Alt-protocols that apply to this JsepTransport. Presence indicates a
- // request to use an alternative protocol for media and/or data. The
- // alt-protocol is handled by a datagram transport. If one or both of these
- // values are present, JsepTransport will attempt to negotiate use of the
- // datagram transport for media and/or data.
- absl::optional<std::string> media_alt_protocol;
- absl::optional<std::string> data_alt_protocol;
};
// Helper class used by JsepTransportController that processes
@@ -103,9 +92,7 @@ class JsepTransport : public sigslot::has_slots<> {
std::unique_ptr<webrtc::RtpTransportInternal> datagram_rtp_transport,
std::unique_ptr<DtlsTransportInternal> rtp_dtls_transport,
std::unique_ptr<DtlsTransportInternal> rtcp_dtls_transport,
- std::unique_ptr<SctpTransportInternal> sctp_transport,
- std::unique_ptr<webrtc::DatagramTransportInterface> datagram_transport,
- webrtc::DataChannelTransportInterface* data_channel_transport);
+ std::unique_ptr<SctpTransportInternal> sctp_transport);
~JsepTransport() override;
@@ -128,14 +115,15 @@ class JsepTransport : public sigslot::has_slots<> {
webrtc::RTCError SetLocalJsepTransportDescription(
const JsepTransportDescription& jsep_description,
- webrtc::SdpType type);
+ webrtc::SdpType type) RTC_LOCKS_EXCLUDED(accessor_lock_);
// Set the remote TransportDescription to be used by DTLS and ICE channels
// that are part of this Transport.
webrtc::RTCError SetRemoteJsepTransportDescription(
const JsepTransportDescription& jsep_description,
- webrtc::SdpType type);
- webrtc::RTCError AddRemoteCandidates(const Candidates& candidates);
+ webrtc::SdpType type) RTC_LOCKS_EXCLUDED(accessor_lock_);
+ webrtc::RTCError AddRemoteCandidates(const Candidates& candidates)
+ RTC_LOCKS_EXCLUDED(accessor_lock_);
// Set the "needs-ice-restart" flag as described in JSEP. After the flag is
// set, offers should generate new ufrags/passwords until an ICE restart
@@ -143,23 +131,22 @@ class JsepTransport : public sigslot::has_slots<> {
//
// This and the below method can be called safely from any thread as long as
// SetXTransportDescription is not in progress.
- void SetNeedsIceRestartFlag();
+ void SetNeedsIceRestartFlag() RTC_LOCKS_EXCLUDED(accessor_lock_);
// Returns true if the ICE restart flag above was set, and no ICE restart has
// occurred yet for this transport (by applying a local description with
// changed ufrag/password).
- bool needs_ice_restart() const {
+ bool needs_ice_restart() const RTC_LOCKS_EXCLUDED(accessor_lock_) {
rtc::CritScope scope(&accessor_lock_);
return needs_ice_restart_;
}
// Returns role if negotiated, or empty absl::optional if it hasn't been
// negotiated yet.
- absl::optional<rtc::SSLRole> GetDtlsRole() const;
-
- absl::optional<OpaqueTransportParameters> GetTransportParameters() const;
+ absl::optional<rtc::SSLRole> GetDtlsRole() const
+ RTC_LOCKS_EXCLUDED(accessor_lock_);
// TODO(deadbeef): Make this const. See comment in transportcontroller.h.
- bool GetStats(TransportStats* stats);
+ bool GetStats(TransportStats* stats) RTC_LOCKS_EXCLUDED(accessor_lock_);
const JsepTransportDescription* local_description() const {
RTC_DCHECK_RUN_ON(network_thread_);
@@ -171,7 +158,8 @@ class JsepTransport : public sigslot::has_slots<> {
return remote_description_.get();
}
- webrtc::RtpTransportInternal* rtp_transport() const {
+ webrtc::RtpTransportInternal* rtp_transport() const
+ RTC_LOCKS_EXCLUDED(accessor_lock_) {
rtc::CritScope scope(&accessor_lock_);
if (composite_rtp_transport_) {
return composite_rtp_transport_.get();
@@ -182,7 +170,8 @@ class JsepTransport : public sigslot::has_slots<> {
}
}
- const DtlsTransportInternal* rtp_dtls_transport() const {
+ const DtlsTransportInternal* rtp_dtls_transport() const
+ RTC_LOCKS_EXCLUDED(accessor_lock_) {
rtc::CritScope scope(&accessor_lock_);
if (rtp_dtls_transport_) {
return rtp_dtls_transport_->internal();
@@ -191,16 +180,14 @@ class JsepTransport : public sigslot::has_slots<> {
}
}
- DtlsTransportInternal* rtp_dtls_transport() {
+ DtlsTransportInternal* rtp_dtls_transport()
+ RTC_LOCKS_EXCLUDED(accessor_lock_) {
rtc::CritScope scope(&accessor_lock_);
- if (rtp_dtls_transport_) {
- return rtp_dtls_transport_->internal();
- } else {
- return nullptr;
- }
+ return rtp_dtls_transport_locked();
}
- const DtlsTransportInternal* rtcp_dtls_transport() const {
+ const DtlsTransportInternal* rtcp_dtls_transport() const
+ RTC_LOCKS_EXCLUDED(accessor_lock_) {
rtc::CritScope scope(&accessor_lock_);
if (rtcp_dtls_transport_) {
return rtcp_dtls_transport_->internal();
@@ -209,7 +196,8 @@ class JsepTransport : public sigslot::has_slots<> {
}
}
- DtlsTransportInternal* rtcp_dtls_transport() {
+ DtlsTransportInternal* rtcp_dtls_transport()
+ RTC_LOCKS_EXCLUDED(accessor_lock_) {
rtc::CritScope scope(&accessor_lock_);
if (rtcp_dtls_transport_) {
return rtcp_dtls_transport_->internal();
@@ -218,17 +206,20 @@ class JsepTransport : public sigslot::has_slots<> {
}
}
- rtc::scoped_refptr<webrtc::DtlsTransport> RtpDtlsTransport() {
+ rtc::scoped_refptr<webrtc::DtlsTransport> RtpDtlsTransport()
+ RTC_LOCKS_EXCLUDED(accessor_lock_) {
rtc::CritScope scope(&accessor_lock_);
return rtp_dtls_transport_;
}
- rtc::scoped_refptr<webrtc::SctpTransport> SctpTransport() const {
+ rtc::scoped_refptr<webrtc::SctpTransport> SctpTransport() const
+ RTC_LOCKS_EXCLUDED(accessor_lock_) {
rtc::CritScope scope(&accessor_lock_);
return sctp_transport_;
}
- webrtc::DataChannelTransportInterface* data_channel_transport() const {
+ webrtc::DataChannelTransportInterface* data_channel_transport() const
+ RTC_LOCKS_EXCLUDED(accessor_lock_) {
rtc::CritScope scope(&accessor_lock_);
if (composite_data_channel_transport_) {
return composite_data_channel_transport_.get();
@@ -238,12 +229,6 @@ class JsepTransport : public sigslot::has_slots<> {
return data_channel_transport_;
}
- // Returns datagram transport, if available.
- webrtc::DatagramTransportInterface* datagram_transport() const {
- rtc::CritScope scope(&accessor_lock_);
- return datagram_transport_.get();
- }
-
// This is signaled when RTCP-mux becomes active and
// |rtcp_dtls_transport_| is destroyed. The JsepTransportController will
// handle the signal and update the aggregate transport states.
@@ -271,6 +256,15 @@ class JsepTransport : public sigslot::has_slots<> {
void SetActiveResetSrtpParams(bool active_reset_srtp_params);
private:
+ DtlsTransportInternal* rtp_dtls_transport_locked()
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(accessor_lock_) {
+ if (rtp_dtls_transport_) {
+ return rtp_dtls_transport_->internal();
+ } else {
+ return nullptr;
+ }
+ }
+
bool SetRtcpMux(bool enable, webrtc::SdpType type, ContentSource source);
void ActivateRtcpMux();
@@ -278,7 +272,8 @@ class JsepTransport : public sigslot::has_slots<> {
bool SetSdes(const std::vector<CryptoParams>& cryptos,
const std::vector<int>& encrypted_extension_ids,
webrtc::SdpType type,
- ContentSource source);
+ ContentSource source)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(accessor_lock_);
// Negotiates and sets the DTLS parameters based on the current local and
// remote transport description, such as the DTLS role to use, and whether
@@ -295,26 +290,28 @@ class JsepTransport : public sigslot::has_slots<> {
webrtc::SdpType local_description_type,
ConnectionRole local_connection_role,
ConnectionRole remote_connection_role,
- absl::optional<rtc::SSLRole>* negotiated_dtls_role);
+ absl::optional<rtc::SSLRole>* negotiated_dtls_role)
+ RTC_LOCKS_EXCLUDED(accessor_lock_);
// Pushes down the ICE parameters from the remote description.
void SetRemoteIceParameters(const IceParameters& ice_parameters,
IceTransportInternal* ice);
// Pushes down the DTLS parameters obtained via negotiation.
- webrtc::RTCError SetNegotiatedDtlsParameters(
+ static webrtc::RTCError SetNegotiatedDtlsParameters(
DtlsTransportInternal* dtls_transport,
absl::optional<rtc::SSLRole> dtls_role,
rtc::SSLFingerprint* remote_fingerprint);
bool GetTransportStats(DtlsTransportInternal* dtls_transport,
- TransportStats* stats);
+ TransportStats* stats)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(accessor_lock_);
// Deactivates, signals removal, and deletes |composite_rtp_transport_| if the
// current state of negotiation is sufficient to determine which rtp_transport
// and data channel transport to use.
void NegotiateDatagramTransport(webrtc::SdpType type)
- RTC_RUN_ON(network_thread_);
+ RTC_RUN_ON(network_thread_) RTC_LOCKS_EXCLUDED(accessor_lock_);
// Returns the default (non-datagram) rtp transport, if any.
webrtc::RtpTransportInternal* default_rtp_transport() const
@@ -387,10 +384,6 @@ class JsepTransport : public sigslot::has_slots<> {
absl::optional<std::vector<int>> recv_extension_ids_
RTC_GUARDED_BY(network_thread_);
- // Optional datagram transport (experimental).
- std::unique_ptr<webrtc::DatagramTransportInterface> datagram_transport_
- RTC_GUARDED_BY(accessor_lock_);
-
std::unique_ptr<webrtc::RtpTransportInternal> datagram_rtp_transport_
RTC_GUARDED_BY(accessor_lock_);
diff --git a/chromium/third_party/webrtc/pc/jsep_transport_controller.cc b/chromium/third_party/webrtc/pc/jsep_transport_controller.cc
index a7e1b876fe0..dbe7435c020 100644
--- a/chromium/third_party/webrtc/pc/jsep_transport_controller.cc
+++ b/chromium/third_party/webrtc/pc/jsep_transport_controller.cc
@@ -15,11 +15,8 @@
#include "absl/algorithm/container.h"
#include "api/ice_transport_factory.h"
-#include "api/transport/datagram_transport_interface.h"
-#include "api/transport/media/media_transport_interface.h"
#include "p2p/base/ice_transport_internal.h"
#include "p2p/base/port.h"
-#include "pc/datagram_rtp_transport.h"
#include "pc/srtp_filter.h"
#include "rtc_base/bind.h"
#include "rtc_base/checks.h"
@@ -140,26 +137,6 @@ RtpTransportInternal* JsepTransportController::GetRtpTransport(
return jsep_transport->rtp_transport();
}
-MediaTransportConfig JsepTransportController::GetMediaTransportConfig(
- const std::string& mid) const {
- auto jsep_transport = GetJsepTransportForMid(mid);
- if (!jsep_transport) {
- return MediaTransportConfig();
- }
-
- DatagramTransportInterface* datagram_transport = nullptr;
- if (config_.use_datagram_transport) {
- datagram_transport = jsep_transport->datagram_transport();
- }
-
- if (datagram_transport) {
- return MediaTransportConfig(
- /*rtp_max_packet_size=*/datagram_transport->GetLargestDatagramSize());
- } else {
- return MediaTransportConfig();
- }
-}
-
DataChannelTransportInterface* JsepTransportController::GetDataChannelTransport(
const std::string& mid) const {
auto jsep_transport = GetJsepTransportForMid(mid);
@@ -425,17 +402,6 @@ void JsepTransportController::SetActiveResetSrtpParams(
}
}
-void JsepTransportController::SetMediaTransportSettings(
- bool use_datagram_transport,
- bool use_datagram_transport_for_data_channels,
- bool use_datagram_transport_for_data_channels_receive_only) {
- config_.use_datagram_transport = use_datagram_transport;
- config_.use_datagram_transport_for_data_channels =
- use_datagram_transport_for_data_channels;
- config_.use_datagram_transport_for_data_channels_receive_only =
- use_datagram_transport_for_data_channels_receive_only;
-}
-
void JsepTransportController::RollbackTransports() {
if (!network_thread_->IsCurrent()) {
network_thread_->Invoke<void>(RTC_FROM_HERE, [=] { RollbackTransports(); });
@@ -468,16 +434,12 @@ JsepTransportController::CreateIceTransport(const std::string& transport_name,
std::unique_ptr<cricket::DtlsTransportInternal>
JsepTransportController::CreateDtlsTransport(
const cricket::ContentInfo& content_info,
- cricket::IceTransportInternal* ice,
- DatagramTransportInterface* datagram_transport) {
+ cricket::IceTransportInternal* ice) {
RTC_DCHECK(network_thread_->IsCurrent());
std::unique_ptr<cricket::DtlsTransportInternal> dtls;
- if (datagram_transport) {
- RTC_DCHECK(config_.use_datagram_transport ||
- config_.use_datagram_transport_for_data_channels);
- } else if (config_.dtls_transport_factory) {
+ if (config_.dtls_transport_factory) {
dtls = config_.dtls_transport_factory->CreateDtlsTransport(
ice, config_.crypto_options);
} else {
@@ -614,16 +576,9 @@ RTCError JsepTransportController::ApplyDescription_n(
}
std::vector<int> merged_encrypted_extension_ids;
- absl::optional<std::string> bundle_media_alt_protocol;
- absl::optional<std::string> bundle_data_alt_protocol;
if (bundle_group_) {
merged_encrypted_extension_ids =
MergeEncryptedHeaderExtensionIdsForBundle(description);
- error = GetAltProtocolsForBundle(description, &bundle_media_alt_protocol,
- &bundle_data_alt_protocol);
- if (!error.ok()) {
- return error;
- }
}
for (const cricket::ContentInfo& content_info : description->contents()) {
@@ -642,8 +597,6 @@ RTCError JsepTransportController::ApplyDescription_n(
description->transport_infos().size());
for (size_t i = 0; i < description->contents().size(); ++i) {
const cricket::ContentInfo& content_info = description->contents()[i];
- const cricket::MediaContentDescription* media_description =
- content_info.media_description();
const cricket::TransportInfo& transport_info =
description->transport_infos()[i];
if (content_info.rejected) {
@@ -654,7 +607,8 @@ RTCError JsepTransportController::ApplyDescription_n(
if (IsBundled(content_info.name) && content_info.name != *bundled_mid()) {
if (!HandleBundledContent(content_info)) {
return RTCError(RTCErrorType::INVALID_PARAMETER,
- "Failed to process the bundled m= section.");
+ "Failed to process the bundled m= section with mid='" +
+ content_info.name + "'.");
}
continue;
}
@@ -665,23 +619,10 @@ RTCError JsepTransportController::ApplyDescription_n(
}
std::vector<int> extension_ids;
- absl::optional<std::string> media_alt_protocol;
- absl::optional<std::string> data_alt_protocol;
if (bundled_mid() && content_info.name == *bundled_mid()) {
extension_ids = merged_encrypted_extension_ids;
- media_alt_protocol = bundle_media_alt_protocol;
- data_alt_protocol = bundle_data_alt_protocol;
} else {
extension_ids = GetEncryptedHeaderExtensionIds(content_info);
- switch (media_description->type()) {
- case cricket::MEDIA_TYPE_AUDIO:
- case cricket::MEDIA_TYPE_VIDEO:
- media_alt_protocol = media_description->alt_protocol();
- break;
- case cricket::MEDIA_TYPE_DATA:
- data_alt_protocol = media_description->alt_protocol();
- break;
- }
}
int rtp_abs_sendtime_extn_id =
@@ -695,8 +636,7 @@ RTCError JsepTransportController::ApplyDescription_n(
cricket::JsepTransportDescription jsep_description =
CreateJsepTransportDescription(content_info, transport_info,
- extension_ids, rtp_abs_sendtime_extn_id,
- media_alt_protocol, data_alt_protocol);
+ extension_ids, rtp_abs_sendtime_extn_id);
if (local) {
error =
transport->SetLocalJsepTransportDescription(jsep_description, type);
@@ -706,9 +646,10 @@ RTCError JsepTransportController::ApplyDescription_n(
}
if (!error.ok()) {
- LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
- "Failed to apply the description for " +
- content_info.name + ": " + error.message());
+ LOG_AND_RETURN_ERROR(
+ RTCErrorType::INVALID_PARAMETER,
+ "Failed to apply the description for m= section with mid='" +
+ content_info.name + "': " + error.message());
}
}
if (type == SdpType::kAnswer) {
@@ -727,11 +668,11 @@ RTCError JsepTransportController::ValidateAndMaybeUpdateBundleGroup(
// The BUNDLE group containing a MID that no m= section has is invalid.
if (new_bundle_group) {
- for (const auto& content_name : new_bundle_group->content_names()) {
+ for (const std::string& content_name : new_bundle_group->content_names()) {
if (!description->GetContentByName(content_name)) {
return RTCError(RTCErrorType::INVALID_PARAMETER,
- "The BUNDLE group contains MID:" + content_name +
- " matching no m= section.");
+ "The BUNDLE group contains MID='" + content_name +
+ "' matching no m= section.");
}
}
}
@@ -743,18 +684,21 @@ RTCError JsepTransportController::ValidateAndMaybeUpdateBundleGroup(
if (new_bundle_group) {
// The BUNDLE group in answer should be a subset of offered group.
- for (const auto& content_name : new_bundle_group->content_names()) {
+ for (const std::string& content_name :
+ new_bundle_group->content_names()) {
if (!offered_bundle_group ||
!offered_bundle_group->HasContentName(content_name)) {
return RTCError(RTCErrorType::INVALID_PARAMETER,
- "The BUNDLE group in answer contains a MID that was "
- "not in the offered group.");
+ "The BUNDLE group in answer contains a MID='" +
+ content_name +
+ "' that was "
+ "not in the offered group.");
}
}
}
if (bundle_group_) {
- for (const auto& content_name : bundle_group_->content_names()) {
+ for (const std::string& content_name : bundle_group_->content_names()) {
// An answer that removes m= sections from pre-negotiated BUNDLE group
// without rejecting it, is invalid.
if (!new_bundle_group ||
@@ -762,8 +706,9 @@ RTCError JsepTransportController::ValidateAndMaybeUpdateBundleGroup(
auto* content_info = description->GetContentByName(content_name);
if (!content_info || !content_info->rejected) {
return RTCError(RTCErrorType::INVALID_PARAMETER,
- "Answer cannot remove m= section " + content_name +
- " from already-established BUNDLE group.");
+ "Answer cannot remove m= section with mid='" +
+ content_name +
+ "' from already-established BUNDLE group.");
}
}
}
@@ -798,9 +743,9 @@ RTCError JsepTransportController::ValidateAndMaybeUpdateBundleGroup(
for (const auto& content_name : bundle_group_->content_names()) {
auto other_content = description->GetContentByName(content_name);
if (!other_content->rejected) {
- return RTCError(
- RTCErrorType::INVALID_PARAMETER,
- "The m= section:" + content_name + " should be rejected.");
+ return RTCError(RTCErrorType::INVALID_PARAMETER,
+ "The m= section with mid='" + content_name +
+ "' should be rejected.");
}
}
}
@@ -815,8 +760,8 @@ RTCError JsepTransportController::ValidateContent(
content_info.type == cricket::MediaProtocolType::kRtp &&
!content_info.media_description()->rtcp_mux()) {
return RTCError(RTCErrorType::INVALID_PARAMETER,
- "The m= section:" + content_info.name +
- " is invalid. RTCP-MUX is not "
+ "The m= section with mid='" + content_info.name +
+ "' is invalid. RTCP-MUX is not "
"enabled when it is required.");
}
return RTCError::OK();
@@ -892,9 +837,7 @@ JsepTransportController::CreateJsepTransportDescription(
const cricket::ContentInfo& content_info,
const cricket::TransportInfo& transport_info,
const std::vector<int>& encrypted_extension_ids,
- int rtp_abs_sendtime_extn_id,
- absl::optional<std::string> media_alt_protocol,
- absl::optional<std::string> data_alt_protocol) {
+ int rtp_abs_sendtime_extn_id) {
const cricket::MediaContentDescription* content_desc =
content_info.media_description();
RTC_DCHECK(content_desc);
@@ -904,8 +847,7 @@ JsepTransportController::CreateJsepTransportDescription(
return cricket::JsepTransportDescription(
rtcp_mux_enabled, content_desc->cryptos(), encrypted_extension_ids,
- rtp_abs_sendtime_extn_id, transport_info.description, media_alt_protocol,
- data_alt_protocol);
+ rtp_abs_sendtime_extn_id, transport_info.description);
}
bool JsepTransportController::ShouldUpdateBundleGroup(
@@ -971,55 +913,6 @@ JsepTransportController::MergeEncryptedHeaderExtensionIdsForBundle(
return merged_ids;
}
-RTCError JsepTransportController::GetAltProtocolsForBundle(
- const cricket::SessionDescription* description,
- absl::optional<std::string>* media_alt_protocol,
- absl::optional<std::string>* data_alt_protocol) {
- RTC_DCHECK(description);
- RTC_DCHECK(bundle_group_);
- RTC_DCHECK(media_alt_protocol);
- RTC_DCHECK(data_alt_protocol);
-
- bool found_media = false;
- bool found_data = false;
- for (const cricket::ContentInfo& content : description->contents()) {
- if (bundle_group_->HasContentName(content.name)) {
- const cricket::MediaContentDescription* media_description =
- content.media_description();
- switch (media_description->type()) {
- case cricket::MEDIA_TYPE_AUDIO:
- case cricket::MEDIA_TYPE_VIDEO:
- if (found_media &&
- *media_alt_protocol != media_description->alt_protocol()) {
- return RTCError(RTCErrorType::INVALID_PARAMETER,
- "The BUNDLE group contains conflicting "
- "alt-protocols for media ('" +
- media_alt_protocol->value_or("") + "' and '" +
- media_description->alt_protocol().value_or("") +
- "')");
- }
- found_media = true;
- *media_alt_protocol = media_description->alt_protocol();
- break;
- case cricket::MEDIA_TYPE_DATA:
- if (found_data &&
- *data_alt_protocol != media_description->alt_protocol()) {
- return RTCError(RTCErrorType::INVALID_PARAMETER,
- "The BUNDLE group contains conflicting "
- "alt-protocols for data ('" +
- data_alt_protocol->value_or("") + "' and '" +
- media_description->alt_protocol().value_or("") +
- "')");
- }
- found_data = true;
- *data_alt_protocol = media_description->alt_protocol();
- break;
- }
- }
- }
- return RTCError::OK();
-}
-
int JsepTransportController::GetRtpAbsSendTimeHeaderExtensionId(
const cricket::ContentInfo& content_info) {
if (!config_.enable_external_auth) {
@@ -1060,83 +953,6 @@ cricket::JsepTransport* JsepTransportController::GetJsepTransportByName(
return (it == jsep_transports_by_name_.end()) ? nullptr : it->second.get();
}
-// TODO(sukhanov): Refactor to avoid code duplication for Media and Datagram
-// transports setup.
-std::unique_ptr<webrtc::DatagramTransportInterface>
-JsepTransportController::MaybeCreateDatagramTransport(
- const cricket::ContentInfo& content_info,
- const cricket::SessionDescription& description,
- bool local) {
- if (config_.media_transport_factory == nullptr) {
- return nullptr;
- }
-
- if (!(config_.use_datagram_transport ||
- config_.use_datagram_transport_for_data_channels)) {
- return nullptr;
- }
-
- // Caller (offerer) datagram transport.
- if (offer_datagram_transport_) {
- RTC_DCHECK(local);
- RTC_LOG(LS_INFO) << "Offered datagram transport has now been activated.";
- return std::move(offer_datagram_transport_);
- }
-
- const cricket::TransportDescription* transport_description =
- description.GetTransportDescriptionByName(content_info.mid());
- RTC_DCHECK(transport_description)
- << "Missing transport description for mid=" << content_info.mid();
-
- if (!transport_description->opaque_parameters) {
- RTC_LOG(LS_INFO)
- << "No opaque transport parameters, not creating datagram transport";
- return nullptr;
- }
-
- if (transport_description->opaque_parameters->protocol !=
- config_.media_transport_factory->GetTransportName()) {
- RTC_LOG(LS_INFO) << "Opaque transport parameters for protocol="
- << transport_description->opaque_parameters->protocol
- << ", which does not match supported protocol="
- << config_.media_transport_factory->GetTransportName();
- return nullptr;
- }
-
- RTC_DCHECK(!local);
- // When bundle is enabled, two JsepTransports are created, and then
- // the second transport is destroyed (right away).
- // For datagram transport, we don't want to create the second
- // datagram transport in the first place.
- RTC_LOG(LS_INFO) << "Returning new, client datagram transport.";
-
- MediaTransportSettings settings;
- settings.is_caller = local;
- settings.remote_transport_parameters =
- transport_description->opaque_parameters->parameters;
- settings.event_log = config_.event_log;
-
- auto datagram_transport_result =
- config_.media_transport_factory->CreateDatagramTransport(network_thread_,
- settings);
-
- if (!datagram_transport_result.ok()) {
- // Datagram transport negotiation will fail and we'll fall back to RTP.
- return nullptr;
- }
-
- if (!datagram_transport_result.value()
- ->SetRemoteTransportParameters(
- transport_description->opaque_parameters->parameters)
- .ok()) {
- // Datagram transport negotiation failed (parameters are incompatible).
- // Fall back to RTP.
- return nullptr;
- }
-
- return datagram_transport_result.MoveValue();
-}
-
RTCError JsepTransportController::MaybeCreateJsepTransport(
bool local,
const cricket::ContentInfo& content_info,
@@ -1158,14 +974,8 @@ RTCError JsepTransportController::MaybeCreateJsepTransport(
CreateIceTransport(content_info.name, /*rtcp=*/false);
RTC_DCHECK(ice);
- std::unique_ptr<DatagramTransportInterface> datagram_transport =
- MaybeCreateDatagramTransport(content_info, description, local);
- if (datagram_transport) {
- datagram_transport->Connect(ice->internal());
- }
-
std::unique_ptr<cricket::DtlsTransportInternal> rtp_dtls_transport =
- CreateDtlsTransport(content_info, ice->internal(), nullptr);
+ CreateDtlsTransport(content_info, ice->internal());
std::unique_ptr<cricket::DtlsTransportInternal> rtcp_dtls_transport;
std::unique_ptr<RtpTransport> unencrypted_rtp_transport;
@@ -1177,29 +987,9 @@ RTCError JsepTransportController::MaybeCreateJsepTransport(
if (config_.rtcp_mux_policy !=
PeerConnectionInterface::kRtcpMuxPolicyRequire &&
content_info.type == cricket::MediaProtocolType::kRtp) {
- RTC_DCHECK(datagram_transport == nullptr);
rtcp_ice = CreateIceTransport(content_info.name, /*rtcp=*/true);
rtcp_dtls_transport =
- CreateDtlsTransport(content_info, rtcp_ice->internal(),
- /*datagram_transport=*/nullptr);
- }
-
- // Only create a datagram RTP transport if the datagram transport should be
- // used for RTP.
- if (datagram_transport && config_.use_datagram_transport) {
- // TODO(sukhanov): We use unencrypted RTP transport over DatagramTransport,
- // because MediaTransport encrypts. In the future we may want to
- // implement our own version of RtpTransport over MediaTransport, because
- // it will give us more control over things like:
- // - Fusing
- // - Rtp header compression
- // - Handling Rtcp feedback.
- RTC_LOG(LS_INFO) << "Creating UnencryptedRtpTransport, because datagram "
- "transport is used.";
- RTC_DCHECK(!rtcp_dtls_transport);
- datagram_rtp_transport = std::make_unique<DatagramRtpTransport>(
- content_info.media_description()->rtp_header_extensions(),
- ice->internal(), datagram_transport.get());
+ CreateDtlsTransport(content_info, rtcp_ice->internal());
}
if (config_.disable_encryption) {
@@ -1223,19 +1013,13 @@ RTCError JsepTransportController::MaybeCreateJsepTransport(
config_.sctp_factory->CreateSctpTransport(rtp_dtls_transport.get());
}
- DataChannelTransportInterface* data_channel_transport = nullptr;
- if (config_.use_datagram_transport_for_data_channels) {
- data_channel_transport = datagram_transport.get();
- }
-
std::unique_ptr<cricket::JsepTransport> jsep_transport =
std::make_unique<cricket::JsepTransport>(
content_info.name, certificate_, std::move(ice), std::move(rtcp_ice),
std::move(unencrypted_rtp_transport), std::move(sdes_transport),
std::move(dtls_srtp_transport), std::move(datagram_rtp_transport),
std::move(rtp_dtls_transport), std::move(rtcp_dtls_transport),
- std::move(sctp_transport), std::move(datagram_transport),
- data_channel_transport);
+ std::move(sctp_transport));
jsep_transport->rtp_transport()->SignalRtcpPacketReceived.connect(
this, &JsepTransportController::OnRtcpPacketReceived_n);
@@ -1637,54 +1421,4 @@ void JsepTransportController::OnDtlsHandshakeError(
SignalDtlsHandshakeError(error);
}
-absl::optional<cricket::OpaqueTransportParameters>
-JsepTransportController::GetTransportParameters(const std::string& mid) {
- if (!(config_.use_datagram_transport ||
- config_.use_datagram_transport_for_data_channels)) {
- return absl::nullopt;
- }
-
- cricket::JsepTransport* transport = GetJsepTransportForMid(mid);
- if (transport) {
- absl::optional<cricket::OpaqueTransportParameters> params =
- transport->GetTransportParameters();
- if (params) {
- params->protocol = config_.media_transport_factory->GetTransportName();
- }
- return params;
- }
-
- RTC_DCHECK(!local_desc_ && !remote_desc_)
- << "JsepTransport should exist for every mid once any description is set";
-
- if (config_.use_datagram_transport_for_data_channels_receive_only) {
- return absl::nullopt;
- }
-
- // Need to generate a transport for the offer.
- if (!offer_datagram_transport_) {
- webrtc::MediaTransportSettings settings;
- settings.is_caller = true;
- settings.pre_shared_key = rtc::CreateRandomString(32);
- settings.event_log = config_.event_log;
- auto datagram_transport_or_error =
- config_.media_transport_factory->CreateDatagramTransport(
- network_thread_, settings);
-
- if (datagram_transport_or_error.ok()) {
- offer_datagram_transport_ =
- std::move(datagram_transport_or_error.value());
- } else {
- RTC_LOG(LS_INFO) << "Unable to create datagram transport, error="
- << datagram_transport_or_error.error().message();
- }
- }
-
- // We have prepared a transport for the offer, and can now use its parameters.
- cricket::OpaqueTransportParameters params;
- params.parameters = offer_datagram_transport_->GetTransportParameters();
- params.protocol = config_.media_transport_factory->GetTransportName();
- return params;
-}
-
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/pc/jsep_transport_controller.h b/chromium/third_party/webrtc/pc/jsep_transport_controller.h
index c966e744c68..250501fd055 100644
--- a/chromium/third_party/webrtc/pc/jsep_transport_controller.h
+++ b/chromium/third_party/webrtc/pc/jsep_transport_controller.h
@@ -22,7 +22,6 @@
#include "api/ice_transport_factory.h"
#include "api/peer_connection_interface.h"
#include "api/rtc_event_log/rtc_event_log.h"
-#include "api/transport/media/media_transport_config.h"
#include "media/sctp/sctp_transport_internal.h"
#include "p2p/base/dtls_transport.h"
#include "p2p/base/dtls_transport_factory.h"
@@ -103,31 +102,6 @@ class JsepTransportController : public sigslot::has_slots<> {
// Factory for SCTP transports.
cricket::SctpTransportInternalFactory* sctp_factory = nullptr;
-
- // Whether an RtpMediaTransport should be created as default, when no
- // MediaTransportFactory is provided.
- bool use_rtp_media_transport = false;
-
- // Use encrypted datagram transport to send packets.
- bool use_datagram_transport = false;
-
- // Use datagram transport's implementation of data channels instead of SCTP.
- bool use_datagram_transport_for_data_channels = false;
-
- // Whether |use_datagram_transport_for_data_channels| applies to outgoing
- // calls. If true, |use_datagram_transport_for_data_channels| applies only
- // to incoming calls.
- bool use_datagram_transport_for_data_channels_receive_only = false;
-
- // Optional media transport factory (experimental). If provided it will be
- // used to create datagram_transport (as long as either
- // |use_datagram_transport| or
- // |use_datagram_transport_for_data_channels| is set to true). However,
- // whether it will be used to send / receive audio and video frames instead
- // of RTP is determined by |use_datagram_transport|. Note that currently
- // datagram_transport co-exists with RTP / RTCP transports and may use the
- // same underlying ICE transport.
- MediaTransportFactory* media_transport_factory = nullptr;
};
// The ICE related events are signaled on the |signaling_thread|.
@@ -161,8 +135,6 @@ class JsepTransportController : public sigslot::has_slots<> {
rtc::scoped_refptr<SctpTransport> GetSctpTransport(
const std::string& mid) const;
- MediaTransportConfig GetMediaTransportConfig(const std::string& mid) const;
-
DataChannelTransportInterface* GetDataChannelTransport(
const std::string& mid) const;
@@ -215,26 +187,10 @@ class JsepTransportController : public sigslot::has_slots<> {
void SetActiveResetSrtpParams(bool active_reset_srtp_params);
- // Allows to overwrite the settings from config. You may set or reset the
- // media transport configuration on the jsep transport controller, as long as
- // you did not call 'GetMediaTransport' or 'MaybeCreateJsepTransport'. Once
- // Jsep transport is created, you can't change this setting.
- void SetMediaTransportSettings(
- bool use_datagram_transport,
- bool use_datagram_transport_for_data_channels,
- bool use_datagram_transport_for_data_channels_receive_only);
-
// For now the rollback only removes mid to transport mappings
// and deletes unused transports, but doesn't consider anything more complex.
void RollbackTransports();
- // Gets the transport parameters for the transport identified by |mid|.
- // If |mid| is bundled, returns the parameters for the bundled transport.
- // If the transport for |mid| has not been created yet, it may be allocated in
- // order to generate transport parameters.
- absl::optional<cricket::OpaqueTransportParameters> GetTransportParameters(
- const std::string& mid);
-
// All of these signals are fired on the signaling thread.
// If any transport failed => failed,
@@ -290,9 +246,7 @@ class JsepTransportController : public sigslot::has_slots<> {
const cricket::ContentInfo& content_info,
const cricket::TransportInfo& transport_info,
const std::vector<int>& encrypted_extension_ids,
- int rtp_abs_sendtime_extn_id,
- absl::optional<std::string> media_alt_protocol,
- absl::optional<std::string> data_alt_protocol);
+ int rtp_abs_sendtime_extn_id);
absl::optional<std::string> bundled_mid() const {
absl::optional<std::string> bundled_mid;
@@ -314,12 +268,6 @@ class JsepTransportController : public sigslot::has_slots<> {
std::vector<int> GetEncryptedHeaderExtensionIds(
const cricket::ContentInfo& content_info);
- // Extracts the alt-protocol settings that apply to the bundle group.
- RTCError GetAltProtocolsForBundle(
- const cricket::SessionDescription* description,
- absl::optional<std::string>* media_alt_protocol,
- absl::optional<std::string>* data_alt_protocol);
-
int GetRtpAbsSendTimeHeaderExtensionId(
const cricket::ContentInfo& content_info);
@@ -347,16 +295,6 @@ class JsepTransportController : public sigslot::has_slots<> {
const cricket::ContentInfo& content_info,
const cricket::SessionDescription& description);
- // Creates datagram transport if config wants to use it, and a=x-mt line is
- // present for the current media transport. Returned
- // DatagramTransportInterface is not connected, and must be connected to ICE.
- // You must call |GenerateOrGetLastMediaTransportOffer| on the caller before
- // calling MaybeCreateDatagramTransport.
- std::unique_ptr<webrtc::DatagramTransportInterface>
- MaybeCreateDatagramTransport(const cricket::ContentInfo& content_info,
- const cricket::SessionDescription& description,
- bool local);
-
void MaybeDestroyJsepTransport(const std::string& mid);
void DestroyAllJsepTransports_n();
@@ -370,8 +308,7 @@ class JsepTransportController : public sigslot::has_slots<> {
std::unique_ptr<cricket::DtlsTransportInternal> CreateDtlsTransport(
const cricket::ContentInfo& content_info,
- cricket::IceTransportInternal* ice,
- DatagramTransportInterface* datagram_transport);
+ cricket::IceTransportInternal* ice);
rtc::scoped_refptr<webrtc::IceTransportInterface> CreateIceTransport(
const std::string& transport_name,
bool rtcp);
@@ -446,17 +383,6 @@ class JsepTransportController : public sigslot::has_slots<> {
Config config_;
- // Early on in the call we don't know if datagram transport is going to be
- // used, but we need to get the server-supported parameters to add to an SDP.
- // This server datagram transport will be promoted to the used datagram
- // transport after the local description is set, and the ownership will be
- // transferred to the actual JsepTransport. This "offer" datagram transport is
- // not created if it's done on the party that provides answer. This offer
- // datagram transport is only created once at the beginning of the connection,
- // and never again.
- std::unique_ptr<DatagramTransportInterface> offer_datagram_transport_ =
- nullptr;
-
const cricket::SessionDescription* local_desc_ = nullptr;
const cricket::SessionDescription* remote_desc_ = nullptr;
absl::optional<bool> initial_offerer_;
diff --git a/chromium/third_party/webrtc/pc/jsep_transport_controller_unittest.cc b/chromium/third_party/webrtc/pc/jsep_transport_controller_unittest.cc
index 3fc6f8b7e55..7bdba23c2d8 100644
--- a/chromium/third_party/webrtc/pc/jsep_transport_controller_unittest.cc
+++ b/chromium/third_party/webrtc/pc/jsep_transport_controller_unittest.cc
@@ -13,9 +13,6 @@
#include <map>
#include <memory>
-#include "api/test/fake_media_transport.h"
-#include "api/test/loopback_media_transport.h"
-#include "api/transport/media/media_transport_interface.h"
#include "p2p/base/dtls_transport_factory.h"
#include "p2p/base/fake_dtls_transport.h"
#include "p2p/base/fake_ice_transport.h"
@@ -44,20 +41,6 @@ static const char kDataMid1[] = "data1";
namespace webrtc {
-namespace {
-
-// Media transport factory requires crypto settings to be present in order to
-// create media transport.
-void AddCryptoSettings(cricket::SessionDescription* description) {
- for (auto& content : description->contents()) {
- content.media_description()->AddCrypto(cricket::CryptoParams(
- /*t=*/0, std::string(rtc::CS_AES_CM_128_HMAC_SHA1_80),
- "inline:YUJDZGVmZ2hpSktMbW9QUXJzVHVWd3l6MTIzNDU2", ""));
- }
-}
-
-} // namespace
-
class FakeIceTransportFactory : public webrtc::IceTransportFactory {
public:
~FakeIceTransportFactory() override = default;
@@ -440,96 +423,6 @@ TEST_F(JsepTransportControllerTest, GetDtlsTransportWithRtcpMux) {
EXPECT_EQ(nullptr, transport_controller_->GetRtcpDtlsTransport(kVideoMid1));
}
-TEST_F(JsepTransportControllerTest,
- DtlsIsStillCreatedIfDatagramTransportIsOnlyUsedForDataChannels) {
- FakeMediaTransportFactory fake_media_transport_factory("transport_params");
- JsepTransportController::Config config;
-
- config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire;
- config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle;
- config.media_transport_factory = &fake_media_transport_factory;
- config.use_datagram_transport_for_data_channels = true;
- CreateJsepTransportController(config);
-
- auto description = CreateSessionDescriptionWithBundledData();
- AddCryptoSettings(description.get());
-
- absl::optional<cricket::OpaqueTransportParameters> params =
- transport_controller_->GetTransportParameters(kAudioMid1);
- for (auto& info : description->transport_infos()) {
- info.description.opaque_parameters = params;
- }
- for (cricket::ContentInfo& content_info : description->contents()) {
- if (content_info.media_description()->type() == cricket::MEDIA_TYPE_DATA) {
- content_info.media_description()->set_alt_protocol(params->protocol);
- }
- }
-
- EXPECT_TRUE(transport_controller_
- ->SetLocalDescription(SdpType::kOffer, description.get())
- .ok());
- EXPECT_TRUE(transport_controller_
- ->SetRemoteDescription(SdpType::kAnswer, description.get())
- .ok());
-
- FakeDatagramTransport* datagram_transport =
- static_cast<FakeDatagramTransport*>(
- transport_controller_->GetDataChannelTransport(kAudioMid1));
-
- ASSERT_NE(nullptr, datagram_transport);
-
- EXPECT_EQ(cricket::ICE_CANDIDATE_COMPONENT_RTP,
- transport_controller_->GetDtlsTransport(kAudioMid1)->component())
- << "Datagram transport for media was not enabled, and so DTLS transport "
- "should be created.";
-
- // Datagram transport is not used for media, so no max packet size is
- // specified.
- EXPECT_EQ(transport_controller_->GetMediaTransportConfig(kAudioMid1)
- .rtp_max_packet_size,
- absl::nullopt);
-
- // Since datagram transport is not used for RTP, setting it to writable should
- // not make the RTP transport writable.
- datagram_transport->set_state(MediaTransportState::kWritable);
- EXPECT_FALSE(transport_controller_->GetRtpTransport(kAudioMid1)
- ->IsWritable(/*rtcp=*/false));
-}
-
-// An offer that bundles different alt-protocols should be rejected.
-TEST_F(JsepTransportControllerTest, CannotBundleDifferentAltProtocols) {
- FakeMediaTransportFactory fake_media_transport_factory("transport_params");
- JsepTransportController::Config config;
- config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire;
- config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle;
- config.media_transport_factory = &fake_media_transport_factory;
- config.use_datagram_transport = true;
- config.use_datagram_transport_for_data_channels = true;
- CreateJsepTransportController(config);
-
- auto description = CreateSessionDescriptionWithBundledData();
- AddCryptoSettings(description.get());
-
- absl::optional<cricket::OpaqueTransportParameters> params =
- transport_controller_->GetTransportParameters(kAudioMid1);
- for (auto& info : description->transport_infos()) {
- info.description.opaque_parameters = params;
- }
-
- // Append a different alt-protocol to each of the sections.
- for (cricket::ContentInfo& content_info : description->contents()) {
- content_info.media_description()->set_alt_protocol(params->protocol + "-" +
- content_info.name);
- }
-
- EXPECT_FALSE(transport_controller_
- ->SetLocalDescription(SdpType::kOffer, description.get())
- .ok());
- EXPECT_FALSE(transport_controller_
- ->SetRemoteDescription(SdpType::kAnswer, description.get())
- .ok());
-}
-
TEST_F(JsepTransportControllerTest, SetIceConfig) {
CreateJsepTransportController(JsepTransportController::Config());
auto description = CreateSessionDescriptionWithoutBundle();
@@ -1650,423 +1543,4 @@ TEST_F(JsepTransportControllerTest, ChangeTaggedMediaSectionMaxBundle) {
.ok());
}
-constexpr char kFakeTransportParameters[] = "fake-params";
-
-// Test fixture that provides common setup and helpers for tests related to the
-// datagram transport.
-class JsepTransportControllerDatagramTest
- : public JsepTransportControllerTest,
- public testing::WithParamInterface<bool> {
- public:
- JsepTransportControllerDatagramTest()
- : JsepTransportControllerTest(),
- fake_media_transport_factory_(kFakeTransportParameters) {
- JsepTransportController::Config config;
- config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire;
- config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle;
- config.media_transport_factory = &fake_media_transport_factory_;
- config.use_datagram_transport = true;
- CreateJsepTransportController(config);
- }
-
- // Whether the JsepTransportController under test acts as the offerer or
- // answerer in this test.
- bool IsOfferer() { return GetParam(); }
-
- // Sets a description as local or remote based on type and current
- // perspective.
- RTCError SetDescription(SdpType type,
- const cricket::SessionDescription* description) {
- if (IsOfferer() == (type == SdpType::kOffer)) {
- return transport_controller_->SetLocalDescription(type, description);
- } else {
- return transport_controller_->SetRemoteDescription(type, description);
- }
- }
-
- // Creates a session description with the settings necessary for datagram
- // transport (bundle + crypto) and the given |transport_params|.
- std::unique_ptr<cricket::SessionDescription>
- CreateSessionDescriptionForDatagramTransport(
- absl::optional<cricket::OpaqueTransportParameters> transport_params) {
- auto description = CreateSessionDescriptionWithBundleGroup();
- AddCryptoSettings(description.get());
-
- for (auto& info : description->transport_infos()) {
- info.description.opaque_parameters = transport_params;
- }
- if (transport_params) {
- for (auto& content_info : description->contents()) {
- content_info.media_description()->set_alt_protocol(
- transport_params->protocol);
- }
- }
- return description;
- }
-
- // Creates transport parameters with |protocol| and |parameters|
- // matching what |fake_media_transport_factory_| provides.
- cricket::OpaqueTransportParameters CreateTransportParameters() {
- cricket::OpaqueTransportParameters params;
- params.protocol = fake_media_transport_factory_.GetTransportName();
- params.parameters = "fake-params";
- return params;
- }
-
- protected:
- FakeMediaTransportFactory fake_media_transport_factory_;
-};
-
-TEST_P(JsepTransportControllerDatagramTest, InitDatagramTransport) {
- cricket::OpaqueTransportParameters fake_params = CreateTransportParameters();
- if (IsOfferer()) {
- // Getting transport parameters is allowed before setting a description.
- // This is necessary so that the offerer can include these params.
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
- }
-
- // Setting a description activates the datagram transport without changing
- // transport parameters.
- auto description = CreateSessionDescriptionForDatagramTransport(fake_params);
- EXPECT_TRUE(SetDescription(SdpType::kOffer, description.get()).ok());
-
- // After setting an offer with transport parameters, those parameters are
- // reflected by the controller.
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
-}
-
-TEST_P(JsepTransportControllerDatagramTest,
- OfferMissingDatagramTransportParams) {
- if (IsOfferer()) {
- // This test doesn't make sense from the offerer's perspective, as the offer
- // must contain datagram transport params if the offerer supports it.
- return;
- }
-
- auto description =
- CreateSessionDescriptionForDatagramTransport(absl::nullopt);
- EXPECT_TRUE(SetDescription(SdpType::kOffer, description.get()).ok());
-
- // The offer didn't contain any datagram transport parameters, so the answer
- // won't either.
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- absl::nullopt);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- absl::nullopt);
-}
-
-TEST_P(JsepTransportControllerDatagramTest, OfferHasWrongTransportName) {
- if (IsOfferer()) {
- // This test doesn't make sense from the offerer's perspective, as the
- // offerer cannot offer itself the wrong transport.
- return;
- }
-
- cricket::OpaqueTransportParameters fake_params = CreateTransportParameters();
- fake_params.protocol = "wrong-name";
-
- auto description = CreateSessionDescriptionForDatagramTransport(fake_params);
- EXPECT_TRUE(SetDescription(SdpType::kOffer, description.get()).ok());
-
- // The offerer and answerer support different datagram transports, so the
- // answerer rejects the offered parameters.
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- absl::nullopt);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- absl::nullopt);
-}
-
-TEST_P(JsepTransportControllerDatagramTest, IncompatibleAnswer) {
- // Transport will claim that no parameters are compatible, even if they match
- // exactly.
- fake_media_transport_factory_.set_transport_parameters_comparison(
- [](absl::string_view, absl::string_view) { return false; });
-
- cricket::OpaqueTransportParameters fake_params = CreateTransportParameters();
- if (IsOfferer()) {
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
- }
-
- auto offer = CreateSessionDescriptionForDatagramTransport(fake_params);
- EXPECT_TRUE(SetDescription(SdpType::kOffer, offer.get()).ok());
-
- auto answer = CreateSessionDescriptionForDatagramTransport(fake_params);
- EXPECT_TRUE(SetDescription(SdpType::kAnswer, answer.get()).ok());
-
- // The offerer and answerer have incompatible parameters, so the answerer
- // rejects the offered parameters.
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- absl::nullopt);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- absl::nullopt);
-}
-
-TEST_P(JsepTransportControllerDatagramTest, CompatibleAnswer) {
- // Transport will claim that no parameters are compatible, even if they are
- // completely different.
- fake_media_transport_factory_.set_transport_parameters_comparison(
- [](absl::string_view, absl::string_view) { return true; });
-
- cricket::OpaqueTransportParameters fake_params = CreateTransportParameters();
- if (IsOfferer()) {
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
- }
-
- auto offer = CreateSessionDescriptionForDatagramTransport(fake_params);
- EXPECT_TRUE(SetDescription(SdpType::kOffer, offer.get()).ok());
-
- cricket::OpaqueTransportParameters answer_params;
- answer_params.protocol = fake_params.protocol;
- answer_params.parameters = "something different from offer";
- auto answer = CreateSessionDescriptionForDatagramTransport(answer_params);
- EXPECT_TRUE(SetDescription(SdpType::kAnswer, answer.get()).ok());
-
- // The offerer and answerer have compatible parameters, so the answerer
- // accepts the offered parameters.
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
-}
-
-TEST_P(JsepTransportControllerDatagramTest, AnswerRejectsDatagram) {
- cricket::OpaqueTransportParameters fake_params = CreateTransportParameters();
- if (IsOfferer()) {
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
- }
-
- auto offer = CreateSessionDescriptionForDatagramTransport(fake_params);
- EXPECT_TRUE(SetDescription(SdpType::kOffer, offer.get()).ok());
-
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
-
- auto answer = CreateSessionDescriptionForDatagramTransport(absl::nullopt);
- EXPECT_TRUE(SetDescription(SdpType::kAnswer, answer.get()).ok());
-
- // The answer rejected datagram transport, so its parameters are empty.
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- absl::nullopt);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- absl::nullopt);
-}
-
-TEST_P(JsepTransportControllerDatagramTest, AnswerAcceptsDatagram) {
- cricket::OpaqueTransportParameters fake_params = CreateTransportParameters();
- if (IsOfferer()) {
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
- }
-
- auto offer = CreateSessionDescriptionForDatagramTransport(fake_params);
- EXPECT_TRUE(SetDescription(SdpType::kOffer, offer.get()).ok());
-
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
-
- auto answer = CreateSessionDescriptionForDatagramTransport(fake_params);
- EXPECT_TRUE(SetDescription(SdpType::kAnswer, answer.get()).ok());
-
- // The answer accepted datagram transport, so it is present.
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
-}
-
-TEST_P(JsepTransportControllerDatagramTest, PrAnswerRejectsDatagram) {
- cricket::OpaqueTransportParameters fake_params = CreateTransportParameters();
- if (IsOfferer()) {
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
- }
-
- auto offer = CreateSessionDescriptionForDatagramTransport(fake_params);
- EXPECT_TRUE(SetDescription(SdpType::kOffer, offer.get()).ok());
-
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
-
- auto answer = CreateSessionDescriptionForDatagramTransport(absl::nullopt);
- EXPECT_TRUE(SetDescription(SdpType::kPrAnswer, answer.get()).ok());
-
- // The answer rejected datagram transport, but it's provisional, so the
- // transport is kept around for now.
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
-}
-
-TEST_P(JsepTransportControllerDatagramTest, PrAnswerAcceptsDatagram) {
- cricket::OpaqueTransportParameters fake_params = CreateTransportParameters();
- if (IsOfferer()) {
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
- }
-
- auto offer = CreateSessionDescriptionForDatagramTransport(fake_params);
- EXPECT_TRUE(SetDescription(SdpType::kOffer, offer.get()).ok());
-
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
-
- auto answer = CreateSessionDescriptionForDatagramTransport(fake_params);
- EXPECT_TRUE(SetDescription(SdpType::kPrAnswer, answer.get()).ok());
-
- // The answer provisionally accepted datagram transport, so it's kept.
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
-}
-
-TEST_P(JsepTransportControllerDatagramTest, RenegotiationCannotAddDatagram) {
- auto offer = CreateSessionDescriptionForDatagramTransport(absl::nullopt);
- EXPECT_TRUE(SetDescription(SdpType::kOffer, offer.get()).ok());
-
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- absl::nullopt);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- absl::nullopt);
-
- auto answer = CreateSessionDescriptionForDatagramTransport(absl::nullopt);
- EXPECT_TRUE(SetDescription(SdpType::kAnswer, answer.get()).ok());
-
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- absl::nullopt);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- absl::nullopt);
-
- // Attempting to add a datagram transport on a re-offer does not cause an
- // error, but also does not add a datagram transport.
- auto reoffer =
- CreateSessionDescriptionForDatagramTransport(CreateTransportParameters());
- EXPECT_TRUE(SetDescription(SdpType::kOffer, reoffer.get()).ok());
-
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- absl::nullopt);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- absl::nullopt);
-}
-
-TEST_P(JsepTransportControllerDatagramTest, RenegotiationCannotRemoveDatagram) {
- cricket::OpaqueTransportParameters fake_params = CreateTransportParameters();
- if (IsOfferer()) {
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
- }
-
- auto offer = CreateSessionDescriptionForDatagramTransport(fake_params);
- EXPECT_TRUE(SetDescription(SdpType::kOffer, offer.get()).ok());
-
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
-
- auto answer = CreateSessionDescriptionForDatagramTransport(fake_params);
- EXPECT_TRUE(SetDescription(SdpType::kAnswer, answer.get()).ok());
-
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
-
- // Attempting to remove a datagram transport on a re-offer does not cause an
- // error, but also does not remove the datagram transport.
- auto reoffer = CreateSessionDescriptionForDatagramTransport(absl::nullopt);
- EXPECT_TRUE(SetDescription(SdpType::kOffer, reoffer.get()).ok());
-
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
-}
-
-TEST_P(JsepTransportControllerDatagramTest,
- RenegotiationKeepsDatagramTransport) {
- cricket::OpaqueTransportParameters fake_params = CreateTransportParameters();
- if (IsOfferer()) {
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
- }
-
- auto offer = CreateSessionDescriptionForDatagramTransport(fake_params);
- EXPECT_TRUE(SetDescription(SdpType::kOffer, offer.get()).ok());
-
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
-
- auto answer = CreateSessionDescriptionForDatagramTransport(fake_params);
- EXPECT_TRUE(SetDescription(SdpType::kAnswer, answer.get()).ok());
-
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
-
- // Attempting to remove a datagram transport on a re-offer does not cause an
- // error, but also does not remove the datagram transport.
- auto reoffer = CreateSessionDescriptionForDatagramTransport(fake_params);
- EXPECT_TRUE(SetDescription(SdpType::kOffer, reoffer.get()).ok());
-
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
-
- auto reanswer = CreateSessionDescriptionForDatagramTransport(fake_params);
- EXPECT_TRUE(SetDescription(SdpType::kAnswer, reanswer.get()).ok());
-
- EXPECT_EQ(transport_controller_->GetTransportParameters(kAudioMid1),
- fake_params);
- EXPECT_EQ(transport_controller_->GetTransportParameters(kVideoMid1),
- fake_params);
-}
-
-INSTANTIATE_TEST_SUITE_P(
- JsepTransportControllerDatagramTests,
- JsepTransportControllerDatagramTest,
- testing::Values(true, false),
- // The parameter value is the local perspective (offerer or answerer).
- [](const testing::TestParamInfo<bool>& info) {
- return info.param ? "Offerer" : "Answerer";
- });
-
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/pc/jsep_transport_unittest.cc b/chromium/third_party/webrtc/pc/jsep_transport_unittest.cc
index a4b1d5593e3..d8f2fff621b 100644
--- a/chromium/third_party/webrtc/pc/jsep_transport_unittest.cc
+++ b/chromium/third_party/webrtc/pc/jsep_transport_unittest.cc
@@ -120,9 +120,7 @@ class JsepTransport2Test : public ::testing::Test, public sigslot::has_slots<> {
std::move(sdes_transport), std::move(dtls_srtp_transport),
/*datagram_rtp_transport=*/nullptr, std::move(rtp_dtls_transport),
std::move(rtcp_dtls_transport),
- /*sctp_transport=*/nullptr,
- /*datagram_transport=*/nullptr,
- /*data_channel_transport=*/nullptr);
+ /*sctp_transport=*/nullptr);
signal_rtcp_mux_active_received_ = false;
jsep_transport->SignalRtcpMuxActive.connect(
diff --git a/chromium/third_party/webrtc/pc/media_session.cc b/chromium/third_party/webrtc/pc/media_session.cc
index 51885b4fc42..c03b1bebaaa 100644
--- a/chromium/third_party/webrtc/pc/media_session.cc
+++ b/chromium/third_party/webrtc/pc/media_session.cc
@@ -55,6 +55,57 @@ void GetSupportedSdesCryptoSuiteNames(
}
}
+webrtc::RtpExtension RtpExtensionFromCapability(
+ const webrtc::RtpHeaderExtensionCapability& capability) {
+ return webrtc::RtpExtension(capability.uri,
+ capability.preferred_id.value_or(1));
+}
+
+cricket::RtpHeaderExtensions RtpHeaderExtensionsFromCapabilities(
+ const std::vector<webrtc::RtpHeaderExtensionCapability>& capabilities) {
+ cricket::RtpHeaderExtensions exts;
+ for (const auto& capability : capabilities) {
+ exts.push_back(RtpExtensionFromCapability(capability));
+ }
+ return exts;
+}
+
+std::vector<webrtc::RtpHeaderExtensionCapability>
+UnstoppedRtpHeaderExtensionCapabilities(
+ std::vector<webrtc::RtpHeaderExtensionCapability> capabilities) {
+ capabilities.erase(
+ std::remove_if(
+ capabilities.begin(), capabilities.end(),
+ [](const webrtc::RtpHeaderExtensionCapability& capability) {
+ return capability.direction == RtpTransceiverDirection::kStopped;
+ }),
+ capabilities.end());
+ return capabilities;
+}
+
+bool IsCapabilityPresent(const webrtc::RtpHeaderExtensionCapability& capability,
+ const cricket::RtpHeaderExtensions& extensions) {
+ return std::find_if(extensions.begin(), extensions.end(),
+ [&capability](const webrtc::RtpExtension& extension) {
+ return capability.uri == extension.uri;
+ }) != extensions.end();
+}
+
+cricket::RtpHeaderExtensions UnstoppedOrPresentRtpHeaderExtensions(
+ const std::vector<webrtc::RtpHeaderExtensionCapability>& capabilities,
+ const cricket::RtpHeaderExtensions& unencrypted,
+ const cricket::RtpHeaderExtensions& encrypted) {
+ cricket::RtpHeaderExtensions extensions;
+ for (const auto& capability : capabilities) {
+ if (capability.direction != RtpTransceiverDirection::kStopped ||
+ IsCapabilityPresent(capability, unencrypted) ||
+ IsCapabilityPresent(capability, encrypted)) {
+ extensions.push_back(RtpExtensionFromCapability(capability));
+ }
+ }
+ return extensions;
+}
+
} // namespace
namespace cricket {
@@ -268,19 +319,6 @@ static StreamParamsVec GetCurrentStreamParams(
return stream_params;
}
-// Filters the data codecs for the data channel type.
-void FilterDataCodecs(std::vector<DataCodec>* codecs, bool sctp) {
- // Filter RTP codec for SCTP and vice versa.
- const char* codec_name =
- sctp ? kGoogleRtpDataCodecName : kGoogleSctpDataCodecName;
- codecs->erase(std::remove_if(codecs->begin(), codecs->end(),
- [&codec_name](const DataCodec& codec) {
- return absl::EqualsIgnoreCase(codec.name,
- codec_name);
- }),
- codecs->end());
-}
-
static StreamParams CreateStreamParamsForNewSenderWithSsrcs(
const SenderOptions& sender,
const std::string& rtcp_cname,
@@ -452,15 +490,12 @@ static bool UpdateTransportInfoForBundle(const ContentGroup& bundle_group,
selected_transport_info->description.ice_pwd;
ConnectionRole selected_connection_role =
selected_transport_info->description.connection_role;
- const absl::optional<OpaqueTransportParameters>& selected_opaque_parameters =
- selected_transport_info->description.opaque_parameters;
for (TransportInfo& transport_info : sdesc->transport_infos()) {
if (bundle_group.HasContentName(transport_info.content_name) &&
transport_info.content_name != selected_content_name) {
transport_info.description.ice_ufrag = selected_ufrag;
transport_info.description.ice_pwd = selected_pwd;
transport_info.description.connection_role = selected_connection_role;
- transport_info.description.opaque_parameters = selected_opaque_parameters;
}
}
return true;
@@ -646,7 +681,21 @@ static bool CreateContentOffer(
if (offer->type() == cricket::MEDIA_TYPE_VIDEO) {
offer->set_rtcp_reduced_size(true);
}
- offer->set_rtp_header_extensions(rtp_extensions);
+
+ // Build the vector of header extensions with directions for this
+ // media_description's options.
+ RtpHeaderExtensions extensions;
+ for (auto extension_with_id : rtp_extensions) {
+ for (const auto& extension : media_description_options.header_extensions) {
+ if (extension_with_id.uri == extension.uri) {
+ // TODO(crbug.com/1051821): Configure the extension direction from
+ // the information in the media_description_options extension
+ // capability.
+ extensions.push_back(extension_with_id);
+ }
+ }
+ }
+ offer->set_rtp_header_extensions(extensions);
AddSimulcastToMediaDescription(media_description_options, offer);
@@ -661,8 +710,6 @@ static bool CreateContentOffer(
}
}
- offer->set_alt_protocol(media_description_options.alt_protocol);
-
if (secure_policy == SEC_REQUIRED && offer->cryptos().empty()) {
return false;
}
@@ -1178,7 +1225,7 @@ static bool CreateMediaContentAnswer(
const MediaSessionOptions& session_options,
const SecurePolicy& sdes_policy,
const CryptoParamsVec* current_cryptos,
- const RtpHeaderExtensions& local_rtp_extenstions,
+ const RtpHeaderExtensions& local_rtp_extensions,
UniqueRandomIdGenerator* ssrc_generator,
bool enable_encrypted_rtp_header_extensions,
StreamParamsVec* current_streams,
@@ -1187,7 +1234,7 @@ static bool CreateMediaContentAnswer(
answer->set_extmap_allow_mixed_enum(offer->extmap_allow_mixed_enum());
RtpHeaderExtensions negotiated_rtp_extensions;
NegotiateRtpHeaderExtensions(
- local_rtp_extenstions, offer->rtp_header_extensions(),
+ local_rtp_extensions, offer->rtp_header_extensions(),
enable_encrypted_rtp_header_extensions, &negotiated_rtp_extensions);
answer->set_rtp_header_extensions(negotiated_rtp_extensions);
@@ -1218,9 +1265,6 @@ static bool CreateMediaContentAnswer(
answer->set_direction(NegotiateRtpTransceiverDirection(
offer->direction(), media_description_options.direction));
- if (offer->alt_protocol() == media_description_options.alt_protocol) {
- answer->set_alt_protocol(media_description_options.alt_protocol);
- }
return true;
}
@@ -1365,12 +1409,8 @@ MediaSessionDescriptionFactory::MediaSessionDescriptionFactory(
: MediaSessionDescriptionFactory(transport_desc_factory, ssrc_generator) {
channel_manager->GetSupportedAudioSendCodecs(&audio_send_codecs_);
channel_manager->GetSupportedAudioReceiveCodecs(&audio_recv_codecs_);
- audio_rtp_extensions_ =
- channel_manager->GetDefaultEnabledAudioRtpHeaderExtensions();
channel_manager->GetSupportedVideoSendCodecs(&video_send_codecs_);
channel_manager->GetSupportedVideoReceiveCodecs(&video_recv_codecs_);
- video_rtp_extensions_ =
- channel_manager->GetDefaultEnabledVideoRtpHeaderExtensions();
channel_manager->GetSupportedDataCodecs(&rtp_data_codecs_);
ComputeAudioCodecsIntersectionAndUnion();
ComputeVideoCodecsIntersectionAndUnion();
@@ -1433,22 +1473,11 @@ static void RemoveUnifiedPlanExtensions(RtpHeaderExtensions* extensions) {
}
RtpHeaderExtensions
-MediaSessionDescriptionFactory::audio_rtp_header_extensions() const {
- RtpHeaderExtensions extensions = audio_rtp_extensions_;
+MediaSessionDescriptionFactory::filtered_rtp_header_extensions(
+ RtpHeaderExtensions extensions) const {
if (!is_unified_plan_) {
RemoveUnifiedPlanExtensions(&extensions);
}
-
- return extensions;
-}
-
-RtpHeaderExtensions
-MediaSessionDescriptionFactory::video_rtp_header_extensions() const {
- RtpHeaderExtensions extensions = video_rtp_extensions_;
- if (!is_unified_plan_) {
- RemoveUnifiedPlanExtensions(&extensions);
- }
-
return extensions;
}
@@ -1483,14 +1512,10 @@ std::unique_ptr<SessionDescription> MediaSessionDescriptionFactory::CreateOffer(
// If application doesn't want CN codecs in offer.
StripCNCodecs(&offer_audio_codecs);
}
- FilterDataCodecs(&offer_rtp_data_codecs,
- session_options.data_channel_type == DCT_SCTP);
-
- RtpHeaderExtensions audio_rtp_extensions;
- RtpHeaderExtensions video_rtp_extensions;
- GetRtpHdrExtsToOffer(current_active_contents,
- session_options.offer_extmap_allow_mixed,
- &audio_rtp_extensions, &video_rtp_extensions);
+ AudioVideoRtpHeaderExtensions extensions_with_ids =
+ GetOfferedRtpHeaderExtensionsWithIds(
+ current_active_contents, session_options.offer_extmap_allow_mixed,
+ session_options.media_description_options);
auto offer = std::make_unique<SessionDescription>();
@@ -1510,18 +1535,20 @@ std::unique_ptr<SessionDescription> MediaSessionDescriptionFactory::CreateOffer(
}
switch (media_description_options.type) {
case MEDIA_TYPE_AUDIO:
- if (!AddAudioContentForOffer(
- media_description_options, session_options, current_content,
- current_description, audio_rtp_extensions, offer_audio_codecs,
- &current_streams, offer.get(), &ice_credentials)) {
+ if (!AddAudioContentForOffer(media_description_options, session_options,
+ current_content, current_description,
+ extensions_with_ids.audio,
+ offer_audio_codecs, &current_streams,
+ offer.get(), &ice_credentials)) {
return nullptr;
}
break;
case MEDIA_TYPE_VIDEO:
- if (!AddVideoContentForOffer(
- media_description_options, session_options, current_content,
- current_description, video_rtp_extensions, offer_video_codecs,
- &current_streams, offer.get(), &ice_credentials)) {
+ if (!AddVideoContentForOffer(media_description_options, session_options,
+ current_content, current_description,
+ extensions_with_ids.video,
+ offer_video_codecs, &current_streams,
+ offer.get(), &ice_credentials)) {
return nullptr;
}
break;
@@ -1628,8 +1655,6 @@ MediaSessionDescriptionFactory::CreateAnswer(
// If application doesn't want CN codecs in answer.
StripCNCodecs(&answer_audio_codecs);
}
- FilterDataCodecs(&answer_rtp_data_codecs,
- session_options.data_channel_type == DCT_SCTP);
auto answer = std::make_unique<SessionDescription>();
@@ -1658,13 +1683,16 @@ MediaSessionDescriptionFactory::CreateAnswer(
msection_index < current_description->contents().size()) {
current_content = &current_description->contents()[msection_index];
}
+ RtpHeaderExtensions header_extensions = RtpHeaderExtensionsFromCapabilities(
+ UnstoppedRtpHeaderExtensionCapabilities(
+ media_description_options.header_extensions));
switch (media_description_options.type) {
case MEDIA_TYPE_AUDIO:
if (!AddAudioContentForAnswer(
media_description_options, session_options, offer_content,
offer, current_content, current_description,
- bundle_transport.get(), answer_audio_codecs, &current_streams,
- answer.get(), &ice_credentials)) {
+ bundle_transport.get(), answer_audio_codecs, header_extensions,
+ &current_streams, answer.get(), &ice_credentials)) {
return nullptr;
}
break;
@@ -1672,8 +1700,8 @@ MediaSessionDescriptionFactory::CreateAnswer(
if (!AddVideoContentForAnswer(
media_description_options, session_options, offer_content,
offer, current_content, current_description,
- bundle_transport.get(), answer_video_codecs, &current_streams,
- answer.get(), &ice_credentials)) {
+ bundle_transport.get(), answer_video_codecs, header_extensions,
+ &current_streams, answer.get(), &ice_credentials)) {
return nullptr;
}
break;
@@ -1966,11 +1994,12 @@ void MediaSessionDescriptionFactory::GetCodecsForAnswer(
&used_pltypes);
}
-void MediaSessionDescriptionFactory::GetRtpHdrExtsToOffer(
+MediaSessionDescriptionFactory::AudioVideoRtpHeaderExtensions
+MediaSessionDescriptionFactory::GetOfferedRtpHeaderExtensionsWithIds(
const std::vector<const ContentInfo*>& current_active_contents,
bool extmap_allow_mixed,
- RtpHeaderExtensions* offer_audio_extensions,
- RtpHeaderExtensions* offer_video_extensions) const {
+ const std::vector<MediaDescriptionOptions>& media_description_options)
+ const {
// All header extensions allocated from the same range to avoid potential
// issues when using BUNDLE.
@@ -1984,6 +2013,7 @@ void MediaSessionDescriptionFactory::GetRtpHdrExtsToOffer(
RtpHeaderExtensions all_regular_extensions;
RtpHeaderExtensions all_encrypted_extensions;
+ AudioVideoRtpHeaderExtensions offered_extensions;
// First - get all extensions from the current description if the media type
// is used.
// Add them to |used_ids| so the local ids are not reused if a new media
@@ -1992,36 +2022,45 @@ void MediaSessionDescriptionFactory::GetRtpHdrExtsToOffer(
if (IsMediaContentOfType(content, MEDIA_TYPE_AUDIO)) {
const AudioContentDescription* audio =
content->media_description()->as_audio();
- MergeRtpHdrExts(audio->rtp_header_extensions(), offer_audio_extensions,
+ MergeRtpHdrExts(audio->rtp_header_extensions(), &offered_extensions.audio,
&all_regular_extensions, &all_encrypted_extensions,
&used_ids);
} else if (IsMediaContentOfType(content, MEDIA_TYPE_VIDEO)) {
const VideoContentDescription* video =
content->media_description()->as_video();
- MergeRtpHdrExts(video->rtp_header_extensions(), offer_video_extensions,
+ MergeRtpHdrExts(video->rtp_header_extensions(), &offered_extensions.video,
&all_regular_extensions, &all_encrypted_extensions,
&used_ids);
}
}
- // Add our default RTP header extensions that are not in the current
- // description.
- MergeRtpHdrExts(audio_rtp_header_extensions(), offer_audio_extensions,
- &all_regular_extensions, &all_encrypted_extensions,
- &used_ids);
- MergeRtpHdrExts(video_rtp_header_extensions(), offer_video_extensions,
- &all_regular_extensions, &all_encrypted_extensions,
- &used_ids);
+ // Add all encountered header extensions in the media description options that
+ // are not in the current description.
+ for (const auto& entry : media_description_options) {
+ RtpHeaderExtensions filtered_extensions =
+ filtered_rtp_header_extensions(UnstoppedOrPresentRtpHeaderExtensions(
+ entry.header_extensions, all_regular_extensions,
+ all_encrypted_extensions));
+ if (entry.type == MEDIA_TYPE_AUDIO)
+ MergeRtpHdrExts(filtered_extensions, &offered_extensions.audio,
+ &all_regular_extensions, &all_encrypted_extensions,
+ &used_ids);
+ else if (entry.type == MEDIA_TYPE_VIDEO)
+ MergeRtpHdrExts(filtered_extensions, &offered_extensions.video,
+ &all_regular_extensions, &all_encrypted_extensions,
+ &used_ids);
+ }
// TODO(jbauch): Support adding encrypted header extensions to existing
// sessions.
if (enable_encrypted_rtp_header_extensions_ &&
current_active_contents.empty()) {
- AddEncryptedVersionsOfHdrExts(offer_audio_extensions,
+ AddEncryptedVersionsOfHdrExts(&offered_extensions.audio,
&all_encrypted_extensions, &used_ids);
- AddEncryptedVersionsOfHdrExts(offer_video_extensions,
+ AddEncryptedVersionsOfHdrExts(&offered_extensions.video,
&all_encrypted_extensions, &used_ids);
}
+ return offered_extensions;
}
bool MediaSessionDescriptionFactory::AddTransportOffer(
@@ -2396,6 +2435,7 @@ bool MediaSessionDescriptionFactory::AddAudioContentForAnswer(
const SessionDescription* current_description,
const TransportInfo* bundle_transport,
const AudioCodecs& audio_codecs,
+ const RtpHeaderExtensions& default_audio_rtp_header_extensions,
StreamParamsVec* current_streams,
SessionDescription* answer,
IceCredentialsIterator* ice_credentials) const {
@@ -2468,9 +2508,9 @@ bool MediaSessionDescriptionFactory::AddAudioContentForAnswer(
if (!CreateMediaContentAnswer(
offer_audio_description, media_description_options, session_options,
sdes_policy, GetCryptos(current_content),
- audio_rtp_header_extensions(), ssrc_generator_,
- enable_encrypted_rtp_header_extensions_, current_streams,
- bundle_enabled, audio_answer.get())) {
+ filtered_rtp_header_extensions(default_audio_rtp_header_extensions),
+ ssrc_generator_, enable_encrypted_rtp_header_extensions_,
+ current_streams, bundle_enabled, audio_answer.get())) {
return false; // Fails the session setup.
}
@@ -2506,6 +2546,7 @@ bool MediaSessionDescriptionFactory::AddVideoContentForAnswer(
const SessionDescription* current_description,
const TransportInfo* bundle_transport,
const VideoCodecs& video_codecs,
+ const RtpHeaderExtensions& default_video_rtp_header_extensions,
StreamParamsVec* current_streams,
SessionDescription* answer,
IceCredentialsIterator* ice_credentials) const {
@@ -2586,9 +2627,9 @@ bool MediaSessionDescriptionFactory::AddVideoContentForAnswer(
if (!CreateMediaContentAnswer(
offer_video_description, media_description_options, session_options,
sdes_policy, GetCryptos(current_content),
- video_rtp_header_extensions(), ssrc_generator_,
- enable_encrypted_rtp_header_extensions_, current_streams,
- bundle_enabled, video_answer.get())) {
+ filtered_rtp_header_extensions(default_video_rtp_header_extensions),
+ ssrc_generator_, enable_encrypted_rtp_header_extensions_,
+ current_streams, bundle_enabled, video_answer.get())) {
return false; // Failed the sessin setup.
}
bool secure = bundle_transport ? bundle_transport->description.secure()
diff --git a/chromium/third_party/webrtc/pc/media_session.h b/chromium/third_party/webrtc/pc/media_session.h
index ef83834318f..f305a6214ca 100644
--- a/chromium/third_party/webrtc/pc/media_session.h
+++ b/chromium/third_party/webrtc/pc/media_session.h
@@ -78,7 +78,7 @@ struct MediaDescriptionOptions {
// stream information goes in the local descriptions.
std::vector<SenderOptions> sender_options;
std::vector<webrtc::RtpCodecCapability> codec_preferences;
- absl::optional<std::string> alt_protocol;
+ std::vector<webrtc::RtpHeaderExtensionCapability> header_extensions;
private:
// Doesn't DCHECK on |type|.
@@ -147,19 +147,13 @@ class MediaSessionDescriptionFactory {
const AudioCodecs& audio_recv_codecs() const;
void set_audio_codecs(const AudioCodecs& send_codecs,
const AudioCodecs& recv_codecs);
- void set_audio_rtp_header_extensions(const RtpHeaderExtensions& extensions) {
- audio_rtp_extensions_ = extensions;
- }
- RtpHeaderExtensions audio_rtp_header_extensions() const;
const VideoCodecs& video_sendrecv_codecs() const;
const VideoCodecs& video_send_codecs() const;
const VideoCodecs& video_recv_codecs() const;
void set_video_codecs(const VideoCodecs& send_codecs,
const VideoCodecs& recv_codecs);
- void set_video_rtp_header_extensions(const RtpHeaderExtensions& extensions) {
- video_rtp_extensions_ = extensions;
- }
- RtpHeaderExtensions video_rtp_header_extensions() const;
+ RtpHeaderExtensions filtered_rtp_header_extensions(
+ RtpHeaderExtensions extensions) const;
const RtpDataCodecs& rtp_data_codecs() const { return rtp_data_codecs_; }
void set_rtp_data_codecs(const RtpDataCodecs& codecs) {
rtp_data_codecs_ = codecs;
@@ -184,6 +178,11 @@ class MediaSessionDescriptionFactory {
const SessionDescription* current_description) const;
private:
+ struct AudioVideoRtpHeaderExtensions {
+ RtpHeaderExtensions audio;
+ RtpHeaderExtensions video;
+ };
+
const AudioCodecs& GetAudioCodecsForOffer(
const webrtc::RtpTransceiverDirection& direction) const;
const AudioCodecs& GetAudioCodecsForAnswer(
@@ -205,11 +204,11 @@ class MediaSessionDescriptionFactory {
AudioCodecs* audio_codecs,
VideoCodecs* video_codecs,
RtpDataCodecs* rtp_data_codecs) const;
- void GetRtpHdrExtsToOffer(
+ AudioVideoRtpHeaderExtensions GetOfferedRtpHeaderExtensionsWithIds(
const std::vector<const ContentInfo*>& current_active_contents,
bool extmap_allow_mixed,
- RtpHeaderExtensions* audio_extensions,
- RtpHeaderExtensions* video_extensions) const;
+ const std::vector<MediaDescriptionOptions>& media_description_options)
+ const;
bool AddTransportOffer(const std::string& content_name,
const TransportOptions& transport_options,
const SessionDescription* current_desc,
@@ -293,6 +292,7 @@ class MediaSessionDescriptionFactory {
const SessionDescription* current_description,
const TransportInfo* bundle_transport,
const AudioCodecs& audio_codecs,
+ const RtpHeaderExtensions& default_audio_rtp_header_extensions,
StreamParamsVec* current_streams,
SessionDescription* answer,
IceCredentialsIterator* ice_credentials) const;
@@ -306,6 +306,7 @@ class MediaSessionDescriptionFactory {
const SessionDescription* current_description,
const TransportInfo* bundle_transport,
const VideoCodecs& video_codecs,
+ const RtpHeaderExtensions& default_video_rtp_header_extensions,
StreamParamsVec* current_streams,
SessionDescription* answer,
IceCredentialsIterator* ice_credentials) const;
@@ -334,14 +335,12 @@ class MediaSessionDescriptionFactory {
AudioCodecs audio_sendrecv_codecs_;
// Union of send and recv.
AudioCodecs all_audio_codecs_;
- RtpHeaderExtensions audio_rtp_extensions_;
VideoCodecs video_send_codecs_;
VideoCodecs video_recv_codecs_;
// Intersection of send and recv.
VideoCodecs video_sendrecv_codecs_;
// Union of send and recv.
VideoCodecs all_video_codecs_;
- RtpHeaderExtensions video_rtp_extensions_;
RtpDataCodecs rtp_data_codecs_;
// This object is not owned by the channel so it must outlive it.
rtc::UniqueRandomIdGenerator* const ssrc_generator_;
diff --git a/chromium/third_party/webrtc/pc/media_session_unittest.cc b/chromium/third_party/webrtc/pc/media_session_unittest.cc
index ba4db0a674b..ac949fb6301 100644
--- a/chromium/third_party/webrtc/pc/media_session_unittest.cc
+++ b/chromium/third_party/webrtc/pc/media_session_unittest.cc
@@ -541,9 +541,6 @@ class MediaSessionDescriptionFactoryTest : public ::testing::Test {
EXPECT_EQ(
media_desc_options_it->transport_options.enable_ice_renomination,
GetIceRenomination(ti_audio));
- EXPECT_EQ(media_desc_options_it->transport_options.opaque_parameters,
- ti_audio->description.opaque_parameters);
-
} else {
EXPECT_TRUE(ti_audio == NULL);
}
@@ -556,8 +553,6 @@ class MediaSessionDescriptionFactoryTest : public ::testing::Test {
EXPECT_EQ(ti_audio->description.ice_ufrag,
ti_video->description.ice_ufrag);
EXPECT_EQ(ti_audio->description.ice_pwd, ti_video->description.ice_pwd);
- EXPECT_EQ(ti_audio->description.opaque_parameters,
- ti_video->description.opaque_parameters);
} else {
if (has_current_desc) {
EXPECT_EQ(current_video_ufrag, ti_video->description.ice_ufrag);
@@ -568,8 +563,6 @@ class MediaSessionDescriptionFactoryTest : public ::testing::Test {
EXPECT_EQ(static_cast<size_t>(cricket::ICE_PWD_LENGTH),
ti_video->description.ice_pwd.size());
}
- EXPECT_EQ(media_desc_options_it->transport_options.opaque_parameters,
- ti_video->description.opaque_parameters);
}
EXPECT_EQ(
media_desc_options_it->transport_options.enable_ice_renomination,
@@ -754,13 +747,10 @@ class MediaSessionDescriptionFactoryTest : public ::testing::Test {
const cricket::RtpHeaderExtensions& expectedAnswer) {
MediaSessionOptions opts;
AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
- f1_.set_audio_rtp_header_extensions(offered);
- f1_.set_video_rtp_header_extensions(offered);
- f2_.set_audio_rtp_header_extensions(local);
- f2_.set_video_rtp_header_extensions(local);
-
+ SetAudioVideoRtpHeaderExtensions(offered, offered, &opts);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, NULL);
ASSERT_TRUE(offer.get() != NULL);
+ SetAudioVideoRtpHeaderExtensions(local, local, &opts);
std::unique_ptr<SessionDescription> answer =
f2_.CreateAnswer(offer.get(), opts, NULL);
@@ -772,6 +762,38 @@ class MediaSessionDescriptionFactoryTest : public ::testing::Test {
GetFirstVideoContentDescription(answer.get())->rtp_header_extensions());
}
+ std::vector<webrtc::RtpHeaderExtensionCapability>
+ HeaderExtensionCapabilitiesFromRtpExtensions(
+ cricket::RtpHeaderExtensions extensions) {
+ std::vector<webrtc::RtpHeaderExtensionCapability> capabilities;
+ for (const auto& extension : extensions) {
+ webrtc::RtpHeaderExtensionCapability capability(
+ extension.uri, extension.id,
+ webrtc::RtpTransceiverDirection::kSendRecv);
+ capabilities.push_back(capability);
+ }
+ return capabilities;
+ }
+
+ void SetAudioVideoRtpHeaderExtensions(cricket::RtpHeaderExtensions audio_exts,
+ cricket::RtpHeaderExtensions video_exts,
+ MediaSessionOptions* opts) {
+ auto audio_caps = HeaderExtensionCapabilitiesFromRtpExtensions(audio_exts);
+ auto video_caps = HeaderExtensionCapabilitiesFromRtpExtensions(video_exts);
+ for (auto& entry : opts->media_description_options) {
+ switch (entry.type) {
+ case MEDIA_TYPE_AUDIO:
+ entry.header_extensions = audio_caps;
+ break;
+ case MEDIA_TYPE_VIDEO:
+ entry.header_extensions = video_caps;
+ break;
+ default:
+ break;
+ }
+ }
+ }
+
protected:
UniqueRandomIdGenerator ssrc_generator1;
UniqueRandomIdGenerator ssrc_generator2;
@@ -1659,13 +1681,13 @@ TEST_F(MediaSessionDescriptionFactoryTest, AudioOfferAnswerWithCryptoDisabled) {
TEST_F(MediaSessionDescriptionFactoryTest, TestOfferAnswerWithRtpExtensions) {
MediaSessionOptions opts;
AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
- f1_.set_audio_rtp_header_extensions(MAKE_VECTOR(kAudioRtpExtension1));
- f1_.set_video_rtp_header_extensions(MAKE_VECTOR(kVideoRtpExtension1));
- f2_.set_audio_rtp_header_extensions(MAKE_VECTOR(kAudioRtpExtension2));
- f2_.set_video_rtp_header_extensions(MAKE_VECTOR(kVideoRtpExtension2));
+ SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension1),
+ MAKE_VECTOR(kVideoRtpExtension1), &opts);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, NULL);
ASSERT_TRUE(offer.get() != NULL);
+ SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension2),
+ MAKE_VECTOR(kVideoRtpExtension2), &opts);
std::unique_ptr<SessionDescription> answer =
f2_.CreateAnswer(offer.get(), opts, NULL);
@@ -1714,21 +1736,21 @@ TEST_F(MediaSessionDescriptionFactoryTest,
MediaSessionOptions opts;
AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
- const auto offered = MAKE_VECTOR(kRtpExtensionGenericFrameDescriptorUri00);
- f1_.set_audio_rtp_header_extensions(offered);
- f1_.set_video_rtp_header_extensions(offered);
- const auto local = MAKE_VECTOR(kRtpExtensionTransportSequenceNumber01);
- f2_.set_audio_rtp_header_extensions(local);
- f2_.set_video_rtp_header_extensions(local);
+ SetAudioVideoRtpHeaderExtensions(
+ MAKE_VECTOR(kRtpExtensionGenericFrameDescriptorUri00),
+ MAKE_VECTOR(kRtpExtensionGenericFrameDescriptorUri00), &opts);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, nullptr);
+ SetAudioVideoRtpHeaderExtensions(
+ MAKE_VECTOR(kRtpExtensionTransportSequenceNumber01),
+ MAKE_VECTOR(kRtpExtensionTransportSequenceNumber01), &opts);
std::unique_ptr<SessionDescription> answer =
f2_.CreateAnswer(offer.get(), opts, nullptr);
EXPECT_THAT(
GetFirstAudioContentDescription(answer.get())->rtp_header_extensions(),
- ElementsAreArray(offered));
+ ElementsAreArray(kRtpExtensionGenericFrameDescriptorUri00));
EXPECT_THAT(
GetFirstVideoContentDescription(answer.get())->rtp_header_extensions(),
- ElementsAreArray(offered));
+ ElementsAreArray(kRtpExtensionGenericFrameDescriptorUri00));
}
TEST_F(MediaSessionDescriptionFactoryTest,
@@ -1736,21 +1758,18 @@ TEST_F(MediaSessionDescriptionFactoryTest,
MediaSessionOptions opts;
AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
- const auto offered = MAKE_VECTOR(kRtpExtensionGenericFrameDescriptorUri00);
- f1_.set_audio_rtp_header_extensions(offered);
- f1_.set_video_rtp_header_extensions(offered);
- const auto local = MAKE_VECTOR(kRtpExtensionGenericFrameDescriptorUri00);
- f2_.set_audio_rtp_header_extensions(local);
- f2_.set_video_rtp_header_extensions(local);
+ SetAudioVideoRtpHeaderExtensions(
+ MAKE_VECTOR(kRtpExtensionGenericFrameDescriptorUri00),
+ MAKE_VECTOR(kRtpExtensionGenericFrameDescriptorUri00), &opts);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, nullptr);
std::unique_ptr<SessionDescription> answer =
f2_.CreateAnswer(offer.get(), opts, nullptr);
EXPECT_THAT(
GetFirstAudioContentDescription(answer.get())->rtp_header_extensions(),
- ElementsAreArray(offered));
+ ElementsAreArray(kRtpExtensionGenericFrameDescriptorUri00));
EXPECT_THAT(
GetFirstVideoContentDescription(answer.get())->rtp_header_extensions(),
- ElementsAreArray(offered));
+ ElementsAreArray(kRtpExtensionGenericFrameDescriptorUri00));
}
TEST_F(MediaSessionDescriptionFactoryTest,
@@ -1759,10 +1778,10 @@ TEST_F(MediaSessionDescriptionFactoryTest,
AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
RtpExtension offer_dd(RtpExtension::kDependencyDescriptorUri, 7);
- RtpExtension local_tsn(RtpExtension::kTransportSequenceNumberUri, 5);
- f1_.set_video_rtp_header_extensions({offer_dd});
- f2_.set_video_rtp_header_extensions({local_tsn});
+ SetAudioVideoRtpHeaderExtensions({}, {offer_dd}, &opts);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, nullptr);
+ RtpExtension local_tsn(RtpExtension::kTransportSequenceNumberUri, 5);
+ SetAudioVideoRtpHeaderExtensions({}, {local_tsn}, &opts);
std::unique_ptr<SessionDescription> answer =
f2_.CreateAnswer(offer.get(), opts, nullptr);
EXPECT_THAT(
@@ -1777,9 +1796,9 @@ TEST_F(MediaSessionDescriptionFactoryTest,
RtpExtension offer_dd(RtpExtension::kDependencyDescriptorUri, 7);
RtpExtension local_dd(RtpExtension::kDependencyDescriptorUri, 5);
- f1_.set_video_rtp_header_extensions({offer_dd});
- f2_.set_video_rtp_header_extensions({local_dd});
+ SetAudioVideoRtpHeaderExtensions({}, {offer_dd}, &opts);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, nullptr);
+ SetAudioVideoRtpHeaderExtensions({}, {local_dd}, &opts);
std::unique_ptr<SessionDescription> answer =
f2_.CreateAnswer(offer.get(), opts, nullptr);
EXPECT_THAT(
@@ -1796,12 +1815,10 @@ TEST_F(MediaSessionDescriptionFactoryTest,
RtpExtension(RtpExtension::kAbsoluteCaptureTimeUri, 7)};
const cricket::RtpHeaderExtensions local_extensions = {
RtpExtension(RtpExtension::kTransportSequenceNumberUri, 5)};
- f1_.set_video_rtp_header_extensions(offered_extensions);
- f1_.set_audio_rtp_header_extensions(offered_extensions);
- f2_.set_video_rtp_header_extensions(local_extensions);
- f2_.set_audio_rtp_header_extensions(local_extensions);
-
+ SetAudioVideoRtpHeaderExtensions(offered_extensions, offered_extensions,
+ &opts);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, nullptr);
+ SetAudioVideoRtpHeaderExtensions(local_extensions, local_extensions, &opts);
std::unique_ptr<SessionDescription> answer =
f2_.CreateAnswer(offer.get(), opts, nullptr);
EXPECT_THAT(
@@ -1821,12 +1838,10 @@ TEST_F(MediaSessionDescriptionFactoryTest,
RtpExtension(RtpExtension::kAbsoluteCaptureTimeUri, 7)};
const cricket::RtpHeaderExtensions local_extensions = {
RtpExtension(RtpExtension::kAbsoluteCaptureTimeUri, 5)};
- f1_.set_video_rtp_header_extensions(offered_extensions);
- f1_.set_audio_rtp_header_extensions(offered_extensions);
- f2_.set_video_rtp_header_extensions(local_extensions);
- f2_.set_audio_rtp_header_extensions(local_extensions);
-
+ SetAudioVideoRtpHeaderExtensions(offered_extensions, offered_extensions,
+ &opts);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, nullptr);
+ SetAudioVideoRtpHeaderExtensions(local_extensions, local_extensions, &opts);
std::unique_ptr<SessionDescription> answer =
f2_.CreateAnswer(offer.get(), opts, nullptr);
EXPECT_THAT(
@@ -1846,12 +1861,10 @@ TEST_F(MediaSessionDescriptionFactoryTest,
RtpExtension(RtpExtension::kTransportSequenceNumberUri, 7)};
const cricket::RtpHeaderExtensions local_extensions = {
RtpExtension(RtpExtension::kAbsoluteCaptureTimeUri, 5)};
- f1_.set_video_rtp_header_extensions(offered_extensions);
- f1_.set_audio_rtp_header_extensions(offered_extensions);
- f2_.set_video_rtp_header_extensions(local_extensions);
- f2_.set_audio_rtp_header_extensions(local_extensions);
-
+ SetAudioVideoRtpHeaderExtensions(offered_extensions, offered_extensions,
+ &opts);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, nullptr);
+ SetAudioVideoRtpHeaderExtensions(local_extensions, local_extensions, &opts);
std::unique_ptr<SessionDescription> answer =
f2_.CreateAnswer(offer.get(), opts, nullptr);
EXPECT_THAT(
@@ -1863,6 +1876,203 @@ TEST_F(MediaSessionDescriptionFactoryTest,
}
TEST_F(MediaSessionDescriptionFactoryTest,
+ OffersUnstoppedExtensionsWithAudioVideoExtensionStopped) {
+ MediaSessionOptions opts;
+ AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ opts.media_description_options.back().header_extensions = {
+ webrtc::RtpHeaderExtensionCapability("uri1", 1,
+ RtpTransceiverDirection::kStopped),
+ webrtc::RtpHeaderExtensionCapability("uri2", 3,
+ RtpTransceiverDirection::kSendOnly)};
+ AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video1",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ opts.media_description_options.back().header_extensions = {
+ webrtc::RtpHeaderExtensionCapability("uri1", 1,
+ RtpTransceiverDirection::kStopped),
+ webrtc::RtpHeaderExtensionCapability("uri3", 7,
+ RtpTransceiverDirection::kSendOnly)};
+ auto offer = f1_.CreateOffer(opts, nullptr);
+ EXPECT_THAT(
+ offer->contents(),
+ ElementsAre(
+ Property(&ContentInfo::media_description,
+ Pointee(Property(
+ &MediaContentDescription::rtp_header_extensions,
+ ElementsAre(Field(&RtpExtension::uri, "uri2"))))),
+ Property(&ContentInfo::media_description,
+ Pointee(Property(
+ &MediaContentDescription::rtp_header_extensions,
+ ElementsAre(Field(&RtpExtension::uri, "uri3")))))));
+}
+
+TEST_F(MediaSessionDescriptionFactoryTest,
+ OffersUnstoppedExtensionsWithAudioExtensionStopped) {
+ MediaSessionOptions opts;
+ AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ opts.media_description_options.back().header_extensions = {
+ webrtc::RtpHeaderExtensionCapability("uri1", 1,
+ RtpTransceiverDirection::kSendOnly),
+ webrtc::RtpHeaderExtensionCapability("uri2", 3,
+ RtpTransceiverDirection::kStopped)};
+ AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video1",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ opts.media_description_options.back().header_extensions = {
+ webrtc::RtpHeaderExtensionCapability("uri42", 42,
+ RtpTransceiverDirection::kSendRecv),
+ webrtc::RtpHeaderExtensionCapability("uri3", 7,
+ RtpTransceiverDirection::kSendOnly)};
+ auto offer = f1_.CreateOffer(opts, nullptr);
+ EXPECT_THAT(
+ offer->contents(),
+ ElementsAre(
+ Property(&ContentInfo::media_description,
+ Pointee(Property(
+ &MediaContentDescription::rtp_header_extensions,
+ ElementsAre(Field(&RtpExtension::uri, "uri1"))))),
+ Property(
+ &ContentInfo::media_description,
+ Pointee(Property(
+ &MediaContentDescription::rtp_header_extensions,
+ UnorderedElementsAre(Field(&RtpExtension::uri, "uri3"),
+ Field(&RtpExtension::uri, "uri42")))))));
+}
+
+TEST_F(MediaSessionDescriptionFactoryTest,
+ OffersUnstoppedExtensionsWithVideoExtensionStopped) {
+ MediaSessionOptions opts;
+ AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ opts.media_description_options.back().header_extensions = {
+ webrtc::RtpHeaderExtensionCapability("uri1", 5,
+ RtpTransceiverDirection::kSendOnly),
+ webrtc::RtpHeaderExtensionCapability("uri2", 7,
+ RtpTransceiverDirection::kSendRecv)};
+ AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video1",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ opts.media_description_options.back().header_extensions = {
+ webrtc::RtpHeaderExtensionCapability("uri42", 42,
+ RtpTransceiverDirection::kSendRecv),
+ webrtc::RtpHeaderExtensionCapability("uri3", 7,
+ RtpTransceiverDirection::kStopped)};
+ auto offer = f1_.CreateOffer(opts, nullptr);
+ EXPECT_THAT(
+ offer->contents(),
+ ElementsAre(
+ Property(
+ &ContentInfo::media_description,
+ Pointee(Property(
+ &MediaContentDescription::rtp_header_extensions,
+ UnorderedElementsAre(Field(&RtpExtension::uri, "uri1"),
+ Field(&RtpExtension::uri, "uri2"))))),
+ Property(&ContentInfo::media_description,
+ Pointee(Property(
+ &MediaContentDescription::rtp_header_extensions,
+ ElementsAre(Field(&RtpExtension::uri, "uri42")))))));
+}
+
+TEST_F(MediaSessionDescriptionFactoryTest, AnswersUnstoppedExtensions) {
+ MediaSessionOptions opts;
+ AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ opts.media_description_options.back().header_extensions = {
+ webrtc::RtpHeaderExtensionCapability("uri1", 4,
+ RtpTransceiverDirection::kStopped),
+ webrtc::RtpHeaderExtensionCapability("uri2", 3,
+ RtpTransceiverDirection::kSendOnly),
+ webrtc::RtpHeaderExtensionCapability("uri3", 2,
+ RtpTransceiverDirection::kRecvOnly),
+ webrtc::RtpHeaderExtensionCapability("uri4", 1,
+ RtpTransceiverDirection::kSendRecv)};
+ auto offer = f1_.CreateOffer(opts, nullptr);
+ opts.media_description_options.back().header_extensions = {
+ webrtc::RtpHeaderExtensionCapability("uri1", 4,
+ RtpTransceiverDirection::kSendOnly),
+ webrtc::RtpHeaderExtensionCapability("uri2", 3,
+ RtpTransceiverDirection::kRecvOnly),
+ webrtc::RtpHeaderExtensionCapability("uri3", 2,
+ RtpTransceiverDirection::kStopped),
+ webrtc::RtpHeaderExtensionCapability("uri4", 1,
+ RtpTransceiverDirection::kSendRecv)};
+ auto answer = f2_.CreateAnswer(offer.get(), opts, nullptr);
+ EXPECT_THAT(
+ answer->contents(),
+ ElementsAre(Property(
+ &ContentInfo::media_description,
+ Pointee(Property(&MediaContentDescription::rtp_header_extensions,
+ ElementsAre(Field(&RtpExtension::uri, "uri2"),
+ Field(&RtpExtension::uri, "uri4")))))));
+}
+
+TEST_F(MediaSessionDescriptionFactoryTest,
+ AppendsUnstoppedExtensionsToCurrentDescription) {
+ MediaSessionOptions opts;
+ AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ opts.media_description_options.back().header_extensions = {
+ webrtc::RtpHeaderExtensionCapability("uri1", 1,
+ RtpTransceiverDirection::kSendRecv)};
+ auto offer = f1_.CreateOffer(opts, nullptr);
+ opts.media_description_options.back().header_extensions = {
+ webrtc::RtpHeaderExtensionCapability("uri1", 2,
+ RtpTransceiverDirection::kSendRecv),
+ webrtc::RtpHeaderExtensionCapability("uri2", 3,
+ RtpTransceiverDirection::kRecvOnly),
+ webrtc::RtpHeaderExtensionCapability("uri3", 5,
+ RtpTransceiverDirection::kStopped),
+ webrtc::RtpHeaderExtensionCapability("uri4", 6,
+ RtpTransceiverDirection::kSendRecv)};
+ auto offer2 = f1_.CreateOffer(opts, offer.get());
+ EXPECT_THAT(
+ offer2->contents(),
+ ElementsAre(Property(
+ &ContentInfo::media_description,
+ Pointee(Property(&MediaContentDescription::rtp_header_extensions,
+ ElementsAre(Field(&RtpExtension::uri, "uri1"),
+ Field(&RtpExtension::uri, "uri2"),
+ Field(&RtpExtension::uri, "uri4")))))));
+}
+
+TEST_F(MediaSessionDescriptionFactoryTest,
+ AppendsStoppedExtensionIfKnownAndPresentInTheOffer) {
+ MediaSessionOptions opts;
+ AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ opts.media_description_options.back().header_extensions = {
+ webrtc::RtpHeaderExtensionCapability("uri1", 1,
+ RtpTransceiverDirection::kSendRecv),
+ webrtc::RtpHeaderExtensionCapability("uri2", 1,
+ RtpTransceiverDirection::kSendRecv)};
+ auto offer = f1_.CreateOffer(opts, nullptr);
+
+ // Now add "uri2" as stopped to the options verify that the offer contains
+ // uri2 since it's already present since before.
+ opts.media_description_options.back().header_extensions = {
+ webrtc::RtpHeaderExtensionCapability("uri1", 1,
+ RtpTransceiverDirection::kSendRecv),
+ webrtc::RtpHeaderExtensionCapability("uri2", 2,
+ RtpTransceiverDirection::kStopped)};
+ auto offer2 = f1_.CreateOffer(opts, offer.get());
+ EXPECT_THAT(
+ offer2->contents(),
+ ElementsAre(Property(
+ &ContentInfo::media_description,
+ Pointee(Property(&MediaContentDescription::rtp_header_extensions,
+ ElementsAre(Field(&RtpExtension::uri, "uri1"),
+ Field(&RtpExtension::uri, "uri2")))))));
+}
+
+TEST_F(MediaSessionDescriptionFactoryTest,
TestOfferAnswerWithEncryptedRtpExtensionsBoth) {
MediaSessionOptions opts;
AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
@@ -1870,13 +2080,12 @@ TEST_F(MediaSessionDescriptionFactoryTest,
f1_.set_enable_encrypted_rtp_header_extensions(true);
f2_.set_enable_encrypted_rtp_header_extensions(true);
- f1_.set_audio_rtp_header_extensions(MAKE_VECTOR(kAudioRtpExtension1));
- f1_.set_video_rtp_header_extensions(MAKE_VECTOR(kVideoRtpExtension1));
- f2_.set_audio_rtp_header_extensions(MAKE_VECTOR(kAudioRtpExtension2));
- f2_.set_video_rtp_header_extensions(MAKE_VECTOR(kVideoRtpExtension2));
-
+ SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension1),
+ MAKE_VECTOR(kVideoRtpExtension1), &opts);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, NULL);
ASSERT_TRUE(offer.get() != NULL);
+ SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension2),
+ MAKE_VECTOR(kVideoRtpExtension2), &opts);
std::unique_ptr<SessionDescription> answer =
f2_.CreateAnswer(offer.get(), opts, NULL);
@@ -1901,13 +2110,12 @@ TEST_F(MediaSessionDescriptionFactoryTest,
f1_.set_enable_encrypted_rtp_header_extensions(true);
- f1_.set_audio_rtp_header_extensions(MAKE_VECTOR(kAudioRtpExtension1));
- f1_.set_video_rtp_header_extensions(MAKE_VECTOR(kVideoRtpExtension1));
- f2_.set_audio_rtp_header_extensions(MAKE_VECTOR(kAudioRtpExtension2));
- f2_.set_video_rtp_header_extensions(MAKE_VECTOR(kVideoRtpExtension2));
-
+ SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension1),
+ MAKE_VECTOR(kVideoRtpExtension1), &opts);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, NULL);
ASSERT_TRUE(offer.get() != NULL);
+ SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension2),
+ MAKE_VECTOR(kVideoRtpExtension2), &opts);
std::unique_ptr<SessionDescription> answer =
f2_.CreateAnswer(offer.get(), opts, NULL);
@@ -1932,13 +2140,12 @@ TEST_F(MediaSessionDescriptionFactoryTest,
f2_.set_enable_encrypted_rtp_header_extensions(true);
- f1_.set_audio_rtp_header_extensions(MAKE_VECTOR(kAudioRtpExtension1));
- f1_.set_video_rtp_header_extensions(MAKE_VECTOR(kVideoRtpExtension1));
- f2_.set_audio_rtp_header_extensions(MAKE_VECTOR(kAudioRtpExtension2));
- f2_.set_video_rtp_header_extensions(MAKE_VECTOR(kVideoRtpExtension2));
-
+ SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension1),
+ MAKE_VECTOR(kVideoRtpExtension1), &opts);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, NULL);
ASSERT_TRUE(offer.get() != NULL);
+ SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension2),
+ MAKE_VECTOR(kVideoRtpExtension2), &opts);
std::unique_ptr<SessionDescription> answer =
f2_.CreateAnswer(offer.get(), opts, NULL);
@@ -3340,12 +3547,11 @@ TEST_F(MediaSessionDescriptionFactoryTest,
MediaSessionOptions opts;
AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
- f1_.set_audio_rtp_header_extensions(MAKE_VECTOR(kAudioRtpExtension1));
- f1_.set_video_rtp_header_extensions(MAKE_VECTOR(kVideoRtpExtension1));
- f2_.set_audio_rtp_header_extensions(MAKE_VECTOR(kAudioRtpExtension2));
- f2_.set_video_rtp_header_extensions(MAKE_VECTOR(kVideoRtpExtension2));
-
+ SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension1),
+ MAKE_VECTOR(kVideoRtpExtension1), &opts);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, NULL);
+ SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension2),
+ MAKE_VECTOR(kVideoRtpExtension2), &opts);
std::unique_ptr<SessionDescription> answer =
f2_.CreateAnswer(offer.get(), opts, NULL);
@@ -3396,9 +3602,8 @@ TEST_F(MediaSessionDescriptionFactoryTest, RtpExtensionIdReused) {
MediaSessionOptions opts;
AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
- f1_.set_audio_rtp_header_extensions(MAKE_VECTOR(kAudioRtpExtension3));
- f1_.set_video_rtp_header_extensions(MAKE_VECTOR(kVideoRtpExtension3));
-
+ SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension3),
+ MAKE_VECTOR(kVideoRtpExtension3), &opts);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, NULL);
// Since the audio extensions used ID 3 for "both_audio_and_video", so should
@@ -3435,11 +3640,9 @@ TEST_F(MediaSessionDescriptionFactoryTest, RtpExtensionIdReusedEncrypted) {
f1_.set_enable_encrypted_rtp_header_extensions(true);
f2_.set_enable_encrypted_rtp_header_extensions(true);
- f1_.set_audio_rtp_header_extensions(
- MAKE_VECTOR(kAudioRtpExtension3ForEncryption));
- f1_.set_video_rtp_header_extensions(
- MAKE_VECTOR(kVideoRtpExtension3ForEncryption));
-
+ SetAudioVideoRtpHeaderExtensions(
+ MAKE_VECTOR(kAudioRtpExtension3ForEncryption),
+ MAKE_VECTOR(kVideoRtpExtension3ForEncryption), &opts);
std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, NULL);
// The extensions that are shared between audio and video should use the same
@@ -3631,164 +3834,6 @@ TEST_F(MediaSessionDescriptionFactoryTest,
TestTransportInfo(false, options, true);
}
-TEST_F(MediaSessionDescriptionFactoryTest,
- TestTransportInfoOfferBundlesTransportOptions) {
- MediaSessionOptions options;
- AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options);
-
- cricket::OpaqueTransportParameters audio_params;
- audio_params.protocol = "audio-transport";
- audio_params.parameters = "audio-params";
- FindFirstMediaDescriptionByMid("audio", &options)
- ->transport_options.opaque_parameters = audio_params;
-
- cricket::OpaqueTransportParameters video_params;
- video_params.protocol = "video-transport";
- video_params.parameters = "video-params";
- FindFirstMediaDescriptionByMid("video", &options)
- ->transport_options.opaque_parameters = video_params;
-
- TestTransportInfo(/*offer=*/true, options, /*has_current_desc=*/false);
-}
-
-TEST_F(MediaSessionDescriptionFactoryTest,
- TestTransportInfoAnswerBundlesTransportOptions) {
- MediaSessionOptions options;
- AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options);
-
- cricket::OpaqueTransportParameters audio_params;
- audio_params.protocol = "audio-transport";
- audio_params.parameters = "audio-params";
- FindFirstMediaDescriptionByMid("audio", &options)
- ->transport_options.opaque_parameters = audio_params;
-
- cricket::OpaqueTransportParameters video_params;
- video_params.protocol = "video-transport";
- video_params.parameters = "video-params";
- FindFirstMediaDescriptionByMid("video", &options)
- ->transport_options.opaque_parameters = video_params;
-
- TestTransportInfo(/*offer=*/false, options, /*has_current_desc=*/false);
-}
-
-TEST_F(MediaSessionDescriptionFactoryTest, AltProtocolAddedToOffer) {
- MediaSessionOptions options;
- AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options);
- AddDataSection(cricket::DCT_RTP, RtpTransceiverDirection::kRecvOnly,
- &options);
-
- FindFirstMediaDescriptionByMid("audio", &options)->alt_protocol = "foo";
- FindFirstMediaDescriptionByMid("video", &options)->alt_protocol = "bar";
- FindFirstMediaDescriptionByMid("data", &options)->alt_protocol = "baz";
-
- std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(options, nullptr);
-
- EXPECT_EQ(offer->GetContentDescriptionByName("audio")->alt_protocol(), "foo");
- EXPECT_EQ(offer->GetContentDescriptionByName("video")->alt_protocol(), "bar");
- EXPECT_EQ(offer->GetContentDescriptionByName("data")->alt_protocol(), "baz");
-}
-
-TEST_F(MediaSessionDescriptionFactoryTest, AltProtocolAddedToAnswer) {
- MediaSessionOptions options;
- AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options);
- AddDataSection(cricket::DCT_SCTP, RtpTransceiverDirection::kRecvOnly,
- &options);
-
- FindFirstMediaDescriptionByMid("audio", &options)->alt_protocol = "foo";
- FindFirstMediaDescriptionByMid("video", &options)->alt_protocol = "bar";
- FindFirstMediaDescriptionByMid("data", &options)->alt_protocol = "baz";
-
- std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(options, nullptr);
- std::unique_ptr<SessionDescription> answer =
- f1_.CreateAnswer(offer.get(), options, nullptr);
-
- EXPECT_EQ(answer->GetContentDescriptionByName("audio")->alt_protocol(),
- "foo");
- EXPECT_EQ(answer->GetContentDescriptionByName("video")->alt_protocol(),
- "bar");
- EXPECT_EQ(answer->GetContentDescriptionByName("data")->alt_protocol(), "baz");
-}
-
-TEST_F(MediaSessionDescriptionFactoryTest, AltProtocolNotInOffer) {
- MediaSessionOptions options;
- AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options);
- AddDataSection(cricket::DCT_SCTP, RtpTransceiverDirection::kRecvOnly,
- &options);
-
- std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(options, nullptr);
-
- FindFirstMediaDescriptionByMid("audio", &options)->alt_protocol = "foo";
- FindFirstMediaDescriptionByMid("video", &options)->alt_protocol = "bar";
- FindFirstMediaDescriptionByMid("data", &options)->alt_protocol = "baz";
-
- std::unique_ptr<SessionDescription> answer =
- f1_.CreateAnswer(offer.get(), options, nullptr);
-
- EXPECT_EQ(answer->GetContentDescriptionByName("audio")->alt_protocol(),
- absl::nullopt);
- EXPECT_EQ(answer->GetContentDescriptionByName("video")->alt_protocol(),
- absl::nullopt);
- EXPECT_EQ(answer->GetContentDescriptionByName("data")->alt_protocol(),
- absl::nullopt);
-}
-
-TEST_F(MediaSessionDescriptionFactoryTest, AltProtocolDifferentInOffer) {
- MediaSessionOptions options;
- AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options);
- AddDataSection(cricket::DCT_SCTP, RtpTransceiverDirection::kRecvOnly,
- &options);
-
- FindFirstMediaDescriptionByMid("audio", &options)->alt_protocol = "not-foo";
- FindFirstMediaDescriptionByMid("video", &options)->alt_protocol = "not-bar";
- FindFirstMediaDescriptionByMid("data", &options)->alt_protocol = "not-baz";
-
- std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(options, nullptr);
-
- FindFirstMediaDescriptionByMid("audio", &options)->alt_protocol = "foo";
- FindFirstMediaDescriptionByMid("video", &options)->alt_protocol = "bar";
- FindFirstMediaDescriptionByMid("data", &options)->alt_protocol = "baz";
-
- std::unique_ptr<SessionDescription> answer =
- f1_.CreateAnswer(offer.get(), options, nullptr);
-
- EXPECT_EQ(answer->GetContentDescriptionByName("audio")->alt_protocol(),
- absl::nullopt);
- EXPECT_EQ(answer->GetContentDescriptionByName("video")->alt_protocol(),
- absl::nullopt);
- EXPECT_EQ(answer->GetContentDescriptionByName("data")->alt_protocol(),
- absl::nullopt);
-}
-
-TEST_F(MediaSessionDescriptionFactoryTest, AltProtocolNotInAnswer) {
- MediaSessionOptions options;
- AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options);
- AddDataSection(cricket::DCT_SCTP, RtpTransceiverDirection::kRecvOnly,
- &options);
-
- FindFirstMediaDescriptionByMid("audio", &options)->alt_protocol = "foo";
- FindFirstMediaDescriptionByMid("video", &options)->alt_protocol = "bar";
- FindFirstMediaDescriptionByMid("data", &options)->alt_protocol = "baz";
-
- std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(options, nullptr);
-
- FindFirstMediaDescriptionByMid("audio", &options)->alt_protocol =
- absl::nullopt;
- FindFirstMediaDescriptionByMid("video", &options)->alt_protocol =
- absl::nullopt;
- FindFirstMediaDescriptionByMid("data", &options)->alt_protocol =
- absl::nullopt;
-
- std::unique_ptr<SessionDescription> answer =
- f1_.CreateAnswer(offer.get(), options, nullptr);
-
- EXPECT_EQ(answer->GetContentDescriptionByName("audio")->alt_protocol(),
- absl::nullopt);
- EXPECT_EQ(answer->GetContentDescriptionByName("video")->alt_protocol(),
- absl::nullopt);
- EXPECT_EQ(answer->GetContentDescriptionByName("data")->alt_protocol(),
- absl::nullopt);
-}
-
// Create an offer with bundle enabled and verify the crypto parameters are
// the common set of the available cryptos.
TEST_F(MediaSessionDescriptionFactoryTest, TestCryptoWithOfferBundle) {
@@ -4599,7 +4644,7 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestSetAudioCodecs) {
// properly.
send_codecs[1].channels = 0;
- // Alther iLBC receive codec to be lowercase, to test that case conversions
+ // Alter iLBC receive codec to be lowercase, to test that case conversions
// are handled properly.
recv_codecs[2].name = "ilbc";
diff --git a/chromium/third_party/webrtc/pc/media_stream_unittest.cc b/chromium/third_party/webrtc/pc/media_stream_unittest.cc
index b70171dfcba..55226992e09 100644
--- a/chromium/third_party/webrtc/pc/media_stream_unittest.cc
+++ b/chromium/third_party/webrtc/pc/media_stream_unittest.cc
@@ -46,7 +46,7 @@ class MockObserver : public ObserverInterface {
}
}
- MOCK_METHOD0(OnChanged, void());
+ MOCK_METHOD(void, OnChanged, (), (override));
private:
NotifierInterface* notifier_;
diff --git a/chromium/third_party/webrtc/pc/peer_connection.cc b/chromium/third_party/webrtc/pc/peer_connection.cc
index 05e7b95591e..1bf937359f3 100644
--- a/chromium/third_party/webrtc/pc/peer_connection.cc
+++ b/chromium/third_party/webrtc/pc/peer_connection.cc
@@ -107,13 +107,6 @@ const char kDtlsSrtpSetupFailureRtcp[] =
namespace {
-// Field trials.
-// Controls datagram transport support.
-const char kDatagramTransportFieldTrial[] = "WebRTC-DatagramTransport";
-// Controls datagram transport data channel support.
-const char kDatagramTransportDataChannelFieldTrial[] =
- "WebRTC-DatagramTransportDataChannels";
-
// UMA metric names.
const char kSimulcastVersionApplyLocalDescription[] =
"WebRTC.PeerConnection.Simulcast.ApplyLocalDescription";
@@ -690,6 +683,26 @@ class CreateSessionDescriptionObserverOperationWrapper
std::function<void()> operation_complete_callback_;
};
+// Check if the changes of IceTransportsType motives an ice restart.
+bool NeedIceRestart(bool surface_ice_candidates_on_ice_transport_type_changed,
+ PeerConnectionInterface::IceTransportsType current,
+ PeerConnectionInterface::IceTransportsType modified) {
+ if (current == modified) {
+ return false;
+ }
+
+ if (!surface_ice_candidates_on_ice_transport_type_changed) {
+ return true;
+ }
+
+ auto current_filter = ConvertIceTransportTypeToCandidateFilter(current);
+ auto modified_filter = ConvertIceTransportTypeToCandidateFilter(modified);
+
+ // If surface_ice_candidates_on_ice_transport_type_changed is true and we
+ // extend the filter, then no ice restart is needed.
+ return (current_filter & modified_filter) != current_filter;
+}
+
} // namespace
// Used by parameterless SetLocalDescription() to create an offer or answer.
@@ -896,11 +909,6 @@ bool PeerConnectionInterface::RTCConfiguration::operator==(
SdpSemantics sdp_semantics;
absl::optional<rtc::AdapterType> network_preference;
bool active_reset_srtp_params;
- bool use_media_transport;
- bool use_media_transport_for_data_channels;
- absl::optional<bool> use_datagram_transport;
- absl::optional<bool> use_datagram_transport_for_data_channels;
- absl::optional<bool> use_datagram_transport_for_data_channels_receive_only;
absl::optional<CryptoOptions> crypto_options;
bool offer_extmap_allow_mixed;
std::string turn_logging_id;
@@ -961,20 +969,11 @@ bool PeerConnectionInterface::RTCConfiguration::operator==(
sdp_semantics == o.sdp_semantics &&
network_preference == o.network_preference &&
active_reset_srtp_params == o.active_reset_srtp_params &&
- use_media_transport == o.use_media_transport &&
- use_media_transport_for_data_channels ==
- o.use_media_transport_for_data_channels &&
- use_datagram_transport == o.use_datagram_transport &&
- use_datagram_transport_for_data_channels ==
- o.use_datagram_transport_for_data_channels &&
- use_datagram_transport_for_data_channels_receive_only ==
- o.use_datagram_transport_for_data_channels_receive_only &&
crypto_options == o.crypto_options &&
offer_extmap_allow_mixed == o.offer_extmap_allow_mixed &&
turn_logging_id == o.turn_logging_id &&
enable_implicit_rollback == o.enable_implicit_rollback &&
- allow_codec_switching == o.allow_codec_switching &&
- enable_simulcast_stats == o.enable_simulcast_stats;
+ allow_codec_switching == o.allow_codec_switching;
}
bool PeerConnectionInterface::RTCConfiguration::operator!=(
@@ -1034,14 +1033,11 @@ void ExtractSharedMediaSessionOptions(
PeerConnection::PeerConnection(PeerConnectionFactory* factory,
std::unique_ptr<RtcEventLog> event_log,
std::unique_ptr<Call> call)
- : factory_(factory),
+ : MessageHandler(false),
+ factory_(factory),
event_log_(std::move(event_log)),
event_log_ptr_(event_log_.get()),
operations_chain_(rtc::OperationsChain::Create()),
- datagram_transport_config_(
- field_trial::FindFullName(kDatagramTransportFieldTrial)),
- datagram_transport_data_channel_config_(
- field_trial::FindFullName(kDatagramTransportDataChannelFieldTrial)),
rtcp_cname_(GenerateRtcpCname()),
local_streams_(StreamCollection::Create()),
remote_streams_(StreamCollection::Create()),
@@ -1246,33 +1242,6 @@ bool PeerConnection::Initialize(
#endif
config.active_reset_srtp_params = configuration.active_reset_srtp_params;
- use_datagram_transport_ = datagram_transport_config_.enabled &&
- configuration.use_datagram_transport.value_or(
- datagram_transport_config_.default_value);
- use_datagram_transport_for_data_channels_ =
- datagram_transport_data_channel_config_.enabled &&
- configuration.use_datagram_transport_for_data_channels.value_or(
- datagram_transport_data_channel_config_.default_value);
- use_datagram_transport_for_data_channels_receive_only_ =
- configuration.use_datagram_transport_for_data_channels_receive_only
- .value_or(datagram_transport_data_channel_config_.receive_only);
- if (use_datagram_transport_ || use_datagram_transport_for_data_channels_) {
- if (!factory_->media_transport_factory()) {
- RTC_DCHECK(false)
- << "PeerConnecton is initialized with use_datagram_transport = true "
- "or use_datagram_transport_for_data_channels = true "
- "but media transport factory is not set in PeerConnectionFactory";
- return false;
- }
-
- config.use_datagram_transport = use_datagram_transport_;
- config.use_datagram_transport_for_data_channels =
- use_datagram_transport_for_data_channels_;
- config.use_datagram_transport_for_data_channels_receive_only =
- use_datagram_transport_for_data_channels_receive_only_;
- config.media_transport_factory = factory_->media_transport_factory();
- }
-
// Obtain a certificate from RTCConfiguration if any were provided (optional).
rtc::scoped_refptr<rtc::RTCCertificate> certificate;
if (!configuration.certificates.empty()) {
@@ -1295,24 +1264,7 @@ bool PeerConnection::Initialize(
sctp_factory_ = factory_->CreateSctpTransportInternalFactory();
- if (use_datagram_transport_for_data_channels_) {
- if (configuration.enable_rtp_data_channel) {
- RTC_LOG(LS_ERROR) << "enable_rtp_data_channel and "
- "use_datagram_transport_for_data_channels are "
- "incompatible and cannot both be set to true";
- return false;
- }
- if (configuration.enable_dtls_srtp && !*configuration.enable_dtls_srtp) {
- RTC_LOG(LS_INFO) << "Using data channel transport with no fallback";
- data_channel_controller_.set_data_channel_type(
- cricket::DCT_DATA_CHANNEL_TRANSPORT);
- } else {
- RTC_LOG(LS_INFO) << "Using data channel transport with fallback to SCTP";
- data_channel_controller_.set_data_channel_type(
- cricket::DCT_DATA_CHANNEL_TRANSPORT_SCTP);
- config.sctp_factory = sctp_factory_.get();
- }
- } else if (configuration.enable_rtp_data_channel) {
+ if (configuration.enable_rtp_data_channel) {
// Enable creation of RTP data channels if the kEnableRtpDataChannels is
// set. It takes precendence over the disable_sctp_data_channels
// PeerConnectionFactoryInterface::Options.
@@ -2179,7 +2131,7 @@ rtc::scoped_refptr<DataChannelInterface> PeerConnection::CreateDataChannel(
if (config) {
internal_config.reset(new InternalDataChannelInit(*config));
}
- rtc::scoped_refptr<DataChannelInterface> channel(
+ rtc::scoped_refptr<DataChannel> channel(
data_channel_controller_.InternalCreateDataChannel(
label, internal_config.get()));
if (!channel.get()) {
@@ -2192,7 +2144,7 @@ rtc::scoped_refptr<DataChannelInterface> PeerConnection::CreateDataChannel(
UpdateNegotiationNeeded();
}
NoteUsageEvent(UsageEvent::DATA_ADDED);
- return DataChannelProxy::Create(signaling_thread(), channel.get());
+ return DataChannel::CreateProxy(std::move(channel));
}
void PeerConnection::RestartIce() {
@@ -3937,66 +3889,6 @@ RTCError PeerConnection::SetConfiguration(
"SetLocalDescription.");
}
- if (local_description() && configuration.use_datagram_transport !=
- configuration_.use_datagram_transport) {
- LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_MODIFICATION,
- "Can't change use_datagram_transport "
- "after calling SetLocalDescription.");
- }
-
- if (remote_description() && configuration.use_datagram_transport !=
- configuration_.use_datagram_transport) {
- LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_MODIFICATION,
- "Can't change use_datagram_transport "
- "after calling SetRemoteDescription.");
- }
-
- if (local_description() &&
- configuration.use_datagram_transport_for_data_channels !=
- configuration_.use_datagram_transport_for_data_channels) {
- LOG_AND_RETURN_ERROR(
- RTCErrorType::INVALID_MODIFICATION,
- "Can't change use_datagram_transport_for_data_channels "
- "after calling SetLocalDescription.");
- }
-
- if (remote_description() &&
- configuration.use_datagram_transport_for_data_channels !=
- configuration_.use_datagram_transport_for_data_channels) {
- LOG_AND_RETURN_ERROR(
- RTCErrorType::INVALID_MODIFICATION,
- "Can't change use_datagram_transport_for_data_channels "
- "after calling SetRemoteDescription.");
- }
-
- if (local_description() &&
- configuration.use_datagram_transport_for_data_channels_receive_only !=
- configuration_
- .use_datagram_transport_for_data_channels_receive_only) {
- LOG_AND_RETURN_ERROR(
- RTCErrorType::INVALID_MODIFICATION,
- "Can't change use_datagram_transport_for_data_channels_receive_only "
- "after calling SetLocalDescription.");
- }
-
- if (remote_description() &&
- configuration.use_datagram_transport_for_data_channels_receive_only !=
- configuration_
- .use_datagram_transport_for_data_channels_receive_only) {
- LOG_AND_RETURN_ERROR(
- RTCErrorType::INVALID_MODIFICATION,
- "Can't change use_datagram_transport_for_data_channels_receive_only "
- "after calling SetRemoteDescription.");
- }
-
- if ((configuration.use_datagram_transport &&
- *configuration.use_datagram_transport) ||
- (configuration.use_datagram_transport_for_data_channels &&
- *configuration.use_datagram_transport_for_data_channels)) {
- RTC_CHECK(configuration.bundle_policy == kBundlePolicyMaxBundle)
- << "Media transport requires MaxBundle policy.";
- }
-
// The simplest (and most future-compatible) way to tell if the config was
// modified in an invalid way is to copy each property we do support
// modifying, then use operator==. There are far more properties we don't
@@ -4025,11 +3917,6 @@ RTCError PeerConnection::SetConfiguration(
modified_config.network_preference = configuration.network_preference;
modified_config.active_reset_srtp_params =
configuration.active_reset_srtp_params;
- modified_config.use_datagram_transport = configuration.use_datagram_transport;
- modified_config.use_datagram_transport_for_data_channels =
- configuration.use_datagram_transport_for_data_channels;
- modified_config.use_datagram_transport_for_data_channels_receive_only =
- configuration.use_datagram_transport_for_data_channels_receive_only;
modified_config.turn_logging_id = configuration.turn_logging_id;
modified_config.allow_codec_switching = configuration.allow_codec_switching;
if (configuration != modified_config) {
@@ -4089,7 +3976,9 @@ RTCError PeerConnection::SetConfiguration(
// candidate policy must set a "needs-ice-restart" bit so that the next offer
// triggers an ICE restart which will pick up the changes.
if (modified_config.servers != configuration_.servers ||
- modified_config.type != configuration_.type ||
+ NeedIceRestart(
+ configuration_.surface_ice_candidates_on_ice_transport_type_changed,
+ configuration_.type, modified_config.type) ||
modified_config.GetTurnPortPrunePolicy() !=
configuration_.GetTurnPortPrunePolicy()) {
transport_controller_->SetNeedsIceRestartFlag();
@@ -4097,20 +3986,6 @@ RTCError PeerConnection::SetConfiguration(
transport_controller_->SetIceConfig(ParseIceConfig(modified_config));
- use_datagram_transport_ = datagram_transport_config_.enabled &&
- modified_config.use_datagram_transport.value_or(
- datagram_transport_config_.default_value);
- use_datagram_transport_for_data_channels_ =
- datagram_transport_data_channel_config_.enabled &&
- modified_config.use_datagram_transport_for_data_channels.value_or(
- datagram_transport_data_channel_config_.default_value);
- use_datagram_transport_for_data_channels_receive_only_ =
- modified_config.use_datagram_transport_for_data_channels_receive_only
- .value_or(datagram_transport_data_channel_config_.receive_only);
- transport_controller_->SetMediaTransportSettings(
- use_datagram_transport_, use_datagram_transport_for_data_channels_,
- use_datagram_transport_for_data_channels_receive_only_);
-
if (configuration_.active_reset_srtp_params !=
modified_config.active_reset_srtp_params) {
transport_controller_->SetActiveResetSrtpParams(
@@ -4348,6 +4223,21 @@ PeerConnection::GetFirstAudioTransceiver() const {
return nullptr;
}
+void PeerConnection::AddAdaptationResource(
+ rtc::scoped_refptr<Resource> resource) {
+ if (!worker_thread()->IsCurrent()) {
+ return worker_thread()->Invoke<void>(RTC_FROM_HERE, [this, resource]() {
+ return AddAdaptationResource(resource);
+ });
+ }
+ RTC_DCHECK_RUN_ON(worker_thread());
+ if (!call_) {
+ // The PeerConnection has been closed.
+ return;
+ }
+ call_->AddAdaptationResource(resource);
+}
+
bool PeerConnection::StartRtcEventLog(std::unique_ptr<RtcEventLogOutput> output,
int64_t output_period_ms) {
return worker_thread()->Invoke<bool>(
@@ -4910,25 +4800,6 @@ void PeerConnection::GetOptionsForOffer(
session_options->offer_extmap_allow_mixed =
configuration_.offer_extmap_allow_mixed;
- // If datagram transport is in use, add opaque transport parameters.
- if (use_datagram_transport_ || use_datagram_transport_for_data_channels_) {
- for (auto& options : session_options->media_description_options) {
- absl::optional<cricket::OpaqueTransportParameters> params =
- transport_controller_->GetTransportParameters(options.mid);
- if (!params) {
- continue;
- }
- options.transport_options.opaque_parameters = params;
- if ((use_datagram_transport_ &&
- (options.type == cricket::MEDIA_TYPE_AUDIO ||
- options.type == cricket::MEDIA_TYPE_VIDEO)) ||
- (use_datagram_transport_for_data_channels_ &&
- options.type == cricket::MEDIA_TYPE_DATA)) {
- options.alt_protocol = params->protocol;
- }
- }
- }
-
// Allow fallback for using obsolete SCTP syntax.
// Note that the default in |session_options| is true, while
// the default in |options| is false.
@@ -4984,21 +4855,21 @@ void PeerConnection::GetOptionsForPlanBOffer(
// Add audio/video/data m= sections to the end if needed.
if (!audio_index && offer_new_audio_description) {
- session_options->media_description_options.push_back(
- cricket::MediaDescriptionOptions(
- cricket::MEDIA_TYPE_AUDIO, cricket::CN_AUDIO,
- RtpTransceiverDirectionFromSendRecv(send_audio, recv_audio),
- false));
-
+ cricket::MediaDescriptionOptions options(
+ cricket::MEDIA_TYPE_AUDIO, cricket::CN_AUDIO,
+ RtpTransceiverDirectionFromSendRecv(send_audio, recv_audio), false);
+ options.header_extensions =
+ channel_manager()->GetSupportedAudioRtpHeaderExtensions();
+ session_options->media_description_options.push_back(options);
audio_index = session_options->media_description_options.size() - 1;
}
if (!video_index && offer_new_video_description) {
- session_options->media_description_options.push_back(
- cricket::MediaDescriptionOptions(
- cricket::MEDIA_TYPE_VIDEO, cricket::CN_VIDEO,
- RtpTransceiverDirectionFromSendRecv(send_video, recv_video),
- false));
-
+ cricket::MediaDescriptionOptions options(
+ cricket::MEDIA_TYPE_VIDEO, cricket::CN_VIDEO,
+ RtpTransceiverDirectionFromSendRecv(send_video, recv_video), false);
+ options.header_extensions =
+ channel_manager()->GetSupportedVideoRtpHeaderExtensions();
+ session_options->media_description_options.push_back(options);
video_index = session_options->media_description_options.size() - 1;
}
if (!data_index && offer_new_data_description) {
@@ -5030,6 +4901,8 @@ GetMediaDescriptionOptionsForTransceiver(
transceiver->stopped());
media_description_options.codec_preferences =
transceiver->codec_preferences();
+ media_description_options.header_extensions =
+ transceiver->HeaderExtensionsToOffer();
// This behavior is specified in JSEP. The gist is that:
// 1. The MSID is included if the RtpTransceiver's direction is sendonly or
// sendrecv.
@@ -5234,25 +5107,6 @@ void PeerConnection::GetOptionsForAnswer(
RTC_FROM_HERE,
rtc::Bind(&cricket::PortAllocator::GetPooledIceCredentials,
port_allocator_.get()));
-
- // If datagram transport is in use, add opaque transport parameters.
- if (use_datagram_transport_ || use_datagram_transport_for_data_channels_) {
- for (auto& options : session_options->media_description_options) {
- absl::optional<cricket::OpaqueTransportParameters> params =
- transport_controller_->GetTransportParameters(options.mid);
- if (!params) {
- continue;
- }
- options.transport_options.opaque_parameters = params;
- if ((use_datagram_transport_ &&
- (options.type == cricket::MEDIA_TYPE_AUDIO ||
- options.type == cricket::MEDIA_TYPE_VIDEO)) ||
- (use_datagram_transport_for_data_channels_ &&
- options.type == cricket::MEDIA_TYPE_DATA)) {
- options.alt_protocol = params->protocol;
- }
- }
- }
}
void PeerConnection::GetOptionsForPlanBAnswer(
@@ -5361,6 +5215,8 @@ void PeerConnection::GenerateMediaDescriptionOptions(
stopped));
*audio_index = session_options->media_description_options.size() - 1;
}
+ session_options->media_description_options.back().header_extensions =
+ channel_manager()->GetSupportedAudioRtpHeaderExtensions();
} else if (IsVideoContent(&content)) {
// If we already have an video m= section, reject this extra one.
if (*video_index) {
@@ -5376,6 +5232,8 @@ void PeerConnection::GenerateMediaDescriptionOptions(
stopped));
*video_index = session_options->media_description_options.size() - 1;
}
+ session_options->media_description_options.back().header_extensions =
+ channel_manager()->GetSupportedVideoRtpHeaderExtensions();
} else {
RTC_DCHECK(IsDataContent(&content));
// If we already have an data m= section, reject this extra one.
@@ -6188,6 +6046,11 @@ cricket::IceConfig PeerConnection::ParseIceConfig(
return ice_config;
}
+std::vector<DataChannel::Stats> PeerConnection::GetDataChannelStats() const {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ return data_channel_controller_.GetDataChannelStats();
+}
+
absl::optional<std::string> PeerConnection::sctp_transport_name() const {
RTC_DCHECK_RUN_ON(signaling_thread());
if (sctp_mid_s_ && transport_controller_) {
@@ -6612,13 +6475,11 @@ RTCError PeerConnection::CreateChannels(const SessionDescription& desc) {
cricket::VoiceChannel* PeerConnection::CreateVoiceChannel(
const std::string& mid) {
RtpTransportInternal* rtp_transport = GetRtpTransport(mid);
- MediaTransportConfig media_transport_config =
- transport_controller_->GetMediaTransportConfig(mid);
cricket::VoiceChannel* voice_channel = channel_manager()->CreateVoiceChannel(
- call_ptr_, configuration_.media_config, rtp_transport,
- media_transport_config, signaling_thread(), mid, SrtpRequired(),
- GetCryptoOptions(), &ssrc_generator_, audio_options_);
+ call_ptr_, configuration_.media_config, rtp_transport, signaling_thread(),
+ mid, SrtpRequired(), GetCryptoOptions(), &ssrc_generator_,
+ audio_options_);
if (!voice_channel) {
return nullptr;
}
@@ -6635,13 +6496,10 @@ cricket::VoiceChannel* PeerConnection::CreateVoiceChannel(
cricket::VideoChannel* PeerConnection::CreateVideoChannel(
const std::string& mid) {
RtpTransportInternal* rtp_transport = GetRtpTransport(mid);
- MediaTransportConfig media_transport_config =
- transport_controller_->GetMediaTransportConfig(mid);
cricket::VideoChannel* video_channel = channel_manager()->CreateVideoChannel(
- call_ptr_, configuration_.media_config, rtp_transport,
- media_transport_config, signaling_thread(), mid, SrtpRequired(),
- GetCryptoOptions(), &ssrc_generator_, video_options_,
+ call_ptr_, configuration_.media_config, rtp_transport, signaling_thread(),
+ mid, SrtpRequired(), GetCryptoOptions(), &ssrc_generator_, video_options_,
video_bitrate_allocator_factory_.get());
if (!video_channel) {
return nullptr;
@@ -6668,16 +6526,12 @@ bool PeerConnection::CreateDataChannel(const std::string& mid) {
} else {
return false;
}
-
- // All non-RTP data channels must initialize |sctp_data_channels_|.
- for (const auto& channel :
- *data_channel_controller_.sctp_data_channels()) {
- channel->OnTransportChannelCreated();
- }
return true;
case cricket::DCT_RTP:
default:
RtpTransportInternal* rtp_transport = GetRtpTransport(mid);
+ // TODO(bugs.webrtc.org/9987): set_rtp_data_channel() should be called on
+ // the network thread like set_data_channel_transport is.
data_channel_controller_.set_rtp_data_channel(
channel_manager()->CreateRtpDataChannel(
configuration_.media_config, rtp_transport, signaling_thread(),
@@ -7090,8 +6944,7 @@ bool PeerConnection::ReadyToUseRemoteCandidate(
}
bool PeerConnection::SrtpRequired() const {
- return !use_datagram_transport_ &&
- (dtls_enabled_ ||
+ return (dtls_enabled_ ||
webrtc_session_desc_factory_->SdesPolicy() == cricket::SEC_REQUIRED);
}
diff --git a/chromium/third_party/webrtc/pc/peer_connection.h b/chromium/third_party/webrtc/pc/peer_connection.h
index f3102572fbe..a91dc9c0423 100644
--- a/chromium/third_party/webrtc/pc/peer_connection.h
+++ b/chromium/third_party/webrtc/pc/peer_connection.h
@@ -237,6 +237,8 @@ class PeerConnection : public PeerConnectionInternal,
rtc::scoped_refptr<SctpTransportInterface> GetSctpTransport() const override;
+ void AddAdaptationResource(rtc::scoped_refptr<Resource> resource) override;
+
bool StartRtcEventLog(std::unique_ptr<RtcEventLogOutput> output,
int64_t output_period_ms) override;
bool StartRtcEventLog(std::unique_ptr<RtcEventLogOutput> output) override;
@@ -278,11 +280,7 @@ class PeerConnection : public PeerConnectionInternal,
return data_channel_controller_.rtp_data_channel();
}
- std::vector<rtc::scoped_refptr<DataChannel>> sctp_data_channels()
- const override {
- RTC_DCHECK_RUN_ON(signaling_thread());
- return *data_channel_controller_.sctp_data_channels();
- }
+ std::vector<DataChannel::Stats> GetDataChannelStats() const override;
absl::optional<std::string> sctp_transport_name() const override;
@@ -360,57 +358,6 @@ class PeerConnection : public PeerConnectionInternal,
uint32_t first_ssrc;
};
- // Field-trial based configuration for datagram transport.
- struct DatagramTransportConfig {
- explicit DatagramTransportConfig(const std::string& field_trial)
- : enabled("enabled", true), default_value("default_value", false) {
- ParseFieldTrial({&enabled, &default_value}, field_trial);
- }
-
- // Whether datagram transport support is enabled at all. Defaults to true,
- // allowing datagram transport to be used if (a) the application provides a
- // factory for it and (b) the configuration specifies its use. This flag
- // provides a kill-switch to force-disable datagram transport across all
- // applications, without code changes.
- FieldTrialFlag enabled;
-
- // Whether the datagram transport is enabled or disabled by default.
- // Defaults to false, meaning that applications must configure use of
- // datagram transport through RTCConfiguration. If set to true,
- // applications will use the datagram transport by default (but may still
- // explicitly configure themselves not to use it through RTCConfiguration).
- FieldTrialFlag default_value;
- };
-
- // Field-trial based configuration for datagram transport data channels.
- struct DatagramTransportDataChannelConfig {
- explicit DatagramTransportDataChannelConfig(const std::string& field_trial)
- : enabled("enabled", true),
- default_value("default_value", false),
- receive_only("receive_only", false) {
- ParseFieldTrial({&enabled, &default_value, &receive_only}, field_trial);
- }
-
- // Whether datagram transport data channel support is enabled at all.
- // Defaults to true, allowing datagram transport to be used if (a) the
- // application provides a factory for it and (b) the configuration specifies
- // its use. This flag provides a kill-switch to force-disable datagram
- // transport across all applications, without code changes.
- FieldTrialFlag enabled;
-
- // Whether the datagram transport data channels are enabled or disabled by
- // default. Defaults to false, meaning that applications must configure use
- // of datagram transport through RTCConfiguration. If set to true,
- // applications will use the datagram transport by default (but may still
- // explicitly configure themselves not to use it through RTCConfiguration).
- FieldTrialFlag default_value;
-
- // Whether the datagram transport is enabled in receive-only mode. If true,
- // and if the datagram transport is enabled, it will only be used when
- // receiving incoming calls, not when placing outgoing calls.
- FieldTrialFlag receive_only;
- };
-
// Captures partial state to be used for rollback. Applicable only in
// Unified Plan.
class TransceiverStableState {
@@ -1214,25 +1161,6 @@ class PeerConnection : public PeerConnectionInternal,
PeerConnectionInterface::RTCConfiguration configuration_
RTC_GUARDED_BY(signaling_thread());
- // Field-trial based configuration for datagram transport.
- const DatagramTransportConfig datagram_transport_config_;
-
- // Field-trial based configuration for datagram transport data channels.
- const DatagramTransportDataChannelConfig
- datagram_transport_data_channel_config_;
-
- // Final, resolved value for whether datagram transport is in use.
- bool use_datagram_transport_ RTC_GUARDED_BY(signaling_thread()) = false;
-
- // Equivalent of |use_datagram_transport_|, but for its use with data
- // channels.
- bool use_datagram_transport_for_data_channels_
- RTC_GUARDED_BY(signaling_thread()) = false;
-
- // Resolved value of whether to use data channels only for incoming calls.
- bool use_datagram_transport_for_data_channels_receive_only_
- RTC_GUARDED_BY(signaling_thread()) = false;
-
// TODO(zstein): |async_resolver_factory_| can currently be nullptr if it
// is not injected. It should be required once chromium supplies it.
std::unique_ptr<AsyncResolverFactory> async_resolver_factory_
diff --git a/chromium/third_party/webrtc/pc/peer_connection_adaptation_integrationtest.cc b/chromium/third_party/webrtc/pc/peer_connection_adaptation_integrationtest.cc
new file mode 100644
index 00000000000..71d054eb909
--- /dev/null
+++ b/chromium/third_party/webrtc/pc/peer_connection_adaptation_integrationtest.cc
@@ -0,0 +1,161 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+#include "api/audio_codecs/builtin_audio_encoder_factory.h"
+#include "api/rtp_parameters.h"
+#include "api/scoped_refptr.h"
+#include "call/adaptation/test/fake_resource.h"
+#include "pc/test/fake_periodic_video_source.h"
+#include "pc/test/fake_periodic_video_track_source.h"
+#include "pc/test/peer_connection_test_wrapper.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/ref_counted_object.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/virtual_socket_server.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+
+const int64_t kDefaultTimeoutMs = 5000;
+
+struct TrackWithPeriodicSource {
+ rtc::scoped_refptr<VideoTrackInterface> track;
+ rtc::scoped_refptr<FakePeriodicVideoTrackSource> periodic_track_source;
+};
+
+// Performs an O/A exchange and waits until the signaling state is stable again.
+void Negotiate(rtc::scoped_refptr<PeerConnectionTestWrapper> caller,
+ rtc::scoped_refptr<PeerConnectionTestWrapper> callee) {
+ // Wire up callbacks and listeners such that a full O/A is performed in
+ // response to CreateOffer().
+ PeerConnectionTestWrapper::Connect(caller.get(), callee.get());
+ caller->CreateOffer(PeerConnectionInterface::RTCOfferAnswerOptions());
+ caller->WaitForNegotiation();
+}
+
+TrackWithPeriodicSource CreateTrackWithPeriodicSource(
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory) {
+ FakePeriodicVideoSource::Config periodic_track_source_config;
+ periodic_track_source_config.frame_interval_ms = 100;
+ periodic_track_source_config.timestamp_offset_ms = rtc::TimeMillis();
+ rtc::scoped_refptr<FakePeriodicVideoTrackSource> periodic_track_source =
+ new rtc::RefCountedObject<FakePeriodicVideoTrackSource>(
+ periodic_track_source_config, /* remote */ false);
+ TrackWithPeriodicSource track_with_source;
+ track_with_source.track =
+ factory->CreateVideoTrack("PeriodicTrack", periodic_track_source);
+ track_with_source.periodic_track_source = periodic_track_source;
+ return track_with_source;
+}
+
+// Triggers overuse and obtains VideoSinkWants. Adaptation processing happens in
+// parallel and this function makes no guarantee that the returnd VideoSinkWants
+// have yet to reflect the overuse signal. Used together with EXPECT_TRUE_WAIT
+// to "spam overuse until a change is observed".
+rtc::VideoSinkWants TriggerOveruseAndGetSinkWants(
+ rtc::scoped_refptr<FakeResource> fake_resource,
+ const FakePeriodicVideoSource& source) {
+ fake_resource->SetUsageState(ResourceUsageState::kOveruse);
+ return source.wants();
+}
+
+class PeerConnectionAdaptationIntegrationTest : public ::testing::Test {
+ public:
+ PeerConnectionAdaptationIntegrationTest()
+ : virtual_socket_server_(),
+ network_thread_(new rtc::Thread(&virtual_socket_server_)),
+ worker_thread_(rtc::Thread::Create()) {
+ RTC_CHECK(network_thread_->Start());
+ RTC_CHECK(worker_thread_->Start());
+ }
+
+ rtc::scoped_refptr<PeerConnectionTestWrapper> CreatePcWrapper(
+ const char* name) {
+ rtc::scoped_refptr<PeerConnectionTestWrapper> pc_wrapper =
+ new rtc::RefCountedObject<PeerConnectionTestWrapper>(
+ name, network_thread_.get(), worker_thread_.get());
+ PeerConnectionInterface::RTCConfiguration config;
+ config.sdp_semantics = SdpSemantics::kUnifiedPlan;
+ EXPECT_TRUE(pc_wrapper->CreatePc(config, CreateBuiltinAudioEncoderFactory(),
+ CreateBuiltinAudioDecoderFactory()));
+ return pc_wrapper;
+ }
+
+ protected:
+ rtc::VirtualSocketServer virtual_socket_server_;
+ std::unique_ptr<rtc::Thread> network_thread_;
+ std::unique_ptr<rtc::Thread> worker_thread_;
+};
+
+TEST_F(PeerConnectionAdaptationIntegrationTest,
+ ResouceInjectedAfterNegotiationCausesReductionInResolution) {
+ auto caller_wrapper = CreatePcWrapper("caller");
+ auto caller = caller_wrapper->pc();
+ auto callee_wrapper = CreatePcWrapper("callee");
+
+ // Adding a track and negotiating ensures that a VideoSendStream exists.
+ TrackWithPeriodicSource track_with_source =
+ CreateTrackWithPeriodicSource(caller_wrapper->pc_factory());
+ auto sender = caller->AddTrack(track_with_source.track, {}).value();
+ Negotiate(caller_wrapper, callee_wrapper);
+ // Prefer degrading resolution.
+ auto parameters = sender->GetParameters();
+ parameters.degradation_preference = DegradationPreference::MAINTAIN_FRAMERATE;
+ sender->SetParameters(parameters);
+
+ const auto& source =
+ track_with_source.periodic_track_source->fake_periodic_source();
+ int pixel_count_before_overuse = source.wants().max_pixel_count;
+
+ // Inject a fake resource and spam kOveruse until resolution becomes limited.
+ auto fake_resource = FakeResource::Create("FakeResource");
+ caller->AddAdaptationResource(fake_resource);
+ EXPECT_TRUE_WAIT(
+ TriggerOveruseAndGetSinkWants(fake_resource, source).max_pixel_count <
+ pixel_count_before_overuse,
+ kDefaultTimeoutMs);
+}
+
+TEST_F(PeerConnectionAdaptationIntegrationTest,
+ ResouceInjectedBeforeNegotiationCausesReductionInResolution) {
+ auto caller_wrapper = CreatePcWrapper("caller");
+ auto caller = caller_wrapper->pc();
+ auto callee_wrapper = CreatePcWrapper("callee");
+
+ // Inject a fake resource before adding any tracks or negotiating.
+ auto fake_resource = FakeResource::Create("FakeResource");
+ caller->AddAdaptationResource(fake_resource);
+
+ // Adding a track and negotiating ensures that a VideoSendStream exists.
+ TrackWithPeriodicSource track_with_source =
+ CreateTrackWithPeriodicSource(caller_wrapper->pc_factory());
+ auto sender = caller->AddTrack(track_with_source.track, {}).value();
+ Negotiate(caller_wrapper, callee_wrapper);
+ // Prefer degrading resolution.
+ auto parameters = sender->GetParameters();
+ parameters.degradation_preference = DegradationPreference::MAINTAIN_FRAMERATE;
+ sender->SetParameters(parameters);
+
+ const auto& source =
+ track_with_source.periodic_track_source->fake_periodic_source();
+ int pixel_count_before_overuse = source.wants().max_pixel_count;
+
+ // Spam kOveruse until resolution becomes limited.
+ EXPECT_TRUE_WAIT(
+ TriggerOveruseAndGetSinkWants(fake_resource, source).max_pixel_count <
+ pixel_count_before_overuse,
+ kDefaultTimeoutMs);
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/pc/peer_connection_factory.cc b/chromium/third_party/webrtc/pc/peer_connection_factory.cc
index 9a758bea2d0..2ff8dee4d00 100644
--- a/chromium/third_party/webrtc/pc/peer_connection_factory.cc
+++ b/chromium/third_party/webrtc/pc/peer_connection_factory.cc
@@ -23,7 +23,6 @@
#include "api/peer_connection_proxy.h"
#include "api/rtc_event_log/rtc_event_log.h"
#include "api/transport/field_trial_based_config.h"
-#include "api/transport/media/media_transport_interface.h"
#include "api/turn_customizer.h"
#include "api/units/data_rate.h"
#include "api/video_track_source_proxy.h"
@@ -82,7 +81,6 @@ PeerConnectionFactory::PeerConnectionFactory(
std::move(dependencies.network_state_predictor_factory)),
injected_network_controller_factory_(
std::move(dependencies.network_controller_factory)),
- media_transport_factory_(std::move(dependencies.media_transport_factory)),
neteq_factory_(std::move(dependencies.neteq_factory)),
trials_(dependencies.trials ? std::move(dependencies.trials)
: std::make_unique<FieldTrialBasedConfig>()) {
diff --git a/chromium/third_party/webrtc/pc/peer_connection_factory.h b/chromium/third_party/webrtc/pc/peer_connection_factory.h
index 962b08c7c99..58859a0296a 100644
--- a/chromium/third_party/webrtc/pc/peer_connection_factory.h
+++ b/chromium/third_party/webrtc/pc/peer_connection_factory.h
@@ -18,7 +18,6 @@
#include "api/media_stream_interface.h"
#include "api/peer_connection_interface.h"
#include "api/scoped_refptr.h"
-#include "api/transport/media/media_transport_interface.h"
#include "media/sctp/sctp_transport_internal.h"
#include "pc/channel_manager.h"
#include "rtc_base/rtc_certificate_generator.h"
@@ -87,10 +86,6 @@ class PeerConnectionFactory : public PeerConnectionFactoryInterface {
const Options& options() const { return options_; }
- MediaTransportFactory* media_transport_factory() {
- return media_transport_factory_.get();
- }
-
protected:
// This structure allows simple management of all new dependencies being added
// to the PeerConnectionFactory.
@@ -128,7 +123,6 @@ class PeerConnectionFactory : public PeerConnectionFactoryInterface {
network_state_predictor_factory_;
std::unique_ptr<NetworkControllerFactoryInterface>
injected_network_controller_factory_;
- std::unique_ptr<MediaTransportFactory> media_transport_factory_;
std::unique_ptr<NetEqFactory> neteq_factory_;
const std::unique_ptr<WebRtcKeyValueConfig> trials_;
};
diff --git a/chromium/third_party/webrtc/pc/peer_connection_header_extension_unittest.cc b/chromium/third_party/webrtc/pc/peer_connection_header_extension_unittest.cc
index 3f44d4f877d..62fda592127 100644
--- a/chromium/third_party/webrtc/pc/peer_connection_header_extension_unittest.cc
+++ b/chromium/third_party/webrtc/pc/peer_connection_header_extension_unittest.cc
@@ -33,16 +33,31 @@ class PeerConnectionHeaderExtensionTest
: public ::testing::TestWithParam<
std::tuple<cricket::MediaType, SdpSemantics>> {
protected:
+ PeerConnectionHeaderExtensionTest()
+ : extensions_(
+ {RtpHeaderExtensionCapability("uri1",
+ 1,
+ RtpTransceiverDirection::kStopped),
+ RtpHeaderExtensionCapability("uri2",
+ 2,
+ RtpTransceiverDirection::kSendOnly),
+ RtpHeaderExtensionCapability("uri3",
+ 3,
+ RtpTransceiverDirection::kRecvOnly),
+ RtpHeaderExtensionCapability(
+ "uri4",
+ 4,
+ RtpTransceiverDirection::kSendRecv)}) {}
+
std::unique_ptr<PeerConnectionWrapper> CreatePeerConnection(
cricket::MediaType media_type,
- absl::optional<SdpSemantics> semantics,
- std::vector<RtpHeaderExtensionCapability> extensions) {
+ absl::optional<SdpSemantics> semantics) {
auto voice = std::make_unique<cricket::FakeVoiceEngine>();
auto video = std::make_unique<cricket::FakeVideoEngine>();
if (media_type == cricket::MediaType::MEDIA_TYPE_AUDIO)
- voice->SetRtpHeaderExtensions(extensions);
+ voice->SetRtpHeaderExtensions(extensions_);
else
- video->SetRtpHeaderExtensions(extensions);
+ video->SetRtpHeaderExtensions(extensions_);
auto media_engine = std::make_unique<cricket::CompositeMediaEngine>(
std::move(voice), std::move(video));
PeerConnectionFactoryDependencies factory_dependencies;
@@ -71,6 +86,8 @@ class PeerConnectionHeaderExtensionTest
return std::make_unique<PeerConnectionWrapper>(pc_factory, pc,
std::move(observer));
}
+
+ std::vector<RtpHeaderExtensionCapability> extensions_;
};
TEST_P(PeerConnectionHeaderExtensionTest, TransceiverOffersHeaderExtensions) {
@@ -79,19 +96,10 @@ TEST_P(PeerConnectionHeaderExtensionTest, TransceiverOffersHeaderExtensions) {
std::tie(media_type, semantics) = GetParam();
if (semantics != SdpSemantics::kUnifiedPlan)
return;
- std::vector<RtpHeaderExtensionCapability> extensions(
- {RtpHeaderExtensionCapability("uri1", 1,
- RtpTransceiverDirection::kStopped),
- RtpHeaderExtensionCapability("uri2", 2,
- RtpTransceiverDirection::kSendOnly),
- RtpHeaderExtensionCapability("uri3", 3,
- RtpTransceiverDirection::kRecvOnly),
- RtpHeaderExtensionCapability("uri4", 4,
- RtpTransceiverDirection::kSendRecv)});
std::unique_ptr<PeerConnectionWrapper> wrapper =
- CreatePeerConnection(media_type, semantics, extensions);
+ CreatePeerConnection(media_type, semantics);
auto transceiver = wrapper->AddTransceiver(media_type);
- EXPECT_EQ(transceiver->HeaderExtensionsToOffer(), extensions);
+ EXPECT_EQ(transceiver->HeaderExtensionsToOffer(), extensions_);
}
TEST_P(PeerConnectionHeaderExtensionTest,
@@ -99,20 +107,14 @@ TEST_P(PeerConnectionHeaderExtensionTest,
cricket::MediaType media_type;
SdpSemantics semantics;
std::tie(media_type, semantics) = GetParam();
- std::unique_ptr<PeerConnectionWrapper> wrapper = CreatePeerConnection(
- media_type, semantics,
- std::vector<RtpHeaderExtensionCapability>(
- {RtpHeaderExtensionCapability("uri1", 1,
- RtpTransceiverDirection::kSendRecv),
- RtpHeaderExtensionCapability("uri2", 2,
- RtpTransceiverDirection::kStopped),
- RtpHeaderExtensionCapability("uri3", 3,
- RtpTransceiverDirection::kRecvOnly)}));
+ std::unique_ptr<PeerConnectionWrapper> wrapper =
+ CreatePeerConnection(media_type, semantics);
EXPECT_THAT(wrapper->pc_factory()
->GetRtpSenderCapabilities(media_type)
.header_extensions,
- ElementsAre(Field(&RtpHeaderExtensionCapability::uri, "uri1"),
- Field(&RtpHeaderExtensionCapability::uri, "uri3")));
+ ElementsAre(Field(&RtpHeaderExtensionCapability::uri, "uri2"),
+ Field(&RtpHeaderExtensionCapability::uri, "uri3"),
+ Field(&RtpHeaderExtensionCapability::uri, "uri4")));
EXPECT_EQ(wrapper->pc_factory()
->GetRtpReceiverCapabilities(media_type)
.header_extensions,
@@ -121,6 +123,49 @@ TEST_P(PeerConnectionHeaderExtensionTest,
.header_extensions);
}
+TEST_P(PeerConnectionHeaderExtensionTest, OffersUnstoppedDefaultExtensions) {
+ cricket::MediaType media_type;
+ SdpSemantics semantics;
+ std::tie(media_type, semantics) = GetParam();
+ if (semantics != SdpSemantics::kUnifiedPlan)
+ return;
+ std::unique_ptr<PeerConnectionWrapper> wrapper =
+ CreatePeerConnection(media_type, semantics);
+ auto transceiver = wrapper->AddTransceiver(media_type);
+ auto session_description = wrapper->CreateOffer();
+ EXPECT_THAT(session_description->description()
+ ->contents()[0]
+ .media_description()
+ ->rtp_header_extensions(),
+ ElementsAre(Field(&RtpExtension::uri, "uri2"),
+ Field(&RtpExtension::uri, "uri3"),
+ Field(&RtpExtension::uri, "uri4")));
+}
+
+TEST_P(PeerConnectionHeaderExtensionTest, OffersUnstoppedModifiedExtensions) {
+ cricket::MediaType media_type;
+ SdpSemantics semantics;
+ std::tie(media_type, semantics) = GetParam();
+ if (semantics != SdpSemantics::kUnifiedPlan)
+ return;
+ std::unique_ptr<PeerConnectionWrapper> wrapper =
+ CreatePeerConnection(media_type, semantics);
+ auto transceiver = wrapper->AddTransceiver(media_type);
+ auto modified_extensions = transceiver->HeaderExtensionsToOffer();
+ modified_extensions[0].direction = RtpTransceiverDirection::kSendRecv;
+ modified_extensions[3].direction = RtpTransceiverDirection::kStopped;
+ EXPECT_TRUE(
+ transceiver->SetOfferedRtpHeaderExtensions(modified_extensions).ok());
+ auto session_description = wrapper->CreateOffer();
+ EXPECT_THAT(session_description->description()
+ ->contents()[0]
+ .media_description()
+ ->rtp_header_extensions(),
+ ElementsAre(Field(&RtpExtension::uri, "uri1"),
+ Field(&RtpExtension::uri, "uri2"),
+ Field(&RtpExtension::uri, "uri3")));
+}
+
INSTANTIATE_TEST_SUITE_P(
,
PeerConnectionHeaderExtensionTest,
diff --git a/chromium/third_party/webrtc/pc/peer_connection_integrationtest.cc b/chromium/third_party/webrtc/pc/peer_connection_integrationtest.cc
index f3b4f283606..afb5f2ba752 100644
--- a/chromium/third_party/webrtc/pc/peer_connection_integrationtest.cc
+++ b/chromium/third_party/webrtc/pc/peer_connection_integrationtest.cc
@@ -28,7 +28,6 @@
#include "api/rtc_event_log/rtc_event_log_factory.h"
#include "api/rtp_receiver_interface.h"
#include "api/task_queue/default_task_queue_factory.h"
-#include "api/test/loopback_media_transport.h"
#include "api/uma_metrics.h"
#include "api/video_codecs/sdp_video_format.h"
#include "call/call.h"
@@ -215,7 +214,6 @@ class PeerConnectionWrapper : public webrtc::PeerConnectionObserver,
dependencies.cert_generator = std::move(cert_generator);
if (!client->Init(nullptr, nullptr, std::move(dependencies), network_thread,
worker_thread, nullptr,
- /*media_transport_factory=*/nullptr,
/*reset_encoder_factory=*/false,
/*reset_decoder_factory=*/false)) {
delete client;
@@ -611,7 +609,6 @@ class PeerConnectionWrapper : public webrtc::PeerConnectionObserver,
rtc::Thread* network_thread,
rtc::Thread* worker_thread,
std::unique_ptr<webrtc::FakeRtcEventLogFactory> event_log_factory,
- std::unique_ptr<webrtc::MediaTransportFactory> media_transport_factory,
bool reset_encoder_factory,
bool reset_decoder_factory) {
// There's an error in this test code if Init ends up being called twice.
@@ -666,10 +663,6 @@ class PeerConnectionWrapper : public webrtc::PeerConnectionObserver,
std::make_unique<webrtc::RtcEventLogFactory>(
pc_factory_dependencies.task_queue_factory.get());
}
- if (media_transport_factory) {
- pc_factory_dependencies.media_transport_factory =
- std::move(media_transport_factory);
- }
peer_connection_factory_ = webrtc::CreateModularPeerConnectionFactory(
std::move(pc_factory_dependencies));
@@ -1082,8 +1075,8 @@ class PeerConnectionWrapper : public webrtc::PeerConnectionObserver,
class MockRtcEventLogOutput : public webrtc::RtcEventLogOutput {
public:
virtual ~MockRtcEventLogOutput() = default;
- MOCK_CONST_METHOD0(IsActive, bool());
- MOCK_METHOD1(Write, bool(const std::string&));
+ MOCK_METHOD(bool, IsActive, (), (const, override));
+ MOCK_METHOD(bool, Write, (const std::string&), (override));
};
// This helper object is used for both specifying how many audio/video frames
@@ -1216,7 +1209,7 @@ class MockIceTransportFactory : public IceTransportFactory {
return new rtc::RefCountedObject<MockIceTransport>(transport_name,
component);
}
- MOCK_METHOD0(RecordIceTransportCreated, void());
+ MOCK_METHOD(void, RecordIceTransportCreated, ());
};
// Tests two PeerConnections connecting to each other end-to-end, using a
@@ -1230,8 +1223,7 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test {
ss_(new rtc::VirtualSocketServer()),
fss_(new rtc::FirewallSocketServer(ss_.get())),
network_thread_(new rtc::Thread(fss_.get())),
- worker_thread_(rtc::Thread::Create()),
- loopback_media_transports_(network_thread_.get()) {
+ worker_thread_(rtc::Thread::Create()) {
network_thread_->SetName("PCNetworkThread", this);
worker_thread_->SetName("PCWorkerThread", this);
RTC_CHECK(network_thread_->Start());
@@ -1288,7 +1280,6 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test {
const RTCConfiguration* config,
webrtc::PeerConnectionDependencies dependencies,
std::unique_ptr<webrtc::FakeRtcEventLogFactory> event_log_factory,
- std::unique_ptr<webrtc::MediaTransportFactory> media_transport_factory,
bool reset_encoder_factory,
bool reset_decoder_factory) {
RTCConfiguration modified_config;
@@ -1305,8 +1296,7 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test {
if (!client->Init(options, &modified_config, std::move(dependencies),
network_thread_.get(), worker_thread_.get(),
- std::move(event_log_factory),
- std::move(media_transport_factory), reset_encoder_factory,
+ std::move(event_log_factory), reset_encoder_factory,
reset_decoder_factory)) {
return nullptr;
}
@@ -1321,11 +1311,11 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test {
webrtc::PeerConnectionDependencies dependencies) {
std::unique_ptr<webrtc::FakeRtcEventLogFactory> event_log_factory(
new webrtc::FakeRtcEventLogFactory(rtc::Thread::Current()));
- return CreatePeerConnectionWrapper(
- debug_name, options, config, std::move(dependencies),
- std::move(event_log_factory),
- /*media_transport_factory=*/nullptr, /*reset_encoder_factory=*/false,
- /*reset_decoder_factory=*/false);
+ return CreatePeerConnectionWrapper(debug_name, options, config,
+ std::move(dependencies),
+ std::move(event_log_factory),
+ /*reset_encoder_factory=*/false,
+ /*reset_decoder_factory=*/false);
}
bool CreatePeerConnectionWrappers() {
@@ -1346,13 +1336,13 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test {
sdp_semantics_ = caller_semantics;
caller_ = CreatePeerConnectionWrapper(
"Caller", nullptr, nullptr, webrtc::PeerConnectionDependencies(nullptr),
- nullptr, /*media_transport_factory=*/nullptr,
+ nullptr,
/*reset_encoder_factory=*/false,
/*reset_decoder_factory=*/false);
sdp_semantics_ = callee_semantics;
callee_ = CreatePeerConnectionWrapper(
"Callee", nullptr, nullptr, webrtc::PeerConnectionDependencies(nullptr),
- nullptr, /*media_transport_factory=*/nullptr,
+ nullptr,
/*reset_encoder_factory=*/false,
/*reset_decoder_factory=*/false);
sdp_semantics_ = original_semantics;
@@ -1365,30 +1355,12 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test {
caller_ = CreatePeerConnectionWrapper(
"Caller", nullptr, &caller_config,
webrtc::PeerConnectionDependencies(nullptr), nullptr,
- /*media_transport_factory=*/nullptr, /*reset_encoder_factory=*/false,
- /*reset_decoder_factory=*/false);
- callee_ = CreatePeerConnectionWrapper(
- "Callee", nullptr, &callee_config,
- webrtc::PeerConnectionDependencies(nullptr), nullptr,
- /*media_transport_factory=*/nullptr, /*reset_encoder_factory=*/false,
- /*reset_decoder_factory=*/false);
- return caller_ && callee_;
- }
-
- bool CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory(
- const PeerConnectionInterface::RTCConfiguration& caller_config,
- const PeerConnectionInterface::RTCConfiguration& callee_config,
- std::unique_ptr<webrtc::MediaTransportFactory> caller_factory,
- std::unique_ptr<webrtc::MediaTransportFactory> callee_factory) {
- caller_ = CreatePeerConnectionWrapper(
- "Caller", nullptr, &caller_config,
- webrtc::PeerConnectionDependencies(nullptr), nullptr,
- std::move(caller_factory), /*reset_encoder_factory=*/false,
+ /*reset_encoder_factory=*/false,
/*reset_decoder_factory=*/false);
callee_ = CreatePeerConnectionWrapper(
"Callee", nullptr, &callee_config,
webrtc::PeerConnectionDependencies(nullptr), nullptr,
- std::move(callee_factory), /*reset_encoder_factory=*/false,
+ /*reset_encoder_factory=*/false,
/*reset_decoder_factory=*/false);
return caller_ && callee_;
}
@@ -1398,16 +1370,16 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test {
webrtc::PeerConnectionDependencies caller_dependencies,
const PeerConnectionInterface::RTCConfiguration& callee_config,
webrtc::PeerConnectionDependencies callee_dependencies) {
- caller_ = CreatePeerConnectionWrapper(
- "Caller", nullptr, &caller_config, std::move(caller_dependencies),
- nullptr,
- /*media_transport_factory=*/nullptr, /*reset_encoder_factory=*/false,
- /*reset_decoder_factory=*/false);
- callee_ = CreatePeerConnectionWrapper(
- "Callee", nullptr, &callee_config, std::move(callee_dependencies),
- nullptr,
- /*media_transport_factory=*/nullptr, /*reset_encoder_factory=*/false,
- /*reset_decoder_factory=*/false);
+ caller_ =
+ CreatePeerConnectionWrapper("Caller", nullptr, &caller_config,
+ std::move(caller_dependencies), nullptr,
+ /*reset_encoder_factory=*/false,
+ /*reset_decoder_factory=*/false);
+ callee_ =
+ CreatePeerConnectionWrapper("Callee", nullptr, &callee_config,
+ std::move(callee_dependencies), nullptr,
+ /*reset_encoder_factory=*/false,
+ /*reset_decoder_factory=*/false);
return caller_ && callee_;
}
@@ -1417,12 +1389,12 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test {
caller_ = CreatePeerConnectionWrapper(
"Caller", &caller_options, nullptr,
webrtc::PeerConnectionDependencies(nullptr), nullptr,
- /*media_transport_factory=*/nullptr, /*reset_encoder_factory=*/false,
+ /*reset_encoder_factory=*/false,
/*reset_decoder_factory=*/false);
callee_ = CreatePeerConnectionWrapper(
"Callee", &callee_options, nullptr,
webrtc::PeerConnectionDependencies(nullptr), nullptr,
- /*media_transport_factory=*/nullptr, /*reset_encoder_factory=*/false,
+ /*reset_encoder_factory=*/false,
/*reset_decoder_factory=*/false);
return caller_ && callee_;
}
@@ -1446,21 +1418,21 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test {
webrtc::PeerConnectionDependencies dependencies(nullptr);
dependencies.cert_generator = std::move(cert_generator);
- return CreatePeerConnectionWrapper(
- "New Peer", nullptr, nullptr, std::move(dependencies), nullptr,
- /*media_transport_factory=*/nullptr, /*reset_encoder_factory=*/false,
- /*reset_decoder_factory=*/false);
+ return CreatePeerConnectionWrapper("New Peer", nullptr, nullptr,
+ std::move(dependencies), nullptr,
+ /*reset_encoder_factory=*/false,
+ /*reset_decoder_factory=*/false);
}
bool CreateOneDirectionalPeerConnectionWrappers(bool caller_to_callee) {
caller_ = CreatePeerConnectionWrapper(
"Caller", nullptr, nullptr, webrtc::PeerConnectionDependencies(nullptr),
- nullptr, /*media_transport_factory=*/nullptr,
+ nullptr,
/*reset_encoder_factory=*/!caller_to_callee,
/*reset_decoder_factory=*/caller_to_callee);
callee_ = CreatePeerConnectionWrapper(
"Callee", nullptr, nullptr, webrtc::PeerConnectionDependencies(nullptr),
- nullptr, /*media_transport_factory=*/nullptr,
+ nullptr,
/*reset_encoder_factory=*/caller_to_callee,
/*reset_decoder_factory=*/!caller_to_callee);
return caller_ && callee_;
@@ -1551,10 +1523,6 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test {
rtc::VirtualSocketServer* virtual_socket_server() { return ss_.get(); }
- webrtc::MediaTransportPair* loopback_media_transports() {
- return &loopback_media_transports_;
- }
-
PeerConnectionWrapper* caller() { return caller_.get(); }
// Set the |caller_| to the |wrapper| passed in and return the
@@ -1752,7 +1720,6 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test {
// on the network thread.
std::vector<std::unique_ptr<cricket::TestTurnServer>> turn_servers_;
std::vector<std::unique_ptr<cricket::TestTurnCustomizer>> turn_customizers_;
- webrtc::MediaTransportPair loopback_media_transports_;
std::unique_ptr<PeerConnectionWrapper> caller_;
std::unique_ptr<PeerConnectionWrapper> callee_;
};
@@ -3875,877 +3842,8 @@ TEST_P(PeerConnectionIntegrationTest,
kDefaultTimeout);
}
-// Tests that the datagram transport to SCTP fallback works correctly when
-// datagram transport negotiation fails.
-TEST_P(PeerConnectionIntegrationTest,
- DatagramTransportDataChannelFallbackToSctp) {
- PeerConnectionInterface::RTCConfiguration rtc_config;
- rtc_config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire;
- rtc_config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle;
- rtc_config.use_datagram_transport_for_data_channels = true;
-
- // Configure one endpoint to use datagram transport for data channels while
- // the other does not.
- ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory(
- rtc_config, RTCConfiguration(),
- loopback_media_transports()->first_factory(), nullptr));
- ConnectFakeSignaling();
-
- // The caller offers a data channel using either datagram transport or SCTP.
- caller()->CreateDataChannel();
- caller()->AddAudioVideoTracks();
- callee()->AddAudioVideoTracks();
- caller()->CreateAndSetAndSignalOffer();
- ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
-
- // Negotiation should fallback to SCTP, allowing the data channel to be
- // established.
- ASSERT_NE(nullptr, caller()->data_channel());
- ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
- EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
- EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
-
- // Ensure data can be sent in both directions.
- std::string data = "hello world";
- caller()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(),
- kDefaultTimeout);
- callee()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(),
- kDefaultTimeout);
-
- // Ensure that failure of the datagram negotiation doesn't impede media flow.
- MediaExpectations media_expectations;
- media_expectations.ExpectBidirectionalAudioAndVideo();
- ASSERT_TRUE(ExpectNewFrames(media_expectations));
-}
-
-// Tests that the data channel transport works correctly when datagram transport
-// negotiation succeeds and does not fall back to SCTP.
-TEST_P(PeerConnectionIntegrationTest,
- DatagramTransportDataChannelDoesNotFallbackToSctp) {
- PeerConnectionInterface::RTCConfiguration rtc_config;
- rtc_config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire;
- rtc_config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle;
- rtc_config.use_datagram_transport_for_data_channels = true;
-
- // Configure one endpoint to use datagram transport for data channels while
- // the other does not.
- ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory(
- rtc_config, rtc_config, loopback_media_transports()->first_factory(),
- loopback_media_transports()->second_factory()));
- ConnectFakeSignaling();
-
- // The caller offers a data channel using either datagram transport or SCTP.
- caller()->CreateDataChannel();
- caller()->AddAudioVideoTracks();
- callee()->AddAudioVideoTracks();
- caller()->CreateAndSetAndSignalOffer();
- ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
-
- // Ensure that the data channel transport is ready.
- loopback_media_transports()->SetState(webrtc::MediaTransportState::kWritable);
- loopback_media_transports()->FlushAsyncInvokes();
-
- // Negotiation should succeed, allowing the data channel to be established.
- ASSERT_NE(nullptr, caller()->data_channel());
- ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
- EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
- EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
-
- // Ensure data can be sent in both directions.
- std::string data = "hello world";
- caller()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(),
- kDefaultTimeout);
- callee()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(),
- kDefaultTimeout);
-
- // Ensure that failure of the datagram negotiation doesn't impede media flow.
- MediaExpectations media_expectations;
- media_expectations.ExpectBidirectionalAudioAndVideo();
- ASSERT_TRUE(ExpectNewFrames(media_expectations));
-}
-
-// Tests that the datagram transport to SCTP fallback works correctly when
-// datagram transports do not advertise compatible transport parameters.
-TEST_P(PeerConnectionIntegrationTest,
- DatagramTransportIncompatibleParametersFallsBackToSctp) {
- PeerConnectionInterface::RTCConfiguration rtc_config;
- rtc_config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire;
- rtc_config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle;
- rtc_config.use_datagram_transport_for_data_channels = true;
-
- // By default, only equal parameters are compatible.
- loopback_media_transports()->SetFirstDatagramTransportParameters("foo");
- loopback_media_transports()->SetSecondDatagramTransportParameters("bar");
-
- // Configure one endpoint to use datagram transport for data channels while
- // the other does not.
- ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory(
- rtc_config, rtc_config, loopback_media_transports()->first_factory(),
- loopback_media_transports()->second_factory()));
- ConnectFakeSignaling();
-
- // The caller offers a data channel using either datagram transport or SCTP.
- caller()->CreateDataChannel();
- caller()->AddAudioVideoTracks();
- callee()->AddAudioVideoTracks();
- caller()->CreateAndSetAndSignalOffer();
- ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
-
- // Negotiation should fallback to SCTP, allowing the data channel to be
- // established.
- ASSERT_NE(nullptr, caller()->data_channel());
- ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
- EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
- EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
-
- // Both endpoints should agree to use SCTP for data channels.
- EXPECT_NE(nullptr, caller()->pc()->GetSctpTransport());
- EXPECT_NE(nullptr, callee()->pc()->GetSctpTransport());
-
- // Ensure data can be sent in both directions.
- std::string data = "hello world";
- caller()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(),
- kDefaultTimeout);
- callee()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(),
- kDefaultTimeout);
-
- // Ensure that failure of the datagram negotiation doesn't impede media flow.
- MediaExpectations media_expectations;
- media_expectations.ExpectBidirectionalAudioAndVideo();
- ASSERT_TRUE(ExpectNewFrames(media_expectations));
-}
-
-// Tests that the datagram transport to SCTP fallback works correctly when
-// only the answerer believes datagram transport parameters are incompatible.
-TEST_P(PeerConnectionIntegrationTest,
- DatagramTransportIncompatibleParametersOnAnswererFallsBackToSctp) {
- PeerConnectionInterface::RTCConfiguration rtc_config;
- rtc_config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire;
- rtc_config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle;
- rtc_config.use_datagram_transport_for_data_channels = true;
-
- // By default, only equal parameters are compatible.
- loopback_media_transports()->SetFirstDatagramTransportParameters("foo");
- loopback_media_transports()->SetSecondDatagramTransportParameters("bar");
-
- // Set the offerer to accept different parameters, while the answerer rejects
- // them.
- loopback_media_transports()->SetFirstDatagramTransportParametersComparison(
- [](absl::string_view a, absl::string_view b) { return true; });
- loopback_media_transports()->SetSecondDatagramTransportParametersComparison(
- [](absl::string_view a, absl::string_view b) { return false; });
-
- // Configure one endpoint to use datagram transport for data channels while
- // the other does not.
- ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory(
- rtc_config, rtc_config, loopback_media_transports()->first_factory(),
- loopback_media_transports()->second_factory()));
- ConnectFakeSignaling();
-
- // The caller offers a data channel using either datagram transport or SCTP.
- caller()->CreateDataChannel();
- caller()->AddAudioVideoTracks();
- callee()->AddAudioVideoTracks();
- caller()->CreateAndSetAndSignalOffer();
- ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
-
- // Negotiation should fallback to SCTP, allowing the data channel to be
- // established.
- ASSERT_NE(nullptr, caller()->data_channel());
- ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
- EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
- EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
-
- // Both endpoints should agree to use SCTP for data channels.
- EXPECT_NE(nullptr, caller()->pc()->GetSctpTransport());
- EXPECT_NE(nullptr, callee()->pc()->GetSctpTransport());
-
- // Ensure data can be sent in both directions.
- std::string data = "hello world";
- caller()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(),
- kDefaultTimeout);
- callee()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(),
- kDefaultTimeout);
-
- // Ensure that failure of the datagram negotiation doesn't impede media flow.
- MediaExpectations media_expectations;
- media_expectations.ExpectBidirectionalAudioAndVideo();
- ASSERT_TRUE(ExpectNewFrames(media_expectations));
-}
-
-// Tests that the data channel transport works correctly when datagram
-// transports provide different, but compatible, transport parameters.
-TEST_P(PeerConnectionIntegrationTest,
- DatagramTransportCompatibleParametersDoNotFallbackToSctp) {
- PeerConnectionInterface::RTCConfiguration rtc_config;
- rtc_config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire;
- rtc_config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle;
- rtc_config.use_datagram_transport_for_data_channels = true;
-
- // By default, only equal parameters are compatible.
- loopback_media_transports()->SetFirstDatagramTransportParameters("foo");
- loopback_media_transports()->SetSecondDatagramTransportParameters("bar");
-
- // Change the comparison used to treat these transport parameters are
- // compatible (on both sides).
- loopback_media_transports()->SetFirstDatagramTransportParametersComparison(
- [](absl::string_view a, absl::string_view b) { return true; });
- loopback_media_transports()->SetSecondDatagramTransportParametersComparison(
- [](absl::string_view a, absl::string_view b) { return true; });
-
- ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory(
- rtc_config, rtc_config, loopback_media_transports()->first_factory(),
- loopback_media_transports()->second_factory()));
- ConnectFakeSignaling();
-
- // The caller offers a data channel using either datagram transport or SCTP.
- caller()->CreateDataChannel();
- caller()->AddAudioVideoTracks();
- callee()->AddAudioVideoTracks();
- caller()->CreateAndSetAndSignalOffer();
- ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
-
- // Ensure that the data channel transport is ready.
- loopback_media_transports()->SetState(webrtc::MediaTransportState::kWritable);
- loopback_media_transports()->FlushAsyncInvokes();
-
- // Negotiation should succeed, allowing the data channel to be established.
- ASSERT_NE(nullptr, caller()->data_channel());
- ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
- EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
- EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
-
- // Both endpoints should agree to use datagram transport for data channels.
- EXPECT_EQ(nullptr, caller()->pc()->GetSctpTransport());
- EXPECT_EQ(nullptr, callee()->pc()->GetSctpTransport());
-
- // Ensure data can be sent in both directions.
- std::string data = "hello world";
- caller()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(),
- kDefaultTimeout);
- callee()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(),
- kDefaultTimeout);
-
- // Ensure that failure of the datagram negotiation doesn't impede media flow.
- MediaExpectations media_expectations;
- media_expectations.ExpectBidirectionalAudioAndVideo();
- ASSERT_TRUE(ExpectNewFrames(media_expectations));
-}
-
-TEST_P(PeerConnectionIntegrationTest,
- DatagramTransportDataChannelWithMediaOnCaller) {
- // Configure the caller to attempt use of datagram transport for media and
- // data channels.
- PeerConnectionInterface::RTCConfiguration offerer_config;
- offerer_config.rtcp_mux_policy =
- PeerConnectionInterface::kRtcpMuxPolicyRequire;
- offerer_config.bundle_policy =
- PeerConnectionInterface::kBundlePolicyMaxBundle;
- offerer_config.use_datagram_transport_for_data_channels = true;
- offerer_config.use_datagram_transport = true;
-
- // Configure the callee to only use datagram transport for data channels.
- PeerConnectionInterface::RTCConfiguration answerer_config;
- answerer_config.rtcp_mux_policy =
- PeerConnectionInterface::kRtcpMuxPolicyRequire;
- answerer_config.bundle_policy =
- PeerConnectionInterface::kBundlePolicyMaxBundle;
- answerer_config.use_datagram_transport_for_data_channels = true;
-
- ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory(
- offerer_config, answerer_config,
- loopback_media_transports()->first_factory(),
- loopback_media_transports()->second_factory()));
- ConnectFakeSignaling();
-
- // Offer both media and data.
- caller()->AddAudioVideoTracks();
- callee()->AddAudioVideoTracks();
- caller()->CreateDataChannel();
- caller()->CreateAndSetAndSignalOffer();
- ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
-
- // Ensure that the data channel transport is ready.
- loopback_media_transports()->SetState(webrtc::MediaTransportState::kWritable);
- loopback_media_transports()->FlushAsyncInvokes();
-
- ASSERT_NE(nullptr, caller()->data_channel());
- ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
- EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
- EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
-
- // Both endpoints should agree to use datagram transport for data channels.
- EXPECT_EQ(nullptr, caller()->pc()->GetSctpTransport());
- EXPECT_EQ(nullptr, callee()->pc()->GetSctpTransport());
-
- // Ensure data can be sent in both directions.
- std::string data = "hello world";
- caller()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(),
- kDefaultTimeout);
- callee()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(),
- kDefaultTimeout);
-
- // Media flow should not be impacted.
- MediaExpectations media_expectations;
- media_expectations.ExpectBidirectionalAudioAndVideo();
- ASSERT_TRUE(ExpectNewFrames(media_expectations));
-}
-
-TEST_P(PeerConnectionIntegrationTest,
- DatagramTransportMediaWithDataChannelOnCaller) {
- // Configure the caller to attempt use of datagram transport for media and
- // data channels.
- PeerConnectionInterface::RTCConfiguration offerer_config;
- offerer_config.rtcp_mux_policy =
- PeerConnectionInterface::kRtcpMuxPolicyRequire;
- offerer_config.bundle_policy =
- PeerConnectionInterface::kBundlePolicyMaxBundle;
- offerer_config.use_datagram_transport_for_data_channels = true;
- offerer_config.use_datagram_transport = true;
-
- // Configure the callee to only use datagram transport for media.
- PeerConnectionInterface::RTCConfiguration answerer_config;
- answerer_config.rtcp_mux_policy =
- PeerConnectionInterface::kRtcpMuxPolicyRequire;
- answerer_config.bundle_policy =
- PeerConnectionInterface::kBundlePolicyMaxBundle;
- answerer_config.use_datagram_transport = true;
-
- ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory(
- offerer_config, answerer_config,
- loopback_media_transports()->first_factory(),
- loopback_media_transports()->second_factory()));
- ConnectFakeSignaling();
-
- // Offer both media and data.
- caller()->AddAudioVideoTracks();
- callee()->AddAudioVideoTracks();
- caller()->CreateDataChannel();
- caller()->CreateAndSetAndSignalOffer();
- ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
-
- // Ensure that the data channel transport is ready.
- loopback_media_transports()->SetState(webrtc::MediaTransportState::kWritable);
- loopback_media_transports()->FlushAsyncInvokes();
-
- ASSERT_NE(nullptr, caller()->data_channel());
- ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
- EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
- EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
-
- // Both endpoints should agree to use SCTP for data channels.
- EXPECT_NE(nullptr, caller()->pc()->GetSctpTransport());
- EXPECT_NE(nullptr, callee()->pc()->GetSctpTransport());
-
- // Ensure data can be sent in both directions.
- std::string data = "hello world";
- caller()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(),
- kDefaultTimeout);
- callee()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(),
- kDefaultTimeout);
-
- // Media flow should not be impacted.
- MediaExpectations media_expectations;
- media_expectations.ExpectBidirectionalAudioAndVideo();
- ASSERT_TRUE(ExpectNewFrames(media_expectations));
-}
-
-TEST_P(PeerConnectionIntegrationTest,
- DatagramTransportDataChannelWithMediaOnCallee) {
- // Configure the caller to attempt use of datagram transport for data
- // channels.
- PeerConnectionInterface::RTCConfiguration offerer_config;
- offerer_config.rtcp_mux_policy =
- PeerConnectionInterface::kRtcpMuxPolicyRequire;
- offerer_config.bundle_policy =
- PeerConnectionInterface::kBundlePolicyMaxBundle;
- offerer_config.use_datagram_transport_for_data_channels = true;
-
- // Configure the callee to use datagram transport for data channels and media.
- PeerConnectionInterface::RTCConfiguration answerer_config;
- answerer_config.rtcp_mux_policy =
- PeerConnectionInterface::kRtcpMuxPolicyRequire;
- answerer_config.bundle_policy =
- PeerConnectionInterface::kBundlePolicyMaxBundle;
- answerer_config.use_datagram_transport_for_data_channels = true;
- answerer_config.use_datagram_transport = true;
-
- ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory(
- offerer_config, answerer_config,
- loopback_media_transports()->first_factory(),
- loopback_media_transports()->second_factory()));
- ConnectFakeSignaling();
-
- // Offer both media and data.
- caller()->AddAudioVideoTracks();
- callee()->AddAudioVideoTracks();
- caller()->CreateDataChannel();
- caller()->CreateAndSetAndSignalOffer();
- ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
-
- // Ensure that the data channel transport is ready.
- loopback_media_transports()->SetState(webrtc::MediaTransportState::kWritable);
- loopback_media_transports()->FlushAsyncInvokes();
-
- ASSERT_NE(nullptr, caller()->data_channel());
- ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
- EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
- EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
-
- // Both endpoints should agree to use datagram transport for data channels.
- EXPECT_EQ(nullptr, caller()->pc()->GetSctpTransport());
- EXPECT_EQ(nullptr, callee()->pc()->GetSctpTransport());
-
- // Ensure data can be sent in both directions.
- std::string data = "hello world";
- caller()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(),
- kDefaultTimeout);
- callee()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(),
- kDefaultTimeout);
-
- // Media flow should not be impacted.
- MediaExpectations media_expectations;
- media_expectations.ExpectBidirectionalAudioAndVideo();
- ASSERT_TRUE(ExpectNewFrames(media_expectations));
-}
-
-TEST_P(PeerConnectionIntegrationTest,
- DatagramTransportMediaWithDataChannelOnCallee) {
- // Configure the caller to attempt use of datagram transport for media.
- PeerConnectionInterface::RTCConfiguration offerer_config;
- offerer_config.rtcp_mux_policy =
- PeerConnectionInterface::kRtcpMuxPolicyRequire;
- offerer_config.bundle_policy =
- PeerConnectionInterface::kBundlePolicyMaxBundle;
- offerer_config.use_datagram_transport = true;
-
- // Configure the callee to only use datagram transport for media and data
- // channels.
- PeerConnectionInterface::RTCConfiguration answerer_config;
- answerer_config.rtcp_mux_policy =
- PeerConnectionInterface::kRtcpMuxPolicyRequire;
- answerer_config.bundle_policy =
- PeerConnectionInterface::kBundlePolicyMaxBundle;
- answerer_config.use_datagram_transport = true;
- answerer_config.use_datagram_transport_for_data_channels = true;
-
- ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory(
- offerer_config, answerer_config,
- loopback_media_transports()->first_factory(),
- loopback_media_transports()->second_factory()));
- ConnectFakeSignaling();
-
- // Offer both media and data.
- caller()->AddAudioVideoTracks();
- callee()->AddAudioVideoTracks();
- caller()->CreateDataChannel();
- caller()->CreateAndSetAndSignalOffer();
- ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
-
- // Ensure that the data channel transport is ready.
- loopback_media_transports()->SetState(webrtc::MediaTransportState::kWritable);
- loopback_media_transports()->FlushAsyncInvokes();
-
- ASSERT_NE(nullptr, caller()->data_channel());
- ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
- EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
- EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
-
- // Both endpoints should agree to use SCTP for data channels.
- EXPECT_NE(nullptr, caller()->pc()->GetSctpTransport());
- EXPECT_NE(nullptr, callee()->pc()->GetSctpTransport());
-
- // Ensure data can be sent in both directions.
- std::string data = "hello world";
- caller()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(),
- kDefaultTimeout);
- callee()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(),
- kDefaultTimeout);
-
- // Media flow should not be impacted.
- MediaExpectations media_expectations;
- media_expectations.ExpectBidirectionalAudioAndVideo();
- ASSERT_TRUE(ExpectNewFrames(media_expectations));
-}
-
-TEST_P(PeerConnectionIntegrationTest, DatagramTransportDataChannelAndMedia) {
- // Configure the caller to use datagram transport for data channels and media.
- PeerConnectionInterface::RTCConfiguration offerer_config;
- offerer_config.rtcp_mux_policy =
- PeerConnectionInterface::kRtcpMuxPolicyRequire;
- offerer_config.bundle_policy =
- PeerConnectionInterface::kBundlePolicyMaxBundle;
- offerer_config.use_datagram_transport_for_data_channels = true;
- offerer_config.use_datagram_transport = true;
-
- // Configure the callee to use datagram transport for data channels and media.
- PeerConnectionInterface::RTCConfiguration answerer_config;
- answerer_config.rtcp_mux_policy =
- PeerConnectionInterface::kRtcpMuxPolicyRequire;
- answerer_config.bundle_policy =
- PeerConnectionInterface::kBundlePolicyMaxBundle;
- answerer_config.use_datagram_transport_for_data_channels = true;
- answerer_config.use_datagram_transport = true;
-
- ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory(
- offerer_config, answerer_config,
- loopback_media_transports()->first_factory(),
- loopback_media_transports()->second_factory()));
- ConnectFakeSignaling();
-
- // Offer both media and data.
- caller()->AddAudioVideoTracks();
- callee()->AddAudioVideoTracks();
- caller()->CreateDataChannel();
- caller()->CreateAndSetAndSignalOffer();
- ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
-
- // Ensure that the data channel transport is ready.
- loopback_media_transports()->SetState(webrtc::MediaTransportState::kWritable);
- loopback_media_transports()->FlushAsyncInvokes();
-
- ASSERT_NE(nullptr, caller()->data_channel());
- ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
- EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
- EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
-
- // Both endpoints should agree to use datagram transport for data channels.
- EXPECT_EQ(nullptr, caller()->pc()->GetSctpTransport());
- EXPECT_EQ(nullptr, callee()->pc()->GetSctpTransport());
-
- // Ensure data can be sent in both directions.
- std::string data = "hello world";
- caller()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(),
- kDefaultTimeout);
- callee()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(),
- kDefaultTimeout);
-
- // Media flow should not be impacted.
- MediaExpectations media_expectations;
- media_expectations.ExpectBidirectionalAudioAndVideo();
- ASSERT_TRUE(ExpectNewFrames(media_expectations));
-}
-
-// Tests that data channels use SCTP instead of datagram transport if datagram
-// transport is configured in receive-only mode on the caller.
-TEST_P(PeerConnectionIntegrationTest,
- DatagramTransportDataChannelReceiveOnlyOnCallerUsesSctp) {
- PeerConnectionInterface::RTCConfiguration rtc_config;
- rtc_config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire;
- rtc_config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle;
- rtc_config.use_datagram_transport_for_data_channels = true;
- rtc_config.use_datagram_transport_for_data_channels_receive_only = true;
-
- ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory(
- rtc_config, rtc_config, loopback_media_transports()->first_factory(),
- loopback_media_transports()->second_factory()));
- ConnectFakeSignaling();
-
- // The caller should offer a data channel using SCTP.
- caller()->CreateDataChannel();
- caller()->AddAudioVideoTracks();
- callee()->AddAudioVideoTracks();
- caller()->CreateAndSetAndSignalOffer();
- ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
-
- ASSERT_NE(nullptr, caller()->data_channel());
- ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
- EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
- EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
-
- // SCTP transports should be present, since they are in use.
- EXPECT_NE(caller()->pc()->GetSctpTransport(), nullptr);
- EXPECT_NE(callee()->pc()->GetSctpTransport(), nullptr);
-
- // Ensure data can be sent in both directions.
- std::string data = "hello world";
- caller()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(),
- kDefaultTimeout);
- callee()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(),
- kDefaultTimeout);
-}
-
#endif // HAVE_SCTP
-// Tests that a callee configured for receive-only use of datagram transport
-// data channels accepts them on incoming calls.
-TEST_P(PeerConnectionIntegrationTest,
- DatagramTransportDataChannelReceiveOnlyOnCallee) {
- PeerConnectionInterface::RTCConfiguration offerer_config;
- offerer_config.rtcp_mux_policy =
- PeerConnectionInterface::kRtcpMuxPolicyRequire;
- offerer_config.bundle_policy =
- PeerConnectionInterface::kBundlePolicyMaxBundle;
- offerer_config.use_datagram_transport_for_data_channels = true;
-
- PeerConnectionInterface::RTCConfiguration answerer_config;
- answerer_config.rtcp_mux_policy =
- PeerConnectionInterface::kRtcpMuxPolicyRequire;
- answerer_config.bundle_policy =
- PeerConnectionInterface::kBundlePolicyMaxBundle;
- answerer_config.use_datagram_transport_for_data_channels = true;
- answerer_config.use_datagram_transport_for_data_channels_receive_only = true;
-
- ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory(
- offerer_config, answerer_config,
- loopback_media_transports()->first_factory(),
- loopback_media_transports()->second_factory()));
- ConnectFakeSignaling();
-
- caller()->CreateDataChannel();
- caller()->CreateAndSetAndSignalOffer();
- ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
-
- // Ensure that the data channel transport is ready.
- loopback_media_transports()->SetState(webrtc::MediaTransportState::kWritable);
- loopback_media_transports()->FlushAsyncInvokes();
-
- ASSERT_NE(nullptr, caller()->data_channel());
- ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
- EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
- EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
-
- // SCTP transports should not be present, since datagram transport is used.
- EXPECT_EQ(caller()->pc()->GetSctpTransport(), nullptr);
- EXPECT_EQ(callee()->pc()->GetSctpTransport(), nullptr);
-
- // Ensure data can be sent in both directions.
- std::string data = "hello world";
- caller()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(),
- kDefaultTimeout);
- callee()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(),
- kDefaultTimeout);
-}
-
-// This test sets up a call between two parties with a datagram transport data
-// channel.
-TEST_P(PeerConnectionIntegrationTest, DatagramTransportDataChannelEndToEnd) {
- PeerConnectionInterface::RTCConfiguration rtc_config;
- rtc_config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire;
- rtc_config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle;
- rtc_config.use_datagram_transport_for_data_channels = true;
- rtc_config.enable_dtls_srtp = false;
- ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory(
- rtc_config, rtc_config, loopback_media_transports()->first_factory(),
- loopback_media_transports()->second_factory()));
- ConnectFakeSignaling();
-
- // Expect that data channel created on caller side will show up for callee as
- // well.
- caller()->CreateDataChannel();
- caller()->CreateAndSetAndSignalOffer();
- ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
-
- // Ensure that the data channel transport is ready.
- loopback_media_transports()->SetState(webrtc::MediaTransportState::kWritable);
- loopback_media_transports()->FlushAsyncInvokes();
-
- // Caller data channel should already exist (it created one). Callee data
- // channel may not exist yet, since negotiation happens in-band, not in SDP.
- ASSERT_NE(nullptr, caller()->data_channel());
- ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
- EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
- EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
-
- // Ensure data can be sent in both directions.
- std::string data = "hello world";
- caller()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(),
- kDefaultTimeout);
- callee()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(),
- kDefaultTimeout);
-}
-
-// Tests that 'zero-rtt' data channel transports (which are ready-to-send as
-// soon as they're created) work correctly.
-TEST_P(PeerConnectionIntegrationTest, DatagramTransportDataChannelZeroRtt) {
- PeerConnectionInterface::RTCConfiguration rtc_config;
- rtc_config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire;
- rtc_config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle;
- rtc_config.use_datagram_transport_for_data_channels = true;
- rtc_config.enable_dtls_srtp = false; // SDES is required for media transport.
- ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory(
- rtc_config, rtc_config, loopback_media_transports()->first_factory(),
- loopback_media_transports()->second_factory()));
- ConnectFakeSignaling();
-
- // Ensure that the callee's media transport is ready-to-send immediately.
- // Note that only the callee can become writable in zero RTTs. The caller
- // must wait for the callee's answer.
- loopback_media_transports()->SetSecondStateAfterConnect(
- webrtc::MediaTransportState::kWritable);
- loopback_media_transports()->FlushAsyncInvokes();
-
- // Expect that data channel created on caller side will show up for callee as
- // well.
- caller()->CreateDataChannel();
- caller()->CreateAndSetAndSignalOffer();
- ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
-
- loopback_media_transports()->SetFirstState(
- webrtc::MediaTransportState::kWritable);
- loopback_media_transports()->FlushAsyncInvokes();
-
- // Caller data channel should already exist (it created one). Callee data
- // channel may not exist yet, since negotiation happens in-band, not in SDP.
- ASSERT_NE(nullptr, caller()->data_channel());
- ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
- EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
- EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
-
- // Ensure data can be sent in both directions.
- std::string data = "hello world";
- caller()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(),
- kDefaultTimeout);
- callee()->data_channel()->Send(DataBuffer(data));
- EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(),
- kDefaultTimeout);
-}
-
-// Ensures that when the callee closes a datagram transport data channel, the
-// closing procedure results in the data channel being closed for the caller
-// as well.
-TEST_P(PeerConnectionIntegrationTest,
- DatagramTransportDataChannelCalleeCloses) {
- PeerConnectionInterface::RTCConfiguration rtc_config;
- rtc_config.use_datagram_transport_for_data_channels = true;
- rtc_config.enable_dtls_srtp = false;
- ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory(
- rtc_config, rtc_config, loopback_media_transports()->first_factory(),
- loopback_media_transports()->second_factory()));
- ConnectFakeSignaling();
-
- // Create a data channel on the caller and signal it to the callee.
- caller()->CreateDataChannel();
- caller()->CreateAndSetAndSignalOffer();
- ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
-
- // Ensure that the data channel transport is ready.
- loopback_media_transports()->SetState(webrtc::MediaTransportState::kWritable);
- loopback_media_transports()->FlushAsyncInvokes();
-
- // Data channels exist and open on both ends of the connection.
- ASSERT_NE(nullptr, caller()->data_channel());
- ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
- ASSERT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
- ASSERT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
-
- // Close the data channel on the callee side, and wait for it to reach the
- // "closed" state on both sides.
- callee()->data_channel()->Close();
- EXPECT_TRUE_WAIT(!caller()->data_observer()->IsOpen(), kDefaultTimeout);
- EXPECT_TRUE_WAIT(!callee()->data_observer()->IsOpen(), kDefaultTimeout);
-}
-
-// Tests that datagram transport data channels can do in-band negotiation.
-TEST_P(PeerConnectionIntegrationTest,
- DatagramTransportDataChannelConfigSentToOtherSide) {
- PeerConnectionInterface::RTCConfiguration rtc_config;
- rtc_config.use_datagram_transport_for_data_channels = true;
- rtc_config.enable_dtls_srtp = false;
- ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory(
- rtc_config, rtc_config, loopback_media_transports()->first_factory(),
- loopback_media_transports()->second_factory()));
- ConnectFakeSignaling();
-
- // Create a data channel with a non-default configuration and signal it to the
- // callee.
- webrtc::DataChannelInit init;
- init.id = 53;
- init.maxRetransmits = 52;
- caller()->CreateDataChannel("data-channel", &init);
- caller()->CreateAndSetAndSignalOffer();
- ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
-
- // Ensure that the data channel transport is ready.
- loopback_media_transports()->SetState(webrtc::MediaTransportState::kWritable);
- loopback_media_transports()->FlushAsyncInvokes();
-
- // Ensure that the data channel exists on the callee with the correct
- // configuration.
- ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
- ASSERT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
- // Since "negotiate" is false, the "id" parameter is ignored.
- EXPECT_NE(init.id, callee()->data_channel()->id());
- EXPECT_EQ("data-channel", callee()->data_channel()->label());
- EXPECT_EQ(init.maxRetransmits, callee()->data_channel()->maxRetransmits());
- EXPECT_FALSE(callee()->data_channel()->negotiated());
-}
-
-TEST_P(PeerConnectionIntegrationTest,
- DatagramTransportDataChannelRejectedWithNoFallback) {
- PeerConnectionInterface::RTCConfiguration offerer_config;
- offerer_config.rtcp_mux_policy =
- PeerConnectionInterface::kRtcpMuxPolicyRequire;
- offerer_config.bundle_policy =
- PeerConnectionInterface::kBundlePolicyMaxBundle;
- offerer_config.use_datagram_transport_for_data_channels = true;
- // Disabling DTLS precludes a fallback to SCTP.
- offerer_config.enable_dtls_srtp = false;
-
- PeerConnectionInterface::RTCConfiguration answerer_config;
- answerer_config.rtcp_mux_policy =
- PeerConnectionInterface::kRtcpMuxPolicyRequire;
- answerer_config.bundle_policy =
- PeerConnectionInterface::kBundlePolicyMaxBundle;
- // Both endpoints must disable DTLS or SetRemoteDescription will fail.
- answerer_config.enable_dtls_srtp = false;
-
- // Configure one endpoint to use datagram transport for data channels while
- // the other does not.
- ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory(
- offerer_config, answerer_config,
- loopback_media_transports()->first_factory(), nullptr));
- ConnectFakeSignaling();
-
- // The caller offers a data channel using either datagram transport or SCTP.
- caller()->CreateDataChannel();
- caller()->AddAudioVideoTracks();
- callee()->AddAudioVideoTracks();
- caller()->CreateAndSetAndSignalOffer();
- ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
-
- // Caller data channel should already exist (it created one). Callee data
- // channel should not exist, since negotiation happens in-band, not in SDP.
- EXPECT_NE(nullptr, caller()->data_channel());
- EXPECT_EQ(nullptr, callee()->data_channel());
-
- // The caller's data channel should close when the datagram transport is
- // rejected.
- EXPECT_FALSE(caller()->data_observer()->IsOpen());
-
- // Media flow should not be impacted by the failed data channel.
- MediaExpectations media_expectations;
- media_expectations.ExpectBidirectionalAudioAndVideo();
- ASSERT_TRUE(ExpectNewFrames(media_expectations));
-}
-
// Test that the ICE connection and gathering states eventually reach
// "complete".
TEST_P(PeerConnectionIntegrationTest, IceStatesReachCompletion) {
@@ -5702,10 +4800,10 @@ TEST_P(PeerConnectionIntegrationTest, IceTransportFactoryUsedForConnections) {
auto ice_transport_factory = std::make_unique<MockIceTransportFactory>();
EXPECT_CALL(*ice_transport_factory, RecordIceTransportCreated()).Times(1);
dependencies.ice_transport_factory = std::move(ice_transport_factory);
- auto wrapper = CreatePeerConnectionWrapper(
- "Caller", nullptr, &default_config, std::move(dependencies), nullptr,
- nullptr, /*reset_encoder_factory=*/false,
- /*reset_decoder_factory=*/false);
+ auto wrapper = CreatePeerConnectionWrapper("Caller", nullptr, &default_config,
+ std::move(dependencies), nullptr,
+ /*reset_encoder_factory=*/false,
+ /*reset_decoder_factory=*/false);
ASSERT_TRUE(wrapper);
wrapper->CreateDataChannel();
rtc::scoped_refptr<MockSetSessionDescriptionObserver> observer(
@@ -6134,6 +5232,23 @@ TEST_P(PeerConnectionIntegrationTest, RegatherAfterChangingIceTransportType) {
callee()->pc()->SetConfiguration(callee_config);
EXPECT_EQ_WAIT(cricket::LOCAL_PORT_TYPE,
callee()->last_candidate_gathered().type(), kDefaultTimeout);
+
+ // Create an offer and verify that it does not contain an ICE restart (i.e new
+ // ice credentials).
+ std::string caller_ufrag_pre_offer = caller()
+ ->pc()
+ ->local_description()
+ ->description()
+ ->transport_infos()[0]
+ .description.ice_ufrag;
+ caller()->CreateAndSetAndSignalOffer();
+ std::string caller_ufrag_post_offer = caller()
+ ->pc()
+ ->local_description()
+ ->description()
+ ->transport_infos()[0]
+ .description.ice_ufrag;
+ EXPECT_EQ(caller_ufrag_pre_offer, caller_ufrag_post_offer);
}
TEST_P(PeerConnectionIntegrationTest, OnIceCandidateError) {
@@ -6173,6 +5288,35 @@ TEST_P(PeerConnectionIntegrationTest, OnIceCandidateError) {
EXPECT_NE(caller()->error_event().address, "");
}
+TEST_P(PeerConnectionIntegrationTest, OnIceCandidateErrorWithEmptyAddress) {
+ webrtc::PeerConnectionInterface::IceServer ice_server;
+ ice_server.urls.push_back("turn:127.0.0.1:3478?transport=tcp");
+ ice_server.username = "test";
+ ice_server.password = "test";
+
+ PeerConnectionInterface::RTCConfiguration caller_config;
+ caller_config.servers.push_back(ice_server);
+ caller_config.type = webrtc::PeerConnectionInterface::kRelay;
+ caller_config.continual_gathering_policy = PeerConnection::GATHER_CONTINUALLY;
+
+ PeerConnectionInterface::RTCConfiguration callee_config;
+ callee_config.servers.push_back(ice_server);
+ callee_config.type = webrtc::PeerConnectionInterface::kRelay;
+ callee_config.continual_gathering_policy = PeerConnection::GATHER_CONTINUALLY;
+
+ ASSERT_TRUE(
+ CreatePeerConnectionWrappersWithConfig(caller_config, callee_config));
+
+ // Do normal offer/answer and wait for ICE to complete.
+ ConnectFakeSignaling();
+ caller()->AddAudioVideoTracks();
+ callee()->AddAudioVideoTracks();
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ EXPECT_EQ_WAIT(701, caller()->error_event().error_code, kDefaultTimeout);
+ EXPECT_EQ(caller()->error_event().address, "");
+}
+
TEST_F(PeerConnectionIntegrationTestUnifiedPlan,
AudioKeepsFlowingAfterImplicitRollback) {
PeerConnectionInterface::RTCConfiguration config;
diff --git a/chromium/third_party/webrtc/pc/peer_connection_interface_unittest.cc b/chromium/third_party/webrtc/pc/peer_connection_interface_unittest.cc
index 2a369705975..9251511747f 100644
--- a/chromium/third_party/webrtc/pc/peer_connection_interface_unittest.cc
+++ b/chromium/third_party/webrtc/pc/peer_connection_interface_unittest.cc
@@ -627,7 +627,7 @@ class MockTrackObserver : public ObserverInterface {
}
}
- MOCK_METHOD0(OnChanged, void());
+ MOCK_METHOD(void, OnChanged, (), (override));
private:
NotifierInterface* notifier_;
@@ -1421,15 +1421,11 @@ TEST_P(PeerConnectionInterfaceTest, GetConfigurationAfterSetConfiguration) {
PeerConnectionInterface::RTCConfiguration config = pc_->GetConfiguration();
config.type = PeerConnectionInterface::kRelay;
- config.use_datagram_transport = true;
- config.use_datagram_transport_for_data_channels = true;
EXPECT_TRUE(pc_->SetConfiguration(config).ok());
PeerConnectionInterface::RTCConfiguration returned_config =
pc_->GetConfiguration();
EXPECT_EQ(PeerConnectionInterface::kRelay, returned_config.type);
- EXPECT_TRUE(returned_config.use_datagram_transport);
- EXPECT_TRUE(returned_config.use_datagram_transport_for_data_channels);
}
TEST_P(PeerConnectionInterfaceTest, SetConfigurationFailsAfterClose) {
diff --git a/chromium/third_party/webrtc/pc/peer_connection_internal.h b/chromium/third_party/webrtc/pc/peer_connection_internal.h
index 52ffe85c2c2..66d585b592d 100644
--- a/chromium/third_party/webrtc/pc/peer_connection_internal.h
+++ b/chromium/third_party/webrtc/pc/peer_connection_internal.h
@@ -46,8 +46,11 @@ class PeerConnectionInternal : public PeerConnectionInterface {
// Only valid when using deprecated RTP data channels.
virtual cricket::RtpDataChannel* rtp_data_channel() const = 0;
- virtual std::vector<rtc::scoped_refptr<DataChannel>> sctp_data_channels()
- const = 0;
+ // Call on the network thread to fetch stats for all the data channels.
+ // TODO(tommi): Make pure virtual after downstream updates.
+ virtual std::vector<DataChannel::Stats> GetDataChannelStats() const {
+ return {};
+ }
virtual absl::optional<std::string> sctp_transport_name() const = 0;
diff --git a/chromium/third_party/webrtc/pc/peer_connection_media_unittest.cc b/chromium/third_party/webrtc/pc/peer_connection_media_unittest.cc
index c9ffd776d9d..3c117c3ecd8 100644
--- a/chromium/third_party/webrtc/pc/peer_connection_media_unittest.cc
+++ b/chromium/third_party/webrtc/pc/peer_connection_media_unittest.cc
@@ -825,8 +825,10 @@ TEST_P(PeerConnectionMediaTest, AnswerHasDifferentDirectionsForAudioVideo) {
}
void AddComfortNoiseCodecsToSend(cricket::FakeMediaEngine* media_engine) {
- const cricket::AudioCodec kComfortNoiseCodec8k(102, "CN", 8000, 0, 1);
- const cricket::AudioCodec kComfortNoiseCodec16k(103, "CN", 16000, 0, 1);
+ const cricket::AudioCodec kComfortNoiseCodec8k(102, cricket::kCnCodecName,
+ 8000, 0, 1);
+ const cricket::AudioCodec kComfortNoiseCodec16k(103, cricket::kCnCodecName,
+ 16000, 0, 1);
auto codecs = media_engine->voice().send_codecs();
codecs.push_back(kComfortNoiseCodec8k);
@@ -837,7 +839,7 @@ void AddComfortNoiseCodecsToSend(cricket::FakeMediaEngine* media_engine) {
bool HasAnyComfortNoiseCodecs(const cricket::SessionDescription* desc) {
const auto* audio_desc = cricket::GetFirstAudioContentDescription(desc);
for (const auto& codec : audio_desc->codecs()) {
- if (codec.name == "CN") {
+ if (codec.name == cricket::kCnCodecName) {
return true;
}
}
@@ -1118,10 +1120,11 @@ TEST_P(PeerConnectionMediaTest, MediaEngineErrorPropagatedToClients) {
std::string error;
ASSERT_FALSE(caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal(),
&error));
- EXPECT_EQ(
- "Failed to set remote answer sdp: Failed to set remote video description "
- "send parameters.",
- error);
+ EXPECT_EQ(std::string("Failed to set remote answer sdp: Failed to set remote "
+ "video description "
+ "send parameters for m-section with mid='") +
+ (IsUnifiedPlan() ? "1" : "video") + "'.",
+ error);
}
// Tests that if the underlying video encoder fails once then subsequent
diff --git a/chromium/third_party/webrtc/pc/proxy_unittest.cc b/chromium/third_party/webrtc/pc/proxy_unittest.cc
index a00b47ff6b6..500828a03ee 100644
--- a/chromium/third_party/webrtc/pc/proxy_unittest.cc
+++ b/chromium/third_party/webrtc/pc/proxy_unittest.cc
@@ -46,16 +46,16 @@ class Fake : public FakeInterface {
return new rtc::RefCountedObject<Fake>();
}
// Used to verify destructor is called on the correct thread.
- MOCK_METHOD0(Destroy, void());
+ MOCK_METHOD(void, Destroy, ());
- MOCK_METHOD0(VoidMethod0, void());
- MOCK_METHOD0(Method0, std::string());
- MOCK_CONST_METHOD0(ConstMethod0, std::string());
+ MOCK_METHOD(void, VoidMethod0, (), (override));
+ MOCK_METHOD(std::string, Method0, (), (override));
+ MOCK_METHOD(std::string, ConstMethod0, (), (const, override));
- MOCK_METHOD1(Method1, std::string(std::string));
- MOCK_CONST_METHOD1(ConstMethod1, std::string(std::string));
+ MOCK_METHOD(std::string, Method1, (std::string), (override));
+ MOCK_METHOD(std::string, ConstMethod1, (std::string), (const, override));
- MOCK_METHOD2(Method2, std::string(std::string, std::string));
+ MOCK_METHOD(std::string, Method2, (std::string, std::string), (override));
protected:
Fake() {}
@@ -266,7 +266,7 @@ class FooInterface {
class Foo : public FooInterface {
public:
Foo() {}
- MOCK_METHOD0(Bar, void());
+ MOCK_METHOD(void, Bar, (), (override));
};
BEGIN_OWNED_PROXY_MAP(Foo)
diff --git a/chromium/third_party/webrtc/pc/remote_audio_source.cc b/chromium/third_party/webrtc/pc/remote_audio_source.cc
index da00402e414..ce6a2d42c6f 100644
--- a/chromium/third_party/webrtc/pc/remote_audio_source.cc
+++ b/chromium/third_party/webrtc/pc/remote_audio_source.cc
@@ -49,7 +49,8 @@ class RemoteAudioSource::AudioDataProxy : public AudioSinkInterface {
};
RemoteAudioSource::RemoteAudioSource(rtc::Thread* worker_thread)
- : main_thread_(rtc::Thread::Current()),
+ : MessageHandler(false),
+ main_thread_(rtc::Thread::Current()),
worker_thread_(worker_thread),
state_(MediaSourceInterface::kLive) {
RTC_DCHECK(main_thread_);
diff --git a/chromium/third_party/webrtc/pc/rtc_stats_collector.cc b/chromium/third_party/webrtc/pc/rtc_stats_collector.cc
index 0e2f170ff0e..f66be30dd3d 100644
--- a/chromium/third_party/webrtc/pc/rtc_stats_collector.cc
+++ b/chromium/third_party/webrtc/pc/rtc_stats_collector.cc
@@ -397,7 +397,6 @@ void SetOutboundRTPStreamStatsFromVoiceSenderInfo(
void SetOutboundRTPStreamStatsFromVideoSenderInfo(
const std::string& mid,
const cricket::VideoSenderInfo& video_sender_info,
- bool enable_simulcast_stats,
RTCOutboundRTPStreamStats* outbound_video) {
SetOutboundRTPStreamStatsFromMediaSenderInfo(video_sender_info,
outbound_video);
@@ -422,21 +421,19 @@ void SetOutboundRTPStreamStatsFromVideoSenderInfo(
rtc::kNumMillisecsPerSec;
outbound_video->total_encoded_bytes_target =
video_sender_info.total_encoded_bytes_target;
- if (enable_simulcast_stats) {
- if (video_sender_info.send_frame_width > 0) {
- outbound_video->frame_width =
- static_cast<uint32_t>(video_sender_info.send_frame_width);
- }
- if (video_sender_info.send_frame_height > 0) {
- outbound_video->frame_height =
- static_cast<uint32_t>(video_sender_info.send_frame_height);
- }
- if (video_sender_info.framerate_sent > 0) {
- outbound_video->frames_per_second = video_sender_info.framerate_sent;
- }
- outbound_video->frames_sent = video_sender_info.frames_sent;
- outbound_video->huge_frames_sent = video_sender_info.huge_frames_sent;
+ if (video_sender_info.send_frame_width > 0) {
+ outbound_video->frame_width =
+ static_cast<uint32_t>(video_sender_info.send_frame_width);
+ }
+ if (video_sender_info.send_frame_height > 0) {
+ outbound_video->frame_height =
+ static_cast<uint32_t>(video_sender_info.send_frame_height);
}
+ if (video_sender_info.framerate_sent > 0) {
+ outbound_video->frames_per_second = video_sender_info.framerate_sent;
+ }
+ outbound_video->frames_sent = video_sender_info.frames_sent;
+ outbound_video->huge_frames_sent = video_sender_info.huge_frames_sent;
outbound_video->total_packet_send_delay =
static_cast<double>(video_sender_info.total_packet_send_delay_ms) /
rtc::kNumMillisecsPerSec;
@@ -987,7 +984,6 @@ RTCStatsCollector::RTCStatsCollector(PeerConnectionInternal* pc,
RTC_DCHECK_GE(cache_lifetime_us_, 0);
pc_->SignalDataChannelCreated().connect(
this, &RTCStatsCollector::OnDataChannelCreated);
- enable_simulcast_stats_ = pc_->GetConfiguration().enable_simulcast_stats;
}
RTCStatsCollector::~RTCStatsCollector() {
@@ -1279,22 +1275,21 @@ void RTCStatsCollector::ProduceCodecStats_n(
void RTCStatsCollector::ProduceDataChannelStats_s(
int64_t timestamp_us,
RTCStatsReport* report) const {
- RTC_DCHECK(signaling_thread_->IsCurrent());
- for (const rtc::scoped_refptr<DataChannel>& data_channel :
- pc_->sctp_data_channels()) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ std::vector<DataChannel::Stats> data_stats = pc_->GetDataChannelStats();
+ for (const auto& stats : data_stats) {
std::unique_ptr<RTCDataChannelStats> data_channel_stats(
new RTCDataChannelStats(
- "RTCDataChannel_" + rtc::ToString(data_channel->internal_id()),
+ "RTCDataChannel_" + rtc::ToString(stats.internal_id),
timestamp_us));
- data_channel_stats->label = data_channel->label();
- data_channel_stats->protocol = data_channel->protocol();
- data_channel_stats->datachannelid = data_channel->id();
- data_channel_stats->state =
- DataStateToRTCDataChannelState(data_channel->state());
- data_channel_stats->messages_sent = data_channel->messages_sent();
- data_channel_stats->bytes_sent = data_channel->bytes_sent();
- data_channel_stats->messages_received = data_channel->messages_received();
- data_channel_stats->bytes_received = data_channel->bytes_received();
+ data_channel_stats->label = std::move(stats.label);
+ data_channel_stats->protocol = std::move(stats.protocol);
+ data_channel_stats->data_channel_identifier = stats.id;
+ data_channel_stats->state = DataStateToRTCDataChannelState(stats.state);
+ data_channel_stats->messages_sent = stats.messages_sent;
+ data_channel_stats->bytes_sent = stats.bytes_sent;
+ data_channel_stats->messages_received = stats.messages_received;
+ data_channel_stats->bytes_received = stats.bytes_received;
report->AddStats(std::move(data_channel_stats));
}
}
@@ -1663,16 +1658,14 @@ void RTCStatsCollector::ProduceVideoRTPStreamStats_n(
// Outbound
std::map<std::string, RTCOutboundRTPStreamStats*> video_outbound_rtps;
for (const cricket::VideoSenderInfo& video_sender_info :
- enable_simulcast_stats_
- ? track_media_info_map.video_media_info()->senders
- : track_media_info_map.video_media_info()->aggregated_senders) {
+ track_media_info_map.video_media_info()->senders) {
if (!video_sender_info.connected())
continue;
auto outbound_video = std::make_unique<RTCOutboundRTPStreamStats>(
RTCOutboundRTPStreamStatsIDFromSSRC(false, video_sender_info.ssrc()),
timestamp_us);
- SetOutboundRTPStreamStatsFromVideoSenderInfo(
- mid, video_sender_info, enable_simulcast_stats_, outbound_video.get());
+ SetOutboundRTPStreamStatsFromVideoSenderInfo(mid, video_sender_info,
+ outbound_video.get());
rtc::scoped_refptr<VideoTrackInterface> video_track =
track_media_info_map.GetVideoTrack(video_sender_info);
if (video_track) {
diff --git a/chromium/third_party/webrtc/pc/rtc_stats_collector.h b/chromium/third_party/webrtc/pc/rtc_stats_collector.h
index 7c85a35fe04..cd5ec21041a 100644
--- a/chromium/third_party/webrtc/pc/rtc_stats_collector.h
+++ b/chromium/third_party/webrtc/pc/rtc_stats_collector.h
@@ -288,7 +288,6 @@ class RTCStatsCollector : public virtual rtc::RefCountInterface,
std::set<uintptr_t> opened_data_channels;
};
InternalRecord internal_record_;
- bool enable_simulcast_stats_ = false;
};
const char* CandidateTypeToRTCIceCandidateTypeForTesting(
diff --git a/chromium/third_party/webrtc/pc/rtc_stats_collector_unittest.cc b/chromium/third_party/webrtc/pc/rtc_stats_collector_unittest.cc
index db00dd7d91d..013965c4861 100644
--- a/chromium/third_party/webrtc/pc/rtc_stats_collector_unittest.cc
+++ b/chromium/third_party/webrtc/pc/rtc_stats_collector_unittest.cc
@@ -993,7 +993,7 @@ TEST_F(RTCStatsCollectorTest, CollectRTCDataChannelStats) {
RTCDataChannelStats expected_data_channel0("RTCDataChannel_0", 0);
expected_data_channel0.label = "MockDataChannel0";
expected_data_channel0.protocol = "udp";
- expected_data_channel0.datachannelid = 0;
+ expected_data_channel0.data_channel_identifier = 0;
expected_data_channel0.state = "connecting";
expected_data_channel0.messages_sent = 1;
expected_data_channel0.bytes_sent = 2;
@@ -1005,7 +1005,7 @@ TEST_F(RTCStatsCollectorTest, CollectRTCDataChannelStats) {
RTCDataChannelStats expected_data_channel1("RTCDataChannel_1", 0);
expected_data_channel1.label = "MockDataChannel1";
expected_data_channel1.protocol = "tcp";
- expected_data_channel1.datachannelid = 1;
+ expected_data_channel1.data_channel_identifier = 1;
expected_data_channel1.state = "open";
expected_data_channel1.messages_sent = 5;
expected_data_channel1.bytes_sent = 6;
@@ -1018,7 +1018,7 @@ TEST_F(RTCStatsCollectorTest, CollectRTCDataChannelStats) {
RTCDataChannelStats expected_data_channel2("RTCDataChannel_2", 0);
expected_data_channel2.label = "MockDataChannel2";
expected_data_channel2.protocol = "udp";
- expected_data_channel2.datachannelid = 2;
+ expected_data_channel2.data_channel_identifier = 2;
expected_data_channel2.state = "closing";
expected_data_channel2.messages_sent = 9;
expected_data_channel2.bytes_sent = 10;
@@ -1031,7 +1031,7 @@ TEST_F(RTCStatsCollectorTest, CollectRTCDataChannelStats) {
RTCDataChannelStats expected_data_channel3("RTCDataChannel_3", 0);
expected_data_channel3.label = "MockDataChannel3";
expected_data_channel3.protocol = "tcp";
- expected_data_channel3.datachannelid = 3;
+ expected_data_channel3.data_channel_identifier = 3;
expected_data_channel3.state = "closed";
expected_data_channel3.messages_sent = 13;
expected_data_channel3.bytes_sent = 14;
@@ -1398,11 +1398,14 @@ TEST_F(RTCStatsCollectorTest, CollectRTCPeerConnectionStats) {
report->Get("RTCPeerConnection")->cast_to<RTCPeerConnectionStats>());
}
+ // TODO(bugs.webrtc.org/11547): Supply a separate network thread.
rtc::scoped_refptr<DataChannel> dummy_channel_a = DataChannel::Create(
- nullptr, cricket::DCT_NONE, "DummyChannelA", InternalDataChannelInit());
+ nullptr, cricket::DCT_NONE, "DummyChannelA", InternalDataChannelInit(),
+ rtc::Thread::Current(), rtc::Thread::Current());
pc_->SignalDataChannelCreated()(dummy_channel_a.get());
rtc::scoped_refptr<DataChannel> dummy_channel_b = DataChannel::Create(
- nullptr, cricket::DCT_NONE, "DummyChannelB", InternalDataChannelInit());
+ nullptr, cricket::DCT_NONE, "DummyChannelB", InternalDataChannelInit(),
+ rtc::Thread::Current(), rtc::Thread::Current());
pc_->SignalDataChannelCreated()(dummy_channel_b.get());
dummy_channel_a->SignalOpened(dummy_channel_a.get());
@@ -2075,13 +2078,11 @@ TEST_F(RTCStatsCollectorTest, CollectRTCOutboundRTPStreamStats_Video) {
expected_video.total_packet_send_delay = 10.0;
expected_video.quality_limitation_reason = "bandwidth";
expected_video.quality_limitation_resolution_changes = 56u;
- if (pc_->GetConfiguration().enable_simulcast_stats) {
- expected_video.frame_width = 200u;
- expected_video.frame_height = 100u;
- expected_video.frames_per_second = 10.0;
- expected_video.frames_sent = 5;
- expected_video.huge_frames_sent = 2;
- }
+ expected_video.frame_width = 200u;
+ expected_video.frame_height = 100u;
+ expected_video.frames_per_second = 10.0;
+ expected_video.frames_sent = 5;
+ expected_video.huge_frames_sent = 2;
// |expected_video.content_type| should be undefined.
// |expected_video.qp_sum| should be undefined.
// |expected_video.encoder_implementation| should be undefined.
diff --git a/chromium/third_party/webrtc/pc/rtc_stats_integrationtest.cc b/chromium/third_party/webrtc/pc/rtc_stats_integrationtest.cc
index d6d5c6f819c..a0fd2d02535 100644
--- a/chromium/third_party/webrtc/pc/rtc_stats_integrationtest.cc
+++ b/chromium/third_party/webrtc/pc/rtc_stats_integrationtest.cc
@@ -352,8 +352,7 @@ class RTCStatsReportVerifier {
explicit RTCStatsReportVerifier(const RTCStatsReport* report)
: report_(report) {}
- void VerifyReport(std::vector<const char*> allowed_missing_stats,
- bool enable_simulcast_stats) {
+ void VerifyReport(std::vector<const char*> allowed_missing_stats) {
std::set<const char*> missing_stats = StatsTypes();
bool verify_successful = true;
std::vector<const RTCTransportStats*> transport_stats =
@@ -396,7 +395,7 @@ class RTCStatsReportVerifier {
stats.cast_to<RTCInboundRTPStreamStats>());
} else if (stats.type() == RTCOutboundRTPStreamStats::kType) {
verify_successful &= VerifyRTCOutboundRTPStreamStats(
- stats.cast_to<RTCOutboundRTPStreamStats>(), enable_simulcast_stats);
+ stats.cast_to<RTCOutboundRTPStreamStats>());
} else if (stats.type() == RTCRemoteInboundRtpStreamStats::kType) {
verify_successful &= VerifyRTCRemoteInboundRtpStreamStats(
stats.cast_to<RTCRemoteInboundRtpStreamStats>());
@@ -461,7 +460,7 @@ class RTCStatsReportVerifier {
RTCStatsVerifier verifier(report_, &data_channel);
verifier.TestMemberIsDefined(data_channel.label);
verifier.TestMemberIsDefined(data_channel.protocol);
- verifier.TestMemberIsDefined(data_channel.datachannelid);
+ verifier.TestMemberIsDefined(data_channel.data_channel_identifier);
verifier.TestMemberIsDefined(data_channel.state);
verifier.TestMemberIsNonNegative<uint32_t>(data_channel.messages_sent);
verifier.TestMemberIsNonNegative<uint64_t>(data_channel.bytes_sent);
@@ -866,8 +865,7 @@ class RTCStatsReportVerifier {
}
bool VerifyRTCOutboundRTPStreamStats(
- const RTCOutboundRTPStreamStats& outbound_stream,
- bool enable_simulcast_stats) {
+ const RTCOutboundRTPStreamStats& outbound_stream) {
RTCStatsVerifier verifier(report_, &outbound_stream);
VerifyRTCRTPStreamStats(outbound_stream, &verifier);
if (outbound_stream.media_type.is_defined() &&
@@ -912,24 +910,23 @@ class RTCStatsReportVerifier {
// this to be present.
verifier.MarkMemberTested(outbound_stream.content_type, true);
verifier.TestMemberIsDefined(outbound_stream.encoder_implementation);
- if (enable_simulcast_stats) {
+ // Unless an implementation-specific amount of time has passed and at
+ // least one frame has been encoded, undefined is reported. Because it
+          // is hard to tell which case applies here, we treat FPS as optional.
+ // TODO(hbos): Update the tests to run until all implemented metrics
+ // should be populated.
+ if (outbound_stream.frames_per_second.is_defined()) {
verifier.TestMemberIsNonNegative<double>(
outbound_stream.frames_per_second);
- verifier.TestMemberIsNonNegative<uint32_t>(
- outbound_stream.frame_height);
- verifier.TestMemberIsNonNegative<uint32_t>(outbound_stream.frame_width);
- verifier.TestMemberIsNonNegative<uint32_t>(outbound_stream.frames_sent);
- verifier.TestMemberIsNonNegative<uint32_t>(
- outbound_stream.huge_frames_sent);
- verifier.MarkMemberTested(outbound_stream.rid, true);
} else {
verifier.TestMemberIsUndefined(outbound_stream.frames_per_second);
- verifier.TestMemberIsUndefined(outbound_stream.frame_height);
- verifier.TestMemberIsUndefined(outbound_stream.frame_width);
- verifier.TestMemberIsUndefined(outbound_stream.frames_sent);
- verifier.TestMemberIsUndefined(outbound_stream.huge_frames_sent);
- verifier.TestMemberIsUndefined(outbound_stream.rid);
}
+ verifier.TestMemberIsNonNegative<uint32_t>(outbound_stream.frame_height);
+ verifier.TestMemberIsNonNegative<uint32_t>(outbound_stream.frame_width);
+ verifier.TestMemberIsNonNegative<uint32_t>(outbound_stream.frames_sent);
+ verifier.TestMemberIsNonNegative<uint32_t>(
+ outbound_stream.huge_frames_sent);
+ verifier.MarkMemberTested(outbound_stream.rid, true);
} else {
verifier.TestMemberIsUndefined(outbound_stream.frames_encoded);
verifier.TestMemberIsUndefined(outbound_stream.key_frames_encoded);
@@ -1044,9 +1041,7 @@ TEST_F(RTCStatsIntegrationTest, GetStatsFromCaller) {
StartCall();
rtc::scoped_refptr<const RTCStatsReport> report = GetStatsFromCaller();
- RTCStatsReportVerifier(report.get())
- .VerifyReport({},
- caller_->pc()->GetConfiguration().enable_simulcast_stats);
+ RTCStatsReportVerifier(report.get()).VerifyReport({});
#if RTC_TRACE_EVENTS_ENABLED
EXPECT_EQ(report->ToJson(), RTCStatsReportTraceListener::last_trace());
@@ -1057,9 +1052,7 @@ TEST_F(RTCStatsIntegrationTest, GetStatsFromCallee) {
StartCall();
rtc::scoped_refptr<const RTCStatsReport> report = GetStatsFromCallee();
- RTCStatsReportVerifier(report.get())
- .VerifyReport({},
- caller_->pc()->GetConfiguration().enable_simulcast_stats);
+ RTCStatsReportVerifier(report.get()).VerifyReport({});
#if RTC_TRACE_EVENTS_ENABLED
EXPECT_EQ(report->ToJson(), RTCStatsReportTraceListener::last_trace());
@@ -1083,9 +1076,7 @@ TEST_F(RTCStatsIntegrationTest, GetStatsWithSenderSelector) {
RTCMediaStreamStats::kType,
RTCDataChannelStats::kType,
};
- RTCStatsReportVerifier(report.get())
- .VerifyReport(allowed_missing_stats,
- caller_->pc()->GetConfiguration().enable_simulcast_stats);
+ RTCStatsReportVerifier(report.get()).VerifyReport(allowed_missing_stats);
EXPECT_TRUE(report->size());
}
@@ -1104,9 +1095,7 @@ TEST_F(RTCStatsIntegrationTest, GetStatsWithReceiverSelector) {
RTCMediaStreamStats::kType,
RTCDataChannelStats::kType,
};
- RTCStatsReportVerifier(report.get())
- .VerifyReport(allowed_missing_stats,
- caller_->pc()->GetConfiguration().enable_simulcast_stats);
+ RTCStatsReportVerifier(report.get()).VerifyReport(allowed_missing_stats);
EXPECT_TRUE(report->size());
}
diff --git a/chromium/third_party/webrtc/pc/rtp_sender_receiver_unittest.cc b/chromium/third_party/webrtc/pc/rtp_sender_receiver_unittest.cc
index 9736f183b55..364e87a89f3 100644
--- a/chromium/third_party/webrtc/pc/rtp_sender_receiver_unittest.cc
+++ b/chromium/third_party/webrtc/pc/rtp_sender_receiver_unittest.cc
@@ -88,7 +88,7 @@ static const int kDefaultTimeout = 10000; // 10 seconds.
class MockSetStreamsObserver
: public webrtc::RtpSenderBase::SetStreamsObserver {
public:
- MOCK_METHOD0(OnSetStreams, void());
+ MOCK_METHOD(void, OnSetStreams, (), (override));
};
} // namespace
@@ -123,14 +123,13 @@ class RtpSenderReceiverTest
voice_channel_ = channel_manager_.CreateVoiceChannel(
&fake_call_, cricket::MediaConfig(), rtp_transport_.get(),
- MediaTransportConfig(), rtc::Thread::Current(), cricket::CN_AUDIO,
- srtp_required, webrtc::CryptoOptions(), &ssrc_generator_,
- cricket::AudioOptions());
+ rtc::Thread::Current(), cricket::CN_AUDIO, srtp_required,
+ webrtc::CryptoOptions(), &ssrc_generator_, cricket::AudioOptions());
video_channel_ = channel_manager_.CreateVideoChannel(
&fake_call_, cricket::MediaConfig(), rtp_transport_.get(),
- MediaTransportConfig(), rtc::Thread::Current(), cricket::CN_VIDEO,
- srtp_required, webrtc::CryptoOptions(), &ssrc_generator_,
- cricket::VideoOptions(), video_bitrate_allocator_factory_.get());
+ rtc::Thread::Current(), cricket::CN_VIDEO, srtp_required,
+ webrtc::CryptoOptions(), &ssrc_generator_, cricket::VideoOptions(),
+ video_bitrate_allocator_factory_.get());
voice_channel_->Enable(true);
video_channel_->Enable(true);
voice_media_channel_ = media_engine_->GetVoiceChannel(0);
diff --git a/chromium/third_party/webrtc/pc/rtp_transceiver.cc b/chromium/third_party/webrtc/pc/rtp_transceiver.cc
index d6e5ff46a12..b4e500bbc8d 100644
--- a/chromium/third_party/webrtc/pc/rtp_transceiver.cc
+++ b/chromium/third_party/webrtc/pc/rtp_transceiver.cc
@@ -114,7 +114,7 @@ RtpTransceiver::RtpTransceiver(
: unified_plan_(true),
media_type_(sender->media_type()),
channel_manager_(channel_manager),
- HeaderExtensionsToOffer_(std::move(header_extensions_offered)) {
+ header_extensions_to_offer_(std::move(header_extensions_offered)) {
RTC_DCHECK(media_type_ == cricket::MEDIA_TYPE_AUDIO ||
media_type_ == cricket::MEDIA_TYPE_VIDEO);
RTC_DCHECK_EQ(sender->media_type(), receiver->media_type());
@@ -356,7 +356,51 @@ RTCError RtpTransceiver::SetCodecPreferences(
std::vector<RtpHeaderExtensionCapability>
RtpTransceiver::HeaderExtensionsToOffer() const {
- return HeaderExtensionsToOffer_;
+ return header_extensions_to_offer_;
+}
+
+RTCError RtpTransceiver::SetOfferedRtpHeaderExtensions(
+ rtc::ArrayView<const RtpHeaderExtensionCapability>
+ header_extensions_to_offer) {
+ for (const auto& entry : header_extensions_to_offer) {
+ // Handle unsupported requests for mandatory extensions as per
+ // https://w3c.github.io/webrtc-extensions/#rtcrtptransceiver-interface.
+ // Note:
+ // - We do not handle setOfferedRtpHeaderExtensions algorithm step 2.1,
+ // this has to be checked on a higher level. We naturally error out
+ // in the handling of Step 2.2 if an unset URI is encountered.
+
+ // Step 2.2.
+ // Handle unknown extensions.
+ auto it = std::find_if(
+ header_extensions_to_offer_.begin(), header_extensions_to_offer_.end(),
+ [&entry](const auto& offered) { return entry.uri == offered.uri; });
+ if (it == header_extensions_to_offer_.end()) {
+ return RTCError(RTCErrorType::INVALID_PARAMETER,
+ "Attempted to modify an unoffered extension.");
+ }
+
+ // Step 2.4-2.5.
+ // - Use of the transceiver interface indicates unified plan is in effect,
+ // hence the MID extension needs to be enabled.
+ // - Also handle the mandatory video orientation extensions.
+ if ((entry.uri == RtpExtension::kMidUri ||
+ entry.uri == RtpExtension::kVideoRotationUri) &&
+ entry.direction != RtpTransceiverDirection::kSendRecv) {
+ return RTCError(RTCErrorType::INVALID_MODIFICATION,
+ "Attempted to stop a mandatory extension.");
+ }
+ }
+
+ // Apply mutation after error checking.
+ for (const auto& entry : header_extensions_to_offer) {
+ auto it = std::find_if(
+ header_extensions_to_offer_.begin(), header_extensions_to_offer_.end(),
+ [&entry](const auto& offered) { return entry.uri == offered.uri; });
+ it->direction = entry.direction;
+ }
+
+ return RTCError::OK();
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/pc/rtp_transceiver.h b/chromium/third_party/webrtc/pc/rtp_transceiver.h
index 0668447b9f1..be46ccfd5c1 100644
--- a/chromium/third_party/webrtc/pc/rtp_transceiver.h
+++ b/chromium/third_party/webrtc/pc/rtp_transceiver.h
@@ -195,6 +195,9 @@ class RtpTransceiver final
}
std::vector<RtpHeaderExtensionCapability> HeaderExtensionsToOffer()
const override;
+ RTCError SetOfferedRtpHeaderExtensions(
+ rtc::ArrayView<const RtpHeaderExtensionCapability>
+ header_extensions_to_offer) override;
private:
void OnFirstPacketReceived(cricket::ChannelInterface* channel);
@@ -220,7 +223,7 @@ class RtpTransceiver final
cricket::ChannelInterface* channel_ = nullptr;
cricket::ChannelManager* channel_manager_ = nullptr;
std::vector<RtpCodecCapability> codec_preferences_;
- std::vector<RtpHeaderExtensionCapability> HeaderExtensionsToOffer_;
+ std::vector<RtpHeaderExtensionCapability> header_extensions_to_offer_;
};
BEGIN_SIGNALING_PROXY_MAP(RtpTransceiver)
@@ -241,6 +244,9 @@ PROXY_METHOD1(webrtc::RTCError,
PROXY_CONSTMETHOD0(std::vector<RtpCodecCapability>, codec_preferences)
PROXY_CONSTMETHOD0(std::vector<RtpHeaderExtensionCapability>,
HeaderExtensionsToOffer)
+PROXY_METHOD1(webrtc::RTCError,
+ SetOfferedRtpHeaderExtensions,
+ rtc::ArrayView<const RtpHeaderExtensionCapability>)
END_PROXY_MAP()
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/pc/rtp_transceiver_unittest.cc b/chromium/third_party/webrtc/pc/rtp_transceiver_unittest.cc
index 5e345739f17..e3f05c4dd92 100644
--- a/chromium/third_party/webrtc/pc/rtp_transceiver_unittest.cc
+++ b/chromium/third_party/webrtc/pc/rtp_transceiver_unittest.cc
@@ -25,6 +25,7 @@ using ::testing::ElementsAre;
using ::testing::Eq;
using ::testing::Field;
using ::testing::Not;
+using ::testing::Property;
using ::testing::Return;
using ::testing::ReturnRef;
@@ -78,27 +79,95 @@ TEST(RtpTransceiverTest, CanUnsetChannelOnStoppedTransceiver) {
EXPECT_EQ(nullptr, transceiver.channel());
}
-TEST(RtpTransceiverTest,
- InitsWithChannelManagerRtpHeaderExtensionCapabilities) {
- cricket::ChannelManager channel_manager(
- std::make_unique<cricket::FakeMediaEngine>(),
- std::make_unique<cricket::FakeDataEngine>(), rtc::Thread::Current(),
- rtc::Thread::Current());
- std::vector<RtpHeaderExtensionCapability> extensions({
- RtpHeaderExtensionCapability("uri1", 1,
- RtpTransceiverDirection::kSendRecv),
- RtpHeaderExtensionCapability("uri2", 2,
- RtpTransceiverDirection::kRecvOnly),
- });
- RtpTransceiver transceiver(
- RtpSenderProxyWithInternal<RtpSenderInternal>::Create(
- rtc::Thread::Current(),
- new rtc::RefCountedObject<MockRtpSenderInternal>()),
- RtpReceiverProxyWithInternal<RtpReceiverInternal>::Create(
- rtc::Thread::Current(),
- new rtc::RefCountedObject<MockRtpReceiverInternal>()),
- &channel_manager, extensions);
- EXPECT_EQ(transceiver.HeaderExtensionsToOffer(), extensions);
+class RtpTransceiverTestForHeaderExtensions : public ::testing::Test {
+ public:
+ RtpTransceiverTestForHeaderExtensions()
+ : channel_manager_(std::make_unique<cricket::FakeMediaEngine>(),
+ std::make_unique<cricket::FakeDataEngine>(),
+ rtc::Thread::Current(),
+ rtc::Thread::Current()),
+ extensions_(
+ {RtpHeaderExtensionCapability("uri1",
+ 1,
+ RtpTransceiverDirection::kSendOnly),
+ RtpHeaderExtensionCapability("uri2",
+ 2,
+ RtpTransceiverDirection::kRecvOnly),
+ RtpHeaderExtensionCapability(RtpExtension::kMidUri,
+ 3,
+ RtpTransceiverDirection::kSendRecv),
+ RtpHeaderExtensionCapability(RtpExtension::kVideoRotationUri,
+ 4,
+ RtpTransceiverDirection::kSendRecv)}),
+ transceiver_(RtpSenderProxyWithInternal<RtpSenderInternal>::Create(
+ rtc::Thread::Current(),
+ new rtc::RefCountedObject<MockRtpSenderInternal>()),
+ RtpReceiverProxyWithInternal<RtpReceiverInternal>::Create(
+ rtc::Thread::Current(),
+ new rtc::RefCountedObject<MockRtpReceiverInternal>()),
+ &channel_manager_,
+ extensions_) {}
+
+ cricket::ChannelManager channel_manager_;
+ std::vector<RtpHeaderExtensionCapability> extensions_;
+ RtpTransceiver transceiver_;
+};
+
+TEST_F(RtpTransceiverTestForHeaderExtensions, OffersChannelManagerList) {
+ EXPECT_EQ(transceiver_.HeaderExtensionsToOffer(), extensions_);
+}
+
+TEST_F(RtpTransceiverTestForHeaderExtensions, ModifiesDirection) {
+ auto modified_extensions = extensions_;
+ modified_extensions[0].direction = RtpTransceiverDirection::kSendOnly;
+ EXPECT_TRUE(
+ transceiver_.SetOfferedRtpHeaderExtensions(modified_extensions).ok());
+ EXPECT_EQ(transceiver_.HeaderExtensionsToOffer(), modified_extensions);
+ modified_extensions[0].direction = RtpTransceiverDirection::kRecvOnly;
+ EXPECT_TRUE(
+ transceiver_.SetOfferedRtpHeaderExtensions(modified_extensions).ok());
+ EXPECT_EQ(transceiver_.HeaderExtensionsToOffer(), modified_extensions);
+ modified_extensions[0].direction = RtpTransceiverDirection::kSendRecv;
+ EXPECT_TRUE(
+ transceiver_.SetOfferedRtpHeaderExtensions(modified_extensions).ok());
+ EXPECT_EQ(transceiver_.HeaderExtensionsToOffer(), modified_extensions);
+ modified_extensions[0].direction = RtpTransceiverDirection::kInactive;
+ EXPECT_TRUE(
+ transceiver_.SetOfferedRtpHeaderExtensions(modified_extensions).ok());
+ EXPECT_EQ(transceiver_.HeaderExtensionsToOffer(), modified_extensions);
+}
+
+TEST_F(RtpTransceiverTestForHeaderExtensions, AcceptsStoppedExtension) {
+ auto modified_extensions = extensions_;
+ modified_extensions[0].direction = RtpTransceiverDirection::kStopped;
+ EXPECT_TRUE(
+ transceiver_.SetOfferedRtpHeaderExtensions(modified_extensions).ok());
+ EXPECT_EQ(transceiver_.HeaderExtensionsToOffer(), modified_extensions);
+}
+
+TEST_F(RtpTransceiverTestForHeaderExtensions, RejectsUnsupportedExtension) {
+ std::vector<RtpHeaderExtensionCapability> modified_extensions(
+ {RtpHeaderExtensionCapability("uri3", 1,
+ RtpTransceiverDirection::kSendRecv)});
+ EXPECT_THAT(transceiver_.SetOfferedRtpHeaderExtensions(modified_extensions),
+ Property(&RTCError::type, RTCErrorType::INVALID_PARAMETER));
+ EXPECT_EQ(transceiver_.HeaderExtensionsToOffer(), extensions_);
+}
+
+TEST_F(RtpTransceiverTestForHeaderExtensions,
+ RejectsStoppedMandatoryExtensions) {
+ std::vector<RtpHeaderExtensionCapability> modified_extensions = extensions_;
+ // Attempting to stop the mandatory MID extension.
+ modified_extensions[2].direction = RtpTransceiverDirection::kStopped;
+ EXPECT_THAT(transceiver_.SetOfferedRtpHeaderExtensions(modified_extensions),
+ Property(&RTCError::type, RTCErrorType::INVALID_MODIFICATION));
+ EXPECT_EQ(transceiver_.HeaderExtensionsToOffer(), extensions_);
+ modified_extensions = extensions_;
+ // Attempting to stop the mandatory video orientation extension.
+ modified_extensions[3].direction = RtpTransceiverDirection::kStopped;
+ EXPECT_THAT(transceiver_.SetOfferedRtpHeaderExtensions(modified_extensions),
+ Property(&RTCError::type, RTCErrorType::INVALID_MODIFICATION));
+ EXPECT_EQ(transceiver_.HeaderExtensionsToOffer(), extensions_);
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/pc/sctp_utils.cc b/chromium/third_party/webrtc/pc/sctp_utils.cc
index 9cdff0e4da6..1882a1525f7 100644
--- a/chromium/third_party/webrtc/pc/sctp_utils.cc
+++ b/chromium/third_party/webrtc/pc/sctp_utils.cc
@@ -13,6 +13,7 @@
#include <stddef.h>
#include <stdint.h>
+#include "api/priority.h"
#include "rtc_base/byte_buffer.h"
#include "rtc_base/copy_on_write_buffer.h"
#include "rtc_base/logging.h"
@@ -34,6 +35,15 @@ enum DataChannelOpenMessageChannelType {
DCOMCT_UNORDERED_PARTIAL_TIME = 0x82,
};
+// Values of priority in the DC open protocol message.
+// These are compared against an integer, so they are a plain enum, not an enum class.
+enum DataChannelPriority {
+ DCO_PRIORITY_VERY_LOW = 128,
+ DCO_PRIORITY_LOW = 256,
+ DCO_PRIORITY_MEDIUM = 512,
+ DCO_PRIORITY_HIGH = 1024,
+};
+
bool IsOpenMessage(const rtc::CopyOnWriteBuffer& payload) {
// Format defined at
// http://tools.ietf.org/html/draft-jesup-rtcweb-data-protocol-04
@@ -76,6 +86,18 @@ bool ParseDataChannelOpenMessage(const rtc::CopyOnWriteBuffer& payload,
<< "Could not read OPEN message reliabilility prioirty.";
return false;
}
+ // Parse priority as defined in
+ // https://w3c.github.io/webrtc-priority/#rtcdatachannel-processing-steps
+ if (priority <= DCO_PRIORITY_VERY_LOW) {
+ config->priority = Priority::kVeryLow;
+ } else if (priority <= DCO_PRIORITY_LOW) {
+ config->priority = Priority::kLow;
+ } else if (priority <= DCO_PRIORITY_MEDIUM) {
+ config->priority = Priority::kMedium;
+ } else {
+ config->priority = Priority::kHigh;
+ }
+
uint32_t reliability_param;
if (!buffer.ReadUInt32(&reliability_param)) {
RTC_LOG(LS_WARNING) << "Could not read OPEN message reliabilility param.";
@@ -146,6 +168,24 @@ bool WriteDataChannelOpenMessage(const std::string& label,
uint8_t channel_type = 0;
uint32_t reliability_param = 0;
uint16_t priority = 0;
+ // Set priority according to
+ // https://tools.ietf.org/html/draft-ietf-rtcweb-data-channel-12#section-6.4
+ if (config.priority) {
+ switch (*config.priority) {
+ case Priority::kVeryLow:
+ priority = DCO_PRIORITY_VERY_LOW;
+ break;
+ case Priority::kLow:
+ priority = DCO_PRIORITY_LOW;
+ break;
+ case Priority::kMedium:
+ priority = DCO_PRIORITY_MEDIUM;
+ break;
+ case Priority::kHigh:
+ priority = DCO_PRIORITY_HIGH;
+ break;
+ }
+ }
if (config.ordered) {
if (config.maxRetransmits) {
channel_type = DCOMCT_ORDERED_PARTIAL_RTXS;
diff --git a/chromium/third_party/webrtc/pc/sctp_utils_unittest.cc b/chromium/third_party/webrtc/pc/sctp_utils_unittest.cc
index 70c627714da..690a9dc5239 100644
--- a/chromium/third_party/webrtc/pc/sctp_utils_unittest.cc
+++ b/chromium/third_party/webrtc/pc/sctp_utils_unittest.cc
@@ -45,6 +45,13 @@ class SctpUtilsTest : public ::testing::Test {
}
ASSERT_TRUE(buffer.ReadUInt16(&priority));
+ if (config.priority) {
+ // Exact values are checked by round-trip conversion, but
+ // all values defined are greater than zero.
+ EXPECT_GT(priority, 0);
+ } else {
+ EXPECT_EQ(priority, 0);
+ }
ASSERT_TRUE(buffer.ReadUInt32(&reliability));
if (config.maxRetransmits || config.maxRetransmitTime) {
@@ -136,6 +143,27 @@ TEST_F(SctpUtilsTest, WriteParseOpenMessageWithMaxRetransmits) {
EXPECT_FALSE(output_config.maxRetransmitTime);
}
+TEST_F(SctpUtilsTest, WriteParseOpenMessageWithPriority) {
+ webrtc::DataChannelInit config;
+ std::string label = "abc";
+ config.protocol = "y";
+ config.priority = webrtc::Priority::kVeryLow;
+
+ rtc::CopyOnWriteBuffer packet;
+ ASSERT_TRUE(webrtc::WriteDataChannelOpenMessage(label, config, &packet));
+
+ VerifyOpenMessageFormat(packet, label, config);
+
+ std::string output_label;
+ webrtc::DataChannelInit output_config;
+ ASSERT_TRUE(webrtc::ParseDataChannelOpenMessage(packet, &output_label,
+ &output_config));
+
+ EXPECT_EQ(label, output_label);
+ ASSERT_TRUE(output_config.priority);
+ EXPECT_EQ(*config.priority, *output_config.priority);
+}
+
TEST_F(SctpUtilsTest, WriteParseAckMessage) {
rtc::CopyOnWriteBuffer packet;
webrtc::WriteDataChannelOpenAckMessage(&packet);
diff --git a/chromium/third_party/webrtc/pc/session_description.h b/chromium/third_party/webrtc/pc/session_description.h
index bfd19b8c7ad..27142e1c726 100644
--- a/chromium/third_party/webrtc/pc/session_description.h
+++ b/chromium/third_party/webrtc/pc/session_description.h
@@ -253,13 +253,6 @@ class MediaContentDescription {
receive_rids_ = rids;
}
- virtual const absl::optional<std::string>& alt_protocol() const {
- return alt_protocol_;
- }
- virtual void set_alt_protocol(const absl::optional<std::string>& protocol) {
- alt_protocol_ = protocol;
- }
-
protected:
bool rtcp_mux_ = false;
bool rtcp_reduced_size_ = false;
@@ -282,8 +275,6 @@ class MediaContentDescription {
SimulcastDescription simulcast_;
std::vector<RidDescription> receive_rids_;
- absl::optional<std::string> alt_protocol_;
-
private:
// Copy function that returns a raw pointer. Caller will assert ownership.
// Should only be called by the Clone() function. Must be implemented
diff --git a/chromium/third_party/webrtc/pc/stats_collector.cc b/chromium/third_party/webrtc/pc/stats_collector.cc
index 0509c6dc19c..317e4443d41 100644
--- a/chromium/third_party/webrtc/pc/stats_collector.cc
+++ b/chromium/third_party/webrtc/pc/stats_collector.cc
@@ -1146,19 +1146,20 @@ void StatsCollector::ExtractDataInfo() {
rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
- for (const auto& dc : pc_->sctp_data_channels()) {
+ std::vector<DataChannel::Stats> data_stats = pc_->GetDataChannelStats();
+ for (const auto& stats : data_stats) {
StatsReport::Id id(StatsReport::NewTypedIntId(
- StatsReport::kStatsReportTypeDataChannel, dc->id()));
+ StatsReport::kStatsReportTypeDataChannel, stats.id));
StatsReport* report = reports_.ReplaceOrAddNew(id);
report->set_timestamp(stats_gathering_started_);
- report->AddString(StatsReport::kStatsValueNameLabel, dc->label());
+ report->AddString(StatsReport::kStatsValueNameLabel, stats.label);
// Filter out the initial id (-1).
- if (dc->id() >= 0) {
- report->AddInt(StatsReport::kStatsValueNameDataChannelId, dc->id());
+ if (stats.id >= 0) {
+ report->AddInt(StatsReport::kStatsValueNameDataChannelId, stats.id);
}
- report->AddString(StatsReport::kStatsValueNameProtocol, dc->protocol());
+ report->AddString(StatsReport::kStatsValueNameProtocol, stats.protocol);
report->AddString(StatsReport::kStatsValueNameState,
- DataChannelInterface::DataStateString(dc->state()));
+ DataChannelInterface::DataStateString(stats.state));
}
}
diff --git a/chromium/third_party/webrtc/pc/test/fake_audio_capture_module.cc b/chromium/third_party/webrtc/pc/test/fake_audio_capture_module.cc
index db0886ddad4..1a7efd4ad12 100644
--- a/chromium/third_party/webrtc/pc/test/fake_audio_capture_module.cc
+++ b/chromium/third_party/webrtc/pc/test/fake_audio_capture_module.cc
@@ -47,7 +47,9 @@ FakeAudioCaptureModule::FakeAudioCaptureModule()
current_mic_level_(kMaxVolume),
started_(false),
next_frame_time_(0),
- frames_received_(0) {}
+ frames_received_(0) {
+ process_thread_checker_.Detach();
+}
FakeAudioCaptureModule::~FakeAudioCaptureModule() {
if (process_thread_) {
@@ -77,7 +79,7 @@ int32_t FakeAudioCaptureModule::ActiveAudioLayer(
int32_t FakeAudioCaptureModule::RegisterAudioCallback(
webrtc::AudioTransport* audio_callback) {
- rtc::CritScope cs(&crit_callback_);
+ rtc::CritScope cs(&crit_);
audio_callback_ = audio_callback;
return 0;
}
@@ -448,29 +450,34 @@ void FakeAudioCaptureModule::UpdateProcessing(bool start) {
if (process_thread_) {
process_thread_->Stop();
process_thread_.reset(nullptr);
+ process_thread_checker_.Detach();
}
+ rtc::CritScope lock(&crit_);
started_ = false;
}
}
void FakeAudioCaptureModule::StartProcessP() {
- RTC_CHECK(process_thread_->IsCurrent());
- if (started_) {
- // Already started.
- return;
+ RTC_DCHECK_RUN_ON(&process_thread_checker_);
+ {
+ rtc::CritScope lock(&crit_);
+ if (started_) {
+ // Already started.
+ return;
+ }
}
ProcessFrameP();
}
void FakeAudioCaptureModule::ProcessFrameP() {
- RTC_CHECK(process_thread_->IsCurrent());
- if (!started_) {
- next_frame_time_ = rtc::TimeMillis();
- started_ = true;
- }
-
+ RTC_DCHECK_RUN_ON(&process_thread_checker_);
{
rtc::CritScope cs(&crit_);
+ if (!started_) {
+ next_frame_time_ = rtc::TimeMillis();
+ started_ = true;
+ }
+
// Receive and send frames every kTimePerFrameMs.
if (playing_) {
ReceiveFrameP();
@@ -488,24 +495,22 @@ void FakeAudioCaptureModule::ProcessFrameP() {
}
void FakeAudioCaptureModule::ReceiveFrameP() {
- RTC_CHECK(process_thread_->IsCurrent());
- {
- rtc::CritScope cs(&crit_callback_);
- if (!audio_callback_) {
- return;
- }
- ResetRecBuffer();
- size_t nSamplesOut = 0;
- int64_t elapsed_time_ms = 0;
- int64_t ntp_time_ms = 0;
- if (audio_callback_->NeedMorePlayData(
- kNumberSamples, kNumberBytesPerSample, kNumberOfChannels,
- kSamplesPerSecond, rec_buffer_, nSamplesOut, &elapsed_time_ms,
- &ntp_time_ms) != 0) {
- RTC_NOTREACHED();
- }
- RTC_CHECK(nSamplesOut == kNumberSamples);
+ RTC_DCHECK_RUN_ON(&process_thread_checker_);
+ if (!audio_callback_) {
+ return;
+ }
+ ResetRecBuffer();
+ size_t nSamplesOut = 0;
+ int64_t elapsed_time_ms = 0;
+ int64_t ntp_time_ms = 0;
+ if (audio_callback_->NeedMorePlayData(kNumberSamples, kNumberBytesPerSample,
+ kNumberOfChannels, kSamplesPerSecond,
+ rec_buffer_, nSamplesOut,
+ &elapsed_time_ms, &ntp_time_ms) != 0) {
+ RTC_NOTREACHED();
}
+ RTC_CHECK(nSamplesOut == kNumberSamples);
+
// The SetBuffer() function ensures that after decoding, the audio buffer
// should contain samples of similar magnitude (there is likely to be some
// distortion due to the audio pipeline). If one sample is detected to
@@ -513,25 +518,22 @@ void FakeAudioCaptureModule::ReceiveFrameP() {
// has been received from the remote side (i.e. faked frames are not being
// pulled).
if (CheckRecBuffer(kHighSampleValue)) {
- rtc::CritScope cs(&crit_);
++frames_received_;
}
}
void FakeAudioCaptureModule::SendFrameP() {
- RTC_CHECK(process_thread_->IsCurrent());
- rtc::CritScope cs(&crit_callback_);
+ RTC_DCHECK_RUN_ON(&process_thread_checker_);
if (!audio_callback_) {
return;
}
bool key_pressed = false;
- uint32_t current_mic_level = 0;
- MicrophoneVolume(&current_mic_level);
+ uint32_t current_mic_level = current_mic_level_;
if (audio_callback_->RecordedDataIsAvailable(
send_buffer_, kNumberSamples, kNumberBytesPerSample,
kNumberOfChannels, kSamplesPerSecond, kTotalDelayMs, kClockDriftMs,
current_mic_level, key_pressed, current_mic_level) != 0) {
RTC_NOTREACHED();
}
- SetMicrophoneVolume(current_mic_level);
+ current_mic_level_ = current_mic_level;
}
diff --git a/chromium/third_party/webrtc/pc/test/fake_audio_capture_module.h b/chromium/third_party/webrtc/pc/test/fake_audio_capture_module.h
index 0af38102908..2a5d54c84e4 100644
--- a/chromium/third_party/webrtc/pc/test/fake_audio_capture_module.h
+++ b/chromium/third_party/webrtc/pc/test/fake_audio_capture_module.h
@@ -26,13 +26,14 @@
#include "modules/audio_device/include/audio_device.h"
#include "rtc_base/critical_section.h"
#include "rtc_base/message_handler.h"
+#include "rtc_base/synchronization/sequence_checker.h"
namespace rtc {
class Thread;
} // namespace rtc
class FakeAudioCaptureModule : public webrtc::AudioDeviceModule,
- public rtc::MessageHandler {
+ public rtc::MessageHandlerAutoCleanup {
public:
typedef uint16_t Sample;
@@ -47,13 +48,13 @@ class FakeAudioCaptureModule : public webrtc::AudioDeviceModule,
// Returns the number of frames that have been successfully pulled by the
// instance. Note that correctly detecting success can only be done if the
// pulled frame was generated/pushed from a FakeAudioCaptureModule.
- int frames_received() const;
+ int frames_received() const RTC_LOCKS_EXCLUDED(crit_);
int32_t ActiveAudioLayer(AudioLayer* audio_layer) const override;
// Note: Calling this method from a callback may result in deadlock.
- int32_t RegisterAudioCallback(
- webrtc::AudioTransport* audio_callback) override;
+ int32_t RegisterAudioCallback(webrtc::AudioTransport* audio_callback) override
+ RTC_LOCKS_EXCLUDED(crit_);
int32_t Init() override;
int32_t Terminate() override;
@@ -80,12 +81,12 @@ class FakeAudioCaptureModule : public webrtc::AudioDeviceModule,
int32_t InitRecording() override;
bool RecordingIsInitialized() const override;
- int32_t StartPlayout() override;
- int32_t StopPlayout() override;
- bool Playing() const override;
- int32_t StartRecording() override;
- int32_t StopRecording() override;
- bool Recording() const override;
+ int32_t StartPlayout() RTC_LOCKS_EXCLUDED(crit_) override;
+ int32_t StopPlayout() RTC_LOCKS_EXCLUDED(crit_) override;
+ bool Playing() const RTC_LOCKS_EXCLUDED(crit_) override;
+ int32_t StartRecording() RTC_LOCKS_EXCLUDED(crit_) override;
+ int32_t StopRecording() RTC_LOCKS_EXCLUDED(crit_) override;
+ bool Recording() const RTC_LOCKS_EXCLUDED(crit_) override;
int32_t InitSpeaker() override;
bool SpeakerIsInitialized() const override;
@@ -99,8 +100,10 @@ class FakeAudioCaptureModule : public webrtc::AudioDeviceModule,
int32_t MinSpeakerVolume(uint32_t* min_volume) const override;
int32_t MicrophoneVolumeIsAvailable(bool* available) override;
- int32_t SetMicrophoneVolume(uint32_t volume) override;
- int32_t MicrophoneVolume(uint32_t* volume) const override;
+ int32_t SetMicrophoneVolume(uint32_t volume)
+ RTC_LOCKS_EXCLUDED(crit_) override;
+ int32_t MicrophoneVolume(uint32_t* volume) const
+ RTC_LOCKS_EXCLUDED(crit_) override;
int32_t MaxMicrophoneVolume(uint32_t* max_volume) const override;
int32_t MinMicrophoneVolume(uint32_t* min_volume) const override;
@@ -170,26 +173,28 @@ class FakeAudioCaptureModule : public webrtc::AudioDeviceModule,
// Returns true/false depending on if recording or playback has been
// enabled/started.
- bool ShouldStartProcessing();
+ bool ShouldStartProcessing() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
// Starts or stops the pushing and pulling of audio frames.
- void UpdateProcessing(bool start);
+ void UpdateProcessing(bool start) RTC_LOCKS_EXCLUDED(crit_);
// Starts the periodic calling of ProcessFrame() in a thread safe way.
void StartProcessP();
// Periodcally called function that ensures that frames are pulled and pushed
// periodically if enabled/started.
- void ProcessFrameP();
+ void ProcessFrameP() RTC_LOCKS_EXCLUDED(crit_);
// Pulls frames from the registered webrtc::AudioTransport.
- void ReceiveFrameP();
+ void ReceiveFrameP() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
// Pushes frames to the registered webrtc::AudioTransport.
- void SendFrameP();
+ void SendFrameP() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
// Callback for playout and recording.
- webrtc::AudioTransport* audio_callback_;
+ webrtc::AudioTransport* audio_callback_ RTC_GUARDED_BY(crit_);
- bool recording_; // True when audio is being pushed from the instance.
- bool playing_; // True when audio is being pulled by the instance.
+ bool recording_ RTC_GUARDED_BY(
+ crit_); // True when audio is being pushed from the instance.
+ bool playing_ RTC_GUARDED_BY(
+ crit_); // True when audio is being pulled by the instance.
bool play_is_initialized_; // True when the instance is ready to pull audio.
bool rec_is_initialized_; // True when the instance is ready to push audio.
@@ -197,13 +202,13 @@ class FakeAudioCaptureModule : public webrtc::AudioDeviceModule,
// Input to and output from RecordedDataIsAvailable(..) makes it possible to
// modify the current mic level. The implementation does not care about the
// mic level so it just feeds back what it receives.
- uint32_t current_mic_level_;
+ uint32_t current_mic_level_ RTC_GUARDED_BY(crit_);
// next_frame_time_ is updated in a non-drifting manner to indicate the next
// wall clock time the next frame should be generated and received. started_
// ensures that next_frame_time_ can be initialized properly on first call.
- bool started_;
- int64_t next_frame_time_;
+ bool started_ RTC_GUARDED_BY(crit_);
+ int64_t next_frame_time_ RTC_GUARDED_BY(process_thread_checker_);
std::unique_ptr<rtc::Thread> process_thread_;
@@ -220,9 +225,7 @@ class FakeAudioCaptureModule : public webrtc::AudioDeviceModule,
// Protects variables that are accessed from process_thread_ and
// the main thread.
rtc::CriticalSection crit_;
- // Protects |audio_callback_| that is accessed from process_thread_ and
- // the main thread.
- rtc::CriticalSection crit_callback_;
+ webrtc::SequenceChecker process_thread_checker_;
};
#endif // PC_TEST_FAKE_AUDIO_CAPTURE_MODULE_H_
diff --git a/chromium/third_party/webrtc/pc/test/fake_peer_connection_base.h b/chromium/third_party/webrtc/pc/test/fake_peer_connection_base.h
index f4b27f03e18..e1663e6d9f8 100644
--- a/chromium/third_party/webrtc/pc/test/fake_peer_connection_base.h
+++ b/chromium/third_party/webrtc/pc/test/fake_peer_connection_base.h
@@ -254,11 +254,6 @@ class FakePeerConnectionBase : public PeerConnectionInternal {
cricket::RtpDataChannel* rtp_data_channel() const override { return nullptr; }
- std::vector<rtc::scoped_refptr<DataChannel>> sctp_data_channels()
- const override {
- return {};
- }
-
absl::optional<std::string> sctp_transport_name() const override {
return absl::nullopt;
}
diff --git a/chromium/third_party/webrtc/pc/test/fake_peer_connection_for_stats.h b/chromium/third_party/webrtc/pc/test/fake_peer_connection_for_stats.h
index c6391583f57..175a1ede153 100644
--- a/chromium/third_party/webrtc/pc/test/fake_peer_connection_for_stats.h
+++ b/chromium/third_party/webrtc/pc/test/fake_peer_connection_for_stats.h
@@ -174,8 +174,10 @@ class FakePeerConnectionForStats : public FakePeerConnectionBase {
void AddSctpDataChannel(const std::string& label,
const InternalDataChannelInit& init) {
- AddSctpDataChannel(DataChannel::Create(&data_channel_provider_,
- cricket::DCT_SCTP, label, init));
+ // TODO(bugs.webrtc.org/11547): Supply a separate network thread.
+ AddSctpDataChannel(DataChannel::Create(
+ &data_channel_provider_, cricket::DCT_SCTP, label, init,
+ rtc::Thread::Current(), rtc::Thread::Current()));
}
void AddSctpDataChannel(rtc::scoped_refptr<DataChannel> data_channel) {
@@ -257,9 +259,12 @@ class FakePeerConnectionForStats : public FakePeerConnectionBase {
return transceivers_;
}
- std::vector<rtc::scoped_refptr<DataChannel>> sctp_data_channels()
- const override {
- return sctp_data_channels_;
+ std::vector<DataChannel::Stats> GetDataChannelStats() const override {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ std::vector<DataChannel::Stats> stats;
+ for (const auto& channel : sctp_data_channels_)
+ stats.push_back(channel->GetStats());
+ return stats;
}
cricket::CandidateStatsList GetPooledCandidateStats() const override {
diff --git a/chromium/third_party/webrtc/pc/test/fake_periodic_video_source.h b/chromium/third_party/webrtc/pc/test/fake_periodic_video_source.h
index 1684ca4adbb..b1cff4e5edc 100644
--- a/chromium/third_party/webrtc/pc/test/fake_periodic_video_source.h
+++ b/chromium/third_party/webrtc/pc/test/fake_periodic_video_source.h
@@ -16,6 +16,7 @@
#include "api/video/video_source_interface.h"
#include "media/base/fake_frame_source.h"
#include "media/base/video_broadcaster.h"
+#include "rtc_base/critical_section.h"
#include "rtc_base/task_queue_for_test.h"
#include "rtc_base/task_utils/repeating_task.h"
@@ -59,6 +60,11 @@ class FakePeriodicVideoSource final
});
}
+ rtc::VideoSinkWants wants() const {
+ rtc::CritScope cs(&crit_);
+ return wants_;
+ }
+
void RemoveSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) override {
RTC_DCHECK(thread_checker_.IsCurrent());
broadcaster_.RemoveSink(sink);
@@ -67,6 +73,10 @@ class FakePeriodicVideoSource final
void AddOrUpdateSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
const rtc::VideoSinkWants& wants) override {
RTC_DCHECK(thread_checker_.IsCurrent());
+ {
+ rtc::CritScope cs(&crit_);
+ wants_ = wants;
+ }
broadcaster_.AddOrUpdateSink(sink, wants);
}
@@ -80,6 +90,8 @@ class FakePeriodicVideoSource final
rtc::VideoBroadcaster broadcaster_;
cricket::FakeFrameSource frame_source_;
+ rtc::CriticalSection crit_;
+ rtc::VideoSinkWants wants_ RTC_GUARDED_BY(&crit_);
std::unique_ptr<TaskQueueForTest> task_queue_;
};
diff --git a/chromium/third_party/webrtc/pc/test/fake_periodic_video_track_source.h b/chromium/third_party/webrtc/pc/test/fake_periodic_video_track_source.h
index cc406d6d3fc..98a456f2328 100644
--- a/chromium/third_party/webrtc/pc/test/fake_periodic_video_track_source.h
+++ b/chromium/third_party/webrtc/pc/test/fake_periodic_video_track_source.h
@@ -29,6 +29,10 @@ class FakePeriodicVideoTrackSource : public VideoTrackSource {
~FakePeriodicVideoTrackSource() = default;
+ const FakePeriodicVideoSource& fake_periodic_source() const {
+ return source_;
+ }
+
protected:
rtc::VideoSourceInterface<VideoFrame>* source() override { return &source_; }
diff --git a/chromium/third_party/webrtc/pc/test/fake_rtc_certificate_generator.h b/chromium/third_party/webrtc/pc/test/fake_rtc_certificate_generator.h
index 9c43ba97262..b726a4c0ba8 100644
--- a/chromium/third_party/webrtc/pc/test/fake_rtc_certificate_generator.h
+++ b/chromium/third_party/webrtc/pc/test/fake_rtc_certificate_generator.h
@@ -118,7 +118,7 @@ static const rtc::RTCCertificatePEM kEcdsaPems[] = {
class FakeRTCCertificateGenerator
: public rtc::RTCCertificateGeneratorInterface,
- public rtc::MessageHandler {
+ public rtc::MessageHandlerAutoCleanup {
public:
typedef rtc::TypedMessageData<
rtc::scoped_refptr<rtc::RTCCertificateGeneratorCallback> >
diff --git a/chromium/third_party/webrtc/pc/test/mock_channel_interface.h b/chromium/third_party/webrtc/pc/test/mock_channel_interface.h
index 255bd2fceec..2df3baee47e 100644
--- a/chromium/third_party/webrtc/pc/test/mock_channel_interface.h
+++ b/chromium/third_party/webrtc/pc/test/mock_channel_interface.h
@@ -24,25 +24,40 @@ namespace cricket {
// implementation of BaseChannel.
class MockChannelInterface : public cricket::ChannelInterface {
public:
- MOCK_CONST_METHOD0(media_type, cricket::MediaType());
- MOCK_CONST_METHOD0(media_channel, MediaChannel*());
- MOCK_CONST_METHOD0(transport_name, const std::string&());
- MOCK_CONST_METHOD0(content_name, const std::string&());
- MOCK_CONST_METHOD0(enabled, bool());
- MOCK_METHOD1(Enable, bool(bool));
- MOCK_METHOD0(SignalFirstPacketReceived,
- sigslot::signal1<ChannelInterface*>&());
- MOCK_METHOD3(SetLocalContent,
- bool(const cricket::MediaContentDescription*,
- webrtc::SdpType,
- std::string*));
- MOCK_METHOD3(SetRemoteContent,
- bool(const cricket::MediaContentDescription*,
- webrtc::SdpType,
- std::string*));
- MOCK_CONST_METHOD0(local_streams, const std::vector<StreamParams>&());
- MOCK_CONST_METHOD0(remote_streams, const std::vector<StreamParams>&());
- MOCK_METHOD1(SetRtpTransport, bool(webrtc::RtpTransportInternal*));
+ MOCK_METHOD(cricket::MediaType, media_type, (), (const, override));
+ MOCK_METHOD(MediaChannel*, media_channel, (), (const, override));
+ MOCK_METHOD(const std::string&, transport_name, (), (const, override));
+ MOCK_METHOD(const std::string&, content_name, (), (const, override));
+ MOCK_METHOD(bool, enabled, (), (const, override));
+ MOCK_METHOD(bool, Enable, (bool), (override));
+ MOCK_METHOD(sigslot::signal1<ChannelInterface*>&,
+ SignalFirstPacketReceived,
+ (),
+ (override));
+ MOCK_METHOD(bool,
+ SetLocalContent,
+ (const cricket::MediaContentDescription*,
+ webrtc::SdpType,
+ std::string*),
+ (override));
+ MOCK_METHOD(bool,
+ SetRemoteContent,
+ (const cricket::MediaContentDescription*,
+ webrtc::SdpType,
+ std::string*),
+ (override));
+ MOCK_METHOD(const std::vector<StreamParams>&,
+ local_streams,
+ (),
+ (const, override));
+ MOCK_METHOD(const std::vector<StreamParams>&,
+ remote_streams,
+ (),
+ (const, override));
+ MOCK_METHOD(bool,
+ SetRtpTransport,
+ (webrtc::RtpTransportInternal*),
+ (override));
};
} // namespace cricket
diff --git a/chromium/third_party/webrtc/pc/test/mock_data_channel.h b/chromium/third_party/webrtc/pc/test/mock_data_channel.h
index 3385ec2f75c..bc5f94da5f7 100644
--- a/chromium/third_party/webrtc/pc/test/mock_data_channel.h
+++ b/chromium/third_party/webrtc/pc/test/mock_data_channel.h
@@ -22,15 +22,24 @@ class MockDataChannel : public rtc::RefCountedObject<DataChannel> {
public:
MockDataChannel(int id, DataState state)
: MockDataChannel(id, "MockDataChannel", state, "udp", 0, 0, 0, 0) {}
- MockDataChannel(int id,
- const std::string& label,
- DataState state,
- const std::string& protocol,
- uint32_t messages_sent,
- uint64_t bytes_sent,
- uint32_t messages_received,
- uint64_t bytes_received)
- : rtc::RefCountedObject<DataChannel>(nullptr, cricket::DCT_NONE, label) {
+ MockDataChannel(
+ int id,
+ const std::string& label,
+ DataState state,
+ const std::string& protocol,
+ uint32_t messages_sent,
+ uint64_t bytes_sent,
+ uint32_t messages_received,
+ uint64_t bytes_received,
+ const InternalDataChannelInit& config = InternalDataChannelInit(),
+ rtc::Thread* signaling_thread = rtc::Thread::Current(),
+ rtc::Thread* network_thread = rtc::Thread::Current())
+ : rtc::RefCountedObject<DataChannel>(config,
+ nullptr,
+ cricket::DCT_NONE,
+ label,
+ signaling_thread,
+ network_thread) {
EXPECT_CALL(*this, id()).WillRepeatedly(::testing::Return(id));
EXPECT_CALL(*this, state()).WillRepeatedly(::testing::Return(state));
EXPECT_CALL(*this, protocol()).WillRepeatedly(::testing::Return(protocol));
@@ -43,13 +52,13 @@ class MockDataChannel : public rtc::RefCountedObject<DataChannel> {
EXPECT_CALL(*this, bytes_received())
.WillRepeatedly(::testing::Return(bytes_received));
}
- MOCK_CONST_METHOD0(id, int());
- MOCK_CONST_METHOD0(state, DataState());
- MOCK_CONST_METHOD0(protocol, std::string());
- MOCK_CONST_METHOD0(messages_sent, uint32_t());
- MOCK_CONST_METHOD0(bytes_sent, uint64_t());
- MOCK_CONST_METHOD0(messages_received, uint32_t());
- MOCK_CONST_METHOD0(bytes_received, uint64_t());
+ MOCK_METHOD(int, id, (), (const, override));
+ MOCK_METHOD(DataState, state, (), (const, override));
+ MOCK_METHOD(std::string, protocol, (), (const, override));
+ MOCK_METHOD(uint32_t, messages_sent, (), (const, override));
+ MOCK_METHOD(uint64_t, bytes_sent, (), (const, override));
+ MOCK_METHOD(uint32_t, messages_received, (), (const, override));
+ MOCK_METHOD(uint64_t, bytes_received, (), (const, override));
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/pc/test/mock_delayable.h b/chromium/third_party/webrtc/pc/test/mock_delayable.h
index 548f9f8c0a8..bef07c1970f 100644
--- a/chromium/third_party/webrtc/pc/test/mock_delayable.h
+++ b/chromium/third_party/webrtc/pc/test/mock_delayable.h
@@ -21,9 +21,14 @@ namespace webrtc {
class MockDelayable : public cricket::Delayable {
public:
- MOCK_METHOD2(SetBaseMinimumPlayoutDelayMs, bool(uint32_t ssrc, int delay_ms));
- MOCK_CONST_METHOD1(GetBaseMinimumPlayoutDelayMs,
- absl::optional<int>(uint32_t ssrc));
+ MOCK_METHOD(bool,
+ SetBaseMinimumPlayoutDelayMs,
+ (uint32_t ssrc, int delay_ms),
+ (override));
+ MOCK_METHOD(absl::optional<int>,
+ GetBaseMinimumPlayoutDelayMs,
+ (uint32_t ssrc),
+ (const, override));
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/pc/test/mock_rtp_receiver_internal.h b/chromium/third_party/webrtc/pc/test/mock_rtp_receiver_internal.h
index ffe78b52304..779dcdcf086 100644
--- a/chromium/third_party/webrtc/pc/test/mock_rtp_receiver_internal.h
+++ b/chromium/third_party/webrtc/pc/test/mock_rtp_receiver_internal.h
@@ -24,37 +24,54 @@ namespace webrtc {
class MockRtpReceiverInternal : public RtpReceiverInternal {
public:
// RtpReceiverInterface methods.
- MOCK_METHOD1(SetTrack, void(MediaStreamTrackInterface*));
- MOCK_CONST_METHOD0(track, rtc::scoped_refptr<MediaStreamTrackInterface>());
- MOCK_CONST_METHOD0(dtls_transport,
- rtc::scoped_refptr<DtlsTransportInterface>());
- MOCK_CONST_METHOD0(stream_ids, std::vector<std::string>());
- MOCK_CONST_METHOD0(streams,
- std::vector<rtc::scoped_refptr<MediaStreamInterface>>());
- MOCK_CONST_METHOD0(media_type, cricket::MediaType());
- MOCK_CONST_METHOD0(id, std::string());
- MOCK_CONST_METHOD0(GetParameters, RtpParameters());
- MOCK_METHOD1(SetObserver, void(RtpReceiverObserverInterface*));
- MOCK_METHOD1(SetJitterBufferMinimumDelay, void(absl::optional<double>));
- MOCK_CONST_METHOD0(GetSources, std::vector<RtpSource>());
- MOCK_METHOD1(SetFrameDecryptor,
- void(rtc::scoped_refptr<FrameDecryptorInterface>));
- MOCK_CONST_METHOD0(GetFrameDecryptor,
- rtc::scoped_refptr<FrameDecryptorInterface>());
+ MOCK_METHOD(rtc::scoped_refptr<MediaStreamTrackInterface>,
+ track,
+ (),
+ (const, override));
+ MOCK_METHOD(rtc::scoped_refptr<DtlsTransportInterface>,
+ dtls_transport,
+ (),
+ (const, override));
+ MOCK_METHOD(std::vector<std::string>, stream_ids, (), (const, override));
+ MOCK_METHOD(std::vector<rtc::scoped_refptr<MediaStreamInterface>>,
+ streams,
+ (),
+ (const, override));
+ MOCK_METHOD(cricket::MediaType, media_type, (), (const, override));
+ MOCK_METHOD(std::string, id, (), (const, override));
+ MOCK_METHOD(RtpParameters, GetParameters, (), (const, override));
+ MOCK_METHOD(void, SetObserver, (RtpReceiverObserverInterface*), (override));
+ MOCK_METHOD(void,
+ SetJitterBufferMinimumDelay,
+ (absl::optional<double>),
+ (override));
+ MOCK_METHOD(std::vector<RtpSource>, GetSources, (), (const, override));
+ MOCK_METHOD(void,
+ SetFrameDecryptor,
+ (rtc::scoped_refptr<FrameDecryptorInterface>),
+ (override));
+ MOCK_METHOD(rtc::scoped_refptr<FrameDecryptorInterface>,
+ GetFrameDecryptor,
+ (),
+ (const, override));
// RtpReceiverInternal methods.
- MOCK_METHOD0(Stop, void());
- MOCK_METHOD1(SetMediaChannel, void(cricket::MediaChannel*));
- MOCK_METHOD1(SetupMediaChannel, void(uint32_t));
- MOCK_METHOD0(SetupUnsignaledMediaChannel, void());
- MOCK_CONST_METHOD0(ssrc, uint32_t());
- MOCK_METHOD0(NotifyFirstPacketReceived, void());
- MOCK_METHOD1(set_stream_ids, void(std::vector<std::string>));
- MOCK_METHOD1(set_transport, void(rtc::scoped_refptr<DtlsTransportInterface>));
- MOCK_METHOD1(
- SetStreams,
- void(const std::vector<rtc::scoped_refptr<MediaStreamInterface>>&));
- MOCK_CONST_METHOD0(AttachmentId, int());
+ MOCK_METHOD(void, Stop, (), (override));
+ MOCK_METHOD(void, SetMediaChannel, (cricket::MediaChannel*), (override));
+ MOCK_METHOD(void, SetupMediaChannel, (uint32_t), (override));
+ MOCK_METHOD(void, SetupUnsignaledMediaChannel, (), (override));
+ MOCK_METHOD(uint32_t, ssrc, (), (const, override));
+ MOCK_METHOD(void, NotifyFirstPacketReceived, (), (override));
+ MOCK_METHOD(void, set_stream_ids, (std::vector<std::string>), (override));
+ MOCK_METHOD(void,
+ set_transport,
+ (rtc::scoped_refptr<DtlsTransportInterface>),
+ (override));
+ MOCK_METHOD(void,
+ SetStreams,
+ (const std::vector<rtc::scoped_refptr<MediaStreamInterface>>&),
+ (override));
+ MOCK_METHOD(int, AttachmentId, (), (const, override));
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/pc/test/mock_rtp_sender_internal.h b/chromium/third_party/webrtc/pc/test/mock_rtp_sender_internal.h
index 2cf0173bd1c..1a31c5dac69 100644
--- a/chromium/third_party/webrtc/pc/test/mock_rtp_sender_internal.h
+++ b/chromium/third_party/webrtc/pc/test/mock_rtp_sender_internal.h
@@ -23,37 +23,65 @@ namespace webrtc {
class MockRtpSenderInternal : public RtpSenderInternal {
public:
// RtpSenderInterface methods.
- MOCK_METHOD1(SetTrack, bool(MediaStreamTrackInterface*));
- MOCK_CONST_METHOD0(track, rtc::scoped_refptr<MediaStreamTrackInterface>());
- MOCK_CONST_METHOD0(ssrc, uint32_t());
- MOCK_CONST_METHOD0(dtls_transport,
- rtc::scoped_refptr<DtlsTransportInterface>());
- MOCK_CONST_METHOD0(media_type, cricket::MediaType());
- MOCK_CONST_METHOD0(id, std::string());
- MOCK_CONST_METHOD0(stream_ids, std::vector<std::string>());
- MOCK_CONST_METHOD0(init_send_encodings, std::vector<RtpEncodingParameters>());
- MOCK_METHOD1(set_transport, void(rtc::scoped_refptr<DtlsTransportInterface>));
- MOCK_CONST_METHOD0(GetParameters, RtpParameters());
- MOCK_CONST_METHOD0(GetParametersInternal, RtpParameters());
- MOCK_METHOD1(SetParameters, RTCError(const RtpParameters&));
- MOCK_METHOD1(SetParametersInternal, RTCError(const RtpParameters&));
- MOCK_CONST_METHOD0(GetDtmfSender, rtc::scoped_refptr<DtmfSenderInterface>());
- MOCK_METHOD1(SetFrameEncryptor,
- void(rtc::scoped_refptr<FrameEncryptorInterface>));
- MOCK_CONST_METHOD0(GetFrameEncryptor,
- rtc::scoped_refptr<FrameEncryptorInterface>());
+ MOCK_METHOD(bool, SetTrack, (MediaStreamTrackInterface*), (override));
+ MOCK_METHOD(rtc::scoped_refptr<MediaStreamTrackInterface>,
+ track,
+ (),
+ (const, override));
+ MOCK_METHOD(uint32_t, ssrc, (), (const, override));
+ MOCK_METHOD(rtc::scoped_refptr<DtlsTransportInterface>,
+ dtls_transport,
+ (),
+ (const, override));
+ MOCK_METHOD(cricket::MediaType, media_type, (), (const, override));
+ MOCK_METHOD(std::string, id, (), (const, override));
+ MOCK_METHOD(std::vector<std::string>, stream_ids, (), (const, override));
+ MOCK_METHOD(std::vector<RtpEncodingParameters>,
+ init_send_encodings,
+ (),
+ (const, override));
+ MOCK_METHOD(void,
+ set_transport,
+ (rtc::scoped_refptr<DtlsTransportInterface>),
+ (override));
+ MOCK_METHOD(RtpParameters, GetParameters, (), (const, override));
+ MOCK_METHOD(RtpParameters, GetParametersInternal, (), (const, override));
+ MOCK_METHOD(RTCError, SetParameters, (const RtpParameters&), (override));
+ MOCK_METHOD(RTCError,
+ SetParametersInternal,
+ (const RtpParameters&),
+ (override));
+ MOCK_METHOD(rtc::scoped_refptr<DtmfSenderInterface>,
+ GetDtmfSender,
+ (),
+ (const, override));
+ MOCK_METHOD(void,
+ SetFrameEncryptor,
+ (rtc::scoped_refptr<FrameEncryptorInterface>),
+ (override));
+ MOCK_METHOD(rtc::scoped_refptr<FrameEncryptorInterface>,
+ GetFrameEncryptor,
+ (),
+ (const, override));
// RtpSenderInternal methods.
- MOCK_METHOD1(SetMediaChannel, void(cricket::MediaChannel*));
- MOCK_METHOD1(SetSsrc, void(uint32_t));
- MOCK_METHOD1(set_stream_ids, void(const std::vector<std::string>&));
- MOCK_METHOD1(SetStreams, void(const std::vector<std::string>&));
- MOCK_METHOD1(set_init_send_encodings,
- void(const std::vector<RtpEncodingParameters>&));
- MOCK_METHOD0(Stop, void());
- MOCK_CONST_METHOD0(AttachmentId, int());
- MOCK_METHOD1(DisableEncodingLayers,
- RTCError(const std::vector<std::string>&));
+ MOCK_METHOD(void, SetMediaChannel, (cricket::MediaChannel*), (override));
+ MOCK_METHOD(void, SetSsrc, (uint32_t), (override));
+ MOCK_METHOD(void,
+ set_stream_ids,
+ (const std::vector<std::string>&),
+ (override));
+ MOCK_METHOD(void, SetStreams, (const std::vector<std::string>&), (override));
+ MOCK_METHOD(void,
+ set_init_send_encodings,
+ (const std::vector<RtpEncodingParameters>&),
+ (override));
+ MOCK_METHOD(void, Stop, (), (override));
+ MOCK_METHOD(int, AttachmentId, (), (const, override));
+ MOCK_METHOD(RTCError,
+ DisableEncodingLayers,
+ (const std::vector<std::string>&),
+ (override));
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/pc/test/peer_connection_test_wrapper.cc b/chromium/third_party/webrtc/pc/test/peer_connection_test_wrapper.cc
index 4f0d72e6675..946f459f3b7 100644
--- a/chromium/third_party/webrtc/pc/test/peer_connection_test_wrapper.cc
+++ b/chromium/third_party/webrtc/pc/test/peer_connection_test_wrapper.cc
@@ -80,7 +80,8 @@ PeerConnectionTestWrapper::PeerConnectionTestWrapper(
rtc::Thread* worker_thread)
: name_(name),
network_thread_(network_thread),
- worker_thread_(worker_thread) {
+ worker_thread_(worker_thread),
+ pending_negotiation_(false) {
pc_thread_checker_.Detach();
}
@@ -135,6 +136,17 @@ PeerConnectionTestWrapper::CreateDataChannel(
return peer_connection_->CreateDataChannel(label, &init);
}
+void PeerConnectionTestWrapper::WaitForNegotiation() {
+ EXPECT_TRUE_WAIT(!pending_negotiation_, kMaxWait);
+}
+
+void PeerConnectionTestWrapper::OnSignalingChange(
+ webrtc::PeerConnectionInterface::SignalingState new_state) {
+ if (new_state == webrtc::PeerConnectionInterface::SignalingState::kStable) {
+ pending_negotiation_ = false;
+ }
+}
+
void PeerConnectionTestWrapper::OnAddTrack(
rtc::scoped_refptr<RtpReceiverInterface> receiver,
const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams) {
@@ -182,6 +194,7 @@ void PeerConnectionTestWrapper::OnSuccess(SessionDescriptionInterface* desc) {
void PeerConnectionTestWrapper::CreateOffer(
const webrtc::PeerConnectionInterface::RTCOfferAnswerOptions& options) {
RTC_LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_ << ": CreateOffer.";
+ pending_negotiation_ = true;
peer_connection_->CreateOffer(this, options);
}
@@ -189,6 +202,7 @@ void PeerConnectionTestWrapper::CreateAnswer(
const webrtc::PeerConnectionInterface::RTCOfferAnswerOptions& options) {
RTC_LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_
<< ": CreateAnswer.";
+ pending_negotiation_ = true;
peer_connection_->CreateAnswer(this, options);
}
diff --git a/chromium/third_party/webrtc/pc/test/peer_connection_test_wrapper.h b/chromium/third_party/webrtc/pc/test/peer_connection_test_wrapper.h
index 2dc88e93095..92599b78ab9 100644
--- a/chromium/third_party/webrtc/pc/test/peer_connection_test_wrapper.h
+++ b/chromium/third_party/webrtc/pc/test/peer_connection_test_wrapper.h
@@ -49,15 +49,21 @@ class PeerConnectionTestWrapper
rtc::scoped_refptr<webrtc::AudioEncoderFactory> audio_encoder_factory,
rtc::scoped_refptr<webrtc::AudioDecoderFactory> audio_decoder_factory);
+ rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> pc_factory()
+ const {
+ return peer_connection_factory_;
+ }
webrtc::PeerConnectionInterface* pc() { return peer_connection_.get(); }
rtc::scoped_refptr<webrtc::DataChannelInterface> CreateDataChannel(
const std::string& label,
const webrtc::DataChannelInit& init);
+ void WaitForNegotiation();
+
// Implements PeerConnectionObserver.
void OnSignalingChange(
- webrtc::PeerConnectionInterface::SignalingState new_state) override {}
+ webrtc::PeerConnectionInterface::SignalingState new_state) override;
void OnAddTrack(
rtc::scoped_refptr<webrtc::RtpReceiverInterface> receiver,
const std::vector<rtc::scoped_refptr<webrtc::MediaStreamInterface>>&
@@ -121,6 +127,7 @@ class PeerConnectionTestWrapper
rtc::scoped_refptr<FakeAudioCaptureModule> fake_audio_capture_module_;
std::unique_ptr<webrtc::FakeVideoTrackRenderer> renderer_;
int num_get_user_media_calls_ = 0;
+ bool pending_negotiation_;
};
#endif // PC_TEST_PEER_CONNECTION_TEST_WRAPPER_H_
diff --git a/chromium/third_party/webrtc/pc/track_media_info_map_unittest.cc b/chromium/third_party/webrtc/pc/track_media_info_map_unittest.cc
index c487ab8f816..0cb1e0e2777 100644
--- a/chromium/third_party/webrtc/pc/track_media_info_map_unittest.cc
+++ b/chromium/third_party/webrtc/pc/track_media_info_map_unittest.cc
@@ -83,19 +83,21 @@ rtc::scoped_refptr<MockRtpReceiverInternal> CreateMockRtpReceiver(
class TrackMediaInfoMapTest : public ::testing::Test {
public:
- TrackMediaInfoMapTest()
+ TrackMediaInfoMapTest() : TrackMediaInfoMapTest(true) {}
+
+ explicit TrackMediaInfoMapTest(bool use_current_thread)
: voice_media_info_(new cricket::VoiceMediaInfo()),
video_media_info_(new cricket::VideoMediaInfo()),
local_audio_track_(AudioTrack::Create("LocalAudioTrack", nullptr)),
remote_audio_track_(AudioTrack::Create("RemoteAudioTrack", nullptr)),
- local_video_track_(
- VideoTrack::Create("LocalVideoTrack",
- FakeVideoTrackSource::Create(false),
- rtc::Thread::Current())),
- remote_video_track_(
- VideoTrack::Create("RemoteVideoTrack",
- FakeVideoTrackSource::Create(false),
- rtc::Thread::Current())) {}
+ local_video_track_(VideoTrack::Create(
+ "LocalVideoTrack",
+ FakeVideoTrackSource::Create(false),
+ use_current_thread ? rtc::Thread::Current() : nullptr)),
+ remote_video_track_(VideoTrack::Create(
+ "RemoteVideoTrack",
+ FakeVideoTrackSource::Create(false),
+ use_current_thread ? rtc::Thread::Current() : nullptr)) {}
~TrackMediaInfoMapTest() {
// If we have a map the ownership has been passed to the map, only delete if
@@ -417,7 +419,10 @@ TEST_F(TrackMediaInfoMapTest, GetAttachmentIdByTrack) {
// base/test/gtest_util.h.
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-class TrackMediaInfoMapDeathTest : public TrackMediaInfoMapTest {};
+class TrackMediaInfoMapDeathTest : public TrackMediaInfoMapTest {
+ public:
+ TrackMediaInfoMapDeathTest() : TrackMediaInfoMapTest(false) {}
+};
TEST_F(TrackMediaInfoMapDeathTest, MultipleOneSsrcReceiversPerTrack) {
AddRtpReceiverWithSsrcs({1}, remote_audio_track_);
diff --git a/chromium/third_party/webrtc/pc/video_rtp_receiver_unittest.cc b/chromium/third_party/webrtc/pc/video_rtp_receiver_unittest.cc
index c4b7b8205d0..b3eb6e6e355 100644
--- a/chromium/third_party/webrtc/pc/video_rtp_receiver_unittest.cc
+++ b/chromium/third_party/webrtc/pc/video_rtp_receiver_unittest.cc
@@ -32,16 +32,20 @@ class VideoRtpReceiverTest : public testing::Test {
MockVideoMediaChannel(cricket::FakeVideoEngine* engine,
const cricket::VideoOptions& options)
: FakeVideoMediaChannel(engine, options) {}
- MOCK_METHOD2(SetRecordableEncodedFrameCallback,
- void(uint32_t,
- std::function<void(const RecordableEncodedFrame&)>));
- MOCK_METHOD1(ClearRecordableEncodedFrameCallback, void(uint32_t));
- MOCK_METHOD1(GenerateKeyFrame, void(uint32_t));
+ MOCK_METHOD(void,
+ SetRecordableEncodedFrameCallback,
+ (uint32_t, std::function<void(const RecordableEncodedFrame&)>),
+ (override));
+ MOCK_METHOD(void,
+ ClearRecordableEncodedFrameCallback,
+ (uint32_t),
+ (override));
+ MOCK_METHOD(void, GenerateKeyFrame, (uint32_t), (override));
};
class MockVideoSink : public rtc::VideoSinkInterface<RecordableEncodedFrame> {
public:
- MOCK_METHOD1(OnFrame, void(const RecordableEncodedFrame&));
+ MOCK_METHOD(void, OnFrame, (const RecordableEncodedFrame&), (override));
};
VideoRtpReceiverTest()
diff --git a/chromium/third_party/webrtc/pc/video_rtp_track_source_unittest.cc b/chromium/third_party/webrtc/pc/video_rtp_track_source_unittest.cc
index ed26db36549..ea1b4cacf87 100644
--- a/chromium/third_party/webrtc/pc/video_rtp_track_source_unittest.cc
+++ b/chromium/third_party/webrtc/pc/video_rtp_track_source_unittest.cc
@@ -19,13 +19,13 @@ namespace {
class MockCallback : public VideoRtpTrackSource::Callback {
public:
- MOCK_METHOD0(OnGenerateKeyFrame, void());
- MOCK_METHOD1(OnEncodedSinkEnabled, void(bool));
+ MOCK_METHOD(void, OnGenerateKeyFrame, (), (override));
+ MOCK_METHOD(void, OnEncodedSinkEnabled, (bool), (override));
};
class MockSink : public rtc::VideoSinkInterface<RecordableEncodedFrame> {
public:
- MOCK_METHOD1(OnFrame, void(const RecordableEncodedFrame&));
+ MOCK_METHOD(void, OnFrame, (const RecordableEncodedFrame&), (override));
};
rtc::scoped_refptr<VideoRtpTrackSource> MakeSource(
diff --git a/chromium/third_party/webrtc/pc/webrtc_sdp.cc b/chromium/third_party/webrtc/pc/webrtc_sdp.cc
index f77327faf1e..90a00c1479f 100644
--- a/chromium/third_party/webrtc/pc/webrtc_sdp.cc
+++ b/chromium/third_party/webrtc/pc/webrtc_sdp.cc
@@ -229,12 +229,6 @@ static const char kApplicationSpecificMaximum[] = "AS";
static const char kDefaultSctpmapProtocol[] = "webrtc-datachannel";
-// This is a non-standardized setting for plugin transports.
-static const char kOpaqueTransportParametersLine[] = "x-opaque";
-
-// This is a non-standardized setting for plugin transports.
-static const char kAltProtocolLine[] = "x-alt-protocol";
-
// RTP payload type is in the 0-127 range. Use -1 to indicate "all" payload
// types.
const int kWildcardPayloadType = -1;
@@ -523,25 +517,6 @@ static void InitAttrLine(const std::string& attribute, rtc::StringBuilder* os) {
InitLine(kLineTypeAttributes, attribute, os);
}
-// Adds an x-otp SDP attribute line based on opaque transport parameters.
-static void AddOpaqueTransportLine(
- const cricket::OpaqueTransportParameters params,
- std::string* message) {
- rtc::StringBuilder os;
- InitAttrLine(kOpaqueTransportParametersLine, &os);
- os << kSdpDelimiterColon << params.protocol << kSdpDelimiterColon
- << rtc::Base64::Encode(params.parameters);
- AddLine(os.str(), message);
-}
-
-static void AddAltProtocolLine(const std::string& protocol,
- std::string* message) {
- rtc::StringBuilder os;
- InitAttrLine(kAltProtocolLine, &os);
- os << kSdpDelimiterColon << protocol;
- AddLine(os.str(), message);
-}
-
// Writes a SDP attribute line based on |attribute| and |value| to |message|.
static void AddAttributeLine(const std::string& attribute,
int value,
@@ -1532,15 +1507,6 @@ void BuildMediaDescription(const ContentInfo* content_info,
AddLine(os.str(), message);
}
}
-
- if (transport_info->description.opaque_parameters) {
- AddOpaqueTransportLine(*transport_info->description.opaque_parameters,
- message);
- }
- }
-
- if (media_desc->alt_protocol()) {
- AddAltProtocolLine(*media_desc->alt_protocol(), message);
}
// RFC 3388
@@ -2105,32 +2071,6 @@ bool ParseConnectionData(const std::string& line,
return true;
}
-bool ParseOpaqueTransportLine(const std::string& line,
- std::string* protocol,
- std::string* transport_parameters,
- SdpParseError* error) {
- std::string value;
- if (!GetValue(line, kOpaqueTransportParametersLine, &value, error)) {
- return false;
- }
- std::string tmp_parameters;
- if (!rtc::tokenize_first(value, kSdpDelimiterColonChar, protocol,
- &tmp_parameters)) {
- return ParseFailedGetValue(line, kOpaqueTransportParametersLine, error);
- }
- if (!rtc::Base64::Decode(tmp_parameters, rtc::Base64::DO_STRICT,
- transport_parameters, nullptr)) {
- return ParseFailedGetValue(line, kOpaqueTransportParametersLine, error);
- }
- return true;
-}
-
-bool ParseAltProtocolLine(const std::string& line,
- std::string* protocol,
- SdpParseError* error) {
- return GetValue(line, kAltProtocolLine, protocol, error);
-}
-
bool ParseSessionDescription(const std::string& message,
size_t* pos,
std::string* session_id,
@@ -3137,19 +3077,6 @@ bool ParseContent(const std::string& message,
if (!ParseIceOptions(line, &transport->transport_options, error)) {
return false;
}
- } else if (HasAttribute(line, kOpaqueTransportParametersLine)) {
- transport->opaque_parameters = cricket::OpaqueTransportParameters();
- if (!ParseOpaqueTransportLine(
- line, &transport->opaque_parameters->protocol,
- &transport->opaque_parameters->parameters, error)) {
- return false;
- }
- } else if (HasAttribute(line, kAltProtocolLine)) {
- std::string alt_protocol;
- if (!ParseAltProtocolLine(line, &alt_protocol, error)) {
- return false;
- }
- media_desc->set_alt_protocol(alt_protocol);
} else if (HasAttribute(line, kAttributeFmtp)) {
if (!ParseFmtpAttributes(line, media_type, media_desc, error)) {
return false;
diff --git a/chromium/third_party/webrtc/pc/webrtc_sdp_unittest.cc b/chromium/third_party/webrtc/pc/webrtc_sdp_unittest.cc
index a2ad4b8bdc6..4279de67eea 100644
--- a/chromium/third_party/webrtc/pc/webrtc_sdp_unittest.cc
+++ b/chromium/third_party/webrtc/pc/webrtc_sdp_unittest.cc
@@ -1528,8 +1528,6 @@ class WebRtcSdpTest : public ::testing::Test {
CompareSimulcastDescription(
c1.media_description()->simulcast_description(),
c2.media_description()->simulcast_description());
- EXPECT_EQ(c1.media_description()->alt_protocol(),
- c2.media_description()->alt_protocol());
}
// group
@@ -1584,8 +1582,6 @@ class WebRtcSdpTest : public ::testing::Test {
}
EXPECT_EQ(transport1.description.transport_options,
transport2.description.transport_options);
- EXPECT_EQ(transport1.description.opaque_parameters,
- transport2.description.opaque_parameters);
}
// global attributes
@@ -1679,23 +1675,6 @@ class WebRtcSdpTest : public ::testing::Test {
desc_.AddTransportInfo(transport_info);
}
- void AddOpaqueTransportParameters(const std::string& content_name,
- cricket::OpaqueTransportParameters params) {
- ASSERT_TRUE(desc_.GetTransportInfoByName(content_name) != NULL);
- cricket::TransportInfo info = *(desc_.GetTransportInfoByName(content_name));
- desc_.RemoveTransportInfoByName(content_name);
- info.description.opaque_parameters = params;
- desc_.AddTransportInfo(info);
- }
-
- void AddAltProtocol(const std::string& content_name,
- const std::string& alt_protocol) {
- ASSERT_TRUE(desc_.GetTransportInfoByName(content_name) != NULL);
- cricket::MediaContentDescription* description =
- desc_.GetContentDescriptionByName(content_name);
- description->set_alt_protocol(alt_protocol);
- }
-
void AddFingerprint() {
desc_.RemoveTransportInfoByName(kAudioContentName);
desc_.RemoveTransportInfoByName(kVideoContentName);
@@ -2236,41 +2215,6 @@ TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithIceOptions) {
EXPECT_EQ(sdp_with_ice_options, message);
}
-TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithOpaqueTransportParams) {
- cricket::OpaqueTransportParameters params;
- params.protocol = "foo";
- params.parameters = "test64";
- AddOpaqueTransportParameters(kAudioContentName, params);
- AddOpaqueTransportParameters(kVideoContentName, params);
-
- ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(),
- jdesc_.session_version()));
- std::string message = webrtc::SdpSerialize(jdesc_);
-
- std::string sdp_with_transport_parameters = kSdpFullString;
- InjectAfter(kAttributeIcePwdVoice, "a=x-opaque:foo:dGVzdDY0\r\n",
- &sdp_with_transport_parameters);
- InjectAfter(kAttributeIcePwdVideo, "a=x-opaque:foo:dGVzdDY0\r\n",
- &sdp_with_transport_parameters);
- EXPECT_EQ(message, sdp_with_transport_parameters);
-}
-
-TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithAltProtocol) {
- AddAltProtocol(kAudioContentName, "foo");
- AddAltProtocol(kVideoContentName, "bar");
-
- ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(),
- jdesc_.session_version()));
- std::string message = webrtc::SdpSerialize(jdesc_);
-
- std::string sdp_with_alt_protocol = kSdpFullString;
- InjectAfter(kAttributeIcePwdVoice, "a=x-alt-protocol:foo\r\n",
- &sdp_with_alt_protocol);
- InjectAfter(kAttributeIcePwdVideo, "a=x-alt-protocol:bar\r\n",
- &sdp_with_alt_protocol);
- EXPECT_EQ(message, sdp_with_alt_protocol);
-}
-
TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithRecvOnlyContent) {
EXPECT_TRUE(TestSerializeDirection(RtpTransceiverDirection::kRecvOnly));
}
@@ -2445,8 +2389,6 @@ TEST_F(WebRtcSdpTest, SerializeHostnameCandidate) {
EXPECT_EQ(std::string(kRawHostnameCandidate), message);
}
-// TODO(mallinath) : Enable this test once WebRTCSdp capable of parsing
-// RFC 6544.
TEST_F(WebRtcSdpTest, SerializeTcpCandidates) {
Candidate candidate(ICE_CANDIDATE_COMPONENT_RTP, "tcp",
rtc::SocketAddress("192.168.1.5", 9), kCandidatePriority,
@@ -2685,48 +2627,6 @@ TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithIceOptions) {
EXPECT_TRUE(CompareSessionDescription(jdesc_, jdesc_with_ice_options));
}
-TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithOpaqueTransportParams) {
- std::string sdp_with_transport_parameters = kSdpFullString;
- InjectAfter(kAttributeIcePwdVoice, "a=x-opaque:foo:dGVzdDY0\r\n",
- &sdp_with_transport_parameters);
- InjectAfter(kAttributeIcePwdVideo, "a=x-opaque:foo:dGVzdDY0\r\n",
- &sdp_with_transport_parameters);
-
- JsepSessionDescription jdesc_with_transport_parameters(kDummyType);
- EXPECT_TRUE(SdpDeserialize(sdp_with_transport_parameters,
- &jdesc_with_transport_parameters));
-
- cricket::OpaqueTransportParameters params;
- params.protocol = "foo";
- params.parameters = "test64";
-
- AddOpaqueTransportParameters(kAudioContentName, params);
- AddOpaqueTransportParameters(kVideoContentName, params);
-
- ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(),
- jdesc_.session_version()));
- EXPECT_TRUE(
- CompareSessionDescription(jdesc_, jdesc_with_transport_parameters));
-}
-
-TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithAltProtocol) {
- std::string sdp_with_alt_protocol = kSdpFullString;
- InjectAfter(kAttributeIcePwdVoice, "a=x-alt-protocol:foo\r\n",
- &sdp_with_alt_protocol);
- InjectAfter(kAttributeIcePwdVideo, "a=x-alt-protocol:bar\r\n",
- &sdp_with_alt_protocol);
-
- JsepSessionDescription jdesc_with_alt_protocol(kDummyType);
- EXPECT_TRUE(SdpDeserialize(sdp_with_alt_protocol, &jdesc_with_alt_protocol));
-
- AddAltProtocol(kAudioContentName, "foo");
- AddAltProtocol(kVideoContentName, "bar");
-
- ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(),
- jdesc_.session_version()));
- EXPECT_TRUE(CompareSessionDescription(jdesc_, jdesc_with_alt_protocol));
-}
-
TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithUfragPwd) {
// Remove the original ice-ufrag and ice-pwd
JsepSessionDescription jdesc_with_ufrag_pwd(kDummyType);
diff --git a/chromium/third_party/webrtc/pc/webrtc_session_description_factory.cc b/chromium/third_party/webrtc/pc/webrtc_session_description_factory.cc
index aaef7fdeb67..d95174ec445 100644
--- a/chromium/third_party/webrtc/pc/webrtc_session_description_factory.cc
+++ b/chromium/third_party/webrtc/pc/webrtc_session_description_factory.cc
@@ -130,7 +130,8 @@ WebRtcSessionDescriptionFactory::WebRtcSessionDescriptionFactory(
std::unique_ptr<rtc::RTCCertificateGeneratorInterface> cert_generator,
const rtc::scoped_refptr<rtc::RTCCertificate>& certificate,
UniqueRandomIdGenerator* ssrc_generator)
- : signaling_thread_(signaling_thread),
+ : MessageHandler(false),
+ signaling_thread_(signaling_thread),
session_desc_factory_(channel_manager,
&transport_desc_factory_,
ssrc_generator),
diff --git a/chromium/third_party/webrtc/rtc_base/BUILD.gn b/chromium/third_party/webrtc/rtc_base/BUILD.gn
index a61ede4ac98..c62b3f6afe7 100644
--- a/chromium/third_party/webrtc/rtc_base/BUILD.gn
+++ b/chromium/third_party/webrtc/rtc_base/BUILD.gn
@@ -59,8 +59,8 @@ rtc_library("rtc_base_approved") {
"system:rtc_export",
"system:unused",
"third_party/base64",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
public_deps = [] # no-presubmit-check TODO(webrtc:8603)
sources = [
@@ -154,6 +154,7 @@ rtc_library("platform_thread_types") {
"platform_thread_types.cc",
"platform_thread_types.h",
]
+ deps = [ ":macromagic" ]
}
rtc_source_set("refcount") {
@@ -176,6 +177,7 @@ rtc_library("criticalsection") {
":checks",
":macromagic",
":platform_thread_types",
+ "synchronization:yield",
"system:rtc_export",
"system:unused",
]
@@ -187,6 +189,7 @@ rtc_library("platform_thread") {
":rtc_task_queue_libevent",
":rtc_task_queue_win",
":rtc_task_queue_stdlib",
+ "synchronization:mutex",
"synchronization:sequence_checker",
]
sources = [
@@ -201,8 +204,8 @@ rtc_library("platform_thread") {
":rtc_event",
":thread_checker",
":timeutils",
- "//third_party/abseil-cpp/absl/strings",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
rtc_library("rtc_event") {
@@ -225,8 +228,8 @@ rtc_library("rtc_event") {
":checks",
"synchronization:yield_policy",
"system:warn_current_thread_is_deadlocked",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
}
@@ -240,6 +243,9 @@ rtc_library("logging") {
":platform_thread_types",
":stringutils",
":timeutils",
+ "synchronization:mutex",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/base:core_headers",
"//third_party/abseil-cpp/absl/meta:type_traits",
"//third_party/abseil-cpp/absl/strings",
@@ -301,6 +307,8 @@ rtc_library("checks") {
":safe_compare",
"system:inline",
"system:rtc_export",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/meta:type_traits",
"//third_party/abseil-cpp/absl/strings",
]
@@ -317,13 +325,13 @@ rtc_library("rate_limiter") {
deps = [
":rtc_base_approved",
"../system_wrappers",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_source_set("sanitizer") {
sources = [ "sanitizer.h" ]
- deps = [ "//third_party/abseil-cpp/absl/meta:type_traits" ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/meta:type_traits" ]
}
rtc_source_set("bounded_inline_vector") {
@@ -398,6 +406,8 @@ rtc_library("stringutils") {
":macromagic",
":safe_minmax",
"../api:array_view",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -433,8 +443,8 @@ rtc_library("rtc_task_queue") {
"../api/task_queue",
"system:rtc_export",
"task_utils:to_queued_task",
- "//third_party/abseil-cpp/absl/memory",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
}
rtc_source_set("rtc_operations_chain") {
@@ -469,6 +479,8 @@ if (rtc_enable_libevent) {
":safe_conversions",
":timeutils",
"../api/task_queue",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/container:inlined_vector",
"//third_party/abseil-cpp/absl/strings",
]
@@ -490,8 +502,8 @@ if (is_mac || is_ios) {
":logging",
"../api/task_queue",
"system:gcd_helpers",
- "//third_party/abseil-cpp/absl/strings",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
}
@@ -512,8 +524,8 @@ if (is_win) {
":safe_conversions",
":timeutils",
"../api/task_queue",
- "//third_party/abseil-cpp/absl/strings",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
}
@@ -532,8 +544,8 @@ rtc_library("rtc_task_queue_stdlib") {
":safe_conversions",
":timeutils",
"../api/task_queue",
- "//third_party/abseil-cpp/absl/strings",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
rtc_library("weak_ptr") {
@@ -576,6 +588,8 @@ rtc_library("rtc_numerics") {
"../api/units:data_rate",
"../api/units:time_delta",
"../api/units:timestamp",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -760,6 +774,7 @@ rtc_library("rtc_base") {
deps = [
":checks",
":deprecation",
+ ":rtc_task_queue",
":stringutils",
"../api:array_view",
"../api:function_view",
@@ -767,12 +782,16 @@ rtc_library("rtc_base") {
"../api/task_queue",
"../system_wrappers:field_trial",
"network:sent_packet",
+ "synchronization:sequence_checker",
"system:file_wrapper",
"system:inline",
"system:rtc_export",
+ "task_utils:pending_task_safety_flag",
"task_utils:to_queued_task",
"third_party/base64",
"third_party/sigslot",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings",
@@ -801,6 +820,8 @@ rtc_library("rtc_base") {
"crypt_string.h",
"data_rate_limiter.cc",
"data_rate_limiter.h",
+ "deprecated/signal_thread.cc",
+ "deprecated/signal_thread.h",
"dscp.h",
"file_rotating_stream.cc",
"file_rotating_stream.h",
@@ -853,7 +874,6 @@ rtc_library("rtc_base") {
"rtc_certificate.h",
"rtc_certificate_generator.cc",
"rtc_certificate_generator.h",
- "signal_thread.cc",
"signal_thread.h",
"sigslot_repeater.h",
"socket.cc",
@@ -1000,8 +1020,8 @@ rtc_library("gunit_helpers") {
":rtc_base_tests_utils",
":stringutils",
"../test:test_support",
- "//third_party/abseil-cpp/absl/strings",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
rtc_library("testclient") {
@@ -1066,6 +1086,8 @@ rtc_library("rtc_base_tests_utils") {
"../api/units:timestamp",
"memory:fifo_buffer",
"third_party/sigslot",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/memory",
]
@@ -1087,8 +1109,8 @@ rtc_library("task_queue_for_test") {
"../api/task_queue",
"../api/task_queue:default_task_queue_factory",
"task_utils:to_queued_task",
- "//third_party/abseil-cpp/absl/strings",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
if (rtc_include_tests) {
@@ -1128,8 +1150,8 @@ if (rtc_include_tests) {
"../test:test_support",
"third_party/sigslot",
"//testing/gtest",
- "//third_party/abseil-cpp/absl/memory",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
if (is_win) {
sources += [ "win32_socket_server_unittest.cc" ]
}
@@ -1211,6 +1233,8 @@ if (rtc_include_tests) {
"task_utils:to_queued_task",
"third_party/base64",
"third_party/sigslot",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/base:core_headers",
"//third_party/abseil-cpp/absl/memory",
]
@@ -1228,8 +1252,8 @@ if (rtc_include_tests) {
":task_queue_for_test",
"../test:test_main",
"../test:test_support",
- "//third_party/abseil-cpp/absl/memory",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
}
rtc_library("rtc_operations_chain_unittests") {
@@ -1279,8 +1303,8 @@ if (rtc_include_tests) {
":rtc_numerics",
"../test:test_main",
"../test:test_support",
- "//third_party/abseil-cpp/absl/algorithm:container",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container" ]
}
rtc_library("rtc_json_unittests") {
@@ -1304,6 +1328,7 @@ if (rtc_include_tests) {
"callback_unittest.cc",
"crc32_unittest.cc",
"data_rate_limiter_unittest.cc",
+ "deprecated/signal_thread_unittest.cc",
"fake_clock_unittest.cc",
"helpers_unittest.cc",
"ip_address_unittest.cc",
@@ -1316,7 +1341,6 @@ if (rtc_include_tests) {
"rolling_accumulator_unittest.cc",
"rtc_certificate_generator_unittest.cc",
"rtc_certificate_unittest.cc",
- "signal_thread_unittest.cc",
"sigslot_tester_unittest.cc",
"test_client_unittest.cc",
"thread_unittest.cc",
@@ -1356,6 +1380,8 @@ if (rtc_include_tests) {
"synchronization:synchronization_unittests",
"task_utils:to_queued_task",
"third_party/sigslot",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings",
diff --git a/chromium/third_party/webrtc/rtc_base/DEPS b/chromium/third_party/webrtc/rtc_base/DEPS
index 679d06dfc89..c9f7dc5898e 100644
--- a/chromium/third_party/webrtc/rtc_base/DEPS
+++ b/chromium/third_party/webrtc/rtc_base/DEPS
@@ -1,8 +1,8 @@
include_rules = [
"+base/third_party/libevent",
"+json",
- "+third_party/jsoncpp",
"+system_wrappers",
+ "+third_party/jsoncpp",
]
specific_include_rules = {
diff --git a/chromium/third_party/webrtc/rtc_base/async_invoker.h b/chromium/third_party/webrtc/rtc_base/async_invoker.h
index f15955d811f..82cdb21a30f 100644
--- a/chromium/third_party/webrtc/rtc_base/async_invoker.h
+++ b/chromium/third_party/webrtc/rtc_base/async_invoker.h
@@ -87,7 +87,7 @@ namespace rtc {
// destruction. This can be done by starting each chain of invocations on the
// same thread on which it will be destroyed, or by using some other
// synchronization method.
-class AsyncInvoker : public MessageHandler {
+class AsyncInvoker : public MessageHandlerAutoCleanup {
public:
AsyncInvoker();
~AsyncInvoker() override;
diff --git a/chromium/third_party/webrtc/rtc_base/bit_buffer_unittest.cc b/chromium/third_party/webrtc/rtc_base/bit_buffer_unittest.cc
index b3521b4951e..441cd264959 100644
--- a/chromium/third_party/webrtc/rtc_base/bit_buffer_unittest.cc
+++ b/chromium/third_party/webrtc/rtc_base/bit_buffer_unittest.cc
@@ -142,7 +142,7 @@ TEST(BitBufferTest, ReadBits) {
EXPECT_FALSE(buffer.ReadBits(&val, 1));
}
-TEST(BitBufferTest, SetOffsetValues) {
+TEST(BitBufferDeathTest, SetOffsetValues) {
uint8_t bytes[4] = {0};
BitBufferWriter buffer(bytes, 4);
diff --git a/chromium/third_party/webrtc/rtc_base/buffer.h b/chromium/third_party/webrtc/rtc_base/buffer.h
index 3048b9179f4..d1639e2f71b 100644
--- a/chromium/third_party/webrtc/rtc_base/buffer.h
+++ b/chromium/third_party/webrtc/rtc_base/buffer.h
@@ -370,7 +370,9 @@ class BufferT {
: capacity;
std::unique_ptr<T[]> new_data(new T[new_capacity]);
- std::memcpy(new_data.get(), data_.get(), size_ * sizeof(T));
+ if (data_ != nullptr) {
+ std::memcpy(new_data.get(), data_.get(), size_ * sizeof(T));
+ }
MaybeZeroCompleteBuffer();
data_ = std::move(new_data);
capacity_ = new_capacity;
diff --git a/chromium/third_party/webrtc/rtc_base/buffer_unittest.cc b/chromium/third_party/webrtc/rtc_base/buffer_unittest.cc
index 3e7396dd2c8..8beae43cf94 100644
--- a/chromium/third_party/webrtc/rtc_base/buffer_unittest.cc
+++ b/chromium/third_party/webrtc/rtc_base/buffer_unittest.cc
@@ -447,7 +447,7 @@ TEST(BufferTest, TestStruct) {
EXPECT_EQ(kObsidian, buf[2].stone);
}
-TEST(BufferTest, DieOnUseAfterMove) {
+TEST(BufferDeathTest, DieOnUseAfterMove) {
Buffer buf(17);
Buffer buf2 = std::move(buf);
EXPECT_EQ(buf2.size(), 17u);
diff --git a/chromium/third_party/webrtc/rtc_base/checks.h b/chromium/third_party/webrtc/rtc_base/checks.h
index 2fde3f6640b..61c074ac822 100644
--- a/chromium/third_party/webrtc/rtc_base/checks.h
+++ b/chromium/third_party/webrtc/rtc_base/checks.h
@@ -69,7 +69,7 @@ RTC_NORETURN void rtc_FatalMessage(const char* file, int line, const char* msg);
// the reason that it's better to terminate might simply be that the error
// handling code isn't in place yet; in production, the reason might be that
// the author of the code truly believes that x will always be true, but that
-// she recognizes that if she is wrong, abrupt and unpleasant process
+// they recognize that if they are wrong, abrupt and unpleasant process
// termination is still better than carrying on with the assumption violated.
//
// RTC_CHECK always evaluates its argument, so it's OK for x to have side
diff --git a/chromium/third_party/webrtc/rtc_base/checks_unittest.cc b/chromium/third_party/webrtc/rtc_base/checks_unittest.cc
index e6e094e5975..91e04cf6a1c 100644
--- a/chromium/third_party/webrtc/rtc_base/checks_unittest.cc
+++ b/chromium/third_party/webrtc/rtc_base/checks_unittest.cc
@@ -19,7 +19,7 @@ TEST(ChecksTest, ExpressionNotEvaluatedWhenCheckPassing) {
}
#if GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST(ChecksTest, Checks) {
+TEST(ChecksDeathTest, Checks) {
#if RTC_CHECK_MSG_ENABLED
EXPECT_DEATH(FATAL() << "message",
"\n\n#\n"
diff --git a/chromium/third_party/webrtc/rtc_base/critical_section.cc b/chromium/third_party/webrtc/rtc_base/critical_section.cc
index 1969edefa50..c6b17ff1b22 100644
--- a/chromium/third_party/webrtc/rtc_base/critical_section.cc
+++ b/chromium/third_party/webrtc/rtc_base/critical_section.cc
@@ -15,6 +15,7 @@
#include "rtc_base/atomic_ops.h"
#include "rtc_base/checks.h"
#include "rtc_base/platform_thread_types.h"
+#include "rtc_base/synchronization/yield.h"
#include "rtc_base/system/unused.h"
// TODO(tommi): Split this file up to per-platform implementation files.
@@ -42,7 +43,7 @@ CriticalSection::CriticalSection() {
pthread_mutexattr_settype(&mutex_attribute, PTHREAD_MUTEX_RECURSIVE);
#if defined(WEBRTC_MAC)
pthread_mutexattr_setpolicy_np(&mutex_attribute,
- _PTHREAD_MUTEX_POLICY_FAIRSHARE);
+ _PTHREAD_MUTEX_POLICY_FIRSTFIT);
#endif
pthread_mutex_init(&mutex_, &mutex_attribute);
pthread_mutexattr_destroy(&mutex_attribute);
@@ -217,19 +218,8 @@ CritScope::~CritScope() {
}
void GlobalLock::Lock() {
-#if !defined(WEBRTC_WIN) && \
- (!defined(WEBRTC_MAC) || RTC_USE_NATIVE_MUTEX_ON_MAC)
- const struct timespec ts_null = {0};
-#endif
-
while (AtomicOps::CompareAndSwap(&lock_acquired_, 0, 1)) {
-#if defined(WEBRTC_WIN)
- ::Sleep(0);
-#elif defined(WEBRTC_MAC) && !RTC_USE_NATIVE_MUTEX_ON_MAC
- sched_yield();
-#else
- nanosleep(&ts_null, nullptr);
-#endif
+ webrtc::YieldCurrentThread();
}
}
diff --git a/chromium/third_party/webrtc/rtc_base/critical_section_unittest.cc b/chromium/third_party/webrtc/rtc_base/critical_section_unittest.cc
index 16aefd27400..3fa48323762 100644
--- a/chromium/third_party/webrtc/rtc_base/critical_section_unittest.cc
+++ b/chromium/third_party/webrtc/rtc_base/critical_section_unittest.cc
@@ -78,7 +78,7 @@ class CompareAndSwapVerifier {
int zero_count_;
};
-class RunnerBase : public MessageHandler {
+class RunnerBase : public MessageHandlerAutoCleanup {
public:
explicit RunnerBase(int value)
: threads_active_(0),
diff --git a/chromium/third_party/webrtc/rtc_base/signal_thread.cc b/chromium/third_party/webrtc/rtc_base/deprecated/signal_thread.cc
index e100fbe179e..96bdd65155a 100644
--- a/chromium/third_party/webrtc/rtc_base/signal_thread.cc
+++ b/chromium/third_party/webrtc/rtc_base/deprecated/signal_thread.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "rtc_base/signal_thread.h"
+#include "rtc_base/deprecated/signal_thread.h"
#include <memory>
@@ -23,26 +23,30 @@ namespace rtc {
// SignalThread
///////////////////////////////////////////////////////////////////////////////
-SignalThread::SignalThread()
+DEPRECATED_SignalThread::DEPRECATED_SignalThread()
: main_(Thread::Current()), worker_(this), state_(kInit), refcount_(1) {
- main_->SignalQueueDestroyed.connect(this,
- &SignalThread::OnMainThreadDestroyed);
+ main_->SignalQueueDestroyed.connect(
+ this, &DEPRECATED_SignalThread::OnMainThreadDestroyed);
worker_.SetName("SignalThread", this);
}
-SignalThread::~SignalThread() {
+DEPRECATED_SignalThread::~DEPRECATED_SignalThread() {
+ rtc::CritScope lock(&cs_);
RTC_DCHECK(refcount_ == 0);
}
-bool SignalThread::SetName(const std::string& name, const void* obj) {
+bool DEPRECATED_SignalThread::SetName(const std::string& name,
+ const void* obj) {
EnterExit ee(this);
+ RTC_DCHECK(!destroy_called_);
RTC_DCHECK(main_->IsCurrent());
RTC_DCHECK(kInit == state_);
return worker_.SetName(name, obj);
}
-void SignalThread::Start() {
+void DEPRECATED_SignalThread::Start() {
EnterExit ee(this);
+ RTC_DCHECK(!destroy_called_);
RTC_DCHECK(main_->IsCurrent());
if (kInit == state_ || kComplete == state_) {
state_ = kRunning;
@@ -53,9 +57,13 @@ void SignalThread::Start() {
}
}
-void SignalThread::Destroy(bool wait) {
+void DEPRECATED_SignalThread::Destroy(bool wait) {
EnterExit ee(this);
- RTC_DCHECK(main_->IsCurrent());
+ // Sometimes the caller can't guarantee which thread will call Destroy, only
+ // that it will be the last thing it does.
+ // RTC_DCHECK(main_->IsCurrent());
+ RTC_DCHECK(!destroy_called_);
+ destroy_called_ = true;
if ((kInit == state_) || (kComplete == state_)) {
refcount_--;
} else if (kRunning == state_ || kReleasing == state_) {
@@ -76,8 +84,9 @@ void SignalThread::Destroy(bool wait) {
}
}
-void SignalThread::Release() {
+void DEPRECATED_SignalThread::Release() {
EnterExit ee(this);
+ RTC_DCHECK(!destroy_called_);
RTC_DCHECK(main_->IsCurrent());
if (kComplete == state_) {
refcount_--;
@@ -89,13 +98,14 @@ void SignalThread::Release() {
}
}
-bool SignalThread::ContinueWork() {
+bool DEPRECATED_SignalThread::ContinueWork() {
EnterExit ee(this);
+ RTC_DCHECK(!destroy_called_);
RTC_DCHECK(worker_.IsCurrent());
return worker_.ProcessMessages(0);
}
-void SignalThread::OnMessage(Message* msg) {
+void DEPRECATED_SignalThread::OnMessage(Message* msg) {
EnterExit ee(this);
if (ST_MSG_WORKER_DONE == msg->message_id) {
RTC_DCHECK(main_->IsCurrent());
@@ -126,21 +136,21 @@ void SignalThread::OnMessage(Message* msg) {
}
}
-SignalThread::Worker::Worker(SignalThread* parent)
+DEPRECATED_SignalThread::Worker::Worker(DEPRECATED_SignalThread* parent)
: Thread(std::make_unique<NullSocketServer>(), /*do_init=*/false),
parent_(parent) {
DoInit();
}
-SignalThread::Worker::~Worker() {
+DEPRECATED_SignalThread::Worker::~Worker() {
Stop();
}
-void SignalThread::Worker::Run() {
+void DEPRECATED_SignalThread::Worker::Run() {
parent_->Run();
}
-void SignalThread::Run() {
+void DEPRECATED_SignalThread::Run() {
DoWork();
{
EnterExit ee(this);
@@ -150,12 +160,12 @@ void SignalThread::Run() {
}
}
-void SignalThread::OnMainThreadDestroyed() {
+void DEPRECATED_SignalThread::OnMainThreadDestroyed() {
EnterExit ee(this);
main_ = nullptr;
}
-bool SignalThread::Worker::IsProcessingMessagesForTesting() {
+bool DEPRECATED_SignalThread::Worker::IsProcessingMessagesForTesting() {
return false;
}
diff --git a/chromium/third_party/webrtc/rtc_base/deprecated/signal_thread.h b/chromium/third_party/webrtc/rtc_base/deprecated/signal_thread.h
new file mode 100644
index 00000000000..e84d4ce5580
--- /dev/null
+++ b/chromium/third_party/webrtc/rtc_base/deprecated/signal_thread.h
@@ -0,0 +1,166 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef RTC_BASE_DEPRECATED_SIGNAL_THREAD_H_
+#define RTC_BASE_DEPRECATED_SIGNAL_THREAD_H_
+
+#include <string>
+
+#include "rtc_base/checks.h"
+#include "rtc_base/constructor_magic.h"
+#include "rtc_base/critical_section.h"
+#include "rtc_base/deprecation.h"
+#include "rtc_base/message_handler.h"
+#include "rtc_base/third_party/sigslot/sigslot.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/thread_annotations.h"
+
+namespace rtc {
+
+///////////////////////////////////////////////////////////////////////////////
+// NOTE: this class has been deprecated. Do not use for new code. New code
+// should use facilities exposed by api/task_queue/ instead.
+//
+// SignalThread - Base class for worker threads. The main thread should call
+// Start() to begin work, and then follow one of these models:
+// Normal: Wait for SignalWorkDone, and then call Release to destroy.
+// Cancellation: Call Release(true), to abort the worker thread.
+// Fire-and-forget: Call Release(false), which allows the thread to run to
+// completion, and then self-destruct without further notification.
+// Periodic tasks: Wait for SignalWorkDone, then eventually call Start()
+// again to repeat the task. When the instance isn't needed anymore,
+// call Release. DoWork, OnWorkStart and OnWorkStop are called again,
+// on a new thread.
+// The subclass should override DoWork() to perform the background task. By
+// periodically calling ContinueWork(), it can check for cancellation.
+// OnWorkStart and OnWorkDone can be overridden to do pre- or post-work
+// tasks in the context of the main thread.
+///////////////////////////////////////////////////////////////////////////////
+
+class DEPRECATED_SignalThread : public sigslot::has_slots<>,
+ protected MessageHandlerAutoCleanup {
+ public:
+ DEPRECATED_SignalThread();
+
+ // Context: Main Thread. Call before Start to change the worker's name.
+ bool SetName(const std::string& name, const void* obj);
+
+ // Context: Main Thread. Call to begin the worker thread.
+ void Start();
+
+ // Context: Main Thread. If the worker thread is not running, deletes the
+ // object immediately. Otherwise, asks the worker thread to abort processing,
+ // and schedules the object to be deleted once the worker exits.
+ // SignalWorkDone will not be signalled. If wait is true, does not return
+ // until the thread is deleted.
+ void Destroy(bool wait);
+
+ // Context: Main Thread. If the worker thread is complete, deletes the
+ // object immediately. Otherwise, schedules the object to be deleted once
+ // the worker thread completes. SignalWorkDone will be signalled.
+ void Release();
+
+ // Context: Main Thread. Signalled when work is complete.
+ sigslot::signal1<DEPRECATED_SignalThread*> SignalWorkDone;
+
+ enum { ST_MSG_WORKER_DONE, ST_MSG_FIRST_AVAILABLE };
+
+ protected:
+ ~DEPRECATED_SignalThread() override;
+
+ Thread* worker() { return &worker_; }
+
+ // Context: Main Thread. Subclass should override to do pre-work setup.
+ virtual void OnWorkStart() {}
+
+ // Context: Worker Thread. Subclass should override to do work.
+ virtual void DoWork() = 0;
+
+ // Context: Worker Thread. Subclass should call periodically to
+ // dispatch messages and determine if the thread should terminate.
+ bool ContinueWork();
+
+ // Context: Worker Thread. Subclass should override when extra work is
+ // needed to abort the worker thread.
+ virtual void OnWorkStop() {}
+
+ // Context: Main Thread. Subclass should override to do post-work cleanup.
+ virtual void OnWorkDone() {}
+
+ // Context: Any Thread. If subclass overrides, be sure to call the base
+ // implementation. Do not use (message_id < ST_MSG_FIRST_AVAILABLE)
+ void OnMessage(Message* msg) override;
+
+ private:
+ enum State {
+ kInit, // Initialized, but not started
+ kRunning, // Started and doing work
+ kReleasing, // Same as running, but to be deleted when work is done
+ kComplete, // Work is done
+ kStopping, // Work is being interrupted
+ };
+
+ class Worker : public Thread {
+ public:
+ explicit Worker(DEPRECATED_SignalThread* parent);
+ ~Worker() override;
+ void Run() override;
+ bool IsProcessingMessagesForTesting() override;
+
+ private:
+ DEPRECATED_SignalThread* parent_;
+
+ RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(Worker);
+ };
+
+ class RTC_SCOPED_LOCKABLE EnterExit {
+ public:
+ explicit EnterExit(DEPRECATED_SignalThread* t)
+ RTC_EXCLUSIVE_LOCK_FUNCTION(t->cs_)
+ : t_(t) {
+ t_->cs_.Enter();
+ // If refcount_ is zero then the object has already been deleted and we
+ // will be double-deleting it in ~EnterExit()! (shouldn't happen)
+ RTC_DCHECK_NE(0, t_->refcount_);
+ ++t_->refcount_;
+ }
+ ~EnterExit() RTC_UNLOCK_FUNCTION() {
+ bool d = (0 == --t_->refcount_);
+ t_->cs_.Leave();
+ if (d)
+ delete t_;
+ }
+
+ private:
+ DEPRECATED_SignalThread* t_;
+
+ RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(EnterExit);
+ };
+
+ void Run();
+ void OnMainThreadDestroyed();
+
+ Thread* main_;
+ Worker worker_;
+ CriticalSection cs_;
+ State state_ RTC_GUARDED_BY(cs_);
+ int refcount_ RTC_GUARDED_BY(cs_);
+ bool destroy_called_ RTC_GUARDED_BY(cs_) = false;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(DEPRECATED_SignalThread);
+};
+
+typedef RTC_DEPRECATED DEPRECATED_SignalThread SignalThread;
+
+///////////////////////////////////////////////////////////////////////////////
+
+} // namespace rtc
+
+#endif // RTC_BASE_DEPRECATED_SIGNAL_THREAD_H_
diff --git a/chromium/third_party/webrtc/rtc_base/signal_thread_unittest.cc b/chromium/third_party/webrtc/rtc_base/deprecated/signal_thread_unittest.cc
index 14761865b8a..c280e970bef 100644
--- a/chromium/third_party/webrtc/rtc_base/signal_thread_unittest.cc
+++ b/chromium/third_party/webrtc/rtc_base/deprecated/signal_thread_unittest.cc
@@ -28,9 +28,9 @@ static const int kTimeout = 10000;
class SignalThreadTest : public ::testing::Test, public sigslot::has_slots<> {
public:
- class SlowSignalThread : public SignalThread {
+ class SlowSignalThread : public DEPRECATED_SignalThread {
public:
- SlowSignalThread(SignalThreadTest* harness) : harness_(harness) {}
+ explicit SlowSignalThread(SignalThreadTest* harness) : harness_(harness) {}
~SlowSignalThread() override {
EXPECT_EQ(harness_->main_thread_, Thread::Current());
@@ -70,7 +70,7 @@ class SignalThreadTest : public ::testing::Test, public sigslot::has_slots<> {
RTC_DISALLOW_COPY_AND_ASSIGN(SlowSignalThread);
};
- void OnWorkComplete(rtc::SignalThread* thread) {
+ void OnWorkComplete(rtc::DEPRECATED_SignalThread* thread) {
SlowSignalThread* t = static_cast<SlowSignalThread*>(thread);
EXPECT_EQ(t->harness(), this);
EXPECT_EQ(main_thread_, Thread::Current());
@@ -157,7 +157,7 @@ class OwnerThread : public Thread, public sigslot::has_slots<> {
rtc::CritScope cs(&crit_);
return has_run_;
}
- void OnWorkDone(SignalThread* /*signal_thread*/) {
+ void OnWorkDone(DEPRECATED_SignalThread* /*signal_thread*/) {
FAIL() << " This shouldn't get called.";
}
diff --git a/chromium/third_party/webrtc/rtc_base/experiments/BUILD.gn b/chromium/third_party/webrtc/rtc_base/experiments/BUILD.gn
index bb3e0ce8ae5..282b5b92709 100644
--- a/chromium/third_party/webrtc/rtc_base/experiments/BUILD.gn
+++ b/chromium/third_party/webrtc/rtc_base/experiments/BUILD.gn
@@ -17,8 +17,8 @@ rtc_library("alr_experiment") {
"../:rtc_base_approved",
"../../api/transport:field_trial_based_config",
"../../api/transport:webrtc_key_value_config",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("field_trial_parser") {
@@ -40,6 +40,8 @@ rtc_library("field_trial_parser") {
"../../rtc_base:logging",
"../../rtc_base:safe_conversions",
"../../rtc_base:stringutils",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings:strings",
"//third_party/abseil-cpp/absl/types:optional",
@@ -57,8 +59,8 @@ rtc_library("quality_rampup_experiment") {
"../../api/transport:field_trial_based_config",
"../../api/transport:webrtc_key_value_config",
"../../system_wrappers:field_trial",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("quality_scaler_settings") {
@@ -72,8 +74,8 @@ rtc_library("quality_scaler_settings") {
"../../api/transport:field_trial_based_config",
"../../api/transport:webrtc_key_value_config",
"../../system_wrappers:field_trial",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("quality_scaling_experiment") {
@@ -85,8 +87,8 @@ rtc_library("quality_scaling_experiment") {
"../:rtc_base_approved",
"../../api/video_codecs:video_codecs_api",
"../../system_wrappers:field_trial",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("normalize_simulcast_size_experiment") {
@@ -97,8 +99,8 @@ rtc_library("normalize_simulcast_size_experiment") {
deps = [
"../:rtc_base_approved",
"../../system_wrappers:field_trial",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("balanced_degradation_settings") {
@@ -111,8 +113,8 @@ rtc_library("balanced_degradation_settings") {
"../:rtc_base_approved",
"../../api/video_codecs:video_codecs_api",
"../../system_wrappers:field_trial",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("cpu_speed_experiment") {
@@ -123,8 +125,8 @@ rtc_library("cpu_speed_experiment") {
deps = [
"../:rtc_base_approved",
"../../system_wrappers:field_trial",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("rtt_mult_experiment") {
@@ -135,8 +137,8 @@ rtc_library("rtt_mult_experiment") {
deps = [
"../:rtc_base_approved",
"../../system_wrappers:field_trial",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("jitter_upper_bound_experiment") {
@@ -147,8 +149,8 @@ rtc_library("jitter_upper_bound_experiment") {
deps = [
"../:rtc_base_approved",
"../../system_wrappers:field_trial",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("rate_control_settings") {
@@ -164,6 +166,8 @@ rtc_library("rate_control_settings") {
"../../api/units:data_size",
"../../api/video_codecs:video_codecs_api",
"../../system_wrappers:field_trial",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -178,8 +182,8 @@ rtc_library("keyframe_interval_settings_experiment") {
":field_trial_parser",
"../../api/transport:field_trial_based_config",
"../../api/transport:webrtc_key_value_config",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("stable_target_rate_experiment") {
@@ -192,8 +196,8 @@ rtc_library("stable_target_rate_experiment") {
":rate_control_settings",
"../../api/transport:field_trial_based_config",
"../../api/transport:webrtc_key_value_config",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("min_video_bitrate_experiment") {
@@ -208,8 +212,8 @@ rtc_library("min_video_bitrate_experiment") {
"../../rtc_base:checks",
"../../rtc_base:logging",
"../../system_wrappers:field_trial",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
if (rtc_include_tests) {
@@ -255,7 +259,7 @@ if (rtc_include_tests) {
"../../test:field_trial",
"../../test:test_main",
"../../test:test_support",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
}
diff --git a/chromium/third_party/webrtc/rtc_base/experiments/quality_rampup_experiment.cc b/chromium/third_party/webrtc/rtc_base/experiments/quality_rampup_experiment.cc
index caf7e623683..ee6675c924d 100644
--- a/chromium/third_party/webrtc/rtc_base/experiments/quality_rampup_experiment.cc
+++ b/chromium/third_party/webrtc/rtc_base/experiments/quality_rampup_experiment.cc
@@ -70,4 +70,8 @@ bool QualityRampupExperiment::BwHigh(int64_t now_ms,
return (now_ms - *start_ms_) >= min_duration_ms_.Value();
}
+bool QualityRampupExperiment::Enabled() const {
+ return min_pixels_ || min_duration_ms_ || max_bitrate_kbps_;
+}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/rtc_base/experiments/quality_rampup_experiment.h b/chromium/third_party/webrtc/rtc_base/experiments/quality_rampup_experiment.h
index ff9d7d38e52..9d469011040 100644
--- a/chromium/third_party/webrtc/rtc_base/experiments/quality_rampup_experiment.h
+++ b/chromium/third_party/webrtc/rtc_base/experiments/quality_rampup_experiment.h
@@ -33,6 +33,8 @@ class QualityRampupExperiment final {
// (max_bitrate_factor_) above |max_bitrate_kbps_| for |min_duration_ms_|.
bool BwHigh(int64_t now_ms, uint32_t available_bw_kbps);
+ bool Enabled() const;
+
private:
explicit QualityRampupExperiment(
const WebRtcKeyValueConfig* const key_value_config);
diff --git a/chromium/third_party/webrtc/rtc_base/fake_network.h b/chromium/third_party/webrtc/rtc_base/fake_network.h
index 040b24205ef..8bd50b69f0e 100644
--- a/chromium/third_party/webrtc/rtc_base/fake_network.h
+++ b/chromium/third_party/webrtc/rtc_base/fake_network.h
@@ -31,7 +31,8 @@ const int kFakeIPv4NetworkPrefixLength = 24;
const int kFakeIPv6NetworkPrefixLength = 64;
// Fake network manager that allows us to manually specify the IPs to use.
-class FakeNetworkManager : public NetworkManagerBase, public MessageHandler {
+class FakeNetworkManager : public NetworkManagerBase,
+ public MessageHandlerAutoCleanup {
public:
FakeNetworkManager() {}
diff --git a/chromium/third_party/webrtc/rtc_base/logging.cc b/chromium/third_party/webrtc/rtc_base/logging.cc
index ff7369dd5c4..bd2afcc9dd1 100644
--- a/chromium/third_party/webrtc/rtc_base/logging.cc
+++ b/chromium/third_party/webrtc/rtc_base/logging.cc
@@ -47,6 +47,7 @@ static const int kMaxLogLineSize = 1024 - 60;
#include "rtc_base/string_encode.h"
#include "rtc_base/string_utils.h"
#include "rtc_base/strings/string_builder.h"
+#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
#include "rtc_base/time_utils.h"
@@ -72,7 +73,9 @@ const char* FilenameFromPath(const char* file) {
}
// Global lock for log subsystem, only needed to serialize access to streams_.
-CriticalSection g_log_crit;
+// TODO(bugs.webrtc.org/11665): this is not currently constant initialized and
+// trivially destructible.
+webrtc::Mutex g_log_mutex_;
} // namespace
/////////////////////////////////////////////////////////////////////////////
@@ -85,7 +88,7 @@ bool LogMessage::log_to_stderr_ = true;
// Note: we explicitly do not clean this up, because of the uncertain ordering
// of destructors at program exit. Let the person who sets the stream trigger
// cleanup by setting to null, or let it leak (safe at program exit).
-ABSL_CONST_INIT LogSink* LogMessage::streams_ RTC_GUARDED_BY(g_log_crit) =
+ABSL_CONST_INIT LogSink* LogMessage::streams_ RTC_GUARDED_BY(g_log_mutex_) =
nullptr;
// Boolean options default to false (0)
@@ -193,7 +196,7 @@ LogMessage::~LogMessage() {
#endif
}
- CritScope cs(&g_log_crit);
+ webrtc::MutexLock lock(&g_log_mutex_);
for (LogSink* entry = streams_; entry != nullptr; entry = entry->next_) {
if (severity_ >= entry->min_severity_) {
#if defined(WEBRTC_ANDROID)
@@ -242,7 +245,7 @@ void LogMessage::LogTimestamps(bool on) {
void LogMessage::LogToDebug(LoggingSeverity min_sev) {
g_dbg_sev = min_sev;
- CritScope cs(&g_log_crit);
+ webrtc::MutexLock lock(&g_log_mutex_);
UpdateMinLogSeverity();
}
@@ -251,7 +254,7 @@ void LogMessage::SetLogToStderr(bool log_to_stderr) {
}
int LogMessage::GetLogToStream(LogSink* stream) {
- CritScope cs(&g_log_crit);
+ webrtc::MutexLock lock(&g_log_mutex_);
LoggingSeverity sev = LS_NONE;
for (LogSink* entry = streams_; entry != nullptr; entry = entry->next_) {
if (stream == nullptr || stream == entry) {
@@ -262,7 +265,7 @@ int LogMessage::GetLogToStream(LogSink* stream) {
}
void LogMessage::AddLogToStream(LogSink* stream, LoggingSeverity min_sev) {
- CritScope cs(&g_log_crit);
+ webrtc::MutexLock lock(&g_log_mutex_);
stream->min_severity_ = min_sev;
stream->next_ = streams_;
streams_ = stream;
@@ -270,7 +273,7 @@ void LogMessage::AddLogToStream(LogSink* stream, LoggingSeverity min_sev) {
}
void LogMessage::RemoveLogToStream(LogSink* stream) {
- CritScope cs(&g_log_crit);
+ webrtc::MutexLock lock(&g_log_mutex_);
for (LogSink** entry = &streams_; *entry != nullptr;
entry = &(*entry)->next_) {
if (*entry == stream) {
@@ -331,7 +334,7 @@ void LogMessage::ConfigureLogging(const char* params) {
}
void LogMessage::UpdateMinLogSeverity()
- RTC_EXCLUSIVE_LOCKS_REQUIRED(g_log_crit) {
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(g_log_mutex_) {
LoggingSeverity min_sev = g_dbg_sev;
for (LogSink* entry = streams_; entry != nullptr; entry = entry->next_) {
min_sev = std::min(min_sev, entry->min_severity_);
@@ -439,7 +442,7 @@ bool LogMessage::IsNoop(LoggingSeverity severity) {
// TODO(tommi): We're grabbing this lock for every LogMessage instance that
// is going to be logged. This introduces unnecessary synchronization for
// a feature that's mostly used for testing.
- CritScope cs(&g_log_crit);
+ webrtc::MutexLock lock(&g_log_mutex_);
return streams_ == nullptr;
}
diff --git a/chromium/third_party/webrtc/rtc_base/message_handler.cc b/chromium/third_party/webrtc/rtc_base/message_handler.cc
index 18a06e241d3..42b4c50b8a3 100644
--- a/chromium/third_party/webrtc/rtc_base/message_handler.cc
+++ b/chromium/third_party/webrtc/rtc_base/message_handler.cc
@@ -15,7 +15,18 @@
namespace rtc {
MessageHandler::~MessageHandler() {
- ThreadManager::Clear(this);
+ if (auto_cleanup_) {
+ // Note that even though this clears currently pending messages for the
+ // message handler, it's still racy since it doesn't prevent threads that
+ // might be in the process of posting new messages with would-be dangling
+ // pointers.
+ // This is related to the design of Message having a raw pointer.
+ // We could consider whether it would be safer to require message handlers
+ // to be reference counted (as some are).
+ ThreadManager::Clear(this);
+ }
}
+MessageHandlerAutoCleanup::~MessageHandlerAutoCleanup() {}
+
} // namespace rtc
diff --git a/chromium/third_party/webrtc/rtc_base/message_handler.h b/chromium/third_party/webrtc/rtc_base/message_handler.h
index 85cb7854851..7b6e682e297 100644
--- a/chromium/third_party/webrtc/rtc_base/message_handler.h
+++ b/chromium/third_party/webrtc/rtc_base/message_handler.h
@@ -21,17 +21,41 @@ namespace rtc {
struct Message;
-// Messages get dispatched to a MessageHandler
+// MessageQueue/Thread Messages get dispatched to a MessageHandler via the
+// |OnMessage()| callback method.
+//
+// Note: Besides being an interface, the class can perform automatic cleanup
+// in the destructor.
+// TODO(bugs.webrtc.org/11908): The |auto_cleanup| parameter and associated
+// logic are a temporary step while changing the MessageHandler class to be
+// a pure virtual interface. The automatic cleanup step involves a number of
+// complex operations and as part of this interface, can easily go unnoticed
+// and bundled into situations where it's not needed.
class RTC_EXPORT MessageHandler {
public:
virtual ~MessageHandler();
virtual void OnMessage(Message* msg) = 0;
protected:
- MessageHandler() {}
+ // TODO(bugs.webrtc.org/11908): The |auto_cleanup| parameter needs to have a
+ // backwards compatible default value while external code is being updated.
+ explicit MessageHandler(bool auto_cleanup = true)
+ : auto_cleanup_(auto_cleanup) {}
private:
RTC_DISALLOW_COPY_AND_ASSIGN(MessageHandler);
+ const bool auto_cleanup_;
+};
+
+class RTC_EXPORT MessageHandlerAutoCleanup : public MessageHandler {
+ public:
+ ~MessageHandlerAutoCleanup() override;
+
+ protected:
+ MessageHandlerAutoCleanup() : MessageHandler(true) {}
+
+ private:
+ RTC_DISALLOW_COPY_AND_ASSIGN(MessageHandlerAutoCleanup);
};
} // namespace rtc
diff --git a/chromium/third_party/webrtc/rtc_base/net_helpers.cc b/chromium/third_party/webrtc/rtc_base/net_helpers.cc
index 6ff3791738b..c6685e2a65c 100644
--- a/chromium/third_party/webrtc/rtc_base/net_helpers.cc
+++ b/chromium/third_party/webrtc/rtc_base/net_helpers.cc
@@ -10,8 +10,6 @@
#include "rtc_base/net_helpers.h"
-#include <memory>
-
#if defined(WEBRTC_WIN)
#include <ws2spi.h>
#include <ws2tcpip.h>
@@ -26,8 +24,11 @@
#endif
#endif // defined(WEBRTC_POSIX) && !defined(__native_client__)
+#include "api/task_queue/task_queue_base.h"
#include "rtc_base/logging.h"
#include "rtc_base/signal_thread.h"
+#include "rtc_base/task_queue.h"
+#include "rtc_base/task_utils/to_queued_task.h"
#include "rtc_base/third_party/sigslot/sigslot.h" // for signal_with_thread...
namespace rtc {
@@ -83,18 +84,35 @@ int ResolveHostname(const std::string& hostname,
#endif // !__native_client__
}
-// AsyncResolver
-AsyncResolver::AsyncResolver() : SignalThread(), error_(-1) {}
+AsyncResolver::AsyncResolver() : error_(-1) {}
-AsyncResolver::~AsyncResolver() = default;
+AsyncResolver::~AsyncResolver() {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+}
void AsyncResolver::Start(const SocketAddress& addr) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ RTC_DCHECK(!destroy_called_);
addr_ = addr;
- // SignalThred Start will kickoff the resolve process.
- SignalThread::Start();
+ webrtc::TaskQueueBase* current_task_queue = webrtc::TaskQueueBase::Current();
+ popup_thread_ = Thread::Create();
+ popup_thread_->Start();
+ popup_thread_->PostTask(webrtc::ToQueuedTask(
+ [this, flag = safety_.flag(), addr, current_task_queue] {
+ std::vector<IPAddress> addresses;
+ int error =
+ ResolveHostname(addr.hostname().c_str(), addr.family(), &addresses);
+ current_task_queue->PostTask(webrtc::ToQueuedTask(
+ std::move(flag), [this, error, addresses = std::move(addresses)] {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ ResolveDone(std::move(addresses), error);
+ }));
+ }));
}
bool AsyncResolver::GetResolvedAddress(int family, SocketAddress* addr) const {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ RTC_DCHECK(!destroy_called_);
if (error_ != 0 || addresses_.empty())
return false;
@@ -109,20 +127,40 @@ bool AsyncResolver::GetResolvedAddress(int family, SocketAddress* addr) const {
}
int AsyncResolver::GetError() const {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ RTC_DCHECK(!destroy_called_);
return error_;
}
void AsyncResolver::Destroy(bool wait) {
- SignalThread::Destroy(wait);
+ // Some callers have trouble guaranteeing that Destroy is called on the
+ // sequence guarded by |sequence_checker_|.
+ // RTC_DCHECK_RUN_ON(&sequence_checker_);
+ RTC_DCHECK(!destroy_called_);
+ destroy_called_ = true;
+ MaybeSelfDestruct();
}
-void AsyncResolver::DoWork() {
- error_ =
- ResolveHostname(addr_.hostname().c_str(), addr_.family(), &addresses_);
+const std::vector<IPAddress>& AsyncResolver::addresses() const {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ RTC_DCHECK(!destroy_called_);
+ return addresses_;
}
-void AsyncResolver::OnWorkDone() {
+void AsyncResolver::ResolveDone(std::vector<IPAddress> addresses, int error) {
+ addresses_ = addresses;
+ error_ = error;
+ recursion_check_ = true;
SignalDone(this);
+ MaybeSelfDestruct();
+}
+
+void AsyncResolver::MaybeSelfDestruct() {
+ if (!recursion_check_) {
+ delete this;
+ } else {
+ recursion_check_ = false;
+ }
}
const char* inet_ntop(int af, const void* src, char* dst, socklen_t size) {
diff --git a/chromium/third_party/webrtc/rtc_base/net_helpers.h b/chromium/third_party/webrtc/rtc_base/net_helpers.h
index 1e06940be74..c6aa4be5b20 100644
--- a/chromium/third_party/webrtc/rtc_base/net_helpers.h
+++ b/chromium/third_party/webrtc/rtc_base/net_helpers.h
@@ -21,16 +21,23 @@
#include "rtc_base/async_resolver_interface.h"
#include "rtc_base/ip_address.h"
-#include "rtc_base/signal_thread.h"
#include "rtc_base/socket_address.h"
+#include "rtc_base/synchronization/sequence_checker.h"
#include "rtc_base/system/rtc_export.h"
+#include "rtc_base/task_utils/pending_task_safety_flag.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/thread_annotations.h"
namespace rtc {
// AsyncResolver will perform async DNS resolution, signaling the result on
// the SignalDone from AsyncResolverInterface when the operation completes.
-class RTC_EXPORT AsyncResolver : public SignalThread,
- public AsyncResolverInterface {
+//
+// This class is thread-compatible, and all methods and destruction needs to
+// happen from the same rtc::Thread, except for Destroy which is allowed to
+// happen on another context provided it's not happening concurrently to another
+// public API call, and is the last access to the object.
+class RTC_EXPORT AsyncResolver : public AsyncResolverInterface {
public:
AsyncResolver();
~AsyncResolver() override;
@@ -40,17 +47,22 @@ class RTC_EXPORT AsyncResolver : public SignalThread,
int GetError() const override;
void Destroy(bool wait) override;
- const std::vector<IPAddress>& addresses() const { return addresses_; }
- void set_error(int error) { error_ = error; }
-
- protected:
- void DoWork() override;
- void OnWorkDone() override;
+ const std::vector<IPAddress>& addresses() const;
private:
- SocketAddress addr_;
- std::vector<IPAddress> addresses_;
- int error_;
+ void ResolveDone(std::vector<IPAddress> addresses, int error)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(sequence_checker_);
+ void MaybeSelfDestruct();
+
+ SocketAddress addr_ RTC_GUARDED_BY(sequence_checker_);
+ std::vector<IPAddress> addresses_ RTC_GUARDED_BY(sequence_checker_);
+ int error_ RTC_GUARDED_BY(sequence_checker_);
+ webrtc::ScopedTaskSafety safety_ RTC_GUARDED_BY(sequence_checker_);
+ std::unique_ptr<Thread> popup_thread_ RTC_GUARDED_BY(sequence_checker_);
+ bool recursion_check_ =
+ false; // Protects against SignalDone calling into Destroy.
+ bool destroy_called_ = false;
+ webrtc::SequenceChecker sequence_checker_;
};
// rtc namespaced wrappers for inet_ntop and inet_pton so we can avoid
diff --git a/chromium/third_party/webrtc/rtc_base/network.h b/chromium/third_party/webrtc/rtc_base/network.h
index a67d2a23392..9cf04de590d 100644
--- a/chromium/third_party/webrtc/rtc_base/network.h
+++ b/chromium/third_party/webrtc/rtc_base/network.h
@@ -217,7 +217,7 @@ class RTC_EXPORT NetworkManagerBase : public NetworkManager {
// Basic implementation of the NetworkManager interface that gets list
// of networks using OS APIs.
class RTC_EXPORT BasicNetworkManager : public NetworkManagerBase,
- public MessageHandler,
+ public MessageHandlerAutoCleanup,
public sigslot::has_slots<> {
public:
BasicNetworkManager();
diff --git a/chromium/third_party/webrtc/rtc_base/network/BUILD.gn b/chromium/third_party/webrtc/rtc_base/network/BUILD.gn
index 1d06defb3bc..35ae3d45f73 100644
--- a/chromium/third_party/webrtc/rtc_base/network/BUILD.gn
+++ b/chromium/third_party/webrtc/rtc_base/network/BUILD.gn
@@ -13,8 +13,6 @@ rtc_library("sent_packet") {
"sent_packet.cc",
"sent_packet.h",
]
- deps = [
- "../system:rtc_export",
- "//third_party/abseil-cpp/absl/types:optional",
- ]
+ deps = [ "../system:rtc_export" ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
diff --git a/chromium/third_party/webrtc/rtc_base/network_monitor.cc b/chromium/third_party/webrtc/rtc_base/network_monitor.cc
index 4eb52901f3c..eb2bce9e2cc 100644
--- a/chromium/third_party/webrtc/rtc_base/network_monitor.cc
+++ b/chromium/third_party/webrtc/rtc_base/network_monitor.cc
@@ -30,8 +30,11 @@ NetworkMonitorInterface::NetworkMonitorInterface() {}
NetworkMonitorInterface::~NetworkMonitorInterface() {}
-NetworkMonitorBase::NetworkMonitorBase() : worker_thread_(Thread::Current()) {}
-NetworkMonitorBase::~NetworkMonitorBase() {}
+NetworkMonitorBase::NetworkMonitorBase()
+ : MessageHandler(false), worker_thread_(Thread::Current()) {}
+NetworkMonitorBase::~NetworkMonitorBase() {
+ worker_thread_->Clear(this);
+}
void NetworkMonitorBase::OnNetworksChanged() {
RTC_LOG(LS_VERBOSE) << "Network change is received at the network monitor";
diff --git a/chromium/third_party/webrtc/rtc_base/null_socket_server_unittest.cc b/chromium/third_party/webrtc/rtc_base/null_socket_server_unittest.cc
index 39c16313b16..a875d6c2846 100644
--- a/chromium/third_party/webrtc/rtc_base/null_socket_server_unittest.cc
+++ b/chromium/third_party/webrtc/rtc_base/null_socket_server_unittest.cc
@@ -25,7 +25,8 @@ namespace rtc {
static const uint32_t kTimeout = 5000U;
-class NullSocketServerTest : public ::testing::Test, public MessageHandler {
+class NullSocketServerTest : public ::testing::Test,
+ public MessageHandlerAutoCleanup {
protected:
void OnMessage(Message* message) override { ss_.WakeUp(); }
diff --git a/chromium/third_party/webrtc/rtc_base/openssl_adapter.h b/chromium/third_party/webrtc/rtc_base/openssl_adapter.h
index 0e76836bafa..6f1f7dccabd 100644
--- a/chromium/third_party/webrtc/rtc_base/openssl_adapter.h
+++ b/chromium/third_party/webrtc/rtc_base/openssl_adapter.h
@@ -32,7 +32,8 @@
namespace rtc {
-class OpenSSLAdapter final : public SSLAdapter, public MessageHandler {
+class OpenSSLAdapter final : public SSLAdapter,
+ public MessageHandlerAutoCleanup {
public:
static bool InitializeSSL();
static bool CleanupSSL();
diff --git a/chromium/third_party/webrtc/rtc_base/openssl_adapter_unittest.cc b/chromium/third_party/webrtc/rtc_base/openssl_adapter_unittest.cc
index b161304d652..4bd87992d4d 100644
--- a/chromium/third_party/webrtc/rtc_base/openssl_adapter_unittest.cc
+++ b/chromium/third_party/webrtc/rtc_base/openssl_adapter_unittest.cc
@@ -25,28 +25,34 @@ namespace {
class MockAsyncSocket : public AsyncSocket {
public:
virtual ~MockAsyncSocket() = default;
- MOCK_METHOD1(Accept, AsyncSocket*(SocketAddress*));
- MOCK_CONST_METHOD0(GetLocalAddress, SocketAddress());
- MOCK_CONST_METHOD0(GetRemoteAddress, SocketAddress());
- MOCK_METHOD1(Bind, int(const SocketAddress&));
- MOCK_METHOD1(Connect, int(const SocketAddress&));
- MOCK_METHOD2(Send, int(const void*, size_t));
- MOCK_METHOD3(SendTo, int(const void*, size_t, const SocketAddress&));
- MOCK_METHOD3(Recv, int(void*, size_t, int64_t*));
- MOCK_METHOD4(RecvFrom, int(void*, size_t, SocketAddress*, int64_t*));
- MOCK_METHOD1(Listen, int(int));
- MOCK_METHOD0(Close, int());
- MOCK_CONST_METHOD0(GetError, int());
- MOCK_METHOD1(SetError, void(int));
- MOCK_CONST_METHOD0(GetState, ConnState());
- MOCK_METHOD2(GetOption, int(Option, int*));
- MOCK_METHOD2(SetOption, int(Option, int));
+ MOCK_METHOD(AsyncSocket*, Accept, (SocketAddress*), (override));
+ MOCK_METHOD(SocketAddress, GetLocalAddress, (), (const, override));
+ MOCK_METHOD(SocketAddress, GetRemoteAddress, (), (const, override));
+ MOCK_METHOD(int, Bind, (const SocketAddress&), (override));
+ MOCK_METHOD(int, Connect, (const SocketAddress&), (override));
+ MOCK_METHOD(int, Send, (const void*, size_t), (override));
+ MOCK_METHOD(int,
+ SendTo,
+ (const void*, size_t, const SocketAddress&),
+ (override));
+ MOCK_METHOD(int, Recv, (void*, size_t, int64_t*), (override));
+ MOCK_METHOD(int,
+ RecvFrom,
+ (void*, size_t, SocketAddress*, int64_t*),
+ (override));
+ MOCK_METHOD(int, Listen, (int), (override));
+ MOCK_METHOD(int, Close, (), (override));
+ MOCK_METHOD(int, GetError, (), (const, override));
+ MOCK_METHOD(void, SetError, (int), (override));
+ MOCK_METHOD(ConnState, GetState, (), (const, override));
+ MOCK_METHOD(int, GetOption, (Option, int*), (override));
+ MOCK_METHOD(int, SetOption, (Option, int), (override));
};
class MockCertVerifier : public SSLCertificateVerifier {
public:
virtual ~MockCertVerifier() = default;
- MOCK_METHOD1(Verify, bool(const SSLCertificate&));
+ MOCK_METHOD(bool, Verify, (const SSLCertificate&), (override));
};
} // namespace
diff --git a/chromium/third_party/webrtc/rtc_base/operations_chain_unittest.cc b/chromium/third_party/webrtc/rtc_base/operations_chain_unittest.cc
index 968f94c060c..ed3c924998a 100644
--- a/chromium/third_party/webrtc/rtc_base/operations_chain_unittest.cc
+++ b/chromium/third_party/webrtc/rtc_base/operations_chain_unittest.cc
@@ -369,14 +369,15 @@ TEST(OperationsChainTest, FunctorIsNotDestroyedWhileExecuting) {
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST(OperationsChainTest, OperationNotInvokingCallbackShouldCrash) {
+TEST(OperationsChainDeathTest, OperationNotInvokingCallbackShouldCrash) {
scoped_refptr<OperationsChain> operations_chain = OperationsChain::Create();
EXPECT_DEATH(
operations_chain->ChainOperation([](std::function<void()> callback) {}),
"");
}
-TEST(OperationsChainTest, OperationInvokingCallbackMultipleTimesShouldCrash) {
+TEST(OperationsChainDeathTest,
+ OperationInvokingCallbackMultipleTimesShouldCrash) {
scoped_refptr<OperationsChain> operations_chain = OperationsChain::Create();
EXPECT_DEATH(
operations_chain->ChainOperation([](std::function<void()> callback) {
diff --git a/chromium/third_party/webrtc/rtc_base/physical_socket_server.cc b/chromium/third_party/webrtc/rtc_base/physical_socket_server.cc
index cf65300b4ac..3a953316695 100644
--- a/chromium/third_party/webrtc/rtc_base/physical_socket_server.cc
+++ b/chromium/third_party/webrtc/rtc_base/physical_socket_server.cc
@@ -1365,12 +1365,6 @@ bool PhysicalSocketServer::WaitSelect(int cmsWait, bool process_io) {
#if defined(WEBRTC_USE_EPOLL)
-// Initial number of events to process with one call to "epoll_wait".
-static const size_t kInitialEpollEvents = 128;
-
-// Maximum number of events to process with one call to "epoll_wait".
-static const size_t kMaxEpollEvents = 8192;
-
void PhysicalSocketServer::AddEpoll(Dispatcher* pdispatcher) {
RTC_DCHECK(epoll_fd_ != INVALID_SOCKET);
int fd = pdispatcher->GetDescriptor();
@@ -1437,20 +1431,13 @@ bool PhysicalSocketServer::WaitEpoll(int cmsWait) {
tvStop = TimeAfter(cmsWait);
}
- if (epoll_events_.empty()) {
- // The initial space to receive events is created only if epoll is used.
- epoll_events_.resize(kInitialEpollEvents);
- }
-
fWait_ = true;
-
while (fWait_) {
// Wait then call handlers as appropriate
// < 0 means error
// 0 means timeout
// > 0 means count of descriptors ready
- int n = epoll_wait(epoll_fd_, &epoll_events_[0],
- static_cast<int>(epoll_events_.size()),
+ int n = epoll_wait(epoll_fd_, epoll_events_.data(), epoll_events_.size(),
static_cast<int>(tvWait));
if (n < 0) {
if (errno != EINTR) {
@@ -1483,13 +1470,6 @@ bool PhysicalSocketServer::WaitEpoll(int cmsWait) {
}
}
- if (static_cast<size_t>(n) == epoll_events_.size() &&
- epoll_events_.size() < kMaxEpollEvents) {
- // We used the complete space to receive events, increase size for future
- // iterations.
- epoll_events_.resize(std::max(epoll_events_.size() * 2, kMaxEpollEvents));
- }
-
if (cmsWait != kForever) {
tvWait = TimeDiff(tvStop, TimeMillis());
if (tvWait < 0) {
diff --git a/chromium/third_party/webrtc/rtc_base/physical_socket_server.h b/chromium/third_party/webrtc/rtc_base/physical_socket_server.h
index e7985db7db6..e21e53b8ecb 100644
--- a/chromium/third_party/webrtc/rtc_base/physical_socket_server.h
+++ b/chromium/third_party/webrtc/rtc_base/physical_socket_server.h
@@ -16,6 +16,7 @@
#define WEBRTC_USE_EPOLL 1
#endif
+#include <array>
#include <memory>
#include <set>
#include <vector>
@@ -24,6 +25,7 @@
#include "rtc_base/net_helpers.h"
#include "rtc_base/socket_server.h"
#include "rtc_base/system/rtc_export.h"
+#include "rtc_base/thread_annotations.h"
#if defined(WEBRTC_POSIX)
typedef int SOCKET;
@@ -80,9 +82,12 @@ class RTC_EXPORT PhysicalSocketServer : public SocketServer {
void Update(Dispatcher* dispatcher);
private:
+ // The number of events to process with one call to "epoll_wait".
+ static constexpr size_t kNumEpollEvents = 128;
+
typedef std::set<Dispatcher*> DispatcherSet;
- void AddRemovePendingDispatchers();
+ void AddRemovePendingDispatchers() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
#if defined(WEBRTC_POSIX)
bool WaitSelect(int cms, bool process_io);
@@ -94,14 +99,18 @@ class RTC_EXPORT PhysicalSocketServer : public SocketServer {
bool WaitEpoll(int cms);
bool WaitPoll(int cms, Dispatcher* dispatcher);
+ // This array is accessed in isolation by a thread calling into Wait().
+ // It's useless to use a SequenceChecker to guard it because a socket
+ // server can outlive the thread it's bound to, forcing the Wait call
+ // to have to reset the sequence checker on Wait calls.
+ std::array<epoll_event, kNumEpollEvents> epoll_events_;
const int epoll_fd_ = INVALID_SOCKET;
- std::vector<struct epoll_event> epoll_events_;
#endif // WEBRTC_USE_EPOLL
- DispatcherSet dispatchers_;
- DispatcherSet pending_add_dispatchers_;
- DispatcherSet pending_remove_dispatchers_;
- bool processing_dispatchers_ = false;
- Signaler* signal_wakeup_;
+ DispatcherSet dispatchers_ RTC_GUARDED_BY(crit_);
+ DispatcherSet pending_add_dispatchers_ RTC_GUARDED_BY(crit_);
+ DispatcherSet pending_remove_dispatchers_ RTC_GUARDED_BY(crit_);
+ bool processing_dispatchers_ RTC_GUARDED_BY(crit_) = false;
+ Signaler* signal_wakeup_; // Assigned in constructor only
CriticalSection crit_;
#if defined(WEBRTC_WIN)
const WSAEVENT socket_ev_;
diff --git a/chromium/third_party/webrtc/rtc_base/platform_thread_types.cc b/chromium/third_party/webrtc/rtc_base/platform_thread_types.cc
index ed4a2282624..b0243b41dc8 100644
--- a/chromium/third_party/webrtc/rtc_base/platform_thread_types.cc
+++ b/chromium/third_party/webrtc/rtc_base/platform_thread_types.cc
@@ -15,6 +15,16 @@
#include <sys/syscall.h>
#endif
+#if defined(WEBRTC_WIN)
+#include "rtc_base/arraysize.h"
+
+// The SetThreadDescription API was introduced in version 1607 of Windows 10.
+// For compatibility with various versions of winuser and to avoid clashing
+// with a potentially defined type, we use the RTC_ prefix.
+typedef HRESULT(WINAPI* RTC_SetThreadDescription)(HANDLE hThread,
+ PCWSTR lpThreadDescription);
+#endif
+
namespace rtc {
PlatformThreadId CurrentThreadId() {
@@ -58,6 +68,24 @@ bool IsThreadRefEqual(const PlatformThreadRef& a, const PlatformThreadRef& b) {
void SetCurrentThreadName(const char* name) {
#if defined(WEBRTC_WIN)
+ // The SetThreadDescription API works even if no debugger is attached.
+ // The names set with this API also show up in ETW traces. Very handy.
+ static auto set_thread_description_func =
+ reinterpret_cast<RTC_SetThreadDescription>(::GetProcAddress(
+ ::GetModuleHandleA("Kernel32.dll"), "SetThreadDescription"));
+ if (set_thread_description_func) {
+ // Convert from ASCII to UTF-16.
+ wchar_t wide_thread_name[64];
+ for (size_t i = 0; i < arraysize(wide_thread_name) - 1; ++i) {
+ wide_thread_name[i] = name[i];
+ if (wide_thread_name[i] == L'\0')
+ break;
+ }
+ // Guarantee null-termination.
+ wide_thread_name[arraysize(wide_thread_name) - 1] = L'\0';
+ set_thread_description_func(::GetCurrentThread(), wide_thread_name);
+ }
+
// For details see:
// https://docs.microsoft.com/en-us/visualstudio/debugger/how-to-set-a-thread-name-in-native-code
#pragma pack(push, 8)
diff --git a/chromium/third_party/webrtc/rtc_base/rate_statistics.cc b/chromium/third_party/webrtc/rtc_base/rate_statistics.cc
index c4c2e78581b..85621fa5555 100644
--- a/chromium/third_party/webrtc/rtc_base/rate_statistics.cc
+++ b/chromium/third_party/webrtc/rtc_base/rate_statistics.cc
@@ -20,29 +20,26 @@
namespace webrtc {
+RateStatistics::Bucket::Bucket(int64_t timestamp)
+ : sum(0), num_samples(0), timestamp(timestamp) {}
+
RateStatistics::RateStatistics(int64_t window_size_ms, float scale)
- : buckets_(new Bucket[window_size_ms]()),
- accumulated_count_(0),
+ : accumulated_count_(0),
+ first_timestamp_(-1),
num_samples_(0),
- oldest_time_(-window_size_ms),
- oldest_index_(0),
scale_(scale),
max_window_size_ms_(window_size_ms),
current_window_size_ms_(max_window_size_ms_) {}
RateStatistics::RateStatistics(const RateStatistics& other)
- : accumulated_count_(other.accumulated_count_),
+ : buckets_(other.buckets_),
+ accumulated_count_(other.accumulated_count_),
+ first_timestamp_(other.first_timestamp_),
overflow_(other.overflow_),
num_samples_(other.num_samples_),
- oldest_time_(other.oldest_time_),
- oldest_index_(other.oldest_index_),
scale_(other.scale_),
max_window_size_ms_(other.max_window_size_ms_),
- current_window_size_ms_(other.current_window_size_ms_) {
- buckets_ = std::make_unique<Bucket[]>(other.max_window_size_ms_);
- std::copy(other.buckets_.get(),
- other.buckets_.get() + other.max_window_size_ms_, buckets_.get());
-}
+ current_window_size_ms_(other.current_window_size_ms_) {}
RateStatistics::RateStatistics(RateStatistics&& other) = default;
@@ -52,33 +49,33 @@ void RateStatistics::Reset() {
accumulated_count_ = 0;
overflow_ = false;
num_samples_ = 0;
- oldest_time_ = -max_window_size_ms_;
- oldest_index_ = 0;
+ first_timestamp_ = -1;
current_window_size_ms_ = max_window_size_ms_;
- for (int64_t i = 0; i < max_window_size_ms_; i++)
- buckets_[i] = Bucket();
+ buckets_.clear();
}
void RateStatistics::Update(int64_t count, int64_t now_ms) {
- RTC_DCHECK_LE(0, count);
- if (now_ms < oldest_time_) {
- // Too old data is ignored.
- return;
- }
+ RTC_DCHECK_GE(count, 0);
EraseOld(now_ms);
+ if (first_timestamp_ == -1) {
+ first_timestamp_ = now_ms;
+ }
+
+ if (buckets_.empty() || now_ms != buckets_.back().timestamp) {
+ if (!buckets_.empty() && now_ms < buckets_.back().timestamp) {
+ RTC_LOG(LS_WARNING) << "Timestamp " << now_ms
+ << " is before the last added "
+ "timestamp in the rate window: "
+ << buckets_.back().timestamp << ", aligning to that.";
+ now_ms = buckets_.back().timestamp;
+ }
+ buckets_.emplace_back(now_ms);
+ }
+ Bucket& last_bucket = buckets_.back();
+ last_bucket.sum += count;
+ ++last_bucket.num_samples;
- // First ever sample, reset window to start now.
- if (!IsInitialized())
- oldest_time_ = now_ms;
-
- uint32_t now_offset = rtc::dchecked_cast<uint32_t>(now_ms - oldest_time_);
- RTC_DCHECK_LT(now_offset, max_window_size_ms_);
- uint32_t index = oldest_index_ + now_offset;
- if (index >= max_window_size_ms_)
- index -= max_window_size_ms_;
- buckets_[index].sum += count;
- ++buckets_[index].samples;
if (std::numeric_limits<int64_t>::max() - accumulated_count_ > count) {
accumulated_count_ += count;
} else {
@@ -92,10 +89,22 @@ absl::optional<int64_t> RateStatistics::Rate(int64_t now_ms) const {
// of the members as mutable...
const_cast<RateStatistics*>(this)->EraseOld(now_ms);
+ int active_window_size = 0;
+ if (first_timestamp_ != -1) {
+ if (first_timestamp_ <= now_ms - current_window_size_ms_) {
+ // Count window as full even if no data points currently in view, if the
+ // data stream started before the window.
+ active_window_size = current_window_size_ms_;
+ } else {
+      // Size of a single bucket is 1ms, so even if now_ms == first_timestamp_
+ // the window size should be 1.
+ active_window_size = now_ms - first_timestamp_ + 1;
+ }
+ }
+
// If window is a single bucket or there is only one sample in a data set that
// has not grown to the full window size, or if the accumulator has
// overflowed, treat this as rate unavailable.
- int active_window_size = now_ms - oldest_time_ + 1;
if (num_samples_ == 0 || active_window_size <= 1 ||
(num_samples_ <= 1 &&
rtc::SafeLt(active_window_size, current_window_size_ms_)) ||
@@ -114,43 +123,35 @@ absl::optional<int64_t> RateStatistics::Rate(int64_t now_ms) const {
}
void RateStatistics::EraseOld(int64_t now_ms) {
- if (!IsInitialized())
- return;
-
// New oldest time that is included in data set.
- int64_t new_oldest_time = now_ms - current_window_size_ms_ + 1;
-
- // New oldest time is older than the current one, no need to cull data.
- if (new_oldest_time <= oldest_time_)
- return;
+ const int64_t new_oldest_time = now_ms - current_window_size_ms_ + 1;
// Loop over buckets and remove too old data points.
- while (num_samples_ > 0 && oldest_time_ < new_oldest_time) {
- const Bucket& oldest_bucket = buckets_[oldest_index_];
+ while (!buckets_.empty() && buckets_.front().timestamp < new_oldest_time) {
+ const Bucket& oldest_bucket = buckets_.front();
RTC_DCHECK_GE(accumulated_count_, oldest_bucket.sum);
- RTC_DCHECK_GE(num_samples_, oldest_bucket.samples);
+ RTC_DCHECK_GE(num_samples_, oldest_bucket.num_samples);
accumulated_count_ -= oldest_bucket.sum;
- num_samples_ -= oldest_bucket.samples;
- buckets_[oldest_index_] = Bucket();
- if (++oldest_index_ >= max_window_size_ms_)
- oldest_index_ = 0;
- ++oldest_time_;
+ num_samples_ -= oldest_bucket.num_samples;
+ buckets_.pop_front();
// This does not clear overflow_ even when counter is empty.
// TODO(https://bugs.webrtc.org/11247): Consider if overflow_ can be reset.
}
- oldest_time_ = new_oldest_time;
}
bool RateStatistics::SetWindowSize(int64_t window_size_ms, int64_t now_ms) {
if (window_size_ms <= 0 || window_size_ms > max_window_size_ms_)
return false;
+ if (first_timestamp_ != -1) {
+ // If the window changes (e.g. decreases - removing data point, then
+ // increases again) we need to update the first timestamp mark as
+    // otherwise it indicates the window covers a region of zeros, suddenly
+ // under-estimating the rate.
+ first_timestamp_ = std::max(first_timestamp_, now_ms - window_size_ms + 1);
+ }
current_window_size_ms_ = window_size_ms;
EraseOld(now_ms);
return true;
}
-bool RateStatistics::IsInitialized() const {
- return oldest_time_ != -max_window_size_ms_;
-}
-
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/rtc_base/rate_statistics.h b/chromium/third_party/webrtc/rtc_base/rate_statistics.h
index 11c8cee7af2..dc8d7f5272a 100644
--- a/chromium/third_party/webrtc/rtc_base/rate_statistics.h
+++ b/chromium/third_party/webrtc/rtc_base/rate_statistics.h
@@ -14,6 +14,7 @@
#include <stddef.h>
#include <stdint.h>
+#include <deque>
#include <memory>
#include "absl/types/optional.h"
@@ -28,6 +29,10 @@ namespace webrtc {
// high; for instance, a 20 Mbit/sec video stream can wrap a 32-bit byte
// counter in 14 minutes.
+// Note that timestamps used in Update(), Rate() and SetWindowSize() must never
+// decrease for two consecutive calls.
+// TODO(bugs.webrtc.org/11600): Migrate from int64_t to Timestamp.
+
class RTC_EXPORT RateStatistics {
public:
static constexpr float kBpsScale = 8000.0f;
@@ -65,19 +70,22 @@ class RTC_EXPORT RateStatistics {
private:
void EraseOld(int64_t now_ms);
- bool IsInitialized() const;
- // Counters are kept in buckets (circular buffer), with one bucket
- // per millisecond.
struct Bucket {
+ explicit Bucket(int64_t timestamp);
int64_t sum; // Sum of all samples in this bucket.
- int samples; // Number of samples in this bucket.
+ int num_samples; // Number of samples in this bucket.
+ const int64_t timestamp; // Timestamp this bucket corresponds to.
};
- std::unique_ptr<Bucket[]> buckets_;
+ // All buckets within the time window, ordered by time.
+ std::deque<Bucket> buckets_;
- // Total count recorded in buckets.
+ // Total count recorded in all buckets.
int64_t accumulated_count_;
+  // Timestamp of the first data point seen, or -1 if none seen.
+ int64_t first_timestamp_;
+
// True if accumulated_count_ has ever grown too large to be
// contained in its integer type.
bool overflow_ = false;
@@ -85,12 +93,6 @@ class RTC_EXPORT RateStatistics {
// The total number of samples in the buckets.
int num_samples_;
- // Oldest time recorded in buckets.
- int64_t oldest_time_;
-
- // Bucket index of oldest counter recorded in buckets.
- int64_t oldest_index_;
-
// To convert counts/ms to desired units
const float scale_;
diff --git a/chromium/third_party/webrtc/rtc_base/rtc_certificate_generator.cc b/chromium/third_party/webrtc/rtc_base/rtc_certificate_generator.cc
index 4c9d378dd2f..72f4277fa0c 100644
--- a/chromium/third_party/webrtc/rtc_base/rtc_certificate_generator.cc
+++ b/chromium/third_party/webrtc/rtc_base/rtc_certificate_generator.cc
@@ -40,7 +40,7 @@ enum {
// request. We are using a separate helper class so that a generation request
// can outlive the |RTCCertificateGenerator| that spawned it.
class RTCCertificateGenerationTask : public RefCountInterface,
- public MessageHandler {
+ public MessageHandlerAutoCleanup {
public:
RTCCertificateGenerationTask(
Thread* signaling_thread,
diff --git a/chromium/third_party/webrtc/rtc_base/signal_thread.h b/chromium/third_party/webrtc/rtc_base/signal_thread.h
index d9e8ade9b0e..b444d549949 100644
--- a/chromium/third_party/webrtc/rtc_base/signal_thread.h
+++ b/chromium/third_party/webrtc/rtc_base/signal_thread.h
@@ -1,5 +1,5 @@
/*
- * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -11,147 +11,9 @@
#ifndef RTC_BASE_SIGNAL_THREAD_H_
#define RTC_BASE_SIGNAL_THREAD_H_
-#include <string>
-
-#include "rtc_base/checks.h"
-#include "rtc_base/constructor_magic.h"
-#include "rtc_base/critical_section.h"
-#include "rtc_base/message_handler.h"
-#include "rtc_base/third_party/sigslot/sigslot.h"
-#include "rtc_base/thread.h"
-#include "rtc_base/thread_annotations.h"
-
-namespace rtc {
-
-///////////////////////////////////////////////////////////////////////////////
-// SignalThread - Base class for worker threads. The main thread should call
-// Start() to begin work, and then follow one of these models:
-// Normal: Wait for SignalWorkDone, and then call Release to destroy.
-// Cancellation: Call Release(true), to abort the worker thread.
-// Fire-and-forget: Call Release(false), which allows the thread to run to
-// completion, and then self-destruct without further notification.
-// Periodic tasks: Wait for SignalWorkDone, then eventually call Start()
-// again to repeat the task. When the instance isn't needed anymore,
-// call Release. DoWork, OnWorkStart and OnWorkStop are called again,
-// on a new thread.
-// The subclass should override DoWork() to perform the background task. By
-// periodically calling ContinueWork(), it can check for cancellation.
-// OnWorkStart and OnWorkDone can be overridden to do pre- or post-work
-// tasks in the context of the main thread.
-///////////////////////////////////////////////////////////////////////////////
-
-class SignalThread : public sigslot::has_slots<>, protected MessageHandler {
- public:
- SignalThread();
-
- // Context: Main Thread. Call before Start to change the worker's name.
- bool SetName(const std::string& name, const void* obj);
-
- // Context: Main Thread. Call to begin the worker thread.
- void Start();
-
- // Context: Main Thread. If the worker thread is not running, deletes the
- // object immediately. Otherwise, asks the worker thread to abort processing,
- // and schedules the object to be deleted once the worker exits.
- // SignalWorkDone will not be signalled. If wait is true, does not return
- // until the thread is deleted.
- void Destroy(bool wait);
-
- // Context: Main Thread. If the worker thread is complete, deletes the
- // object immediately. Otherwise, schedules the object to be deleted once
- // the worker thread completes. SignalWorkDone will be signalled.
- void Release();
-
- // Context: Main Thread. Signalled when work is complete.
- sigslot::signal1<SignalThread*> SignalWorkDone;
-
- enum { ST_MSG_WORKER_DONE, ST_MSG_FIRST_AVAILABLE };
-
- protected:
- ~SignalThread() override;
-
- Thread* worker() { return &worker_; }
-
- // Context: Main Thread. Subclass should override to do pre-work setup.
- virtual void OnWorkStart() {}
-
- // Context: Worker Thread. Subclass should override to do work.
- virtual void DoWork() = 0;
-
- // Context: Worker Thread. Subclass should call periodically to
- // dispatch messages and determine if the thread should terminate.
- bool ContinueWork();
-
- // Context: Worker Thread. Subclass should override when extra work is
- // needed to abort the worker thread.
- virtual void OnWorkStop() {}
-
- // Context: Main Thread. Subclass should override to do post-work cleanup.
- virtual void OnWorkDone() {}
-
- // Context: Any Thread. If subclass overrides, be sure to call the base
- // implementation. Do not use (message_id < ST_MSG_FIRST_AVAILABLE)
- void OnMessage(Message* msg) override;
-
- private:
- enum State {
- kInit, // Initialized, but not started
- kRunning, // Started and doing work
- kReleasing, // Same as running, but to be deleted when work is done
- kComplete, // Work is done
- kStopping, // Work is being interrupted
- };
-
- class Worker : public Thread {
- public:
- explicit Worker(SignalThread* parent);
- ~Worker() override;
- void Run() override;
- bool IsProcessingMessagesForTesting() override;
-
- private:
- SignalThread* parent_;
-
- RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(Worker);
- };
-
- class RTC_SCOPED_LOCKABLE EnterExit {
- public:
- explicit EnterExit(SignalThread* t) RTC_EXCLUSIVE_LOCK_FUNCTION(t->cs_)
- : t_(t) {
- t_->cs_.Enter();
- // If refcount_ is zero then the object has already been deleted and we
- // will be double-deleting it in ~EnterExit()! (shouldn't happen)
- RTC_DCHECK_NE(0, t_->refcount_);
- ++t_->refcount_;
- }
- ~EnterExit() RTC_UNLOCK_FUNCTION() {
- bool d = (0 == --t_->refcount_);
- t_->cs_.Leave();
- if (d)
- delete t_;
- }
-
- private:
- SignalThread* t_;
-
- RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(EnterExit);
- };
-
- void Run();
- void OnMainThreadDestroyed();
-
- Thread* main_;
- Worker worker_;
- CriticalSection cs_;
- State state_;
- int refcount_;
-
- RTC_DISALLOW_COPY_AND_ASSIGN(SignalThread);
-};
-
-///////////////////////////////////////////////////////////////////////////////
-
-} // namespace rtc
+// The facilities in this file have been deprecated. Please do not use them
+// in new code. New code should use facilities exposed by api/task_queue/
+// instead.
+#include "rtc_base/deprecated/signal_thread.h"
#endif // RTC_BASE_SIGNAL_THREAD_H_
diff --git a/chromium/third_party/webrtc/rtc_base/socket_unittest.cc b/chromium/third_party/webrtc/rtc_base/socket_unittest.cc
index 6ea4b47bd1d..04024fb184b 100644
--- a/chromium/third_party/webrtc/rtc_base/socket_unittest.cc
+++ b/chromium/third_party/webrtc/rtc_base/socket_unittest.cc
@@ -651,7 +651,7 @@ void SocketTest::CloseInClosedCallbackInternal(const IPAddress& loopback) {
EXPECT_TRUE(Socket::CS_CLOSED == client->GetState());
}
-class Sleeper : public MessageHandler {
+class Sleeper : public MessageHandlerAutoCleanup {
public:
void OnMessage(Message* msg) override { Thread::Current()->SleepMs(500); }
};
diff --git a/chromium/third_party/webrtc/rtc_base/ssl_adapter_unittest.cc b/chromium/third_party/webrtc/rtc_base/ssl_adapter_unittest.cc
index 125b4bd50d0..498eba312bc 100644
--- a/chromium/third_party/webrtc/rtc_base/ssl_adapter_unittest.cc
+++ b/chromium/third_party/webrtc/rtc_base/ssl_adapter_unittest.cc
@@ -50,7 +50,7 @@ static std::string GetSSLProtocolName(const rtc::SSLMode& ssl_mode) {
class MockCertVerifier : public rtc::SSLCertificateVerifier {
public:
virtual ~MockCertVerifier() = default;
- MOCK_METHOD1(Verify, bool(const rtc::SSLCertificate&));
+ MOCK_METHOD(bool, Verify, (const rtc::SSLCertificate&), (override));
};
// TODO(benwright) - Move to using INSTANTIATE_TEST_SUITE_P instead of using
diff --git a/chromium/third_party/webrtc/rtc_base/stream.h b/chromium/third_party/webrtc/rtc_base/stream.h
index bfb9dc2c41e..036c5ad8c74 100644
--- a/chromium/third_party/webrtc/rtc_base/stream.h
+++ b/chromium/third_party/webrtc/rtc_base/stream.h
@@ -54,7 +54,7 @@ struct StreamEventData : public MessageData {
StreamEventData(int ev, int er) : events(ev), error(er) {}
};
-class RTC_EXPORT StreamInterface : public MessageHandler {
+class RTC_EXPORT StreamInterface : public MessageHandlerAutoCleanup {
public:
enum { MSG_POST_EVENT = 0xF1F1, MSG_MAX = MSG_POST_EVENT };
diff --git a/chromium/third_party/webrtc/rtc_base/strings/string_builder_unittest.cc b/chromium/third_party/webrtc/rtc_base/strings/string_builder_unittest.cc
index 84717ad1d10..99dfd862923 100644
--- a/chromium/third_party/webrtc/rtc_base/strings/string_builder_unittest.cc
+++ b/chromium/third_party/webrtc/rtc_base/strings/string_builder_unittest.cc
@@ -59,7 +59,7 @@ TEST(SimpleStringBuilder, StdString) {
// off.
#if (GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)) || !RTC_DCHECK_IS_ON
-TEST(SimpleStringBuilder, BufferOverrunConstCharP) {
+TEST(SimpleStringBuilderDeathTest, BufferOverrunConstCharP) {
char sb_buf[4];
SimpleStringBuilder sb(sb_buf);
const char* const msg = "This is just too much";
@@ -71,7 +71,7 @@ TEST(SimpleStringBuilder, BufferOverrunConstCharP) {
#endif
}
-TEST(SimpleStringBuilder, BufferOverrunStdString) {
+TEST(SimpleStringBuilderDeathTest, BufferOverrunStdString) {
char sb_buf[4];
SimpleStringBuilder sb(sb_buf);
sb << 12;
@@ -84,7 +84,7 @@ TEST(SimpleStringBuilder, BufferOverrunStdString) {
#endif
}
-TEST(SimpleStringBuilder, BufferOverrunInt) {
+TEST(SimpleStringBuilderDeathTest, BufferOverrunInt) {
char sb_buf[4];
SimpleStringBuilder sb(sb_buf);
constexpr int num = -12345;
@@ -100,7 +100,7 @@ TEST(SimpleStringBuilder, BufferOverrunInt) {
#endif
}
-TEST(SimpleStringBuilder, BufferOverrunDouble) {
+TEST(SimpleStringBuilderDeathTest, BufferOverrunDouble) {
char sb_buf[5];
SimpleStringBuilder sb(sb_buf);
constexpr double num = 123.456;
@@ -113,7 +113,7 @@ TEST(SimpleStringBuilder, BufferOverrunDouble) {
#endif
}
-TEST(SimpleStringBuilder, BufferOverrunConstCharPAlreadyFull) {
+TEST(SimpleStringBuilderDeathTest, BufferOverrunConstCharPAlreadyFull) {
char sb_buf[4];
SimpleStringBuilder sb(sb_buf);
sb << 123;
@@ -126,7 +126,7 @@ TEST(SimpleStringBuilder, BufferOverrunConstCharPAlreadyFull) {
#endif
}
-TEST(SimpleStringBuilder, BufferOverrunIntAlreadyFull) {
+TEST(SimpleStringBuilderDeathTest, BufferOverrunIntAlreadyFull) {
char sb_buf[4];
SimpleStringBuilder sb(sb_buf);
sb << "xyz";
diff --git a/chromium/third_party/webrtc/rtc_base/swap_queue_unittest.cc b/chromium/third_party/webrtc/rtc_base/swap_queue_unittest.cc
index 199ac6b1854..3862d850fa4 100644
--- a/chromium/third_party/webrtc/rtc_base/swap_queue_unittest.cc
+++ b/chromium/third_party/webrtc/rtc_base/swap_queue_unittest.cc
@@ -135,7 +135,7 @@ TEST(SwapQueueTest, SuccessfulItemVerifyFunctor) {
}
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST(SwapQueueTest, UnsuccessfulItemVerifyFunctor) {
+TEST(SwapQueueDeathTest, UnsuccessfulItemVerifyFunctor) {
// Queue item verifier for the test.
auto minus_2_verifier = [](const int& i) { return i > -2; };
SwapQueue<int, decltype(minus_2_verifier)> queue(2, minus_2_verifier);
@@ -148,7 +148,7 @@ TEST(SwapQueueTest, UnsuccessfulItemVerifyFunctor) {
EXPECT_DEATH(result = queue.Insert(&invalid_value), "");
}
-TEST(SwapQueueTest, UnSuccessfulItemVerifyInsert) {
+TEST(SwapQueueDeathTest, UnSuccessfulItemVerifyInsert) {
std::vector<int> template_element(kChunkSize);
SwapQueue<std::vector<int>,
SwapQueueItemVerifier<std::vector<int>, &LengthVerifierFunction>>
@@ -158,7 +158,7 @@ TEST(SwapQueueTest, UnSuccessfulItemVerifyInsert) {
EXPECT_DEATH(result = queue.Insert(&invalid_chunk), "");
}
-TEST(SwapQueueTest, UnSuccessfulItemVerifyRemove) {
+TEST(SwapQueueDeathTest, UnSuccessfulItemVerifyRemove) {
std::vector<int> template_element(kChunkSize);
SwapQueue<std::vector<int>,
SwapQueueItemVerifier<std::vector<int>, &LengthVerifierFunction>>
diff --git a/chromium/third_party/webrtc/rtc_base/synchronization/BUILD.gn b/chromium/third_party/webrtc/rtc_base/synchronization/BUILD.gn
index 3e7b22d4f93..f6e6d0bfaaf 100644
--- a/chromium/third_party/webrtc/rtc_base/synchronization/BUILD.gn
+++ b/chromium/third_party/webrtc/rtc_base/synchronization/BUILD.gn
@@ -12,6 +12,38 @@ if (is_android) {
import("//build/config/android/rules.gni")
}
+rtc_library("yield") {
+ sources = [
+ "yield.cc",
+ "yield.h",
+ ]
+ deps = []
+}
+
+rtc_library("mutex") {
+ sources = [
+ "mutex.cc",
+ "mutex.h",
+ "mutex_critical_section.h",
+ "mutex_pthread.h",
+ ]
+ if (rtc_use_absl_mutex) {
+ sources += [ "mutex_abseil.h" ]
+ }
+
+ deps = [
+ ":yield",
+ "..:checks",
+ "..:macromagic",
+ "..:platform_thread_types",
+ "../system:unused",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers" ]
+ if (rtc_use_absl_mutex) {
+ absl_deps += [ "//third_party/abseil-cpp/absl/synchronization" ]
+ }
+}
+
rtc_library("rw_lock_wrapper") {
public = [ "rw_lock_wrapper.h" ]
sources = [ "rw_lock_wrapper.cc" ]
@@ -40,6 +72,7 @@ rtc_library("sequence_checker") {
"..:criticalsection",
"..:macromagic",
"..:platform_thread_types",
+ "..:stringutils",
"../../api/task_queue",
"../system:rtc_export",
]
@@ -50,8 +83,8 @@ rtc_library("yield_policy") {
"yield_policy.cc",
"yield_policy.h",
]
- deps = [
- "..:checks",
+ deps = [ "..:checks" ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/base:config",
"//third_party/abseil-cpp/absl/base:core_headers",
]
@@ -60,11 +93,30 @@ rtc_library("yield_policy") {
if (rtc_include_tests) {
rtc_library("synchronization_unittests") {
testonly = true
- sources = [ "yield_policy_unittest.cc" ]
+ sources = [
+ "mutex_unittest.cc",
+ "yield_policy_unittest.cc",
+ ]
deps = [
+ ":mutex",
+ ":yield",
":yield_policy",
+ "..:checks",
+ "..:macromagic",
+ "..:rtc_base",
"..:rtc_event",
"../../test:test_support",
+ "//third_party/google_benchmark",
+ ]
+ }
+
+ rtc_library("mutex_benchmark") {
+ testonly = true
+ sources = [ "mutex_benchmark.cc" ]
+ deps = [
+ ":mutex",
+ "../system:unused",
+ "//third_party/google_benchmark",
]
}
diff --git a/chromium/third_party/webrtc/rtc_base/synchronization/DEPS b/chromium/third_party/webrtc/rtc_base/synchronization/DEPS
new file mode 100644
index 00000000000..4ed1f2444bc
--- /dev/null
+++ b/chromium/third_party/webrtc/rtc_base/synchronization/DEPS
@@ -0,0 +1,11 @@
+specific_include_rules = {
+ "mutex_abseil\.h": [
+ "+absl/synchronization"
+ ],
+ ".*_benchmark\.cc": [
+ "+benchmark",
+ ],
+ ".*_unittest\.cc": [
+ "+benchmark",
+ ]
+}
diff --git a/chromium/third_party/webrtc/rtc_base/synchronization/mutex.cc b/chromium/third_party/webrtc/rtc_base/synchronization/mutex.cc
new file mode 100644
index 00000000000..6c2d6ff7f06
--- /dev/null
+++ b/chromium/third_party/webrtc/rtc_base/synchronization/mutex.cc
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtc_base/synchronization/mutex.h"
+
+#include "rtc_base/checks.h"
+#include "rtc_base/synchronization/yield.h"
+
+namespace webrtc {
+
+#if !defined(WEBRTC_ABSL_MUTEX)
+void GlobalMutex::Lock() {
+ while (mutex_locked_.exchange(1)) {
+ YieldCurrentThread();
+ }
+}
+
+void GlobalMutex::Unlock() {
+ int old = mutex_locked_.exchange(0);
+ RTC_DCHECK_EQ(old, 1) << "Unlock called without calling Lock first";
+}
+
+GlobalMutexLock::GlobalMutexLock(GlobalMutex* mutex) : mutex_(mutex) {
+ mutex_->Lock();
+}
+
+GlobalMutexLock::~GlobalMutexLock() {
+ mutex_->Unlock();
+}
+#endif // #if !defined(WEBRTC_ABSL_MUTEX)
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/rtc_base/synchronization/mutex.h b/chromium/third_party/webrtc/rtc_base/synchronization/mutex.h
new file mode 100644
index 00000000000..cc12c7edf0f
--- /dev/null
+++ b/chromium/third_party/webrtc/rtc_base/synchronization/mutex.h
@@ -0,0 +1,145 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef RTC_BASE_SYNCHRONIZATION_MUTEX_H_
+#define RTC_BASE_SYNCHRONIZATION_MUTEX_H_
+
+#include <atomic>
+
+#include "absl/base/const_init.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/platform_thread_types.h"
+#include "rtc_base/system/unused.h"
+#include "rtc_base/thread_annotations.h"
+
+#if defined(WEBRTC_ABSL_MUTEX)
+#include "rtc_base/synchronization/mutex_abseil.h" // nogncheck
+#elif defined(WEBRTC_WIN)
+#include "rtc_base/synchronization/mutex_critical_section.h"
+#elif defined(WEBRTC_POSIX)
+#include "rtc_base/synchronization/mutex_pthread.h"
+#else
+#error Unsupported platform.
+#endif
+
+namespace webrtc {
+
+// The Mutex guarantees exclusive access and aims to follow Abseil semantics
+// (i.e. non-reentrant etc).
+class RTC_LOCKABLE Mutex final {
+ public:
+ Mutex() = default;
+ Mutex(const Mutex&) = delete;
+ Mutex& operator=(const Mutex&) = delete;
+
+ void Lock() RTC_EXCLUSIVE_LOCK_FUNCTION() {
+ rtc::PlatformThreadRef current = CurrentThreadRefAssertingNotBeingHolder();
+ impl_.Lock();
+ // |holder_| changes from 0 to CurrentThreadRef().
+ holder_.store(current, std::memory_order_relaxed);
+ }
+ RTC_WARN_UNUSED_RESULT bool TryLock() RTC_EXCLUSIVE_TRYLOCK_FUNCTION(true) {
+ rtc::PlatformThreadRef current = CurrentThreadRefAssertingNotBeingHolder();
+ if (impl_.TryLock()) {
+ // |holder_| changes from 0 to CurrentThreadRef().
+ holder_.store(current, std::memory_order_relaxed);
+ return true;
+ }
+ return false;
+ }
+ void Unlock() RTC_UNLOCK_FUNCTION() {
+ // |holder_| changes from CurrentThreadRef() to 0. If something else than
+ // CurrentThreadRef() is stored in |holder_|, the Unlock results in
+ // undefined behavior as mutexes can't be unlocked from another thread than
+ // the one that locked it, or called while not being locked.
+ holder_.store(0, std::memory_order_relaxed);
+ impl_.Unlock();
+ }
+
+ private:
+ rtc::PlatformThreadRef CurrentThreadRefAssertingNotBeingHolder() {
+ rtc::PlatformThreadRef holder = holder_.load(std::memory_order_relaxed);
+ rtc::PlatformThreadRef current = rtc::CurrentThreadRef();
+ // TODO(bugs.webrtc.org/11567): remove this temporary check after migrating
+ // fully to Mutex.
+ RTC_CHECK_NE(holder, current);
+ return current;
+ }
+
+ MutexImpl impl_;
+ // TODO(bugs.webrtc.org/11567): remove |holder_| after migrating fully to
+ // Mutex.
+ // |holder_| contains the PlatformThreadRef of the thread currently holding
+ // the lock, or 0.
+ // Remarks on the used memory orders: the atomic load in
+ // CurrentThreadRefAssertingNotBeingHolder() observes either of two things:
+ // 1. our own previous write to holder_ with our thread ID.
+ // 2. another thread (with ID y) writing y and then 0 from an initial value of
+ // 0. If we're observing case 1, our own stores are obviously ordered before
+ // the load, and hit the CHECK. If we're observing case 2, the value observed
+ // w.r.t |impl_| being locked depends on the memory order. Since we only care
+ // that it's different from CurrentThreadRef(), we use the more performant
+ // option, memory_order_relaxed.
+ std::atomic<rtc::PlatformThreadRef> holder_ = {0};
+};
+
+// MutexLock, for serializing execution through a scope.
+class RTC_SCOPED_LOCKABLE MutexLock final {
+ public:
+ MutexLock(const MutexLock&) = delete;
+ MutexLock& operator=(const MutexLock&) = delete;
+
+ explicit MutexLock(Mutex* mutex) RTC_EXCLUSIVE_LOCK_FUNCTION(mutex)
+ : mutex_(mutex) {
+ mutex->Lock();
+ }
+ ~MutexLock() RTC_UNLOCK_FUNCTION() { mutex_->Unlock(); }
+
+ private:
+ Mutex* mutex_;
+};
+
+// A mutex used to protect global variables. Do NOT use for other purposes.
+#if defined(WEBRTC_ABSL_MUTEX)
+using GlobalMutex = absl::Mutex;
+using GlobalMutexLock = absl::MutexLock;
+#else
+class RTC_LOCKABLE GlobalMutex final {
+ public:
+ GlobalMutex(const GlobalMutex&) = delete;
+ GlobalMutex& operator=(const GlobalMutex&) = delete;
+
+ constexpr explicit GlobalMutex(absl::ConstInitType /*unused*/)
+ : mutex_locked_(0) {}
+
+ void Lock() RTC_EXCLUSIVE_LOCK_FUNCTION();
+ void Unlock() RTC_UNLOCK_FUNCTION();
+
+ private:
+ std::atomic<int> mutex_locked_; // 0 means lock not taken, 1 means taken.
+};
+
+// GlobalMutexLock, for serializing execution through a scope.
+class RTC_SCOPED_LOCKABLE GlobalMutexLock final {
+ public:
+ GlobalMutexLock(const GlobalMutexLock&) = delete;
+ GlobalMutexLock& operator=(const GlobalMutexLock&) = delete;
+
+ explicit GlobalMutexLock(GlobalMutex* mutex) RTC_EXCLUSIVE_LOCK_FUNCTION();
+ ~GlobalMutexLock() RTC_UNLOCK_FUNCTION();
+
+ private:
+ GlobalMutex* mutex_;
+};
+#endif // if defined(WEBRTC_ABSL_MUTEX)
+
+} // namespace webrtc
+
+#endif // RTC_BASE_SYNCHRONIZATION_MUTEX_H_
diff --git a/chromium/third_party/webrtc/rtc_base/synchronization/mutex_abseil.h b/chromium/third_party/webrtc/rtc_base/synchronization/mutex_abseil.h
new file mode 100644
index 00000000000..4ad1d07eef1
--- /dev/null
+++ b/chromium/third_party/webrtc/rtc_base/synchronization/mutex_abseil.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef RTC_BASE_SYNCHRONIZATION_MUTEX_ABSEIL_H_
+#define RTC_BASE_SYNCHRONIZATION_MUTEX_ABSEIL_H_
+
+#include "absl/synchronization/mutex.h"
+#include "rtc_base/thread_annotations.h"
+
+namespace webrtc {
+
+class RTC_LOCKABLE MutexImpl final {
+ public:
+ MutexImpl() = default;
+ MutexImpl(const MutexImpl&) = delete;
+ MutexImpl& operator=(const MutexImpl&) = delete;
+
+ void Lock() RTC_EXCLUSIVE_LOCK_FUNCTION() { mutex_.Lock(); }
+ RTC_WARN_UNUSED_RESULT bool TryLock() RTC_EXCLUSIVE_TRYLOCK_FUNCTION(true) {
+ return mutex_.TryLock();
+ }
+ void Unlock() RTC_UNLOCK_FUNCTION() { mutex_.Unlock(); }
+
+ private:
+ absl::Mutex mutex_;
+};
+
+} // namespace webrtc
+
+#endif // RTC_BASE_SYNCHRONIZATION_MUTEX_ABSEIL_H_
diff --git a/chromium/third_party/webrtc/rtc_base/synchronization/mutex_benchmark.cc b/chromium/third_party/webrtc/rtc_base/synchronization/mutex_benchmark.cc
new file mode 100644
index 00000000000..40adca65d86
--- /dev/null
+++ b/chromium/third_party/webrtc/rtc_base/synchronization/mutex_benchmark.cc
@@ -0,0 +1,95 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "benchmark/benchmark.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/system/unused.h"
+
+namespace webrtc {
+
+class PerfTestData {
+ public:
+ PerfTestData() : cache_line_barrier_1_(), cache_line_barrier_2_() {
+ cache_line_barrier_1_[0]++; // Avoid 'is not used'.
+ cache_line_barrier_2_[0]++; // Avoid 'is not used'.
+ }
+
+ int AddToCounter(int add) {
+ MutexLock mu(&mu_);
+ my_counter_ += add;
+ return 0;
+ }
+
+ private:
+ uint8_t cache_line_barrier_1_[64];
+ Mutex mu_;
+ uint8_t cache_line_barrier_2_[64];
+ int64_t my_counter_ = 0;
+};
+
+void BM_LockWithMutex(benchmark::State& state) {
+ static PerfTestData test_data;
+ for (auto s : state) {
+ RTC_UNUSED(s);
+ benchmark::DoNotOptimize(test_data.AddToCounter(2));
+ }
+}
+
+BENCHMARK(BM_LockWithMutex)->Threads(1);
+BENCHMARK(BM_LockWithMutex)->Threads(2);
+BENCHMARK(BM_LockWithMutex)->Threads(4);
+BENCHMARK(BM_LockWithMutex)->ThreadPerCpu();
+
+} // namespace webrtc
+
+/*
+
+Results:
+
+NB when reproducing: Remember to turn off power management features such as CPU
+scaling before running!
+
+pthreads (Linux):
+----------------------------------------------------------------------
+Run on (12 X 4500 MHz CPU s)
+CPU Caches:
+ L1 Data 32 KiB (x6)
+ L1 Instruction 32 KiB (x6)
+ L2 Unified 1024 KiB (x6)
+ L3 Unified 8448 KiB (x1)
+Load Average: 0.26, 0.28, 0.44
+----------------------------------------------------------------------
+Benchmark Time CPU Iterations
+----------------------------------------------------------------------
+BM_LockWithMutex/threads:1 13.4 ns 13.4 ns 52192906
+BM_LockWithMutex/threads:2 44.2 ns 88.4 ns 8189944
+BM_LockWithMutex/threads:4 52.0 ns 198 ns 3743244
+BM_LockWithMutex/threads:12 84.9 ns 944 ns 733524
+
+std::mutex performs like the pthread implementation (Linux).
+
+Abseil (Linux):
+----------------------------------------------------------------------
+Run on (12 X 4500 MHz CPU s)
+CPU Caches:
+ L1 Data 32 KiB (x6)
+ L1 Instruction 32 KiB (x6)
+ L2 Unified 1024 KiB (x6)
+ L3 Unified 8448 KiB (x1)
+Load Average: 0.27, 0.24, 0.37
+----------------------------------------------------------------------
+Benchmark Time CPU Iterations
+----------------------------------------------------------------------
+BM_LockWithMutex/threads:1 15.0 ns 15.0 ns 46550231
+BM_LockWithMutex/threads:2 91.1 ns 182 ns 4059212
+BM_LockWithMutex/threads:4 40.8 ns 131 ns 5496560
+BM_LockWithMutex/threads:12 37.0 ns 130 ns 5377668
+
+*/
diff --git a/chromium/third_party/webrtc/rtc_base/synchronization/mutex_critical_section.h b/chromium/third_party/webrtc/rtc_base/synchronization/mutex_critical_section.h
new file mode 100644
index 00000000000..d206794988b
--- /dev/null
+++ b/chromium/third_party/webrtc/rtc_base/synchronization/mutex_critical_section.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef RTC_BASE_SYNCHRONIZATION_MUTEX_CRITICAL_SECTION_H_
+#define RTC_BASE_SYNCHRONIZATION_MUTEX_CRITICAL_SECTION_H_
+
+#if defined(WEBRTC_WIN)
+// clang-format off
+// clang formating would change include order.
+
+// Include winsock2.h before including <windows.h> to maintain consistency with
+// win32.h. To include win32.h directly, it must be broken out into its own
+// build target.
+#include <winsock2.h>
+#include <windows.h>
+#include <sal.h> // must come after windows headers.
+// clang-format on
+
+#include "rtc_base/thread_annotations.h"
+
+namespace webrtc {
+
+class RTC_LOCKABLE MutexImpl final {
+ public:
+ MutexImpl() { InitializeCriticalSection(&critical_section_); }
+ MutexImpl(const MutexImpl&) = delete;
+ MutexImpl& operator=(const MutexImpl&) = delete;
+ ~MutexImpl() { DeleteCriticalSection(&critical_section_); }
+
+ void Lock() RTC_EXCLUSIVE_LOCK_FUNCTION() {
+ EnterCriticalSection(&critical_section_);
+ }
+ RTC_WARN_UNUSED_RESULT bool TryLock() RTC_EXCLUSIVE_TRYLOCK_FUNCTION(true) {
+ return TryEnterCriticalSection(&critical_section_) != FALSE;
+ }
+ void Unlock() RTC_UNLOCK_FUNCTION() {
+ LeaveCriticalSection(&critical_section_);
+ }
+
+ private:
+ CRITICAL_SECTION critical_section_;
+};
+
+} // namespace webrtc
+
+#endif // #if defined(WEBRTC_WIN)
+#endif // RTC_BASE_SYNCHRONIZATION_MUTEX_CRITICAL_SECTION_H_
diff --git a/chromium/third_party/webrtc/rtc_base/synchronization/mutex_pthread.h b/chromium/third_party/webrtc/rtc_base/synchronization/mutex_pthread.h
new file mode 100644
index 00000000000..c9496e72c9a
--- /dev/null
+++ b/chromium/third_party/webrtc/rtc_base/synchronization/mutex_pthread.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef RTC_BASE_SYNCHRONIZATION_MUTEX_PTHREAD_H_
+#define RTC_BASE_SYNCHRONIZATION_MUTEX_PTHREAD_H_
+
+#if defined(WEBRTC_POSIX)
+
+#include <pthread.h>
+#if defined(WEBRTC_MAC)
+#include <pthread_spis.h>
+#endif
+
+#include "rtc_base/thread_annotations.h"
+
+namespace webrtc {
+
+class RTC_LOCKABLE MutexImpl final {
+ public:
+ MutexImpl() {
+ pthread_mutexattr_t mutex_attribute;
+ pthread_mutexattr_init(&mutex_attribute);
+#if defined(WEBRTC_MAC)
+ pthread_mutexattr_setpolicy_np(&mutex_attribute,
+ _PTHREAD_MUTEX_POLICY_FIRSTFIT);
+#endif
+ pthread_mutex_init(&mutex_, &mutex_attribute);
+ pthread_mutexattr_destroy(&mutex_attribute);
+ }
+ MutexImpl(const MutexImpl&) = delete;
+ MutexImpl& operator=(const MutexImpl&) = delete;
+ ~MutexImpl() { pthread_mutex_destroy(&mutex_); }
+
+ void Lock() RTC_EXCLUSIVE_LOCK_FUNCTION() { pthread_mutex_lock(&mutex_); }
+ RTC_WARN_UNUSED_RESULT bool TryLock() RTC_EXCLUSIVE_TRYLOCK_FUNCTION(true) {
+ return pthread_mutex_trylock(&mutex_) == 0;
+ }
+ void Unlock() RTC_UNLOCK_FUNCTION() { pthread_mutex_unlock(&mutex_); }
+
+ private:
+ pthread_mutex_t mutex_;
+};
+
+} // namespace webrtc
+#endif // #if defined(WEBRTC_POSIX)
+#endif // RTC_BASE_SYNCHRONIZATION_MUTEX_PTHREAD_H_
diff --git a/chromium/third_party/webrtc/rtc_base/synchronization/mutex_unittest.cc b/chromium/third_party/webrtc/rtc_base/synchronization/mutex_unittest.cc
new file mode 100644
index 00000000000..b8c45d0a8cf
--- /dev/null
+++ b/chromium/third_party/webrtc/rtc_base/synchronization/mutex_unittest.cc
@@ -0,0 +1,206 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtc_base/synchronization/mutex.h"
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <atomic>
+#include <memory>
+#include <type_traits>
+#include <utility>
+#include <vector>
+
+#include "benchmark/benchmark.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/event.h"
+#include "rtc_base/location.h"
+#include "rtc_base/message_handler.h"
+#include "rtc_base/platform_thread.h"
+#include "rtc_base/synchronization/yield.h"
+#include "rtc_base/thread.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace {
+
+using ::rtc::Event;
+using ::rtc::Message;
+using ::rtc::MessageHandler;
+using ::rtc::Thread;
+
+constexpr int kNumThreads = 16;
+
+template <class MutexType>
+class RTC_LOCKABLE RawMutexLocker {
+ public:
+ explicit RawMutexLocker(MutexType& mutex) : mutex_(mutex) {}
+ void Lock() RTC_EXCLUSIVE_LOCK_FUNCTION() { mutex_.Lock(); }
+ void Unlock() RTC_UNLOCK_FUNCTION() { mutex_.Unlock(); }
+
+ private:
+ MutexType& mutex_;
+};
+
+class RTC_LOCKABLE RawMutexTryLocker {
+ public:
+ explicit RawMutexTryLocker(Mutex& mutex) : mutex_(mutex) {}
+ void Lock() RTC_EXCLUSIVE_LOCK_FUNCTION() {
+ while (!mutex_.TryLock()) {
+ YieldCurrentThread();
+ }
+ }
+ void Unlock() RTC_UNLOCK_FUNCTION() { mutex_.Unlock(); }
+
+ private:
+ Mutex& mutex_;
+};
+
+template <class MutexType, class MutexLockType>
+class MutexLockLocker {
+ public:
+ explicit MutexLockLocker(MutexType& mutex) : mutex_(mutex) {}
+ void Lock() { lock_ = std::make_unique<MutexLockType>(&mutex_); }
+ void Unlock() { lock_ = nullptr; }
+
+ private:
+ MutexType& mutex_;
+ std::unique_ptr<MutexLockType> lock_;
+};
+
+template <class MutexType, class MutexLocker>
+class LockRunner : public rtc::MessageHandlerAutoCleanup {
+ public:
+ template <typename... Args>
+ explicit LockRunner(Args... args)
+ : threads_active_(0),
+ start_event_(true, false),
+ done_event_(true, false),
+ shared_value_(0),
+ mutex_(args...),
+ locker_(mutex_) {}
+
+ bool Run() {
+ // Signal all threads to start.
+ start_event_.Set();
+
+ // Wait for all threads to finish.
+ return done_event_.Wait(kLongTime);
+ }
+
+ void SetExpectedThreadCount(int count) { threads_active_ = count; }
+
+ int shared_value() {
+ int shared_value;
+ locker_.Lock();
+ shared_value = shared_value_;
+ locker_.Unlock();
+ return shared_value_;
+ }
+
+ void OnMessage(Message* msg) override {
+ ASSERT_TRUE(start_event_.Wait(kLongTime));
+ locker_.Lock();
+
+ EXPECT_EQ(0, shared_value_);
+ int old = shared_value_;
+
+ // Use a loop to increase the chance of race. If the |locker_|
+ // implementation is faulty, it would be improbable that the error slips
+ // through.
+ for (int i = 0; i < kOperationsToRun; ++i) {
+ benchmark::DoNotOptimize(++shared_value_);
+ }
+ EXPECT_EQ(old + kOperationsToRun, shared_value_);
+ shared_value_ = 0;
+
+ locker_.Unlock();
+ if (threads_active_.fetch_sub(1) == 1) {
+ done_event_.Set();
+ }
+ }
+
+ private:
+ static constexpr int kLongTime = 10000; // 10 seconds
+ static constexpr int kOperationsToRun = 1000;
+
+ std::atomic<int> threads_active_;
+ Event start_event_;
+ Event done_event_;
+ int shared_value_;
+ MutexType mutex_;
+ MutexLocker locker_;
+};
+
+void StartThreads(std::vector<std::unique_ptr<Thread>>& threads,
+ MessageHandler* handler) {
+ for (int i = 0; i < kNumThreads; ++i) {
+ std::unique_ptr<Thread> thread(Thread::Create());
+ thread->Start();
+ thread->Post(RTC_FROM_HERE, handler);
+ threads.push_back(std::move(thread));
+ }
+}
+
+TEST(MutexTest, ProtectsSharedResourceWithMutexAndRawMutexLocker) {
+ std::vector<std::unique_ptr<Thread>> threads;
+ LockRunner<Mutex, RawMutexLocker<Mutex>> runner;
+ StartThreads(threads, &runner);
+ runner.SetExpectedThreadCount(kNumThreads);
+ EXPECT_TRUE(runner.Run());
+ EXPECT_EQ(0, runner.shared_value());
+}
+
+TEST(MutexTest, ProtectsSharedResourceWithMutexAndRawMutexTryLocker) {
+ std::vector<std::unique_ptr<Thread>> threads;
+ LockRunner<Mutex, RawMutexTryLocker> runner;
+ StartThreads(threads, &runner);
+ runner.SetExpectedThreadCount(kNumThreads);
+ EXPECT_TRUE(runner.Run());
+ EXPECT_EQ(0, runner.shared_value());
+}
+
+TEST(MutexTest, ProtectsSharedResourceWithMutexAndMutexLocker) {
+ std::vector<std::unique_ptr<Thread>> threads;
+ LockRunner<Mutex, MutexLockLocker<Mutex, MutexLock>> runner;
+ StartThreads(threads, &runner);
+ runner.SetExpectedThreadCount(kNumThreads);
+ EXPECT_TRUE(runner.Run());
+ EXPECT_EQ(0, runner.shared_value());
+}
+
+TEST(MutexTest, ProtectsSharedResourceWithGlobalMutexAndRawMutexLocker) {
+ std::vector<std::unique_ptr<Thread>> threads;
+ LockRunner<GlobalMutex, RawMutexLocker<GlobalMutex>> runner(absl::kConstInit);
+ StartThreads(threads, &runner);
+ runner.SetExpectedThreadCount(kNumThreads);
+ EXPECT_TRUE(runner.Run());
+ EXPECT_EQ(0, runner.shared_value());
+}
+
+TEST(MutexTest, ProtectsSharedResourceWithGlobalMutexAndMutexLocker) {
+ std::vector<std::unique_ptr<Thread>> threads;
+ LockRunner<GlobalMutex, MutexLockLocker<GlobalMutex, GlobalMutexLock>> runner(
+ absl::kConstInit);
+ StartThreads(threads, &runner);
+ runner.SetExpectedThreadCount(kNumThreads);
+ EXPECT_TRUE(runner.Run());
+ EXPECT_EQ(0, runner.shared_value());
+}
+
+TEST(MutexTest, GlobalMutexCanHaveStaticStorageDuration) {
+ ABSL_CONST_INIT static GlobalMutex global_lock(absl::kConstInit);
+ global_lock.Lock();
+ global_lock.Unlock();
+}
+
+} // namespace
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/rtc_base/synchronization/sequence_checker.cc b/chromium/third_party/webrtc/rtc_base/synchronization/sequence_checker.cc
index d64f32a616d..ff433db137f 100644
--- a/chromium/third_party/webrtc/rtc_base/synchronization/sequence_checker.cc
+++ b/chromium/third_party/webrtc/rtc_base/synchronization/sequence_checker.cc
@@ -13,6 +13,8 @@
#include <dispatch/dispatch.h>
#endif
+#include "rtc_base/strings/string_builder.h"
+
namespace webrtc {
namespace {
// On Mac, returns the label of the current dispatch queue; elsewhere, return
@@ -24,8 +26,16 @@ const void* GetSystemQueueRef() {
return nullptr;
#endif
}
+
} // namespace
+std::string ExpectationToString(const webrtc::SequenceChecker* checker) {
+#if RTC_DCHECK_IS_ON
+ return checker->ExpectationToString();
+#endif
+ return std::string();
+}
+
SequenceCheckerImpl::SequenceCheckerImpl()
: attached_(true),
valid_thread_(rtc::CurrentThreadRef()),
@@ -62,4 +72,41 @@ void SequenceCheckerImpl::Detach() {
// reset on the next call to IsCurrent().
}
+#if RTC_DCHECK_IS_ON
+std::string SequenceCheckerImpl::ExpectationToString() const {
+ const TaskQueueBase* const current_queue = TaskQueueBase::Current();
+ const rtc::PlatformThreadRef current_thread = rtc::CurrentThreadRef();
+ const void* const current_system_queue = GetSystemQueueRef();
+ rtc::CritScope scoped_lock(&lock_);
+ if (!attached_)
+ return "Checker currently not attached.";
+
+ // The format of the string is meant to complement the one we have inside of
+ // FatalLog() (checks.cc). Example:
+ //
+ // # Expected: TQ: 0x0 SysQ: 0x7fff69541330 Thread: 0x11dcf6dc0
+ // # Actual: TQ: 0x7fa8f0604190 SysQ: 0x7fa8f0604a30 Thread: 0x700006f1a000
+ // TaskQueue doesn't match
+
+ rtc::StringBuilder message;
+ message.AppendFormat(
+ "# Expected: TQ: %p SysQ: %p Thread: %p\n"
+ "# Actual: TQ: %p SysQ: %p Thread: %p\n",
+ valid_queue_, valid_system_queue_,
+ reinterpret_cast<const void*>(valid_thread_), current_queue,
+ current_system_queue, reinterpret_cast<const void*>(current_thread));
+
+ if ((valid_queue_ || current_queue) && valid_queue_ != current_queue) {
+ message << "TaskQueue doesn't match\n";
+ } else if (valid_system_queue_ &&
+ valid_system_queue_ != current_system_queue) {
+ message << "System queue doesn't match\n";
+ } else if (!rtc::IsThreadRefEqual(valid_thread_, current_thread)) {
+ message << "Threads don't match\n";
+ }
+
+ return message.Release();
+}
+#endif // RTC_DCHECK_IS_ON
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/rtc_base/synchronization/sequence_checker.h b/chromium/third_party/webrtc/rtc_base/synchronization/sequence_checker.h
index fe644fa14e3..fd0a69983af 100644
--- a/chromium/third_party/webrtc/rtc_base/synchronization/sequence_checker.h
+++ b/chromium/third_party/webrtc/rtc_base/synchronization/sequence_checker.h
@@ -10,6 +10,8 @@
#ifndef RTC_BASE_SYNCHRONIZATION_SEQUENCE_CHECKER_H_
#define RTC_BASE_SYNCHRONIZATION_SEQUENCE_CHECKER_H_
+#include <type_traits>
+
#include "api/task_queue/task_queue_base.h"
#include "rtc_base/critical_section.h"
#include "rtc_base/platform_thread_types.h"
@@ -34,6 +36,11 @@ class RTC_EXPORT SequenceCheckerImpl {
// used exclusively on another thread.
void Detach();
+ // Returns a string that is formatted to match with the error string printed
+ // by RTC_CHECK() when a condition is not met.
+ // This is used in conjunction with the RTC_DCHECK_RUN_ON() macro.
+ std::string ExpectationToString() const;
+
private:
rtc::CriticalSection lock_;
// These are mutable so that IsCurrent can set them.
@@ -162,8 +169,19 @@ class RTC_SCOPED_LOCKABLE SequenceCheckerScope {
#define RTC_RUN_ON(x) \
RTC_THREAD_ANNOTATION_ATTRIBUTE__(exclusive_locks_required(x))
+namespace webrtc {
+std::string ExpectationToString(const webrtc::SequenceChecker* checker);
+
+// Catch-all implementation for types other than explicitly supported above.
+template <typename ThreadLikeObject>
+std::string ExpectationToString(const ThreadLikeObject*) {
+ return std::string();
+}
+
+} // namespace webrtc
+
#define RTC_DCHECK_RUN_ON(x) \
webrtc::webrtc_seq_check_impl::SequenceCheckerScope seq_check_scope(x); \
- RTC_DCHECK((x)->IsCurrent())
+ RTC_DCHECK((x)->IsCurrent()) << webrtc::ExpectationToString(x)
#endif // RTC_BASE_SYNCHRONIZATION_SEQUENCE_CHECKER_H_
diff --git a/chromium/third_party/webrtc/rtc_base/synchronization/sequence_checker_unittest.cc b/chromium/third_party/webrtc/rtc_base/synchronization/sequence_checker_unittest.cc
index 1e62e9759b4..6fcb522c545 100644
--- a/chromium/third_party/webrtc/rtc_base/synchronization/sequence_checker_unittest.cc
+++ b/chromium/third_party/webrtc/rtc_base/synchronization/sequence_checker_unittest.cc
@@ -31,7 +31,7 @@ class CompileTimeTestForGuardedBy {
int CalledOnSequence() RTC_RUN_ON(sequence_checker_) { return guarded_; }
void CallMeFromSequence() {
- RTC_DCHECK_RUN_ON(&sequence_checker_) << "Should be called on sequence";
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
guarded_ = 41;
}
@@ -158,7 +158,12 @@ void TestAnnotationsOnWrongQueue() {
}
#if RTC_DCHECK_IS_ON
-TEST(SequenceCheckerTest, TestAnnotationsOnWrongQueueDebug) {
+// Note: Ending the test suite name with 'DeathTest' is important as it causes
+// gtest to order this test before any other non-death-tests, to avoid potential
+// global process state pollution such as shared worker threads being started
+// (e.g. a side effect of calling InitCocoaMultiThreading() on Mac causes one or
+// two additional threads to be created).
+TEST(SequenceCheckerDeathTest, TestAnnotationsOnWrongQueueDebug) {
ASSERT_DEATH({ TestAnnotationsOnWrongQueue(); }, "");
}
#else
diff --git a/chromium/third_party/webrtc/rtc_base/synchronization/yield.cc b/chromium/third_party/webrtc/rtc_base/synchronization/yield.cc
new file mode 100644
index 00000000000..cbb58d12ab0
--- /dev/null
+++ b/chromium/third_party/webrtc/rtc_base/synchronization/yield.cc
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtc_base/synchronization/yield.h"
+
+#if defined(WEBRTC_WIN)
+#include <windows.h>
+#else
+#include <sched.h>
+#include <time.h>
+#endif
+
+namespace webrtc {
+
+void YieldCurrentThread() {
+ // TODO(bugs.webrtc.org/11634): use dedicated OS functionality instead of
+ // sleep for yielding.
+#if defined(WEBRTC_WIN)
+ ::Sleep(0);
+#elif defined(WEBRTC_MAC) && defined(RTC_USE_NATIVE_MUTEX_ON_MAC) && \
+ !RTC_USE_NATIVE_MUTEX_ON_MAC
+ sched_yield();
+#else
+ static const struct timespec ts_null = {0};
+ nanosleep(&ts_null, nullptr);
+#endif
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/rtc_base/synchronization/yield.h b/chromium/third_party/webrtc/rtc_base/synchronization/yield.h
new file mode 100644
index 00000000000..d4f5f99f375
--- /dev/null
+++ b/chromium/third_party/webrtc/rtc_base/synchronization/yield.h
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef RTC_BASE_SYNCHRONIZATION_YIELD_H_
+#define RTC_BASE_SYNCHRONIZATION_YIELD_H_
+
+namespace webrtc {
+
+// Request rescheduling of threads.
+void YieldCurrentThread();
+
+} // namespace webrtc
+
+#endif // RTC_BASE_SYNCHRONIZATION_YIELD_H_
diff --git a/chromium/third_party/webrtc/rtc_base/synchronization/yield_policy_unittest.cc b/chromium/third_party/webrtc/rtc_base/synchronization/yield_policy_unittest.cc
index e0c622510a1..0bf38f4537c 100644
--- a/chromium/third_party/webrtc/rtc_base/synchronization/yield_policy_unittest.cc
+++ b/chromium/third_party/webrtc/rtc_base/synchronization/yield_policy_unittest.cc
@@ -20,7 +20,7 @@ namespace rtc {
namespace {
class MockYieldHandler : public YieldInterface {
public:
- MOCK_METHOD0(YieldExecution, void());
+ MOCK_METHOD(void, YieldExecution, (), (override));
};
} // namespace
TEST(YieldPolicyTest, HandlerReceivesYieldSignalWhenSet) {
diff --git a/chromium/third_party/webrtc/rtc_base/system/BUILD.gn b/chromium/third_party/webrtc/rtc_base/system/BUILD.gn
index 79cb301038f..98867588ccc 100644
--- a/chromium/third_party/webrtc/rtc_base/system/BUILD.gn
+++ b/chromium/third_party/webrtc/rtc_base/system/BUILD.gn
@@ -75,10 +75,8 @@ rtc_source_set("thread_registry") {
deps = [ "..:rtc_base_approved" ]
if (is_android && !build_with_chromium) {
sources += [ "thread_registry.cc" ]
- deps += [
- "../../sdk/android:native_api_stacktrace",
- "//third_party/abseil-cpp/absl/base:core_headers",
- ]
+ deps += [ "../../sdk/android:native_api_stacktrace" ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers" ]
}
}
diff --git a/chromium/third_party/webrtc/rtc_base/task_utils/BUILD.gn b/chromium/third_party/webrtc/rtc_base/task_utils/BUILD.gn
index 1882cd9ee8d..54f9a048f02 100644
--- a/chromium/third_party/webrtc/rtc_base/task_utils/BUILD.gn
+++ b/chromium/third_party/webrtc/rtc_base/task_utils/BUILD.gn
@@ -21,9 +21,10 @@ rtc_library("repeating_task") {
"../../api/task_queue",
"../../api/units:time_delta",
"../../api/units:timestamp",
+ "../../system_wrappers:system_wrappers",
"../synchronization:sequence_checker",
- "//third_party/abseil-cpp/absl/memory",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
}
rtc_library("pending_task_safety_flag") {
@@ -81,7 +82,7 @@ if (rtc_include_tests) {
":to_queued_task",
"../../api/task_queue",
"../../test:test_support",
- "//third_party/abseil-cpp/absl/memory",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
}
}
diff --git a/chromium/third_party/webrtc/rtc_base/task_utils/repeating_task.cc b/chromium/third_party/webrtc/rtc_base/task_utils/repeating_task.cc
index 71911e6982c..574e6331f14 100644
--- a/chromium/third_party/webrtc/rtc_base/task_utils/repeating_task.cc
+++ b/chromium/third_party/webrtc/rtc_base/task_utils/repeating_task.cc
@@ -17,17 +17,18 @@
namespace webrtc {
namespace webrtc_repeating_task_impl {
+
RepeatingTaskBase::RepeatingTaskBase(TaskQueueBase* task_queue,
- TimeDelta first_delay)
+ TimeDelta first_delay,
+ Clock* clock)
: task_queue_(task_queue),
- next_run_time_(Timestamp::Micros(rtc::TimeMicros()) + first_delay) {
- sequence_checker_.Detach();
-}
+ clock_(clock),
+ next_run_time_(clock_->CurrentTime() + first_delay) {}
RepeatingTaskBase::~RepeatingTaskBase() = default;
bool RepeatingTaskBase::Run() {
- RTC_DCHECK_RUN_ON(&sequence_checker_);
+ RTC_DCHECK_RUN_ON(task_queue_);
// Return true to tell the TaskQueue to destruct this object.
if (next_run_time_.IsPlusInfinity())
return true;
@@ -40,7 +41,7 @@ bool RepeatingTaskBase::Run() {
return true;
RTC_DCHECK(delay.IsFinite());
- TimeDelta lost_time = Timestamp::Micros(rtc::TimeMicros()) - next_run_time_;
+ TimeDelta lost_time = clock_->CurrentTime() - next_run_time_;
next_run_time_ += delay;
delay -= lost_time;
delay = std::max(delay, TimeDelta::Zero());
@@ -53,7 +54,7 @@ bool RepeatingTaskBase::Run() {
}
void RepeatingTaskBase::Stop() {
- RTC_DCHECK_RUN_ON(&sequence_checker_);
+ RTC_DCHECK_RUN_ON(task_queue_);
RTC_DCHECK(next_run_time_.IsFinite());
next_run_time_ = Timestamp::PlusInfinity();
}
diff --git a/chromium/third_party/webrtc/rtc_base/task_utils/repeating_task.h b/chromium/third_party/webrtc/rtc_base/task_utils/repeating_task.h
index 75d03bfe5e6..487b7d19d46 100644
--- a/chromium/third_party/webrtc/rtc_base/task_utils/repeating_task.h
+++ b/chromium/third_party/webrtc/rtc_base/task_utils/repeating_task.h
@@ -19,7 +19,7 @@
#include "api/task_queue/task_queue_base.h"
#include "api/units/time_delta.h"
#include "api/units/timestamp.h"
-#include "rtc_base/synchronization/sequence_checker.h"
+#include "system_wrappers/include/clock.h"
namespace webrtc {
@@ -28,7 +28,9 @@ class RepeatingTaskHandle;
namespace webrtc_repeating_task_impl {
class RepeatingTaskBase : public QueuedTask {
public:
- RepeatingTaskBase(TaskQueueBase* task_queue, TimeDelta first_delay);
+ RepeatingTaskBase(TaskQueueBase* task_queue,
+ TimeDelta first_delay,
+ Clock* clock);
~RepeatingTaskBase() override;
void Stop();
@@ -39,16 +41,10 @@ class RepeatingTaskBase : public QueuedTask {
bool Run() final;
TaskQueueBase* const task_queue_;
+ Clock* const clock_;
// This is always finite, except for the special case where it's PlusInfinity
// to signal that the task should stop.
- Timestamp next_run_time_ RTC_GUARDED_BY(sequence_checker_);
- // We use a SequenceChecker to check for correct usage instead of using
- // RTC_DCHECK_RUN_ON(task_queue_). This is to work around a compatibility
- // issue with some TQ implementations such as rtc::Thread that don't
- // consistently set themselves as the 'current' TQ when running tasks.
- // The SequenceChecker detects those implementations differently but gives
- // the same effect as far as thread safety goes.
- SequenceChecker sequence_checker_;
+ Timestamp next_run_time_ RTC_GUARDED_BY(task_queue_);
};
// The template closure pattern is based on rtc::ClosureTask.
@@ -57,8 +53,9 @@ class RepeatingTaskImpl final : public RepeatingTaskBase {
public:
RepeatingTaskImpl(TaskQueueBase* task_queue,
TimeDelta first_delay,
- Closure&& closure)
- : RepeatingTaskBase(task_queue, first_delay),
+ Closure&& closure,
+ Clock* clock)
+ : RepeatingTaskBase(task_queue, first_delay, clock),
closure_(std::forward<Closure>(closure)) {
static_assert(
std::is_same<TimeDelta,
@@ -98,10 +95,11 @@ class RepeatingTaskHandle {
// repeated task is owned by the TaskQueue.
template <class Closure>
static RepeatingTaskHandle Start(TaskQueueBase* task_queue,
- Closure&& closure) {
+ Closure&& closure,
+ Clock* clock = Clock::GetRealTimeClock()) {
auto repeating_task = std::make_unique<
webrtc_repeating_task_impl::RepeatingTaskImpl<Closure>>(
- task_queue, TimeDelta::Zero(), std::forward<Closure>(closure));
+ task_queue, TimeDelta::Zero(), std::forward<Closure>(closure), clock);
auto* repeating_task_ptr = repeating_task.get();
task_queue->PostTask(std::move(repeating_task));
return RepeatingTaskHandle(repeating_task_ptr);
@@ -110,12 +108,14 @@ class RepeatingTaskHandle {
// DelayedStart is equivalent to Start except that the first invocation of the
// closure will be delayed by the given amount.
template <class Closure>
- static RepeatingTaskHandle DelayedStart(TaskQueueBase* task_queue,
- TimeDelta first_delay,
- Closure&& closure) {
+ static RepeatingTaskHandle DelayedStart(
+ TaskQueueBase* task_queue,
+ TimeDelta first_delay,
+ Closure&& closure,
+ Clock* clock = Clock::GetRealTimeClock()) {
auto repeating_task = std::make_unique<
webrtc_repeating_task_impl::RepeatingTaskImpl<Closure>>(
- task_queue, first_delay, std::forward<Closure>(closure));
+ task_queue, first_delay, std::forward<Closure>(closure), clock);
auto* repeating_task_ptr = repeating_task.get();
task_queue->PostDelayedTask(std::move(repeating_task), first_delay.ms());
return RepeatingTaskHandle(repeating_task_ptr);
diff --git a/chromium/third_party/webrtc/rtc_base/task_utils/repeating_task_unittest.cc b/chromium/third_party/webrtc/rtc_base/task_utils/repeating_task_unittest.cc
index 83efb29209a..2fb15d1e5a3 100644
--- a/chromium/third_party/webrtc/rtc_base/task_utils/repeating_task_unittest.cc
+++ b/chromium/third_party/webrtc/rtc_base/task_utils/repeating_task_unittest.cc
@@ -40,8 +40,23 @@ void Sleep(TimeDelta time_delta) {
class MockClosure {
public:
- MOCK_METHOD0(Call, TimeDelta());
- MOCK_METHOD0(Delete, void());
+ MOCK_METHOD(TimeDelta, Call, ());
+ MOCK_METHOD(void, Delete, ());
+};
+
+class MockTaskQueue : public TaskQueueBase {
+ public:
+ MockTaskQueue() : task_queue_setter_(this) {}
+
+ MOCK_METHOD(void, Delete, (), (override));
+ MOCK_METHOD(void, PostTask, (std::unique_ptr<QueuedTask> task), (override));
+ MOCK_METHOD(void,
+ PostDelayedTask,
+ (std::unique_ptr<QueuedTask> task, uint32_t milliseconds),
+ (override));
+
+ private:
+ CurrentTaskQueueSetter task_queue_setter_;
};
class MoveOnlyClosure {
@@ -228,4 +243,37 @@ TEST(RepeatingTaskTest, Example) {
// task queue destruction and running the desctructor closure.
}
+TEST(RepeatingTaskTest, ClockIntegration) {
+ std::unique_ptr<QueuedTask> delayed_task;
+ uint32_t expected_ms = 0;
+ SimulatedClock clock(Timestamp::Millis(0));
+
+ NiceMock<MockTaskQueue> task_queue;
+ ON_CALL(task_queue, PostDelayedTask)
+ .WillByDefault(
+ Invoke([&delayed_task, &expected_ms](std::unique_ptr<QueuedTask> task,
+ uint32_t milliseconds) {
+ EXPECT_EQ(milliseconds, expected_ms);
+ delayed_task = std::move(task);
+ }));
+
+ expected_ms = 100;
+ RepeatingTaskHandle handle = RepeatingTaskHandle::DelayedStart(
+ &task_queue, TimeDelta::Millis(100),
+ [&clock]() {
+ EXPECT_EQ(Timestamp::Millis(100), clock.CurrentTime());
+ // Simulate work happening for 10ms.
+ clock.AdvanceTimeMilliseconds(10);
+ return TimeDelta::Millis(100);
+ },
+ &clock);
+
+ clock.AdvanceTimeMilliseconds(100);
+ QueuedTask* task_to_run = delayed_task.release();
+ expected_ms = 90;
+ EXPECT_FALSE(task_to_run->Run());
+ EXPECT_NE(nullptr, delayed_task.get());
+ handle.Stop();
+}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/rtc_base/thread.cc b/chromium/third_party/webrtc/rtc_base/thread.cc
index 0fb2e813e03..566edff13fd 100644
--- a/chromium/third_party/webrtc/rtc_base/thread.cc
+++ b/chromium/third_party/webrtc/rtc_base/thread.cc
@@ -32,8 +32,10 @@
#include "rtc_base/atomic_ops.h"
#include "rtc_base/checks.h"
#include "rtc_base/critical_section.h"
+#include "rtc_base/event.h"
#include "rtc_base/logging.h"
#include "rtc_base/null_socket_server.h"
+#include "rtc_base/synchronization/sequence_checker.h"
#include "rtc_base/task_utils/to_queued_task.h"
#include "rtc_base/time_utils.h"
#include "rtc_base/trace_event.h"
@@ -72,7 +74,7 @@ const int kSlowDispatchLoggingThreshold = 50; // 50 ms
class MessageHandlerWithTask final : public MessageHandler {
public:
- MessageHandlerWithTask() = default;
+ MessageHandlerWithTask() : MessageHandler(false) {}
void OnMessage(Message* msg) override {
static_cast<rtc_thread_internal::MessageLikeTask*>(msg->pdata)->Run();
@@ -163,13 +165,16 @@ void ThreadManager::RemoveFromSendGraph(Thread* thread) {
void ThreadManager::RegisterSendAndCheckForCycles(Thread* source,
Thread* target) {
+ RTC_DCHECK(source);
+ RTC_DCHECK(target);
+
CritScope cs(&crit_);
std::deque<Thread*> all_targets({target});
// We check the pre-existing who-sends-to-who graph for any path from target
// to source. This loop is guaranteed to terminate because per the send graph
// invariant, there are no cycles in the graph.
- for (auto it = all_targets.begin(); it != all_targets.end(); ++it) {
- const auto& targets = send_graph_[*it];
+ for (size_t i = 0; i < all_targets.size(); i++) {
+ const auto& targets = send_graph_[all_targets[i]];
all_targets.insert(all_targets.end(), targets.begin(), targets.end());
}
RTC_CHECK_EQ(absl::c_count(all_targets, source), 0)
@@ -296,6 +301,21 @@ void ThreadManager::SetCurrentThread(Thread* thread) {
RTC_DLOG(LS_ERROR) << "SetCurrentThread: Overwriting an existing value?";
}
#endif // RTC_DLOG_IS_ON
+
+ if (thread) {
+ thread->EnsureIsCurrentTaskQueue();
+ } else {
+ Thread* current = CurrentThread();
+ if (current) {
+ // The current thread is being cleared, e.g. as a result of
+ // UnwrapCurrent() being called or when a thread is being stopped
+ // (see PreRun()). This signals that the Thread instance is being detached
+ // from the thread, which also means that TaskQueue::Current() must not
+ // return a pointer to the Thread instance.
+ current->ClearCurrentTaskQueue();
+ }
+ }
+
SetCurrentThreadInternal(thread);
}
@@ -824,7 +844,6 @@ void* Thread::PreRun(void* pv) {
Thread* thread = static_cast<Thread*>(pv);
ThreadManager::Instance()->SetCurrentThread(thread);
rtc::SetCurrentThreadName(thread->name_.c_str());
- CurrentTaskQueueSetter set_current_task_queue(thread);
#if defined(WEBRTC_MAC)
ScopedAutoReleasePool pool;
#endif
@@ -875,45 +894,62 @@ void Thread::Send(const Location& posted_from,
AssertBlockingIsAllowedOnCurrentThread();
- AutoThread thread;
Thread* current_thread = Thread::Current();
- RTC_DCHECK(current_thread != nullptr); // AutoThread ensures this
+
#if RTC_DCHECK_IS_ON
- ThreadManager::Instance()->RegisterSendAndCheckForCycles(current_thread,
- this);
+ if (current_thread) {
+ RTC_DCHECK(current_thread->IsInvokeToThreadAllowed(this));
+ ThreadManager::Instance()->RegisterSendAndCheckForCycles(current_thread,
+ this);
+ }
#endif
+
+ // Perhaps down the line we can get rid of this workaround and always require
+ // current_thread to be valid when Send() is called.
+ std::unique_ptr<rtc::Event> done_event;
+ if (!current_thread)
+ done_event.reset(new rtc::Event());
+
bool ready = false;
- PostTask(
- webrtc::ToQueuedTask([msg]() mutable { msg.phandler->OnMessage(&msg); },
- [this, &ready, current_thread] {
- CritScope cs(&crit_);
- ready = true;
- current_thread->socketserver()->WakeUp();
- }));
-
- bool waited = false;
- crit_.Enter();
- while (!ready) {
- crit_.Leave();
- current_thread->socketserver()->Wait(kForever, false);
- waited = true;
+ PostTask(webrtc::ToQueuedTask(
+ [&msg]() mutable { msg.phandler->OnMessage(&msg); },
+ [this, &ready, current_thread, done = done_event.get()] {
+ if (current_thread) {
+ CritScope cs(&crit_);
+ ready = true;
+ current_thread->socketserver()->WakeUp();
+ } else {
+ done->Set();
+ }
+ }));
+
+ if (current_thread) {
+ bool waited = false;
crit_.Enter();
- }
- crit_.Leave();
-
- // Our Wait loop above may have consumed some WakeUp events for this
- // Thread, that weren't relevant to this Send. Losing these WakeUps can
- // cause problems for some SocketServers.
- //
- // Concrete example:
- // Win32SocketServer on thread A calls Send on thread B. While processing the
- // message, thread B Posts a message to A. We consume the wakeup for that
- // Post while waiting for the Send to complete, which means that when we exit
- // this loop, we need to issue another WakeUp, or else the Posted message
- // won't be processed in a timely manner.
-
- if (waited) {
- current_thread->socketserver()->WakeUp();
+ while (!ready) {
+ crit_.Leave();
+ current_thread->socketserver()->Wait(kForever, false);
+ waited = true;
+ crit_.Enter();
+ }
+ crit_.Leave();
+
+ // Our Wait loop above may have consumed some WakeUp events for this
+ // Thread, that weren't relevant to this Send. Losing these WakeUps can
+ // cause problems for some SocketServers.
+ //
+ // Concrete example:
+ // Win32SocketServer on thread A calls Send on thread B. While processing
+ // the message, thread B Posts a message to A. We consume the wakeup for
+ // that Post while waiting for the Send to complete, which means that when
+ // we exit this loop, we need to issue another WakeUp, or else the Posted
+ // message won't be processed in a timely manner.
+
+ if (waited) {
+ current_thread->socketserver()->WakeUp();
+ }
+ } else {
+ done_event->Wait(rtc::Event::kForever);
}
}
@@ -925,7 +961,7 @@ void Thread::InvokeInternal(const Location& posted_from,
class FunctorMessageHandler : public MessageHandler {
public:
explicit FunctorMessageHandler(rtc::FunctionView<void()> functor)
- : functor_(functor) {}
+ : MessageHandler(false), functor_(functor) {}
void OnMessage(Message* msg) override { functor_(); }
private:
@@ -935,6 +971,17 @@ void Thread::InvokeInternal(const Location& posted_from,
Send(posted_from, &handler);
}
+// Called by the ThreadManager when being set as the current thread.
+void Thread::EnsureIsCurrentTaskQueue() {
+ task_queue_registration_ =
+ std::make_unique<TaskQueueBase::CurrentTaskQueueSetter>(this);
+}
+
+// Called by the ThreadManager when being set as the current thread.
+void Thread::ClearCurrentTaskQueue() {
+ task_queue_registration_.reset();
+}
+
void Thread::QueuedTaskHandler::OnMessage(Message* msg) {
RTC_DCHECK(msg);
auto* data = static_cast<ScopedMessageData<webrtc::QueuedTask>*>(msg->pdata);
@@ -949,6 +996,50 @@ void Thread::QueuedTaskHandler::OnMessage(Message* msg) {
task.release();
}
+void Thread::AllowInvokesToThread(Thread* thread) {
+#if (!defined(NDEBUG) || defined(DCHECK_ALWAYS_ON))
+ if (!IsCurrent()) {
+ PostTask(webrtc::ToQueuedTask(
+ [thread, this]() { AllowInvokesToThread(thread); }));
+ return;
+ }
+ RTC_DCHECK_RUN_ON(this);
+ allowed_threads_.push_back(thread);
+ invoke_policy_enabled_ = true;
+#endif
+}
+
+void Thread::DisallowAllInvokes() {
+#if (!defined(NDEBUG) || defined(DCHECK_ALWAYS_ON))
+ if (!IsCurrent()) {
+ PostTask(webrtc::ToQueuedTask([this]() { DisallowAllInvokes(); }));
+ return;
+ }
+ RTC_DCHECK_RUN_ON(this);
+ allowed_threads_.clear();
+ invoke_policy_enabled_ = true;
+#endif
+}
+
+// Returns true if no policies added or if there is at least one policy
+// that permits invocation to |target| thread.
+bool Thread::IsInvokeToThreadAllowed(rtc::Thread* target) {
+#if (!defined(NDEBUG) || defined(DCHECK_ALWAYS_ON))
+ RTC_DCHECK_RUN_ON(this);
+ if (!invoke_policy_enabled_) {
+ return true;
+ }
+ for (const auto* thread : allowed_threads_) {
+ if (thread == target) {
+ return true;
+ }
+ }
+ return false;
+#else
+ return true;
+#endif
+}
+
void Thread::PostTask(std::unique_ptr<webrtc::QueuedTask> task) {
// Though Post takes MessageData by raw pointer (last parameter), it still
// takes it with ownership.
diff --git a/chromium/third_party/webrtc/rtc_base/thread.h b/chromium/third_party/webrtc/rtc_base/thread.h
index 74aab623c87..341f94285bc 100644
--- a/chromium/third_party/webrtc/rtc_base/thread.h
+++ b/chromium/third_party/webrtc/rtc_base/thread.h
@@ -338,6 +338,19 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase {
InvokeInternal(posted_from, functor);
}
+ // Allows invoke to specified |thread|. Thread never will be dereferenced and
+ // will be used only for reference-based comparison, so instance can be safely
+ // deleted. If NDEBUG is defined and DCHECK_ALWAYS_ON is undefined do nothing.
+ void AllowInvokesToThread(Thread* thread);
+
+ // If NDEBUG is defined and DCHECK_ALWAYS_ON is undefined do nothing.
+ void DisallowAllInvokes();
+ // Returns true if |target| was allowed by AllowInvokesToThread() or if no
+ // calls were made to AllowInvokesToThread and DisallowAllInvokes. Otherwise
+ // returns false.
+ // If NDEBUG is defined and DCHECK_ALWAYS_ON is undefined always returns true.
+ bool IsInvokeToThreadAllowed(rtc::Thread* target);
+
// Posts a task to invoke the functor on |this| thread asynchronously, i.e.
// without blocking the thread that invoked PostTask(). Ownership of |functor|
// is passed and (usually, see below) destroyed on |this| thread after it is
@@ -524,6 +537,7 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase {
private:
class QueuedTaskHandler final : public MessageHandler {
public:
+ QueuedTaskHandler() : MessageHandler(false) {}
void OnMessage(Message* msg) override;
};
@@ -551,6 +565,12 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase {
void InvokeInternal(const Location& posted_from,
rtc::FunctionView<void()> functor);
+ // Called by the ThreadManager when being set as the current thread.
+ void EnsureIsCurrentTaskQueue();
+
+ // Called by the ThreadManager when being unset as the current thread.
+ void ClearCurrentTaskQueue();
+
// Returns a static-lifetime MessageHandler which runs message with
// MessageLikeTask payload data.
static MessageHandler* GetPostTaskMessageHandler();
@@ -560,6 +580,10 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase {
MessageList messages_ RTC_GUARDED_BY(crit_);
PriorityQueue delayed_messages_ RTC_GUARDED_BY(crit_);
uint32_t delayed_next_num_ RTC_GUARDED_BY(crit_);
+#if (!defined(NDEBUG) || defined(DCHECK_ALWAYS_ON))
+ std::vector<Thread*> allowed_threads_ RTC_GUARDED_BY(this);
+ bool invoke_policy_enabled_ RTC_GUARDED_BY(this) = false;
+#endif
CriticalSection crit_;
bool fInitialized_;
bool fDestroyed_;
@@ -595,6 +619,8 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase {
// Runs webrtc::QueuedTask posted to the Thread.
QueuedTaskHandler queued_task_handler_;
+ std::unique_ptr<TaskQueueBase::CurrentTaskQueueSetter>
+ task_queue_registration_;
friend class ThreadManager;
@@ -604,7 +630,9 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase {
// AutoThread automatically installs itself at construction
// uninstalls at destruction, if a Thread object is
// _not already_ associated with the current OS thread.
-
+//
+// NOTE: *** This class should only be used by tests ***
+//
class AutoThread : public Thread {
public:
AutoThread();
diff --git a/chromium/third_party/webrtc/rtc_base/thread_unittest.cc b/chromium/third_party/webrtc/rtc_base/thread_unittest.cc
index d53a3879141..193819c6e40 100644
--- a/chromium/third_party/webrtc/rtc_base/thread_unittest.cc
+++ b/chromium/third_party/webrtc/rtc_base/thread_unittest.cc
@@ -94,7 +94,7 @@ class SocketClient : public TestGenerator, public sigslot::has_slots<> {
};
// Receives messages and sends on a socket.
-class MessageClient : public MessageHandler, public TestGenerator {
+class MessageClient : public MessageHandlerAutoCleanup, public TestGenerator {
public:
MessageClient(Thread* pth, Socket* socket) : socket_(socket) {}
@@ -516,7 +516,7 @@ TEST_F(ThreadQueueTest, DisposeNotLocked) {
EXPECT_FALSE(was_locked);
}
-class DeletedMessageHandler : public MessageHandler {
+class DeletedMessageHandler : public MessageHandlerAutoCleanup {
public:
explicit DeletedMessageHandler(bool* deleted) : deleted_(deleted) {}
~DeletedMessageHandler() override { *deleted_ = true; }
@@ -606,12 +606,13 @@ TEST(ThreadManager, ProcessAllMessageQueuesWithClearedQueue) {
ThreadManager::ProcessAllMessageQueuesForTesting();
}
-class RefCountedHandler : public MessageHandler, public rtc::RefCountInterface {
+class RefCountedHandler : public MessageHandlerAutoCleanup,
+ public rtc::RefCountInterface {
public:
void OnMessage(Message* msg) override {}
};
-class EmptyHandler : public MessageHandler {
+class EmptyHandler : public MessageHandlerAutoCleanup {
public:
void OnMessage(Message* msg) override {}
};
@@ -1148,6 +1149,18 @@ TEST(ThreadPostDelayedTaskTest, InvokesInDelayOrder) {
EXPECT_TRUE(fourth.Wait(0));
}
+TEST(ThreadPostDelayedTaskTest, IsCurrentTaskQueue) {
+ auto current_tq = webrtc::TaskQueueBase::Current();
+ {
+ std::unique_ptr<rtc::Thread> thread(rtc::Thread::Create());
+ thread->WrapCurrent();
+ EXPECT_EQ(webrtc::TaskQueueBase::Current(),
+ static_cast<webrtc::TaskQueueBase*>(thread.get()));
+ thread->UnwrapCurrent();
+ }
+ EXPECT_EQ(webrtc::TaskQueueBase::Current(), current_tq);
+}
+
class ThreadFactory : public webrtc::TaskQueueFactory {
public:
std::unique_ptr<webrtc::TaskQueueBase, webrtc::TaskQueueDeleter>
diff --git a/chromium/third_party/webrtc/rtc_base/virtual_socket_server.h b/chromium/third_party/webrtc/rtc_base/virtual_socket_server.h
index f45fabf0af1..5ad66a8d34e 100644
--- a/chromium/third_party/webrtc/rtc_base/virtual_socket_server.h
+++ b/chromium/third_party/webrtc/rtc_base/virtual_socket_server.h
@@ -304,7 +304,7 @@ class VirtualSocketServer : public SocketServer, public sigslot::has_slots<> {
// Implements the socket interface using the virtual network. Packets are
// passed as messages using the message queue of the socket server.
class VirtualSocket : public AsyncSocket,
- public MessageHandler,
+ public MessageHandlerAutoCleanup,
public sigslot::has_slots<> {
public:
VirtualSocket(VirtualSocketServer* server, int family, int type, bool async);
diff --git a/chromium/third_party/webrtc/rtc_base/virtual_socket_unittest.cc b/chromium/third_party/webrtc/rtc_base/virtual_socket_unittest.cc
index b274b40857d..78003f5cb24 100644
--- a/chromium/third_party/webrtc/rtc_base/virtual_socket_unittest.cc
+++ b/chromium/third_party/webrtc/rtc_base/virtual_socket_unittest.cc
@@ -53,7 +53,7 @@ using webrtc::testing::SSE_WRITE;
using webrtc::testing::StreamSink;
// Sends at a constant rate but with random packet sizes.
-struct Sender : public MessageHandler {
+struct Sender : public MessageHandlerAutoCleanup {
Sender(Thread* th, AsyncSocket* s, uint32_t rt)
: thread(th),
socket(std::make_unique<AsyncUDPSocket>(s)),
@@ -99,7 +99,8 @@ struct Sender : public MessageHandler {
char dummy[4096];
};
-struct Receiver : public MessageHandler, public sigslot::has_slots<> {
+struct Receiver : public MessageHandlerAutoCleanup,
+ public sigslot::has_slots<> {
Receiver(Thread* th, AsyncSocket* s, uint32_t bw)
: thread(th),
socket(std::make_unique<AsyncUDPSocket>(s)),
diff --git a/chromium/third_party/webrtc/rtc_tools/BUILD.gn b/chromium/third_party/webrtc/rtc_tools/BUILD.gn
index f293853f6ef..f33d96eff12 100644
--- a/chromium/third_party/webrtc/rtc_tools/BUILD.gn
+++ b/chromium/third_party/webrtc/rtc_tools/BUILD.gn
@@ -17,12 +17,12 @@ group("rtc_tools") {
deps = [
":frame_analyzer",
":video_file_reader",
- ":video_quality_analysis",
]
if (!build_with_chromium) {
deps += [
":psnr_ssim_analyzer",
":rgba_to_i420_converter",
+ ":video_quality_analysis",
]
if (rtc_enable_protobuf) {
deps += [ ":chart_proto" ]
@@ -60,6 +60,8 @@ rtc_library("video_file_reader") {
"../api/video:video_rtp_headers",
"../rtc_base:checks",
"../rtc_base:rtc_base_approved",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -77,6 +79,8 @@ rtc_library("video_file_writer") {
"../api/video:video_frame_i420",
"../api/video:video_rtp_headers",
"../rtc_base:rtc_base_approved",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -107,9 +111,9 @@ rtc_library("video_quality_analysis") {
"../rtc_base:checks",
"../rtc_base:rtc_base_approved",
"../test:perf_test",
- "//third_party/abseil-cpp/absl/types:optional",
"//third_party/libyuv",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_executable("frame_analyzer") {
@@ -319,8 +323,14 @@ if (!build_with_chromium) {
rtc_library("event_log_visualizer_utils") {
visibility = [ "*" ]
sources = [
+ "rtc_event_log_visualizer/alerts.cc",
+ "rtc_event_log_visualizer/alerts.h",
+ "rtc_event_log_visualizer/analyze_audio.cc",
+ "rtc_event_log_visualizer/analyze_audio.h",
"rtc_event_log_visualizer/analyzer.cc",
"rtc_event_log_visualizer/analyzer.h",
+ "rtc_event_log_visualizer/analyzer_common.cc",
+ "rtc_event_log_visualizer/analyzer_common.h",
"rtc_event_log_visualizer/log_simulation.cc",
"rtc_event_log_visualizer/log_simulation.h",
"rtc_event_log_visualizer/plot_base.cc",
@@ -329,11 +339,11 @@ if (!build_with_chromium) {
"rtc_event_log_visualizer/plot_protobuf.h",
"rtc_event_log_visualizer/plot_python.cc",
"rtc_event_log_visualizer/plot_python.h",
- "rtc_event_log_visualizer/triage_notifications.h",
]
deps = [
":chart_proto",
"../api:function_view",
+ "../rtc_base:deprecation",
"../rtc_base:ignore_wundef",
# TODO(kwiberg): Remove this dependency.
@@ -360,8 +370,12 @@ if (!build_with_chromium) {
"../rtc_base:rtc_base_approved",
"../rtc_base:rtc_numerics",
"../rtc_base:stringutils",
+ "../test:explicit_key_value_config",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
]
}
}
diff --git a/chromium/third_party/webrtc/rtc_tools/network_tester/BUILD.gn b/chromium/third_party/webrtc/rtc_tools/network_tester/BUILD.gn
index 47e600aa856..1156bf5dd8f 100644
--- a/chromium/third_party/webrtc/rtc_tools/network_tester/BUILD.gn
+++ b/chromium/third_party/webrtc/rtc_tools/network_tester/BUILD.gn
@@ -50,8 +50,8 @@ if (rtc_enable_protobuf) {
"../../rtc_base:rtc_task_queue",
"../../rtc_base/synchronization:sequence_checker",
"../../rtc_base/third_party/sigslot",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
network_tester_unittests_resources = [
@@ -115,7 +115,7 @@ if (is_android) {
rtc_android_library("NetworkTesterMobile_javalib") {
testonly = true
- android_manifest_for_lint = "androidapp/AndroidManifest.xml"
+ android_manifest = "androidapp/AndroidManifest.xml"
sources = [
"androidapp/src/com/google/media/networktester/MainActivity.java",
@@ -138,11 +138,11 @@ if (is_android) {
"androidapp/res/mipmap-xhdpi/ic_launcher.png",
"androidapp/res/mipmap-xxhdpi/ic_launcher.png",
"androidapp/res/mipmap-xxxhdpi/ic_launcher.png",
+ "androidapp/res/values-v17/styles.xml",
+ "androidapp/res/values-w820dp/dimens.xml",
"androidapp/res/values/colors.xml",
"androidapp/res/values/dimens.xml",
"androidapp/res/values/strings.xml",
- "androidapp/res/values-v17/styles.xml",
- "androidapp/res/values-w820dp/dimens.xml",
]
# Needed for Bazel converter.
diff --git a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/alerts.cc b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/alerts.cc
new file mode 100644
index 00000000000..86372de4cfd
--- /dev/null
+++ b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/alerts.cc
@@ -0,0 +1,227 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtc_tools/rtc_event_log_visualizer/alerts.h"
+
+#include <stdio.h>
+
+#include <algorithm>
+#include <limits>
+#include <map>
+#include <string>
+
+#include "logging/rtc_event_log/rtc_event_processor.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/format_macros.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/numerics/sequence_number_util.h"
+#include "rtc_base/strings/string_builder.h"
+
+namespace webrtc {
+
+void TriageHelper::Print(FILE* file) {
+ fprintf(file, "========== TRIAGE NOTIFICATIONS ==========\n");
+ for (const auto& alert : triage_alerts_) {
+ fprintf(file, "%d %s. First occurrence at %3.3lf\n", alert.second.count,
+ alert.second.explanation.c_str(), alert.second.first_occurrence);
+ }
+ fprintf(file, "========== END TRIAGE NOTIFICATIONS ==========\n");
+}
+
+void TriageHelper::AnalyzeStreamGaps(const ParsedRtcEventLog& parsed_log,
+ PacketDirection direction) {
+ // With 100 packets/s (~800kbps), false positives would require 10 s without
+ // data.
+ constexpr int64_t kMaxSeqNumJump = 1000;
+ // With a 90 kHz clock, false positives would require 10 s without data.
+ constexpr int64_t kTicksPerMillisec = 90;
+ constexpr int64_t kCaptureTimeGraceMs = 10000;
+
+ std::string seq_num_explanation =
+ direction == kIncomingPacket
+ ? "Incoming RTP sequence number jumps more than 1000. Counter may "
+ "have been reset or rewritten incorrectly in a group call."
+ : "Outgoing RTP sequence number jumps more than 1000. Counter may "
+ "have been reset.";
+ std::string capture_time_explanation =
+ direction == kIncomingPacket ? "Incoming capture time jumps more than "
+ "10s. Clock might have been reset."
+ : "Outgoing capture time jumps more than "
+ "10s. Clock might have been reset.";
+ TriageAlertType seq_num_alert = direction == kIncomingPacket
+ ? TriageAlertType::kIncomingSeqNumJump
+ : TriageAlertType::kOutgoingSeqNumJump;
+ TriageAlertType capture_time_alert =
+ direction == kIncomingPacket ? TriageAlertType::kIncomingCaptureTimeJump
+ : TriageAlertType::kOutgoingCaptureTimeJump;
+
+ const int64_t segment_end_us = parsed_log.first_log_segment().stop_time_us();
+
+ // Check for gaps in sequence numbers and capture timestamps.
+ for (const auto& stream : parsed_log.rtp_packets_by_ssrc(direction)) {
+ if (IsRtxSsrc(parsed_log, direction, stream.ssrc)) {
+ continue;
+ }
+ auto packets = stream.packet_view;
+ if (packets.empty()) {
+ continue;
+ }
+ SeqNumUnwrapper<uint16_t> seq_num_unwrapper;
+ int64_t last_seq_num =
+ seq_num_unwrapper.Unwrap(packets[0].header.sequenceNumber);
+ SeqNumUnwrapper<uint32_t> capture_time_unwrapper;
+ int64_t last_capture_time =
+ capture_time_unwrapper.Unwrap(packets[0].header.timestamp);
+ int64_t last_log_time_ms = packets[0].log_time_ms();
+ for (const auto& packet : packets) {
+ if (packet.log_time_us() > segment_end_us) {
+ // Only process the first (LOG_START, LOG_END) segment.
+ break;
+ }
+
+ int64_t seq_num = seq_num_unwrapper.Unwrap(packet.header.sequenceNumber);
+ if (std::abs(seq_num - last_seq_num) > kMaxSeqNumJump) {
+ Alert(seq_num_alert, config_.GetCallTimeSec(packet.log_time_us()),
+ seq_num_explanation);
+ }
+ last_seq_num = seq_num;
+
+ int64_t capture_time =
+ capture_time_unwrapper.Unwrap(packet.header.timestamp);
+ if (std::abs(capture_time - last_capture_time) >
+ kTicksPerMillisec *
+ (kCaptureTimeGraceMs + packet.log_time_ms() - last_log_time_ms)) {
+ Alert(capture_time_alert, config_.GetCallTimeSec(packet.log_time_us()),
+ capture_time_explanation);
+ }
+ last_capture_time = capture_time;
+ }
+ }
+}
+
+void TriageHelper::AnalyzeTransmissionGaps(const ParsedRtcEventLog& parsed_log,
+ PacketDirection direction) {
+ constexpr int64_t kMaxRtpTransmissionGap = 500000;
+ constexpr int64_t kMaxRtcpTransmissionGap = 3000000;
+ std::string rtp_explanation =
+ direction == kIncomingPacket
+ ? "No RTP packets received for more than 500ms. This indicates a "
+ "network problem. Temporary video freezes and choppy or robotic "
+ "audio is unavoidable. Unnecessary BWE drops is a known issue."
+ : "No RTP packets sent for more than 500 ms. This might be an issue "
+ "with the pacer.";
+ std::string rtcp_explanation =
+ direction == kIncomingPacket
+ ? "No RTCP packets received for more than 3 s. Could be a longer "
+ "connection outage"
+ : "No RTCP packets sent for more than 3 s. This is most likely a "
+ "bug.";
+ TriageAlertType rtp_alert = direction == kIncomingPacket
+ ? TriageAlertType::kIncomingRtpGap
+ : TriageAlertType::kOutgoingRtpGap;
+ TriageAlertType rtcp_alert = direction == kIncomingPacket
+ ? TriageAlertType::kIncomingRtcpGap
+ : TriageAlertType::kOutgoingRtcpGap;
+
+ const int64_t segment_end_us = parsed_log.first_log_segment().stop_time_us();
+
+ // TODO(terelius): The parser could provide a list of all packets, ordered
+ // by time, for each direction.
+ std::multimap<int64_t, const LoggedRtpPacket*> rtp_in_direction;
+ for (const auto& stream : parsed_log.rtp_packets_by_ssrc(direction)) {
+ for (const LoggedRtpPacket& rtp_packet : stream.packet_view)
+ rtp_in_direction.emplace(rtp_packet.log_time_us(), &rtp_packet);
+ }
+ absl::optional<int64_t> last_rtp_time;
+ for (const auto& kv : rtp_in_direction) {
+ int64_t timestamp = kv.first;
+ if (timestamp > segment_end_us) {
+ // Only process the first (LOG_START, LOG_END) segment.
+ break;
+ }
+ int64_t duration = timestamp - last_rtp_time.value_or(0);
+ if (last_rtp_time.has_value() && duration > kMaxRtpTransmissionGap) {
+ // No packet sent/received for more than 500 ms.
+ Alert(rtp_alert, config_.GetCallTimeSec(timestamp), rtp_explanation);
+ }
+ last_rtp_time.emplace(timestamp);
+ }
+
+ absl::optional<int64_t> last_rtcp_time;
+ if (direction == kIncomingPacket) {
+ for (const auto& rtcp : parsed_log.incoming_rtcp_packets()) {
+ if (rtcp.log_time_us() > segment_end_us) {
+ // Only process the first (LOG_START, LOG_END) segment.
+ break;
+ }
+ int64_t duration = rtcp.log_time_us() - last_rtcp_time.value_or(0);
+ if (last_rtcp_time.has_value() && duration > kMaxRtcpTransmissionGap) {
+ // No feedback sent/received for more than 2000 ms.
+ Alert(rtcp_alert, config_.GetCallTimeSec(rtcp.log_time_us()),
+ rtcp_explanation);
+ }
+ last_rtcp_time.emplace(rtcp.log_time_us());
+ }
+ } else {
+ for (const auto& rtcp : parsed_log.outgoing_rtcp_packets()) {
+ if (rtcp.log_time_us() > segment_end_us) {
+ // Only process the first (LOG_START, LOG_END) segment.
+ break;
+ }
+ int64_t duration = rtcp.log_time_us() - last_rtcp_time.value_or(0);
+ if (last_rtcp_time.has_value() && duration > kMaxRtcpTransmissionGap) {
+ // No feedback sent/received for more than 2000 ms.
+ Alert(rtcp_alert, config_.GetCallTimeSec(rtcp.log_time_us()),
+ rtcp_explanation);
+ }
+ last_rtcp_time.emplace(rtcp.log_time_us());
+ }
+ }
+}
+
+// TODO(terelius): Notifications could possibly be generated by the same code
+// that produces the graphs. There is some code duplication that could be
+// avoided, but that might be solved anyway when we move functionality from the
+// analyzer to the parser.
+void TriageHelper::AnalyzeLog(const ParsedRtcEventLog& parsed_log) {
+ AnalyzeStreamGaps(parsed_log, kIncomingPacket);
+ AnalyzeStreamGaps(parsed_log, kOutgoingPacket);
+ AnalyzeTransmissionGaps(parsed_log, kIncomingPacket);
+ AnalyzeTransmissionGaps(parsed_log, kOutgoingPacket);
+
+ const int64_t segment_end_us = parsed_log.first_log_segment().stop_time_us();
+
+ int64_t first_occurrence = parsed_log.last_timestamp();
+ constexpr double kMaxLossFraction = 0.05;
+ // Loss feedback
+ int64_t total_lost_packets = 0;
+ int64_t total_expected_packets = 0;
+ for (auto& bwe_update : parsed_log.bwe_loss_updates()) {
+ if (bwe_update.log_time_us() > segment_end_us) {
+ // Only process the first (LOG_START, LOG_END) segment.
+ break;
+ }
+ int64_t lost_packets = static_cast<double>(bwe_update.fraction_lost) / 255 *
+ bwe_update.expected_packets;
+ total_lost_packets += lost_packets;
+ total_expected_packets += bwe_update.expected_packets;
+ if (bwe_update.fraction_lost >= 255 * kMaxLossFraction) {
+ first_occurrence = std::min(first_occurrence, bwe_update.log_time_us());
+ }
+ }
+ double avg_outgoing_loss =
+ static_cast<double>(total_lost_packets) / total_expected_packets;
+ if (avg_outgoing_loss > kMaxLossFraction) {
+ Alert(TriageAlertType::kOutgoingHighLoss, first_occurrence,
+ "More than 5% of outgoing packets lost.");
+ }
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/alerts.h b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/alerts.h
new file mode 100644
index 00000000000..7bd9f052706
--- /dev/null
+++ b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/alerts.h
@@ -0,0 +1,86 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ALERTS_H_
+#define RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ALERTS_H_
+
+#include <stdio.h>
+
+#include <map>
+#include <string>
+#include <utility>
+
+#include "absl/strings/string_view.h"
+#include "logging/rtc_event_log/rtc_event_log_parser.h"
+#include "rtc_base/constructor_magic.h"
+#include "rtc_tools/rtc_event_log_visualizer/analyzer_common.h"
+
+namespace webrtc {
+
+enum class TriageAlertType {
+ kUnknown = 0,
+ kIncomingRtpGap,
+ kOutgoingRtpGap,
+ kIncomingRtcpGap,
+ kOutgoingRtcpGap,
+ kIncomingSeqNumJump,
+ kOutgoingSeqNumJump,
+ kIncomingCaptureTimeJump,
+ kOutgoingCaptureTimeJump,
+ kOutgoingHighLoss,
+ kLast,
+};
+
+struct TriageAlert {
+ TriageAlertType type = TriageAlertType::kUnknown;
+ int count = 0;
+ float first_occurrence = -1;
+ std::string explanation;
+};
+
+class TriageHelper {
+ public:
+ explicit TriageHelper(const AnalyzerConfig& config) : config_(config) {}
+
+ void AnalyzeLog(const ParsedRtcEventLog& parsed_log);
+
+ void AnalyzeStreamGaps(const ParsedRtcEventLog& parsed_log,
+ PacketDirection direction);
+ void AnalyzeTransmissionGaps(const ParsedRtcEventLog& parsed_log,
+ PacketDirection direction);
+ void Print(FILE* file);
+
+ private:
+ AnalyzerConfig config_;
+ std::map<TriageAlertType, TriageAlert> triage_alerts_;
+
+ void Alert(TriageAlertType type,
+ float time_seconds,
+ absl::string_view explanation) {
+ std::map<TriageAlertType, TriageAlert>::iterator it =
+ triage_alerts_.find(type);
+
+ if (it == triage_alerts_.end()) {
+ TriageAlert alert;
+ alert.type = type;
+ alert.first_occurrence = time_seconds;
+ alert.count = 1;
+ alert.explanation = std::string(explanation);
+ triage_alerts_.insert(std::make_pair(type, alert));
+ } else {
+ it->second.count += 1;
+ }
+ }
+ RTC_DISALLOW_COPY_AND_ASSIGN(TriageHelper);
+};
+
+} // namespace webrtc
+
+#endif // RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ALERTS_H_
diff --git a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyze_audio.cc b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyze_audio.cc
new file mode 100644
index 00000000000..becc0044abb
--- /dev/null
+++ b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyze_audio.cc
@@ -0,0 +1,503 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtc_tools/rtc_event_log_visualizer/analyze_audio.h"
+
+#include <memory>
+#include <set>
+#include <utility>
+#include <vector>
+
+#include "modules/audio_coding/neteq/tools/audio_sink.h"
+#include "modules/audio_coding/neteq/tools/fake_decode_from_file.h"
+#include "modules/audio_coding/neteq/tools/neteq_delay_analyzer.h"
+#include "modules/audio_coding/neteq/tools/neteq_replacement_input.h"
+#include "modules/audio_coding/neteq/tools/neteq_test.h"
+#include "modules/audio_coding/neteq/tools/resample_input_audio_file.h"
+#include "rtc_base/ref_counted_object.h"
+
+namespace webrtc {
+
+void CreateAudioEncoderTargetBitrateGraph(const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ Plot* plot) {
+ TimeSeries time_series("Audio encoder target bitrate", LineStyle::kLine,
+ PointStyle::kHighlight);
+ auto GetAnaBitrateBps = [](const LoggedAudioNetworkAdaptationEvent& ana_event)
+ -> absl::optional<float> {
+ if (ana_event.config.bitrate_bps)
+ return absl::optional<float>(
+ static_cast<float>(*ana_event.config.bitrate_bps));
+ return absl::nullopt;
+ };
+ auto ToCallTime = [config](const LoggedAudioNetworkAdaptationEvent& packet) {
+ return config.GetCallTimeSec(packet.log_time_us());
+ };
+ ProcessPoints<LoggedAudioNetworkAdaptationEvent>(
+ ToCallTime, GetAnaBitrateBps,
+ parsed_log.audio_network_adaptation_events(), &time_series);
+ plot->AppendTimeSeries(std::move(time_series));
+ plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)",
+ kLeftMargin, kRightMargin);
+ plot->SetSuggestedYAxis(0, 1, "Bitrate (bps)", kBottomMargin, kTopMargin);
+ plot->SetTitle("Reported audio encoder target bitrate");
+}
+
+void CreateAudioEncoderFrameLengthGraph(const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ Plot* plot) {
+ TimeSeries time_series("Audio encoder frame length", LineStyle::kLine,
+ PointStyle::kHighlight);
+ auto GetAnaFrameLengthMs =
+ [](const LoggedAudioNetworkAdaptationEvent& ana_event) {
+ if (ana_event.config.frame_length_ms)
+ return absl::optional<float>(
+ static_cast<float>(*ana_event.config.frame_length_ms));
+ return absl::optional<float>();
+ };
+ auto ToCallTime = [config](const LoggedAudioNetworkAdaptationEvent& packet) {
+ return config.GetCallTimeSec(packet.log_time_us());
+ };
+ ProcessPoints<LoggedAudioNetworkAdaptationEvent>(
+ ToCallTime, GetAnaFrameLengthMs,
+ parsed_log.audio_network_adaptation_events(), &time_series);
+ plot->AppendTimeSeries(std::move(time_series));
+ plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)",
+ kLeftMargin, kRightMargin);
+ plot->SetSuggestedYAxis(0, 1, "Frame length (ms)", kBottomMargin, kTopMargin);
+ plot->SetTitle("Reported audio encoder frame length");
+}
+
+void CreateAudioEncoderPacketLossGraph(const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ Plot* plot) {
+ TimeSeries time_series("Audio encoder uplink packet loss fraction",
+ LineStyle::kLine, PointStyle::kHighlight);
+ auto GetAnaPacketLoss =
+ [](const LoggedAudioNetworkAdaptationEvent& ana_event) {
+ if (ana_event.config.uplink_packet_loss_fraction)
+ return absl::optional<float>(static_cast<float>(
+ *ana_event.config.uplink_packet_loss_fraction));
+ return absl::optional<float>();
+ };
+ auto ToCallTime = [config](const LoggedAudioNetworkAdaptationEvent& packet) {
+ return config.GetCallTimeSec(packet.log_time_us());
+ };
+ ProcessPoints<LoggedAudioNetworkAdaptationEvent>(
+ ToCallTime, GetAnaPacketLoss,
+ parsed_log.audio_network_adaptation_events(), &time_series);
+ plot->AppendTimeSeries(std::move(time_series));
+ plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)",
+ kLeftMargin, kRightMargin);
+ plot->SetSuggestedYAxis(0, 10, "Percent lost packets", kBottomMargin,
+ kTopMargin);
+ plot->SetTitle("Reported audio encoder lost packets");
+}
+
+void CreateAudioEncoderEnableFecGraph(const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ Plot* plot) {
+ TimeSeries time_series("Audio encoder FEC", LineStyle::kLine,
+ PointStyle::kHighlight);
+ auto GetAnaFecEnabled =
+ [](const LoggedAudioNetworkAdaptationEvent& ana_event) {
+ if (ana_event.config.enable_fec)
+ return absl::optional<float>(
+ static_cast<float>(*ana_event.config.enable_fec));
+ return absl::optional<float>();
+ };
+ auto ToCallTime = [config](const LoggedAudioNetworkAdaptationEvent& packet) {
+ return config.GetCallTimeSec(packet.log_time_us());
+ };
+ ProcessPoints<LoggedAudioNetworkAdaptationEvent>(
+ ToCallTime, GetAnaFecEnabled,
+ parsed_log.audio_network_adaptation_events(), &time_series);
+ plot->AppendTimeSeries(std::move(time_series));
+ plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)",
+ kLeftMargin, kRightMargin);
+ plot->SetSuggestedYAxis(0, 1, "FEC (false/true)", kBottomMargin, kTopMargin);
+ plot->SetTitle("Reported audio encoder FEC");
+}
+
+void CreateAudioEncoderEnableDtxGraph(const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ Plot* plot) {
+ TimeSeries time_series("Audio encoder DTX", LineStyle::kLine,
+ PointStyle::kHighlight);
+ auto GetAnaDtxEnabled =
+ [](const LoggedAudioNetworkAdaptationEvent& ana_event) {
+ if (ana_event.config.enable_dtx)
+ return absl::optional<float>(
+ static_cast<float>(*ana_event.config.enable_dtx));
+ return absl::optional<float>();
+ };
+ auto ToCallTime = [config](const LoggedAudioNetworkAdaptationEvent& packet) {
+ return config.GetCallTimeSec(packet.log_time_us());
+ };
+ ProcessPoints<LoggedAudioNetworkAdaptationEvent>(
+ ToCallTime, GetAnaDtxEnabled,
+ parsed_log.audio_network_adaptation_events(), &time_series);
+ plot->AppendTimeSeries(std::move(time_series));
+ plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)",
+ kLeftMargin, kRightMargin);
+ plot->SetSuggestedYAxis(0, 1, "DTX (false/true)", kBottomMargin, kTopMargin);
+ plot->SetTitle("Reported audio encoder DTX");
+}
+
+void CreateAudioEncoderNumChannelsGraph(const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ Plot* plot) {
+ TimeSeries time_series("Audio encoder number of channels", LineStyle::kLine,
+ PointStyle::kHighlight);
+ auto GetAnaNumChannels =
+ [](const LoggedAudioNetworkAdaptationEvent& ana_event) {
+ if (ana_event.config.num_channels)
+ return absl::optional<float>(
+ static_cast<float>(*ana_event.config.num_channels));
+ return absl::optional<float>();
+ };
+ auto ToCallTime = [config](const LoggedAudioNetworkAdaptationEvent& packet) {
+ return config.GetCallTimeSec(packet.log_time_us());
+ };
+ ProcessPoints<LoggedAudioNetworkAdaptationEvent>(
+ ToCallTime, GetAnaNumChannels,
+ parsed_log.audio_network_adaptation_events(), &time_series);
+ plot->AppendTimeSeries(std::move(time_series));
+ plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)",
+ kLeftMargin, kRightMargin);
+ plot->SetSuggestedYAxis(0, 1, "Number of channels (1 (mono)/2 (stereo))",
+ kBottomMargin, kTopMargin);
+ plot->SetTitle("Reported audio encoder number of channels");
+}
+
+class NetEqStreamInput : public test::NetEqInput {
+ public:
+ // Does not take any ownership, and all pointers must refer to valid objects
+ // that outlive the one constructed.
+ NetEqStreamInput(const std::vector<LoggedRtpPacketIncoming>* packet_stream,
+ const std::vector<LoggedAudioPlayoutEvent>* output_events,
+ absl::optional<int64_t> end_time_ms)
+ : packet_stream_(*packet_stream),
+ packet_stream_it_(packet_stream_.begin()),
+ output_events_it_(output_events->begin()),
+ output_events_end_(output_events->end()),
+ end_time_ms_(end_time_ms) {
+ RTC_DCHECK(packet_stream);
+ RTC_DCHECK(output_events);
+ }
+
+ absl::optional<int64_t> NextPacketTime() const override {
+ if (packet_stream_it_ == packet_stream_.end()) {
+ return absl::nullopt;
+ }
+ if (end_time_ms_ && packet_stream_it_->rtp.log_time_ms() > *end_time_ms_) {
+ return absl::nullopt;
+ }
+ return packet_stream_it_->rtp.log_time_ms();
+ }
+
+ absl::optional<int64_t> NextOutputEventTime() const override {
+ if (output_events_it_ == output_events_end_) {
+ return absl::nullopt;
+ }
+ if (end_time_ms_ && output_events_it_->log_time_ms() > *end_time_ms_) {
+ return absl::nullopt;
+ }
+ return output_events_it_->log_time_ms();
+ }
+
+ std::unique_ptr<PacketData> PopPacket() override {
+ if (packet_stream_it_ == packet_stream_.end()) {
+ return std::unique_ptr<PacketData>();
+ }
+ std::unique_ptr<PacketData> packet_data(new PacketData());
+ packet_data->header = packet_stream_it_->rtp.header;
+ packet_data->time_ms = packet_stream_it_->rtp.log_time_ms();
+
+ // This is a header-only "dummy" packet. Set the payload to all zeros, with
+ // length according to the virtual length.
+ packet_data->payload.SetSize(packet_stream_it_->rtp.total_length -
+ packet_stream_it_->rtp.header_length);
+ std::fill_n(packet_data->payload.data(), packet_data->payload.size(), 0);
+
+ ++packet_stream_it_;
+ return packet_data;
+ }
+
+ void AdvanceOutputEvent() override {
+ if (output_events_it_ != output_events_end_) {
+ ++output_events_it_;
+ }
+ }
+
+ bool ended() const override { return !NextEventTime(); }
+
+ absl::optional<RTPHeader> NextHeader() const override {
+ if (packet_stream_it_ == packet_stream_.end()) {
+ return absl::nullopt;
+ }
+ return packet_stream_it_->rtp.header;
+ }
+
+ private:
+ const std::vector<LoggedRtpPacketIncoming>& packet_stream_;
+ std::vector<LoggedRtpPacketIncoming>::const_iterator packet_stream_it_;
+ std::vector<LoggedAudioPlayoutEvent>::const_iterator output_events_it_;
+ const std::vector<LoggedAudioPlayoutEvent>::const_iterator output_events_end_;
+ const absl::optional<int64_t> end_time_ms_;
+};
+
+namespace {
+
+// Factory to create a "replacement decoder" that produces the decoded audio
+// by reading from a file rather than from the encoded payloads.
+class ReplacementAudioDecoderFactory : public AudioDecoderFactory {
+ public:
+ ReplacementAudioDecoderFactory(const absl::string_view replacement_file_name,
+ int file_sample_rate_hz)
+ : replacement_file_name_(replacement_file_name),
+ file_sample_rate_hz_(file_sample_rate_hz) {}
+
+ std::vector<AudioCodecSpec> GetSupportedDecoders() override {
+ RTC_NOTREACHED();
+ return {};
+ }
+
+ bool IsSupportedDecoder(const SdpAudioFormat& format) override {
+ return true;
+ }
+
+ std::unique_ptr<AudioDecoder> MakeAudioDecoder(
+ const SdpAudioFormat& format,
+ absl::optional<AudioCodecPairId> codec_pair_id) override {
+ auto replacement_file = std::make_unique<test::ResampleInputAudioFile>(
+ replacement_file_name_, file_sample_rate_hz_);
+ replacement_file->set_output_rate_hz(48000);
+ return std::make_unique<test::FakeDecodeFromFile>(
+ std::move(replacement_file), 48000, false);
+ }
+
+ private:
+ const std::string replacement_file_name_;
+ const int file_sample_rate_hz_;
+};
+
+// Creates a NetEq test object and all necessary input and output helpers. Runs
+// the test and returns the NetEqDelayAnalyzer object that was used to
+// instrument the test.
+std::unique_ptr<test::NetEqStatsGetter> CreateNetEqTestAndRun(
+ const std::vector<LoggedRtpPacketIncoming>* packet_stream,
+ const std::vector<LoggedAudioPlayoutEvent>* output_events,
+ absl::optional<int64_t> end_time_ms,
+ const std::string& replacement_file_name,
+ int file_sample_rate_hz) {
+ std::unique_ptr<test::NetEqInput> input(
+ new NetEqStreamInput(packet_stream, output_events, end_time_ms));
+
+ constexpr int kReplacementPt = 127;
+ std::set<uint8_t> cn_types;
+ std::set<uint8_t> forbidden_types;
+ input.reset(new test::NetEqReplacementInput(std::move(input), kReplacementPt,
+ cn_types, forbidden_types));
+
+ NetEq::Config config;
+ config.max_packets_in_buffer = 200;
+ config.enable_fast_accelerate = true;
+
+ std::unique_ptr<test::VoidAudioSink> output(new test::VoidAudioSink());
+
+ rtc::scoped_refptr<AudioDecoderFactory> decoder_factory =
+ new rtc::RefCountedObject<ReplacementAudioDecoderFactory>(
+ replacement_file_name, file_sample_rate_hz);
+
+ test::NetEqTest::DecoderMap codecs = {
+ {kReplacementPt, SdpAudioFormat("l16", 48000, 1)}};
+
+ std::unique_ptr<test::NetEqDelayAnalyzer> delay_cb(
+ new test::NetEqDelayAnalyzer);
+ std::unique_ptr<test::NetEqStatsGetter> neteq_stats_getter(
+ new test::NetEqStatsGetter(std::move(delay_cb)));
+ test::DefaultNetEqTestErrorCallback error_cb;
+ test::NetEqTest::Callbacks callbacks;
+ callbacks.error_callback = &error_cb;
+ callbacks.post_insert_packet = neteq_stats_getter->delay_analyzer();
+ callbacks.get_audio_callback = neteq_stats_getter.get();
+
+ test::NetEqTest test(config, decoder_factory, codecs, /*text_log=*/nullptr,
+ /*factory=*/nullptr, std::move(input), std::move(output),
+ callbacks);
+ test.Run();
+ return neteq_stats_getter;
+}
+} // namespace
+
+NetEqStatsGetterMap SimulateNetEq(const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ const std::string& replacement_file_name,
+ int file_sample_rate_hz) {
+ NetEqStatsGetterMap neteq_stats;
+
+ for (const auto& stream : parsed_log.incoming_rtp_packets_by_ssrc()) {
+ const uint32_t ssrc = stream.ssrc;
+ if (!IsAudioSsrc(parsed_log, kIncomingPacket, ssrc))
+ continue;
+ const std::vector<LoggedRtpPacketIncoming>* audio_packets =
+ &stream.incoming_packets;
+ if (audio_packets == nullptr) {
+ // No incoming audio stream found.
+ continue;
+ }
+
+ RTC_DCHECK(neteq_stats.find(ssrc) == neteq_stats.end());
+
+ std::map<uint32_t, std::vector<LoggedAudioPlayoutEvent>>::const_iterator
+ output_events_it = parsed_log.audio_playout_events().find(ssrc);
+ if (output_events_it == parsed_log.audio_playout_events().end()) {
+ // Could not find output events with SSRC matching the input audio stream.
+ // Using the first available stream of output events.
+ output_events_it = parsed_log.audio_playout_events().cbegin();
+ }
+
+ int64_t end_time_ms = parsed_log.first_log_segment().stop_time_ms();
+
+ neteq_stats[ssrc] = CreateNetEqTestAndRun(
+ audio_packets, &output_events_it->second, end_time_ms,
+ replacement_file_name, file_sample_rate_hz);
+ }
+
+ return neteq_stats;
+}
+
+// Given a NetEqStatsGetter and the SSRC that the NetEqStatsGetter was created
+// for, this method generates a plot for the jitter buffer delay profile.
+void CreateAudioJitterBufferGraph(const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ uint32_t ssrc,
+ const test::NetEqStatsGetter* stats_getter,
+ Plot* plot) {
+ test::NetEqDelayAnalyzer::Delays arrival_delay_ms;
+ test::NetEqDelayAnalyzer::Delays corrected_arrival_delay_ms;
+ test::NetEqDelayAnalyzer::Delays playout_delay_ms;
+ test::NetEqDelayAnalyzer::Delays target_delay_ms;
+
+ stats_getter->delay_analyzer()->CreateGraphs(
+ &arrival_delay_ms, &corrected_arrival_delay_ms, &playout_delay_ms,
+ &target_delay_ms);
+
+ TimeSeries time_series_packet_arrival("packet arrival delay",
+ LineStyle::kLine);
+ TimeSeries time_series_relative_packet_arrival(
+ "Relative packet arrival delay", LineStyle::kLine);
+ TimeSeries time_series_play_time("Playout delay", LineStyle::kLine);
+ TimeSeries time_series_target_time("Target delay", LineStyle::kLine,
+ PointStyle::kHighlight);
+
+ for (const auto& data : arrival_delay_ms) {
+ const float x = config.GetCallTimeSec(data.first * 1000); // ms to us.
+ const float y = data.second;
+ time_series_packet_arrival.points.emplace_back(TimeSeriesPoint(x, y));
+ }
+ for (const auto& data : corrected_arrival_delay_ms) {
+ const float x = config.GetCallTimeSec(data.first * 1000); // ms to us.
+ const float y = data.second;
+ time_series_relative_packet_arrival.points.emplace_back(
+ TimeSeriesPoint(x, y));
+ }
+ for (const auto& data : playout_delay_ms) {
+ const float x = config.GetCallTimeSec(data.first * 1000); // ms to us.
+ const float y = data.second;
+ time_series_play_time.points.emplace_back(TimeSeriesPoint(x, y));
+ }
+ for (const auto& data : target_delay_ms) {
+ const float x = config.GetCallTimeSec(data.first * 1000); // ms to us.
+ const float y = data.second;
+ time_series_target_time.points.emplace_back(TimeSeriesPoint(x, y));
+ }
+
+ plot->AppendTimeSeries(std::move(time_series_packet_arrival));
+ plot->AppendTimeSeries(std::move(time_series_relative_packet_arrival));
+ plot->AppendTimeSeries(std::move(time_series_play_time));
+ plot->AppendTimeSeries(std::move(time_series_target_time));
+
+ plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)",
+ kLeftMargin, kRightMargin);
+ plot->SetSuggestedYAxis(0, 1, "Relative delay (ms)", kBottomMargin,
+ kTopMargin);
+ plot->SetTitle("NetEq timing for " +
+ GetStreamName(parsed_log, kIncomingPacket, ssrc));
+}
+
+template <typename NetEqStatsType>
+void CreateNetEqStatsGraphInternal(
+ const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ const NetEqStatsGetterMap& neteq_stats,
+ rtc::FunctionView<const std::vector<std::pair<int64_t, NetEqStatsType>>*(
+ const test::NetEqStatsGetter*)> data_extractor,
+ rtc::FunctionView<float(const NetEqStatsType&)> stats_extractor,
+ const std::string& plot_name,
+ Plot* plot) {
+ std::map<uint32_t, TimeSeries> time_series;
+
+ for (const auto& st : neteq_stats) {
+ const uint32_t ssrc = st.first;
+ const std::vector<std::pair<int64_t, NetEqStatsType>>* data_vector =
+ data_extractor(st.second.get());
+ for (const auto& data : *data_vector) {
+ const float time = config.GetCallTimeSec(data.first * 1000); // ms to us.
+ const float value = stats_extractor(data.second);
+ time_series[ssrc].points.emplace_back(TimeSeriesPoint(time, value));
+ }
+ }
+
+ for (auto& series : time_series) {
+ series.second.label =
+ GetStreamName(parsed_log, kIncomingPacket, series.first);
+ series.second.line_style = LineStyle::kLine;
+ plot->AppendTimeSeries(std::move(series.second));
+ }
+
+ plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)",
+ kLeftMargin, kRightMargin);
+ plot->SetSuggestedYAxis(0, 1, plot_name, kBottomMargin, kTopMargin);
+ plot->SetTitle(plot_name);
+}
+
+void CreateNetEqNetworkStatsGraph(
+ const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ const NetEqStatsGetterMap& neteq_stats,
+ rtc::FunctionView<float(const NetEqNetworkStatistics&)> stats_extractor,
+ const std::string& plot_name,
+ Plot* plot) {
+ CreateNetEqStatsGraphInternal<NetEqNetworkStatistics>(
+ parsed_log, config, neteq_stats,
+ [](const test::NetEqStatsGetter* stats_getter) {
+ return stats_getter->stats();
+ },
+ stats_extractor, plot_name, plot);
+}
+
+void CreateNetEqLifetimeStatsGraph(
+ const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ const NetEqStatsGetterMap& neteq_stats,
+ rtc::FunctionView<float(const NetEqLifetimeStatistics&)> stats_extractor,
+ const std::string& plot_name,
+ Plot* plot) {
+ CreateNetEqStatsGraphInternal<NetEqLifetimeStatistics>(
+ parsed_log, config, neteq_stats,
+ [](const test::NetEqStatsGetter* stats_getter) {
+ return stats_getter->lifetime_stats();
+ },
+ stats_extractor, plot_name, plot);
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyze_audio.h b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyze_audio.h
new file mode 100644
index 00000000000..726e84492db
--- /dev/null
+++ b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyze_audio.h
@@ -0,0 +1,75 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ANALYZE_AUDIO_H_
+#define RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ANALYZE_AUDIO_H_
+
+#include <cstdint>
+#include <map>
+#include <memory>
+#include <string>
+
+#include "api/function_view.h"
+#include "logging/rtc_event_log/rtc_event_log_parser.h"
+#include "modules/audio_coding/neteq/tools/neteq_stats_getter.h"
+#include "rtc_tools/rtc_event_log_visualizer/analyzer_common.h"
+#include "rtc_tools/rtc_event_log_visualizer/plot_base.h"
+
+namespace webrtc {
+
+void CreateAudioEncoderTargetBitrateGraph(const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ Plot* plot);
+void CreateAudioEncoderFrameLengthGraph(const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ Plot* plot);
+void CreateAudioEncoderPacketLossGraph(const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ Plot* plot);
+void CreateAudioEncoderEnableFecGraph(const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ Plot* plot);
+void CreateAudioEncoderEnableDtxGraph(const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ Plot* plot);
+void CreateAudioEncoderNumChannelsGraph(const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ Plot* plot);
+
+using NetEqStatsGetterMap =
+ std::map<uint32_t, std::unique_ptr<test::NetEqStatsGetter>>;
+NetEqStatsGetterMap SimulateNetEq(const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ const std::string& replacement_file_name,
+ int file_sample_rate_hz);
+
+void CreateAudioJitterBufferGraph(const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ uint32_t ssrc,
+ const test::NetEqStatsGetter* stats_getter,
+ Plot* plot);
+void CreateNetEqNetworkStatsGraph(
+ const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ const NetEqStatsGetterMap& neteq_stats_getters,
+ rtc::FunctionView<float(const NetEqNetworkStatistics&)> stats_extractor,
+ const std::string& plot_name,
+ Plot* plot);
+void CreateNetEqLifetimeStatsGraph(
+ const ParsedRtcEventLog& parsed_log,
+ const AnalyzerConfig& config,
+ const NetEqStatsGetterMap& neteq_stats_getters,
+ rtc::FunctionView<float(const NetEqLifetimeStatistics&)> stats_extractor,
+ const std::string& plot_name,
+ Plot* plot);
+
+} // namespace webrtc
+
+#endif // RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ANALYZE_AUDIO_H_
diff --git a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyzer.cc b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyzer.cc
index 9fcb510adcb..6d84b1b5ca4 100644
--- a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyzer.cc
+++ b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyzer.cc
@@ -31,12 +31,6 @@
#include "logging/rtc_event_log/rtc_event_processor.h"
#include "logging/rtc_event_log/rtc_stream_config.h"
#include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor.h"
-#include "modules/audio_coding/neteq/tools/audio_sink.h"
-#include "modules/audio_coding/neteq/tools/fake_decode_from_file.h"
-#include "modules/audio_coding/neteq/tools/neteq_delay_analyzer.h"
-#include "modules/audio_coding/neteq/tools/neteq_replacement_input.h"
-#include "modules/audio_coding/neteq/tools/neteq_test.h"
-#include "modules/audio_coding/neteq/tools/resample_input_audio_file.h"
#include "modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.h"
#include "modules/congestion_controller/goog_cc/bitrate_estimator.h"
#include "modules/congestion_controller/goog_cc/delay_based_bwe.h"
@@ -45,7 +39,6 @@
#include "modules/pacing/paced_sender.h"
#include "modules/pacing/packet_router.h"
#include "modules/remote_bitrate_estimator/include/bwe_defines.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/rtp_rtcp/source/rtcp_packet.h"
#include "modules/rtp_rtcp/source/rtcp_packet/common_header.h"
@@ -54,6 +47,7 @@
#include "modules/rtp_rtcp/source/rtcp_packet/sender_report.h"
#include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h"
#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h"
#include "modules/rtp_rtcp/source/rtp_utility.h"
#include "rtc_base/checks.h"
#include "rtc_base/format_macros.h"
@@ -62,17 +56,12 @@
#include "rtc_base/rate_statistics.h"
#include "rtc_base/strings/string_builder.h"
#include "rtc_tools/rtc_event_log_visualizer/log_simulation.h"
-
-#ifndef BWE_TEST_LOGGING_COMPILE_TIME_ENABLE
-#define BWE_TEST_LOGGING_COMPILE_TIME_ENABLE 0
-#endif // BWE_TEST_LOGGING_COMPILE_TIME_ENABLE
+#include "test/explicit_key_value_config.h"
namespace webrtc {
namespace {
-const int kNumMicrosecsPerSec = 1000000;
-
std::string SsrcToString(uint32_t ssrc) {
rtc::StringBuilder ss;
ss << "SSRC " << ssrc;
@@ -168,11 +157,6 @@ absl::optional<uint32_t> EstimateRtpClockFrequency(
return absl::nullopt;
}
-constexpr float kLeftMargin = 0.01f;
-constexpr float kRightMargin = 0.02f;
-constexpr float kBottomMargin = 0.02f;
-constexpr float kTopMargin = 0.05f;
-
absl::optional<double> NetworkDelayDiff_AbsSendTime(
const LoggedRtpPacketIncoming& old_packet,
const LoggedRtpPacketIncoming& new_packet) {
@@ -222,99 +206,6 @@ absl::optional<double> NetworkDelayDiff_CaptureTime(
return delay_change;
}
-// For each element in data_view, use |f()| to extract a y-coordinate and
-// store the result in a TimeSeries.
-template <typename DataType, typename IterableType>
-void ProcessPoints(rtc::FunctionView<float(const DataType&)> fx,
- rtc::FunctionView<absl::optional<float>(const DataType&)> fy,
- const IterableType& data_view,
- TimeSeries* result) {
- for (size_t i = 0; i < data_view.size(); i++) {
- const DataType& elem = data_view[i];
- float x = fx(elem);
- absl::optional<float> y = fy(elem);
- if (y)
- result->points.emplace_back(x, *y);
- }
-}
-
-// For each pair of adjacent elements in |data|, use |f()| to extract a
-// y-coordinate and store the result in a TimeSeries. Note that the x-coordinate
-// will be the time of the second element in the pair.
-template <typename DataType, typename ResultType, typename IterableType>
-void ProcessPairs(
- rtc::FunctionView<float(const DataType&)> fx,
- rtc::FunctionView<absl::optional<ResultType>(const DataType&,
- const DataType&)> fy,
- const IterableType& data,
- TimeSeries* result) {
- for (size_t i = 1; i < data.size(); i++) {
- float x = fx(data[i]);
- absl::optional<ResultType> y = fy(data[i - 1], data[i]);
- if (y)
- result->points.emplace_back(x, static_cast<float>(*y));
- }
-}
-
-// For each pair of adjacent elements in |data|, use |f()| to extract a
-// y-coordinate and store the result in a TimeSeries. Note that the x-coordinate
-// will be the time of the second element in the pair.
-template <typename DataType, typename ResultType, typename IterableType>
-void AccumulatePairs(
- rtc::FunctionView<float(const DataType&)> fx,
- rtc::FunctionView<absl::optional<ResultType>(const DataType&,
- const DataType&)> fy,
- const IterableType& data,
- TimeSeries* result) {
- ResultType sum = 0;
- for (size_t i = 1; i < data.size(); i++) {
- float x = fx(data[i]);
- absl::optional<ResultType> y = fy(data[i - 1], data[i]);
- if (y) {
- sum += *y;
- result->points.emplace_back(x, static_cast<float>(sum));
- }
- }
-}
-
-// Calculates a moving average of |data| and stores the result in a TimeSeries.
-// A data point is generated every |step| microseconds from |begin_time|
-// to |end_time|. The value of each data point is the average of the data
-// during the preceding |window_duration_us| microseconds.
-template <typename DataType, typename ResultType, typename IterableType>
-void MovingAverage(
- rtc::FunctionView<absl::optional<ResultType>(const DataType&)> fy,
- const IterableType& data_view,
- AnalyzerConfig config,
- TimeSeries* result) {
- size_t window_index_begin = 0;
- size_t window_index_end = 0;
- ResultType sum_in_window = 0;
-
- for (int64_t t = config.begin_time_; t < config.end_time_ + config.step_;
- t += config.step_) {
- while (window_index_end < data_view.size() &&
- data_view[window_index_end].log_time_us() < t) {
- absl::optional<ResultType> value = fy(data_view[window_index_end]);
- if (value)
- sum_in_window += *value;
- ++window_index_end;
- }
- while (window_index_begin < data_view.size() &&
- data_view[window_index_begin].log_time_us() <
- t - config.window_duration_) {
- absl::optional<ResultType> value = fy(data_view[window_index_begin]);
- if (value)
- sum_in_window -= *value;
- ++window_index_begin;
- }
- float window_duration_s =
- static_cast<float>(config.window_duration_) / kNumMicrosecsPerSec;
- float x = config.GetCallTimeSec(t);
- float y = sum_in_window / window_duration_s;
- result->points.emplace_back(x, y);
- }
-}
template <typename T>
TimeSeries CreateRtcpTypeTimeSeries(const std::vector<T>& rtcp_list,
@@ -465,32 +356,21 @@ EventLogAnalyzer::EventLogAnalyzer(const ParsedRtcEventLog& log,
config_.begin_time_ = config_.end_time_ = 0;
}
- const auto& log_start_events = parsed_log_.start_log_events();
- const auto& log_end_events = parsed_log_.stop_log_events();
- auto start_iter = log_start_events.begin();
- auto end_iter = log_end_events.begin();
- while (start_iter != log_start_events.end()) {
- int64_t start = start_iter->log_time_us();
- ++start_iter;
- absl::optional<int64_t> next_start;
- if (start_iter != log_start_events.end())
- next_start.emplace(start_iter->log_time_us());
- if (end_iter != log_end_events.end() &&
- end_iter->log_time_us() <=
- next_start.value_or(std::numeric_limits<int64_t>::max())) {
- int64_t end = end_iter->log_time_us();
- RTC_DCHECK_LE(start, end);
- log_segments_.push_back(std::make_pair(start, end));
- ++end_iter;
- } else {
- // we're missing an end event. Assume that it occurred just before the
- // next start.
- log_segments_.push_back(
- std::make_pair(start, next_start.value_or(config_.end_time_)));
- }
- }
- RTC_LOG(LS_INFO) << "Found " << log_segments_.size()
- << " (LOG_START, LOG_END) segments in log.";
+ RTC_LOG(LS_INFO) << "Log is "
+ << (parsed_log_.last_timestamp() -
+ parsed_log_.first_timestamp()) /
+ 1000000
+ << " seconds long.";
+}
+
+EventLogAnalyzer::EventLogAnalyzer(const ParsedRtcEventLog& log,
+ const AnalyzerConfig& config)
+ : parsed_log_(log), config_(config) {
+ RTC_LOG(LS_INFO) << "Log is "
+ << (parsed_log_.last_timestamp() -
+ parsed_log_.first_timestamp()) /
+ 1000000
+ << " seconds long.";
}
class BitrateObserver : public RemoteBitrateObserver {
@@ -527,7 +407,7 @@ void EventLogAnalyzer::CreatePacketGraph(PacketDirection direction,
continue;
}
- TimeSeries time_series(GetStreamName(direction, stream.ssrc),
+ TimeSeries time_series(GetStreamName(parsed_log_, direction, stream.ssrc),
LineStyle::kBar);
auto GetPacketSize = [](const LoggedRtpPacket& packet) {
return absl::optional<float>(packet.total_length);
@@ -597,8 +477,8 @@ void EventLogAnalyzer::CreateAccumulatedPacketsGraph(PacketDirection direction,
for (const auto& stream : parsed_log_.rtp_packets_by_ssrc(direction)) {
if (!MatchingSsrc(stream.ssrc, desired_ssrc_))
continue;
- std::string label =
- std::string("RTP ") + GetStreamName(direction, stream.ssrc);
+ std::string label = std::string("RTP ") +
+ GetStreamName(parsed_log_, direction, stream.ssrc);
CreateAccumulatedPacketsTimeSeries(plot, stream.packet_view, label);
}
std::string label =
@@ -627,7 +507,8 @@ void EventLogAnalyzer::CreatePacketRateGraph(PacketDirection direction,
continue;
}
TimeSeries time_series(
- std::string("RTP ") + GetStreamName(direction, stream.ssrc),
+ std::string("RTP ") +
+ GetStreamName(parsed_log_, direction, stream.ssrc),
LineStyle::kLine);
MovingAverage<LoggedRtpPacket, double>(CountPackets, stream.packet_view,
config_, &time_series);
@@ -736,9 +617,9 @@ void EventLogAnalyzer::CreatePlayoutGraph(Plot* plot) {
void EventLogAnalyzer::CreateAudioLevelGraph(PacketDirection direction,
Plot* plot) {
for (const auto& stream : parsed_log_.rtp_packets_by_ssrc(direction)) {
- if (!IsAudioSsrc(direction, stream.ssrc))
+ if (!IsAudioSsrc(parsed_log_, direction, stream.ssrc))
continue;
- TimeSeries time_series(GetStreamName(direction, stream.ssrc),
+ TimeSeries time_series(GetStreamName(parsed_log_, direction, stream.ssrc),
LineStyle::kLine);
for (auto& packet : stream.packet_view) {
if (packet.header.extension.hasAudioLevel) {
@@ -767,8 +648,9 @@ void EventLogAnalyzer::CreateSequenceNumberGraph(Plot* plot) {
continue;
}
- TimeSeries time_series(GetStreamName(kIncomingPacket, stream.ssrc),
- LineStyle::kBar);
+ TimeSeries time_series(
+ GetStreamName(parsed_log_, kIncomingPacket, stream.ssrc),
+ LineStyle::kBar);
auto GetSequenceNumberDiff = [](const LoggedRtpPacketIncoming& old_packet,
const LoggedRtpPacketIncoming& new_packet) {
int64_t diff =
@@ -801,8 +683,9 @@ void EventLogAnalyzer::CreateIncomingPacketLossGraph(Plot* plot) {
continue;
}
- TimeSeries time_series(GetStreamName(kIncomingPacket, stream.ssrc),
- LineStyle::kLine, PointStyle::kHighlight);
+ TimeSeries time_series(
+ GetStreamName(parsed_log_, kIncomingPacket, stream.ssrc),
+ LineStyle::kLine, PointStyle::kHighlight);
// TODO(terelius): Should the window and step size be read from the class
// instead?
const int64_t kWindowUs = 1000000;
@@ -855,7 +738,7 @@ void EventLogAnalyzer::CreateIncomingDelayGraph(Plot* plot) {
for (const auto& stream : parsed_log_.incoming_rtp_packets_by_ssrc()) {
// Filter on SSRC.
if (!MatchingSsrc(stream.ssrc, desired_ssrc_) ||
- IsRtxSsrc(kIncomingPacket, stream.ssrc)) {
+ IsRtxSsrc(parsed_log_, kIncomingPacket, stream.ssrc)) {
continue;
}
@@ -866,15 +749,14 @@ void EventLogAnalyzer::CreateIncomingDelayGraph(Plot* plot) {
<< packets.size() << " packets in the stream.";
continue;
}
- int64_t end_time_us = log_segments_.empty()
- ? std::numeric_limits<int64_t>::max()
- : log_segments_.front().second;
+ int64_t segment_end_us = parsed_log_.first_log_segment().stop_time_us();
absl::optional<uint32_t> estimated_frequency =
- EstimateRtpClockFrequency(packets, end_time_us);
+ EstimateRtpClockFrequency(packets, segment_end_us);
if (!estimated_frequency)
continue;
const double frequency_hz = *estimated_frequency;
- if (IsVideoSsrc(kIncomingPacket, stream.ssrc) && frequency_hz != 90000) {
+ if (IsVideoSsrc(parsed_log_, kIncomingPacket, stream.ssrc) &&
+ frequency_hz != 90000) {
RTC_LOG(LS_WARNING)
<< "Video stream should use a 90 kHz clock but appears to use "
<< frequency_hz / 1000 << ". Discarding.";
@@ -891,14 +773,16 @@ void EventLogAnalyzer::CreateIncomingDelayGraph(Plot* plot) {
};
TimeSeries capture_time_data(
- GetStreamName(kIncomingPacket, stream.ssrc) + " capture-time",
+ GetStreamName(parsed_log_, kIncomingPacket, stream.ssrc) +
+ " capture-time",
LineStyle::kLine);
AccumulatePairs<LoggedRtpPacketIncoming, double>(
ToCallTime, ToNetworkDelay, packets, &capture_time_data);
plot->AppendTimeSeries(std::move(capture_time_data));
TimeSeries send_time_data(
- GetStreamName(kIncomingPacket, stream.ssrc) + " abs-send-time",
+ GetStreamName(parsed_log_, kIncomingPacket, stream.ssrc) +
+ " abs-send-time",
LineStyle::kLine);
AccumulatePairs<LoggedRtpPacketIncoming, double>(
ToCallTime, NetworkDelayDiff_AbsSendTime, packets, &send_time_data);
@@ -1191,7 +1075,7 @@ void EventLogAnalyzer::CreateStreamBitrateGraph(PacketDirection direction,
continue;
}
- TimeSeries time_series(GetStreamName(direction, stream.ssrc),
+ TimeSeries time_series(GetStreamName(parsed_log_, direction, stream.ssrc),
LineStyle::kLine);
auto GetPacketSizeKilobits = [](const LoggedRtpPacket& packet) {
return packet.total_length * 8.0 / 1000.0;
@@ -1325,10 +1209,13 @@ void EventLogAnalyzer::CreateSendSideBweSimulationGraph(Plot* plot) {
TimeSeries time_series("Delay-based estimate", LineStyle::kStep,
PointStyle::kHighlight);
- TimeSeries acked_time_series("Acked bitrate", LineStyle::kLine,
+ TimeSeries acked_time_series("Raw acked bitrate", LineStyle::kLine,
PointStyle::kHighlight);
- TimeSeries acked_estimate_time_series(
- "Acked bitrate estimate", LineStyle::kLine, PointStyle::kHighlight);
+ TimeSeries robust_time_series("Robust throughput estimate", LineStyle::kLine,
+ PointStyle::kHighlight);
+ TimeSeries acked_estimate_time_series("Ackednowledged bitrate estimate",
+ LineStyle::kLine,
+ PointStyle::kHighlight);
auto rtp_iterator = outgoing_rtp.begin();
auto rtcp_iterator = incoming_rtcp.begin();
@@ -1354,20 +1241,18 @@ void EventLogAnalyzer::CreateSendSideBweSimulationGraph(Plot* plot) {
return std::numeric_limits<int64_t>::max();
};
- RateStatistics acked_bitrate(250, 8000);
-#if !(BWE_TEST_LOGGING_COMPILE_TIME_ENABLE)
- FieldTrialBasedConfig field_trial_config_;
- // The event_log_visualizer should normally not be compiled with
- // BWE_TEST_LOGGING_COMPILE_TIME_ENABLE since the normal plots won't work.
- // However, compiling with BWE_TEST_LOGGING, running with --plot=sendside_bwe
- // and piping the output to plot_dynamics.py can be used as a hack to get the
- // internal state of various BWE components. In this case, it is important
- // we don't instantiate the AcknowledgedBitrateEstimator both here and in
- // GoogCcNetworkController since that would lead to duplicate outputs.
+ RateStatistics acked_bitrate(750, 8000);
+ test::ExplicitKeyValueConfig throughput_config(
+ "WebRTC-Bwe-RobustThroughputEstimatorSettings/"
+ "enabled:true,reduce_bias:true,assume_shared_link:false,initial_packets:"
+ "10,min_packets:25,window_duration:750ms,unacked_weight:0.5/");
+ std::unique_ptr<AcknowledgedBitrateEstimatorInterface>
+ robust_throughput_estimator(
+ AcknowledgedBitrateEstimatorInterface::Create(&throughput_config));
+ FieldTrialBasedConfig field_trial_config;
std::unique_ptr<AcknowledgedBitrateEstimatorInterface>
acknowledged_bitrate_estimator(
- AcknowledgedBitrateEstimatorInterface::Create(&field_trial_config_));
-#endif // !(BWE_TEST_LOGGING_COMPILE_TIME_ENABLE)
+ AcknowledgedBitrateEstimatorInterface::Create(&field_trial_config));
int64_t time_us =
std::min({NextRtpTime(), NextRtcpTime(), NextProcessTime()});
int64_t last_update_us = 0;
@@ -1377,24 +1262,40 @@ void EventLogAnalyzer::CreateSendSideBweSimulationGraph(Plot* plot) {
RTC_DCHECK_EQ(clock.TimeInMicroseconds(), NextRtpTime());
const RtpPacketType& rtp_packet = *rtp_iterator->second;
if (rtp_packet.rtp.header.extension.hasTransportSequenceNumber) {
- RTC_DCHECK(rtp_packet.rtp.header.extension.hasTransportSequenceNumber);
RtpPacketSendInfo packet_info;
packet_info.ssrc = rtp_packet.rtp.header.ssrc;
packet_info.transport_sequence_number =
rtp_packet.rtp.header.extension.transportSequenceNumber;
packet_info.rtp_sequence_number = rtp_packet.rtp.header.sequenceNumber;
packet_info.length = rtp_packet.rtp.total_length;
+ if (IsRtxSsrc(parsed_log_, PacketDirection::kOutgoingPacket,
+ rtp_packet.rtp.header.ssrc)) {
+ // Don't set the optional media type as we don't know if it is
+ // a retransmission, FEC or padding.
+ } else if (IsVideoSsrc(parsed_log_, PacketDirection::kOutgoingPacket,
+ rtp_packet.rtp.header.ssrc)) {
+ packet_info.packet_type = RtpPacketMediaType::kVideo;
+ } else if (IsAudioSsrc(parsed_log_, PacketDirection::kOutgoingPacket,
+ rtp_packet.rtp.header.ssrc)) {
+ packet_info.packet_type = RtpPacketMediaType::kAudio;
+ }
transport_feedback.AddPacket(
packet_info,
0u, // Per packet overhead bytes.
Timestamp::Micros(rtp_packet.rtp.log_time_us()));
- rtc::SentPacket sent_packet(
- rtp_packet.rtp.header.extension.transportSequenceNumber,
- rtp_packet.rtp.log_time_us() / 1000);
- auto sent_msg = transport_feedback.ProcessSentPacket(sent_packet);
- if (sent_msg)
- observer.Update(goog_cc->OnSentPacket(*sent_msg));
}
+ rtc::SentPacket sent_packet;
+ sent_packet.send_time_ms = rtp_packet.rtp.log_time_ms();
+ sent_packet.info.included_in_allocation = true;
+ sent_packet.info.packet_size_bytes = rtp_packet.rtp.total_length;
+ if (rtp_packet.rtp.header.extension.hasTransportSequenceNumber) {
+ sent_packet.packet_id =
+ rtp_packet.rtp.header.extension.transportSequenceNumber;
+ sent_packet.info.included_in_feedback = true;
+ }
+ auto sent_msg = transport_feedback.ProcessSentPacket(sent_packet);
+ if (sent_msg)
+ observer.Update(goog_cc->OnSentPacket(*sent_msg));
++rtp_iterator;
}
if (clock.TimeInMicroseconds() >= NextRtcpTime()) {
@@ -1409,13 +1310,13 @@ void EventLogAnalyzer::CreateSendSideBweSimulationGraph(Plot* plot) {
std::vector<PacketResult> feedback =
feedback_msg->SortedByReceiveTime();
if (!feedback.empty()) {
-#if !(BWE_TEST_LOGGING_COMPILE_TIME_ENABLE)
acknowledged_bitrate_estimator->IncomingPacketFeedbackVector(
feedback);
-#endif // !(BWE_TEST_LOGGING_COMPILE_TIME_ENABLE)
- for (const PacketResult& packet : feedback)
+ robust_throughput_estimator->IncomingPacketFeedbackVector(feedback);
+ for (const PacketResult& packet : feedback) {
acked_bitrate.Update(packet.sent_packet.size.bytes(),
packet.receive_time.ms());
+ }
bitrate_bps = acked_bitrate.Rate(feedback.back().receive_time.ms());
}
}
@@ -1423,12 +1324,14 @@ void EventLogAnalyzer::CreateSendSideBweSimulationGraph(Plot* plot) {
float x = config_.GetCallTimeSec(clock.TimeInMicroseconds());
float y = bitrate_bps.value_or(0) / 1000;
acked_time_series.points.emplace_back(x, y);
-#if !(BWE_TEST_LOGGING_COMPILE_TIME_ENABLE)
+ y = robust_throughput_estimator->bitrate()
+ .value_or(DataRate::Zero())
+ .kbps();
+ robust_time_series.points.emplace_back(x, y);
y = acknowledged_bitrate_estimator->bitrate()
.value_or(DataRate::Zero())
.kbps();
acked_estimate_time_series.points.emplace_back(x, y);
-#endif // !(BWE_TEST_LOGGING_COMPILE_TIME_ENABLE)
++rtcp_iterator;
}
if (clock.TimeInMicroseconds() >= NextProcessTime()) {
@@ -1449,6 +1352,7 @@ void EventLogAnalyzer::CreateSendSideBweSimulationGraph(Plot* plot) {
}
// Add the data set to the plot.
plot->AppendTimeSeries(std::move(time_series));
+ plot->AppendTimeSeries(std::move(robust_time_series));
plot->AppendTimeSeries(std::move(acked_time_series));
plot->AppendTimeSeriesIfNotEmpty(std::move(acked_estimate_time_series));
@@ -1476,14 +1380,16 @@ void EventLogAnalyzer::CreateReceiveSideBweSimulationGraph(Plot* plot) {
}
private:
- uint32_t last_bitrate_bps_;
- bool bitrate_updated_;
+ // We don't know the start bitrate, but assume that it is the default 300
+ // kbps.
+ uint32_t last_bitrate_bps_ = 300000;
+ bool bitrate_updated_ = false;
};
std::multimap<int64_t, const RtpPacketType*> incoming_rtp;
for (const auto& stream : parsed_log_.incoming_rtp_packets_by_ssrc()) {
- if (IsVideoSsrc(kIncomingPacket, stream.ssrc)) {
+ if (IsVideoSsrc(parsed_log_, kIncomingPacket, stream.ssrc)) {
for (const auto& rtp_packet : stream.incoming_packets)
incoming_rtp.insert(
std::make_pair(rtp_packet.rtp.log_time_us(), &rtp_packet));
@@ -1586,7 +1492,7 @@ void EventLogAnalyzer::CreatePacerDelayGraph(Plot* plot) {
const std::vector<LoggedRtpPacketOutgoing>& packets =
stream.outgoing_packets;
- if (IsRtxSsrc(kOutgoingPacket, stream.ssrc)) {
+ if (IsRtxSsrc(parsed_log_, kOutgoingPacket, stream.ssrc)) {
continue;
}
@@ -1596,14 +1502,12 @@ void EventLogAnalyzer::CreatePacerDelayGraph(Plot* plot) {
"pacer delay with less than 2 packets in the stream";
continue;
}
- int64_t end_time_us = log_segments_.empty()
- ? std::numeric_limits<int64_t>::max()
- : log_segments_.front().second;
+ int64_t segment_end_us = parsed_log_.first_log_segment().stop_time_us();
absl::optional<uint32_t> estimated_frequency =
- EstimateRtpClockFrequency(packets, end_time_us);
+ EstimateRtpClockFrequency(packets, segment_end_us);
if (!estimated_frequency)
continue;
- if (IsVideoSsrc(kOutgoingPacket, stream.ssrc) &&
+ if (IsVideoSsrc(parsed_log_, kOutgoingPacket, stream.ssrc) &&
*estimated_frequency != 90000) {
RTC_LOG(LS_WARNING)
<< "Video stream should use a 90 kHz clock but appears to use "
@@ -1612,7 +1516,7 @@ void EventLogAnalyzer::CreatePacerDelayGraph(Plot* plot) {
}
TimeSeries pacer_delay_series(
- GetStreamName(kOutgoingPacket, stream.ssrc) + "(" +
+ GetStreamName(parsed_log_, kOutgoingPacket, stream.ssrc) + "(" +
std::to_string(*estimated_frequency / 1000) + " kHz)",
LineStyle::kLine, PointStyle::kHighlight);
SeqNumUnwrapper<uint32_t> timestamp_unwrapper;
@@ -1645,7 +1549,7 @@ void EventLogAnalyzer::CreateTimestampGraph(PacketDirection direction,
Plot* plot) {
for (const auto& stream : parsed_log_.rtp_packets_by_ssrc(direction)) {
TimeSeries rtp_timestamps(
- GetStreamName(direction, stream.ssrc) + " capture-time",
+ GetStreamName(parsed_log_, direction, stream.ssrc) + " capture-time",
LineStyle::kLine, PointStyle::kHighlight);
for (const auto& packet : stream.packet_view) {
float x = config_.GetCallTimeSec(packet.log_time_us());
@@ -1655,7 +1559,8 @@ void EventLogAnalyzer::CreateTimestampGraph(PacketDirection direction,
plot->AppendTimeSeries(std::move(rtp_timestamps));
TimeSeries rtcp_timestamps(
- GetStreamName(direction, stream.ssrc) + " rtcp capture-time",
+ GetStreamName(parsed_log_, direction, stream.ssrc) +
+ " rtcp capture-time",
LineStyle::kLine, PointStyle::kHighlight);
// TODO(terelius): Why only sender reports?
const auto& sender_reports = parsed_log_.sender_reports(direction);
@@ -1692,7 +1597,8 @@ void EventLogAnalyzer::CreateSenderAndReceiverReportPlot(
bool inserted;
if (sr_report_it == sr_reports_by_ssrc.end()) {
std::tie(sr_report_it, inserted) = sr_reports_by_ssrc.emplace(
- ssrc, TimeSeries(GetStreamName(direction, ssrc) + " Sender Reports",
+ ssrc, TimeSeries(GetStreamName(parsed_log_, direction, ssrc) +
+ " Sender Reports",
LineStyle::kLine, PointStyle::kHighlight));
}
sr_report_it->second.points.emplace_back(x, y);
@@ -1713,9 +1619,9 @@ void EventLogAnalyzer::CreateSenderAndReceiverReportPlot(
bool inserted;
if (rr_report_it == rr_reports_by_ssrc.end()) {
std::tie(rr_report_it, inserted) = rr_reports_by_ssrc.emplace(
- ssrc,
- TimeSeries(GetStreamName(direction, ssrc) + " Receiver Reports",
- LineStyle::kLine, PointStyle::kHighlight));
+ ssrc, TimeSeries(GetStreamName(parsed_log_, direction, ssrc) +
+ " Receiver Reports",
+ LineStyle::kLine, PointStyle::kHighlight));
}
rr_report_it->second.points.emplace_back(x, y);
}
@@ -1730,463 +1636,6 @@ void EventLogAnalyzer::CreateSenderAndReceiverReportPlot(
plot->SetTitle(title);
}
-void EventLogAnalyzer::CreateAudioEncoderTargetBitrateGraph(Plot* plot) {
- TimeSeries time_series("Audio encoder target bitrate", LineStyle::kLine,
- PointStyle::kHighlight);
- auto GetAnaBitrateBps = [](const LoggedAudioNetworkAdaptationEvent& ana_event)
- -> absl::optional<float> {
- if (ana_event.config.bitrate_bps)
- return absl::optional<float>(
- static_cast<float>(*ana_event.config.bitrate_bps));
- return absl::nullopt;
- };
- auto ToCallTime = [this](const LoggedAudioNetworkAdaptationEvent& packet) {
- return this->config_.GetCallTimeSec(packet.log_time_us());
- };
- ProcessPoints<LoggedAudioNetworkAdaptationEvent>(
- ToCallTime, GetAnaBitrateBps,
- parsed_log_.audio_network_adaptation_events(), &time_series);
- plot->AppendTimeSeries(std::move(time_series));
- plot->SetXAxis(config_.CallBeginTimeSec(), config_.CallEndTimeSec(),
- "Time (s)", kLeftMargin, kRightMargin);
- plot->SetSuggestedYAxis(0, 1, "Bitrate (bps)", kBottomMargin, kTopMargin);
- plot->SetTitle("Reported audio encoder target bitrate");
-}
-
-void EventLogAnalyzer::CreateAudioEncoderFrameLengthGraph(Plot* plot) {
- TimeSeries time_series("Audio encoder frame length", LineStyle::kLine,
- PointStyle::kHighlight);
- auto GetAnaFrameLengthMs =
- [](const LoggedAudioNetworkAdaptationEvent& ana_event) {
- if (ana_event.config.frame_length_ms)
- return absl::optional<float>(
- static_cast<float>(*ana_event.config.frame_length_ms));
- return absl::optional<float>();
- };
- auto ToCallTime = [this](const LoggedAudioNetworkAdaptationEvent& packet) {
- return this->config_.GetCallTimeSec(packet.log_time_us());
- };
- ProcessPoints<LoggedAudioNetworkAdaptationEvent>(
- ToCallTime, GetAnaFrameLengthMs,
- parsed_log_.audio_network_adaptation_events(), &time_series);
- plot->AppendTimeSeries(std::move(time_series));
- plot->SetXAxis(config_.CallBeginTimeSec(), config_.CallEndTimeSec(),
- "Time (s)", kLeftMargin, kRightMargin);
- plot->SetSuggestedYAxis(0, 1, "Frame length (ms)", kBottomMargin, kTopMargin);
- plot->SetTitle("Reported audio encoder frame length");
-}
-
-void EventLogAnalyzer::CreateAudioEncoderPacketLossGraph(Plot* plot) {
- TimeSeries time_series("Audio encoder uplink packet loss fraction",
- LineStyle::kLine, PointStyle::kHighlight);
- auto GetAnaPacketLoss =
- [](const LoggedAudioNetworkAdaptationEvent& ana_event) {
- if (ana_event.config.uplink_packet_loss_fraction)
- return absl::optional<float>(static_cast<float>(
- *ana_event.config.uplink_packet_loss_fraction));
- return absl::optional<float>();
- };
- auto ToCallTime = [this](const LoggedAudioNetworkAdaptationEvent& packet) {
- return this->config_.GetCallTimeSec(packet.log_time_us());
- };
- ProcessPoints<LoggedAudioNetworkAdaptationEvent>(
- ToCallTime, GetAnaPacketLoss,
- parsed_log_.audio_network_adaptation_events(), &time_series);
- plot->AppendTimeSeries(std::move(time_series));
- plot->SetXAxis(config_.CallBeginTimeSec(), config_.CallEndTimeSec(),
- "Time (s)", kLeftMargin, kRightMargin);
- plot->SetSuggestedYAxis(0, 10, "Percent lost packets", kBottomMargin,
- kTopMargin);
- plot->SetTitle("Reported audio encoder lost packets");
-}
-
-void EventLogAnalyzer::CreateAudioEncoderEnableFecGraph(Plot* plot) {
- TimeSeries time_series("Audio encoder FEC", LineStyle::kLine,
- PointStyle::kHighlight);
- auto GetAnaFecEnabled =
- [](const LoggedAudioNetworkAdaptationEvent& ana_event) {
- if (ana_event.config.enable_fec)
- return absl::optional<float>(
- static_cast<float>(*ana_event.config.enable_fec));
- return absl::optional<float>();
- };
- auto ToCallTime = [this](const LoggedAudioNetworkAdaptationEvent& packet) {
- return this->config_.GetCallTimeSec(packet.log_time_us());
- };
- ProcessPoints<LoggedAudioNetworkAdaptationEvent>(
- ToCallTime, GetAnaFecEnabled,
- parsed_log_.audio_network_adaptation_events(), &time_series);
- plot->AppendTimeSeries(std::move(time_series));
- plot->SetXAxis(config_.CallBeginTimeSec(), config_.CallEndTimeSec(),
- "Time (s)", kLeftMargin, kRightMargin);
- plot->SetSuggestedYAxis(0, 1, "FEC (false/true)", kBottomMargin, kTopMargin);
- plot->SetTitle("Reported audio encoder FEC");
-}
-
-void EventLogAnalyzer::CreateAudioEncoderEnableDtxGraph(Plot* plot) {
- TimeSeries time_series("Audio encoder DTX", LineStyle::kLine,
- PointStyle::kHighlight);
- auto GetAnaDtxEnabled =
- [](const LoggedAudioNetworkAdaptationEvent& ana_event) {
- if (ana_event.config.enable_dtx)
- return absl::optional<float>(
- static_cast<float>(*ana_event.config.enable_dtx));
- return absl::optional<float>();
- };
- auto ToCallTime = [this](const LoggedAudioNetworkAdaptationEvent& packet) {
- return this->config_.GetCallTimeSec(packet.log_time_us());
- };
- ProcessPoints<LoggedAudioNetworkAdaptationEvent>(
- ToCallTime, GetAnaDtxEnabled,
- parsed_log_.audio_network_adaptation_events(), &time_series);
- plot->AppendTimeSeries(std::move(time_series));
- plot->SetXAxis(config_.CallBeginTimeSec(), config_.CallEndTimeSec(),
- "Time (s)", kLeftMargin, kRightMargin);
- plot->SetSuggestedYAxis(0, 1, "DTX (false/true)", kBottomMargin, kTopMargin);
- plot->SetTitle("Reported audio encoder DTX");
-}
-
-void EventLogAnalyzer::CreateAudioEncoderNumChannelsGraph(Plot* plot) {
- TimeSeries time_series("Audio encoder number of channels", LineStyle::kLine,
- PointStyle::kHighlight);
- auto GetAnaNumChannels =
- [](const LoggedAudioNetworkAdaptationEvent& ana_event) {
- if (ana_event.config.num_channels)
- return absl::optional<float>(
- static_cast<float>(*ana_event.config.num_channels));
- return absl::optional<float>();
- };
- auto ToCallTime = [this](const LoggedAudioNetworkAdaptationEvent& packet) {
- return this->config_.GetCallTimeSec(packet.log_time_us());
- };
- ProcessPoints<LoggedAudioNetworkAdaptationEvent>(
- ToCallTime, GetAnaNumChannels,
- parsed_log_.audio_network_adaptation_events(), &time_series);
- plot->AppendTimeSeries(std::move(time_series));
- plot->SetXAxis(config_.CallBeginTimeSec(), config_.CallEndTimeSec(),
- "Time (s)", kLeftMargin, kRightMargin);
- plot->SetSuggestedYAxis(0, 1, "Number of channels (1 (mono)/2 (stereo))",
- kBottomMargin, kTopMargin);
- plot->SetTitle("Reported audio encoder number of channels");
-}
-
-class NetEqStreamInput : public test::NetEqInput {
- public:
- // Does not take any ownership, and all pointers must refer to valid objects
- // that outlive the one constructed.
- NetEqStreamInput(const std::vector<LoggedRtpPacketIncoming>* packet_stream,
- const std::vector<LoggedAudioPlayoutEvent>* output_events,
- absl::optional<int64_t> end_time_ms)
- : packet_stream_(*packet_stream),
- packet_stream_it_(packet_stream_.begin()),
- output_events_it_(output_events->begin()),
- output_events_end_(output_events->end()),
- end_time_ms_(end_time_ms) {
- RTC_DCHECK(packet_stream);
- RTC_DCHECK(output_events);
- }
-
- absl::optional<int64_t> NextPacketTime() const override {
- if (packet_stream_it_ == packet_stream_.end()) {
- return absl::nullopt;
- }
- if (end_time_ms_ && packet_stream_it_->rtp.log_time_ms() > *end_time_ms_) {
- return absl::nullopt;
- }
- return packet_stream_it_->rtp.log_time_ms();
- }
-
- absl::optional<int64_t> NextOutputEventTime() const override {
- if (output_events_it_ == output_events_end_) {
- return absl::nullopt;
- }
- if (end_time_ms_ && output_events_it_->log_time_ms() > *end_time_ms_) {
- return absl::nullopt;
- }
- return output_events_it_->log_time_ms();
- }
-
- std::unique_ptr<PacketData> PopPacket() override {
- if (packet_stream_it_ == packet_stream_.end()) {
- return std::unique_ptr<PacketData>();
- }
- std::unique_ptr<PacketData> packet_data(new PacketData());
- packet_data->header = packet_stream_it_->rtp.header;
- packet_data->time_ms = packet_stream_it_->rtp.log_time_ms();
-
- // This is a header-only "dummy" packet. Set the payload to all zeros, with
- // length according to the virtual length.
- packet_data->payload.SetSize(packet_stream_it_->rtp.total_length -
- packet_stream_it_->rtp.header_length);
- std::fill_n(packet_data->payload.data(), packet_data->payload.size(), 0);
-
- ++packet_stream_it_;
- return packet_data;
- }
-
- void AdvanceOutputEvent() override {
- if (output_events_it_ != output_events_end_) {
- ++output_events_it_;
- }
- }
-
- bool ended() const override { return !NextEventTime(); }
-
- absl::optional<RTPHeader> NextHeader() const override {
- if (packet_stream_it_ == packet_stream_.end()) {
- return absl::nullopt;
- }
- return packet_stream_it_->rtp.header;
- }
-
- private:
- const std::vector<LoggedRtpPacketIncoming>& packet_stream_;
- std::vector<LoggedRtpPacketIncoming>::const_iterator packet_stream_it_;
- std::vector<LoggedAudioPlayoutEvent>::const_iterator output_events_it_;
- const std::vector<LoggedAudioPlayoutEvent>::const_iterator output_events_end_;
- const absl::optional<int64_t> end_time_ms_;
-};
-
-namespace {
-
-// Factory to create a "replacement decoder" that produces the decoded audio
-// by reading from a file rather than from the encoded payloads.
-class ReplacementAudioDecoderFactory : public AudioDecoderFactory {
- public:
- ReplacementAudioDecoderFactory(const absl::string_view replacement_file_name,
- int file_sample_rate_hz)
- : replacement_file_name_(replacement_file_name),
- file_sample_rate_hz_(file_sample_rate_hz) {}
-
- std::vector<AudioCodecSpec> GetSupportedDecoders() override {
- RTC_NOTREACHED();
- return {};
- }
-
- bool IsSupportedDecoder(const SdpAudioFormat& format) override {
- return true;
- }
-
- std::unique_ptr<AudioDecoder> MakeAudioDecoder(
- const SdpAudioFormat& format,
- absl::optional<AudioCodecPairId> codec_pair_id) override {
- auto replacement_file = std::make_unique<test::ResampleInputAudioFile>(
- replacement_file_name_, file_sample_rate_hz_);
- replacement_file->set_output_rate_hz(48000);
- return std::make_unique<test::FakeDecodeFromFile>(
- std::move(replacement_file), 48000, false);
- }
-
- private:
- const std::string replacement_file_name_;
- const int file_sample_rate_hz_;
-};
-
-// Creates a NetEq test object and all necessary input and output helpers. Runs
-// the test and returns the NetEqDelayAnalyzer object that was used to
-// instrument the test.
-std::unique_ptr<test::NetEqStatsGetter> CreateNetEqTestAndRun(
- const std::vector<LoggedRtpPacketIncoming>* packet_stream,
- const std::vector<LoggedAudioPlayoutEvent>* output_events,
- absl::optional<int64_t> end_time_ms,
- const std::string& replacement_file_name,
- int file_sample_rate_hz) {
- std::unique_ptr<test::NetEqInput> input(
- new NetEqStreamInput(packet_stream, output_events, end_time_ms));
-
- constexpr int kReplacementPt = 127;
- std::set<uint8_t> cn_types;
- std::set<uint8_t> forbidden_types;
- input.reset(new test::NetEqReplacementInput(std::move(input), kReplacementPt,
- cn_types, forbidden_types));
-
- NetEq::Config config;
- config.max_packets_in_buffer = 200;
- config.enable_fast_accelerate = true;
-
- std::unique_ptr<test::VoidAudioSink> output(new test::VoidAudioSink());
-
- rtc::scoped_refptr<AudioDecoderFactory> decoder_factory =
- new rtc::RefCountedObject<ReplacementAudioDecoderFactory>(
- replacement_file_name, file_sample_rate_hz);
-
- test::NetEqTest::DecoderMap codecs = {
- {kReplacementPt, SdpAudioFormat("l16", 48000, 1)}};
-
- std::unique_ptr<test::NetEqDelayAnalyzer> delay_cb(
- new test::NetEqDelayAnalyzer);
- std::unique_ptr<test::NetEqStatsGetter> neteq_stats_getter(
- new test::NetEqStatsGetter(std::move(delay_cb)));
- test::DefaultNetEqTestErrorCallback error_cb;
- test::NetEqTest::Callbacks callbacks;
- callbacks.error_callback = &error_cb;
- callbacks.post_insert_packet = neteq_stats_getter->delay_analyzer();
- callbacks.get_audio_callback = neteq_stats_getter.get();
-
- test::NetEqTest test(config, decoder_factory, codecs, /*text_log=*/nullptr,
- /*factory=*/nullptr, std::move(input), std::move(output),
- callbacks);
- test.Run();
- return neteq_stats_getter;
-}
-} // namespace
-
-EventLogAnalyzer::NetEqStatsGetterMap EventLogAnalyzer::SimulateNetEq(
- const std::string& replacement_file_name,
- int file_sample_rate_hz) const {
- NetEqStatsGetterMap neteq_stats;
-
- for (const auto& stream : parsed_log_.incoming_rtp_packets_by_ssrc()) {
- const uint32_t ssrc = stream.ssrc;
- if (!IsAudioSsrc(kIncomingPacket, ssrc))
- continue;
- const std::vector<LoggedRtpPacketIncoming>* audio_packets =
- &stream.incoming_packets;
- if (audio_packets == nullptr) {
- // No incoming audio stream found.
- continue;
- }
-
- RTC_DCHECK(neteq_stats.find(ssrc) == neteq_stats.end());
-
- std::map<uint32_t, std::vector<LoggedAudioPlayoutEvent>>::const_iterator
- output_events_it = parsed_log_.audio_playout_events().find(ssrc);
- if (output_events_it == parsed_log_.audio_playout_events().end()) {
- // Could not find output events with SSRC matching the input audio stream.
- // Using the first available stream of output events.
- output_events_it = parsed_log_.audio_playout_events().cbegin();
- }
-
- absl::optional<int64_t> end_time_ms =
- log_segments_.empty()
- ? absl::nullopt
- : absl::optional<int64_t>(log_segments_.front().second / 1000);
-
- neteq_stats[ssrc] = CreateNetEqTestAndRun(
- audio_packets, &output_events_it->second, end_time_ms,
- replacement_file_name, file_sample_rate_hz);
- }
-
- return neteq_stats;
-}
-
-// Given a NetEqStatsGetter and the SSRC that the NetEqStatsGetter was created
-// for, this method generates a plot for the jitter buffer delay profile.
-void EventLogAnalyzer::CreateAudioJitterBufferGraph(
- uint32_t ssrc,
- const test::NetEqStatsGetter* stats_getter,
- Plot* plot) const {
- test::NetEqDelayAnalyzer::Delays arrival_delay_ms;
- test::NetEqDelayAnalyzer::Delays corrected_arrival_delay_ms;
- test::NetEqDelayAnalyzer::Delays playout_delay_ms;
- test::NetEqDelayAnalyzer::Delays target_delay_ms;
-
- stats_getter->delay_analyzer()->CreateGraphs(
- &arrival_delay_ms, &corrected_arrival_delay_ms, &playout_delay_ms,
- &target_delay_ms);
-
- TimeSeries time_series_packet_arrival("packet arrival delay",
- LineStyle::kLine);
- TimeSeries time_series_relative_packet_arrival(
- "Relative packet arrival delay", LineStyle::kLine);
- TimeSeries time_series_play_time("Playout delay", LineStyle::kLine);
- TimeSeries time_series_target_time("Target delay", LineStyle::kLine,
- PointStyle::kHighlight);
-
- for (const auto& data : arrival_delay_ms) {
- const float x = config_.GetCallTimeSec(data.first * 1000); // ms to us.
- const float y = data.second;
- time_series_packet_arrival.points.emplace_back(TimeSeriesPoint(x, y));
- }
- for (const auto& data : corrected_arrival_delay_ms) {
- const float x = config_.GetCallTimeSec(data.first * 1000); // ms to us.
- const float y = data.second;
- time_series_relative_packet_arrival.points.emplace_back(
- TimeSeriesPoint(x, y));
- }
- for (const auto& data : playout_delay_ms) {
- const float x = config_.GetCallTimeSec(data.first * 1000); // ms to us.
- const float y = data.second;
- time_series_play_time.points.emplace_back(TimeSeriesPoint(x, y));
- }
- for (const auto& data : target_delay_ms) {
- const float x = config_.GetCallTimeSec(data.first * 1000); // ms to us.
- const float y = data.second;
- time_series_target_time.points.emplace_back(TimeSeriesPoint(x, y));
- }
-
- plot->AppendTimeSeries(std::move(time_series_packet_arrival));
- plot->AppendTimeSeries(std::move(time_series_relative_packet_arrival));
- plot->AppendTimeSeries(std::move(time_series_play_time));
- plot->AppendTimeSeries(std::move(time_series_target_time));
-
- plot->SetXAxis(config_.CallBeginTimeSec(), config_.CallEndTimeSec(),
- "Time (s)", kLeftMargin, kRightMargin);
- plot->SetSuggestedYAxis(0, 1, "Relative delay (ms)", kBottomMargin,
- kTopMargin);
- plot->SetTitle("NetEq timing for " + GetStreamName(kIncomingPacket, ssrc));
-}
-
-template <typename NetEqStatsType>
-void EventLogAnalyzer::CreateNetEqStatsGraphInternal(
- const NetEqStatsGetterMap& neteq_stats,
- rtc::FunctionView<const std::vector<std::pair<int64_t, NetEqStatsType>>*(
- const test::NetEqStatsGetter*)> data_extractor,
- rtc::FunctionView<float(const NetEqStatsType&)> stats_extractor,
- const std::string& plot_name,
- Plot* plot) const {
- std::map<uint32_t, TimeSeries> time_series;
-
- for (const auto& st : neteq_stats) {
- const uint32_t ssrc = st.first;
- const std::vector<std::pair<int64_t, NetEqStatsType>>* data_vector =
- data_extractor(st.second.get());
- for (const auto& data : *data_vector) {
- const float time =
- config_.GetCallTimeSec(data.first * 1000); // ms to us.
- const float value = stats_extractor(data.second);
- time_series[ssrc].points.emplace_back(TimeSeriesPoint(time, value));
- }
- }
-
- for (auto& series : time_series) {
- series.second.label = GetStreamName(kIncomingPacket, series.first);
- series.second.line_style = LineStyle::kLine;
- plot->AppendTimeSeries(std::move(series.second));
- }
-
- plot->SetXAxis(config_.CallBeginTimeSec(), config_.CallEndTimeSec(),
- "Time (s)", kLeftMargin, kRightMargin);
- plot->SetSuggestedYAxis(0, 1, plot_name, kBottomMargin, kTopMargin);
- plot->SetTitle(plot_name);
-}
-
-void EventLogAnalyzer::CreateNetEqNetworkStatsGraph(
- const NetEqStatsGetterMap& neteq_stats,
- rtc::FunctionView<float(const NetEqNetworkStatistics&)> stats_extractor,
- const std::string& plot_name,
- Plot* plot) const {
- CreateNetEqStatsGraphInternal<NetEqNetworkStatistics>(
- neteq_stats,
- [](const test::NetEqStatsGetter* stats_getter) {
- return stats_getter->stats();
- },
- stats_extractor, plot_name, plot);
-}
-
-void EventLogAnalyzer::CreateNetEqLifetimeStatsGraph(
- const NetEqStatsGetterMap& neteq_stats,
- rtc::FunctionView<float(const NetEqLifetimeStatistics&)> stats_extractor,
- const std::string& plot_name,
- Plot* plot) const {
- CreateNetEqStatsGraphInternal<NetEqLifetimeStatistics>(
- neteq_stats,
- [](const test::NetEqStatsGetter* stats_getter) {
- return stats_getter->lifetime_stats();
- },
- stats_extractor, plot_name, plot);
-}
-
void EventLogAnalyzer::CreateIceCandidatePairConfigGraph(Plot* plot) {
std::map<uint32_t, TimeSeries> configs_by_cp_id;
for (const auto& config : parsed_log_.ice_candidate_pair_configs()) {
@@ -2326,181 +1775,4 @@ void EventLogAnalyzer::CreateDtlsWritableStateGraph(Plot* plot) {
plot->SetTitle("DTLS Writable State");
}
-void EventLogAnalyzer::PrintNotifications(FILE* file) {
- fprintf(file, "========== TRIAGE NOTIFICATIONS ==========\n");
- for (const auto& alert : incoming_rtp_recv_time_gaps_) {
- fprintf(file, "%3.3lf s : %s\n", alert.Time(), alert.ToString().c_str());
- }
- for (const auto& alert : incoming_rtcp_recv_time_gaps_) {
- fprintf(file, "%3.3lf s : %s\n", alert.Time(), alert.ToString().c_str());
- }
- for (const auto& alert : outgoing_rtp_send_time_gaps_) {
- fprintf(file, "%3.3lf s : %s\n", alert.Time(), alert.ToString().c_str());
- }
- for (const auto& alert : outgoing_rtcp_send_time_gaps_) {
- fprintf(file, "%3.3lf s : %s\n", alert.Time(), alert.ToString().c_str());
- }
- for (const auto& alert : incoming_seq_num_jumps_) {
- fprintf(file, "%3.3lf s : %s\n", alert.Time(), alert.ToString().c_str());
- }
- for (const auto& alert : incoming_capture_time_jumps_) {
- fprintf(file, "%3.3lf s : %s\n", alert.Time(), alert.ToString().c_str());
- }
- for (const auto& alert : outgoing_seq_num_jumps_) {
- fprintf(file, "%3.3lf s : %s\n", alert.Time(), alert.ToString().c_str());
- }
- for (const auto& alert : outgoing_capture_time_jumps_) {
- fprintf(file, "%3.3lf s : %s\n", alert.Time(), alert.ToString().c_str());
- }
- for (const auto& alert : outgoing_high_loss_alerts_) {
- fprintf(file, " : %s\n", alert.ToString().c_str());
- }
- fprintf(file, "========== END TRIAGE NOTIFICATIONS ==========\n");
-}
-
-void EventLogAnalyzer::CreateStreamGapAlerts(PacketDirection direction) {
- // With 100 packets/s (~800kbps), false positives would require 10 s without
- // data.
- constexpr int64_t kMaxSeqNumJump = 1000;
- // With a 90 kHz clock, false positives would require 10 s without data.
- constexpr int64_t kMaxCaptureTimeJump = 900000;
-
- int64_t end_time_us = log_segments_.empty()
- ? std::numeric_limits<int64_t>::max()
- : log_segments_.front().second;
-
- SeqNumUnwrapper<uint16_t> seq_num_unwrapper;
- absl::optional<int64_t> last_seq_num;
- SeqNumUnwrapper<uint32_t> capture_time_unwrapper;
- absl::optional<int64_t> last_capture_time;
- // Check for gaps in sequence numbers and capture timestamps.
- for (const auto& stream : parsed_log_.rtp_packets_by_ssrc(direction)) {
- for (const auto& packet : stream.packet_view) {
- if (packet.log_time_us() > end_time_us) {
- // Only process the first (LOG_START, LOG_END) segment.
- break;
- }
-
- int64_t seq_num = seq_num_unwrapper.Unwrap(packet.header.sequenceNumber);
- if (last_seq_num.has_value() &&
- std::abs(seq_num - last_seq_num.value()) > kMaxSeqNumJump) {
- Alert_SeqNumJump(direction,
- config_.GetCallTimeSec(packet.log_time_us()),
- packet.header.ssrc);
- }
- last_seq_num.emplace(seq_num);
-
- int64_t capture_time =
- capture_time_unwrapper.Unwrap(packet.header.timestamp);
- if (last_capture_time.has_value() &&
- std::abs(capture_time - last_capture_time.value()) >
- kMaxCaptureTimeJump) {
- Alert_CaptureTimeJump(direction,
- config_.GetCallTimeSec(packet.log_time_us()),
- packet.header.ssrc);
- }
- last_capture_time.emplace(capture_time);
- }
- }
-}
-
-void EventLogAnalyzer::CreateTransmissionGapAlerts(PacketDirection direction) {
- constexpr int64_t kMaxRtpTransmissionGap = 500000;
- constexpr int64_t kMaxRtcpTransmissionGap = 2000000;
- int64_t end_time_us = log_segments_.empty()
- ? std::numeric_limits<int64_t>::max()
- : log_segments_.front().second;
-
- // TODO(terelius): The parser could provide a list of all packets, ordered
- // by time, for each direction.
- std::multimap<int64_t, const LoggedRtpPacket*> rtp_in_direction;
- for (const auto& stream : parsed_log_.rtp_packets_by_ssrc(direction)) {
- for (const LoggedRtpPacket& rtp_packet : stream.packet_view)
- rtp_in_direction.emplace(rtp_packet.log_time_us(), &rtp_packet);
- }
- absl::optional<int64_t> last_rtp_time;
- for (const auto& kv : rtp_in_direction) {
- int64_t timestamp = kv.first;
- if (timestamp > end_time_us) {
- // Only process the first (LOG_START, LOG_END) segment.
- break;
- }
- int64_t duration = timestamp - last_rtp_time.value_or(0);
- if (last_rtp_time.has_value() && duration > kMaxRtpTransmissionGap) {
- // No packet sent/received for more than 500 ms.
- Alert_RtpLogTimeGap(direction, config_.GetCallTimeSec(timestamp),
- duration / 1000);
- }
- last_rtp_time.emplace(timestamp);
- }
-
- absl::optional<int64_t> last_rtcp_time;
- if (direction == kIncomingPacket) {
- for (const auto& rtcp : parsed_log_.incoming_rtcp_packets()) {
- if (rtcp.log_time_us() > end_time_us) {
- // Only process the first (LOG_START, LOG_END) segment.
- break;
- }
- int64_t duration = rtcp.log_time_us() - last_rtcp_time.value_or(0);
- if (last_rtcp_time.has_value() && duration > kMaxRtcpTransmissionGap) {
- // No feedback sent/received for more than 2000 ms.
- Alert_RtcpLogTimeGap(direction,
- config_.GetCallTimeSec(rtcp.log_time_us()),
- duration / 1000);
- }
- last_rtcp_time.emplace(rtcp.log_time_us());
- }
- } else {
- for (const auto& rtcp : parsed_log_.outgoing_rtcp_packets()) {
- if (rtcp.log_time_us() > end_time_us) {
- // Only process the first (LOG_START, LOG_END) segment.
- break;
- }
- int64_t duration = rtcp.log_time_us() - last_rtcp_time.value_or(0);
- if (last_rtcp_time.has_value() && duration > kMaxRtcpTransmissionGap) {
- // No feedback sent/received for more than 2000 ms.
- Alert_RtcpLogTimeGap(direction,
- config_.GetCallTimeSec(rtcp.log_time_us()),
- duration / 1000);
- }
- last_rtcp_time.emplace(rtcp.log_time_us());
- }
- }
-}
-
-// TODO(terelius): Notifications could possibly be generated by the same code
-// that produces the graphs. There is some code duplication that could be
-// avoided, but that might be solved anyway when we move functionality from the
-// analyzer to the parser.
-void EventLogAnalyzer::CreateTriageNotifications() {
- CreateStreamGapAlerts(kIncomingPacket);
- CreateStreamGapAlerts(kOutgoingPacket);
- CreateTransmissionGapAlerts(kIncomingPacket);
- CreateTransmissionGapAlerts(kOutgoingPacket);
-
- int64_t end_time_us = log_segments_.empty()
- ? std::numeric_limits<int64_t>::max()
- : log_segments_.front().second;
-
- constexpr double kMaxLossFraction = 0.05;
- // Loss feedback
- int64_t total_lost_packets = 0;
- int64_t total_expected_packets = 0;
- for (auto& bwe_update : parsed_log_.bwe_loss_updates()) {
- if (bwe_update.log_time_us() > end_time_us) {
- // Only process the first (LOG_START, LOG_END) segment.
- break;
- }
- int64_t lost_packets = static_cast<double>(bwe_update.fraction_lost) / 255 *
- bwe_update.expected_packets;
- total_lost_packets += lost_packets;
- total_expected_packets += bwe_update.expected_packets;
- }
- double avg_outgoing_loss =
- static_cast<double>(total_lost_packets) / total_expected_packets;
- if (avg_outgoing_loss > kMaxLossFraction) {
- Alert_OutgoingHighLoss(avg_outgoing_loss);
- }
-}
-
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyzer.h b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyzer.h
index 1e091099590..4918cf48e1a 100644
--- a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyzer.h
+++ b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyzer.h
@@ -21,41 +21,18 @@
#include "logging/rtc_event_log/rtc_event_log_parser.h"
#include "modules/audio_coding/neteq/tools/neteq_stats_getter.h"
#include "rtc_base/strings/string_builder.h"
+#include "rtc_tools/rtc_event_log_visualizer/analyzer_common.h"
#include "rtc_tools/rtc_event_log_visualizer/plot_base.h"
-#include "rtc_tools/rtc_event_log_visualizer/triage_notifications.h"
namespace webrtc {
-class AnalyzerConfig {
- public:
- float GetCallTimeSec(int64_t timestamp_us) const {
- int64_t offset = normalize_time_ ? begin_time_ : 0;
- return static_cast<float>(timestamp_us - offset) / 1000000;
- }
-
- float CallBeginTimeSec() const { return GetCallTimeSec(begin_time_); }
-
- float CallEndTimeSec() const { return GetCallTimeSec(end_time_); }
-
- // Window and step size used for calculating moving averages, e.g. bitrate.
- // The generated data points will be |step_| microseconds apart.
- // Only events occurring at most |window_duration_| microseconds before the
- // current data point will be part of the average.
- int64_t window_duration_;
- int64_t step_;
-
- // First and last events of the log.
- int64_t begin_time_;
- int64_t end_time_;
- bool normalize_time_;
-};
-
class EventLogAnalyzer {
public:
// The EventLogAnalyzer keeps a reference to the ParsedRtcEventLogNew for the
// duration of its lifetime. The ParsedRtcEventLogNew must not be destroyed or
// modified while the EventLogAnalyzer is being used.
EventLogAnalyzer(const ParsedRtcEventLog& log, bool normalize_time);
+ EventLogAnalyzer(const ParsedRtcEventLog& log, const AnalyzerConfig& config);
void CreatePacketGraph(PacketDirection direction, Plot* plot);
@@ -102,32 +79,6 @@ class EventLogAnalyzer {
std::string yaxis_label,
Plot* plot);
- void CreateAudioEncoderTargetBitrateGraph(Plot* plot);
- void CreateAudioEncoderFrameLengthGraph(Plot* plot);
- void CreateAudioEncoderPacketLossGraph(Plot* plot);
- void CreateAudioEncoderEnableFecGraph(Plot* plot);
- void CreateAudioEncoderEnableDtxGraph(Plot* plot);
- void CreateAudioEncoderNumChannelsGraph(Plot* plot);
-
- using NetEqStatsGetterMap =
- std::map<uint32_t, std::unique_ptr<test::NetEqStatsGetter>>;
- NetEqStatsGetterMap SimulateNetEq(const std::string& replacement_file_name,
- int file_sample_rate_hz) const;
-
- void CreateAudioJitterBufferGraph(uint32_t ssrc,
- const test::NetEqStatsGetter* stats_getter,
- Plot* plot) const;
- void CreateNetEqNetworkStatsGraph(
- const NetEqStatsGetterMap& neteq_stats_getters,
- rtc::FunctionView<float(const NetEqNetworkStatistics&)> stats_extractor,
- const std::string& plot_name,
- Plot* plot) const;
- void CreateNetEqLifetimeStatsGraph(
- const NetEqStatsGetterMap& neteq_stats_getters,
- rtc::FunctionView<float(const NetEqLifetimeStatistics&)> stats_extractor,
- const std::string& plot_name,
- Plot* plot) const;
-
void CreateIceCandidatePairConfigGraph(Plot* plot);
void CreateIceConnectivityCheckGraph(Plot* plot);
@@ -138,145 +89,11 @@ class EventLogAnalyzer {
void PrintNotifications(FILE* file);
private:
- struct LayerDescription {
- LayerDescription(uint32_t ssrc,
- uint8_t spatial_layer,
- uint8_t temporal_layer)
- : ssrc(ssrc),
- spatial_layer(spatial_layer),
- temporal_layer(temporal_layer) {}
- bool operator<(const LayerDescription& other) const {
- if (ssrc != other.ssrc)
- return ssrc < other.ssrc;
- if (spatial_layer != other.spatial_layer)
- return spatial_layer < other.spatial_layer;
- return temporal_layer < other.temporal_layer;
- }
- uint32_t ssrc;
- uint8_t spatial_layer;
- uint8_t temporal_layer;
- };
-
- bool IsRtxSsrc(PacketDirection direction, uint32_t ssrc) const {
- if (direction == kIncomingPacket) {
- return parsed_log_.incoming_rtx_ssrcs().find(ssrc) !=
- parsed_log_.incoming_rtx_ssrcs().end();
- } else {
- return parsed_log_.outgoing_rtx_ssrcs().find(ssrc) !=
- parsed_log_.outgoing_rtx_ssrcs().end();
- }
- }
-
- bool IsVideoSsrc(PacketDirection direction, uint32_t ssrc) const {
- if (direction == kIncomingPacket) {
- return parsed_log_.incoming_video_ssrcs().find(ssrc) !=
- parsed_log_.incoming_video_ssrcs().end();
- } else {
- return parsed_log_.outgoing_video_ssrcs().find(ssrc) !=
- parsed_log_.outgoing_video_ssrcs().end();
- }
- }
-
- bool IsAudioSsrc(PacketDirection direction, uint32_t ssrc) const {
- if (direction == kIncomingPacket) {
- return parsed_log_.incoming_audio_ssrcs().find(ssrc) !=
- parsed_log_.incoming_audio_ssrcs().end();
- } else {
- return parsed_log_.outgoing_audio_ssrcs().find(ssrc) !=
- parsed_log_.outgoing_audio_ssrcs().end();
- }
- }
-
- template <typename NetEqStatsType>
- void CreateNetEqStatsGraphInternal(
- const NetEqStatsGetterMap& neteq_stats,
- rtc::FunctionView<const std::vector<std::pair<int64_t, NetEqStatsType>>*(
- const test::NetEqStatsGetter*)> data_extractor,
- rtc::FunctionView<float(const NetEqStatsType&)> stats_extractor,
- const std::string& plot_name,
- Plot* plot) const;
-
template <typename IterableType>
void CreateAccumulatedPacketsTimeSeries(Plot* plot,
const IterableType& packets,
const std::string& label);
- void CreateStreamGapAlerts(PacketDirection direction);
- void CreateTransmissionGapAlerts(PacketDirection direction);
-
- std::string GetStreamName(PacketDirection direction, uint32_t ssrc) const {
- char buffer[200];
- rtc::SimpleStringBuilder name(buffer);
- if (IsAudioSsrc(direction, ssrc)) {
- name << "Audio ";
- } else if (IsVideoSsrc(direction, ssrc)) {
- name << "Video ";
- } else {
- name << "Unknown ";
- }
- if (IsRtxSsrc(direction, ssrc)) {
- name << "RTX ";
- }
- if (direction == kIncomingPacket)
- name << "(In) ";
- else
- name << "(Out) ";
- name << "SSRC " << ssrc;
- return name.str();
- }
-
- std::string GetLayerName(LayerDescription layer) const {
- char buffer[100];
- rtc::SimpleStringBuilder name(buffer);
- name << "SSRC " << layer.ssrc << " sl " << layer.spatial_layer << ", tl "
- << layer.temporal_layer;
- return name.str();
- }
-
- void Alert_RtpLogTimeGap(PacketDirection direction,
- float time_seconds,
- int64_t duration) {
- if (direction == kIncomingPacket) {
- incoming_rtp_recv_time_gaps_.emplace_back(time_seconds, duration);
- } else {
- outgoing_rtp_send_time_gaps_.emplace_back(time_seconds, duration);
- }
- }
-
- void Alert_RtcpLogTimeGap(PacketDirection direction,
- float time_seconds,
- int64_t duration) {
- if (direction == kIncomingPacket) {
- incoming_rtcp_recv_time_gaps_.emplace_back(time_seconds, duration);
- } else {
- outgoing_rtcp_send_time_gaps_.emplace_back(time_seconds, duration);
- }
- }
-
- void Alert_SeqNumJump(PacketDirection direction,
- float time_seconds,
- uint32_t ssrc) {
- if (direction == kIncomingPacket) {
- incoming_seq_num_jumps_.emplace_back(time_seconds, ssrc);
- } else {
- outgoing_seq_num_jumps_.emplace_back(time_seconds, ssrc);
- }
- }
-
- void Alert_CaptureTimeJump(PacketDirection direction,
- float time_seconds,
- uint32_t ssrc) {
- if (direction == kIncomingPacket) {
- incoming_capture_time_jumps_.emplace_back(time_seconds, ssrc);
- } else {
- outgoing_capture_time_jumps_.emplace_back(time_seconds, ssrc);
- }
- }
-
- void Alert_OutgoingHighLoss(double avg_loss_fraction) {
- outgoing_high_loss_alerts_.emplace_back(avg_loss_fraction);
- }
-
std::string GetCandidatePairLogDescriptionFromId(uint32_t candidate_pair_id);
const ParsedRtcEventLog& parsed_log_;
@@ -285,20 +102,6 @@ class EventLogAnalyzer {
// If left empty, all SSRCs will be considered relevant.
std::vector<uint32_t> desired_ssrc_;
- // Stores the timestamps for all log segments, in the form of associated start
- // and end events.
- std::vector<std::pair<int64_t, int64_t>> log_segments_;
-
- std::vector<IncomingRtpReceiveTimeGap> incoming_rtp_recv_time_gaps_;
- std::vector<IncomingRtcpReceiveTimeGap> incoming_rtcp_recv_time_gaps_;
- std::vector<OutgoingRtpSendTimeGap> outgoing_rtp_send_time_gaps_;
- std::vector<OutgoingRtcpSendTimeGap> outgoing_rtcp_send_time_gaps_;
- std::vector<IncomingSeqNumJump> incoming_seq_num_jumps_;
- std::vector<IncomingCaptureTimeJump> incoming_capture_time_jumps_;
- std::vector<OutgoingSeqNoJump> outgoing_seq_num_jumps_;
- std::vector<OutgoingCaptureTimeJump> outgoing_capture_time_jumps_;
- std::vector<OutgoingHighLoss> outgoing_high_loss_alerts_;
-
std::map<uint32_t, std::string> candidate_pair_desc_by_id_;
AnalyzerConfig config_;
diff --git a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyzer_common.cc b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyzer_common.cc
new file mode 100644
index 00000000000..3d3ce5a4aca
--- /dev/null
+++ b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyzer_common.cc
@@ -0,0 +1,83 @@
+
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtc_tools/rtc_event_log_visualizer/analyzer_common.h"
+
+namespace webrtc {
+
+bool IsRtxSsrc(const ParsedRtcEventLog& parsed_log,
+ PacketDirection direction,
+ uint32_t ssrc) {
+ if (direction == kIncomingPacket) {
+ return parsed_log.incoming_rtx_ssrcs().find(ssrc) !=
+ parsed_log.incoming_rtx_ssrcs().end();
+ } else {
+ return parsed_log.outgoing_rtx_ssrcs().find(ssrc) !=
+ parsed_log.outgoing_rtx_ssrcs().end();
+ }
+}
+
+bool IsVideoSsrc(const ParsedRtcEventLog& parsed_log,
+ PacketDirection direction,
+ uint32_t ssrc) {
+ if (direction == kIncomingPacket) {
+ return parsed_log.incoming_video_ssrcs().find(ssrc) !=
+ parsed_log.incoming_video_ssrcs().end();
+ } else {
+ return parsed_log.outgoing_video_ssrcs().find(ssrc) !=
+ parsed_log.outgoing_video_ssrcs().end();
+ }
+}
+
+bool IsAudioSsrc(const ParsedRtcEventLog& parsed_log,
+ PacketDirection direction,
+ uint32_t ssrc) {
+ if (direction == kIncomingPacket) {
+ return parsed_log.incoming_audio_ssrcs().find(ssrc) !=
+ parsed_log.incoming_audio_ssrcs().end();
+ } else {
+ return parsed_log.outgoing_audio_ssrcs().find(ssrc) !=
+ parsed_log.outgoing_audio_ssrcs().end();
+ }
+}
+
+std::string GetStreamName(const ParsedRtcEventLog& parsed_log,
+ PacketDirection direction,
+ uint32_t ssrc) {
+ char buffer[200];
+ rtc::SimpleStringBuilder name(buffer);
+ if (IsAudioSsrc(parsed_log, direction, ssrc)) {
+ name << "Audio ";
+ } else if (IsVideoSsrc(parsed_log, direction, ssrc)) {
+ name << "Video ";
+ } else {
+ name << "Unknown ";
+ }
+ if (IsRtxSsrc(parsed_log, direction, ssrc)) {
+ name << "RTX ";
+ }
+ if (direction == kIncomingPacket)
+ name << "(In) ";
+ else
+ name << "(Out) ";
+ name << "SSRC " << ssrc;
+ return name.str();
+}
+
+std::string GetLayerName(LayerDescription layer) {
+ char buffer[100];
+ rtc::SimpleStringBuilder name(buffer);
+ name << "SSRC " << layer.ssrc << " sl " << layer.spatial_layer << ", tl "
+ << layer.temporal_layer;
+ return name.str();
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyzer_common.h b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyzer_common.h
new file mode 100644
index 00000000000..d5776acf62e
--- /dev/null
+++ b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyzer_common.h
@@ -0,0 +1,182 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ANALYZER_COMMON_H_
+#define RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ANALYZER_COMMON_H_
+
+#include <cstdint>
+#include <string>
+
+#include "absl/types/optional.h"
+#include "api/function_view.h"
+#include "logging/rtc_event_log/rtc_event_log_parser.h"
+#include "rtc_tools/rtc_event_log_visualizer/plot_base.h"
+
+namespace webrtc {
+
+constexpr int kNumMicrosecsPerSec = 1000000;
+constexpr float kLeftMargin = 0.01f;
+constexpr float kRightMargin = 0.02f;
+constexpr float kBottomMargin = 0.02f;
+constexpr float kTopMargin = 0.05f;
+
+class AnalyzerConfig {
+ public:
+ float GetCallTimeSec(int64_t timestamp_us) const {
+ int64_t offset = normalize_time_ ? begin_time_ : 0;
+ return static_cast<float>(timestamp_us - offset) / 1000000;
+ }
+
+ float CallBeginTimeSec() const { return GetCallTimeSec(begin_time_); }
+
+ float CallEndTimeSec() const { return GetCallTimeSec(end_time_); }
+
+ // Window and step size used for calculating moving averages, e.g. bitrate.
+ // The generated data points will be |step_| microseconds apart.
+ // Only events occurring at most |window_duration_| microseconds before the
+ // current data point will be part of the average.
+ int64_t window_duration_;
+ int64_t step_;
+
+ // First and last events of the log.
+ int64_t begin_time_;
+ int64_t end_time_;
+ bool normalize_time_;
+};
+
+struct LayerDescription {
+ LayerDescription(uint32_t ssrc, uint8_t spatial_layer, uint8_t temporal_layer)
+ : ssrc(ssrc),
+ spatial_layer(spatial_layer),
+ temporal_layer(temporal_layer) {}
+ bool operator<(const LayerDescription& other) const {
+ if (ssrc != other.ssrc)
+ return ssrc < other.ssrc;
+ if (spatial_layer != other.spatial_layer)
+ return spatial_layer < other.spatial_layer;
+ return temporal_layer < other.temporal_layer;
+ }
+ uint32_t ssrc;
+ uint8_t spatial_layer;
+ uint8_t temporal_layer;
+};
+
+bool IsRtxSsrc(const ParsedRtcEventLog& parsed_log,
+ PacketDirection direction,
+ uint32_t ssrc);
+bool IsVideoSsrc(const ParsedRtcEventLog& parsed_log,
+ PacketDirection direction,
+ uint32_t ssrc);
+bool IsAudioSsrc(const ParsedRtcEventLog& parsed_log,
+ PacketDirection direction,
+ uint32_t ssrc);
+
+std::string GetStreamName(const ParsedRtcEventLog& parsed_log,
+ PacketDirection direction,
+ uint32_t ssrc);
+std::string GetLayerName(LayerDescription layer);
+
+// For each element in data_view, use |f()| to extract a y-coordinate and
+// store the result in a TimeSeries.
+template <typename DataType, typename IterableType>
+void ProcessPoints(rtc::FunctionView<float(const DataType&)> fx,
+ rtc::FunctionView<absl::optional<float>(const DataType&)> fy,
+ const IterableType& data_view,
+ TimeSeries* result) {
+ for (size_t i = 0; i < data_view.size(); i++) {
+ const DataType& elem = data_view[i];
+ float x = fx(elem);
+ absl::optional<float> y = fy(elem);
+ if (y)
+ result->points.emplace_back(x, *y);
+ }
+}
+
+// For each pair of adjacent elements in |data|, use |f()| to extract a
+// y-coordinate and store the result in a TimeSeries. Note that the x-coordinate
+// will be the time of the second element in the pair.
+template <typename DataType, typename ResultType, typename IterableType>
+void ProcessPairs(
+ rtc::FunctionView<float(const DataType&)> fx,
+ rtc::FunctionView<absl::optional<ResultType>(const DataType&,
+ const DataType&)> fy,
+ const IterableType& data,
+ TimeSeries* result) {
+ for (size_t i = 1; i < data.size(); i++) {
+ float x = fx(data[i]);
+ absl::optional<ResultType> y = fy(data[i - 1], data[i]);
+ if (y)
+ result->points.emplace_back(x, static_cast<float>(*y));
+ }
+}
+
+// For each pair of adjacent elements in |data|, use |f()| to extract a
+// y-coordinate and store the result in a TimeSeries. Note that the x-coordinate
+// will be the time of the second element in the pair.
+template <typename DataType, typename ResultType, typename IterableType>
+void AccumulatePairs(
+ rtc::FunctionView<float(const DataType&)> fx,
+ rtc::FunctionView<absl::optional<ResultType>(const DataType&,
+ const DataType&)> fy,
+ const IterableType& data,
+ TimeSeries* result) {
+ ResultType sum = 0;
+ for (size_t i = 1; i < data.size(); i++) {
+ float x = fx(data[i]);
+ absl::optional<ResultType> y = fy(data[i - 1], data[i]);
+ if (y) {
+ sum += *y;
+ result->points.emplace_back(x, static_cast<float>(sum));
+ }
+ }
+}
+
+// Calculates a moving average of |data| and stores the result in a TimeSeries.
+// A data point is generated every |step| microseconds from |begin_time|
+// to |end_time|. The value of each data point is the average of the data
+// during the preceding |window_duration_us| microseconds.
+template <typename DataType, typename ResultType, typename IterableType>
+void MovingAverage(
+ rtc::FunctionView<absl::optional<ResultType>(const DataType&)> fy,
+ const IterableType& data_view,
+ AnalyzerConfig config,
+ TimeSeries* result) {
+ size_t window_index_begin = 0;
+ size_t window_index_end = 0;
+ ResultType sum_in_window = 0;
+
+ for (int64_t t = config.begin_time_; t < config.end_time_ + config.step_;
+ t += config.step_) {
+ while (window_index_end < data_view.size() &&
+ data_view[window_index_end].log_time_us() < t) {
+ absl::optional<ResultType> value = fy(data_view[window_index_end]);
+ if (value)
+ sum_in_window += *value;
+ ++window_index_end;
+ }
+ while (window_index_begin < data_view.size() &&
+ data_view[window_index_begin].log_time_us() <
+ t - config.window_duration_) {
+ absl::optional<ResultType> value = fy(data_view[window_index_begin]);
+ if (value)
+ sum_in_window -= *value;
+ ++window_index_begin;
+ }
+ float window_duration_s =
+ static_cast<float>(config.window_duration_) / kNumMicrosecsPerSec;
+ float x = config.GetCallTimeSec(t);
+ float y = sum_in_window / window_duration_s;
+ result->points.emplace_back(x, y);
+ }
+}
+
+} // namespace webrtc
+
+#endif // RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ANALYZER_COMMON_H_
diff --git a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/main.cc b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/main.cc
index eb36b2679ee..2563338e1a3 100644
--- a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/main.cc
+++ b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/main.cc
@@ -30,10 +30,10 @@
#include "modules/rtp_rtcp/source/rtcp_packet/report_block.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
+#include "rtc_tools/rtc_event_log_visualizer/alerts.h"
+#include "rtc_tools/rtc_event_log_visualizer/analyze_audio.h"
#include "rtc_tools/rtc_event_log_visualizer/analyzer.h"
#include "rtc_tools/rtc_event_log_visualizer/plot_base.h"
-#include "rtc_tools/rtc_event_log_visualizer/plot_protobuf.h"
-#include "rtc_tools/rtc_event_log_visualizer/plot_python.h"
#include "system_wrappers/include/field_trial.h"
#include "test/field_trial.h"
#include "test/testsupport/file_utils.h"
@@ -77,7 +77,7 @@ ABSL_FLAG(bool,
ABSL_FLAG(bool,
print_triage_alerts,
- false,
+ true,
"Print triage alerts, i.e. a list of potential problems.");
ABSL_FLAG(bool,
@@ -194,9 +194,9 @@ int main(int argc, char* argv[]) {
"A tool for visualizing WebRTC event logs.\n"
"Example usage:\n"
"./event_log_visualizer <logfile> | python\n");
- absl::FlagsUsageConfig config;
- config.contains_help_flags = &ContainsHelppackageFlags;
- absl::SetFlagsUsageConfig(config);
+ absl::FlagsUsageConfig flag_config;
+ flag_config.contains_help_flags = &ContainsHelppackageFlags;
+ absl::SetFlagsUsageConfig(flag_config);
std::vector<char*> args = absl::ParseCommandLine(argc, argv);
// Print RTC_LOG warnings and errors even in release builds.
@@ -261,16 +261,22 @@ int main(int argc, char* argv[]) {
}
}
- webrtc::EventLogAnalyzer analyzer(parsed_log,
- absl::GetFlag(FLAGS_normalize_time));
- std::unique_ptr<webrtc::PlotCollection> collection;
- if (absl::GetFlag(FLAGS_protobuf_output)) {
- collection.reset(new webrtc::ProtobufPlotCollection());
- } else {
- collection.reset(
- new webrtc::PythonPlotCollection(absl::GetFlag(FLAGS_shared_xaxis)));
+ webrtc::AnalyzerConfig config;
+ config.window_duration_ = 250000;
+ config.step_ = 10000;
+ config.normalize_time_ = absl::GetFlag(FLAGS_normalize_time);
+ config.begin_time_ = parsed_log.first_timestamp();
+ config.end_time_ = parsed_log.last_timestamp();
+ if (config.end_time_ < config.begin_time_) {
+ RTC_LOG(LS_WARNING) << "Log end time " << config.end_time_
+ << " not after begin time " << config.begin_time_
+ << ". Nothing to analyze. Is the log broken?";
+ return -1;
}
+ webrtc::EventLogAnalyzer analyzer(parsed_log, config);
+ webrtc::PlotCollection collection;
+
PlotMap plots;
plots.RegisterPlot("incoming_packet_sizes", [&](Plot* plot) {
analyzer.CreatePacketGraph(webrtc::kIncomingPacket, plot);
@@ -423,22 +429,22 @@ int main(int argc, char* argv[]) {
plots.RegisterPlot("pacer_delay",
[&](Plot* plot) { analyzer.CreatePacerDelayGraph(plot); });
plots.RegisterPlot("audio_encoder_bitrate", [&](Plot* plot) {
- analyzer.CreateAudioEncoderTargetBitrateGraph(plot);
+ CreateAudioEncoderTargetBitrateGraph(parsed_log, config, plot);
});
plots.RegisterPlot("audio_encoder_frame_length", [&](Plot* plot) {
- analyzer.CreateAudioEncoderFrameLengthGraph(plot);
+ CreateAudioEncoderFrameLengthGraph(parsed_log, config, plot);
});
plots.RegisterPlot("audio_encoder_packet_loss", [&](Plot* plot) {
- analyzer.CreateAudioEncoderPacketLossGraph(plot);
+ CreateAudioEncoderPacketLossGraph(parsed_log, config, plot);
});
plots.RegisterPlot("audio_encoder_fec", [&](Plot* plot) {
- analyzer.CreateAudioEncoderEnableFecGraph(plot);
+ CreateAudioEncoderEnableFecGraph(parsed_log, config, plot);
});
plots.RegisterPlot("audio_encoder_dtx", [&](Plot* plot) {
- analyzer.CreateAudioEncoderEnableDtxGraph(plot);
+ CreateAudioEncoderEnableDtxGraph(parsed_log, config, plot);
});
plots.RegisterPlot("audio_encoder_num_channels", [&](Plot* plot) {
- analyzer.CreateAudioEncoderNumChannelsGraph(plot);
+ CreateAudioEncoderNumChannelsGraph(parsed_log, config, plot);
});
plots.RegisterPlot("ice_candidate_pair_config", [&](Plot* plot) {
@@ -461,14 +467,14 @@ int main(int argc, char* argv[]) {
wav_path = webrtc::test::ResourcePath(
"audio_processing/conversational_speech/EN_script2_F_sp2_B1", "wav");
}
- absl::optional<webrtc::EventLogAnalyzer::NetEqStatsGetterMap> neteq_stats;
+ absl::optional<webrtc::NetEqStatsGetterMap> neteq_stats;
plots.RegisterPlot("simulated_neteq_expand_rate", [&](Plot* plot) {
if (!neteq_stats) {
- neteq_stats = analyzer.SimulateNetEq(wav_path, 48000);
+ neteq_stats = webrtc::SimulateNetEq(parsed_log, config, wav_path, 48000);
}
- analyzer.CreateNetEqNetworkStatsGraph(
- *neteq_stats,
+ webrtc::CreateNetEqNetworkStatsGraph(
+ parsed_log, config, *neteq_stats,
[](const webrtc::NetEqNetworkStatistics& stats) {
return stats.expand_rate / 16384.f;
},
@@ -477,10 +483,10 @@ int main(int argc, char* argv[]) {
plots.RegisterPlot("simulated_neteq_speech_expand_rate", [&](Plot* plot) {
if (!neteq_stats) {
- neteq_stats = analyzer.SimulateNetEq(wav_path, 48000);
+ neteq_stats = webrtc::SimulateNetEq(parsed_log, config, wav_path, 48000);
}
- analyzer.CreateNetEqNetworkStatsGraph(
- *neteq_stats,
+ webrtc::CreateNetEqNetworkStatsGraph(
+ parsed_log, config, *neteq_stats,
[](const webrtc::NetEqNetworkStatistics& stats) {
return stats.speech_expand_rate / 16384.f;
},
@@ -489,10 +495,10 @@ int main(int argc, char* argv[]) {
plots.RegisterPlot("simulated_neteq_accelerate_rate", [&](Plot* plot) {
if (!neteq_stats) {
- neteq_stats = analyzer.SimulateNetEq(wav_path, 48000);
+ neteq_stats = webrtc::SimulateNetEq(parsed_log, config, wav_path, 48000);
}
- analyzer.CreateNetEqNetworkStatsGraph(
- *neteq_stats,
+ webrtc::CreateNetEqNetworkStatsGraph(
+ parsed_log, config, *neteq_stats,
[](const webrtc::NetEqNetworkStatistics& stats) {
return stats.accelerate_rate / 16384.f;
},
@@ -501,10 +507,10 @@ int main(int argc, char* argv[]) {
plots.RegisterPlot("simulated_neteq_preemptive_rate", [&](Plot* plot) {
if (!neteq_stats) {
- neteq_stats = analyzer.SimulateNetEq(wav_path, 48000);
+ neteq_stats = webrtc::SimulateNetEq(parsed_log, config, wav_path, 48000);
}
- analyzer.CreateNetEqNetworkStatsGraph(
- *neteq_stats,
+ webrtc::CreateNetEqNetworkStatsGraph(
+ parsed_log, config, *neteq_stats,
[](const webrtc::NetEqNetworkStatistics& stats) {
return stats.preemptive_rate / 16384.f;
},
@@ -513,10 +519,10 @@ int main(int argc, char* argv[]) {
plots.RegisterPlot("simulated_neteq_packet_loss_rate", [&](Plot* plot) {
if (!neteq_stats) {
- neteq_stats = analyzer.SimulateNetEq(wav_path, 48000);
+ neteq_stats = webrtc::SimulateNetEq(parsed_log, config, wav_path, 48000);
}
- analyzer.CreateNetEqNetworkStatsGraph(
- *neteq_stats,
+ webrtc::CreateNetEqNetworkStatsGraph(
+ parsed_log, config, *neteq_stats,
[](const webrtc::NetEqNetworkStatistics& stats) {
return stats.packet_loss_rate / 16384.f;
},
@@ -525,10 +531,10 @@ int main(int argc, char* argv[]) {
plots.RegisterPlot("simulated_neteq_concealment_events", [&](Plot* plot) {
if (!neteq_stats) {
- neteq_stats = analyzer.SimulateNetEq(wav_path, 48000);
+ neteq_stats = webrtc::SimulateNetEq(parsed_log, config, wav_path, 48000);
}
- analyzer.CreateNetEqLifetimeStatsGraph(
- *neteq_stats,
+ webrtc::CreateNetEqLifetimeStatsGraph(
+ parsed_log, config, *neteq_stats,
[](const webrtc::NetEqLifetimeStatistics& stats) {
return static_cast<float>(stats.concealment_events);
},
@@ -537,10 +543,10 @@ int main(int argc, char* argv[]) {
plots.RegisterPlot("simulated_neteq_preferred_buffer_size", [&](Plot* plot) {
if (!neteq_stats) {
- neteq_stats = analyzer.SimulateNetEq(wav_path, 48000);
+ neteq_stats = webrtc::SimulateNetEq(parsed_log, config, wav_path, 48000);
}
- analyzer.CreateNetEqNetworkStatsGraph(
- *neteq_stats,
+ webrtc::CreateNetEqNetworkStatsGraph(
+ parsed_log, config, *neteq_stats,
[](const webrtc::NetEqNetworkStatistics& stats) {
return stats.preferred_buffer_size_ms;
},
@@ -587,7 +593,7 @@ int main(int argc, char* argv[]) {
for (const auto& plot : plots) {
if (plot.enabled) {
- Plot* output = collection->AppendNewPlot();
+ Plot* output = collection.AppendNewPlot();
plot.plot_func(output);
output->SetId(plot.label);
}
@@ -601,21 +607,28 @@ int main(int argc, char* argv[]) {
if (absl::c_find(plot_flags, "simulated_neteq_jitter_buffer_delay") !=
plot_flags.end()) {
if (!neteq_stats) {
- neteq_stats = analyzer.SimulateNetEq(wav_path, 48000);
+ neteq_stats = webrtc::SimulateNetEq(parsed_log, config, wav_path, 48000);
}
- for (webrtc::EventLogAnalyzer::NetEqStatsGetterMap::const_iterator it =
- neteq_stats->cbegin();
+ for (webrtc::NetEqStatsGetterMap::const_iterator it = neteq_stats->cbegin();
it != neteq_stats->cend(); ++it) {
- analyzer.CreateAudioJitterBufferGraph(it->first, it->second.get(),
- collection->AppendNewPlot());
+ webrtc::CreateAudioJitterBufferGraph(parsed_log, config, it->first,
+ it->second.get(),
+ collection.AppendNewPlot());
}
}
- collection->Draw();
+ if (absl::GetFlag(FLAGS_protobuf_output)) {
+ webrtc::analytics::ChartCollection proto_charts;
+ collection.ExportProtobuf(&proto_charts);
+ std::cout << proto_charts.SerializeAsString();
+ } else {
+ collection.PrintPythonCode(absl::GetFlag(FLAGS_shared_xaxis));
+ }
if (absl::GetFlag(FLAGS_print_triage_alerts)) {
- analyzer.CreateTriageNotifications();
- analyzer.PrintNotifications(stderr);
+ webrtc::TriageHelper triage_alerts(config);
+ triage_alerts.AnalyzeLog(parsed_log);
+ triage_alerts.Print(stderr);
}
return 0;
diff --git a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_base.cc b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_base.cc
index dfcd26fed52..dce601a8329 100644
--- a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_base.cc
+++ b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_base.cc
@@ -11,6 +11,7 @@
#include "rtc_tools/rtc_event_log_visualizer/plot_base.h"
#include <algorithm>
+#include <memory>
#include "rtc_base/checks.h"
@@ -93,4 +94,232 @@ void Plot::AppendTimeSeriesIfNotEmpty(TimeSeries&& time_series) {
}
}
+void Plot::PrintPythonCode() const {
+ // Write python commands to stdout. Intended program usage is
+ // ./event_log_visualizer event_log160330.dump | python
+
+ if (!series_list_.empty()) {
+ printf("color_count = %zu\n", series_list_.size());
+ printf(
+ "hls_colors = [(i*1.0/color_count, 0.25+i*0.5/color_count, 0.8) for i "
+ "in range(color_count)]\n");
+ printf("colors = [colorsys.hls_to_rgb(*hls) for hls in hls_colors]\n");
+
+ for (size_t i = 0; i < series_list_.size(); i++) {
+ printf("\n# === Series: %s ===\n", series_list_[i].label.c_str());
+ // List x coordinates
+ printf("x%zu = [", i);
+ if (!series_list_[i].points.empty())
+ printf("%.3f", series_list_[i].points[0].x);
+ for (size_t j = 1; j < series_list_[i].points.size(); j++)
+ printf(", %.3f", series_list_[i].points[j].x);
+ printf("]\n");
+
+ // List y coordinates
+ printf("y%zu = [", i);
+ if (!series_list_[i].points.empty())
+ printf("%G", series_list_[i].points[0].y);
+ for (size_t j = 1; j < series_list_[i].points.size(); j++)
+ printf(", %G", series_list_[i].points[j].y);
+ printf("]\n");
+
+ if (series_list_[i].line_style == LineStyle::kBar) {
+ // There is a plt.bar function that draws bar plots,
+ // but it is *way* too slow to be useful.
+ printf(
+ "plt.vlines(x%zu, map(lambda t: min(t,0), y%zu), map(lambda t: "
+ "max(t,0), y%zu), color=colors[%zu], "
+ "label=\'%s\')\n",
+ i, i, i, i, series_list_[i].label.c_str());
+ if (series_list_[i].point_style == PointStyle::kHighlight) {
+ printf(
+ "plt.plot(x%zu, y%zu, color=colors[%zu], "
+ "marker='.', ls=' ')\n",
+ i, i, i);
+ }
+ } else if (series_list_[i].line_style == LineStyle::kLine) {
+ if (series_list_[i].point_style == PointStyle::kHighlight) {
+ printf(
+ "plt.plot(x%zu, y%zu, color=colors[%zu], label=\'%s\', "
+ "marker='.')\n",
+ i, i, i, series_list_[i].label.c_str());
+ } else {
+ printf("plt.plot(x%zu, y%zu, color=colors[%zu], label=\'%s\')\n", i,
+ i, i, series_list_[i].label.c_str());
+ }
+ } else if (series_list_[i].line_style == LineStyle::kStep) {
+ // Draw lines from (x[0],y[0]) to (x[1],y[0]) to (x[1],y[1]) and so on
+ // to illustrate the "steps". This can be expressed by duplicating all
+ // elements except the first in x and the last in y.
+ printf("xd%zu = [dup for v in x%zu for dup in [v, v]]\n", i, i);
+ printf("yd%zu = [dup for v in y%zu for dup in [v, v]]\n", i, i);
+ printf(
+ "plt.plot(xd%zu[1:], yd%zu[:-1], color=colors[%zu], "
+ "label=\'%s\')\n",
+ i, i, i, series_list_[i].label.c_str());
+ if (series_list_[i].point_style == PointStyle::kHighlight) {
+ printf(
+ "plt.plot(x%zu, y%zu, color=colors[%zu], "
+ "marker='.', ls=' ')\n",
+ i, i, i);
+ }
+ } else if (series_list_[i].line_style == LineStyle::kNone) {
+ printf(
+ "plt.plot(x%zu, y%zu, color=colors[%zu], label=\'%s\', "
+ "marker='o', ls=' ')\n",
+ i, i, i, series_list_[i].label.c_str());
+ } else {
+ printf("raise Exception(\"Unknown graph type\")\n");
+ }
+ }
+
+ // IntervalSeries
+ printf("interval_colors = ['#ff8e82','#5092fc','#c4ffc4','#aaaaaa']\n");
+ RTC_CHECK_LE(interval_list_.size(), 4);
+ // To get the intervals to show up in the legend we have to create patches
+ // for them.
+ printf("legend_patches = []\n");
+ for (size_t i = 0; i < interval_list_.size(); i++) {
+ // List intervals
+ printf("\n# === IntervalSeries: %s ===\n",
+ interval_list_[i].label.c_str());
+ printf("ival%zu = [", i);
+ if (!interval_list_[i].intervals.empty()) {
+ printf("(%G, %G)", interval_list_[i].intervals[0].begin,
+ interval_list_[i].intervals[0].end);
+ }
+ for (size_t j = 1; j < interval_list_[i].intervals.size(); j++) {
+ printf(", (%G, %G)", interval_list_[i].intervals[j].begin,
+ interval_list_[i].intervals[j].end);
+ }
+ printf("]\n");
+
+ printf("for i in range(0, %zu):\n", interval_list_[i].intervals.size());
+ if (interval_list_[i].orientation == IntervalSeries::kVertical) {
+ printf(
+ " plt.axhspan(ival%zu[i][0], ival%zu[i][1], "
+ "facecolor=interval_colors[%zu], "
+ "alpha=0.3)\n",
+ i, i, i);
+ } else {
+ printf(
+ " plt.axvspan(ival%zu[i][0], ival%zu[i][1], "
+ "facecolor=interval_colors[%zu], "
+ "alpha=0.3)\n",
+ i, i, i);
+ }
+ printf(
+ "legend_patches.append(mpatches.Patch(ec=\'black\', "
+ "fc=interval_colors[%zu], label='%s'))\n",
+ i, interval_list_[i].label.c_str());
+ }
+ }
+
+ printf("plt.xlim(%f, %f)\n", xaxis_min_, xaxis_max_);
+ printf("plt.ylim(%f, %f)\n", yaxis_min_, yaxis_max_);
+ printf("plt.xlabel(\'%s\')\n", xaxis_label_.c_str());
+ printf("plt.ylabel(\'%s\')\n", yaxis_label_.c_str());
+ printf("plt.title(\'%s\')\n", title_.c_str());
+ printf("fig = plt.gcf()\n");
+ printf("fig.canvas.set_window_title(\'%s\')\n", id_.c_str());
+ if (!yaxis_tick_labels_.empty()) {
+ printf("yaxis_tick_labels = [");
+ for (const auto& kv : yaxis_tick_labels_) {
+ printf("(%f,\"%s\"),", kv.first, kv.second.c_str());
+ }
+ printf("]\n");
+ printf("yaxis_tick_labels = list(zip(*yaxis_tick_labels))\n");
+ printf("plt.yticks(*yaxis_tick_labels)\n");
+ }
+ if (!series_list_.empty() || !interval_list_.empty()) {
+ printf("handles, labels = plt.gca().get_legend_handles_labels()\n");
+ printf("for lp in legend_patches:\n");
+ printf(" handles.append(lp)\n");
+ printf(" labels.append(lp.get_label())\n");
+ printf("plt.legend(handles, labels, loc=\'best\', fontsize=\'small\')\n");
+ }
+}
+
+void Plot::ExportProtobuf(webrtc::analytics::Chart* chart) const {
+ for (size_t i = 0; i < series_list_.size(); i++) {
+ webrtc::analytics::DataSet* data_set = chart->add_data_sets();
+ for (const auto& point : series_list_[i].points) {
+ data_set->add_x_values(point.x);
+ }
+ for (const auto& point : series_list_[i].points) {
+ data_set->add_y_values(point.y);
+ }
+
+ if (series_list_[i].line_style == LineStyle::kBar) {
+ data_set->set_style(webrtc::analytics::ChartStyle::BAR_CHART);
+ } else if (series_list_[i].line_style == LineStyle::kLine) {
+ data_set->set_style(webrtc::analytics::ChartStyle::LINE_CHART);
+ } else if (series_list_[i].line_style == LineStyle::kStep) {
+ data_set->set_style(webrtc::analytics::ChartStyle::LINE_STEP_CHART);
+ } else if (series_list_[i].line_style == LineStyle::kNone) {
+ data_set->set_style(webrtc::analytics::ChartStyle::SCATTER_CHART);
+ } else {
+ data_set->set_style(webrtc::analytics::ChartStyle::UNDEFINED);
+ }
+
+ if (series_list_[i].point_style == PointStyle::kHighlight)
+ data_set->set_highlight_points(true);
+
+ data_set->set_label(series_list_[i].label);
+ }
+
+ chart->set_xaxis_min(xaxis_min_);
+ chart->set_xaxis_max(xaxis_max_);
+ chart->set_yaxis_min(yaxis_min_);
+ chart->set_yaxis_max(yaxis_max_);
+ chart->set_xaxis_label(xaxis_label_);
+ chart->set_yaxis_label(yaxis_label_);
+ chart->set_title(title_);
+ chart->set_id(id_);
+
+ for (const auto& kv : yaxis_tick_labels_) {
+ webrtc::analytics::TickLabel* tick = chart->add_yaxis_tick_labels();
+ tick->set_value(kv.first);
+ tick->set_label(kv.second);
+ }
+}
+
+void PlotCollection::PrintPythonCode(bool shared_xaxis) const {
+ printf("import matplotlib.pyplot as plt\n");
+ printf("plt.rcParams.update({'figure.max_open_warning': 0})\n");
+ printf("import matplotlib.patches as mpatches\n");
+ printf("import matplotlib.patheffects as pe\n");
+ printf("import colorsys\n");
+ for (size_t i = 0; i < plots_.size(); i++) {
+ printf("plt.figure(%zu)\n", i);
+ if (shared_xaxis) {
+ // Link x-axes across all figures for synchronized zooming.
+ if (i == 0) {
+ printf("axis0 = plt.subplot(111)\n");
+ } else {
+ printf("plt.subplot(111, sharex=axis0)\n");
+ }
+ }
+ plots_[i]->PrintPythonCode();
+ }
+ printf("plt.show()\n");
+}
+
+void PlotCollection::ExportProtobuf(
+ webrtc::analytics::ChartCollection* collection) const {
+ for (const auto& plot : plots_) {
+ // Note: unlike the old ProtobufPlotCollection::ExportProtobuf, no
+ // static_cast to ProtobufPlot is needed here, since ExportProtobuf is
+ // now a member of the Plot base class itself.
+ webrtc::analytics::Chart* protobuf_representation =
+ collection->add_charts();
+ plot->ExportProtobuf(protobuf_representation);
+ }
+}
+
+Plot* PlotCollection::AppendNewPlot() {
+ plots_.push_back(std::make_unique<Plot>());
+ return plots_.back().get();
+}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_base.h b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_base.h
index 5e4ebfa5222..06a206f0315 100644
--- a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_base.h
+++ b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_base.h
@@ -15,6 +15,13 @@
#include <utility>
#include <vector>
+#include "rtc_base/deprecation.h"
+#include "rtc_base/ignore_wundef.h"
+
+RTC_PUSH_IGNORING_WUNDEF()
+#include "rtc_tools/rtc_event_log_visualizer/proto/chart.pb.h"
+RTC_POP_IGNORING_WUNDEF()
+
namespace webrtc {
enum class LineStyle {
@@ -94,8 +101,8 @@ class Plot {
public:
virtual ~Plot() {}
- // Overloaded to draw the plot.
- virtual void Draw() = 0;
+ // Deprecated. Use PrintPythonCode() or ExportProtobuf() instead.
+ RTC_DEPRECATED virtual void Draw() {}
// Sets the lower x-axis limit to min_value (if left_margin == 0).
// Sets the upper x-axis limit to max_value (if right_margin == 0).
@@ -158,6 +165,12 @@ class Plot {
// Otherwise, the call has no effect and the timeseries is destroyed.
void AppendTimeSeriesIfNotEmpty(TimeSeries&& time_series);
+ // Replaces PythonPlot::Draw()
+ void PrintPythonCode() const;
+
+ // Replaces ProtobufPlot::Draw()
+ void ExportProtobuf(webrtc::analytics::Chart* chart) const;
+
protected:
float xaxis_min_;
float xaxis_max_;
@@ -175,8 +188,17 @@ class Plot {
class PlotCollection {
public:
virtual ~PlotCollection() {}
- virtual void Draw() = 0;
- virtual Plot* AppendNewPlot() = 0;
+
+ // Deprecated. Use PrintPythonCode() or ExportProtobuf() instead.
+ RTC_DEPRECATED virtual void Draw() {}
+
+ virtual Plot* AppendNewPlot();
+
+ // Replaces PythonPlotCollection::Draw()
+ void PrintPythonCode(bool shared_xaxis) const;
+
+ // Replaces ProtobufPlotCollection::Draw()
+ void ExportProtobuf(webrtc::analytics::ChartCollection* collection) const;
protected:
std::vector<std::unique_ptr<Plot>> plots_;
diff --git a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_protobuf.cc b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_protobuf.cc
index 9e82c01ba63..0f43191e8b0 100644
--- a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_protobuf.cc
+++ b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_protobuf.cc
@@ -24,49 +24,7 @@ ProtobufPlot::~ProtobufPlot() {}
void ProtobufPlot::Draw() {}
-void ProtobufPlot::ExportProtobuf(webrtc::analytics::Chart* chart) {
- for (size_t i = 0; i < series_list_.size(); i++) {
- webrtc::analytics::DataSet* data_set = chart->add_data_sets();
- for (const auto& point : series_list_[i].points) {
- data_set->add_x_values(point.x);
- }
- for (const auto& point : series_list_[i].points) {
- data_set->add_y_values(point.y);
- }
- if (series_list_[i].line_style == LineStyle::kBar) {
- data_set->set_style(webrtc::analytics::ChartStyle::BAR_CHART);
- } else if (series_list_[i].line_style == LineStyle::kLine) {
- data_set->set_style(webrtc::analytics::ChartStyle::LINE_CHART);
- } else if (series_list_[i].line_style == LineStyle::kStep) {
- data_set->set_style(webrtc::analytics::ChartStyle::LINE_STEP_CHART);
- } else if (series_list_[i].line_style == LineStyle::kNone) {
- data_set->set_style(webrtc::analytics::ChartStyle::SCATTER_CHART);
- } else {
- data_set->set_style(webrtc::analytics::ChartStyle::UNDEFINED);
- }
-
- if (series_list_[i].point_style == PointStyle::kHighlight)
- data_set->set_highlight_points(true);
-
- data_set->set_label(series_list_[i].label);
- }
-
- chart->set_xaxis_min(xaxis_min_);
- chart->set_xaxis_max(xaxis_max_);
- chart->set_yaxis_min(yaxis_min_);
- chart->set_yaxis_max(yaxis_max_);
- chart->set_xaxis_label(xaxis_label_);
- chart->set_yaxis_label(yaxis_label_);
- chart->set_title(title_);
- chart->set_id(id_);
-
- for (const auto& kv : yaxis_tick_labels_) {
- webrtc::analytics::TickLabel* tick = chart->add_yaxis_tick_labels();
- tick->set_value(kv.first);
- tick->set_label(kv.second);
- }
-}
ProtobufPlotCollection::ProtobufPlotCollection() {}
@@ -78,19 +36,6 @@ void ProtobufPlotCollection::Draw() {
std::cout << collection.SerializeAsString();
}
-void ProtobufPlotCollection::ExportProtobuf(
- webrtc::analytics::ChartCollection* collection) {
- for (const auto& plot : plots_) {
- // TODO(terelius): Ensure that there is no way to insert plots other than
- // ProtobufPlots in a ProtobufPlotCollection. Needed to safely static_cast
- // here.
- webrtc::analytics::Chart* protobuf_representation =
- collection->add_charts();
- static_cast<ProtobufPlot*>(plot.get())
- ->ExportProtobuf(protobuf_representation);
- }
-}
-
Plot* ProtobufPlotCollection::AppendNewPlot() {
Plot* plot = new ProtobufPlot();
plots_.push_back(std::unique_ptr<Plot>(plot));
diff --git a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_protobuf.h b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_protobuf.h
index 738247a3098..0773b58d208 100644
--- a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_protobuf.h
+++ b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_protobuf.h
@@ -23,16 +23,15 @@ class ProtobufPlot final : public Plot {
ProtobufPlot();
~ProtobufPlot() override;
void Draw() override;
- void ExportProtobuf(webrtc::analytics::Chart* chart);
};
class ProtobufPlotCollection final : public PlotCollection {
public:
- ProtobufPlotCollection();
+ // This class is deprecated. Use PlotCollection and ExportProtobuf() instead.
+ RTC_DEPRECATED ProtobufPlotCollection();
~ProtobufPlotCollection() override;
void Draw() override;
Plot* AppendNewPlot() override;
- void ExportProtobuf(webrtc::analytics::ChartCollection* collection);
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_python.cc b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_python.cc
index e7cde45f30e..b3708110dfb 100644
--- a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_python.cc
+++ b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_python.cc
@@ -25,149 +25,7 @@ PythonPlot::PythonPlot() {}
PythonPlot::~PythonPlot() {}
void PythonPlot::Draw() {
- // Write python commands to stdout. Intended program usage is
- // ./event_log_visualizer event_log160330.dump | python
-
- if (!series_list_.empty()) {
- printf("color_count = %zu\n", series_list_.size());
- printf(
- "hls_colors = [(i*1.0/color_count, 0.25+i*0.5/color_count, 0.8) for i "
- "in range(color_count)]\n");
- printf("colors = [colorsys.hls_to_rgb(*hls) for hls in hls_colors]\n");
-
- for (size_t i = 0; i < series_list_.size(); i++) {
- printf("\n# === Series: %s ===\n", series_list_[i].label.c_str());
- // List x coordinates
- printf("x%zu = [", i);
- if (!series_list_[i].points.empty())
- printf("%.3f", series_list_[i].points[0].x);
- for (size_t j = 1; j < series_list_[i].points.size(); j++)
- printf(", %.3f", series_list_[i].points[j].x);
- printf("]\n");
-
- // List y coordinates
- printf("y%zu = [", i);
- if (!series_list_[i].points.empty())
- printf("%G", series_list_[i].points[0].y);
- for (size_t j = 1; j < series_list_[i].points.size(); j++)
- printf(", %G", series_list_[i].points[j].y);
- printf("]\n");
-
- if (series_list_[i].line_style == LineStyle::kBar) {
- // There is a plt.bar function that draws bar plots,
- // but it is *way* too slow to be useful.
- printf(
- "plt.vlines(x%zu, map(lambda t: min(t,0), y%zu), map(lambda t: "
- "max(t,0), y%zu), color=colors[%zu], "
- "label=\'%s\')\n",
- i, i, i, i, series_list_[i].label.c_str());
- if (series_list_[i].point_style == PointStyle::kHighlight) {
- printf(
- "plt.plot(x%zu, y%zu, color=colors[%zu], "
- "marker='.', ls=' ')\n",
- i, i, i);
- }
- } else if (series_list_[i].line_style == LineStyle::kLine) {
- if (series_list_[i].point_style == PointStyle::kHighlight) {
- printf(
- "plt.plot(x%zu, y%zu, color=colors[%zu], label=\'%s\', "
- "marker='.')\n",
- i, i, i, series_list_[i].label.c_str());
- } else {
- printf("plt.plot(x%zu, y%zu, color=colors[%zu], label=\'%s\')\n", i,
- i, i, series_list_[i].label.c_str());
- }
- } else if (series_list_[i].line_style == LineStyle::kStep) {
- // Draw lines from (x[0],y[0]) to (x[1],y[0]) to (x[1],y[1]) and so on
- // to illustrate the "steps". This can be expressed by duplicating all
- // elements except the first in x and the last in y.
- printf("xd%zu = [dup for v in x%zu for dup in [v, v]]\n", i, i);
- printf("yd%zu = [dup for v in y%zu for dup in [v, v]]\n", i, i);
- printf(
- "plt.plot(xd%zu[1:], yd%zu[:-1], color=colors[%zu], "
- "label=\'%s\')\n",
- i, i, i, series_list_[i].label.c_str());
- if (series_list_[i].point_style == PointStyle::kHighlight) {
- printf(
- "plt.plot(x%zu, y%zu, color=colors[%zu], "
- "marker='.', ls=' ')\n",
- i, i, i);
- }
- } else if (series_list_[i].line_style == LineStyle::kNone) {
- printf(
- "plt.plot(x%zu, y%zu, color=colors[%zu], label=\'%s\', "
- "marker='o', ls=' ')\n",
- i, i, i, series_list_[i].label.c_str());
- } else {
- printf("raise Exception(\"Unknown graph type\")\n");
- }
- }
-
- // IntervalSeries
- printf("interval_colors = ['#ff8e82','#5092fc','#c4ffc4','#aaaaaa']\n");
- RTC_CHECK_LE(interval_list_.size(), 4);
- // To get the intervals to show up in the legend we have to create patches
- // for them.
- printf("legend_patches = []\n");
- for (size_t i = 0; i < interval_list_.size(); i++) {
- // List intervals
- printf("\n# === IntervalSeries: %s ===\n",
- interval_list_[i].label.c_str());
- printf("ival%zu = [", i);
- if (!interval_list_[i].intervals.empty()) {
- printf("(%G, %G)", interval_list_[i].intervals[0].begin,
- interval_list_[i].intervals[0].end);
- }
- for (size_t j = 1; j < interval_list_[i].intervals.size(); j++) {
- printf(", (%G, %G)", interval_list_[i].intervals[j].begin,
- interval_list_[i].intervals[j].end);
- }
- printf("]\n");
-
- printf("for i in range(0, %zu):\n", interval_list_[i].intervals.size());
- if (interval_list_[i].orientation == IntervalSeries::kVertical) {
- printf(
- " plt.axhspan(ival%zu[i][0], ival%zu[i][1], "
- "facecolor=interval_colors[%zu], "
- "alpha=0.3)\n",
- i, i, i);
- } else {
- printf(
- " plt.axvspan(ival%zu[i][0], ival%zu[i][1], "
- "facecolor=interval_colors[%zu], "
- "alpha=0.3)\n",
- i, i, i);
- }
- printf(
- "legend_patches.append(mpatches.Patch(ec=\'black\', "
- "fc=interval_colors[%zu], label='%s'))\n",
- i, interval_list_[i].label.c_str());
- }
- }
-
- printf("plt.xlim(%f, %f)\n", xaxis_min_, xaxis_max_);
- printf("plt.ylim(%f, %f)\n", yaxis_min_, yaxis_max_);
- printf("plt.xlabel(\'%s\')\n", xaxis_label_.c_str());
- printf("plt.ylabel(\'%s\')\n", yaxis_label_.c_str());
- printf("plt.title(\'%s\')\n", title_.c_str());
- printf("fig = plt.gcf()\n");
- printf("fig.canvas.set_window_title(\'%s\')\n", id_.c_str());
- if (!yaxis_tick_labels_.empty()) {
- printf("yaxis_tick_labels = [");
- for (const auto& kv : yaxis_tick_labels_) {
- printf("(%f,\"%s\"),", kv.first, kv.second.c_str());
- }
- printf("]\n");
- printf("yaxis_tick_labels = list(zip(*yaxis_tick_labels))\n");
- printf("plt.yticks(*yaxis_tick_labels)\n");
- }
- if (!series_list_.empty() || !interval_list_.empty()) {
- printf("handles, labels = plt.gca().get_legend_handles_labels()\n");
- printf("for lp in legend_patches:\n");
- printf(" handles.append(lp)\n");
- printf(" labels.append(lp.get_label())\n");
- printf("plt.legend(handles, labels, loc=\'best\', fontsize=\'small\')\n");
- }
+ PrintPythonCode();
}
PythonPlotCollection::PythonPlotCollection(bool shared_xaxis)
@@ -176,24 +34,7 @@ PythonPlotCollection::PythonPlotCollection(bool shared_xaxis)
PythonPlotCollection::~PythonPlotCollection() {}
void PythonPlotCollection::Draw() {
- printf("import matplotlib.pyplot as plt\n");
- printf("plt.rcParams.update({'figure.max_open_warning': 0})\n");
- printf("import matplotlib.patches as mpatches\n");
- printf("import matplotlib.patheffects as pe\n");
- printf("import colorsys\n");
- for (size_t i = 0; i < plots_.size(); i++) {
- printf("plt.figure(%zu)\n", i);
- if (shared_xaxis_) {
- // Link x-axes across all figures for synchronized zooming.
- if (i == 0) {
- printf("axis0 = plt.subplot(111)\n");
- } else {
- printf("plt.subplot(111, sharex=axis0)\n");
- }
- }
- plots_[i]->Draw();
- }
- printf("plt.show()\n");
+ PrintPythonCode(shared_xaxis_);
}
Plot* PythonPlotCollection::AppendNewPlot() {
diff --git a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_python.h b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_python.h
index dcdcf23fcf7..998ed7b2215 100644
--- a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_python.h
+++ b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_python.h
@@ -23,7 +23,8 @@ class PythonPlot final : public Plot {
class PythonPlotCollection final : public PlotCollection {
public:
- explicit PythonPlotCollection(bool shared_xaxis = false);
+ // This class is deprecated. Use PlotCollection and PrintPythonCode() instead.
+ RTC_DEPRECATED explicit PythonPlotCollection(bool shared_xaxis = false);
~PythonPlotCollection() override;
void Draw() override;
Plot* AppendNewPlot() override;
diff --git a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/triage_notifications.h b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/triage_notifications.h
deleted file mode 100644
index 23b31ece421..00000000000
--- a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/triage_notifications.h
+++ /dev/null
@@ -1,158 +0,0 @@
-/*
- * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_TRIAGE_NOTIFICATIONS_H_
-#define RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_TRIAGE_NOTIFICATIONS_H_
-
-#include <string>
-
-namespace webrtc {
-
-class IncomingRtpReceiveTimeGap {
- public:
- IncomingRtpReceiveTimeGap(float time_seconds, int64_t duration)
- : time_seconds_(time_seconds), duration_(duration) {}
- float Time() const { return time_seconds_; }
- std::string ToString() const {
- return std::string("No RTP packets received for ") +
- std::to_string(duration_) + std::string(" ms");
- }
-
- private:
- float time_seconds_;
- int64_t duration_;
-};
-
-class IncomingRtcpReceiveTimeGap {
- public:
- IncomingRtcpReceiveTimeGap(float time_seconds, int64_t duration)
- : time_seconds_(time_seconds), duration_(duration) {}
- float Time() const { return time_seconds_; }
- std::string ToString() const {
- return std::string("No RTCP packets received for ") +
- std::to_string(duration_) + std::string(" ms");
- }
-
- private:
- float time_seconds_;
- int64_t duration_;
-};
-
-class OutgoingRtpSendTimeGap {
- public:
- OutgoingRtpSendTimeGap(float time_seconds, int64_t duration)
- : time_seconds_(time_seconds), duration_(duration) {}
- float Time() const { return time_seconds_; }
- std::string ToString() const {
- return std::string("No RTP packets sent for ") + std::to_string(duration_) +
- std::string(" ms");
- }
-
- private:
- float time_seconds_;
- int64_t duration_;
-};
-
-class OutgoingRtcpSendTimeGap {
- public:
- OutgoingRtcpSendTimeGap(float time_seconds, int64_t duration)
- : time_seconds_(time_seconds), duration_(duration) {}
- float Time() const { return time_seconds_; }
- std::string ToString() const {
- return std::string("No RTCP packets sent for ") +
- std::to_string(duration_) + std::string(" ms");
- }
-
- private:
- float time_seconds_;
- int64_t duration_;
-};
-
-class IncomingSeqNumJump {
- public:
- IncomingSeqNumJump(float time_seconds, uint32_t ssrc)
- : time_seconds_(time_seconds), ssrc_(ssrc) {}
- float Time() const { return time_seconds_; }
- std::string ToString() const {
- return std::string("Sequence number jumps on incoming SSRC ") +
- std::to_string(ssrc_);
- }
-
- private:
- float time_seconds_;
-
- uint32_t ssrc_;
-};
-
-class IncomingCaptureTimeJump {
- public:
- IncomingCaptureTimeJump(float time_seconds, uint32_t ssrc)
- : time_seconds_(time_seconds), ssrc_(ssrc) {}
- float Time() const { return time_seconds_; }
- std::string ToString() const {
- return std::string("Capture timestamp jumps on incoming SSRC ") +
- std::to_string(ssrc_);
- }
-
- private:
- float time_seconds_;
-
- uint32_t ssrc_;
-};
-
-class OutgoingSeqNoJump {
- public:
- OutgoingSeqNoJump(float time_seconds, uint32_t ssrc)
- : time_seconds_(time_seconds), ssrc_(ssrc) {}
- float Time() const { return time_seconds_; }
- std::string ToString() const {
- return std::string("Sequence number jumps on outgoing SSRC ") +
- std::to_string(ssrc_);
- }
-
- private:
- float time_seconds_;
-
- uint32_t ssrc_;
-};
-
-class OutgoingCaptureTimeJump {
- public:
- OutgoingCaptureTimeJump(float time_seconds, uint32_t ssrc)
- : time_seconds_(time_seconds), ssrc_(ssrc) {}
- float Time() const { return time_seconds_; }
- std::string ToString() const {
- return std::string("Capture timestamp jumps on outgoing SSRC ") +
- std::to_string(ssrc_);
- }
-
- private:
- float time_seconds_;
-
- uint32_t ssrc_;
-};
-
-class OutgoingHighLoss {
- public:
- explicit OutgoingHighLoss(double avg_loss_fraction)
- : avg_loss_fraction_(avg_loss_fraction) {}
- std::string ToString() const {
- return std::string("High average loss (") +
- std::to_string(avg_loss_fraction_ * 100) +
- std::string("%) across the call.");
- }
-
- private:
- double avg_loss_fraction_;
-};
-
-} // namespace webrtc
-
-#endif // RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_TRIAGE_NOTIFICATIONS_H_
diff --git a/chromium/third_party/webrtc/sdk/BUILD.gn b/chromium/third_party/webrtc/sdk/BUILD.gn
index 1b313b3d872..dc9b265155e 100644
--- a/chromium/third_party/webrtc/sdk/BUILD.gn
+++ b/chromium/third_party/webrtc/sdk/BUILD.gn
@@ -35,8 +35,8 @@ rtc_library("media_constraints") {
deps = [
"../api:audio_options_api",
"../api:libjingle_peerconnection_api",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("sdk_tests") {
@@ -272,8 +272,8 @@ if (is_ios || is_mac) {
"../rtc_base:checks",
"../system_wrappers:field_trial",
"../system_wrappers:metrics",
- "//third_party/abseil-cpp/absl/base:core_headers",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers" ]
libs = [ "AudioToolbox.framework" ]
}
@@ -444,8 +444,8 @@ if (is_ios || is_mac) {
"../media:rtc_media_base",
"../rtc_base",
"../rtc_base:checks",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
configs += [
"..:common_objc",
@@ -946,7 +946,6 @@ if (is_ios || is_mac) {
"../api/crypto:frame_encryptor_interface",
"../api/rtc_event_log:rtc_event_log_factory",
"../api/task_queue:default_task_queue_factory",
- "../api/transport/media:media_transport_interface",
"../api/video:video_frame",
"../api/video:video_rtp_headers",
"../api/video_codecs:video_codecs_api",
@@ -1217,7 +1216,6 @@ if (is_ios || is_mac) {
"../api/audio_codecs:audio_codecs_api",
"../api/audio_codecs:builtin_audio_decoder_factory",
"../api/audio_codecs:builtin_audio_encoder_factory",
- "../api/transport/media:media_transport_interface",
"../api/video_codecs:video_codecs_api",
"../media:rtc_media_base",
"../modules:module_api",
@@ -1572,8 +1570,8 @@ if (is_ios || is_mac) {
"../api/video_codecs:video_codecs_api",
"../common_video",
"../rtc_base",
- "//third_party/abseil-cpp/absl/memory",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
}
rtc_library("native_video") {
diff --git a/chromium/third_party/webrtc/sdk/android/BUILD.gn b/chromium/third_party/webrtc/sdk/android/BUILD.gn
index f4d05c1fde3..3c824dbe756 100644
--- a/chromium/third_party/webrtc/sdk/android/BUILD.gn
+++ b/chromium/third_party/webrtc/sdk/android/BUILD.gn
@@ -272,7 +272,6 @@ if (is_android) {
"api/org/webrtc/MediaSource.java",
"api/org/webrtc/MediaStream.java",
"api/org/webrtc/MediaStreamTrack.java",
- "api/org/webrtc/MediaTransportFactoryFactory.java",
"api/org/webrtc/NativeLibraryLoader.java",
"api/org/webrtc/NativePeerConnectionFactory.java",
"api/org/webrtc/NetEqFactoryFactory.java",
@@ -326,7 +325,7 @@ if (is_android) {
"//third_party/android_deps:com_android_support_support_annotations_java",
]
srcjar_deps = [
- "//api:rtp_parameters_enums",
+ "//api:priority_enums",
"//api/video:video_frame_enums",
]
}
@@ -558,8 +557,8 @@ if (current_os == "linux" || is_android) {
"../../rtc_base:rtc_base_approved",
"../../system_wrappers:field_trial",
"../../system_wrappers:metrics",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("audio_jni") {
@@ -655,9 +654,9 @@ if (current_os == "linux" || is_android) {
"../../rtc_base:checks",
"../../rtc_base:rtc_task_queue",
"../../rtc_base/task_utils:to_queued_task",
- "//third_party/abseil-cpp/absl/types:optional",
"//third_party/libyuv",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("peerconnection_jni") {
@@ -745,6 +744,8 @@ if (current_os == "linux" || is_android) {
"../../rtc_base:rtc_task_queue",
"../../rtc_base/system:thread_registry",
"../../system_wrappers:field_trial",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/types:optional",
]
@@ -837,8 +838,8 @@ if (current_os == "linux" || is_android) {
"//api:array_view",
"//rtc_base:checks",
"//rtc_base:rtc_base_approved",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("native_api_base") {
@@ -932,8 +933,8 @@ if (current_os == "linux" || is_android) {
"../../rtc_base:criticalsection",
"../../rtc_base:logging",
"../../rtc_base:stringutils",
- "//third_party/abseil-cpp/absl/base:core_headers",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers" ]
}
# API for creating C++ wrapper implementations of api/mediastreaminterface.h
@@ -1042,8 +1043,8 @@ if (current_os == "linux" || is_android) {
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
"../../system_wrappers:metrics",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("java_audio_device_module") {
@@ -1065,8 +1066,8 @@ if (current_os == "linux" || is_android) {
"../../rtc_base:rtc_base_approved",
"../../system_wrappers:field_trial",
"../../system_wrappers:metrics",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
if (rtc_enable_android_aaudio) {
@@ -1092,8 +1093,8 @@ if (current_os == "linux" || is_android) {
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
"../../system_wrappers",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
}
@@ -1118,8 +1119,8 @@ if (current_os == "linux" || is_android) {
"../../modules/audio_device:audio_device_buffer",
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
#########################
@@ -1344,9 +1345,16 @@ if (is_android) {
deps = [
":base_java",
+ ":builtin_audio_codecs_java",
+ ":camera_java",
":default_video_codec_factory_java",
+ ":filevideo_java",
+ ":hwcodecs_java",
":libjingle_peerconnection_java",
":libjingle_peerconnection_metrics_default_java",
+ ":peerconnection_java",
+ ":surfaceviewrenderer_java",
+ ":swcodecs_java",
":video_api_java",
":video_java",
"//base:base_java_test_support",
@@ -1354,6 +1362,7 @@ if (is_android) {
"//third_party/android_support_test_runner:rules_java",
"//third_party/android_support_test_runner:runner_java",
"//third_party/google-truth:google_truth_java",
+ "//third_party/hamcrest:hamcrest_library_java",
"//third_party/junit",
"//third_party/mockito:mockito_java",
]
@@ -1450,8 +1459,8 @@ if (is_android) {
"../../test:fileutils",
"../../test:test_support",
"../../testing/gtest",
- "//third_party/abseil-cpp/absl/memory",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
}
rtc_android_library("native_unittests_java") {
@@ -1508,7 +1517,13 @@ if (is_android) {
]
deps = [
+ ":base_java",
+ ":camera_java",
+ ":hwcodecs_java",
":libjingle_peerconnection_java",
+ ":peerconnection_java",
+ ":video_api_java",
+ ":video_java",
"//base:base_java_test_support",
"//third_party/google-truth:google_truth_java",
]
diff --git a/chromium/third_party/webrtc/sdk/android/api/org/webrtc/MediaTransportFactoryFactory.java b/chromium/third_party/webrtc/sdk/android/api/org/webrtc/MediaTransportFactoryFactory.java
deleted file mode 100644
index c16a37a6d77..00000000000
--- a/chromium/third_party/webrtc/sdk/android/api/org/webrtc/MediaTransportFactoryFactory.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Copyright 2018 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc;
-
-/**
- * Factory for creating webrtc::MediaTransportFactory instances.
- */
-public interface MediaTransportFactoryFactory {
- /**
- * Dynamically allocates a webrtc::MediaTransportFactory instance and returns a pointer to it.
- * The caller takes ownership of the object.
- */
- public long createNativeMediaTransportFactory();
-}
diff --git a/chromium/third_party/webrtc/sdk/android/api/org/webrtc/PeerConnection.java b/chromium/third_party/webrtc/sdk/android/api/org/webrtc/PeerConnection.java
index bf0a2e94413..920e2f12c96 100644
--- a/chromium/third_party/webrtc/sdk/android/api/org/webrtc/PeerConnection.java
+++ b/chromium/third_party/webrtc/sdk/android/api/org/webrtc/PeerConnection.java
@@ -536,18 +536,6 @@ public class PeerConnection {
// Null indicates no change to currently configured value.
@Nullable public Boolean allowCodecSwitching;
- /*
- * Experimental flag that enables a use of media transport. If this is true, the media transport
- * factory MUST be provided to the PeerConnectionFactory.
- */
- public boolean useMediaTransport;
-
- /*
- * Experimental flag that enables a use of media transport for data channels. If this is true,
- * the media transport factory MUST be provided to the PeerConnectionFactory.
- */
- public boolean useMediaTransportForDataChannels;
-
/**
* Defines advanced optional cryptographic settings related to SRTP and
* frame encryption for native WebRTC. Setting this will overwrite any
@@ -602,8 +590,6 @@ public class PeerConnection {
networkPreference = AdapterType.UNKNOWN;
sdpSemantics = SdpSemantics.PLAN_B;
activeResetSrtpParams = false;
- useMediaTransport = false;
- useMediaTransportForDataChannels = false;
cryptoOptions = null;
turnLoggingId = null;
allowCodecSwitching = null;
@@ -816,16 +802,6 @@ public class PeerConnection {
return allowCodecSwitching;
}
- @CalledByNative("RTCConfiguration")
- boolean getUseMediaTransport() {
- return useMediaTransport;
- }
-
- @CalledByNative("RTCConfiguration")
- boolean getUseMediaTransportForDataChannels() {
- return useMediaTransportForDataChannels;
- }
-
@Nullable
@CalledByNative("RTCConfiguration")
CryptoOptions getCryptoOptions() {
diff --git a/chromium/third_party/webrtc/sdk/android/api/org/webrtc/PeerConnectionFactory.java b/chromium/third_party/webrtc/sdk/android/api/org/webrtc/PeerConnectionFactory.java
index decdc0cc427..c87e639f235 100644
--- a/chromium/third_party/webrtc/sdk/android/api/org/webrtc/PeerConnectionFactory.java
+++ b/chromium/third_party/webrtc/sdk/android/api/org/webrtc/PeerConnectionFactory.java
@@ -175,7 +175,6 @@ public class PeerConnectionFactory {
@Nullable private FecControllerFactoryFactoryInterface fecControllerFactoryFactory;
@Nullable private NetworkControllerFactoryFactory networkControllerFactoryFactory;
@Nullable private NetworkStatePredictorFactoryFactory networkStatePredictorFactoryFactory;
- @Nullable private MediaTransportFactoryFactory mediaTransportFactoryFactory;
@Nullable private NetEqFactoryFactory neteqFactoryFactory;
private Builder() {}
@@ -247,13 +246,6 @@ public class PeerConnectionFactory {
return this;
}
- /** Sets a MediaTransportFactoryFactory for a PeerConnectionFactory. */
- public Builder setMediaTransportFactoryFactory(
- MediaTransportFactoryFactory mediaTransportFactoryFactory) {
- this.mediaTransportFactoryFactory = mediaTransportFactoryFactory;
- return this;
- }
-
/**
* Sets a NetEqFactoryFactory for the PeerConnectionFactory. When using a
* custom NetEqFactoryFactory, the AudioDecoderFactoryFactory will be set
@@ -284,9 +276,6 @@ public class PeerConnectionFactory {
networkStatePredictorFactoryFactory == null
? 0
: networkStatePredictorFactoryFactory.createNativeNetworkStatePredictorFactory(),
- mediaTransportFactoryFactory == null
- ? 0
- : mediaTransportFactoryFactory.createNativeMediaTransportFactory(),
neteqFactoryFactory == null ? 0 : neteqFactoryFactory.createNativeNetEqFactory());
}
}
@@ -607,7 +596,7 @@ public class PeerConnectionFactory {
long audioDecoderFactory, VideoEncoderFactory encoderFactory,
VideoDecoderFactory decoderFactory, long nativeAudioProcessor,
long nativeFecControllerFactory, long nativeNetworkControllerFactory,
- long nativeNetworkStatePredictorFactory, long mediaTransportFactory, long neteqFactory);
+ long nativeNetworkStatePredictorFactory, long neteqFactory);
private static native long nativeCreatePeerConnection(long factory,
PeerConnection.RTCConfiguration rtcConfig, MediaConstraints constraints, long nativeObserver,
diff --git a/chromium/third_party/webrtc/sdk/android/api/org/webrtc/SurfaceTextureHelper.java b/chromium/third_party/webrtc/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
index 3522a874879..b7b028f5aeb 100644
--- a/chromium/third_party/webrtc/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
+++ b/chromium/third_party/webrtc/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
@@ -263,6 +263,17 @@ public class SurfaceTextureHelper {
});
}
+ /**
+ * Forces a frame to be produced. If no new frame is available, the last frame is sent to the
+ * listener again.
+ */
+ public void forceFrame() {
+ handler.post(() -> {
+ hasPendingTexture = true;
+ tryDeliverTextureFrame();
+ });
+ }
+
/** Set the rotation of the delivered frames. */
public void setFrameRotation(int rotation) {
handler.post(() -> this.frameRotation = rotation);
diff --git a/chromium/third_party/webrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java b/chromium/third_party/webrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
index edc9dd179df..94eb2a4357a 100644
--- a/chromium/third_party/webrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
+++ b/chromium/third_party/webrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
@@ -183,7 +183,7 @@ class WebRtcAudioTrack {
}
@CalledByNative
- private boolean initPlayout(int sampleRate, int channels, double bufferSizeFactor) {
+ private int initPlayout(int sampleRate, int channels, double bufferSizeFactor) {
threadChecker.checkIsOnValidThread();
Logging.d(TAG,
"initPlayout(sampleRate=" + sampleRate + ", channels=" + channels
@@ -212,14 +212,14 @@ class WebRtcAudioTrack {
// can happen that |minBufferSizeInBytes| contains an invalid value.
if (minBufferSizeInBytes < byteBuffer.capacity()) {
reportWebRtcAudioTrackInitError("AudioTrack.getMinBufferSize returns an invalid value.");
- return false;
+ return -1;
}
// Ensure that prevision audio session was stopped correctly before trying
// to create a new AudioTrack.
if (audioTrack != null) {
reportWebRtcAudioTrackInitError("Conflict with existing AudioTrack.");
- return false;
+ return -1;
}
try {
// Create an AudioTrack object and initialize its associated audio buffer.
@@ -241,7 +241,7 @@ class WebRtcAudioTrack {
} catch (IllegalArgumentException e) {
reportWebRtcAudioTrackInitError(e.getMessage());
releaseAudioResources();
- return false;
+ return -1;
}
// It can happen that an AudioTrack is created but it was not successfully
@@ -250,11 +250,11 @@ class WebRtcAudioTrack {
if (audioTrack == null || audioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
reportWebRtcAudioTrackInitError("Initialization of audio track failed.");
releaseAudioResources();
- return false;
+ return -1;
}
logMainParameters();
logMainParametersExtended();
- return true;
+ return minBufferSizeInBytes;
}
@CalledByNative
@@ -423,6 +423,14 @@ class WebRtcAudioTrack {
}
}
+ @CalledByNative
+ private int getBufferSizeInFrames() {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+ return audioTrack.getBufferSizeInFrames();
+ }
+ return -1;
+ }
+
private void logBufferCapacityInFrames() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
Logging.d(TAG,
diff --git a/chromium/third_party/webrtc/sdk/android/src/jni/audio_device/audio_track_jni.cc b/chromium/third_party/webrtc/sdk/android/src/jni/audio_device/audio_track_jni.cc
index 8f0a0417110..d5b880b1b0f 100644
--- a/chromium/third_party/webrtc/sdk/android/src/jni/audio_device/audio_track_jni.cc
+++ b/chromium/third_party/webrtc/sdk/android/src/jni/audio_device/audio_track_jni.cc
@@ -20,6 +20,7 @@
#include "sdk/android/generated_java_audio_device_module_native_jni/WebRtcAudioTrack_jni.h"
#include "sdk/android/src/jni/jni_helpers.h"
#include "system_wrappers/include/field_trial.h"
+#include "system_wrappers/include/metrics.h"
namespace webrtc {
@@ -89,12 +90,33 @@ int32_t AudioTrackJni::InitPlayout() {
nullptr);
if (buffer_size_factor == 0)
buffer_size_factor = 1.0;
- if (!Java_WebRtcAudioTrack_initPlayout(
- env_, j_audio_track_, audio_parameters_.sample_rate(),
- static_cast<int>(audio_parameters_.channels()), buffer_size_factor)) {
+ int requested_buffer_size_bytes = Java_WebRtcAudioTrack_initPlayout(
+ env_, j_audio_track_, audio_parameters_.sample_rate(),
+ static_cast<int>(audio_parameters_.channels()), buffer_size_factor);
+ if (requested_buffer_size_bytes < 0) {
RTC_LOG(LS_ERROR) << "InitPlayout failed";
return -1;
}
+ // Update UMA histograms for both the requested and actual buffer size.
+ // To avoid division by zero, we assume the sample rate is 48k if an invalid
+ // value is found.
+ const int sample_rate = audio_parameters_.sample_rate() <= 0
+ ? 48000
+ : audio_parameters_.sample_rate();
+ // This calculation assumes that audio is mono.
+ const int requested_buffer_size_ms =
+ (requested_buffer_size_bytes * 1000) / (2 * sample_rate);
+ RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AndroidNativeRequestedAudioBufferSizeMs",
+ requested_buffer_size_ms, 0, 1000, 100);
+ int actual_buffer_size_frames =
+ Java_WebRtcAudioTrack_getBufferSizeInFrames(env_, j_audio_track_);
+ if (actual_buffer_size_frames >= 0) {
+ const int actual_buffer_size_ms =
+ actual_buffer_size_frames * 1000 / sample_rate;
+ RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AndroidNativeAudioBufferSizeMs",
+ actual_buffer_size_ms, 0, 1000, 100);
+ }
+
initialized_ = true;
return 0;
}
diff --git a/chromium/third_party/webrtc/sdk/android/src/jni/pc/peer_connection.cc b/chromium/third_party/webrtc/sdk/android/src/jni/pc/peer_connection.cc
index 0ae39fbf661..05e940ee806 100644
--- a/chromium/third_party/webrtc/sdk/android/src/jni/pc/peer_connection.cc
+++ b/chromium/third_party/webrtc/sdk/android/src/jni/pc/peer_connection.cc
@@ -264,11 +264,6 @@ void JavaToNativeRTCConfiguration(
rtc_config->sdp_semantics = JavaToNativeSdpSemantics(jni, j_sdp_semantics);
rtc_config->active_reset_srtp_params =
Java_RTCConfiguration_getActiveResetSrtpParams(jni, j_rtc_config);
- rtc_config->use_media_transport =
- Java_RTCConfiguration_getUseMediaTransport(jni, j_rtc_config);
- rtc_config->use_media_transport_for_data_channels =
- Java_RTCConfiguration_getUseMediaTransportForDataChannels(jni,
- j_rtc_config);
rtc_config->crypto_options =
JavaToNativeOptionalCryptoOptions(jni, j_crypto_options);
diff --git a/chromium/third_party/webrtc/sdk/android/src/jni/pc/peer_connection_factory.cc b/chromium/third_party/webrtc/sdk/android/src/jni/pc/peer_connection_factory.cc
index 48dd6e41d89..32382fe5216 100644
--- a/chromium/third_party/webrtc/sdk/android/src/jni/pc/peer_connection_factory.cc
+++ b/chromium/third_party/webrtc/sdk/android/src/jni/pc/peer_connection_factory.cc
@@ -82,7 +82,9 @@ void PostJavaCallback(JNIEnv* env,
JavaAsyncCallback(JNIEnv* env,
const JavaRef<jobject>& j_object,
JavaMethodPointer java_method_pointer)
- : j_object_(env, j_object), java_method_pointer_(java_method_pointer) {}
+ : rtc::MessageHandler(false),
+ j_object_(env, j_object),
+ java_method_pointer_(java_method_pointer) {}
void OnMessage(rtc::Message*) override {
java_method_pointer_(AttachCurrentThreadIfNeeded(), j_object_);
@@ -246,7 +248,7 @@ static void JNI_PeerConnectionFactory_ShutdownInternalTracer(JNIEnv* jni) {
// Following parameters are optional:
// |audio_device_module|, |jencoder_factory|, |jdecoder_factory|,
-// |audio_processor|, |media_transport_factory|, |fec_controller_factory|,
+// |audio_processor|, |fec_controller_factory|,
// |network_state_predictor_factory|, |neteq_factory|.
ScopedJavaLocalRef<jobject> CreatePeerConnectionFactoryForJava(
JNIEnv* jni,
@@ -263,7 +265,6 @@ ScopedJavaLocalRef<jobject> CreatePeerConnectionFactoryForJava(
network_controller_factory,
std::unique_ptr<NetworkStatePredictorFactoryInterface>
network_state_predictor_factory,
- std::unique_ptr<MediaTransportFactory> media_transport_factory,
std::unique_ptr<NetEqFactory> neteq_factory) {
// talk/ assumes pretty widely that the current Thread is ThreadManager'd, but
// ThreadManager only WrapCurrentThread()s the thread where it is first
@@ -310,7 +311,6 @@ ScopedJavaLocalRef<jobject> CreatePeerConnectionFactoryForJava(
std::move(network_controller_factory);
dependencies.network_state_predictor_factory =
std::move(network_state_predictor_factory);
- dependencies.media_transport_factory = std::move(media_transport_factory);
dependencies.neteq_factory = std::move(neteq_factory);
cricket::MediaEngineDependencies media_dependencies;
@@ -355,7 +355,6 @@ JNI_PeerConnectionFactory_CreatePeerConnectionFactory(
jlong native_fec_controller_factory,
jlong native_network_controller_factory,
jlong native_network_state_predictor_factory,
- jlong native_media_transport_factory,
jlong native_neteq_factory) {
rtc::scoped_refptr<AudioProcessing> audio_processor =
reinterpret_cast<AudioProcessing*>(native_audio_processor);
@@ -372,8 +371,6 @@ JNI_PeerConnectionFactory_CreatePeerConnectionFactory(
native_network_controller_factory),
TakeOwnershipOfUniquePtr<NetworkStatePredictorFactoryInterface>(
native_network_state_predictor_factory),
- TakeOwnershipOfUniquePtr<MediaTransportFactory>(
- native_media_transport_factory),
TakeOwnershipOfUniquePtr<NetEqFactory>(native_neteq_factory));
}
diff --git a/chromium/third_party/webrtc/sdk/objc/api/peerconnection/RTCConfiguration.h b/chromium/third_party/webrtc/sdk/objc/api/peerconnection/RTCConfiguration.h
index 4e9c674ef8e..86eaa6cee5d 100644
--- a/chromium/third_party/webrtc/sdk/objc/api/peerconnection/RTCConfiguration.h
+++ b/chromium/third_party/webrtc/sdk/objc/api/peerconnection/RTCConfiguration.h
@@ -198,18 +198,6 @@ RTC_OBJC_EXPORT
@property(nonatomic, assign) BOOL allowCodecSwitching;
/**
- * If MediaTransportFactory is provided in PeerConnectionFactory, this flag informs PeerConnection
- * that it should use the MediaTransportInterface.
- */
-@property(nonatomic, assign) BOOL useMediaTransport;
-
-/**
- * If MediaTransportFactory is provided in PeerConnectionFactory, this flag informs PeerConnection
- * that it should use the MediaTransportInterface for data channels.
- */
-@property(nonatomic, assign) BOOL useMediaTransportForDataChannels;
-
-/**
* Defines advanced optional cryptographic settings related to SRTP and
* frame encryption for native WebRTC. Setting this will overwrite any
* options set through the PeerConnectionFactory (which is deprecated).
diff --git a/chromium/third_party/webrtc/sdk/objc/api/peerconnection/RTCConfiguration.mm b/chromium/third_party/webrtc/sdk/objc/api/peerconnection/RTCConfiguration.mm
index 52c14505054..55abbcdb184 100644
--- a/chromium/third_party/webrtc/sdk/objc/api/peerconnection/RTCConfiguration.mm
+++ b/chromium/third_party/webrtc/sdk/objc/api/peerconnection/RTCConfiguration.mm
@@ -52,8 +52,6 @@
@synthesize turnCustomizer = _turnCustomizer;
@synthesize activeResetSrtpParams = _activeResetSrtpParams;
@synthesize allowCodecSwitching = _allowCodecSwitching;
-@synthesize useMediaTransport = _useMediaTransport;
-@synthesize useMediaTransportForDataChannels = _useMediaTransportForDataChannels;
@synthesize cryptoOptions = _cryptoOptions;
@synthesize rtcpAudioReportIntervalMs = _rtcpAudioReportIntervalMs;
@synthesize rtcpVideoReportIntervalMs = _rtcpVideoReportIntervalMs;
@@ -106,8 +104,6 @@
_iceConnectionReceivingTimeout = config.ice_connection_receiving_timeout;
_iceBackupCandidatePairPingInterval =
config.ice_backup_candidate_pair_ping_interval;
- _useMediaTransport = config.use_media_transport;
- _useMediaTransportForDataChannels = config.use_media_transport_for_data_channels;
_keyType = RTCEncryptionKeyTypeECDSA;
_iceCandidatePoolSize = config.ice_candidate_pool_size;
_shouldPruneTurnPorts = config.prune_turn_ports;
@@ -143,7 +139,7 @@
- (NSString *)description {
static NSString *formatString = @"RTC_OBJC_TYPE(RTCConfiguration): "
@"{\n%@\n%@\n%@\n%@\n%@\n%@\n%@\n%@\n%d\n%d\n%d\n%d\n%d\n%d\n"
- @"%d\n%@\n%d\n%d\n%d\n%d\n%d\n%@\n%d\n}\n";
+ @"%d\n%@\n%d\n%d\n%d\n%d\n%d\n%@\n}\n";
return [NSString
stringWithFormat:formatString,
@@ -169,7 +165,6 @@
_disableIPV6OnWiFi,
_maxIPv6Networks,
_activeResetSrtpParams,
- _useMediaTransport,
_enableDscp];
}
@@ -208,8 +203,6 @@
_iceConnectionReceivingTimeout;
nativeConfig->ice_backup_candidate_pair_ping_interval =
_iceBackupCandidatePairPingInterval;
- nativeConfig->use_media_transport = _useMediaTransport;
- nativeConfig->use_media_transport_for_data_channels = _useMediaTransportForDataChannels;
rtc::KeyType keyType =
[[self class] nativeEncryptionKeyTypeForKeyType:_keyType];
if (_certificate != nullptr) {
diff --git a/chromium/third_party/webrtc/sdk/objc/api/peerconnection/RTCPeerConnection.mm b/chromium/third_party/webrtc/sdk/objc/api/peerconnection/RTCPeerConnection.mm
index fa68d08e74d..9e561fc65f9 100644
--- a/chromium/third_party/webrtc/sdk/objc/api/peerconnection/RTCPeerConnection.mm
+++ b/chromium/third_party/webrtc/sdk/objc/api/peerconnection/RTCPeerConnection.mm
@@ -29,7 +29,6 @@
#include "api/jsep_ice_candidate.h"
#include "api/rtc_event_log_output_file.h"
-#include "api/transport/media/media_transport_interface.h"
#include "rtc_base/checks.h"
#include "rtc_base/numerics/safe_conversions.h"
diff --git a/chromium/third_party/webrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h b/chromium/third_party/webrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h
index c2aab0be568..1d3b82550a5 100644
--- a/chromium/third_party/webrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h
+++ b/chromium/third_party/webrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h
@@ -17,7 +17,6 @@ namespace webrtc {
class AudioDeviceModule;
class AudioEncoderFactory;
class AudioDecoderFactory;
-class MediaTransportFactory;
class NetworkControllerFactoryInterface;
class VideoEncoderFactory;
class VideoDecoderFactory;
@@ -65,30 +64,12 @@ NS_ASSUME_NONNULL_BEGIN
audioDeviceModule:(nullable webrtc::AudioDeviceModule *)audioDeviceModule
audioProcessingModule:
(rtc::scoped_refptr<webrtc::AudioProcessing>)audioProcessingModule
- mediaTransportFactory:
- (std::unique_ptr<webrtc::MediaTransportFactory>)mediaTransportFactory;
-
-- (instancetype)
- initWithNativeAudioEncoderFactory:
- (rtc::scoped_refptr<webrtc::AudioEncoderFactory>)audioEncoderFactory
- nativeAudioDecoderFactory:
- (rtc::scoped_refptr<webrtc::AudioDecoderFactory>)audioDecoderFactory
- nativeVideoEncoderFactory:
- (std::unique_ptr<webrtc::VideoEncoderFactory>)videoEncoderFactory
- nativeVideoDecoderFactory:
- (std::unique_ptr<webrtc::VideoDecoderFactory>)videoDecoderFactory
- audioDeviceModule:(nullable webrtc::AudioDeviceModule *)audioDeviceModule
- audioProcessingModule:
- (rtc::scoped_refptr<webrtc::AudioProcessing>)audioProcessingModule
networkControllerFactory:(std::unique_ptr<webrtc::NetworkControllerFactoryInterface>)
- networkControllerFactory
- mediaTransportFactory:
- (std::unique_ptr<webrtc::MediaTransportFactory>)mediaTransportFactory;
+ networkControllerFactory;
- (instancetype)
initWithEncoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)>)encoderFactory
- decoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)>)decoderFactory
- mediaTransportFactory:(std::unique_ptr<webrtc::MediaTransportFactory>)mediaTransportFactory;
+ decoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)>)decoderFactory;
/** Initialize an RTCPeerConnection with a configuration, constraints, and
* dependencies.
diff --git a/chromium/third_party/webrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm b/chromium/third_party/webrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm
index 2e34b05fed0..4ce38dbd7fd 100644
--- a/chromium/third_party/webrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm
+++ b/chromium/third_party/webrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm
@@ -52,7 +52,6 @@
// C++ target.
// TODO(zhihuang): Remove nogncheck once MediaEngineInterface is moved to C++
// API layer.
-#include "api/transport/media/media_transport_interface.h"
#include "media/engine/webrtc_media_engine.h" // nogncheck
@implementation RTC_OBJC_TYPE (RTCPeerConnectionFactory) {
@@ -84,15 +83,13 @@
nativeVideoDecoderFactory:webrtc::ObjCToNativeVideoDecoderFactory([[RTC_OBJC_TYPE(
RTCVideoDecoderFactoryH264) alloc] init])
audioDeviceModule:[self audioDeviceModule]
- audioProcessingModule:nullptr
- mediaTransportFactory:nullptr];
+ audioProcessingModule:nullptr];
#endif
}
- (instancetype)
initWithEncoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)>)encoderFactory
- decoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)>)decoderFactory
- mediaTransportFactory:(std::unique_ptr<webrtc::MediaTransportFactory>)mediaTransportFactory {
+ decoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)>)decoderFactory {
#ifdef HAVE_NO_MEDIA
return [self initWithNoMedia];
#else
@@ -109,18 +106,9 @@
nativeVideoEncoderFactory:std::move(native_encoder_factory)
nativeVideoDecoderFactory:std::move(native_decoder_factory)
audioDeviceModule:[self audioDeviceModule]
- audioProcessingModule:nullptr
- mediaTransportFactory:std::move(mediaTransportFactory)];
+ audioProcessingModule:nullptr];
#endif
}
-- (instancetype)
- initWithEncoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)>)encoderFactory
- decoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)>)decoderFactory {
- return [self initWithEncoderFactory:encoderFactory
- decoderFactory:decoderFactory
- mediaTransportFactory:nullptr];
-}
-
- (instancetype)initNative {
if (self = [super init]) {
_networkThread = rtc::Thread::CreateWithSocketServer();
@@ -170,30 +158,7 @@
nativeVideoDecoderFactory:std::move(videoDecoderFactory)
audioDeviceModule:audioDeviceModule
audioProcessingModule:audioProcessingModule
- mediaTransportFactory:nullptr];
-}
-
-- (instancetype)initWithNativeAudioEncoderFactory:
- (rtc::scoped_refptr<webrtc::AudioEncoderFactory>)audioEncoderFactory
- nativeAudioDecoderFactory:
- (rtc::scoped_refptr<webrtc::AudioDecoderFactory>)audioDecoderFactory
- nativeVideoEncoderFactory:
- (std::unique_ptr<webrtc::VideoEncoderFactory>)videoEncoderFactory
- nativeVideoDecoderFactory:
- (std::unique_ptr<webrtc::VideoDecoderFactory>)videoDecoderFactory
- audioDeviceModule:(webrtc::AudioDeviceModule *)audioDeviceModule
- audioProcessingModule:
- (rtc::scoped_refptr<webrtc::AudioProcessing>)audioProcessingModule
- mediaTransportFactory:(std::unique_ptr<webrtc::MediaTransportFactory>)
- mediaTransportFactory {
- return [self initWithNativeAudioEncoderFactory:audioEncoderFactory
- nativeAudioDecoderFactory:audioDecoderFactory
- nativeVideoEncoderFactory:std::move(videoEncoderFactory)
- nativeVideoDecoderFactory:std::move(videoDecoderFactory)
- audioDeviceModule:audioDeviceModule
- audioProcessingModule:audioProcessingModule
- networkControllerFactory:nullptr
- mediaTransportFactory:std::move(mediaTransportFactory)];
+ networkControllerFactory:nullptr];
}
- (instancetype)initWithNativeAudioEncoderFactory:
(rtc::scoped_refptr<webrtc::AudioEncoderFactory>)audioEncoderFactory
@@ -208,9 +173,7 @@
(rtc::scoped_refptr<webrtc::AudioProcessing>)audioProcessingModule
networkControllerFactory:
(std::unique_ptr<webrtc::NetworkControllerFactoryInterface>)
- networkControllerFactory
- mediaTransportFactory:(std::unique_ptr<webrtc::MediaTransportFactory>)
- mediaTransportFactory {
+ networkControllerFactory {
if (self = [self initNative]) {
webrtc::PeerConnectionFactoryDependencies dependencies;
dependencies.network_thread = _networkThread.get();
@@ -235,7 +198,6 @@
dependencies.event_log_factory =
std::make_unique<webrtc::RtcEventLogFactory>(dependencies.task_queue_factory.get());
dependencies.network_controller_factory = std::move(networkControllerFactory);
- dependencies.media_transport_factory = std::move(mediaTransportFactory);
#endif
_nativeFactory = webrtc::CreateModularPeerConnectionFactory(std::move(dependencies));
NSAssert(_nativeFactory, @"Failed to initialize PeerConnectionFactory!");
diff --git a/chromium/third_party/webrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm b/chromium/third_party/webrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm
index 8f52bea8e33..991ec5a41cc 100644
--- a/chromium/third_party/webrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm
+++ b/chromium/third_party/webrtc/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm
@@ -13,7 +13,6 @@
#include "api/audio_codecs/audio_decoder_factory.h"
#include "api/audio_codecs/audio_encoder_factory.h"
-#include "api/transport/media/media_transport_interface.h"
#include "api/video_codecs/video_decoder_factory.h"
#include "api/video_codecs/video_encoder_factory.h"
#include "modules/audio_device/include/audio_device.h"
@@ -26,7 +25,6 @@
rtc::scoped_refptr<webrtc::AudioDecoderFactory> _audioDecoderFactory;
rtc::scoped_refptr<webrtc::AudioDeviceModule> _audioDeviceModule;
rtc::scoped_refptr<webrtc::AudioProcessing> _audioProcessingModule;
- std::unique_ptr<webrtc::MediaTransportFactory> _mediaTransportFactory;
}
+ (RTCPeerConnectionFactoryBuilder *)builder {
@@ -41,8 +39,7 @@
nativeVideoEncoderFactory:std::move(_videoEncoderFactory)
nativeVideoDecoderFactory:std::move(_videoDecoderFactory)
audioDeviceModule:_audioDeviceModule
- audioProcessingModule:_audioProcessingModule
- mediaTransportFactory:std::move(_mediaTransportFactory)];
+ audioProcessingModule:_audioProcessingModule];
}
- (void)setVideoEncoderFactory:(std::unique_ptr<webrtc::VideoEncoderFactory>)videoEncoderFactory {
diff --git a/chromium/third_party/webrtc/sdk/objc/native/src/audio/audio_device_ios.mm b/chromium/third_party/webrtc/sdk/objc/native/src/audio/audio_device_ios.mm
index b70c4d0e50b..55dc517e4c1 100644
--- a/chromium/third_party/webrtc/sdk/objc/native/src/audio/audio_device_ios.mm
+++ b/chromium/third_party/webrtc/sdk/objc/native/src/audio/audio_device_ios.mm
@@ -102,7 +102,8 @@ static void LogDeviceInfo() {
#endif // !defined(NDEBUG)
AudioDeviceIOS::AudioDeviceIOS()
- : audio_device_buffer_(nullptr),
+ : MessageHandler(false),
+ audio_device_buffer_(nullptr),
audio_unit_(nullptr),
recording_(0),
playing_(0),
@@ -125,6 +126,7 @@ AudioDeviceIOS::AudioDeviceIOS()
AudioDeviceIOS::~AudioDeviceIOS() {
RTC_DCHECK(thread_checker_.IsCurrent());
LOGI() << "~dtor" << ios::GetCurrentThreadDescription();
+ thread_->Clear(this);
Terminate();
audio_session_observer_ = nil;
}
diff --git a/chromium/third_party/webrtc/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm b/chromium/third_party/webrtc/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm
index 7d19d4095d7..14131dc38d3 100644
--- a/chromium/third_party/webrtc/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm
+++ b/chromium/third_party/webrtc/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm
@@ -22,7 +22,6 @@ extern "C" {
#include "api/audio_codecs/builtin_audio_decoder_factory.h"
#include "api/audio_codecs/builtin_audio_encoder_factory.h"
-#include "api/transport/media/media_transport_interface.h"
#include "api/video_codecs/video_decoder_factory.h"
#include "api/video_codecs/video_encoder_factory.h"
#include "modules/audio_device/include/audio_device.h"
@@ -50,8 +49,7 @@ extern "C" {
nativeVideoEncoderFactory:nullptr
nativeVideoDecoderFactory:nullptr
audioDeviceModule:nullptr
- audioProcessingModule:nullptr
- mediaTransportFactory:nullptr]);
+ audioProcessingModule:nullptr]);
#endif
RTCPeerConnectionFactoryBuilder* builder = [[RTCPeerConnectionFactoryBuilder alloc] init];
RTC_OBJC_TYPE(RTCPeerConnectionFactory)* peerConnectionFactory =
@@ -72,8 +70,7 @@ extern "C" {
nativeVideoEncoderFactory:nullptr
nativeVideoDecoderFactory:nullptr
audioDeviceModule:nullptr
- audioProcessingModule:nullptr
- mediaTransportFactory:nullptr]);
+ audioProcessingModule:nullptr]);
#endif
RTCPeerConnectionFactoryBuilder* builder = [RTCPeerConnectionFactoryBuilder defaultBuilder];
RTC_OBJC_TYPE(RTCPeerConnectionFactory)* peerConnectionFactory =
diff --git a/chromium/third_party/webrtc/stats/rtcstats_objects.cc b/chromium/third_party/webrtc/stats/rtcstats_objects.cc
index 453acce9252..6c7b8d2123e 100644
--- a/chromium/third_party/webrtc/stats/rtcstats_objects.cc
+++ b/chromium/third_party/webrtc/stats/rtcstats_objects.cc
@@ -124,7 +124,7 @@ RTCCodecStats::~RTCCodecStats() {}
WEBRTC_RTCSTATS_IMPL(RTCDataChannelStats, RTCStats, "data-channel",
&label,
&protocol,
- &datachannelid,
+ &data_channel_identifier,
&state,
&messages_sent,
&bytes_sent,
@@ -140,7 +140,7 @@ RTCDataChannelStats::RTCDataChannelStats(std::string&& id, int64_t timestamp_us)
: RTCStats(std::move(id), timestamp_us),
label("label"),
protocol("protocol"),
- datachannelid("datachannelid"),
+ data_channel_identifier("dataChannelIdentifier"),
state("state"),
messages_sent("messagesSent"),
bytes_sent("bytesSent"),
@@ -151,7 +151,7 @@ RTCDataChannelStats::RTCDataChannelStats(const RTCDataChannelStats& other)
: RTCStats(other.id(), other.timestamp_us()),
label(other.label),
protocol(other.protocol),
- datachannelid(other.datachannelid),
+ data_channel_identifier(other.data_channel_identifier),
state(other.state),
messages_sent(other.messages_sent),
bytes_sent(other.bytes_sent),
diff --git a/chromium/third_party/webrtc/style-guide.md b/chromium/third_party/webrtc/style-guide.md
index 901217a86de..80c33021562 100644
--- a/chromium/third_party/webrtc/style-guide.md
+++ b/chromium/third_party/webrtc/style-guide.md
@@ -31,6 +31,10 @@ WebRTC is written in C++14, but with some restrictions:
[chromium-cpp]: https://chromium-cpp.appspot.com/
+Unlike the Chromium and Google C++ style guides, we do not allow C++20-style
+designated initializers, because we want to stay compatible with compilers that
+do not yet support them.
+
### Abseil
You may use a subset of the utilities provided by the [Abseil][abseil]
diff --git a/chromium/third_party/webrtc/system_wrappers/BUILD.gn b/chromium/third_party/webrtc/system_wrappers/BUILD.gn
index 1ff2ddd4fda..4b2278c5032 100644
--- a/chromium/third_party/webrtc/system_wrappers/BUILD.gn
+++ b/chromium/third_party/webrtc/system_wrappers/BUILD.gn
@@ -38,8 +38,8 @@ rtc_library("system_wrappers") {
"../rtc_base/synchronization:rw_lock_wrapper",
"../rtc_base/system:arch",
"../rtc_base/system:rtc_export",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
if (is_android) {
if (build_with_mozilla) {
@@ -92,8 +92,8 @@ rtc_library("field_trial") {
"../rtc_base:checks",
"../rtc_base:logging",
"../rtc_base:stringutils",
- "//third_party/abseil-cpp/absl/strings",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
rtc_library("metrics") {
diff --git a/chromium/third_party/webrtc/system_wrappers/source/field_trial_unittest.cc b/chromium/third_party/webrtc/system_wrappers/source/field_trial_unittest.cc
index fdabe1b7e63..ada6313e674 100644
--- a/chromium/third_party/webrtc/system_wrappers/source/field_trial_unittest.cc
+++ b/chromium/third_party/webrtc/system_wrappers/source/field_trial_unittest.cc
@@ -32,7 +32,7 @@ TEST(FieldTrialValidationTest, AcceptsValidInputs) {
EXPECT_TRUE(FieldTrialsStringIsValid("Audio/Enabled/B/C/Audio/Enabled/"));
}
-TEST(FieldTrialValidationTest, RejectsBadInputs) {
+TEST(FieldTrialValidationDeathTest, RejectsBadInputs) {
// Bad delimiters
RTC_EXPECT_DEATH(InitFieldTrialsFromString("Audio/EnabledVideo/Disabled/"),
"Invalid field trials string:");
@@ -90,7 +90,7 @@ TEST(FieldTrialMergingTest, MergesValidInput) {
"Audio/Enabled/Video/Enabled/");
}
-TEST(FieldTrialMergingTest, DchecksBadInput) {
+TEST(FieldTrialMergingDeathTest, DchecksBadInput) {
RTC_EXPECT_DEATH(MergeFieldTrialsStrings("Audio/Enabled/", "garbage"),
"Invalid field trials string:");
}
diff --git a/chromium/third_party/webrtc/system_wrappers/source/metrics_unittest.cc b/chromium/third_party/webrtc/system_wrappers/source/metrics_unittest.cc
index 9e5bc86ba92..7532b2ad830 100644
--- a/chromium/third_party/webrtc/system_wrappers/source/metrics_unittest.cc
+++ b/chromium/third_party/webrtc/system_wrappers/source/metrics_unittest.cc
@@ -114,7 +114,8 @@ TEST_F(MetricsTest, RtcHistogramsCounts_AddSample) {
}
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST_F(MetricsTest, RtcHistogramsCounts_InvalidIndex) {
+using MetricsDeathTest = MetricsTest;
+TEST_F(MetricsDeathTest, RtcHistogramsCounts_InvalidIndex) {
EXPECT_DEATH(RTC_HISTOGRAMS_COUNTS_1000(-1, "Name", kSample), "");
EXPECT_DEATH(RTC_HISTOGRAMS_COUNTS_1000(3, "Name", kSample), "");
EXPECT_DEATH(RTC_HISTOGRAMS_COUNTS_1000(3u, "Name", kSample), "");
diff --git a/chromium/third_party/webrtc/test/BUILD.gn b/chromium/third_party/webrtc/test/BUILD.gn
index 34da8894f74..9215b679cea 100644
--- a/chromium/third_party/webrtc/test/BUILD.gn
+++ b/chromium/third_party/webrtc/test/BUILD.gn
@@ -22,13 +22,13 @@ group("test") {
":test_renderer",
":test_support",
":video_test_common",
- "pc/e2e",
]
if (rtc_include_tests) {
deps += [
":test_main",
":test_support_unittests",
+ "pc/e2e",
]
}
}
@@ -70,8 +70,8 @@ rtc_library("frame_generator_impl") {
"../rtc_base/synchronization:sequence_checker",
"../rtc_base/system:file_wrapper",
"../system_wrappers",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("frame_utils") {
@@ -127,8 +127,8 @@ rtc_library("video_test_common") {
"../rtc_base:timeutils",
"../rtc_base/task_utils:repeating_task",
"../system_wrappers",
- "//third_party/abseil-cpp/absl/strings",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
if (!build_with_chromium) {
@@ -160,10 +160,8 @@ if (!build_with_chromium) {
"platform_video_capturer.cc",
"platform_video_capturer.h",
]
- deps = [
- ":video_test_common",
- "//third_party/abseil-cpp/absl/memory",
- ]
+ deps = [ ":video_test_common" ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
if (is_mac || is_ios) {
deps += [ ":video_test_mac" ]
} else {
@@ -218,6 +216,20 @@ rtc_library("field_trial") {
deps = [ "../system_wrappers:field_trial" ]
}
+rtc_library("explicit_key_value_config") {
+ sources = [
+ "explicit_key_value_config.cc",
+ "explicit_key_value_config.h",
+ ]
+
+ deps = [
+ "../api/transport:webrtc_key_value_config",
+ "../rtc_base:checks",
+ "../system_wrappers:field_trial",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings:strings" ]
+}
+
rtc_library("perf_test") {
visibility = [ "*" ]
testonly = true
@@ -235,9 +247,8 @@ rtc_library("perf_test") {
"../rtc_base:criticalsection",
"../rtc_base:logging",
"../rtc_base:rtc_numerics",
- "//third_party/abseil-cpp/absl/flags:flag",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
if (rtc_enable_protobuf) {
sources += [ "testsupport/perf_test_histogram_writer.cc" ]
deps += [
@@ -262,8 +273,8 @@ if (is_ios) {
deps = [
":perf_test",
"../sdk:helpers_objc",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
configs += [ ":test_support_objc_config" ]
}
@@ -359,8 +370,8 @@ rtc_library("video_test_support") {
"../rtc_base:rtc_event",
"../rtc_base/synchronization:sequence_checker",
"../rtc_base/system:file_wrapper",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
if (!is_ios) {
deps += [ "//third_party:jpeg" ]
@@ -375,6 +386,16 @@ rtc_library("video_test_support") {
}
if (rtc_include_tests) {
+ rtc_library("resources_dir_flag") {
+ testonly = true
+ visibility = [ "*" ]
+ sources = [
+ "testsupport/resources_dir_flag.cc",
+ "testsupport/resources_dir_flag.h",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/flags:flag" ]
+ }
+
rtc_library("test_main_lib") {
visibility = [ "*" ]
testonly = true
@@ -394,9 +415,12 @@ if (rtc_include_tests) {
"../rtc_base:rtc_base_approved",
"../system_wrappers:field_trial",
"../system_wrappers:metrics",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/flags:flag",
"//third_party/abseil-cpp/absl/flags:parse",
"//third_party/abseil-cpp/absl/memory",
+ "//third_party/abseil-cpp/absl/strings:strings",
"//third_party/abseil-cpp/absl/types:optional",
]
}
@@ -406,13 +430,20 @@ if (rtc_include_tests) {
testonly = true
sources = [ "test_main.cc" ]
- deps = [
- ":test_main_lib",
+ deps = [ ":test_main_lib" ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/debugging:failure_signal_handler",
"//third_party/abseil-cpp/absl/debugging:symbolize",
]
}
+ rtc_library("benchmark_main") {
+ testonly = true
+ sources = [ "benchmark_main.cc" ]
+
+ deps = [ "//third_party/google_benchmark" ]
+ }
+
rtc_library("test_support_test_artifacts") {
testonly = true
sources = [
@@ -423,6 +454,8 @@ if (rtc_include_tests) {
":fileutils",
"../rtc_base:logging",
"../rtc_base/system:file_wrapper",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/flags:flag",
"//third_party/abseil-cpp/absl/flags:parse",
]
@@ -586,8 +619,8 @@ rtc_library("fileutils") {
":fileutils_override_impl",
"../rtc_base:checks",
"../rtc_base:stringutils",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
if (is_ios) {
deps += [ ":fileutils_ios_objc" ]
}
@@ -599,16 +632,6 @@ rtc_library("fileutils") {
}
}
-rtc_library("resources_dir_flag") {
- testonly = true
- visibility = [ "*" ]
- sources = [
- "testsupport/resources_dir_flag.cc",
- "testsupport/resources_dir_flag.h",
- ]
- deps = [ "//third_party/abseil-cpp/absl/flags:flag" ]
-}
-
# We separate header into own target to make it possible for downstream
# projects to override implementation.
rtc_source_set("fileutils_override_api") {
@@ -625,8 +648,8 @@ rtc_library("fileutils_override_impl") {
"../rtc_base:checks",
"../rtc_base:macromagic",
"../rtc_base:stringutils",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
if (is_ios) {
deps += [ ":fileutils_ios_objc" ]
}
@@ -676,8 +699,8 @@ rtc_library("fileutils_unittests") {
":fileutils",
":test_support",
"../rtc_base:checks",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("direct_transport") {
@@ -699,8 +722,8 @@ rtc_library("direct_transport") {
"../rtc_base:timeutils",
"../rtc_base/synchronization:sequence_checker",
"../rtc_base/task_utils:repeating_task",
- "//third_party/abseil-cpp/absl/memory",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
public_deps = # no-presubmit-check TODO(webrtc:8603)
[ "../call:fake_network" ]
}
@@ -742,8 +765,8 @@ rtc_library("fake_video_codecs") {
"../rtc_base:timeutils",
"../rtc_base/synchronization:sequence_checker",
"../system_wrappers",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("null_transport") {
@@ -839,8 +862,8 @@ rtc_library("test_common") {
"../rtc_base/task_utils:to_queued_task",
"../system_wrappers",
"../system_wrappers:field_trial",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
if (!is_android && !build_with_chromium) {
deps += [ "../modules/video_capture:video_capture_internal_impl" ]
}
@@ -986,8 +1009,8 @@ rtc_library("audio_codec_mocks") {
"../api/audio_codecs:builtin_audio_decoder_factory",
"../rtc_base:checks",
"../rtc_base:refcount",
- "//third_party/abseil-cpp/absl/memory",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
}
rtc_library("copy_to_file_audio_capturer") {
@@ -1001,8 +1024,8 @@ rtc_library("copy_to_file_audio_capturer") {
"../common_audio",
"../modules/audio_device:audio_device_impl",
"../rtc_base:rtc_base_approved",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("copy_to_file_audio_capturer_unittest") {
diff --git a/chromium/third_party/webrtc/test/DEPS b/chromium/third_party/webrtc/test/DEPS
index 62fd6d3ff77..170c4086d74 100644
--- a/chromium/third_party/webrtc/test/DEPS
+++ b/chromium/third_party/webrtc/test/DEPS
@@ -72,5 +72,8 @@ specific_include_rules = {
],
".*test_video_capturer_video_track_source.h": [
"+pc",
+ ],
+ "benchmark_main\.cc": [
+ "+benchmark",
]
}
diff --git a/chromium/third_party/webrtc/test/benchmark_main.cc b/chromium/third_party/webrtc/test/benchmark_main.cc
new file mode 100644
index 00000000000..1a79c249133
--- /dev/null
+++ b/chromium/third_party/webrtc/test/benchmark_main.cc
@@ -0,0 +1,17 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "benchmark/benchmark.h"
+
+int main(int argc, char* argv[]) {
+ benchmark::Initialize(&argc, argv);
+ benchmark::RunSpecifiedBenchmarks();
+ return 0;
+}
diff --git a/chromium/third_party/webrtc/test/explicit_key_value_config.cc b/chromium/third_party/webrtc/test/explicit_key_value_config.cc
new file mode 100644
index 00000000000..69f725a9e24
--- /dev/null
+++ b/chromium/third_party/webrtc/test/explicit_key_value_config.cc
@@ -0,0 +1,57 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/explicit_key_value_config.h"
+
+#include "api/transport/webrtc_key_value_config.h"
+#include "rtc_base/checks.h"
+#include "system_wrappers/include/field_trial.h"
+
+namespace webrtc {
+namespace test {
+
+ExplicitKeyValueConfig::ExplicitKeyValueConfig(const std::string& s) {
+ std::string::size_type field_start = 0;
+ while (field_start < s.size()) {
+ std::string::size_type separator_pos = s.find('/', field_start);
+ RTC_CHECK_NE(separator_pos, std::string::npos)
+ << "Missing separator '/' after field trial key.";
+ RTC_CHECK_GT(separator_pos, field_start)
+ << "Field trial key cannot be empty.";
+ std::string key = s.substr(field_start, separator_pos - field_start);
+ field_start = separator_pos + 1;
+
+ RTC_CHECK_LT(field_start, s.size())
+ << "Missing value after field trial key. String ended.";
+ separator_pos = s.find('/', field_start);
+ RTC_CHECK_NE(separator_pos, std::string::npos)
+ << "Missing terminating '/' in field trial string.";
+ RTC_CHECK_GT(separator_pos, field_start)
+ << "Field trial value cannot be empty.";
+ std::string value = s.substr(field_start, separator_pos - field_start);
+ field_start = separator_pos + 1;
+
+ key_value_map_[key] = value;
+ }
+ // This check is technically redundant due to earlier checks.
+ // We nevertheless keep the check to make it clear that the entire
+ // string has been processed, and without indexing past the end.
+ RTC_CHECK_EQ(field_start, s.size());
+}
+
+std::string ExplicitKeyValueConfig::Lookup(absl::string_view key) const {
+ auto it = key_value_map_.find(std::string(key));
+ if (it != key_value_map_.end())
+ return it->second;
+ return "";
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/test/explicit_key_value_config.h b/chromium/third_party/webrtc/test/explicit_key_value_config.h
new file mode 100644
index 00000000000..9a3bc84f606
--- /dev/null
+++ b/chromium/third_party/webrtc/test/explicit_key_value_config.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_EXPLICIT_KEY_VALUE_CONFIG_H_
+#define TEST_EXPLICIT_KEY_VALUE_CONFIG_H_
+
+#include <map>
+#include <string>
+
+#include "absl/strings/string_view.h"
+#include "api/transport/webrtc_key_value_config.h"
+
+namespace webrtc {
+namespace test {
+
+class ExplicitKeyValueConfig : public WebRtcKeyValueConfig {
+ public:
+ explicit ExplicitKeyValueConfig(const std::string& s);
+ std::string Lookup(absl::string_view key) const override;
+
+ private:
+ std::map<std::string, std::string> key_value_map_;
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_EXPLICIT_KEY_VALUE_CONFIG_H_
diff --git a/chromium/third_party/webrtc/test/fake_encoder.cc b/chromium/third_party/webrtc/test/fake_encoder.cc
index 64b4a4e9ff8..2959559910e 100644
--- a/chromium/third_party/webrtc/test/fake_encoder.cc
+++ b/chromium/third_party/webrtc/test/fake_encoder.cc
@@ -69,7 +69,7 @@ void FakeEncoder::SetMaxBitrate(int max_kbps) {
RTC_DCHECK_GE(max_kbps, -1); // max_kbps == -1 disables it.
rtc::CritScope cs(&crit_sect_);
max_target_bitrate_kbps_ = max_kbps;
- SetRates(current_rate_settings_);
+ SetRatesLocked(current_rate_settings_);
}
void FakeEncoder::SetQp(int qp) {
@@ -243,6 +243,10 @@ int32_t FakeEncoder::Release() {
void FakeEncoder::SetRates(const RateControlParameters& parameters) {
rtc::CritScope cs(&crit_sect_);
+ SetRatesLocked(parameters);
+}
+
+void FakeEncoder::SetRatesLocked(const RateControlParameters& parameters) {
current_rate_settings_ = parameters;
int allocated_bitrate_kbps = parameters.bitrate.get_sum_kbps();
diff --git a/chromium/third_party/webrtc/test/fake_encoder.h b/chromium/third_party/webrtc/test/fake_encoder.h
index 39838d16f14..ade0e35560c 100644
--- a/chromium/third_party/webrtc/test/fake_encoder.h
+++ b/chromium/third_party/webrtc/test/fake_encoder.h
@@ -40,21 +40,23 @@ class FakeEncoder : public VideoEncoder {
virtual ~FakeEncoder() = default;
// Sets max bitrate. Not thread-safe, call before registering the encoder.
- void SetMaxBitrate(int max_kbps);
- void SetQp(int qp);
+ void SetMaxBitrate(int max_kbps) RTC_LOCKS_EXCLUDED(crit_sect_);
+ void SetQp(int qp) RTC_LOCKS_EXCLUDED(crit_sect_);
void SetFecControllerOverride(
FecControllerOverride* fec_controller_override) override;
- int32_t InitEncode(const VideoCodec* config,
- const Settings& settings) override;
+ int32_t InitEncode(const VideoCodec* config, const Settings& settings)
+ RTC_LOCKS_EXCLUDED(crit_sect_) override;
int32_t Encode(const VideoFrame& input_image,
- const std::vector<VideoFrameType>* frame_types) override;
- int32_t RegisterEncodeCompleteCallback(
- EncodedImageCallback* callback) override;
+ const std::vector<VideoFrameType>* frame_types)
+ RTC_LOCKS_EXCLUDED(crit_sect_) override;
+ int32_t RegisterEncodeCompleteCallback(EncodedImageCallback* callback)
+ RTC_LOCKS_EXCLUDED(crit_sect_) override;
int32_t Release() override;
- void SetRates(const RateControlParameters& parameters) override;
- int GetConfiguredInputFramerate() const;
+ void SetRates(const RateControlParameters& parameters)
+ RTC_LOCKS_EXCLUDED(crit_sect_) override;
+ int GetConfiguredInputFramerate() const RTC_LOCKS_EXCLUDED(crit_sect_);
EncoderInfo GetEncoderInfo() const override;
static const char* kImplementationName;
@@ -79,7 +81,7 @@ class FakeEncoder : public VideoEncoder {
uint8_t num_simulcast_streams,
const VideoBitrateAllocation& target_bitrate,
SimulcastStream simulcast_streams[kMaxSimulcastStreams],
- int framerate);
+ int framerate) RTC_LOCKS_EXCLUDED(crit_sect_);
// Called before the frame is passed to callback_->OnEncodedImage, to let
// subclasses fill out codec_specific, possibly modify encodedImage.
@@ -88,6 +90,9 @@ class FakeEncoder : public VideoEncoder {
EncodedImage* encoded_image,
CodecSpecificInfo* codec_specific);
+ void SetRatesLocked(const RateControlParameters& parameters)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
FrameInfo last_frame_info_ RTC_GUARDED_BY(crit_sect_);
Clock* const clock_;
diff --git a/chromium/third_party/webrtc/test/frame_forwarder.cc b/chromium/third_party/webrtc/test/frame_forwarder.cc
index d1a2ddb1c29..d8ec4b5060e 100644
--- a/chromium/third_party/webrtc/test/frame_forwarder.cc
+++ b/chromium/third_party/webrtc/test/frame_forwarder.cc
@@ -26,6 +26,12 @@ void FrameForwarder::IncomingCapturedFrame(const VideoFrame& video_frame) {
void FrameForwarder::AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
const rtc::VideoSinkWants& wants) {
rtc::CritScope lock(&crit_);
+ AddOrUpdateSinkLocked(sink, wants);
+}
+
+void FrameForwarder::AddOrUpdateSinkLocked(
+ rtc::VideoSinkInterface<VideoFrame>* sink,
+ const rtc::VideoSinkWants& wants) {
RTC_DCHECK(!sink_ || sink_ == sink);
sink_ = sink;
sink_wants_ = wants;
@@ -42,6 +48,10 @@ rtc::VideoSinkWants FrameForwarder::sink_wants() const {
return sink_wants_;
}
+rtc::VideoSinkWants FrameForwarder::sink_wants_locked() const {
+ return sink_wants_;
+}
+
bool FrameForwarder::has_sinks() const {
rtc::CritScope lock(&crit_);
return sink_ != nullptr;
diff --git a/chromium/third_party/webrtc/test/frame_forwarder.h b/chromium/third_party/webrtc/test/frame_forwarder.h
index cf29f5f0743..d391160fab0 100644
--- a/chromium/third_party/webrtc/test/frame_forwarder.h
+++ b/chromium/third_party/webrtc/test/frame_forwarder.h
@@ -26,14 +26,22 @@ class FrameForwarder : public rtc::VideoSourceInterface<VideoFrame> {
FrameForwarder();
~FrameForwarder() override;
// Forwards |video_frame| to the registered |sink_|.
- virtual void IncomingCapturedFrame(const VideoFrame& video_frame);
- rtc::VideoSinkWants sink_wants() const;
- bool has_sinks() const;
+ virtual void IncomingCapturedFrame(const VideoFrame& video_frame)
+ RTC_LOCKS_EXCLUDED(crit_);
+ rtc::VideoSinkWants sink_wants() const RTC_LOCKS_EXCLUDED(crit_);
+ bool has_sinks() const RTC_LOCKS_EXCLUDED(crit_);
protected:
+ rtc::VideoSinkWants sink_wants_locked() const
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
- const rtc::VideoSinkWants& wants) override;
- void RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink) override;
+ const rtc::VideoSinkWants& wants)
+ RTC_LOCKS_EXCLUDED(crit_) override;
+ void AddOrUpdateSinkLocked(rtc::VideoSinkInterface<VideoFrame>* sink,
+ const rtc::VideoSinkWants& wants)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ void RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink)
+ RTC_LOCKS_EXCLUDED(crit_) override;
rtc::CriticalSection crit_;
rtc::VideoSinkInterface<VideoFrame>* sink_ RTC_GUARDED_BY(crit_);
diff --git a/chromium/third_party/webrtc/test/frame_generator_capturer_unittest.cc b/chromium/third_party/webrtc/test/frame_generator_capturer_unittest.cc
index 7400bbb79bd..a76cb95d443 100644
--- a/chromium/third_party/webrtc/test/frame_generator_capturer_unittest.cc
+++ b/chromium/third_party/webrtc/test/frame_generator_capturer_unittest.cc
@@ -22,8 +22,8 @@ using ::testing::Property;
class MockVideoSinkInterfaceVideoFrame
: public rtc::VideoSinkInterface<VideoFrame> {
public:
- MOCK_METHOD1(OnFrame, void(const VideoFrame& frame));
- MOCK_METHOD0(OnDiscardedFrame, void());
+ MOCK_METHOD(void, OnFrame, (const VideoFrame& frame), (override));
+ MOCK_METHOD(void, OnDiscardedFrame, (), (override));
};
} // namespace
TEST(FrameGeneratorCapturerTest, CreateFromConfig) {
diff --git a/chromium/third_party/webrtc/test/fuzzers/BUILD.gn b/chromium/third_party/webrtc/test/fuzzers/BUILD.gn
index 96376a2e839..203490f417c 100644
--- a/chromium/third_party/webrtc/test/fuzzers/BUILD.gn
+++ b/chromium/third_party/webrtc/test/fuzzers/BUILD.gn
@@ -40,6 +40,10 @@ rtc_library("fuzz_data_helper") {
visibility = [ ":*" ] # Only targets in this file can depend on this.
}
+set_defaults("webrtc_fuzzer_test") {
+ absl_deps = []
+}
+
template("webrtc_fuzzer_test") {
fuzzer_test(target_name) {
forward_variables_from(invoker, "*")
@@ -47,6 +51,21 @@ template("webrtc_fuzzer_test") {
":fuzz_data_helper",
":webrtc_fuzzer_main",
]
+
+ # If absl_deps is [], no action is needed. If not [], then it needs to be
+ # converted to //third_party/abseil-cpp:absl when build_with_chromium=true
+ # otherwise it just needs to be added to deps.
+ if (absl_deps != []) {
+ if (!defined(deps)) {
+ deps = []
+ }
+ if (build_with_chromium) {
+ deps += [ "//third_party/abseil-cpp:absl" ]
+ } else {
+ deps += absl_deps
+ }
+ }
+
if (!build_with_chromium && is_clang) {
suppressed_configs = [ "//build/config/clang:find_bad_constructs" ]
}
@@ -194,10 +213,8 @@ webrtc_fuzzer_test("rtcp_receiver_fuzzer") {
webrtc_fuzzer_test("rtp_packet_fuzzer") {
sources = [ "rtp_packet_fuzzer.cc" ]
- deps = [
- "../../modules/rtp_rtcp:rtp_rtcp_format",
- "//third_party/abseil-cpp/absl/types:optional",
- ]
+ deps = [ "../../modules/rtp_rtcp:rtp_rtcp_format" ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
seed_corpus = "corpora/rtp-corpus"
}
@@ -240,8 +257,8 @@ rtc_library("audio_decoder_fuzzer") {
"../../modules/rtp_rtcp:rtp_rtcp_format",
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
webrtc_fuzzer_test("audio_decoder_ilbc_fuzzer") {
@@ -318,7 +335,7 @@ webrtc_fuzzer_test("audio_encoder_opus_fuzzer") {
}
webrtc_fuzzer_test("audio_encoder_isac_fixed_fuzzer") {
- sources = [ "audio_encoder_isax_fixed_fuzzer.cc" ]
+ sources = [ "audio_encoder_isac_fixed_fuzzer.cc" ]
deps = [
":audio_encoder_fuzzer",
"../../api/audio_codecs/isac:audio_encoder_isac_fix",
@@ -327,7 +344,7 @@ webrtc_fuzzer_test("audio_encoder_isac_fixed_fuzzer") {
}
webrtc_fuzzer_test("audio_encoder_isac_float_fuzzer") {
- sources = [ "audio_encoder_isax_float_fuzzer.cc" ]
+ sources = [ "audio_encoder_isac_float_fuzzer.cc" ]
deps = [
":audio_encoder_fuzzer",
"../../api/audio_codecs/isac:audio_encoder_isac_float",
@@ -439,8 +456,8 @@ rtc_library("audio_processing_fuzzer_helper") {
"../../modules/audio_processing:audio_frame_proxies",
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
webrtc_fuzzer_test("audio_processing_fuzzer") {
@@ -461,8 +478,8 @@ webrtc_fuzzer_test("audio_processing_fuzzer") {
"../../rtc_base:rtc_task_queue",
"../../rtc_base:safe_minmax",
"../../system_wrappers:field_trial",
- "//third_party/abseil-cpp/absl/memory",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
seed_corpus = "corpora/audio_processing-corpus"
}
diff --git a/chromium/third_party/webrtc/test/logging/BUILD.gn b/chromium/third_party/webrtc/test/logging/BUILD.gn
index db2a5447ac9..1af2ecfdac2 100644
--- a/chromium/third_party/webrtc/test/logging/BUILD.gn
+++ b/chromium/third_party/webrtc/test/logging/BUILD.gn
@@ -27,6 +27,6 @@ rtc_library("log_writer") {
"../../rtc_base:rtc_base_tests_utils",
"../../rtc_base:stringutils",
"../../test:fileutils",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
diff --git a/chromium/third_party/webrtc/test/mock_audio_decoder.h b/chromium/third_party/webrtc/test/mock_audio_decoder.h
index 7c6db5acc22..8f44bf891d5 100644
--- a/chromium/third_party/webrtc/test/mock_audio_decoder.h
+++ b/chromium/third_party/webrtc/test/mock_audio_decoder.h
@@ -20,16 +20,18 @@ class MockAudioDecoder : public AudioDecoder {
public:
MockAudioDecoder();
~MockAudioDecoder();
- MOCK_METHOD0(Die, void());
- MOCK_METHOD5(DecodeInternal,
- int(const uint8_t*, size_t, int, int16_t*, SpeechType*));
- MOCK_CONST_METHOD0(HasDecodePlc, bool());
- MOCK_METHOD2(DecodePlc, size_t(size_t, int16_t*));
- MOCK_METHOD0(Reset, void());
- MOCK_METHOD0(ErrorCode, int());
- MOCK_CONST_METHOD2(PacketDuration, int(const uint8_t*, size_t));
- MOCK_CONST_METHOD0(Channels, size_t());
- MOCK_CONST_METHOD0(SampleRateHz, int());
+ MOCK_METHOD(void, Die, ());
+ MOCK_METHOD(int,
+ DecodeInternal,
+ (const uint8_t*, size_t, int, int16_t*, SpeechType*),
+ (override));
+ MOCK_METHOD(bool, HasDecodePlc, (), (const, override));
+ MOCK_METHOD(size_t, DecodePlc, (size_t, int16_t*), (override));
+ MOCK_METHOD(void, Reset, (), (override));
+ MOCK_METHOD(int, ErrorCode, (), (override));
+ MOCK_METHOD(int, PacketDuration, (const uint8_t*, size_t), (const, override));
+ MOCK_METHOD(size_t, Channels, (), (const, override));
+ MOCK_METHOD(int, SampleRateHz, (), (const, override));
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/test/mock_audio_decoder_factory.h b/chromium/third_party/webrtc/test/mock_audio_decoder_factory.h
index cdf29195433..cdb03d3f380 100644
--- a/chromium/third_party/webrtc/test/mock_audio_decoder_factory.h
+++ b/chromium/third_party/webrtc/test/mock_audio_decoder_factory.h
@@ -24,19 +24,23 @@ namespace webrtc {
class MockAudioDecoderFactory : public AudioDecoderFactory {
public:
- MOCK_METHOD0(GetSupportedDecoders, std::vector<AudioCodecSpec>());
- MOCK_METHOD1(IsSupportedDecoder, bool(const SdpAudioFormat&));
+ MOCK_METHOD(std::vector<AudioCodecSpec>,
+ GetSupportedDecoders,
+ (),
+ (override));
+ MOCK_METHOD(bool, IsSupportedDecoder, (const SdpAudioFormat&), (override));
std::unique_ptr<AudioDecoder> MakeAudioDecoder(
const SdpAudioFormat& format,
- absl::optional<AudioCodecPairId> codec_pair_id) {
+ absl::optional<AudioCodecPairId> codec_pair_id) override {
std::unique_ptr<AudioDecoder> return_value;
MakeAudioDecoderMock(format, codec_pair_id, &return_value);
return return_value;
}
- MOCK_METHOD3(MakeAudioDecoderMock,
- void(const SdpAudioFormat& format,
- absl::optional<AudioCodecPairId> codec_pair_id,
- std::unique_ptr<AudioDecoder>* return_value));
+ MOCK_METHOD(void,
+ MakeAudioDecoderMock,
+ (const SdpAudioFormat& format,
+ absl::optional<AudioCodecPairId> codec_pair_id,
+ std::unique_ptr<AudioDecoder>*));
// Creates a MockAudioDecoderFactory with no formats and that may not be
// invoked to create a codec - useful for initializing a voice engine, for
diff --git a/chromium/third_party/webrtc/test/mock_audio_encoder.h b/chromium/third_party/webrtc/test/mock_audio_encoder.h
index 2dfd15ca981..9d9db0d66cd 100644
--- a/chromium/third_party/webrtc/test/mock_audio_encoder.h
+++ b/chromium/third_party/webrtc/test/mock_audio_encoder.h
@@ -27,37 +27,44 @@ class MockAudioEncoder : public AudioEncoder {
// http://crbug.com/428099.
MockAudioEncoder();
~MockAudioEncoder();
- MOCK_METHOD1(Mark, void(std::string desc));
- MOCK_CONST_METHOD0(SampleRateHz, int());
- MOCK_CONST_METHOD0(NumChannels, size_t());
- MOCK_CONST_METHOD0(RtpTimestampRateHz, int());
- MOCK_CONST_METHOD0(Num10MsFramesInNextPacket, size_t());
- MOCK_CONST_METHOD0(Max10MsFramesInAPacket, size_t());
- MOCK_CONST_METHOD0(GetTargetBitrate, int());
- MOCK_CONST_METHOD0(GetFrameLengthRange,
- absl::optional<std::pair<TimeDelta, TimeDelta>>());
-
- MOCK_METHOD0(Reset, void());
- MOCK_METHOD1(SetFec, bool(bool enable));
- MOCK_METHOD1(SetDtx, bool(bool enable));
- MOCK_METHOD1(SetApplication, bool(Application application));
- MOCK_METHOD1(SetMaxPlaybackRate, void(int frequency_hz));
- MOCK_METHOD1(SetMaxBitrate, void(int max_bps));
- MOCK_METHOD1(SetMaxPayloadSize, void(int max_payload_size_bytes));
- MOCK_METHOD2(OnReceivedUplinkBandwidth,
- void(int target_audio_bitrate_bps,
- absl::optional<int64_t> probing_interval_ms));
- MOCK_METHOD1(OnReceivedUplinkPacketLossFraction,
- void(float uplink_packet_loss_fraction));
-
- MOCK_METHOD2(EnableAudioNetworkAdaptor,
- bool(const std::string& config_string, RtcEventLog* event_log));
+ MOCK_METHOD(int, SampleRateHz, (), (const, override));
+ MOCK_METHOD(size_t, NumChannels, (), (const, override));
+ MOCK_METHOD(int, RtpTimestampRateHz, (), (const, override));
+ MOCK_METHOD(size_t, Num10MsFramesInNextPacket, (), (const, override));
+ MOCK_METHOD(size_t, Max10MsFramesInAPacket, (), (const, override));
+ MOCK_METHOD(int, GetTargetBitrate, (), (const, override));
+ MOCK_METHOD((absl::optional<std::pair<TimeDelta, TimeDelta>>),
+ GetFrameLengthRange,
+ (),
+ (const, override));
+
+ MOCK_METHOD(void, Reset, (), (override));
+ MOCK_METHOD(bool, SetFec, (bool enable), (override));
+ MOCK_METHOD(bool, SetDtx, (bool enable), (override));
+ MOCK_METHOD(bool, SetApplication, (Application application), (override));
+ MOCK_METHOD(void, SetMaxPlaybackRate, (int frequency_hz), (override));
+ MOCK_METHOD(void,
+ OnReceivedUplinkBandwidth,
+ (int target_audio_bitrate_bps,
+ absl::optional<int64_t> probing_interval_ms),
+ (override));
+ MOCK_METHOD(void,
+ OnReceivedUplinkPacketLossFraction,
+ (float uplink_packet_loss_fraction),
+ (override));
+
+ MOCK_METHOD(bool,
+ EnableAudioNetworkAdaptor,
+ (const std::string& config_string, RtcEventLog*),
+ (override));
// Note, we explicitly chose not to create a mock for the Encode method.
- MOCK_METHOD3(EncodeImpl,
- EncodedInfo(uint32_t timestamp,
- rtc::ArrayView<const int16_t> audio,
- rtc::Buffer* encoded));
+ MOCK_METHOD(EncodedInfo,
+ EncodeImpl,
+ (uint32_t timestamp,
+ rtc::ArrayView<const int16_t> audio,
+ rtc::Buffer*),
+ (override));
class FakeEncoding {
public:
diff --git a/chromium/third_party/webrtc/test/mock_audio_encoder_factory.h b/chromium/third_party/webrtc/test/mock_audio_encoder_factory.h
index 3e774a39e9d..392a4c11e2e 100644
--- a/chromium/third_party/webrtc/test/mock_audio_encoder_factory.h
+++ b/chromium/third_party/webrtc/test/mock_audio_encoder_factory.h
@@ -24,23 +24,29 @@ namespace webrtc {
class MockAudioEncoderFactory
: public ::testing::NiceMock<AudioEncoderFactory> {
public:
- MOCK_METHOD0(GetSupportedEncoders, std::vector<AudioCodecSpec>());
- MOCK_METHOD1(QueryAudioEncoder,
- absl::optional<AudioCodecInfo>(const SdpAudioFormat& format));
+ MOCK_METHOD(std::vector<AudioCodecSpec>,
+ GetSupportedEncoders,
+ (),
+ (override));
+ MOCK_METHOD(absl::optional<AudioCodecInfo>,
+ QueryAudioEncoder,
+ (const SdpAudioFormat& format),
+ (override));
std::unique_ptr<AudioEncoder> MakeAudioEncoder(
int payload_type,
const SdpAudioFormat& format,
- absl::optional<AudioCodecPairId> codec_pair_id) {
+ absl::optional<AudioCodecPairId> codec_pair_id) override {
std::unique_ptr<AudioEncoder> return_value;
MakeAudioEncoderMock(payload_type, format, codec_pair_id, &return_value);
return return_value;
}
- MOCK_METHOD4(MakeAudioEncoderMock,
- void(int payload_type,
- const SdpAudioFormat& format,
- absl::optional<AudioCodecPairId> codec_pair_id,
- std::unique_ptr<AudioEncoder>* return_value));
+ MOCK_METHOD(void,
+ MakeAudioEncoderMock,
+ (int payload_type,
+ const SdpAudioFormat& format,
+ absl::optional<AudioCodecPairId> codec_pair_id,
+ std::unique_ptr<AudioEncoder>*));
// Creates a MockAudioEncoderFactory with no formats and that may not be
// invoked to create a codec - useful for initializing a voice engine, for
diff --git a/chromium/third_party/webrtc/test/mock_transport.h b/chromium/third_party/webrtc/test/mock_transport.h
index 5ffc10425b6..9c4dc4bf8dc 100644
--- a/chromium/third_party/webrtc/test/mock_transport.h
+++ b/chromium/third_party/webrtc/test/mock_transport.h
@@ -25,7 +25,7 @@ class MockTransport : public Transport {
SendRtp,
(const uint8_t*, size_t, const PacketOptions&),
(override));
- MOCK_METHOD(bool, SendRtcp, (const uint8_t* data, size_t len), (override));
+ MOCK_METHOD(bool, SendRtcp, (const uint8_t*, size_t len), (override));
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/test/network/BUILD.gn b/chromium/third_party/webrtc/test/network/BUILD.gn
index 4b01479c9b6..9e810bfc530 100644
--- a/chromium/third_party/webrtc/test/network/BUILD.gn
+++ b/chromium/third_party/webrtc/test/network/BUILD.gn
@@ -54,6 +54,8 @@ rtc_library("emulated_network") {
"../../system_wrappers",
"../scenario:column_printer",
"../time_controller",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/types:optional",
@@ -114,8 +116,8 @@ rtc_library("cross_traffic_unittest") {
"../../rtc_base:logging",
"../../rtc_base:rtc_event",
"//test/time_controller:time_controller",
- "//third_party/abseil-cpp/absl/memory",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
}
rtc_library("feedback_generator") {
@@ -130,8 +132,8 @@ rtc_library("feedback_generator") {
"../../call:simulated_network",
"../../rtc_base:checks",
"../time_controller",
- "//third_party/abseil-cpp/absl/memory",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
}
rtc_library("feedback_generator_unittest") {
diff --git a/chromium/third_party/webrtc/test/network/cross_traffic.cc b/chromium/third_party/webrtc/test/network/cross_traffic.cc
index be0c3d32863..56e7635142e 100644
--- a/chromium/third_party/webrtc/test/network/cross_traffic.cc
+++ b/chromium/third_party/webrtc/test/network/cross_traffic.cc
@@ -144,15 +144,16 @@ void TcpMessageRouteImpl::SendMessage(size_t size,
cwnd_ = 10;
ssthresh_ = INFINITY;
}
- size_t data_left = size;
- size_t kMaxPacketSize = 1200;
- size_t kMinPacketSize = 4;
+ int64_t data_left = static_cast<int64_t>(size);
+ int64_t kMaxPacketSize = 1200;
+ int64_t kMinPacketSize = 4;
Message message{std::move(handler)};
while (data_left > 0) {
- size_t packet_size =
- std::max(kMinPacketSize, std::min(data_left, kMaxPacketSize));
+ int64_t packet_size = std::min(data_left, kMaxPacketSize);
int fragment_id = next_fragment_id_++;
- pending_.push_back(MessageFragment{fragment_id, packet_size});
+ pending_.push_back(MessageFragment{
+ fragment_id,
+ static_cast<size_t>(std::max(kMinPacketSize, packet_size))});
message.pending_fragment_ids.insert(fragment_id);
data_left -= packet_size;
}
diff --git a/chromium/third_party/webrtc/test/network/network_emulation_unittest.cc b/chromium/third_party/webrtc/test/network/network_emulation_unittest.cc
index 58346abb93c..9e630de9cb9 100644
--- a/chromium/third_party/webrtc/test/network/network_emulation_unittest.cc
+++ b/chromium/third_party/webrtc/test/network/network_emulation_unittest.cc
@@ -70,7 +70,7 @@ class SocketReader : public sigslot::has_slots<> {
class MockReceiver : public EmulatedNetworkReceiverInterface {
public:
- MOCK_METHOD1(OnPacketReceived, void(EmulatedIpPacket packet));
+ MOCK_METHOD(void, OnPacketReceived, (EmulatedIpPacket packet), (override));
};
class NetworkEmulationManagerThreeNodesRoutingTest : public ::testing::Test {
diff --git a/chromium/third_party/webrtc/test/pc/e2e/BUILD.gn b/chromium/third_party/webrtc/test/pc/e2e/BUILD.gn
index d340f1a00cc..fea59bcb87e 100644
--- a/chromium/third_party/webrtc/test/pc/e2e/BUILD.gn
+++ b/chromium/third_party/webrtc/test/pc/e2e/BUILD.gn
@@ -8,632 +8,681 @@
import("../../../webrtc.gni")
-group("e2e") {
- testonly = true
-
- deps = [
- ":default_encoded_image_data_injector",
- ":encoded_image_data_injector_api",
- ":example_video_quality_analyzer",
- ":id_generator",
- ":quality_analyzing_video_decoder",
- ":quality_analyzing_video_encoder",
- ":single_process_encoded_image_data_injector",
- ]
- if (rtc_include_tests) {
- deps += [
- ":peerconnection_quality_test",
- ":test_peer",
- ":video_quality_analyzer_injection_helper",
- ]
- }
-}
-
-if (rtc_include_tests) {
- group("e2e_unittests") {
+if (!build_with_chromium) {
+ group("e2e") {
testonly = true
deps = [
- ":default_encoded_image_data_injector_unittest",
- ":default_video_quality_analyzer_test",
- ":peer_connection_e2e_smoke_test",
- ":single_process_encoded_image_data_injector_unittest",
+ ":default_encoded_image_data_injector",
+ ":encoded_image_data_injector_api",
+ ":example_video_quality_analyzer",
+ ":id_generator",
+ ":quality_analyzing_video_decoder",
+ ":quality_analyzing_video_encoder",
+ ":single_process_encoded_image_data_injector",
]
+ if (rtc_include_tests) {
+ deps += [
+ ":peerconnection_quality_test",
+ ":test_peer",
+ ":video_quality_analyzer_injection_helper",
+ ]
+ }
}
-}
-
-rtc_library("peer_connection_quality_test_params") {
- visibility = [ "*" ]
- testonly = true
- sources = [ "peer_connection_quality_test_params.h" ]
-
- deps = [
- "../../../api:callfactory_api",
- "../../../api:fec_controller_api",
- "../../../api:libjingle_peerconnection_api",
- "../../../api:packet_socket_factory",
- "../../../api:peer_connection_quality_test_fixture_api",
- "../../../api/rtc_event_log",
- "../../../api/task_queue",
- "../../../api/transport:network_control",
- "../../../api/transport/media:media_transport_interface",
- "../../../api/video_codecs:video_codecs_api",
- "../../../rtc_base",
- ]
-}
-
-rtc_library("encoded_image_data_injector_api") {
- visibility = [ "*" ]
- testonly = true
- sources = [ "analyzer/video/encoded_image_data_injector.h" ]
-
- deps = [ "../../../api/video:encoded_image" ]
-}
-rtc_library("default_encoded_image_data_injector") {
- visibility = [ "*" ]
- testonly = true
- sources = [
- "analyzer/video/default_encoded_image_data_injector.cc",
- "analyzer/video/default_encoded_image_data_injector.h",
- ]
-
- deps = [
- ":encoded_image_data_injector_api",
- "../../../api/video:encoded_image",
- "../../../rtc_base:checks",
- "../../../rtc_base:criticalsection",
- "//third_party/abseil-cpp/absl/memory",
- ]
-}
+ if (rtc_include_tests) {
+ group("e2e_unittests") {
+ testonly = true
-rtc_library("single_process_encoded_image_data_injector") {
- visibility = [ "*" ]
- testonly = true
- sources = [
- "analyzer/video/single_process_encoded_image_data_injector.cc",
- "analyzer/video/single_process_encoded_image_data_injector.h",
- ]
-
- deps = [
- ":encoded_image_data_injector_api",
- "../../../api/video:encoded_image",
- "../../../rtc_base:checks",
- "../../../rtc_base:criticalsection",
- "//third_party/abseil-cpp/absl/memory",
- ]
-}
+ deps = [
+ ":default_encoded_image_data_injector_unittest",
+ ":default_video_quality_analyzer_test",
+ ":multi_head_queue_test",
+ ":peer_connection_e2e_smoke_test",
+ ":single_process_encoded_image_data_injector_unittest",
+ ]
+ }
+ }
-rtc_library("id_generator") {
- visibility = [ "*" ]
- testonly = true
- sources = [
- "analyzer/video/id_generator.cc",
- "analyzer/video/id_generator.h",
- ]
- deps = []
-}
+ rtc_library("peer_connection_quality_test_params") {
+ visibility = [ "*" ]
+ testonly = true
+ sources = [ "peer_connection_quality_test_params.h" ]
-rtc_library("simulcast_dummy_buffer_helper") {
- visibility = [ "*" ]
- testonly = true
- sources = [
- "analyzer/video/simulcast_dummy_buffer_helper.cc",
- "analyzer/video/simulcast_dummy_buffer_helper.h",
- ]
- deps = [
- "../../../api/video:video_frame",
- "../../../api/video:video_frame_i420",
- ]
-}
+ deps = [
+ "../../../api:callfactory_api",
+ "../../../api:fec_controller_api",
+ "../../../api:libjingle_peerconnection_api",
+ "../../../api:packet_socket_factory",
+ "../../../api:peer_connection_quality_test_fixture_api",
+ "../../../api/rtc_event_log",
+ "../../../api/task_queue",
+ "../../../api/transport:network_control",
+ "../../../api/video_codecs:video_codecs_api",
+ "../../../rtc_base",
+ ]
+ }
-rtc_library("quality_analyzing_video_decoder") {
- visibility = [ "*" ]
- testonly = true
- sources = [
- "analyzer/video/quality_analyzing_video_decoder.cc",
- "analyzer/video/quality_analyzing_video_decoder.h",
- ]
- deps = [
- ":encoded_image_data_injector_api",
- ":id_generator",
- ":simulcast_dummy_buffer_helper",
- "../../../api:video_quality_analyzer_api",
- "../../../api/video:encoded_image",
- "../../../api/video:video_frame",
- "../../../api/video:video_frame_i420",
- "../../../api/video:video_rtp_headers",
- "../../../api/video_codecs:video_codecs_api",
- "../../../modules/video_coding:video_codec_interface",
- "../../../rtc_base:criticalsection",
- "../../../rtc_base:logging",
- "//third_party/abseil-cpp/absl/types:optional",
- ]
-}
+ rtc_library("encoded_image_data_injector_api") {
+ visibility = [ "*" ]
+ testonly = true
+ sources = [ "analyzer/video/encoded_image_data_injector.h" ]
-rtc_library("quality_analyzing_video_encoder") {
- visibility = [ "*" ]
- testonly = true
- sources = [
- "analyzer/video/quality_analyzing_video_encoder.cc",
- "analyzer/video/quality_analyzing_video_encoder.h",
- ]
- deps = [
- ":encoded_image_data_injector_api",
- ":id_generator",
- "../../../api:video_quality_analyzer_api",
- "../../../api/video:encoded_image",
- "../../../api/video:video_frame",
- "../../../api/video:video_rtp_headers",
- "../../../api/video_codecs:video_codecs_api",
- "../../../modules/video_coding:video_codec_interface",
- "../../../rtc_base:criticalsection",
- "../../../rtc_base:logging",
- ]
-}
+ deps = [ "../../../api/video:encoded_image" ]
+ }
-if (rtc_include_tests) {
- rtc_library("video_quality_analyzer_injection_helper") {
+ rtc_library("default_encoded_image_data_injector") {
visibility = [ "*" ]
testonly = true
sources = [
- "analyzer/video/video_quality_analyzer_injection_helper.cc",
- "analyzer/video/video_quality_analyzer_injection_helper.h",
+ "analyzer/video/default_encoded_image_data_injector.cc",
+ "analyzer/video/default_encoded_image_data_injector.h",
]
+
deps = [
":encoded_image_data_injector_api",
- ":id_generator",
- ":quality_analyzing_video_decoder",
- ":quality_analyzing_video_encoder",
- ":simulcast_dummy_buffer_helper",
- "../..:test_renderer",
- "../../../api:peer_connection_quality_test_fixture_api",
- "../../../api:stats_observer_interface",
- "../../../api:video_quality_analyzer_api",
- "../../../api/video:video_frame",
- "../../../api/video:video_rtp_headers",
- "../../../api/video_codecs:video_codecs_api",
+ "../../../api/video:encoded_image",
+ "../../../rtc_base:checks",
"../../../rtc_base:criticalsection",
- "../../../test:video_test_common",
- "../../../test:video_test_support",
- "//third_party/abseil-cpp/absl/memory",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
}
- rtc_library("echo_emulation") {
+ rtc_library("single_process_encoded_image_data_injector") {
visibility = [ "*" ]
testonly = true
sources = [
- "echo/echo_emulation.cc",
- "echo/echo_emulation.h",
+ "analyzer/video/single_process_encoded_image_data_injector.cc",
+ "analyzer/video/single_process_encoded_image_data_injector.h",
]
+
deps = [
- "../../../api:peer_connection_quality_test_fixture_api",
- "../../../modules/audio_device:audio_device_impl",
- "../../../rtc_base:rtc_base_approved",
+ ":encoded_image_data_injector_api",
+ "../../../api/video:encoded_image",
+ "../../../rtc_base:checks",
+ "../../../rtc_base:criticalsection",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
}
- rtc_library("test_peer") {
+ rtc_library("id_generator") {
visibility = [ "*" ]
testonly = true
sources = [
- "test_peer.cc",
- "test_peer.h",
+ "analyzer/video/id_generator.cc",
+ "analyzer/video/id_generator.h",
+ ]
+ deps = []
+ }
+
+ rtc_library("simulcast_dummy_buffer_helper") {
+ visibility = [ "*" ]
+ testonly = true
+ sources = [
+ "analyzer/video/simulcast_dummy_buffer_helper.cc",
+ "analyzer/video/simulcast_dummy_buffer_helper.h",
]
deps = [
- ":peer_configurer",
- ":peer_connection_quality_test_params",
- "../../../api:frame_generator_api",
- "../../../api:peer_connection_quality_test_fixture_api",
- "../../../api:scoped_refptr",
- "../../../modules/audio_processing:api",
- "../../../pc:peerconnection_wrapper",
- "//third_party/abseil-cpp/absl/memory",
- "//third_party/abseil-cpp/absl/types:variant",
+ "../../../api/video:video_frame",
+ "../../../api/video:video_frame_i420",
]
}
- rtc_library("test_peer_factory") {
+ rtc_library("quality_analyzing_video_decoder") {
visibility = [ "*" ]
testonly = true
sources = [
- "test_peer_factory.cc",
- "test_peer_factory.h",
+ "analyzer/video/quality_analyzing_video_decoder.cc",
+ "analyzer/video/quality_analyzing_video_decoder.h",
]
deps = [
- ":echo_emulation",
- ":peer_configurer",
- ":peer_connection_quality_test_params",
- ":quality_analyzing_video_encoder",
- ":test_peer",
- ":video_quality_analyzer_injection_helper",
- "../..:copy_to_file_audio_capturer",
- "../../../api:peer_connection_quality_test_fixture_api",
- "../../../api/rtc_event_log:rtc_event_log_factory",
- "../../../api/task_queue:default_task_queue_factory",
- "../../../api/video_codecs:builtin_video_decoder_factory",
- "../../../api/video_codecs:builtin_video_encoder_factory",
- "../../../media:rtc_audio_video",
- "../../../media:rtc_media_engine_defaults",
- "../../../modules/audio_device:audio_device_impl",
- "../../../modules/audio_processing/aec_dump",
- "../../../p2p:rtc_p2p",
- "../../../rtc_base:rtc_task_queue",
- "//third_party/abseil-cpp/absl/memory",
+ ":encoded_image_data_injector_api",
+ ":id_generator",
+ ":simulcast_dummy_buffer_helper",
+ "../../../api:video_quality_analyzer_api",
+ "../../../api/video:encoded_image",
+ "../../../api/video:video_frame",
+ "../../../api/video:video_frame_i420",
+ "../../../api/video:video_rtp_headers",
+ "../../../api/video_codecs:video_codecs_api",
+ "../../../modules/video_coding:video_codec_interface",
+ "../../../rtc_base:criticalsection",
+ "../../../rtc_base:logging",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
]
}
- rtc_library("media_helper") {
+ rtc_library("quality_analyzing_video_encoder") {
visibility = [ "*" ]
testonly = true
sources = [
- "media/media_helper.cc",
- "media/media_helper.h",
- "media/test_video_capturer_video_track_source.h",
+ "analyzer/video/quality_analyzing_video_encoder.cc",
+ "analyzer/video/quality_analyzing_video_encoder.h",
]
deps = [
- ":peer_configurer",
- ":test_peer",
- ":video_quality_analyzer_injection_helper",
- "../..:fileutils",
- "../..:platform_video_capturer",
- "../..:video_test_common",
- "../../../api:create_frame_generator",
- "../../../api:frame_generator_api",
- "../../../api:media_stream_interface",
- "../../../api:peer_connection_quality_test_fixture_api",
+ ":encoded_image_data_injector_api",
+ ":id_generator",
+ "../../../api:video_quality_analyzer_api",
+ "../../../api/video:encoded_image",
"../../../api/video:video_frame",
- "../../../pc:peerconnection",
- "//third_party/abseil-cpp/absl/types:variant",
+ "../../../api/video:video_rtp_headers",
+ "../../../api/video_codecs:video_codecs_api",
+ "../../../modules/video_coding:video_codec_interface",
+ "../../../rtc_base:criticalsection",
+ "../../../rtc_base:logging",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
+ }
+
+ if (rtc_include_tests) {
+ rtc_library("video_quality_analyzer_injection_helper") {
+ visibility = [ "*" ]
+ testonly = true
+ sources = [
+ "analyzer/video/video_quality_analyzer_injection_helper.cc",
+ "analyzer/video/video_quality_analyzer_injection_helper.h",
+ ]
+ deps = [
+ ":encoded_image_data_injector_api",
+ ":id_generator",
+ ":quality_analyzing_video_decoder",
+ ":quality_analyzing_video_encoder",
+ ":simulcast_dummy_buffer_helper",
+ "../..:test_renderer",
+ "../../../api:array_view",
+ "../../../api:peer_connection_quality_test_fixture_api",
+ "../../../api:stats_observer_interface",
+ "../../../api:video_quality_analyzer_api",
+ "../../../api/video:video_frame",
+ "../../../api/video:video_rtp_headers",
+ "../../../api/video_codecs:video_codecs_api",
+ "../../../rtc_base:criticalsection",
+ "../../../test:video_test_common",
+ "../../../test:video_test_support",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/memory",
+ "//third_party/abseil-cpp/absl/strings",
+ ]
+ }
+
+ rtc_library("echo_emulation") {
+ visibility = [ "*" ]
+ testonly = true
+ sources = [
+ "echo/echo_emulation.cc",
+ "echo/echo_emulation.h",
+ ]
+ deps = [
+ "../../../api:peer_connection_quality_test_fixture_api",
+ "../../../modules/audio_device:audio_device_impl",
+ "../../../rtc_base:rtc_base_approved",
+ ]
+ }
+
+ rtc_library("test_peer") {
+ visibility = [ "*" ]
+ testonly = true
+ sources = [
+ "test_peer.cc",
+ "test_peer.h",
+ ]
+ deps = [
+ ":peer_configurer",
+ ":peer_connection_quality_test_params",
+ "../../../api:frame_generator_api",
+ "../../../api:peer_connection_quality_test_fixture_api",
+ "../../../api:scoped_refptr",
+ "../../../modules/audio_processing:api",
+ "../../../pc:peerconnection_wrapper",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/memory",
+ "//third_party/abseil-cpp/absl/types:variant",
+ ]
+ }
+
+ rtc_library("test_peer_factory") {
+ visibility = [ "*" ]
+ testonly = true
+ sources = [
+ "test_peer_factory.cc",
+ "test_peer_factory.h",
+ ]
+ deps = [
+ ":echo_emulation",
+ ":peer_configurer",
+ ":peer_connection_quality_test_params",
+ ":quality_analyzing_video_encoder",
+ ":test_peer",
+ ":video_quality_analyzer_injection_helper",
+ "../..:copy_to_file_audio_capturer",
+ "../../../api:peer_connection_quality_test_fixture_api",
+ "../../../api/rtc_event_log:rtc_event_log_factory",
+ "../../../api/task_queue:default_task_queue_factory",
+ "../../../api/video_codecs:builtin_video_decoder_factory",
+ "../../../api/video_codecs:builtin_video_encoder_factory",
+ "../../../media:rtc_audio_video",
+ "../../../media:rtc_media_engine_defaults",
+ "../../../modules/audio_device:audio_device_impl",
+ "../../../modules/audio_processing/aec_dump",
+ "../../../p2p:rtc_p2p",
+ "../../../rtc_base:rtc_task_queue",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/memory",
+ "//third_party/abseil-cpp/absl/strings",
+ ]
+ }
+
+ rtc_library("media_helper") {
+ visibility = [ "*" ]
+ testonly = true
+ sources = [
+ "media/media_helper.cc",
+ "media/media_helper.h",
+ "media/test_video_capturer_video_track_source.h",
+ ]
+ deps = [
+ ":peer_configurer",
+ ":test_peer",
+ ":video_quality_analyzer_injection_helper",
+ "../..:fileutils",
+ "../..:platform_video_capturer",
+ "../..:video_test_common",
+ "../../../api:create_frame_generator",
+ "../../../api:frame_generator_api",
+ "../../../api:media_stream_interface",
+ "../../../api:peer_connection_quality_test_fixture_api",
+ "../../../api/video:video_frame",
+ "../../../pc:peerconnection",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:variant" ]
+ }
+
+ rtc_library("peer_configurer") {
+ visibility = [ "*" ]
+ testonly = true
+ sources = [
+ "peer_configurer.cc",
+ "peer_configurer.h",
+ ]
+ deps = [
+ ":peer_connection_quality_test_params",
+ "../..:fileutils",
+ "../../../api:callfactory_api",
+ "../../../api:create_peer_connection_quality_test_frame_generator",
+ "../../../api:fec_controller_api",
+ "../../../api:packet_socket_factory",
+ "../../../api:peer_connection_quality_test_fixture_api",
+ "../../../api/rtc_event_log",
+ "../../../api/task_queue",
+ "../../../api/transport:network_control",
+ "../../../api/video_codecs:video_codecs_api",
+ "../../../rtc_base",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
+ }
+
+ rtc_library("test_activities_executor") {
+ visibility = [ "*" ]
+ testonly = true
+ sources = [
+ "test_activities_executor.cc",
+ "test_activities_executor.h",
+ ]
+ deps = [
+ "../../../api/units:time_delta",
+ "../../../api/units:timestamp",
+ "../../../rtc_base:checks",
+ "../../../rtc_base:criticalsection",
+ "../../../rtc_base:logging",
+ "../../../rtc_base:rtc_base_approved",
+ "../../../rtc_base:task_queue_for_test",
+ "../../../rtc_base/task_utils:repeating_task",
+ "../../../system_wrappers",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/memory",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+ }
+
+ rtc_library("peerconnection_quality_test") {
+ visibility = [ "*" ]
+ testonly = true
+
+ sources = [
+ "peer_connection_quality_test.cc",
+ "peer_connection_quality_test.h",
+ ]
+ deps = [
+ ":analyzer_helper",
+ ":default_audio_quality_analyzer",
+ ":default_video_quality_analyzer",
+ ":media_helper",
+ ":peer_configurer",
+ ":peer_connection_quality_test_params",
+ ":sdp_changer",
+ ":single_process_encoded_image_data_injector",
+ ":stats_poller",
+ ":test_activities_executor",
+ ":test_peer",
+ ":test_peer_factory",
+ ":video_quality_analyzer_injection_helper",
+ ":video_quality_metrics_reporter",
+ "../..:field_trial",
+ "../..:fileutils",
+ "../..:perf_test",
+ "../../../api:audio_quality_analyzer_api",
+ "../../../api:libjingle_peerconnection_api",
+ "../../../api:media_stream_interface",
+ "../../../api:peer_connection_quality_test_fixture_api",
+ "../../../api:rtc_event_log_output_file",
+ "../../../api:scoped_refptr",
+ "../../../api:video_quality_analyzer_api",
+ "../../../api/rtc_event_log",
+ "../../../api/task_queue",
+ "../../../api/task_queue:default_task_queue_factory",
+ "../../../api/units:time_delta",
+ "../../../api/units:timestamp",
+ "../../../pc:pc_test_utils",
+ "../../../pc:peerconnection",
+ "../../../rtc_base",
+ "../../../rtc_base:gunit_helpers",
+ "../../../rtc_base:macromagic",
+ "../../../rtc_base:rtc_base_approved",
+ "../../../rtc_base:safe_conversions",
+ "../../../rtc_base:task_queue_for_test",
+ "../../../system_wrappers",
+ "../../../system_wrappers:field_trial",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
+ }
+
+ rtc_library("single_process_encoded_image_data_injector_unittest") {
+ testonly = true
+ sources = [
+ "analyzer/video/single_process_encoded_image_data_injector_unittest.cc",
+ ]
+ deps = [
+ ":single_process_encoded_image_data_injector",
+ "../../../api/video:encoded_image",
+ "../../../rtc_base:rtc_base_approved",
+ "../../../test:test_support",
+ ]
+ }
+
+ rtc_library("default_encoded_image_data_injector_unittest") {
+ testonly = true
+ sources =
+ [ "analyzer/video/default_encoded_image_data_injector_unittest.cc" ]
+ deps = [
+ ":default_encoded_image_data_injector",
+ "../../../api/video:encoded_image",
+ "../../../rtc_base:rtc_base_approved",
+ "../../../test:test_support",
+ ]
+ }
+
+ peer_connection_e2e_smoke_test_resources = [
+ "../../../resources/pc_quality_smoke_test_alice_source.wav",
+ "../../../resources/pc_quality_smoke_test_bob_source.wav",
]
+ if (is_ios) {
+ bundle_data("peer_connection_e2e_smoke_test_resources_bundle_data") {
+ testonly = true
+ sources = peer_connection_e2e_smoke_test_resources
+ outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ]
+ }
+ }
+
+ rtc_library("peer_connection_e2e_smoke_test") {
+ testonly = true
+
+ sources = [ "peer_connection_e2e_smoke_test.cc" ]
+ deps = [
+ ":default_audio_quality_analyzer",
+ ":default_video_quality_analyzer",
+ ":network_quality_metrics_reporter",
+ "../../../api:callfactory_api",
+ "../../../api:create_network_emulation_manager",
+ "../../../api:create_peer_connection_quality_test_frame_generator",
+ "../../../api:create_peerconnection_quality_test_fixture",
+ "../../../api:libjingle_peerconnection_api",
+ "../../../api:media_stream_interface",
+ "../../../api:network_emulation_manager_api",
+ "../../../api:peer_connection_quality_test_fixture_api",
+ "../../../api:scoped_refptr",
+ "../../../api:simulated_network_api",
+ "../../../api/audio_codecs:builtin_audio_decoder_factory",
+ "../../../api/audio_codecs:builtin_audio_encoder_factory",
+ "../../../api/video_codecs:builtin_video_decoder_factory",
+ "../../../api/video_codecs:builtin_video_encoder_factory",
+ "../../../call:simulated_network",
+ "../../../media:rtc_audio_video",
+ "../../../modules/audio_device:audio_device_impl",
+ "../../../p2p:rtc_p2p",
+ "../../../pc:pc_test_utils",
+ "../../../pc:peerconnection_wrapper",
+ "../../../rtc_base",
+ "../../../rtc_base:gunit_helpers",
+ "../../../rtc_base:logging",
+ "../../../rtc_base:rtc_event",
+ "../../../system_wrappers:field_trial",
+ "../../../test:field_trial",
+ "../../../test:fileutils",
+ "../../../test:test_support",
+ ]
+ data = peer_connection_e2e_smoke_test_resources
+ if (is_ios) {
+ deps += [ ":peer_connection_e2e_smoke_test_resources_bundle_data" ]
+ }
+ }
+
+ rtc_library("stats_poller") {
+ visibility = [ "*" ]
+ testonly = true
+ sources = [
+ "stats_poller.cc",
+ "stats_poller.h",
+ ]
+ deps = [
+ ":test_peer",
+ "../../../api:libjingle_peerconnection_api",
+ "../../../api:rtc_stats_api",
+ "../../../api:stats_observer_interface",
+ "../../../rtc_base:logging",
+ ]
+ }
+
+ rtc_library("default_video_quality_analyzer_test") {
+ testonly = true
+ sources = [ "analyzer/video/default_video_quality_analyzer_test.cc" ]
+ deps = [
+ ":default_video_quality_analyzer",
+ "../..:test_support",
+ "../../../api:create_frame_generator",
+ "../../../api:rtp_packet_info",
+ "../../../api/video:encoded_image",
+ "../../../api/video:video_frame",
+ "../../../api/video:video_frame_i420",
+ "../../../modules/rtp_rtcp:rtp_rtcp_format",
+ "../../../rtc_base:stringutils",
+ "../../../system_wrappers",
+ ]
+ }
+
+ rtc_library("multi_head_queue_test") {
+ testonly = true
+ sources = [ "analyzer/video/multi_head_queue_test.cc" ]
+ deps = [
+ ":multi_head_queue",
+ "../../../test:test_support",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+ }
}
- rtc_library("peer_configurer") {
+ rtc_library("analyzer_helper") {
visibility = [ "*" ]
- testonly = true
sources = [
- "peer_configurer.cc",
- "peer_configurer.h",
+ "analyzer_helper.cc",
+ "analyzer_helper.h",
]
deps = [
- ":peer_connection_quality_test_params",
- "../..:fileutils",
- "../../../api:callfactory_api",
- "../../../api:create_peer_connection_quality_test_frame_generator",
- "../../../api:fec_controller_api",
- "../../../api:packet_socket_factory",
- "../../../api:peer_connection_quality_test_fixture_api",
- "../../../api/rtc_event_log",
- "../../../api/task_queue",
- "../../../api/transport:network_control",
- "../../../api/transport/media:media_transport_interface",
- "../../../api/video_codecs:video_codecs_api",
- "../../../rtc_base",
- "//third_party/abseil-cpp/absl/strings",
+ "../../../api:track_id_stream_label_map",
+ "../../../rtc_base:macromagic",
+ "../../../rtc_base/synchronization:sequence_checker",
]
}
- rtc_library("test_activities_executor") {
+ rtc_library("default_audio_quality_analyzer") {
visibility = [ "*" ]
testonly = true
sources = [
- "test_activities_executor.cc",
- "test_activities_executor.h",
+ "analyzer/audio/default_audio_quality_analyzer.cc",
+ "analyzer/audio/default_audio_quality_analyzer.h",
]
+
deps = [
+ "../..:perf_test",
+ "../../../api:audio_quality_analyzer_api",
+ "../../../api:rtc_stats_api",
+ "../../../api:stats_observer_interface",
+ "../../../api:track_id_stream_label_map",
"../../../api/units:time_delta",
"../../../api/units:timestamp",
- "../../../rtc_base:checks",
"../../../rtc_base:criticalsection",
"../../../rtc_base:logging",
- "../../../rtc_base:rtc_base_approved",
- "../../../rtc_base:task_queue_for_test",
- "../../../rtc_base/task_utils:repeating_task",
- "../../../system_wrappers",
- "//third_party/abseil-cpp/absl/memory",
- "//third_party/abseil-cpp/absl/types:optional",
+ "../../../rtc_base:rtc_numerics",
]
}
- rtc_library("peerconnection_quality_test") {
+ rtc_library("example_video_quality_analyzer") {
visibility = [ "*" ]
testonly = true
-
sources = [
- "peer_connection_quality_test.cc",
- "peer_connection_quality_test.h",
+ "analyzer/video/example_video_quality_analyzer.cc",
+ "analyzer/video/example_video_quality_analyzer.h",
]
+
deps = [
- ":analyzer_helper",
- ":default_audio_quality_analyzer",
- ":default_video_quality_analyzer",
- ":media_helper",
- ":peer_configurer",
- ":peer_connection_quality_test_params",
- ":sdp_changer",
- ":single_process_encoded_image_data_injector",
- ":stats_poller",
- ":test_activities_executor",
- ":test_peer",
- ":test_peer_factory",
- ":video_quality_analyzer_injection_helper",
- ":video_quality_metrics_reporter",
- "../..:field_trial",
- "../..:fileutils",
- "../..:perf_test",
- "../../../api:audio_quality_analyzer_api",
- "../../../api:libjingle_peerconnection_api",
- "../../../api:media_stream_interface",
- "../../../api:peer_connection_quality_test_fixture_api",
- "../../../api:rtc_event_log_output_file",
- "../../../api:scoped_refptr",
+ "../../../api:array_view",
"../../../api:video_quality_analyzer_api",
- "../../../api/rtc_event_log",
- "../../../api/task_queue",
- "../../../api/task_queue:default_task_queue_factory",
- "../../../api/units:time_delta",
- "../../../api/units:timestamp",
- "../../../pc:pc_test_utils",
- "../../../pc:peerconnection",
- "../../../rtc_base",
- "../../../rtc_base:gunit_helpers",
- "../../../rtc_base:macromagic",
- "../../../rtc_base:rtc_base_approved",
- "../../../rtc_base:safe_conversions",
- "../../../rtc_base:task_queue_for_test",
- "../../../system_wrappers",
- "../../../system_wrappers:field_trial",
+ "../../../api/video:encoded_image",
+ "../../../api/video:video_frame",
+ "../../../api/video:video_rtp_headers",
+ "../../../rtc_base:criticalsection",
+ "../../../rtc_base:logging",
]
}
- rtc_library("single_process_encoded_image_data_injector_unittest") {
+ rtc_library("video_quality_metrics_reporter") {
+ visibility = [ "*" ]
+
testonly = true
sources = [
- "analyzer/video/single_process_encoded_image_data_injector_unittest.cc",
+ "analyzer/video/video_quality_metrics_reporter.cc",
+ "analyzer/video/video_quality_metrics_reporter.h",
]
deps = [
- ":single_process_encoded_image_data_injector",
- "../../../api/video:encoded_image",
- "../../../rtc_base:rtc_base_approved",
- "../../../test:test_support",
+ "../..:perf_test",
+ "../../../api:peer_connection_quality_test_fixture_api",
+ "../../../api:rtc_stats_api",
+ "../../../api/units:data_rate",
+ "../../../api/units:data_size",
+ "../../../api/units:time_delta",
+ "../../../api/units:timestamp",
+ "../../../rtc_base:criticalsection",
+ "../../../rtc_base:rtc_numerics",
]
}
- rtc_library("default_encoded_image_data_injector_unittest") {
+ rtc_library("default_video_quality_analyzer") {
+ visibility = [ "*" ]
+
testonly = true
- sources =
- [ "analyzer/video/default_encoded_image_data_injector_unittest.cc" ]
+ sources = [
+ "analyzer/video/default_video_quality_analyzer.cc",
+ "analyzer/video/default_video_quality_analyzer.h",
+ ]
+
deps = [
- ":default_encoded_image_data_injector",
+ ":multi_head_queue",
+ "../..:perf_test",
+ "../../../api:array_view",
+ "../../../api:video_quality_analyzer_api",
+ "../../../api/units:time_delta",
+ "../../../api/units:timestamp",
"../../../api/video:encoded_image",
+ "../../../api/video:video_frame",
+ "../../../api/video:video_frame_i420",
+ "../../../api/video:video_rtp_headers",
+ "../../../common_video",
+ "../../../rtc_base:criticalsection",
+ "../../../rtc_base:logging",
"../../../rtc_base:rtc_base_approved",
- "../../../test:test_support",
+ "../../../rtc_base:rtc_base_tests_utils",
+ "../../../rtc_base:rtc_event",
+ "../../../rtc_base:rtc_numerics",
+ "../../../rtc_base:timeutils",
+ "../../../system_wrappers",
]
}
- peer_connection_e2e_smoke_test_resources = [
- "../../../resources/pc_quality_smoke_test_alice_source.wav",
- "../../../resources/pc_quality_smoke_test_bob_source.wav",
- ]
- if (is_ios) {
- bundle_data("peer_connection_e2e_smoke_test_resources_bundle_data") {
- testonly = true
- sources = peer_connection_e2e_smoke_test_resources
- outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ]
- }
- }
-
- rtc_library("peer_connection_e2e_smoke_test") {
+ rtc_library("network_quality_metrics_reporter") {
+ visibility = [ "*" ]
testonly = true
-
- sources = [ "peer_connection_e2e_smoke_test.cc" ]
+ sources = [
+ "network_quality_metrics_reporter.cc",
+ "network_quality_metrics_reporter.h",
+ ]
deps = [
- ":default_audio_quality_analyzer",
- ":default_video_quality_analyzer",
- ":network_quality_metrics_reporter",
- "../../../api:callfactory_api",
- "../../../api:create_network_emulation_manager",
- "../../../api:create_peer_connection_quality_test_frame_generator",
- "../../../api:create_peerconnection_quality_test_fixture",
- "../../../api:libjingle_peerconnection_api",
- "../../../api:media_stream_interface",
+ "../..:perf_test",
"../../../api:network_emulation_manager_api",
"../../../api:peer_connection_quality_test_fixture_api",
- "../../../api:scoped_refptr",
- "../../../api:simulated_network_api",
- "../../../api/audio_codecs:builtin_audio_decoder_factory",
- "../../../api/audio_codecs:builtin_audio_encoder_factory",
- "../../../api/video_codecs:builtin_video_decoder_factory",
- "../../../api/video_codecs:builtin_video_encoder_factory",
- "../../../call:simulated_network",
- "../../../media:rtc_audio_video",
- "../../../modules/audio_device:audio_device_impl",
- "../../../p2p:rtc_p2p",
- "../../../pc:pc_test_utils",
- "../../../pc:peerconnection_wrapper",
- "../../../rtc_base",
- "../../../rtc_base:gunit_helpers",
- "../../../rtc_base:logging",
+ "../../../api:rtc_stats_api",
+ "../../../api/units:data_size",
+ "../../../rtc_base:criticalsection",
"../../../rtc_base:rtc_event",
"../../../system_wrappers:field_trial",
- "../../../test:field_trial",
- "../../../test:fileutils",
- "../../../test:test_support",
]
- data = peer_connection_e2e_smoke_test_resources
- if (is_ios) {
- deps += [ ":peer_connection_e2e_smoke_test_resources_bundle_data" ]
- }
}
- rtc_library("stats_poller") {
+ rtc_library("sdp_changer") {
visibility = [ "*" ]
testonly = true
sources = [
- "stats_poller.cc",
- "stats_poller.h",
+ "sdp/sdp_changer.cc",
+ "sdp/sdp_changer.h",
]
deps = [
- ":test_peer",
+ "../../../api:array_view",
"../../../api:libjingle_peerconnection_api",
- "../../../api:stats_observer_interface",
- "../../../rtc_base:logging",
+ "../../../api:peer_connection_quality_test_fixture_api",
+ "../../../api:rtp_parameters",
+ "../../../media:rtc_media_base",
+ "../../../p2p:rtc_p2p",
+ "../../../pc:peerconnection",
+ "../../../pc:rtc_pc_base",
+ "../../../rtc_base:stringutils",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/memory",
+ "//third_party/abseil-cpp/absl/strings:strings",
+ "//third_party/abseil-cpp/absl/types:optional",
]
}
- rtc_library("default_video_quality_analyzer_test") {
+ rtc_library("multi_head_queue") {
+ visibility = [ "*" ]
testonly = true
- sources = [ "analyzer/video/default_video_quality_analyzer_test.cc" ]
- deps = [
- ":default_video_quality_analyzer",
- "../..:test_support",
- "../../../api:create_frame_generator",
- "../../../api:rtp_packet_info",
- "../../../api/video:encoded_image",
- "../../../api/video:video_frame",
- "../../../api/video:video_frame_i420",
- "../../../modules/rtp_rtcp:rtp_rtcp_format",
- "../../../system_wrappers",
- ]
+ sources = [ "analyzer/video/multi_head_queue.h" ]
+ deps = [ "../../../rtc_base:checks" ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
}
-
-rtc_library("analyzer_helper") {
- visibility = [ "*" ]
- sources = [
- "analyzer_helper.cc",
- "analyzer_helper.h",
- ]
- deps = [
- "../../../api:track_id_stream_label_map",
- "../../../rtc_base:macromagic",
- "../../../rtc_base/synchronization:sequence_checker",
- ]
-}
-
-rtc_library("default_audio_quality_analyzer") {
- visibility = [ "*" ]
- testonly = true
- sources = [
- "analyzer/audio/default_audio_quality_analyzer.cc",
- "analyzer/audio/default_audio_quality_analyzer.h",
- ]
-
- deps = [
- "../..:perf_test",
- "../../../api:audio_quality_analyzer_api",
- "../../../api:libjingle_peerconnection_api",
- "../../../api:stats_observer_interface",
- "../../../api:track_id_stream_label_map",
- "../../../rtc_base:criticalsection",
- "../../../rtc_base:logging",
- "../../../rtc_base:rtc_numerics",
- ]
-}
-
-rtc_library("example_video_quality_analyzer") {
- visibility = [ "*" ]
- testonly = true
- sources = [
- "analyzer/video/example_video_quality_analyzer.cc",
- "analyzer/video/example_video_quality_analyzer.h",
- ]
-
- deps = [
- "../../../api:video_quality_analyzer_api",
- "../../../api/video:encoded_image",
- "../../../api/video:video_frame",
- "../../../api/video:video_rtp_headers",
- "../../../rtc_base:criticalsection",
- "../../../rtc_base:logging",
- ]
-}
-
-rtc_library("video_quality_metrics_reporter") {
- visibility = [ "*" ]
-
- testonly = true
- sources = [
- "analyzer/video/video_quality_metrics_reporter.cc",
- "analyzer/video/video_quality_metrics_reporter.h",
- ]
- deps = [
- "../..:perf_test",
- "../../../api:peer_connection_quality_test_fixture_api",
- "../../../rtc_base:criticalsection",
- "../../../rtc_base:rtc_numerics",
- ]
-}
-
-rtc_library("default_video_quality_analyzer") {
- visibility = [ "*" ]
-
- testonly = true
- sources = [
- "analyzer/video/default_video_quality_analyzer.cc",
- "analyzer/video/default_video_quality_analyzer.h",
- ]
-
- deps = [
- "../..:perf_test",
- "../../../api:video_quality_analyzer_api",
- "../../../api/units:time_delta",
- "../../../api/units:timestamp",
- "../../../api/video:encoded_image",
- "../../../api/video:video_frame",
- "../../../api/video:video_frame_i420",
- "../../../api/video:video_rtp_headers",
- "../../../common_video",
- "../../../rtc_base:criticalsection",
- "../../../rtc_base:logging",
- "../../../rtc_base:rtc_base_approved",
- "../../../rtc_base:rtc_base_tests_utils",
- "../../../rtc_base:rtc_event",
- "../../../rtc_base:rtc_numerics",
- "../../../rtc_base:timeutils",
- "../../../system_wrappers",
- ]
-}
-
-rtc_library("network_quality_metrics_reporter") {
- visibility = [ "*" ]
- testonly = true
- sources = [
- "network_quality_metrics_reporter.cc",
- "network_quality_metrics_reporter.h",
- ]
- deps = [
- "../..:perf_test",
- "../../../api:libjingle_peerconnection_api",
- "../../../api:network_emulation_manager_api",
- "../../../api:peer_connection_quality_test_fixture_api",
- "../../../rtc_base:criticalsection",
- "../../../rtc_base:rtc_event",
- "../../../system_wrappers:field_trial",
- ]
-}
-
-rtc_library("sdp_changer") {
- visibility = [ "*" ]
- testonly = true
- sources = [
- "sdp/sdp_changer.cc",
- "sdp/sdp_changer.h",
- ]
- deps = [
- "../../../api:array_view",
- "../../../api:libjingle_peerconnection_api",
- "../../../api:peer_connection_quality_test_fixture_api",
- "../../../api:rtp_parameters",
- "../../../media:rtc_media_base",
- "../../../p2p:rtc_p2p",
- "../../../pc:peerconnection",
- "../../../pc:rtc_pc_base",
- "../../../rtc_base:stringutils",
- "//third_party/abseil-cpp/absl/memory",
- "//third_party/abseil-cpp/absl/strings:strings",
- "//third_party/abseil-cpp/absl/types:optional",
- ]
-}
diff --git a/chromium/third_party/webrtc/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.cc b/chromium/third_party/webrtc/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.cc
index b8f1740e46d..b8902335213 100644
--- a/chromium/third_party/webrtc/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.cc
+++ b/chromium/third_party/webrtc/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.cc
@@ -10,16 +10,12 @@
#include "test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h"
-#include "api/stats_types.h"
+#include "api/stats/rtc_stats.h"
+#include "api/stats/rtcstats_objects.h"
#include "rtc_base/logging.h"
namespace webrtc {
namespace webrtc_pc_e2e {
-namespace {
-
-static const char kStatsAudioMediaType[] = "audio";
-
-} // namespace
void DefaultAudioQualityAnalyzer::Start(
std::string test_case_name,
@@ -29,68 +25,82 @@ void DefaultAudioQualityAnalyzer::Start(
}
void DefaultAudioQualityAnalyzer::OnStatsReports(
- const std::string& pc_label,
- const StatsReports& stats_reports) {
- for (const StatsReport* stats_report : stats_reports) {
- // NetEq stats are only present in kStatsReportTypeSsrc reports, so all
- // other reports are just ignored.
- if (stats_report->type() != StatsReport::StatsType::kStatsReportTypeSsrc) {
- continue;
- }
- // Ignoring stats reports of "video" SSRC.
- const webrtc::StatsReport::Value* media_type = stats_report->FindValue(
- StatsReport::StatsValueName::kStatsValueNameMediaType);
- RTC_CHECK(media_type);
- if (strcmp(media_type->static_string_val(), kStatsAudioMediaType) != 0) {
- continue;
- }
- if (stats_report->FindValue(
- webrtc::StatsReport::kStatsValueNameBytesSent)) {
- // If kStatsValueNameBytesSent is present, it means it's a send stream,
- // but we need audio metrics for receive stream, so skip it.
+ absl::string_view pc_label,
+ const rtc::scoped_refptr<const RTCStatsReport>& report) {
+ // TODO(https://crbug.com/webrtc/11683): use "inbound-rtp" instead of "track"
+ // stats when required audio metrics moved there
+ auto stats = report->GetStatsOfType<RTCMediaStreamTrackStats>();
+
+ for (auto& stat : stats) {
+ if (!stat->kind.is_defined() ||
+ !(*stat->kind == RTCMediaStreamTrackKind::kAudio) ||
+ !*stat->remote_source) {
continue;
}
- const webrtc::StatsReport::Value* expand_rate = stats_report->FindValue(
- StatsReport::StatsValueName::kStatsValueNameExpandRate);
- const webrtc::StatsReport::Value* accelerate_rate = stats_report->FindValue(
- StatsReport::StatsValueName::kStatsValueNameAccelerateRate);
- const webrtc::StatsReport::Value* preemptive_rate = stats_report->FindValue(
- StatsReport::StatsValueName::kStatsValueNamePreemptiveExpandRate);
- const webrtc::StatsReport::Value* speech_expand_rate =
- stats_report->FindValue(
- StatsReport::StatsValueName::kStatsValueNameSpeechExpandRate);
- const webrtc::StatsReport::Value* preferred_buffer_size_ms =
- stats_report->FindValue(StatsReport::StatsValueName::
- kStatsValueNamePreferredJitterBufferMs);
- RTC_CHECK(expand_rate);
- RTC_CHECK(accelerate_rate);
- RTC_CHECK(preemptive_rate);
- RTC_CHECK(speech_expand_rate);
- RTC_CHECK(preferred_buffer_size_ms);
+ StatsSample sample;
+ sample.total_samples_received =
+ stat->total_samples_received.ValueOrDefault(0ul);
+ sample.concealed_samples = stat->concealed_samples.ValueOrDefault(0ul);
+ sample.removed_samples_for_acceleration =
+ stat->removed_samples_for_acceleration.ValueOrDefault(0ul);
+ sample.inserted_samples_for_deceleration =
+ stat->inserted_samples_for_deceleration.ValueOrDefault(0ul);
+ sample.silent_concealed_samples =
+ stat->silent_concealed_samples.ValueOrDefault(0ul);
+ sample.jitter_buffer_target_delay =
+ TimeDelta::Seconds(stat->jitter_buffer_target_delay.ValueOrDefault(0.));
+ sample.jitter_buffer_emitted_count =
+ stat->jitter_buffer_emitted_count.ValueOrDefault(0ul);
const std::string& stream_label =
- GetStreamLabelFromStatsReport(stats_report);
+ analyzer_helper_->GetStreamLabelFromTrackId(*stat->track_identifier);
rtc::CritScope crit(&lock_);
+ StatsSample prev_sample = last_stats_sample_[stream_label];
+ RTC_CHECK_GE(sample.total_samples_received,
+ prev_sample.total_samples_received);
+ double total_samples_diff = static_cast<double>(
+ sample.total_samples_received - prev_sample.total_samples_received);
+ if (total_samples_diff == 0) {
+ return;
+ }
+
AudioStreamStats& audio_stream_stats = streams_stats_[stream_label];
- audio_stream_stats.expand_rate.AddSample(expand_rate->float_val());
- audio_stream_stats.accelerate_rate.AddSample(accelerate_rate->float_val());
- audio_stream_stats.preemptive_rate.AddSample(preemptive_rate->float_val());
+ audio_stream_stats.expand_rate.AddSample(
+ (sample.concealed_samples - prev_sample.concealed_samples) /
+ total_samples_diff);
+ audio_stream_stats.accelerate_rate.AddSample(
+ (sample.removed_samples_for_acceleration -
+ prev_sample.removed_samples_for_acceleration) /
+ total_samples_diff);
+ audio_stream_stats.preemptive_rate.AddSample(
+ (sample.inserted_samples_for_deceleration -
+ prev_sample.inserted_samples_for_deceleration) /
+ total_samples_diff);
+
+ int64_t speech_concealed_samples =
+ sample.concealed_samples - sample.silent_concealed_samples;
+ int64_t prev_speech_concealed_samples =
+ prev_sample.concealed_samples - prev_sample.silent_concealed_samples;
audio_stream_stats.speech_expand_rate.AddSample(
- speech_expand_rate->float_val());
- audio_stream_stats.preferred_buffer_size_ms.AddSample(
- preferred_buffer_size_ms->int_val());
- }
-}
+ (speech_concealed_samples - prev_speech_concealed_samples) /
+ total_samples_diff);
+
+ int64_t jitter_buffer_emitted_count_diff =
+ sample.jitter_buffer_emitted_count -
+ prev_sample.jitter_buffer_emitted_count;
+ if (jitter_buffer_emitted_count_diff > 0) {
+ TimeDelta jitter_buffer_target_delay_diff =
+ sample.jitter_buffer_target_delay -
+ prev_sample.jitter_buffer_target_delay;
+ audio_stream_stats.preferred_buffer_size_ms.AddSample(
+ jitter_buffer_target_delay_diff.ms<double>() /
+ jitter_buffer_emitted_count_diff);
+ }
-const std::string& DefaultAudioQualityAnalyzer::GetStreamLabelFromStatsReport(
- const StatsReport* stats_report) const {
- const webrtc::StatsReport::Value* report_track_id = stats_report->FindValue(
- StatsReport::StatsValueName::kStatsValueNameTrackId);
- RTC_CHECK(report_track_id);
- return analyzer_helper_->GetStreamLabelFromTrackId(
- report_track_id->string_val());
+ last_stats_sample_[stream_label] = sample;
+ }
}
std::string DefaultAudioQualityAnalyzer::GetTestCaseName(
diff --git a/chromium/third_party/webrtc/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h b/chromium/third_party/webrtc/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h
index 33aaefd4c3f..c990e4f357f 100644
--- a/chromium/third_party/webrtc/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h
+++ b/chromium/third_party/webrtc/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h
@@ -14,9 +14,9 @@
#include <map>
#include <string>
-#include "api/stats_types.h"
#include "api/test/audio_quality_analyzer_interface.h"
#include "api/test/track_id_stream_label_map.h"
+#include "api/units/time_delta.h"
#include "rtc_base/critical_section.h"
#include "rtc_base/numerics/samples_stats_counter.h"
#include "test/testsupport/perf_test.h"
@@ -32,22 +32,29 @@ struct AudioStreamStats {
SamplesStatsCounter preferred_buffer_size_ms;
};
-// TODO(bugs.webrtc.org/10430): Migrate to the new GetStats as soon as
-// bugs.webrtc.org/10428 is fixed.
class DefaultAudioQualityAnalyzer : public AudioQualityAnalyzerInterface {
public:
void Start(std::string test_case_name,
TrackIdStreamLabelMap* analyzer_helper) override;
- void OnStatsReports(const std::string& pc_label,
- const StatsReports& stats_reports) override;
+ void OnStatsReports(
+ absl::string_view pc_label,
+ const rtc::scoped_refptr<const RTCStatsReport>& report) override;
void Stop() override;
// Returns audio quality stats per stream label.
std::map<std::string, AudioStreamStats> GetAudioStreamsStats() const;
private:
- const std::string& GetStreamLabelFromStatsReport(
- const StatsReport* stats_report) const;
+ struct StatsSample {
+ uint64_t total_samples_received = 0;
+ uint64_t concealed_samples = 0;
+ uint64_t removed_samples_for_acceleration = 0;
+ uint64_t inserted_samples_for_deceleration = 0;
+ uint64_t silent_concealed_samples = 0;
+ TimeDelta jitter_buffer_target_delay = TimeDelta::Zero();
+ uint64_t jitter_buffer_emitted_count = 0;
+ };
+
std::string GetTestCaseName(const std::string& stream_label) const;
void ReportResult(const std::string& metric_name,
const std::string& stream_label,
@@ -60,6 +67,7 @@ class DefaultAudioQualityAnalyzer : public AudioQualityAnalyzerInterface {
rtc::CriticalSection lock_;
std::map<std::string, AudioStreamStats> streams_stats_ RTC_GUARDED_BY(lock_);
+ std::map<std::string, StatsSample> last_stats_sample_ RTC_GUARDED_BY(lock_);
};
} // namespace webrtc_pc_e2e
diff --git a/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc b/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc
index 786509ddb7e..851238f1e74 100644
--- a/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc
+++ b/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc
@@ -14,11 +14,13 @@
#include <memory>
#include <utility>
+#include "api/array_view.h"
#include "api/units/time_delta.h"
#include "api/video/i420_buffer.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "rtc_base/cpu_time.h"
#include "rtc_base/logging.h"
+#include "rtc_base/strings/string_builder.h"
#include "rtc_base/time_utils.h"
namespace webrtc {
@@ -35,6 +37,7 @@ void LogFrameCounters(const std::string& name, const FrameCounters& counters) {
RTC_LOG(INFO) << "[" << name << "] Pre encoded : " << counters.pre_encoded;
RTC_LOG(INFO) << "[" << name << "] Encoded : " << counters.encoded;
RTC_LOG(INFO) << "[" << name << "] Received : " << counters.received;
+ RTC_LOG(INFO) << "[" << name << "] Decoded : " << counters.decoded;
RTC_LOG(INFO) << "[" << name << "] Rendered : " << counters.rendered;
RTC_LOG(INFO) << "[" << name << "] Dropped : " << counters.dropped;
}
@@ -46,6 +49,15 @@ void LogStreamInternalStats(const std::string& name, const StreamStats& stats) {
<< stats.dropped_before_encoder;
}
+template <typename T>
+absl::optional<T> MaybeGetValue(const std::map<size_t, T>& map, size_t key) {
+ auto it = map.find(key);
+ if (it == map.end()) {
+ return absl::nullopt;
+ }
+ return it->second;
+}
+
} // namespace
void RateCounter::AddEvent(Timestamp event_time) {
@@ -65,9 +77,52 @@ double RateCounter::GetEventsPerSecond() const {
(event_last_time_ - event_first_time_).us() * kMicrosPerSecond;
}
+std::string StatsKey::ToString() const {
+ rtc::StringBuilder out;
+ out << stream_label << "_" << sender << "_" << receiver;
+ return out.str();
+}
+
+bool operator<(const StatsKey& a, const StatsKey& b) {
+ if (a.stream_label != b.stream_label) {
+ return a.stream_label < b.stream_label;
+ }
+ if (a.sender != b.sender) {
+ return a.sender < b.sender;
+ }
+ return a.receiver < b.receiver;
+}
+
+bool operator==(const StatsKey& a, const StatsKey& b) {
+ return a.stream_label == b.stream_label && a.sender == b.sender &&
+ a.receiver == b.receiver;
+}
+
+std::string InternalStatsKey::ToString() const {
+ rtc::StringBuilder out;
+ out << "stream=" << stream << "_sender=" << sender
+ << "_receiver=" << receiver;
+ return out.str();
+}
+
+bool operator<(const InternalStatsKey& a, const InternalStatsKey& b) {
+ if (a.stream != b.stream) {
+ return a.stream < b.stream;
+ }
+ if (a.sender != b.sender) {
+ return a.sender < b.sender;
+ }
+ return a.receiver < b.receiver;
+}
+
+bool operator==(const InternalStatsKey& a, const InternalStatsKey& b) {
+ return a.stream == b.stream && a.sender == b.sender &&
+ a.receiver == b.receiver;
+}
+
DefaultVideoQualityAnalyzer::DefaultVideoQualityAnalyzer(
bool heavy_metrics_computation_enabled,
- int max_frames_in_flight_per_stream_count)
+ size_t max_frames_in_flight_per_stream_count)
: heavy_metrics_computation_enabled_(heavy_metrics_computation_enabled),
max_frames_in_flight_per_stream_count_(
max_frames_in_flight_per_stream_count),
@@ -76,9 +131,12 @@ DefaultVideoQualityAnalyzer::~DefaultVideoQualityAnalyzer() {
Stop();
}
-void DefaultVideoQualityAnalyzer::Start(std::string test_case_name,
- int max_threads_count) {
+void DefaultVideoQualityAnalyzer::Start(
+ std::string test_case_name,
+ rtc::ArrayView<const std::string> peer_names,
+ int max_threads_count) {
test_label_ = std::move(test_case_name);
+ peers_ = std::make_unique<NamesCollection>(peer_names);
for (int i = 0; i < max_threads_count; i++) {
auto thread = std::make_unique<rtc::PlatformThread>(
&DefaultVideoQualityAnalyzer::ProcessComparisonsThread, this,
@@ -98,73 +156,115 @@ void DefaultVideoQualityAnalyzer::Start(std::string test_case_name,
}
uint16_t DefaultVideoQualityAnalyzer::OnFrameCaptured(
+ absl::string_view peer_name,
const std::string& stream_label,
const webrtc::VideoFrame& frame) {
// |next_frame_id| is atomic, so we needn't lock here.
uint16_t frame_id = next_frame_id_++;
Timestamp start_time = Timestamp::MinusInfinity();
+ size_t peer_index = peers_->index(peer_name);
+ size_t stream_index;
{
rtc::CritScope crit(&lock_);
- // Create a local copy of start_time_ to access it under |comparison_lock_|
- // without holding a |lock_|
+ // Create a local copy of start_time_ to access it under
+ // |comparison_lock_| without holding a |lock_|
start_time = start_time_;
+ stream_index = streams_.AddIfAbsent(stream_label);
}
{
// Ensure stats for this stream exists.
rtc::CritScope crit(&comparison_lock_);
- if (stream_stats_.find(stream_label) == stream_stats_.end()) {
- stream_stats_.insert({stream_label, StreamStats()});
- // Assume that the first freeze was before first stream frame captured.
- // This way time before the first freeze would be counted as time between
- // freezes.
- stream_last_freeze_end_time_.insert({stream_label, start_time});
+ for (size_t i = 0; i < peers_->size(); ++i) {
+ if (i == peer_index) {
+ continue;
+ }
+ InternalStatsKey stats_key(stream_index, peer_index, i);
+ if (stream_stats_.find(stats_key) == stream_stats_.end()) {
+ stream_stats_.insert({stats_key, StreamStats()});
+ // Assume that the first freeze was before first stream frame captured.
+ // This way time before the first freeze would be counted as time
+ // between freezes.
+ stream_last_freeze_end_time_.insert({stats_key, start_time});
+ } else {
+ // When we see some |stream_label| for the first time we need to create
+ // stream stats object for it and set up some states, but we need to do
+ // it only once and for all receivers, so on the next frame on the same
+ // |stream_label| we can be sure, that it's already done and we needn't
+ // to scan though all peers again.
+ break;
+ }
}
}
{
rtc::CritScope crit(&lock_);
+ stream_to_sender_[stream_index] = peer_index;
frame_counters_.captured++;
- stream_frame_counters_[stream_label].captured++;
+ for (size_t i = 0; i < peers_->size(); ++i) {
+ if (i != peer_index) {
+ InternalStatsKey key(stream_index, peer_index, i);
+ stream_frame_counters_[key].captured++;
+ }
+ }
- StreamState* state = &stream_states_[stream_label];
+ auto state_it = stream_states_.find(stream_index);
+ if (state_it == stream_states_.end()) {
+ stream_states_.emplace(stream_index,
+ StreamState(peer_index, peers_->size()));
+ }
+ StreamState* state = &stream_states_.at(stream_index);
state->PushBack(frame_id);
// Update frames in flight info.
auto it = captured_frames_in_flight_.find(frame_id);
if (it != captured_frames_in_flight_.end()) {
- // We overflow uint16_t and hit previous frame id and this frame is still
- // in flight. It means that this stream wasn't rendered for long time and
- // we need to process existing frame as dropped.
- auto stats_it = frame_stats_.find(frame_id);
- RTC_DCHECK(stats_it != frame_stats_.end());
-
- uint16_t oldest_frame_id = state->PopFront();
- RTC_DCHECK_EQ(frame_id, oldest_frame_id);
- frame_counters_.dropped++;
- stream_frame_counters_[stream_label].dropped++;
- AddComparison(it->second, absl::nullopt, true, stats_it->second);
+ // If we overflow uint16_t and hit previous frame id and this frame is
+ // still in flight, it means that this stream wasn't rendered for long
+ // time and we need to process existing frame as dropped.
+ for (size_t i = 0; i < peers_->size(); ++i) {
+ if (i == peer_index) {
+ continue;
+ }
+
+ uint16_t oldest_frame_id = state->PopFront(i);
+ RTC_DCHECK_EQ(frame_id, oldest_frame_id);
+ frame_counters_.dropped++;
+ InternalStatsKey key(stream_index, peer_index, i);
+ stream_frame_counters_.at(key).dropped++;
+
+ rtc::CritScope crit1(&comparison_lock_);
+ analyzer_stats_.frames_in_flight_left_count.AddSample(
+ captured_frames_in_flight_.size());
+ AddComparison(InternalStatsKey(stream_index, peer_index, i),
+ it->second.frame(), absl::nullopt, true,
+ it->second.GetStatsForPeer(i));
+ }
captured_frames_in_flight_.erase(it);
- frame_stats_.erase(stats_it);
}
- captured_frames_in_flight_.insert(
- std::pair<uint16_t, VideoFrame>(frame_id, frame));
+ captured_frames_in_flight_.emplace(
+ frame_id,
+ FrameInFlight(stream_index, frame,
+ /*captured_time=*/Now(), peer_index, peers_->size()));
// Set frame id on local copy of the frame
- captured_frames_in_flight_.at(frame_id).set_id(frame_id);
- frame_stats_.insert(std::pair<uint16_t, FrameStats>(
- frame_id, FrameStats(stream_label, /*captured_time=*/Now())));
+ captured_frames_in_flight_.at(frame_id).SetFrameId(frame_id);
// Update history stream<->frame mapping
for (auto it = stream_to_frame_id_history_.begin();
it != stream_to_frame_id_history_.end(); ++it) {
it->second.erase(frame_id);
}
- stream_to_frame_id_history_[stream_label].insert(frame_id);
+ stream_to_frame_id_history_[stream_index].insert(frame_id);
// If state has too many frames that are in flight => remove the oldest
// queued frame in order to avoid to use too much memory.
if (state->GetAliveFramesCount() > max_frames_in_flight_per_stream_count_) {
uint16_t frame_id_to_remove = state->MarkNextAliveFrameAsDead();
- auto removed_count = captured_frames_in_flight_.erase(frame_id_to_remove);
- RTC_DCHECK_EQ(removed_count, 1)
+ auto it = captured_frames_in_flight_.find(frame_id_to_remove);
+ RTC_CHECK(it != captured_frames_in_flight_.end())
+ << "Frame with ID " << frame_id_to_remove
+ << " is expected to be in flight, but hasn't been found in "
+ << "|captured_frames_in_flight_|";
+ bool is_removed = it->second.RemoveFrame();
+ RTC_DCHECK(is_removed)
<< "Invalid stream state: alive frame is removed already";
}
}
@@ -172,52 +272,76 @@ uint16_t DefaultVideoQualityAnalyzer::OnFrameCaptured(
}
void DefaultVideoQualityAnalyzer::OnFramePreEncode(
+ absl::string_view peer_name,
const webrtc::VideoFrame& frame) {
rtc::CritScope crit(&lock_);
- auto it = frame_stats_.find(frame.id());
- RTC_DCHECK(it != frame_stats_.end())
+ auto it = captured_frames_in_flight_.find(frame.id());
+ RTC_DCHECK(it != captured_frames_in_flight_.end())
<< "Frame id=" << frame.id() << " not found";
frame_counters_.pre_encoded++;
- stream_frame_counters_[it->second.stream_label].pre_encoded++;
- it->second.pre_encode_time = Now();
+ size_t peer_index = peers_->index(peer_name);
+ for (size_t i = 0; i < peers_->size(); ++i) {
+ if (i != peer_index) {
+ InternalStatsKey key(it->second.stream(), peer_index, i);
+ stream_frame_counters_.at(key).pre_encoded++;
+ }
+ }
+ it->second.SetPreEncodeTime(Now());
}
void DefaultVideoQualityAnalyzer::OnFrameEncoded(
+ absl::string_view peer_name,
uint16_t frame_id,
const webrtc::EncodedImage& encoded_image,
const EncoderStats& stats) {
rtc::CritScope crit(&lock_);
- auto it = frame_stats_.find(frame_id);
- RTC_DCHECK(it != frame_stats_.end());
+ auto it = captured_frames_in_flight_.find(frame_id);
+ RTC_DCHECK(it != captured_frames_in_flight_.end());
// For SVC we can receive multiple encoded images for one frame, so to cover
// all cases we have to pick the last encode time.
- if (it->second.encoded_time.IsInfinite()) {
+ if (!it->second.HasEncodedTime()) {
// Increase counters only when we meet this frame first time.
frame_counters_.encoded++;
- stream_frame_counters_[it->second.stream_label].encoded++;
+ size_t peer_index = peers_->index(peer_name);
+ for (size_t i = 0; i < peers_->size(); ++i) {
+ if (i != peer_index) {
+ InternalStatsKey key(it->second.stream(), peer_index, i);
+ stream_frame_counters_.at(key).encoded++;
+ }
+ }
}
- it->second.encoded_time = Now();
- it->second.encoded_image_size = encoded_image.size();
- it->second.target_encode_bitrate += stats.target_encode_bitrate;
+ it->second.OnFrameEncoded(Now(), encoded_image.size(),
+ stats.target_encode_bitrate);
}
void DefaultVideoQualityAnalyzer::OnFrameDropped(
+ absl::string_view peer_name,
webrtc::EncodedImageCallback::DropReason reason) {
// Here we do nothing, because we will see this drop on renderer side.
}
void DefaultVideoQualityAnalyzer::OnFramePreDecode(
+ absl::string_view peer_name,
uint16_t frame_id,
const webrtc::EncodedImage& input_image) {
rtc::CritScope crit(&lock_);
- auto it = frame_stats_.find(frame_id);
- RTC_DCHECK(it != frame_stats_.end());
- RTC_DCHECK(it->second.received_time.IsInfinite())
- << "Received multiple spatial layers for stream_label="
- << it->second.stream_label;
+ size_t peer_index = peers_->index(peer_name);
+
+ auto it = captured_frames_in_flight_.find(frame_id);
+ if (it == captured_frames_in_flight_.end() ||
+ it->second.HasReceivedTime(peer_index)) {
+ // It means this frame was predecoded before, so we can skip it. It may
+ // happen when we have multiple simulcast streams in one track and received
+ // the same picture from two different streams because SFU can't reliably
+ // correlate two simulcast streams and started relaying the second stream
+ // from the same frame it has relayed right before for the first stream.
+ return;
+ }
+
frame_counters_.received++;
- stream_frame_counters_[it->second.stream_label].received++;
- it->second.decode_start_time = Now();
+ InternalStatsKey key(it->second.stream(),
+ stream_to_sender_.at(it->second.stream()), peer_index);
+ stream_frame_counters_.at(key).received++;
// Determine the time of the last received packet of this video frame.
RTC_DCHECK(!input_image.PacketInfos().empty());
int64_t last_receive_time =
@@ -227,112 +351,145 @@ void DefaultVideoQualityAnalyzer::OnFramePreDecode(
return a.receive_time_ms() < b.receive_time_ms();
})
->receive_time_ms();
- it->second.received_time = Timestamp::Millis(last_receive_time);
+ it->second.OnFramePreDecode(
+ peer_index,
+ /*received_time=*/Timestamp::Millis(last_receive_time),
+ /*decode_start_time=*/Now());
}
void DefaultVideoQualityAnalyzer::OnFrameDecoded(
+ absl::string_view peer_name,
const webrtc::VideoFrame& frame,
const DecoderStats& stats) {
rtc::CritScope crit(&lock_);
- auto it = frame_stats_.find(frame.id());
- RTC_DCHECK(it != frame_stats_.end());
+ size_t peer_index = peers_->index(peer_name);
+
+ auto it = captured_frames_in_flight_.find(frame.id());
+ if (it == captured_frames_in_flight_.end() ||
+ it->second.HasDecodeEndTime(peer_index)) {
+ // It means this frame was decoded before, so we can skip it. It may happen
+ // when we have multiple simulcast streams in one track and received
+ // the same picture from two different streams because SFU can't reliably
+ // correlate two simulcast streams and started relaying the second stream
+ // from the same frame it has relayed right before for the first stream.
+ return;
+ }
frame_counters_.decoded++;
- stream_frame_counters_[it->second.stream_label].decoded++;
- it->second.decode_end_time = Now();
+ InternalStatsKey key(it->second.stream(),
+ stream_to_sender_.at(it->second.stream()), peer_index);
+ stream_frame_counters_.at(key).decoded++;
+ it->second.SetDecodeEndTime(peer_index, Now());
}
void DefaultVideoQualityAnalyzer::OnFrameRendered(
+ absl::string_view peer_name,
const webrtc::VideoFrame& raw_frame) {
+ rtc::CritScope crit(&lock_);
+ size_t peer_index = peers_->index(peer_name);
+
+ auto frame_it = captured_frames_in_flight_.find(raw_frame.id());
+ if (frame_it == captured_frames_in_flight_.end() ||
+ frame_it->second.HasRenderedTime(peer_index)) {
+ // It means this frame was rendered before, so we can skip it. It may happen
+ // when we have multiple simulcast streams in one track and received
+ // the same picture from two different streams because SFU can't reliably
+ // correlate two simulcast streams and started relaying the second stream
+ // from the same frame it has relayed right before for the first stream.
+ return;
+ }
+
// Copy entire video frame including video buffer to ensure that analyzer
// won't hold any WebRTC internal buffers.
VideoFrame frame = raw_frame;
frame.set_video_frame_buffer(
I420Buffer::Copy(*raw_frame.video_frame_buffer()->ToI420()));
- rtc::CritScope crit(&lock_);
- auto stats_it = frame_stats_.find(frame.id());
- RTC_DCHECK(stats_it != frame_stats_.end());
- FrameStats* frame_stats = &stats_it->second;
+ // Find corresponding captured frame.
+ FrameInFlight* frame_in_flight = &frame_it->second;
+ absl::optional<VideoFrame> captured_frame = frame_in_flight->frame();
+
+ const size_t stream_index = frame_in_flight->stream();
+ StreamState* state = &stream_states_.at(stream_index);
+ const InternalStatsKey stats_key(stream_index, state->owner(), peer_index);
+
// Update frames counters.
frame_counters_.rendered++;
- stream_frame_counters_[frame_stats->stream_label].rendered++;
+ stream_frame_counters_.at(stats_key).rendered++;
// Update current frame stats.
- frame_stats->rendered_time = Now();
- frame_stats->rendered_frame_width = frame.width();
- frame_stats->rendered_frame_height = frame.height();
-
- // Find corresponding captured frame.
- auto frame_it = captured_frames_in_flight_.find(frame.id());
- absl::optional<VideoFrame> captured_frame =
- frame_it != captured_frames_in_flight_.end()
- ? absl::optional<VideoFrame>(frame_it->second)
- : absl::nullopt;
+ frame_in_flight->OnFrameRendered(peer_index, Now(), frame.width(),
+ frame.height());
// After we received frame here we need to check if there are any dropped
// frames between this one and last one, that was rendered for this video
// stream.
-
- const std::string& stream_label = frame_stats->stream_label;
- StreamState* state = &stream_states_[stream_label];
int dropped_count = 0;
- while (!state->Empty() && state->Front() != frame.id()) {
+ while (!state->IsEmpty(peer_index) &&
+ state->Front(peer_index) != frame.id()) {
dropped_count++;
- uint16_t dropped_frame_id = state->PopFront();
+ uint16_t dropped_frame_id = state->PopFront(peer_index);
// Frame with id |dropped_frame_id| was dropped. We need:
// 1. Update global and stream frame counters
// 2. Extract corresponding frame from |captured_frames_in_flight_|
- // 3. Extract corresponding frame stats from |frame_stats_|
- // 4. Send extracted frame to comparison with dropped=true
- // 5. Cleanup dropped frame
+ // 3. Send extracted frame to comparison with dropped=true
+ // 4. Cleanup dropped frame
frame_counters_.dropped++;
- stream_frame_counters_[stream_label].dropped++;
+ stream_frame_counters_.at(stats_key).dropped++;
- auto dropped_frame_stats_it = frame_stats_.find(dropped_frame_id);
- RTC_DCHECK(dropped_frame_stats_it != frame_stats_.end());
auto dropped_frame_it = captured_frames_in_flight_.find(dropped_frame_id);
- absl::optional<VideoFrame> dropped_frame =
- dropped_frame_it != captured_frames_in_flight_.end()
- ? absl::optional<VideoFrame>(dropped_frame_it->second)
- : absl::nullopt;
+ RTC_DCHECK(dropped_frame_it != captured_frames_in_flight_.end());
+ absl::optional<VideoFrame> dropped_frame = dropped_frame_it->second.frame();
+ dropped_frame_it->second.MarkDropped(peer_index);
- AddComparison(dropped_frame, absl::nullopt, true,
- dropped_frame_stats_it->second);
+ {
+ rtc::CritScope crit1(&comparison_lock_);
+ analyzer_stats_.frames_in_flight_left_count.AddSample(
+ captured_frames_in_flight_.size());
+ AddComparison(stats_key, dropped_frame, absl::nullopt, true,
+ dropped_frame_it->second.GetStatsForPeer(peer_index));
+ }
- frame_stats_.erase(dropped_frame_stats_it);
- if (dropped_frame_it != captured_frames_in_flight_.end()) {
+ if (dropped_frame_it->second.HaveAllPeersReceived()) {
captured_frames_in_flight_.erase(dropped_frame_it);
}
}
- RTC_DCHECK(!state->Empty());
- state->PopFront();
+ RTC_DCHECK(!state->IsEmpty(peer_index));
+ state->PopFront(peer_index);
- if (state->last_rendered_frame_time()) {
- frame_stats->prev_frame_rendered_time =
- state->last_rendered_frame_time().value();
+ if (state->last_rendered_frame_time(peer_index)) {
+ frame_in_flight->SetPrevFrameRenderedTime(
+ peer_index, state->last_rendered_frame_time(peer_index).value());
}
- state->set_last_rendered_frame_time(frame_stats->rendered_time);
+ state->SetLastRenderedFrameTime(peer_index,
+ frame_in_flight->rendered_time(peer_index));
{
rtc::CritScope cr(&comparison_lock_);
- stream_stats_[stream_label].skipped_between_rendered.AddSample(
- dropped_count);
+ stream_stats_[stats_key].skipped_between_rendered.AddSample(dropped_count);
}
- AddComparison(captured_frame, frame, false, *frame_stats);
- if (frame_it != captured_frames_in_flight_.end()) {
+ {
+ rtc::CritScope crit(&comparison_lock_);
+ analyzer_stats_.frames_in_flight_left_count.AddSample(
+ captured_frames_in_flight_.size());
+ AddComparison(stats_key, captured_frame, frame, false,
+ frame_in_flight->GetStatsForPeer(peer_index));
+ }
+
+ if (frame_it->second.HaveAllPeersReceived()) {
captured_frames_in_flight_.erase(frame_it);
}
- frame_stats_.erase(stats_it);
}
void DefaultVideoQualityAnalyzer::OnEncoderError(
+ absl::string_view peer_name,
const webrtc::VideoFrame& frame,
int32_t error_code) {
RTC_LOG(LS_ERROR) << "Encoder error for frame.id=" << frame.id()
<< ", code=" << error_code;
}
-void DefaultVideoQualityAnalyzer::OnDecoderError(uint16_t frame_id,
+void DefaultVideoQualityAnalyzer::OnDecoderError(absl::string_view peer_name,
+ uint16_t frame_id,
int32_t error_code) {
RTC_LOG(LS_ERROR) << "Decoder error for frame_id=" << frame_id
<< ", code=" << error_code;
@@ -362,46 +519,58 @@ void DefaultVideoQualityAnalyzer::Stop() {
// between freezes.
rtc::CritScope crit1(&lock_);
rtc::CritScope crit2(&comparison_lock_);
- for (auto& item : stream_stats_) {
- const StreamState& state = stream_states_[item.first];
- // If there are no freezes in the call we have to report
- // time_between_freezes_ms as call duration and in such case
- // |stream_last_freeze_end_time_| for this stream will be |start_time_|.
- // If there is freeze, then we need add time from last rendered frame
- // to last freeze end as time between freezes.
- if (state.last_rendered_frame_time()) {
- item.second.time_between_freezes_ms.AddSample(
- (state.last_rendered_frame_time().value() -
- stream_last_freeze_end_time_.at(item.first))
- .ms());
+ for (auto& state_entry : stream_states_) {
+ const size_t stream_index = state_entry.first;
+ const StreamState& stream_state = state_entry.second;
+ for (size_t i = 0; i < peers_->size(); ++i) {
+ if (i == static_cast<size_t>(stream_state.owner())) {
+ continue;
+ }
+
+ InternalStatsKey stats_key(stream_index, stream_state.owner(), i);
+
+ // If there are no freezes in the call we have to report
+ // time_between_freezes_ms as call duration and in such case
+ // |stream_last_freeze_end_time_| for this stream will be |start_time_|.
+ // If there is freeze, then we need add time from last rendered frame
+ // to last freeze end as time between freezes.
+ if (stream_state.last_rendered_frame_time(i)) {
+ stream_stats_[stats_key].time_between_freezes_ms.AddSample(
+ stream_state.last_rendered_frame_time(i).value().ms() -
+ stream_last_freeze_end_time_.at(stats_key).ms());
+ }
}
}
+ analyzer_stats_.frames_in_flight_left_count.AddSample(
+ captured_frames_in_flight_.size());
}
ReportResults();
}
std::string DefaultVideoQualityAnalyzer::GetStreamLabel(uint16_t frame_id) {
rtc::CritScope crit1(&lock_);
- auto it = frame_stats_.find(frame_id);
- if (it != frame_stats_.end()) {
- return it->second.stream_label;
+ auto it = captured_frames_in_flight_.find(frame_id);
+ if (it != captured_frames_in_flight_.end()) {
+ return streams_.name(it->second.stream());
}
for (auto hist_it = stream_to_frame_id_history_.begin();
hist_it != stream_to_frame_id_history_.end(); ++hist_it) {
auto hist_set_it = hist_it->second.find(frame_id);
if (hist_set_it != hist_it->second.end()) {
- return hist_it->first;
+ return streams_.name(hist_it->first);
}
}
RTC_CHECK(false) << "Unknown frame_id=" << frame_id;
}
-std::set<std::string> DefaultVideoQualityAnalyzer::GetKnownVideoStreams()
- const {
+std::set<StatsKey> DefaultVideoQualityAnalyzer::GetKnownVideoStreams() const {
+ rtc::CritScope crit1(&lock_);
rtc::CritScope crit2(&comparison_lock_);
- std::set<std::string> out;
+ std::set<StatsKey> out;
for (auto& item : stream_stats_) {
- out.insert(item.first);
+ RTC_LOG(INFO) << item.first.ToString() << " ==> "
+ << ToStatsKey(item.first).ToString();
+ out.insert(ToStatsKey(item.first));
}
return out;
}
@@ -411,16 +580,24 @@ const FrameCounters& DefaultVideoQualityAnalyzer::GetGlobalCounters() const {
return frame_counters_;
}
-const std::map<std::string, FrameCounters>&
+std::map<StatsKey, FrameCounters>
DefaultVideoQualityAnalyzer::GetPerStreamCounters() const {
rtc::CritScope crit(&lock_);
- return stream_frame_counters_;
+ std::map<StatsKey, FrameCounters> out;
+ for (auto& item : stream_frame_counters_) {
+ out.emplace(ToStatsKey(item.first), item.second);
+ }
+ return out;
}
-std::map<std::string, StreamStats> DefaultVideoQualityAnalyzer::GetStats()
- const {
- rtc::CritScope cri(&comparison_lock_);
- return stream_stats_;
+std::map<StatsKey, StreamStats> DefaultVideoQualityAnalyzer::GetStats() const {
+ rtc::CritScope crit1(&lock_);
+ rtc::CritScope crit2(&comparison_lock_);
+ std::map<StatsKey, StreamStats> out;
+ for (auto& item : stream_stats_) {
+ out.emplace(ToStatsKey(item.first), item.second);
+ }
+ return out;
}
AnalyzerStats DefaultVideoQualityAnalyzer::GetAnalyzerStats() const {
@@ -429,25 +606,27 @@ AnalyzerStats DefaultVideoQualityAnalyzer::GetAnalyzerStats() const {
}
void DefaultVideoQualityAnalyzer::AddComparison(
+ InternalStatsKey stats_key,
absl::optional<VideoFrame> captured,
absl::optional<VideoFrame> rendered,
bool dropped,
FrameStats frame_stats) {
StartExcludingCpuThreadTime();
- rtc::CritScope crit(&comparison_lock_);
analyzer_stats_.comparisons_queue_size.AddSample(comparisons_.size());
// If there too many computations waiting in the queue, we won't provide
// frames itself to make future computations lighter.
if (comparisons_.size() >= kMaxActiveComparisons) {
- comparisons_.emplace_back(absl::nullopt, absl::nullopt, dropped,
- frame_stats, OverloadReason::kCpu);
+ comparisons_.emplace_back(std::move(stats_key), absl::nullopt,
+ absl::nullopt, dropped, std::move(frame_stats),
+ OverloadReason::kCpu);
} else {
OverloadReason overload_reason = OverloadReason::kNone;
if (!captured && !dropped) {
overload_reason = OverloadReason::kMemory;
}
- comparisons_.emplace_back(std::move(captured), std::move(rendered), dropped,
- frame_stats, overload_reason);
+ comparisons_.emplace_back(std::move(stats_key), std::move(captured),
+ std::move(rendered), dropped,
+ std::move(frame_stats), overload_reason);
}
comparison_available_event_.Set();
StopExcludingCpuThreadTime();
@@ -507,8 +686,8 @@ void DefaultVideoQualityAnalyzer::ProcessComparison(
const FrameStats& frame_stats = comparison.frame_stats;
rtc::CritScope crit(&comparison_lock_);
- auto stats_it = stream_stats_.find(frame_stats.stream_label);
- RTC_CHECK(stats_it != stream_stats_.end());
+ auto stats_it = stream_stats_.find(comparison.stats_key);
+ RTC_CHECK(stats_it != stream_stats_.end()) << comparison.stats_key.ToString();
StreamStats* stats = &stats_it->second;
analyzer_stats_.comparisons_done++;
if (comparison.overload_reason == OverloadReason::kCpu) {
@@ -561,7 +740,7 @@ void DefaultVideoQualityAnalyzer::ProcessComparison(
3 * average_time_between_rendered_frames_ms)) {
stats->freeze_time_ms.AddSample(time_between_rendered_frames.ms());
auto freeze_end_it =
- stream_last_freeze_end_time_.find(frame_stats.stream_label);
+ stream_last_freeze_end_time_.find(comparison.stats_key);
RTC_DCHECK(freeze_end_it != stream_last_freeze_end_time_.end());
stats->time_between_freezes_ms.AddSample(
(frame_stats.prev_frame_rendered_time - freeze_end_it->second)
@@ -578,15 +757,16 @@ void DefaultVideoQualityAnalyzer::ReportResults() {
rtc::CritScope crit1(&lock_);
rtc::CritScope crit2(&comparison_lock_);
for (auto& item : stream_stats_) {
- ReportResults(GetTestCaseName(item.first), item.second,
- stream_frame_counters_.at(item.first));
+ ReportResults(GetTestCaseName(StatsKeyToMetricName(ToStatsKey(item.first))),
+ item.second, stream_frame_counters_.at(item.first));
}
test::PrintResult("cpu_usage", "", test_label_.c_str(), GetCpuUsagePercent(),
"%", false, ImproveDirection::kSmallerIsBetter);
LogFrameCounters("Global", frame_counters_);
for (auto& item : stream_stats_) {
- LogFrameCounters(item.first, stream_frame_counters_.at(item.first));
- LogStreamInternalStats(item.first, item.second);
+ LogFrameCounters(ToStatsKey(item.first).ToString(),
+ stream_frame_counters_.at(item.first));
+ LogStreamInternalStats(ToStatsKey(item.first).ToString(), item.second);
}
if (!analyzer_stats_.comparisons_queue_size.IsEmpty()) {
RTC_LOG(INFO) << "comparisons_queue_size min="
@@ -714,6 +894,20 @@ Timestamp DefaultVideoQualityAnalyzer::Now() {
return clock_->CurrentTime();
}
+StatsKey DefaultVideoQualityAnalyzer::ToStatsKey(
+ const InternalStatsKey& key) const {
+ return StatsKey(streams_.name(key.stream), peers_->name(key.sender),
+ peers_->name(key.receiver));
+}
+
+std::string DefaultVideoQualityAnalyzer::StatsKeyToMetricName(
+ const StatsKey& key) {
+ if (peers_->size() <= 2) {
+ return key.stream_label;
+ }
+ return key.ToString();
+}
+
void DefaultVideoQualityAnalyzer::StartMeasuringCpuProcessTime() {
rtc::CritScope lock(&cpu_measurement_lock_);
cpu_time_ -= rtc::GetProcessCpuTimeNanos();
@@ -741,35 +935,208 @@ double DefaultVideoQualityAnalyzer::GetCpuUsagePercent() {
return static_cast<double>(cpu_time_) / wallclock_time_ * 100.0;
}
-DefaultVideoQualityAnalyzer::FrameStats::FrameStats(std::string stream_label,
- Timestamp captured_time)
- : stream_label(std::move(stream_label)), captured_time(captured_time) {}
-
DefaultVideoQualityAnalyzer::FrameComparison::FrameComparison(
+ InternalStatsKey stats_key,
absl::optional<VideoFrame> captured,
absl::optional<VideoFrame> rendered,
bool dropped,
FrameStats frame_stats,
OverloadReason overload_reason)
- : captured(std::move(captured)),
+ : stats_key(std::move(stats_key)),
+ captured(std::move(captured)),
rendered(std::move(rendered)),
dropped(dropped),
frame_stats(std::move(frame_stats)),
overload_reason(overload_reason) {}
-uint16_t DefaultVideoQualityAnalyzer::StreamState::PopFront() {
- uint16_t frame_id = frame_ids_.front();
- frame_ids_.pop_front();
- if (dead_frames_count_ > 0) {
- dead_frames_count_--;
+uint16_t DefaultVideoQualityAnalyzer::StreamState::PopFront(size_t peer) {
+ absl::optional<uint16_t> frame_id = frame_ids_.PopFront(peer);
+ RTC_DCHECK(frame_id.has_value());
+
+ // If alive's frame queue is longer than all others, than also pop frame from
+ // it, because that frame is received by all receivers.
+ size_t owner_size = frame_ids_.size(owner_);
+ size_t other_size = 0;
+ for (size_t i = 0; i < frame_ids_.readers_count(); ++i) {
+ size_t cur_size = frame_ids_.size(i);
+ if (i != owner_ && cur_size > other_size) {
+ other_size = cur_size;
+ }
}
- return frame_id;
+ if (owner_size > other_size) {
+ absl::optional<uint16_t> alive_frame_id = frame_ids_.PopFront(owner_);
+ RTC_DCHECK(alive_frame_id.has_value());
+ RTC_DCHECK_EQ(frame_id.value(), alive_frame_id.value());
+ }
+
+ return frame_id.value();
}
uint16_t DefaultVideoQualityAnalyzer::StreamState::MarkNextAliveFrameAsDead() {
- uint16_t frame_id = frame_ids_[dead_frames_count_];
- dead_frames_count_++;
- return frame_id;
+ absl::optional<uint16_t> frame_id = frame_ids_.PopFront(owner_);
+ RTC_DCHECK(frame_id.has_value());
+ return frame_id.value();
+}
+
+void DefaultVideoQualityAnalyzer::StreamState::SetLastRenderedFrameTime(
+ size_t peer,
+ Timestamp time) {
+ auto it = last_rendered_frame_time_.find(peer);
+ if (it == last_rendered_frame_time_.end()) {
+ last_rendered_frame_time_.insert({peer, time});
+ } else {
+ it->second = time;
+ }
+}
+
+absl::optional<Timestamp>
+DefaultVideoQualityAnalyzer::StreamState::last_rendered_frame_time(
+ size_t peer) const {
+ return MaybeGetValue(last_rendered_frame_time_, peer);
+}
+
+bool DefaultVideoQualityAnalyzer::FrameInFlight::RemoveFrame() {
+ if (!frame_) {
+ return false;
+ }
+ frame_ = absl::nullopt;
+ return true;
+}
+
+void DefaultVideoQualityAnalyzer::FrameInFlight::SetFrameId(uint16_t id) {
+ if (frame_) {
+ frame_->set_id(id);
+ }
+}
+
+std::vector<size_t>
+DefaultVideoQualityAnalyzer::FrameInFlight::GetPeersWhichDidntReceive() const {
+ std::vector<size_t> out;
+ for (size_t i = 0; i < peers_count_; ++i) {
+ auto it = receiver_stats_.find(i);
+ if (i != owner_ && it != receiver_stats_.end() &&
+ it->second.rendered_time.IsInfinite()) {
+ out.push_back(i);
+ }
+ }
+ return out;
+}
+
+bool DefaultVideoQualityAnalyzer::FrameInFlight::HaveAllPeersReceived() const {
+ for (size_t i = 0; i < peers_count_; ++i) {
+ if (i == owner_) {
+ continue;
+ }
+
+ auto it = receiver_stats_.find(i);
+ if (it == receiver_stats_.end()) {
+ return false;
+ }
+
+ if (!it->second.dropped && it->second.rendered_time.IsInfinite()) {
+ return false;
+ }
+ }
+ return true;
+}
+
+void DefaultVideoQualityAnalyzer::FrameInFlight::OnFrameEncoded(
+ webrtc::Timestamp time,
+ int64_t encoded_image_size,
+ uint32_t target_encode_bitrate) {
+ encoded_time_ = time;
+ encoded_image_size_ = encoded_image_size;
+ target_encode_bitrate_ += target_encode_bitrate;
+}
+
+void DefaultVideoQualityAnalyzer::FrameInFlight::OnFramePreDecode(
+ size_t peer,
+ webrtc::Timestamp received_time,
+ webrtc::Timestamp decode_start_time) {
+ receiver_stats_[peer].received_time = received_time;
+ receiver_stats_[peer].decode_start_time = decode_start_time;
+}
+
+bool DefaultVideoQualityAnalyzer::FrameInFlight::HasReceivedTime(
+ size_t peer) const {
+ auto it = receiver_stats_.find(peer);
+ if (it == receiver_stats_.end()) {
+ return false;
+ }
+ return it->second.received_time.IsFinite();
+}
+
+bool DefaultVideoQualityAnalyzer::FrameInFlight::HasDecodeEndTime(
+ size_t peer) const {
+ auto it = receiver_stats_.find(peer);
+ if (it == receiver_stats_.end()) {
+ return false;
+ }
+ return it->second.decode_end_time.IsFinite();
+}
+
+void DefaultVideoQualityAnalyzer::FrameInFlight::OnFrameRendered(
+ size_t peer,
+ webrtc::Timestamp time,
+ int width,
+ int height) {
+ receiver_stats_[peer].rendered_time = time;
+ receiver_stats_[peer].rendered_frame_width = width;
+ receiver_stats_[peer].rendered_frame_height = height;
+}
+
+bool DefaultVideoQualityAnalyzer::FrameInFlight::HasRenderedTime(
+ size_t peer) const {
+ auto it = receiver_stats_.find(peer);
+ if (it == receiver_stats_.end()) {
+ return false;
+ }
+ return it->second.rendered_time.IsFinite();
+}
+
+DefaultVideoQualityAnalyzer::FrameStats
+DefaultVideoQualityAnalyzer::FrameInFlight::GetStatsForPeer(size_t peer) const {
+ FrameStats stats(captured_time_);
+ stats.pre_encode_time = pre_encode_time_;
+ stats.encoded_time = encoded_time_;
+ stats.target_encode_bitrate = target_encode_bitrate_;
+ stats.encoded_image_size = encoded_image_size_;
+
+ absl::optional<ReceiverFrameStats> receiver_stats =
+ MaybeGetValue<ReceiverFrameStats>(receiver_stats_, peer);
+ if (receiver_stats.has_value()) {
+ stats.received_time = receiver_stats->received_time;
+ stats.decode_start_time = receiver_stats->decode_start_time;
+ stats.decode_end_time = receiver_stats->decode_end_time;
+ stats.rendered_time = receiver_stats->rendered_time;
+ stats.prev_frame_rendered_time = receiver_stats->prev_frame_rendered_time;
+ stats.rendered_frame_width = receiver_stats->rendered_frame_width;
+ stats.rendered_frame_height = receiver_stats->rendered_frame_height;
+ }
+ return stats;
+}
+
+size_t DefaultVideoQualityAnalyzer::NamesCollection::AddIfAbsent(
+ absl::string_view name) {
+ auto it = index_.find(name);
+ if (it != index_.end()) {
+ return it->second;
+ }
+ size_t out = names_.size();
+ size_t old_capacity = names_.capacity();
+ names_.emplace_back(name);
+ size_t new_capacity = names_.capacity();
+
+ if (old_capacity == new_capacity) {
+ index_.emplace(names_[out], out);
+ } else {
+ // Reallocation happened in the vector, so we need to rebuild |index_|
+ index_.clear();
+ for (size_t i = 0; i < names_.size(); ++i) {
+ index_.emplace(names_[i], i);
+ }
+ }
+ return out;
}
} // namespace webrtc_pc_e2e
diff --git a/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h b/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h
index 6bebb0f02b2..95049b10548 100644
--- a/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h
+++ b/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h
@@ -19,6 +19,7 @@
#include <string>
#include <vector>
+#include "api/array_view.h"
#include "api/test/video_quality_analyzer_interface.h"
#include "api/units/timestamp.h"
#include "api/video/encoded_image.h"
@@ -28,6 +29,7 @@
#include "rtc_base/numerics/samples_stats_counter.h"
#include "rtc_base/platform_thread.h"
#include "system_wrappers/include/clock.h"
+#include "test/pc/e2e/analyzer/video/multi_head_queue.h"
#include "test/testsupport/perf_test.h"
namespace webrtc {
@@ -36,7 +38,7 @@ namespace webrtc_pc_e2e {
// WebRTC will request a key frame after 3 seconds if no frames were received.
// We assume max frame rate ~60 fps, so 270 frames will cover max freeze without
// key frame request.
-constexpr int kDefaultMaxFramesInFlightPerStream = 270;
+constexpr size_t kDefaultMaxFramesInFlightPerStream = 270;
class RateCounter {
public:
@@ -124,52 +126,102 @@ struct AnalyzerStats {
// it is queued when its captured frame was already removed due to high memory
// usage for that video stream.
int64_t memory_overloaded_comparisons_done = 0;
+ // Count of frames in flight in analyzer measured when new comparison is added
+ // and after analyzer was stopped.
+ SamplesStatsCounter frames_in_flight_left_count;
};
+struct StatsKey {
+ StatsKey(std::string stream_label, std::string sender, std::string receiver)
+ : stream_label(std::move(stream_label)),
+ sender(std::move(sender)),
+ receiver(std::move(receiver)) {}
+
+ std::string ToString() const;
+
+ // Label of video stream to which stats belongs to.
+ std::string stream_label;
+ // Name of the peer which send this stream.
+ std::string sender;
+ // Name of the peer on which stream was received.
+ std::string receiver;
+};
+
+// Required to use StatsKey as std::map key.
+bool operator<(const StatsKey& a, const StatsKey& b);
+bool operator==(const StatsKey& a, const StatsKey& b);
+
+struct InternalStatsKey {
+ InternalStatsKey(size_t stream, size_t sender, size_t receiver)
+ : stream(stream), sender(sender), receiver(receiver) {}
+
+ std::string ToString() const;
+
+ size_t stream;
+ size_t sender;
+ size_t receiver;
+};
+
+// Required to use InternalStatsKey as std::map key.
+bool operator<(const InternalStatsKey& a, const InternalStatsKey& b);
+bool operator==(const InternalStatsKey& a, const InternalStatsKey& b);
+
class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
public:
explicit DefaultVideoQualityAnalyzer(
bool heavy_metrics_computation_enabled = true,
- int max_frames_in_flight_per_stream_count =
+ size_t max_frames_in_flight_per_stream_count =
kDefaultMaxFramesInFlightPerStream);
~DefaultVideoQualityAnalyzer() override;
- void Start(std::string test_case_name, int max_threads_count) override;
- uint16_t OnFrameCaptured(const std::string& stream_label,
+ void Start(std::string test_case_name,
+ rtc::ArrayView<const std::string> peer_names,
+ int max_threads_count) override;
+ uint16_t OnFrameCaptured(absl::string_view peer_name,
+ const std::string& stream_label,
const VideoFrame& frame) override;
- void OnFramePreEncode(const VideoFrame& frame) override;
- void OnFrameEncoded(uint16_t frame_id,
+ void OnFramePreEncode(absl::string_view peer_name,
+ const VideoFrame& frame) override;
+ void OnFrameEncoded(absl::string_view peer_name,
+ uint16_t frame_id,
const EncodedImage& encoded_image,
const EncoderStats& stats) override;
- void OnFrameDropped(EncodedImageCallback::DropReason reason) override;
- void OnFramePreDecode(uint16_t frame_id,
+ void OnFrameDropped(absl::string_view peer_name,
+ EncodedImageCallback::DropReason reason) override;
+ void OnFramePreDecode(absl::string_view peer_name,
+ uint16_t frame_id,
const EncodedImage& input_image) override;
- void OnFrameDecoded(const VideoFrame& frame,
+ void OnFrameDecoded(absl::string_view peer_name,
+ const VideoFrame& frame,
const DecoderStats& stats) override;
- void OnFrameRendered(const VideoFrame& frame) override;
- void OnEncoderError(const VideoFrame& frame, int32_t error_code) override;
- void OnDecoderError(uint16_t frame_id, int32_t error_code) override;
+ void OnFrameRendered(absl::string_view peer_name,
+ const VideoFrame& frame) override;
+ void OnEncoderError(absl::string_view peer_name,
+ const VideoFrame& frame,
+ int32_t error_code) override;
+ void OnDecoderError(absl::string_view peer_name,
+ uint16_t frame_id,
+ int32_t error_code) override;
void Stop() override;
std::string GetStreamLabel(uint16_t frame_id) override;
- void OnStatsReports(const std::string& pc_label,
- const StatsReports& stats_reports) override {}
+ void OnStatsReports(
+ absl::string_view pc_label,
+ const rtc::scoped_refptr<const RTCStatsReport>& report) override {}
// Returns set of stream labels, that were met during test call.
- std::set<std::string> GetKnownVideoStreams() const;
+ std::set<StatsKey> GetKnownVideoStreams() const;
const FrameCounters& GetGlobalCounters() const;
// Returns frame counter per stream label. Valid stream labels can be obtained
// by calling GetKnownVideoStreams()
- const std::map<std::string, FrameCounters>& GetPerStreamCounters() const;
+ std::map<StatsKey, FrameCounters> GetPerStreamCounters() const;
// Returns video quality stats per stream label. Valid stream labels can be
// obtained by calling GetKnownVideoStreams()
- std::map<std::string, StreamStats> GetStats() const;
+ std::map<StatsKey, StreamStats> GetStats() const;
AnalyzerStats GetAnalyzerStats() const;
private:
struct FrameStats {
- FrameStats(std::string stream_label, Timestamp captured_time);
-
- std::string stream_label;
+ FrameStats(Timestamp captured_time) : captured_time(captured_time) {}
// Frame events timestamp.
Timestamp captured_time;
@@ -182,12 +234,11 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
Timestamp rendered_time = Timestamp::MinusInfinity();
Timestamp prev_frame_rendered_time = Timestamp::MinusInfinity();
+ int64_t encoded_image_size = 0;
uint32_t target_encode_bitrate = 0;
absl::optional<int> rendered_frame_width = absl::nullopt;
absl::optional<int> rendered_frame_height = absl::nullopt;
-
- int64_t encoded_image_size = 0;
};
// Describes why comparison was done in overloaded mode (without calculating
@@ -209,12 +260,14 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
// because there were too many comparisons in the queue. |dropped| can be
// true or false showing was frame dropped or not.
struct FrameComparison {
- FrameComparison(absl::optional<VideoFrame> captured,
+ FrameComparison(InternalStatsKey stats_key,
+ absl::optional<VideoFrame> captured,
absl::optional<VideoFrame> rendered,
bool dropped,
FrameStats frame_stats,
OverloadReason overload_reason);
+ InternalStatsKey stats_key;
+ // Frames can be omitted if there are too many computations waiting in the
+ // queue.
absl::optional<VideoFrame> captured;
@@ -230,49 +283,175 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
// Represents a current state of video stream.
class StreamState {
public:
- void PushBack(uint16_t frame_id) { frame_ids_.emplace_back(frame_id); }
-
- uint16_t PopFront();
-
- bool Empty() { return frame_ids_.empty(); }
+ StreamState(size_t owner, size_t peers_count)
+ : owner_(owner), frame_ids_(peers_count) {}
- uint16_t Front() { return frame_ids_.front(); }
+ size_t owner() const { return owner_; }
- int GetAliveFramesCount() { return frame_ids_.size() - dead_frames_count_; }
+ void PushBack(uint16_t frame_id) { frame_ids_.PushBack(frame_id); }
+ // Crash if state is empty.
+ uint16_t PopFront(size_t peer);
+ bool IsEmpty(size_t peer) const { return frame_ids_.IsEmpty(peer); }
+ // Crash if state is empty.
+ uint16_t Front(size_t peer) const { return frame_ids_.Front(peer).value(); }
+ size_t GetAliveFramesCount() { return frame_ids_.size(owner_); }
uint16_t MarkNextAliveFrameAsDead();
- void set_last_rendered_frame_time(Timestamp time) {
- last_rendered_frame_time_ = time;
- }
- absl::optional<Timestamp> last_rendered_frame_time() const {
- return last_rendered_frame_time_;
- }
+ void SetLastRenderedFrameTime(size_t peer, Timestamp time);
+ absl::optional<Timestamp> last_rendered_frame_time(size_t peer) const;
private:
+ // Index of the owner. Owner's queue in |frame_ids_| will keep alive frames.
+ const size_t owner_;
// To correctly determine dropped frames we have to know sequence of frames
// in each stream so we will keep a list of frame ids inside the stream.
- // When the frame is rendered, we will pop ids from the list for until id
- // will match with rendered one. All ids before matched one can be
- // considered as dropped:
+ // This list is represented by a multi-head queue of frame ids with a
+ // separate head for each receiver. When a frame is rendered, we pop ids from
+ // the corresponding head until the id matches the rendered one. All ids
+ // before the matched one can be considered dropped:
//
// | frame_id1 |->| frame_id2 |->| frame_id3 |->| frame_id4 |
//
// If we received frame with id frame_id3, then we will pop frame_id1 and
// frame_id2 and consider that frames as dropped and then compare received
// frame with the one from |captured_frames_in_flight_| with id frame_id3.
- std::deque<uint16_t> frame_ids_;
- // Count of dead frames in the beginning of the deque.
- int dead_frames_count_;
- absl::optional<Timestamp> last_rendered_frame_time_ = absl::nullopt;
+ //
+ // To track alive frames (frames whose payload is still stored in
+ // |captured_frames_in_flight_|), the head which corresponds to |owner_| is
+ // used, so that head points to the first alive frame in the frames list.
+ MultiHeadQueue<uint16_t> frame_ids_;
+ std::map<size_t, Timestamp> last_rendered_frame_time_;
};
enum State { kNew, kActive, kStopped };
- void AddComparison(absl::optional<VideoFrame> captured,
+ struct ReceiverFrameStats {
+ // Time when last packet of a frame was received.
+ Timestamp received_time = Timestamp::MinusInfinity();
+ Timestamp decode_start_time = Timestamp::MinusInfinity();
+ Timestamp decode_end_time = Timestamp::MinusInfinity();
+ Timestamp rendered_time = Timestamp::MinusInfinity();
+ Timestamp prev_frame_rendered_time = Timestamp::MinusInfinity();
+
+ absl::optional<int> rendered_frame_width = absl::nullopt;
+ absl::optional<int> rendered_frame_height = absl::nullopt;
+
+ bool dropped = false;
+ };
+
+ class FrameInFlight {
+ public:
+ FrameInFlight(size_t stream,
+ VideoFrame frame,
+ Timestamp captured_time,
+ size_t owner,
+ size_t peers_count)
+ : stream_(stream),
+ owner_(owner),
+ peers_count_(peers_count),
+ frame_(std::move(frame)),
+ captured_time_(captured_time) {}
+
+ size_t stream() const { return stream_; }
+ const absl::optional<VideoFrame>& frame() const { return frame_; }
+ // Returns whether the frame was removed or not.
+ bool RemoveFrame();
+ void SetFrameId(uint16_t id);
+
+ std::vector<size_t> GetPeersWhichDidntReceive() const;
+ bool HaveAllPeersReceived() const;
+
+ void SetPreEncodeTime(webrtc::Timestamp time) { pre_encode_time_ = time; }
+
+ void OnFrameEncoded(webrtc::Timestamp time,
+ int64_t encoded_image_size,
+ uint32_t target_encode_bitrate);
+
+ bool HasEncodedTime() const { return encoded_time_.IsFinite(); }
+
+ void OnFramePreDecode(size_t peer,
+ webrtc::Timestamp received_time,
+ webrtc::Timestamp decode_start_time);
+
+ bool HasReceivedTime(size_t peer) const;
+
+ void SetDecodeEndTime(size_t peer, webrtc::Timestamp time) {
+ receiver_stats_[peer].decode_end_time = time;
+ }
+
+ bool HasDecodeEndTime(size_t peer) const;
+
+ void OnFrameRendered(size_t peer,
+ webrtc::Timestamp time,
+ int width,
+ int height);
+
+ bool HasRenderedTime(size_t peer) const;
+
+ // Crash if rendered time is not set for specified |peer|.
+ webrtc::Timestamp rendered_time(size_t peer) const {
+ return receiver_stats_.at(peer).rendered_time;
+ }
+
+ void MarkDropped(size_t peer) { receiver_stats_[peer].dropped = true; }
+
+ void SetPrevFrameRenderedTime(size_t peer, webrtc::Timestamp time) {
+ receiver_stats_[peer].prev_frame_rendered_time = time;
+ }
+
+ FrameStats GetStatsForPeer(size_t peer) const;
+
+ private:
+ const size_t stream_;
+ const size_t owner_;
+ const size_t peers_count_;
+ absl::optional<VideoFrame> frame_;
+
+ // Frame events timestamp.
+ Timestamp captured_time_;
+ Timestamp pre_encode_time_ = Timestamp::MinusInfinity();
+ Timestamp encoded_time_ = Timestamp::MinusInfinity();
+ int64_t encoded_image_size_ = 0;
+ uint32_t target_encode_bitrate_ = 0;
+ std::map<size_t, ReceiverFrameStats> receiver_stats_;
+ };
+
+ class NamesCollection {
+ public:
+ NamesCollection() = default;
+ explicit NamesCollection(rtc::ArrayView<const std::string> names) {
+ names_ = std::vector<std::string>(names.begin(), names.end());
+ for (size_t i = 0; i < names_.size(); ++i) {
+ index_.emplace(names_[i], i);
+ }
+ }
+
+ size_t size() const { return names_.size(); }
+
+ size_t index(absl::string_view name) const { return index_.at(name); }
+
+ const std::string& name(size_t index) const { return names_[index]; }
+
+ bool HasName(absl::string_view name) const {
+ return index_.find(name) != index_.end();
+ }
+
+ // Adds the specified |name| to the collection if it isn't already present.
+ // Returns the index which corresponds to the specified |name|.
+ size_t AddIfAbsent(absl::string_view name);
+
+ private:
+ std::vector<std::string> names_;
+ std::map<absl::string_view, size_t> index_;
+ };
+
+ void AddComparison(InternalStatsKey stats_key,
+ absl::optional<VideoFrame> captured,
absl::optional<VideoFrame> rendered,
bool dropped,
- FrameStats frame_stats);
+ FrameStats frame_stats)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(comparison_lock_);
static void ProcessComparisonsThread(void* obj);
void ProcessComparisons();
void ProcessComparison(const FrameComparison& comparison);
@@ -292,6 +471,11 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
// Returns name of current test case for reporting.
std::string GetTestCaseName(const std::string& stream_label) const;
Timestamp Now();
+ StatsKey ToStatsKey(const InternalStatsKey& key) const
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_);
+ // Returns the string representation of the stats key for metrics naming.
+ // Used for backward compatibility in metrics naming for the 2-peer case.
+ std::string StatsKeyToMetricName(const StatsKey& key);
void StartMeasuringCpuProcessTime();
void StopMeasuringCpuProcessTime();
@@ -300,15 +484,19 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
double GetCpuUsagePercent();
const bool heavy_metrics_computation_enabled_;
- const int max_frames_in_flight_per_stream_count_;
+ const size_t max_frames_in_flight_per_stream_count_;
webrtc::Clock* const clock_;
std::atomic<uint16_t> next_frame_id_{0};
std::string test_label_;
+ std::unique_ptr<NamesCollection> peers_;
rtc::CriticalSection lock_;
State state_ RTC_GUARDED_BY(lock_) = State::kNew;
Timestamp start_time_ RTC_GUARDED_BY(lock_) = Timestamp::MinusInfinity();
+ // Mapping from stream label to unique size_t value to use in stats and avoid
+ // extra string copying.
+ NamesCollection streams_ RTC_GUARDED_BY(lock_);
// Frames that were captured by all streams and still aren't rendered by any
// stream or deemed dropped. Frame with id X can be removed from this map if:
// 1. The frame with id X was received in OnFrameRendered
@@ -316,27 +504,29 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
// 3. Next available frame id for newly captured frame is X
// 4. There too many frames in flight for current video stream and X is the
// oldest frame id in this stream.
- std::map<uint16_t, VideoFrame> captured_frames_in_flight_
+ std::map<uint16_t, FrameInFlight> captured_frames_in_flight_
RTC_GUARDED_BY(lock_);
// Global frames count for all video streams.
FrameCounters frame_counters_ RTC_GUARDED_BY(lock_);
- // Frame counters per each stream.
- std::map<std::string, FrameCounters> stream_frame_counters_
+ // Frame counters per each stream per each receiver.
+ std::map<InternalStatsKey, FrameCounters> stream_frame_counters_
RTC_GUARDED_BY(lock_);
- std::map<uint16_t, FrameStats> frame_stats_ RTC_GUARDED_BY(lock_);
- std::map<std::string, StreamState> stream_states_ RTC_GUARDED_BY(lock_);
-
- // Stores history mapping between stream labels and frame ids. Updated when
- // frame id overlap. It required to properly return stream label after 1st
- // frame from simulcast streams was already rendered and last is still
- // encoding.
- std::map<std::string, std::set<uint16_t>> stream_to_frame_id_history_
+ // Map from stream index in |streams_| to its StreamState.
+ std::map<size_t, StreamState> stream_states_ RTC_GUARDED_BY(lock_);
+ // Map from stream index in |streams_| to sender peer index in |peers_|.
+ std::map<size_t, size_t> stream_to_sender_ RTC_GUARDED_BY(lock_);
+
+ // Stores history mapping between stream index in |streams_| and frame ids.
+ // Updated when frame ids overlap. It is required to properly return the
+ // stream label after the 1st frame from simulcast streams was already
+ // rendered while the last one is still encoding.
+ std::map<size_t, std::set<uint16_t>> stream_to_frame_id_history_
RTC_GUARDED_BY(lock_);
rtc::CriticalSection comparison_lock_;
- std::map<std::string, StreamStats> stream_stats_
+ std::map<InternalStatsKey, StreamStats> stream_stats_
RTC_GUARDED_BY(comparison_lock_);
- std::map<std::string, Timestamp> stream_last_freeze_end_time_
+ std::map<InternalStatsKey, Timestamp> stream_last_freeze_end_time_
RTC_GUARDED_BY(comparison_lock_);
std::deque<FrameComparison> comparisons_ RTC_GUARDED_BY(comparison_lock_);
AnalyzerStats analyzer_stats_ RTC_GUARDED_BY(comparison_lock_);
diff --git a/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_test.cc b/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_test.cc
index 1bc29c5f091..55cc438b9d0 100644
--- a/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_test.cc
+++ b/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_test.cc
@@ -18,6 +18,7 @@
#include "api/video/encoded_image.h"
#include "api/video/i420_buffer.h"
#include "api/video/video_frame.h"
+#include "rtc_base/strings/string_builder.h"
#include "system_wrappers/include/sleep.h"
#include "test/gtest.h"
#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer.h"
@@ -26,11 +27,15 @@ namespace webrtc {
namespace webrtc_pc_e2e {
namespace {
+using StatsSample = ::webrtc::SamplesStatsCounter::StatsSample;
+
constexpr int kAnalyzerMaxThreadsCount = 1;
constexpr int kMaxFramesInFlightPerStream = 10;
constexpr int kFrameWidth = 320;
constexpr int kFrameHeight = 240;
constexpr char kStreamLabel[] = "video-stream";
+constexpr char kSenderPeerName[] = "alice";
+constexpr char kReceiverPeerName[] = "bob";
VideoFrame NextFrame(test::FrameGeneratorInterface* frame_generator,
int64_t timestamp_us) {
@@ -64,6 +69,24 @@ VideoFrame DeepCopy(const VideoFrame& frame) {
return copy;
}
+std::vector<StatsSample> GetSortedSamples(const SamplesStatsCounter& counter) {
+ rtc::ArrayView<const StatsSample> view = counter.GetTimedSamples();
+ std::vector<StatsSample> out(view.begin(), view.end());
+ std::sort(out.begin(), out.end(),
+ [](const StatsSample& a, const StatsSample& b) {
+ return a.time < b.time;
+ });
+ return out;
+}
+
+std::string ToString(const std::vector<StatsSample>& values) {
+ rtc::StringBuilder out;
+ for (const auto& v : values) {
+ out << "{ time_ms=" << v.time.ms() << "; value=" << v.value << "}, ";
+ }
+ return out.str();
+}
+
TEST(DefaultVideoQualityAnalyzerTest,
MemoryOverloadedAndThenAllFramesReceived) {
std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
@@ -73,26 +96,30 @@ TEST(DefaultVideoQualityAnalyzerTest,
DefaultVideoQualityAnalyzer analyzer(
/*heavy_metrics_computation_enabled=*/false, kMaxFramesInFlightPerStream);
- analyzer.Start("test_case", kAnalyzerMaxThreadsCount);
+ analyzer.Start("test_case",
+ std::vector<std::string>{kSenderPeerName, kReceiverPeerName},
+ kAnalyzerMaxThreadsCount);
std::map<uint16_t, VideoFrame> captured_frames;
std::vector<uint16_t> frames_order;
for (int i = 0; i < kMaxFramesInFlightPerStream * 2; ++i) {
VideoFrame frame = NextFrame(frame_generator.get(), i);
- frame.set_id(analyzer.OnFrameCaptured(kStreamLabel, frame));
+ frame.set_id(
+ analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame));
frames_order.push_back(frame.id());
captured_frames.insert({frame.id(), frame});
- analyzer.OnFramePreEncode(frame);
- analyzer.OnFrameEncoded(frame.id(), FakeEncode(frame),
+ analyzer.OnFramePreEncode(kSenderPeerName, frame);
+ analyzer.OnFrameEncoded(kSenderPeerName, frame.id(), FakeEncode(frame),
VideoQualityAnalyzerInterface::EncoderStats());
}
for (const uint16_t& frame_id : frames_order) {
VideoFrame received_frame = DeepCopy(captured_frames.at(frame_id));
- analyzer.OnFramePreDecode(received_frame.id(), FakeEncode(received_frame));
- analyzer.OnFrameDecoded(received_frame,
+ analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(),
+ FakeEncode(received_frame));
+ analyzer.OnFrameDecoded(kReceiverPeerName, received_frame,
VideoQualityAnalyzerInterface::DecoderStats());
- analyzer.OnFrameRendered(received_frame);
+ analyzer.OnFrameRendered(kReceiverPeerName, received_frame);
}
// Give analyzer some time to process frames on async thread. The computations
@@ -112,6 +139,87 @@ TEST(DefaultVideoQualityAnalyzerTest,
}
TEST(DefaultVideoQualityAnalyzerTest,
+ FillMaxMemoryReceiveAllMemoryOverloadedAndThenAllFramesReceived) {
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+ /*type=*/absl::nullopt,
+ /*num_squares=*/absl::nullopt);
+
+ DefaultVideoQualityAnalyzer analyzer(
+ /*heavy_metrics_computation_enabled=*/false, kMaxFramesInFlightPerStream);
+ analyzer.Start("test_case",
+ std::vector<std::string>{kSenderPeerName, kReceiverPeerName},
+ kAnalyzerMaxThreadsCount);
+
+ std::map<uint16_t, VideoFrame> captured_frames;
+ std::vector<uint16_t> frames_order;
+ // Fill analyzer's memory up to the limit
+ for (int i = 0; i < kMaxFramesInFlightPerStream; ++i) {
+ VideoFrame frame = NextFrame(frame_generator.get(), i);
+ frame.set_id(
+ analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame));
+ frames_order.push_back(frame.id());
+ captured_frames.insert({frame.id(), frame});
+ analyzer.OnFramePreEncode(kSenderPeerName, frame);
+ analyzer.OnFrameEncoded(kSenderPeerName, frame.id(), FakeEncode(frame),
+ VideoQualityAnalyzerInterface::EncoderStats());
+ }
+
+ // Receive all frames.
+ for (const uint16_t& frame_id : frames_order) {
+ VideoFrame received_frame = DeepCopy(captured_frames.at(frame_id));
+ analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(),
+ FakeEncode(received_frame));
+ analyzer.OnFrameDecoded(kReceiverPeerName, received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ analyzer.OnFrameRendered(kReceiverPeerName, received_frame);
+ }
+ frames_order.clear();
+
+ // Give analyzer some time to process frames on async thread. The computations
+ // have to be fast (heavy metrics are disabled!), so if it doesn't fit in
+ // 100ms it means we have an issue!
+ SleepMs(100);
+
+ // Overload analyzer's memory beyond the limit
+ for (int i = 0; i < 2 * kMaxFramesInFlightPerStream; ++i) {
+ VideoFrame frame = NextFrame(frame_generator.get(), i);
+ frame.set_id(
+ analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame));
+ frames_order.push_back(frame.id());
+ captured_frames.insert({frame.id(), frame});
+ analyzer.OnFramePreEncode(kSenderPeerName, frame);
+ analyzer.OnFrameEncoded(kSenderPeerName, frame.id(), FakeEncode(frame),
+ VideoQualityAnalyzerInterface::EncoderStats());
+ }
+
+ // Receive all frames.
+ for (const uint16_t& frame_id : frames_order) {
+ VideoFrame received_frame = DeepCopy(captured_frames.at(frame_id));
+ analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(),
+ FakeEncode(received_frame));
+ analyzer.OnFrameDecoded(kReceiverPeerName, received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ analyzer.OnFrameRendered(kReceiverPeerName, received_frame);
+ }
+
+ // Give analyzer some time to process frames on async thread. The computations
+ // have to be fast (heavy metrics are disabled!), so if it doesn't fit in
+ // 100ms it means we have an issue!
+ SleepMs(100);
+ analyzer.Stop();
+
+ AnalyzerStats stats = analyzer.GetAnalyzerStats();
+ EXPECT_EQ(stats.memory_overloaded_comparisons_done,
+ kMaxFramesInFlightPerStream);
+ EXPECT_EQ(stats.comparisons_done, kMaxFramesInFlightPerStream * 3);
+ FrameCounters frame_counters = analyzer.GetGlobalCounters();
+ EXPECT_EQ(frame_counters.captured, kMaxFramesInFlightPerStream * 3);
+ EXPECT_EQ(frame_counters.rendered, kMaxFramesInFlightPerStream * 3);
+ EXPECT_EQ(frame_counters.dropped, 0);
+}
+
+TEST(DefaultVideoQualityAnalyzerTest,
MemoryOverloadedHalfDroppedAndThenHalfFramesReceived) {
std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
@@ -120,27 +228,31 @@ TEST(DefaultVideoQualityAnalyzerTest,
DefaultVideoQualityAnalyzer analyzer(
/*heavy_metrics_computation_enabled=*/false, kMaxFramesInFlightPerStream);
- analyzer.Start("test_case", kAnalyzerMaxThreadsCount);
+ analyzer.Start("test_case",
+ std::vector<std::string>{kSenderPeerName, kReceiverPeerName},
+ kAnalyzerMaxThreadsCount);
std::map<uint16_t, VideoFrame> captured_frames;
std::vector<uint16_t> frames_order;
for (int i = 0; i < kMaxFramesInFlightPerStream * 2; ++i) {
VideoFrame frame = NextFrame(frame_generator.get(), i);
- frame.set_id(analyzer.OnFrameCaptured(kStreamLabel, frame));
+ frame.set_id(
+ analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame));
frames_order.push_back(frame.id());
captured_frames.insert({frame.id(), frame});
- analyzer.OnFramePreEncode(frame);
- analyzer.OnFrameEncoded(frame.id(), FakeEncode(frame),
+ analyzer.OnFramePreEncode(kSenderPeerName, frame);
+ analyzer.OnFrameEncoded(kSenderPeerName, frame.id(), FakeEncode(frame),
VideoQualityAnalyzerInterface::EncoderStats());
}
for (size_t i = kMaxFramesInFlightPerStream; i < frames_order.size(); ++i) {
uint16_t frame_id = frames_order.at(i);
VideoFrame received_frame = DeepCopy(captured_frames.at(frame_id));
- analyzer.OnFramePreDecode(received_frame.id(), FakeEncode(received_frame));
- analyzer.OnFrameDecoded(received_frame,
+ analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(),
+ FakeEncode(received_frame));
+ analyzer.OnFrameDecoded(kReceiverPeerName, received_frame,
VideoQualityAnalyzerInterface::DecoderStats());
- analyzer.OnFrameRendered(received_frame);
+ analyzer.OnFrameRendered(kReceiverPeerName, received_frame);
}
// Give analyzer some time to process frames on async thread. The computations
@@ -166,27 +278,31 @@ TEST(DefaultVideoQualityAnalyzerTest, NormalScenario) {
DefaultVideoQualityAnalyzer analyzer(
/*heavy_metrics_computation_enabled=*/false, kMaxFramesInFlightPerStream);
- analyzer.Start("test_case", kAnalyzerMaxThreadsCount);
+ analyzer.Start("test_case",
+ std::vector<std::string>{kSenderPeerName, kReceiverPeerName},
+ kAnalyzerMaxThreadsCount);
std::map<uint16_t, VideoFrame> captured_frames;
std::vector<uint16_t> frames_order;
for (int i = 0; i < kMaxFramesInFlightPerStream; ++i) {
VideoFrame frame = NextFrame(frame_generator.get(), i);
- frame.set_id(analyzer.OnFrameCaptured(kStreamLabel, frame));
+ frame.set_id(
+ analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame));
frames_order.push_back(frame.id());
captured_frames.insert({frame.id(), frame});
- analyzer.OnFramePreEncode(frame);
- analyzer.OnFrameEncoded(frame.id(), FakeEncode(frame),
+ analyzer.OnFramePreEncode(kSenderPeerName, frame);
+ analyzer.OnFrameEncoded(kSenderPeerName, frame.id(), FakeEncode(frame),
VideoQualityAnalyzerInterface::EncoderStats());
}
for (size_t i = 1; i < frames_order.size(); i += 2) {
uint16_t frame_id = frames_order.at(i);
VideoFrame received_frame = DeepCopy(captured_frames.at(frame_id));
- analyzer.OnFramePreDecode(received_frame.id(), FakeEncode(received_frame));
- analyzer.OnFrameDecoded(received_frame,
+ analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(),
+ FakeEncode(received_frame));
+ analyzer.OnFrameDecoded(kReceiverPeerName, received_frame,
VideoQualityAnalyzerInterface::DecoderStats());
- analyzer.OnFrameRendered(received_frame);
+ analyzer.OnFrameRendered(kReceiverPeerName, received_frame);
}
// Give analyzer some time to process frames on async thread. The computations
@@ -199,6 +315,11 @@ TEST(DefaultVideoQualityAnalyzerTest, NormalScenario) {
EXPECT_EQ(stats.memory_overloaded_comparisons_done, 0);
EXPECT_EQ(stats.comparisons_done, kMaxFramesInFlightPerStream);
+ std::vector<StatsSample> frames_in_flight_sizes =
+ GetSortedSamples(stats.frames_in_flight_left_count);
+ EXPECT_EQ(frames_in_flight_sizes.back().value, 0)
+ << ToString(frames_in_flight_sizes);
+
FrameCounters frame_counters = analyzer.GetGlobalCounters();
EXPECT_EQ(frame_counters.captured, kMaxFramesInFlightPerStream);
EXPECT_EQ(frame_counters.received, kMaxFramesInFlightPerStream / 2);
@@ -207,6 +328,231 @@ TEST(DefaultVideoQualityAnalyzerTest, NormalScenario) {
EXPECT_EQ(frame_counters.dropped, kMaxFramesInFlightPerStream / 2);
}
+TEST(DefaultVideoQualityAnalyzerTest, OneFrameReceivedTwice) {
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+ /*type=*/absl::nullopt,
+ /*num_squares=*/absl::nullopt);
+
+ DefaultVideoQualityAnalyzer analyzer(
+ /*heavy_metrics_computation_enabled=*/false, kMaxFramesInFlightPerStream);
+ analyzer.Start("test_case",
+ std::vector<std::string>{kSenderPeerName, kReceiverPeerName},
+ kAnalyzerMaxThreadsCount);
+
+ VideoFrame captured_frame = NextFrame(frame_generator.get(), 0);
+ captured_frame.set_id(
+ analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, captured_frame));
+ analyzer.OnFramePreEncode(kSenderPeerName, captured_frame);
+ analyzer.OnFrameEncoded(kSenderPeerName, captured_frame.id(),
+ FakeEncode(captured_frame),
+ VideoQualityAnalyzerInterface::EncoderStats());
+
+ VideoFrame received_frame = DeepCopy(captured_frame);
+ analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(),
+ FakeEncode(received_frame));
+ analyzer.OnFrameDecoded(kReceiverPeerName, received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ analyzer.OnFrameRendered(kReceiverPeerName, received_frame);
+
+ received_frame = DeepCopy(captured_frame);
+ analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(),
+ FakeEncode(received_frame));
+ analyzer.OnFrameDecoded(kReceiverPeerName, received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ analyzer.OnFrameRendered(kReceiverPeerName, received_frame);
+
+ // Give analyzer some time to process frames on async thread. The computations
+ // have to be fast (heavy metrics are disabled!), so if it doesn't fit in
+ // 100ms it means we have an issue!
+ SleepMs(100);
+ analyzer.Stop();
+
+ AnalyzerStats stats = analyzer.GetAnalyzerStats();
+ EXPECT_EQ(stats.memory_overloaded_comparisons_done, 0);
+ EXPECT_EQ(stats.comparisons_done, 1);
+
+ FrameCounters frame_counters = analyzer.GetGlobalCounters();
+ EXPECT_EQ(frame_counters.captured, 1);
+ EXPECT_EQ(frame_counters.received, 1);
+ EXPECT_EQ(frame_counters.decoded, 1);
+ EXPECT_EQ(frame_counters.rendered, 1);
+ EXPECT_EQ(frame_counters.dropped, 0);
+}
+
+TEST(DefaultVideoQualityAnalyzerTest, NormalScenario2Receivers) {
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+ /*type=*/absl::nullopt,
+ /*num_squares=*/absl::nullopt);
+
+ constexpr char kAlice[] = "alice";
+ constexpr char kBob[] = "bob";
+ constexpr char kCharlie[] = "charlie";
+
+ DefaultVideoQualityAnalyzer analyzer(
+ /*heavy_metrics_computation_enabled=*/false, kMaxFramesInFlightPerStream);
+ analyzer.Start("test_case", std::vector<std::string>{kAlice, kBob, kCharlie},
+ kAnalyzerMaxThreadsCount);
+
+ std::map<uint16_t, VideoFrame> captured_frames;
+ std::vector<uint16_t> frames_order;
+ for (int i = 0; i < kMaxFramesInFlightPerStream; ++i) {
+ VideoFrame frame = NextFrame(frame_generator.get(), i);
+ frame.set_id(analyzer.OnFrameCaptured(kAlice, kStreamLabel, frame));
+ frames_order.push_back(frame.id());
+ captured_frames.insert({frame.id(), frame});
+ analyzer.OnFramePreEncode(kAlice, frame);
+ SleepMs(20);
+ analyzer.OnFrameEncoded(kAlice, frame.id(), FakeEncode(frame),
+ VideoQualityAnalyzerInterface::EncoderStats());
+ }
+
+ SleepMs(50);
+
+ for (size_t i = 1; i < frames_order.size(); i += 2) {
+ uint16_t frame_id = frames_order.at(i);
+ VideoFrame received_frame = DeepCopy(captured_frames.at(frame_id));
+ analyzer.OnFramePreDecode(kBob, received_frame.id(),
+ FakeEncode(received_frame));
+ SleepMs(30);
+ analyzer.OnFrameDecoded(kBob, received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ SleepMs(10);
+ analyzer.OnFrameRendered(kBob, received_frame);
+ }
+
+ for (size_t i = 1; i < frames_order.size(); i += 2) {
+ uint16_t frame_id = frames_order.at(i);
+ VideoFrame received_frame = DeepCopy(captured_frames.at(frame_id));
+ analyzer.OnFramePreDecode(kCharlie, received_frame.id(),
+ FakeEncode(received_frame));
+ SleepMs(40);
+ analyzer.OnFrameDecoded(kCharlie, received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ SleepMs(5);
+ analyzer.OnFrameRendered(kCharlie, received_frame);
+ }
+
+ // Give analyzer some time to process frames on async thread. The computations
+ // have to be fast (heavy metrics are disabled!), so if it doesn't fit in
+ // 100ms it means we have an issue!
+ SleepMs(100);
+ analyzer.Stop();
+
+ AnalyzerStats analyzer_stats = analyzer.GetAnalyzerStats();
+ EXPECT_EQ(analyzer_stats.memory_overloaded_comparisons_done, 0);
+ EXPECT_EQ(analyzer_stats.comparisons_done, kMaxFramesInFlightPerStream * 2);
+
+ FrameCounters frame_counters = analyzer.GetGlobalCounters();
+ EXPECT_EQ(frame_counters.captured, kMaxFramesInFlightPerStream);
+ EXPECT_EQ(frame_counters.received, kMaxFramesInFlightPerStream);
+ EXPECT_EQ(frame_counters.decoded, kMaxFramesInFlightPerStream);
+ EXPECT_EQ(frame_counters.rendered, kMaxFramesInFlightPerStream);
+ EXPECT_EQ(frame_counters.dropped, kMaxFramesInFlightPerStream);
+ EXPECT_EQ(analyzer.GetKnownVideoStreams().size(), 2lu);
+ for (auto stream_key : analyzer.GetKnownVideoStreams()) {
+ FrameCounters stream_conters =
+ analyzer.GetPerStreamCounters().at(stream_key);
+ // On some devices the pipeline can be too slow, so we actually can't
+ // enforce real constraints here. Let's just check that at least one
+ // frame passed through the whole pipeline.
+ EXPECT_GE(stream_conters.captured, 10);
+ EXPECT_GE(stream_conters.pre_encoded, 10);
+ EXPECT_GE(stream_conters.encoded, 10);
+ EXPECT_GE(stream_conters.received, 5);
+ EXPECT_GE(stream_conters.decoded, 5);
+ EXPECT_GE(stream_conters.rendered, 5);
+ EXPECT_GE(stream_conters.dropped, 5);
+ }
+
+ std::map<StatsKey, StreamStats> stats = analyzer.GetStats();
+ const StatsKey kAliceBobStats(kStreamLabel, kAlice, kBob);
+ const StatsKey kAliceCharlieStats(kStreamLabel, kAlice, kCharlie);
+ EXPECT_EQ(stats.size(), 2lu);
+ {
+ auto it = stats.find(kAliceBobStats);
+ EXPECT_FALSE(it == stats.end());
+ ASSERT_FALSE(it->second.encode_time_ms.IsEmpty());
+ EXPECT_GE(it->second.encode_time_ms.GetMin(), 20);
+ ASSERT_FALSE(it->second.decode_time_ms.IsEmpty());
+ EXPECT_GE(it->second.decode_time_ms.GetMin(), 30);
+ ASSERT_FALSE(it->second.resolution_of_rendered_frame.IsEmpty());
+ EXPECT_GE(it->second.resolution_of_rendered_frame.GetMin(),
+ kFrameWidth * kFrameHeight - 1);
+ EXPECT_LE(it->second.resolution_of_rendered_frame.GetMax(),
+ kFrameWidth * kFrameHeight + 1);
+ }
+ {
+ auto it = stats.find(kAliceCharlieStats);
+ EXPECT_FALSE(it == stats.end());
+ ASSERT_FALSE(it->second.encode_time_ms.IsEmpty());
+ EXPECT_GE(it->second.encode_time_ms.GetMin(), 20);
+ ASSERT_FALSE(it->second.decode_time_ms.IsEmpty());
+ EXPECT_GE(it->second.decode_time_ms.GetMin(), 30);
+ ASSERT_FALSE(it->second.resolution_of_rendered_frame.IsEmpty());
+ EXPECT_GE(it->second.resolution_of_rendered_frame.GetMin(),
+ kFrameWidth * kFrameHeight - 1);
+ EXPECT_LE(it->second.resolution_of_rendered_frame.GetMax(),
+ kFrameWidth * kFrameHeight + 1);
+ }
+}
+
+TEST(DefaultVideoQualityAnalyzerTest, OneFrameReceivedTwiceWith2Receivers) {
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+ /*type=*/absl::nullopt,
+ /*num_squares=*/absl::nullopt);
+
+ constexpr char kAlice[] = "alice";
+ constexpr char kBob[] = "bob";
+ constexpr char kCharlie[] = "charlie";
+
+ DefaultVideoQualityAnalyzer analyzer(
+ /*heavy_metrics_computation_enabled=*/false, kMaxFramesInFlightPerStream);
+ analyzer.Start("test_case", std::vector<std::string>{kAlice, kBob, kCharlie},
+ kAnalyzerMaxThreadsCount);
+
+ VideoFrame captured_frame = NextFrame(frame_generator.get(), 0);
+ captured_frame.set_id(
+ analyzer.OnFrameCaptured(kAlice, kStreamLabel, captured_frame));
+ analyzer.OnFramePreEncode(kAlice, captured_frame);
+ analyzer.OnFrameEncoded(kAlice, captured_frame.id(),
+ FakeEncode(captured_frame),
+ VideoQualityAnalyzerInterface::EncoderStats());
+
+ VideoFrame received_frame = DeepCopy(captured_frame);
+ analyzer.OnFramePreDecode(kBob, received_frame.id(),
+ FakeEncode(received_frame));
+ analyzer.OnFrameDecoded(kBob, received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ analyzer.OnFrameRendered(kBob, received_frame);
+
+ received_frame = DeepCopy(captured_frame);
+ analyzer.OnFramePreDecode(kBob, received_frame.id(),
+ FakeEncode(received_frame));
+ analyzer.OnFrameDecoded(kBob, received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ analyzer.OnFrameRendered(kBob, received_frame);
+
+ // Give analyzer some time to process frames on async thread. The computations
+ // have to be fast (heavy metrics are disabled!), so if it doesn't fit in
+ // 100ms it means we have an issue!
+ SleepMs(100);
+ analyzer.Stop();
+
+ AnalyzerStats stats = analyzer.GetAnalyzerStats();
+ EXPECT_EQ(stats.memory_overloaded_comparisons_done, 0);
+ EXPECT_EQ(stats.comparisons_done, 1);
+
+ FrameCounters frame_counters = analyzer.GetGlobalCounters();
+ EXPECT_EQ(frame_counters.captured, 1);
+ EXPECT_EQ(frame_counters.received, 1);
+ EXPECT_EQ(frame_counters.decoded, 1);
+ EXPECT_EQ(frame_counters.rendered, 1);
+ EXPECT_EQ(frame_counters.dropped, 0);
+}
+
} // namespace
} // namespace webrtc_pc_e2e
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/example_video_quality_analyzer.cc b/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/example_video_quality_analyzer.cc
index d1d1bface7d..a980b0e9d09 100644
--- a/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/example_video_quality_analyzer.cc
+++ b/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/example_video_quality_analyzer.cc
@@ -10,6 +10,7 @@
#include "test/pc/e2e/analyzer/video/example_video_quality_analyzer.h"
+#include "api/array_view.h"
#include "rtc_base/logging.h"
namespace webrtc {
@@ -18,10 +19,13 @@ namespace webrtc_pc_e2e {
ExampleVideoQualityAnalyzer::ExampleVideoQualityAnalyzer() = default;
ExampleVideoQualityAnalyzer::~ExampleVideoQualityAnalyzer() = default;
-void ExampleVideoQualityAnalyzer::Start(std::string test_case_name,
- int max_threads_count) {}
+void ExampleVideoQualityAnalyzer::Start(
+ std::string test_case_name,
+ rtc::ArrayView<const std::string> peer_names,
+ int max_threads_count) {}
uint16_t ExampleVideoQualityAnalyzer::OnFrameCaptured(
+ absl::string_view peer_name,
const std::string& stream_label,
const webrtc::VideoFrame& frame) {
rtc::CritScope crit(&lock_);
@@ -45,12 +49,14 @@ uint16_t ExampleVideoQualityAnalyzer::OnFrameCaptured(
}
void ExampleVideoQualityAnalyzer::OnFramePreEncode(
+ absl::string_view peer_name,
const webrtc::VideoFrame& frame) {
rtc::CritScope crit(&lock_);
++frames_pre_encoded_;
}
void ExampleVideoQualityAnalyzer::OnFrameEncoded(
+ absl::string_view peer_name,
uint16_t frame_id,
const webrtc::EncodedImage& encoded_image,
const EncoderStats& stats) {
@@ -59,6 +65,7 @@ void ExampleVideoQualityAnalyzer::OnFrameEncoded(
}
void ExampleVideoQualityAnalyzer::OnFrameDropped(
+ absl::string_view peer_name,
webrtc::EncodedImageCallback::DropReason reason) {
RTC_LOG(INFO) << "Frame dropped by encoder";
rtc::CritScope crit(&lock_);
@@ -66,6 +73,7 @@ void ExampleVideoQualityAnalyzer::OnFrameDropped(
}
void ExampleVideoQualityAnalyzer::OnFramePreDecode(
+ absl::string_view peer_name,
uint16_t frame_id,
const webrtc::EncodedImage& encoded_image) {
rtc::CritScope crit(&lock_);
@@ -73,6 +81,7 @@ void ExampleVideoQualityAnalyzer::OnFramePreDecode(
}
void ExampleVideoQualityAnalyzer::OnFrameDecoded(
+ absl::string_view peer_name,
const webrtc::VideoFrame& frame,
const DecoderStats& stats) {
rtc::CritScope crit(&lock_);
@@ -80,6 +89,7 @@ void ExampleVideoQualityAnalyzer::OnFrameDecoded(
}
void ExampleVideoQualityAnalyzer::OnFrameRendered(
+ absl::string_view peer_name,
const webrtc::VideoFrame& frame) {
rtc::CritScope crit(&lock_);
frames_in_flight_.erase(frame.id());
@@ -87,13 +97,15 @@ void ExampleVideoQualityAnalyzer::OnFrameRendered(
}
void ExampleVideoQualityAnalyzer::OnEncoderError(
+ absl::string_view peer_name,
const webrtc::VideoFrame& frame,
int32_t error_code) {
RTC_LOG(LS_ERROR) << "Failed to encode frame " << frame.id()
<< ". Code: " << error_code;
}
-void ExampleVideoQualityAnalyzer::OnDecoderError(uint16_t frame_id,
+void ExampleVideoQualityAnalyzer::OnDecoderError(absl::string_view peer_name,
+ uint16_t frame_id,
int32_t error_code) {
RTC_LOG(LS_ERROR) << "Failed to decode frame " << frame_id
<< ". Code: " << error_code;
diff --git a/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/example_video_quality_analyzer.h b/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/example_video_quality_analyzer.h
index 0d6169f9fa2..0126093c87a 100644
--- a/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/example_video_quality_analyzer.h
+++ b/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/example_video_quality_analyzer.h
@@ -16,6 +16,7 @@
#include <set>
#include <string>
+#include "api/array_view.h"
#include "api/test/video_quality_analyzer_interface.h"
#include "api/video/encoded_image.h"
#include "api/video/video_frame.h"
@@ -33,21 +34,34 @@ class ExampleVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
ExampleVideoQualityAnalyzer();
~ExampleVideoQualityAnalyzer() override;
- void Start(std::string test_case_name, int max_threads_count) override;
- uint16_t OnFrameCaptured(const std::string& stream_label,
+ void Start(std::string test_case_name,
+ rtc::ArrayView<const std::string> peer_names,
+ int max_threads_count) override;
+ uint16_t OnFrameCaptured(absl::string_view peer_name,
+ const std::string& stream_label,
const VideoFrame& frame) override;
- void OnFramePreEncode(const VideoFrame& frame) override;
- void OnFrameEncoded(uint16_t frame_id,
+ void OnFramePreEncode(absl::string_view peer_name,
+ const VideoFrame& frame) override;
+ void OnFrameEncoded(absl::string_view peer_name,
+ uint16_t frame_id,
const EncodedImage& encoded_image,
const EncoderStats& stats) override;
- void OnFrameDropped(EncodedImageCallback::DropReason reason) override;
- void OnFramePreDecode(uint16_t frame_id,
+ void OnFrameDropped(absl::string_view peer_name,
+ EncodedImageCallback::DropReason reason) override;
+ void OnFramePreDecode(absl::string_view peer_name,
+ uint16_t frame_id,
const EncodedImage& encoded_image) override;
- void OnFrameDecoded(const VideoFrame& frame,
+ void OnFrameDecoded(absl::string_view peer_name,
+ const VideoFrame& frame,
const DecoderStats& stats) override;
- void OnFrameRendered(const VideoFrame& frame) override;
- void OnEncoderError(const VideoFrame& frame, int32_t error_code) override;
- void OnDecoderError(uint16_t frame_id, int32_t error_code) override;
+ void OnFrameRendered(absl::string_view peer_name,
+ const VideoFrame& frame) override;
+ void OnEncoderError(absl::string_view peer_name,
+ const VideoFrame& frame,
+ int32_t error_code) override;
+ void OnDecoderError(absl::string_view peer_name,
+ uint16_t frame_id,
+ int32_t error_code) override;
void Stop() override;
std::string GetStreamLabel(uint16_t frame_id) override;
diff --git a/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/multi_head_queue.h b/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/multi_head_queue.h
new file mode 100644
index 00000000000..52314a60d54
--- /dev/null
+++ b/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/multi_head_queue.h
@@ -0,0 +1,99 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_PC_E2E_ANALYZER_VIDEO_MULTI_HEAD_QUEUE_H_
+#define TEST_PC_E2E_ANALYZER_VIDEO_MULTI_HEAD_QUEUE_H_
+
+#include <deque>
+#include <memory>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+// A queue that allows more than one reader. Readers are independent, and all
+// readers will see all elements; an inserted element stays in the queue until
+// all readers have extracted it. Elements are copied and copying is assumed to
+// be cheap.
+template <typename T>
+class MultiHeadQueue {
+ public:
+ // Creates queue with exactly |readers_count| readers.
+ explicit MultiHeadQueue(size_t readers_count) {
+ for (size_t i = 0; i < readers_count; ++i) {
+ queues_.push_back(std::deque<T>());
+ }
+ }
+
+ // Add value to the end of the queue. Complexity O(readers_count).
+ void PushBack(T value) {
+ for (auto& queue : queues_) {
+ queue.push_back(value);
+ }
+ }
+
+ // Extract element from specified head. Complexity O(1).
+ absl::optional<T> PopFront(size_t index) {
+ RTC_CHECK_LT(index, queues_.size());
+ if (queues_[index].empty()) {
+ return absl::nullopt;
+ }
+ T out = queues_[index].front();
+ queues_[index].pop_front();
+ return out;
+ }
+
+ // Returns element at specified head. Complexity O(1).
+ absl::optional<T> Front(size_t index) const {
+ RTC_CHECK_LT(index, queues_.size());
+ if (queues_[index].empty()) {
+ return absl::nullopt;
+ }
+ return queues_[index].front();
+ }
+
+ // Returns true if for specified head there are no more elements in the queue
+ // or false otherwise. Complexity O(1).
+ bool IsEmpty(size_t index) const {
+ RTC_CHECK_LT(index, queues_.size());
+ return queues_[index].empty();
+ }
+
+ // Returns size of the longest queue between all readers.
+ // Complexity O(readers_count).
+ size_t size() const {
+ size_t size = 0;
+ for (auto& queue : queues_) {
+ if (queue.size() > size) {
+ size = queue.size();
+ }
+ }
+ return size;
+ }
+
+ // Returns size of the specified queue. Complexity O(1).
+ size_t size(size_t index) const {
+ RTC_CHECK_LT(index, queues_.size());
+ return queues_[index].size();
+ }
+
+ size_t readers_count() const { return queues_.size(); }
+
+ private:
+ std::vector<std::deque<T>> queues_;
+};
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_ANALYZER_VIDEO_MULTI_HEAD_QUEUE_H_
diff --git a/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/multi_head_queue_test.cc b/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/multi_head_queue_test.cc
new file mode 100644
index 00000000000..3a4ab6cdbb9
--- /dev/null
+++ b/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/multi_head_queue_test.cc
@@ -0,0 +1,103 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/pc/e2e/analyzer/video/multi_head_queue.h"
+#include "absl/types/optional.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+namespace {
+
+TEST(MultiHeadQueueTest, GetOnEmpty) {
+ MultiHeadQueue<int> queue = MultiHeadQueue<int>(10);
+ EXPECT_TRUE(queue.IsEmpty(0));
+ for (int i = 0; i < 10; ++i) {
+ EXPECT_FALSE(queue.PopFront(i).has_value());
+ EXPECT_FALSE(queue.Front(i).has_value());
+ }
+}
+
+TEST(MultiHeadQueueTest, SingleHeadOneAddOneRemove) {
+ MultiHeadQueue<int> queue = MultiHeadQueue<int>(1);
+ queue.PushBack(1);
+ EXPECT_EQ(queue.size(), 1lu);
+ EXPECT_TRUE(queue.Front(0).has_value());
+ EXPECT_EQ(queue.Front(0).value(), 1);
+ absl::optional<int> value = queue.PopFront(0);
+ EXPECT_TRUE(value.has_value());
+ EXPECT_EQ(value.value(), 1);
+ EXPECT_EQ(queue.size(), 0lu);
+ EXPECT_TRUE(queue.IsEmpty(0));
+}
+
+TEST(MultiHeadQueueTest, SingleHead) {
+ MultiHeadQueue<size_t> queue = MultiHeadQueue<size_t>(1);
+ for (size_t i = 0; i < 10; ++i) {
+ queue.PushBack(i);
+ EXPECT_EQ(queue.size(), i + 1);
+ }
+ for (size_t i = 0; i < 10; ++i) {
+ absl::optional<size_t> value = queue.PopFront(0);
+ EXPECT_EQ(queue.size(), 10 - i - 1);
+ ASSERT_TRUE(value.has_value());
+ EXPECT_EQ(value.value(), i);
+ }
+}
+
+TEST(MultiHeadQueueTest, ThreeHeadsAddAllRemoveAllPerHead) {
+ MultiHeadQueue<size_t> queue = MultiHeadQueue<size_t>(3);
+ for (size_t i = 0; i < 10; ++i) {
+ queue.PushBack(i);
+ EXPECT_EQ(queue.size(), i + 1);
+ }
+ for (size_t i = 0; i < 10; ++i) {
+ absl::optional<size_t> value = queue.PopFront(0);
+ EXPECT_EQ(queue.size(), 10lu);
+ ASSERT_TRUE(value.has_value());
+ EXPECT_EQ(value.value(), i);
+ }
+ for (size_t i = 0; i < 10; ++i) {
+ absl::optional<size_t> value = queue.PopFront(1);
+ EXPECT_EQ(queue.size(), 10lu);
+ ASSERT_TRUE(value.has_value());
+ EXPECT_EQ(value.value(), i);
+ }
+ for (size_t i = 0; i < 10; ++i) {
+ absl::optional<size_t> value = queue.PopFront(2);
+ EXPECT_EQ(queue.size(), 10 - i - 1);
+ ASSERT_TRUE(value.has_value());
+ EXPECT_EQ(value.value(), i);
+ }
+}
+
+TEST(MultiHeadQueueTest, ThreeHeadsAddAllRemoveAll) {
+ MultiHeadQueue<size_t> queue = MultiHeadQueue<size_t>(3);
+ for (size_t i = 0; i < 10; ++i) {
+ queue.PushBack(i);
+ EXPECT_EQ(queue.size(), i + 1);
+ }
+ for (size_t i = 0; i < 10; ++i) {
+ absl::optional<size_t> value1 = queue.PopFront(0);
+ absl::optional<size_t> value2 = queue.PopFront(1);
+ absl::optional<size_t> value3 = queue.PopFront(2);
+ EXPECT_EQ(queue.size(), 10 - i - 1);
+ ASSERT_TRUE(value1.has_value());
+ ASSERT_TRUE(value2.has_value());
+ ASSERT_TRUE(value3.has_value());
+ EXPECT_EQ(value1.value(), i);
+ EXPECT_EQ(value2.value(), i);
+ EXPECT_EQ(value3.value(), i);
+ }
+}
+
+} // namespace
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.cc b/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.cc
index 228ab8ac025..9e81c8728bd 100644
--- a/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.cc
+++ b/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.cc
@@ -15,6 +15,7 @@
#include <memory>
#include <utility>
+#include "absl/strings/string_view.h"
#include "absl/types/optional.h"
#include "api/video/i420_buffer.h"
#include "modules/video_coding/include/video_error_codes.h"
@@ -26,10 +27,12 @@ namespace webrtc_pc_e2e {
QualityAnalyzingVideoDecoder::QualityAnalyzingVideoDecoder(
int id,
+ absl::string_view peer_name,
std::unique_ptr<VideoDecoder> delegate,
EncodedImageDataExtractor* extractor,
VideoQualityAnalyzerInterface* analyzer)
: id_(id),
+ peer_name_(peer_name),
implementation_name_("AnalyzingDecoder-" +
std::string(delegate->ImplementationName())),
delegate_(std::move(delegate)),
@@ -87,7 +90,7 @@ int32_t QualityAnalyzingVideoDecoder::Decode(const EncodedImage& input_image,
// We can safely dereference |origin_image|, because it can be removed from
// the map only after |delegate_| Decode method will be invoked. Image will be
// removed inside DecodedImageCallback, which can be done on separate thread.
- analyzer_->OnFramePreDecode(out.id, *origin_image);
+ analyzer_->OnFramePreDecode(peer_name_, out.id, *origin_image);
int32_t result =
delegate_->Decode(*origin_image, missing_frames, render_time_ms);
if (result != WEBRTC_VIDEO_CODEC_OK) {
@@ -97,7 +100,7 @@ int32_t QualityAnalyzingVideoDecoder::Decode(const EncodedImage& input_image,
timestamp_to_frame_id_.erase(input_image.Timestamp());
decoding_images_.erase(out.id);
}
- analyzer_->OnDecoderError(out.id, result);
+ analyzer_->OnDecoderError(peer_name_, out.id, result);
}
return result;
}
@@ -224,15 +227,17 @@ void QualityAnalyzingVideoDecoder::OnFrameDecoded(
frame->set_id(frame_id);
VideoQualityAnalyzerInterface::DecoderStats stats;
stats.decode_time_ms = decode_time_ms;
- analyzer_->OnFrameDecoded(*frame, stats);
+ analyzer_->OnFrameDecoded(peer_name_, *frame, stats);
}
QualityAnalyzingVideoDecoderFactory::QualityAnalyzingVideoDecoderFactory(
+ absl::string_view peer_name,
std::unique_ptr<VideoDecoderFactory> delegate,
IdGenerator<int>* id_generator,
EncodedImageDataExtractor* extractor,
VideoQualityAnalyzerInterface* analyzer)
- : delegate_(std::move(delegate)),
+ : peer_name_(peer_name),
+ delegate_(std::move(delegate)),
id_generator_(id_generator),
extractor_(extractor),
analyzer_(analyzer) {}
@@ -249,7 +254,8 @@ QualityAnalyzingVideoDecoderFactory::CreateVideoDecoder(
const SdpVideoFormat& format) {
std::unique_ptr<VideoDecoder> decoder = delegate_->CreateVideoDecoder(format);
return std::make_unique<QualityAnalyzingVideoDecoder>(
- id_generator_->GetNextId(), std::move(decoder), extractor_, analyzer_);
+ id_generator_->GetNextId(), peer_name_, std::move(decoder), extractor_,
+ analyzer_);
}
std::unique_ptr<VideoDecoder>
@@ -259,7 +265,8 @@ QualityAnalyzingVideoDecoderFactory::LegacyCreateVideoDecoder(
std::unique_ptr<VideoDecoder> decoder =
delegate_->LegacyCreateVideoDecoder(format, receive_stream_id);
return std::make_unique<QualityAnalyzingVideoDecoder>(
- id_generator_->GetNextId(), std::move(decoder), extractor_, analyzer_);
+ id_generator_->GetNextId(), peer_name_, std::move(decoder), extractor_,
+ analyzer_);
}
} // namespace webrtc_pc_e2e
diff --git a/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.h b/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.h
index 5cbc8822266..decb844bc2a 100644
--- a/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.h
+++ b/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.h
@@ -16,6 +16,7 @@
#include <string>
#include <vector>
+#include "absl/strings/string_view.h"
#include "api/test/video_quality_analyzer_interface.h"
#include "api/video/encoded_image.h"
#include "api/video/video_frame.h"
@@ -45,14 +46,15 @@ namespace webrtc_pc_e2e {
// callback, where video analyzer will be called again and then decoded frame
// will be passed to origin callback, provided by user.
//
-// Quality decoder registers its own callback in origin decoder at the same
-// time, when user registers his callback in quality decoder.
+// Quality decoder registers its own callback in origin decoder, at the same
+// time the user registers their callback in quality decoder.
class QualityAnalyzingVideoDecoder : public VideoDecoder {
public:
// Creates analyzing decoder. |id| is unique coding entity id, that will
// be used to distinguish all encoders and decoders inside
// EncodedImageDataInjector and EncodedImageIdExtracor.
QualityAnalyzingVideoDecoder(int id,
+ absl::string_view peer_name,
std::unique_ptr<VideoDecoder> delegate,
EncodedImageDataExtractor* extractor,
VideoQualityAnalyzerInterface* analyzer);
@@ -104,6 +106,7 @@ class QualityAnalyzingVideoDecoder : public VideoDecoder {
absl::optional<uint8_t> qp);
const int id_;
+ const std::string peer_name_;
const std::string implementation_name_;
std::unique_ptr<VideoDecoder> delegate_;
EncodedImageDataExtractor* const extractor_;
@@ -129,6 +132,7 @@ class QualityAnalyzingVideoDecoder : public VideoDecoder {
class QualityAnalyzingVideoDecoderFactory : public VideoDecoderFactory {
public:
QualityAnalyzingVideoDecoderFactory(
+ absl::string_view peer_name,
std::unique_ptr<VideoDecoderFactory> delegate,
IdGenerator<int>* id_generator,
EncodedImageDataExtractor* extractor,
@@ -144,6 +148,7 @@ class QualityAnalyzingVideoDecoderFactory : public VideoDecoderFactory {
const std::string& receive_stream_id) override;
private:
+ const std::string peer_name_;
std::unique_ptr<VideoDecoderFactory> delegate_;
IdGenerator<int>* const id_generator_;
EncodedImageDataExtractor* const extractor_;
diff --git a/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc b/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc
index 2e7b8f41529..4d04a2ccbbb 100644
--- a/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc
+++ b/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc
@@ -14,6 +14,7 @@
#include <memory>
#include <utility>
+#include "absl/strings/string_view.h"
#include "api/video/video_codec_type.h"
#include "api/video_codecs/video_encoder.h"
#include "modules/video_coding/include/video_error_codes.h"
@@ -54,12 +55,14 @@ std::pair<uint32_t, uint32_t> GetMinMaxBitratesBps(const VideoCodec& codec,
QualityAnalyzingVideoEncoder::QualityAnalyzingVideoEncoder(
int id,
+ absl::string_view peer_name,
std::unique_ptr<VideoEncoder> delegate,
double bitrate_multiplier,
std::map<std::string, absl::optional<int>> stream_required_spatial_index,
EncodedImageDataInjector* injector,
VideoQualityAnalyzerInterface* analyzer)
: id_(id),
+ peer_name_(peer_name),
delegate_(std::move(delegate)),
bitrate_multiplier_(bitrate_multiplier),
stream_required_spatial_index_(std::move(stream_required_spatial_index)),
@@ -134,7 +137,7 @@ int32_t QualityAnalyzingVideoEncoder::Encode(
// images from encoder. So it should be a bug in setup on in the encoder.
RTC_DCHECK_LT(timestamp_to_frame_id_list_.size(), kMaxFrameInPipelineCount);
}
- analyzer_->OnFramePreEncode(frame);
+ analyzer_->OnFramePreEncode(peer_name_, frame);
int32_t result = delegate_->Encode(frame, frame_types);
if (result != WEBRTC_VIDEO_CODEC_OK) {
// If origin encoder failed, then cleanup data for this frame.
@@ -152,7 +155,7 @@ int32_t QualityAnalyzingVideoEncoder::Encode(
}
}
}
- analyzer_->OnEncoderError(frame, result);
+ analyzer_->OnEncoderError(peer_name_, frame, result);
}
return result;
}
@@ -277,7 +280,7 @@ EncodedImageCallback::Result QualityAnalyzingVideoEncoder::OnEncodedImage(
// not discarded layers have to be passed.
VideoQualityAnalyzerInterface::EncoderStats stats;
stats.target_encode_bitrate = target_encode_bitrate;
- analyzer_->OnFrameEncoded(frame_id, encoded_image, stats);
+ analyzer_->OnFrameEncoded(peer_name_, frame_id, encoded_image, stats);
}
// Image data injector injects frame id and discard flag into provided
@@ -298,7 +301,7 @@ EncodedImageCallback::Result QualityAnalyzingVideoEncoder::OnEncodedImage(
void QualityAnalyzingVideoEncoder::OnDroppedFrame(
EncodedImageCallback::DropReason reason) {
rtc::CritScope crit(&lock_);
- analyzer_->OnFrameDropped(reason);
+ analyzer_->OnFrameDropped(peer_name_, reason);
RTC_DCHECK(delegate_callback_);
delegate_callback_->OnDroppedFrame(reason);
}
@@ -348,13 +351,15 @@ bool QualityAnalyzingVideoEncoder::ShouldDiscard(
}
QualityAnalyzingVideoEncoderFactory::QualityAnalyzingVideoEncoderFactory(
+ absl::string_view peer_name,
std::unique_ptr<VideoEncoderFactory> delegate,
double bitrate_multiplier,
std::map<std::string, absl::optional<int>> stream_required_spatial_index,
IdGenerator<int>* id_generator,
EncodedImageDataInjector* injector,
VideoQualityAnalyzerInterface* analyzer)
- : delegate_(std::move(delegate)),
+ : peer_name_(peer_name),
+ delegate_(std::move(delegate)),
bitrate_multiplier_(bitrate_multiplier),
stream_required_spatial_index_(std::move(stream_required_spatial_index)),
id_generator_(id_generator),
@@ -378,9 +383,9 @@ std::unique_ptr<VideoEncoder>
QualityAnalyzingVideoEncoderFactory::CreateVideoEncoder(
const SdpVideoFormat& format) {
return std::make_unique<QualityAnalyzingVideoEncoder>(
- id_generator_->GetNextId(), delegate_->CreateVideoEncoder(format),
- bitrate_multiplier_, stream_required_spatial_index_, injector_,
- analyzer_);
+ id_generator_->GetNextId(), peer_name_,
+ delegate_->CreateVideoEncoder(format), bitrate_multiplier_,
+ stream_required_spatial_index_, injector_, analyzer_);
}
} // namespace webrtc_pc_e2e
diff --git a/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h b/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h
index 3307dc73255..f6db1369b09 100644
--- a/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h
+++ b/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h
@@ -16,6 +16,7 @@
#include <utility>
#include <vector>
+#include "absl/strings/string_view.h"
#include "api/test/video_quality_analyzer_interface.h"
#include "api/video/video_frame.h"
#include "api/video_codecs/sdp_video_format.h"
@@ -49,8 +50,8 @@ constexpr int kAnalyzeAnySpatialStream = -1;
// injected into EncodedImage with passed EncodedImageDataInjector. Then new
// EncodedImage will be passed to origin callback, provided by user.
//
-// Quality encoder registers its own callback in origin encoder at the same
-// time, when user registers his callback in quality encoder.
+// Quality encoder registers its own callback in origin encoder, at the same
+// time the user registers their callback in quality encoder.
class QualityAnalyzingVideoEncoder : public VideoEncoder,
public EncodedImageCallback {
public:
@@ -59,6 +60,7 @@ class QualityAnalyzingVideoEncoder : public VideoEncoder,
// EncodedImageDataInjector and EncodedImageIdExtracor.
QualityAnalyzingVideoEncoder(
int id,
+ absl::string_view peer_name,
std::unique_ptr<VideoEncoder> delegate,
double bitrate_multiplier,
std::map<std::string, absl::optional<int>> stream_required_spatial_index,
@@ -139,6 +141,7 @@ class QualityAnalyzingVideoEncoder : public VideoEncoder,
RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_);
const int id_;
+ const std::string peer_name_;
std::unique_ptr<VideoEncoder> delegate_;
const double bitrate_multiplier_;
// Contains mapping from stream label to optional spatial index.
@@ -170,6 +173,7 @@ class QualityAnalyzingVideoEncoder : public VideoEncoder,
class QualityAnalyzingVideoEncoderFactory : public VideoEncoderFactory {
public:
QualityAnalyzingVideoEncoderFactory(
+ absl::string_view peer_name,
std::unique_ptr<VideoEncoderFactory> delegate,
double bitrate_multiplier,
std::map<std::string, absl::optional<int>> stream_required_spatial_index,
@@ -186,6 +190,7 @@ class QualityAnalyzingVideoEncoderFactory : public VideoEncoderFactory {
const SdpVideoFormat& format) override;
private:
+ const std::string peer_name_;
std::unique_ptr<VideoEncoderFactory> delegate_;
const double bitrate_multiplier_;
std::map<std::string, absl::optional<int>> stream_required_spatial_index_;
diff --git a/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.cc b/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.cc
index ec0d26b780b..75f1265e1f4 100644
--- a/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.cc
+++ b/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.cc
@@ -19,13 +19,6 @@
namespace webrtc {
namespace webrtc_pc_e2e {
-namespace {
-
-// Number of bytes from the beginning of the EncodedImage buffer that will be
-// used to store frame id and sub id.
-constexpr size_t kUsedBufferSize = 3;
-
-} // namespace
SingleProcessEncodedImageDataInjector::SingleProcessEncodedImageDataInjector() =
default;
@@ -37,13 +30,13 @@ EncodedImage SingleProcessEncodedImageDataInjector::InjectData(
bool discard,
const EncodedImage& source,
int coding_entity_id) {
- RTC_CHECK(source.size() >= kUsedBufferSize);
+ RTC_CHECK(source.size() >= ExtractionInfo::kUsedBufferSize);
ExtractionInfo info;
- info.length = source.size();
info.discard = discard;
- size_t insertion_pos = source.size() - kUsedBufferSize;
- memcpy(info.origin_data, &source.data()[insertion_pos], kUsedBufferSize);
+ size_t insertion_pos = source.size() - ExtractionInfo::kUsedBufferSize;
+ memcpy(info.origin_data, &source.data()[insertion_pos],
+ ExtractionInfo::kUsedBufferSize);
{
rtc::CritScope crit(&lock_);
// Will create new one if missed.
@@ -69,15 +62,27 @@ EncodedImageExtractionResult SingleProcessEncodedImageDataInjector::ExtractData(
uint8_t* buffer = out.data();
size_t size = out.size();
- // |pos| is pointing to end of current encoded image.
- size_t pos = size - 1;
+ std::vector<size_t> frame_sizes;
+ std::vector<size_t> frame_sl_index;
+ size_t max_spatial_index = out.SpatialIndex().value_or(0);
+ for (size_t i = 0; i <= max_spatial_index; ++i) {
+ auto frame_size = source.SpatialLayerFrameSize(i);
+ if (frame_size.value_or(0)) {
+ frame_sl_index.push_back(i);
+ frame_sizes.push_back(frame_size.value());
+ }
+ }
+ if (frame_sizes.empty()) {
+ frame_sizes.push_back(size);
+ }
+
+ size_t prev_frames_size = 0;
absl::optional<uint16_t> id = absl::nullopt;
bool discard = true;
std::vector<ExtractionInfo> extraction_infos;
- // Go through whole buffer and find all related extraction infos in
- // order from 1st encoded image to the last.
- while (true) {
- size_t insertion_pos = pos - kUsedBufferSize + 1;
+ for (size_t frame_size : frame_sizes) {
+ size_t insertion_pos =
+ prev_frames_size + frame_size - ExtractionInfo::kUsedBufferSize;
// Extract frame id from first 2 bytes starting from insertion pos.
uint16_t next_id = buffer[insertion_pos] + (buffer[insertion_pos + 1] << 8);
// Extract frame sub id from second 3 byte starting from insertion pos.
@@ -90,6 +95,8 @@ EncodedImageExtractionResult SingleProcessEncodedImageDataInjector::ExtractData(
{
rtc::CritScope crit(&lock_);
auto ext_vector_it = extraction_cache_.find(next_id);
+ // TODO(titovartem) add support for receiving single frame multiple times
+ // when in simulcast key frame for another spatial stream can be received.
RTC_CHECK(ext_vector_it != extraction_cache_.end())
<< "Unknown frame_id=" << next_id;
@@ -99,41 +106,45 @@ EncodedImageExtractionResult SingleProcessEncodedImageDataInjector::ExtractData(
info = info_it->second;
ext_vector_it->second.infos.erase(info_it);
}
- extraction_infos.push_back(info);
// We need to discard encoded image only if all concatenated encoded images
// have to be discarded.
discard = discard && info.discard;
- if (pos < info.length) {
- break;
- }
- pos -= info.length;
+
+ extraction_infos.push_back(info);
+ prev_frames_size += frame_size;
}
RTC_CHECK(id);
- std::reverse(extraction_infos.begin(), extraction_infos.end());
+
if (discard) {
out.set_size(0);
+ for (size_t i = 0; i <= max_spatial_index; ++i) {
+ out.SetSpatialLayerFrameSize(i, 0);
+ }
return EncodedImageExtractionResult{*id, out, true};
}
// Make a pass from begin to end to restore origin payload and erase discarded
// encoded images.
- pos = 0;
- auto extraction_infos_it = extraction_infos.begin();
- while (pos < size) {
- RTC_DCHECK(extraction_infos_it != extraction_infos.end());
- const ExtractionInfo& info = *extraction_infos_it;
+ size_t pos = 0;
+ for (size_t frame_index = 0; frame_index < frame_sizes.size();
+ ++frame_index) {
+ RTC_CHECK(pos < size);
+ const size_t frame_size = frame_sizes[frame_index];
+ const ExtractionInfo& info = extraction_infos[frame_index];
if (info.discard) {
// If this encoded image is marked to be discarded - erase it's payload
// from the buffer.
- memmove(&buffer[pos], &buffer[pos + info.length],
- size - pos - info.length);
- size -= info.length;
+ memmove(&buffer[pos], &buffer[pos + frame_size], size - pos - frame_size);
+ RTC_CHECK_LT(frame_index, frame_sl_index.size())
+ << "codec doesn't support discard option or the image, that was "
+ "supposed to be discarded, is lost";
+ out.SetSpatialLayerFrameSize(frame_sl_index[frame_index], 0);
+ size -= frame_size;
} else {
- memcpy(&buffer[pos + info.length - kUsedBufferSize], info.origin_data,
- kUsedBufferSize);
- pos += info.length;
+ memcpy(&buffer[pos + frame_size - ExtractionInfo::kUsedBufferSize],
+ info.origin_data, ExtractionInfo::kUsedBufferSize);
+ pos += frame_size;
}
- ++extraction_infos_it;
}
out.set_size(pos);
diff --git a/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.h b/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.h
index 3787cc51aac..f79532e0961 100644
--- a/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.h
+++ b/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.h
@@ -57,15 +57,16 @@ class SingleProcessEncodedImageDataInjector : public EncodedImageDataInjector,
// Contains data required to extract frame id from EncodedImage and restore
// original buffer.
struct ExtractionInfo {
+ // Number of bytes from the beginning of the EncodedImage buffer that will
+ // be used to store frame id and sub id.
+ const static size_t kUsedBufferSize = 3;
// Frame sub id to distinguish encoded images for different spatial layers.
uint8_t sub_id;
- // Length of the origin buffer encoded image.
- size_t length;
// Flag to show is this encoded images should be discarded by analyzing
// decoder because of not required spatial layer/simulcast stream.
bool discard;
// Data from first 3 bytes of origin encoded image's payload.
- uint8_t origin_data[3];
+ uint8_t origin_data[ExtractionInfo::kUsedBufferSize];
};
struct ExtractionInfoVector {
diff --git a/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector_unittest.cc b/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector_unittest.cc
index 67cafa75a6d..e25361e337b 100644
--- a/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector_unittest.cc
+++ b/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector_unittest.cc
@@ -44,6 +44,7 @@ TEST(SingleProcessEncodedImageDataInjector, InjectExtractDiscardFalse) {
EXPECT_FALSE(out.discard);
EXPECT_EQ(out.image.size(), 10ul);
EXPECT_EQ(out.image.capacity(), 10ul);
+ EXPECT_EQ(out.image.SpatialLayerFrameSize(0).value_or(0), 0ul);
for (int i = 0; i < 10; ++i) {
EXPECT_EQ(out.image.data()[i], i + 1);
}
@@ -63,6 +64,60 @@ TEST(SingleProcessEncodedImageDataInjector, InjectExtractDiscardTrue) {
EXPECT_TRUE(out.discard);
EXPECT_EQ(out.image.size(), 0ul);
EXPECT_EQ(out.image.capacity(), 10ul);
+ EXPECT_EQ(out.image.SpatialLayerFrameSize(0).value_or(0), 0ul);
+}
+
+TEST(SingleProcessEncodedImageDataInjector, InjectWithUnsetSpatialLayerSizes) {
+ SingleProcessEncodedImageDataInjector injector;
+
+ rtc::Buffer buffer = CreateBufferOfSizeNFilledWithValuesFromX(10, 1);
+
+ EncodedImage source(buffer.data(), 10, 10);
+ source.SetTimestamp(123456789);
+
+ EncodedImage intermediate = injector.InjectData(512, false, source, 1);
+ intermediate.SetSpatialIndex(2);
+
+ EncodedImageExtractionResult out = injector.ExtractData(intermediate, 2);
+ EXPECT_EQ(out.id, 512);
+ EXPECT_FALSE(out.discard);
+ EXPECT_EQ(out.image.size(), 10ul);
+ EXPECT_EQ(out.image.capacity(), 10ul);
+ for (int i = 0; i < 10; ++i) {
+ EXPECT_EQ(out.image.data()[i], i + 1);
+ }
+ EXPECT_EQ(out.image.SpatialIndex().value_or(0), 2);
+ for (int i = 0; i < 3; ++i) {
+ EXPECT_EQ(out.image.SpatialLayerFrameSize(i).value_or(0), 0ul);
+ }
+}
+
+TEST(SingleProcessEncodedImageDataInjector, InjectWithZeroSpatialLayerSizes) {
+ SingleProcessEncodedImageDataInjector injector;
+
+ rtc::Buffer buffer = CreateBufferOfSizeNFilledWithValuesFromX(10, 1);
+
+ EncodedImage source(buffer.data(), 10, 10);
+ source.SetTimestamp(123456789);
+
+ EncodedImage intermediate = injector.InjectData(512, false, source, 1);
+ intermediate.SetSpatialIndex(2);
+ intermediate.SetSpatialLayerFrameSize(0, 0);
+ intermediate.SetSpatialLayerFrameSize(1, 0);
+ intermediate.SetSpatialLayerFrameSize(2, 0);
+
+ EncodedImageExtractionResult out = injector.ExtractData(intermediate, 2);
+ EXPECT_EQ(out.id, 512);
+ EXPECT_FALSE(out.discard);
+ EXPECT_EQ(out.image.size(), 10ul);
+ EXPECT_EQ(out.image.capacity(), 10ul);
+ for (int i = 0; i < 10; ++i) {
+ EXPECT_EQ(out.image.data()[i], i + 1);
+ }
+ EXPECT_EQ(out.image.SpatialIndex().value_or(0), 2);
+ for (int i = 0; i < 3; ++i) {
+ EXPECT_EQ(out.image.SpatialLayerFrameSize(i).value_or(0), 0ul);
+ }
}
TEST(SingleProcessEncodedImageDataInjector, Inject3Extract3) {
@@ -95,6 +150,7 @@ TEST(SingleProcessEncodedImageDataInjector, Inject3Extract3) {
EXPECT_FALSE(out1.discard);
EXPECT_EQ(out1.image.size(), 10ul);
EXPECT_EQ(out1.image.capacity(), 10ul);
+ EXPECT_EQ(out1.image.SpatialLayerFrameSize(0).value_or(0), 0ul);
for (int i = 0; i < 10; ++i) {
EXPECT_EQ(out1.image.data()[i], i + 1);
}
@@ -102,10 +158,12 @@ TEST(SingleProcessEncodedImageDataInjector, Inject3Extract3) {
EXPECT_TRUE(out2.discard);
EXPECT_EQ(out2.image.size(), 0ul);
EXPECT_EQ(out2.image.capacity(), 10ul);
+ EXPECT_EQ(out2.image.SpatialLayerFrameSize(0).value_or(0), 0ul);
EXPECT_EQ(out3.id, 520);
EXPECT_FALSE(out3.discard);
EXPECT_EQ(out3.image.size(), 10ul);
EXPECT_EQ(out3.image.capacity(), 10ul);
+ EXPECT_EQ(out3.image.SpatialLayerFrameSize(0).value_or(0), 0ul);
for (int i = 0; i < 10; ++i) {
EXPECT_EQ(out3.image.data()[i], i + 21);
}
@@ -140,6 +198,10 @@ TEST(SingleProcessEncodedImageDataInjector, InjectExtractFromConcatenated) {
concatenated_buffer.AppendData(intermediate3.data(), intermediate3.size());
EncodedImage concatenated(concatenated_buffer.data(), concatenated_length,
concatenated_length);
+ concatenated.SetSpatialIndex(2);
+ concatenated.SetSpatialLayerFrameSize(0, intermediate1.size());
+ concatenated.SetSpatialLayerFrameSize(1, intermediate2.size());
+ concatenated.SetSpatialLayerFrameSize(2, intermediate3.size());
// Extract frame id from concatenated image
EncodedImageExtractionResult out = injector.ExtractData(concatenated, 2);
@@ -152,6 +214,10 @@ TEST(SingleProcessEncodedImageDataInjector, InjectExtractFromConcatenated) {
EXPECT_EQ(out.image.data()[i], i + 1);
EXPECT_EQ(out.image.data()[i + 10], i + 21);
}
+ EXPECT_EQ(out.image.SpatialIndex().value_or(0), 2);
+ EXPECT_EQ(out.image.SpatialLayerFrameSize(0).value_or(0), 10ul);
+ EXPECT_EQ(out.image.SpatialLayerFrameSize(1).value_or(0), 0ul);
+ EXPECT_EQ(out.image.SpatialLayerFrameSize(2).value_or(0), 10ul);
}
TEST(SingleProcessEncodedImageDataInjector,
@@ -184,6 +250,10 @@ TEST(SingleProcessEncodedImageDataInjector,
concatenated_buffer.AppendData(intermediate3.data(), intermediate3.size());
EncodedImage concatenated(concatenated_buffer.data(), concatenated_length,
concatenated_length);
+ concatenated.SetSpatialIndex(2);
+ concatenated.SetSpatialLayerFrameSize(0, intermediate1.size());
+ concatenated.SetSpatialLayerFrameSize(1, intermediate2.size());
+ concatenated.SetSpatialLayerFrameSize(2, intermediate3.size());
// Extract frame id from concatenated image
EncodedImageExtractionResult out = injector.ExtractData(concatenated, 2);
@@ -192,6 +262,10 @@ TEST(SingleProcessEncodedImageDataInjector,
EXPECT_TRUE(out.discard);
EXPECT_EQ(out.image.size(), 0ul);
EXPECT_EQ(out.image.capacity(), 3 * 10ul);
+ EXPECT_EQ(out.image.SpatialIndex().value_or(0), 2);
+ for (int i = 0; i < 3; ++i) {
+ EXPECT_EQ(out.image.SpatialLayerFrameSize(i).value_or(0), 0ul);
+ }
}
} // namespace webrtc_pc_e2e
diff --git a/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.cc b/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.cc
index 074188439b3..19487778485 100644
--- a/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.cc
+++ b/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.cc
@@ -14,6 +14,8 @@
#include <vector>
#include "absl/memory/memory.h"
+#include "absl/strings/string_view.h"
+#include "api/array_view.h"
#include "test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.h"
#include "test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h"
#include "test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper.h"
@@ -43,10 +45,12 @@ class AnalyzingFramePreprocessor
: public test::TestVideoCapturer::FramePreprocessor {
public:
AnalyzingFramePreprocessor(
- std::string stream_label,
+ absl::string_view peer_name,
+ absl::string_view stream_label,
VideoQualityAnalyzerInterface* analyzer,
std::vector<std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>>> sinks)
- : stream_label_(std::move(stream_label)),
+ : peer_name_(peer_name),
+ stream_label_(stream_label),
analyzer_(analyzer),
sinks_(std::move(sinks)) {}
~AnalyzingFramePreprocessor() override = default;
@@ -54,7 +58,8 @@ class AnalyzingFramePreprocessor
VideoFrame Preprocess(const VideoFrame& source_frame) override {
// Copy VideoFrame to be able to set id on it.
VideoFrame frame = source_frame;
- uint16_t frame_id = analyzer_->OnFrameCaptured(stream_label_, frame);
+ uint16_t frame_id =
+ analyzer_->OnFrameCaptured(peer_name_, stream_label_, frame);
frame.set_id(frame_id);
for (auto& sink : sinks_) {
@@ -64,6 +69,7 @@ class AnalyzingFramePreprocessor
}
private:
+ const std::string peer_name_;
const std::string stream_label_;
VideoQualityAnalyzerInterface* const analyzer_;
const std::vector<std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>>>
@@ -88,26 +94,29 @@ VideoQualityAnalyzerInjectionHelper::~VideoQualityAnalyzerInjectionHelper() =
std::unique_ptr<VideoEncoderFactory>
VideoQualityAnalyzerInjectionHelper::WrapVideoEncoderFactory(
+ absl::string_view peer_name,
std::unique_ptr<VideoEncoderFactory> delegate,
double bitrate_multiplier,
std::map<std::string, absl::optional<int>> stream_required_spatial_index)
const {
return std::make_unique<QualityAnalyzingVideoEncoderFactory>(
- std::move(delegate), bitrate_multiplier,
+ peer_name, std::move(delegate), bitrate_multiplier,
std::move(stream_required_spatial_index),
encoding_entities_id_generator_.get(), injector_, analyzer_.get());
}
std::unique_ptr<VideoDecoderFactory>
VideoQualityAnalyzerInjectionHelper::WrapVideoDecoderFactory(
+ absl::string_view peer_name,
std::unique_ptr<VideoDecoderFactory> delegate) const {
return std::make_unique<QualityAnalyzingVideoDecoderFactory>(
- std::move(delegate), encoding_entities_id_generator_.get(), extractor_,
- analyzer_.get());
+ peer_name, std::move(delegate), encoding_entities_id_generator_.get(),
+ extractor_, analyzer_.get());
}
std::unique_ptr<test::TestVideoCapturer::FramePreprocessor>
VideoQualityAnalyzerInjectionHelper::CreateFramePreprocessor(
+ absl::string_view peer_name,
const VideoConfig& config) {
std::vector<std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>>> sinks;
test::VideoFrameWriter* writer =
@@ -125,23 +134,27 @@ VideoQualityAnalyzerInjectionHelper::CreateFramePreprocessor(
known_video_configs_.insert({*config.stream_label, config});
}
return std::make_unique<AnalyzingFramePreprocessor>(
- std::move(*config.stream_label), analyzer_.get(), std::move(sinks));
+ peer_name, std::move(*config.stream_label), analyzer_.get(),
+ std::move(sinks));
}
std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>>
-VideoQualityAnalyzerInjectionHelper::CreateVideoSink() {
- return std::make_unique<AnalyzingVideoSink>(this);
+VideoQualityAnalyzerInjectionHelper::CreateVideoSink(
+ absl::string_view peer_name) {
+ return std::make_unique<AnalyzingVideoSink>(peer_name, this);
}
-void VideoQualityAnalyzerInjectionHelper::Start(std::string test_case_name,
- int max_threads_count) {
- analyzer_->Start(std::move(test_case_name), max_threads_count);
+void VideoQualityAnalyzerInjectionHelper::Start(
+ std::string test_case_name,
+ rtc::ArrayView<const std::string> peer_names,
+ int max_threads_count) {
+ analyzer_->Start(std::move(test_case_name), peer_names, max_threads_count);
}
void VideoQualityAnalyzerInjectionHelper::OnStatsReports(
- const std::string& pc_label,
- const StatsReports& stats_reports) {
- analyzer_->OnStatsReports(pc_label, stats_reports);
+ absl::string_view pc_label,
+ const rtc::scoped_refptr<const RTCStatsReport>& report) {
+ analyzer_->OnStatsReports(pc_label, report);
}
void VideoQualityAnalyzerInjectionHelper::Stop() {
@@ -169,12 +182,13 @@ VideoQualityAnalyzerInjectionHelper::MaybeCreateVideoWriter(
return out;
}
-void VideoQualityAnalyzerInjectionHelper::OnFrame(const VideoFrame& frame) {
+void VideoQualityAnalyzerInjectionHelper::OnFrame(absl::string_view peer_name,
+ const VideoFrame& frame) {
if (IsDummyFrameBuffer(frame.video_frame_buffer()->ToI420())) {
// This is dummy frame, so we don't need to process it further.
return;
}
- analyzer_->OnFrameRendered(frame);
+ analyzer_->OnFrameRendered(peer_name, frame);
std::string stream_label = analyzer_->GetStreamLabel(frame.id());
std::vector<std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>>>* sinks =
PopulateSinks(stream_label);
diff --git a/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h b/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h
index a0daa9ff18a..ca5243484d4 100644
--- a/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h
+++ b/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h
@@ -14,7 +14,10 @@
#include <map>
#include <memory>
#include <string>
+#include <vector>
+#include "absl/strings/string_view.h"
+#include "api/array_view.h"
#include "api/test/peerconnection_quality_test_fixture.h"
#include "api/test/stats_observer_interface.h"
#include "api/test/video_quality_analyzer_interface.h"
@@ -46,6 +49,7 @@ class VideoQualityAnalyzerInjectionHelper : public StatsObserverInterface {
// Wraps video encoder factory to give video quality analyzer access to frames
// before encoding and encoded images after.
std::unique_ptr<VideoEncoderFactory> WrapVideoEncoderFactory(
+ absl::string_view peer_name,
std::unique_ptr<VideoEncoderFactory> delegate,
double bitrate_multiplier,
std::map<std::string, absl::optional<int>> stream_required_spatial_index)
@@ -53,25 +57,31 @@ class VideoQualityAnalyzerInjectionHelper : public StatsObserverInterface {
// Wraps video decoder factory to give video quality analyzer access to
// received encoded images and frames, that were decoded from them.
std::unique_ptr<VideoDecoderFactory> WrapVideoDecoderFactory(
+ absl::string_view peer_name,
std::unique_ptr<VideoDecoderFactory> delegate) const;
// Creates VideoFrame preprocessor, that will allow video quality analyzer to
// get access to the captured frames. If provided config also specifies
// |input_dump_file_name|, video will be written into that file.
std::unique_ptr<test::TestVideoCapturer::FramePreprocessor>
- CreateFramePreprocessor(const VideoConfig& config);
+ CreateFramePreprocessor(absl::string_view peer_name,
+ const VideoConfig& config);
// Creates sink, that will allow video quality analyzer to get access to
// the rendered frames. If corresponding video track has
// |output_dump_file_name| in its VideoConfig, then video also will be written
// into that file.
- std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>> CreateVideoSink();
+ std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>> CreateVideoSink(
+ absl::string_view peer_name);
- void Start(std::string test_case_name, int max_threads_count);
+ void Start(std::string test_case_name,
+ rtc::ArrayView<const std::string> peer_names,
+ int max_threads_count);
// Forwards |stats_reports| for Peer Connection |pc_label| to
// |analyzer_|.
- void OnStatsReports(const std::string& pc_label,
- const StatsReports& stats_reports) override;
+ void OnStatsReports(
+ absl::string_view pc_label,
+ const rtc::scoped_refptr<const RTCStatsReport>& report) override;
// Stops VideoQualityAnalyzerInterface to populate final data and metrics.
// Should be invoked after analyzed video tracks are disposed.
@@ -80,20 +90,24 @@ class VideoQualityAnalyzerInjectionHelper : public StatsObserverInterface {
private:
class AnalyzingVideoSink final : public rtc::VideoSinkInterface<VideoFrame> {
public:
- explicit AnalyzingVideoSink(VideoQualityAnalyzerInjectionHelper* helper)
- : helper_(helper) {}
+ explicit AnalyzingVideoSink(absl::string_view peer_name,
+ VideoQualityAnalyzerInjectionHelper* helper)
+ : peer_name_(peer_name), helper_(helper) {}
~AnalyzingVideoSink() override = default;
- void OnFrame(const VideoFrame& frame) override { helper_->OnFrame(frame); }
+ void OnFrame(const VideoFrame& frame) override {
+ helper_->OnFrame(peer_name_, frame);
+ }
private:
+ const std::string peer_name_;
VideoQualityAnalyzerInjectionHelper* const helper_;
};
test::VideoFrameWriter* MaybeCreateVideoWriter(
absl::optional<std::string> file_name,
const PeerConnectionE2EQualityTestFixture::VideoConfig& config);
- void OnFrame(const VideoFrame& frame);
+ void OnFrame(absl::string_view peer_name, const VideoFrame& frame);
std::vector<std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>>>*
PopulateSinks(const std::string& stream_label);
diff --git a/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.cc b/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.cc
index 754a0a468fe..baf973f2770 100644
--- a/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.cc
+++ b/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.cc
@@ -10,51 +10,88 @@
#include "test/pc/e2e/analyzer/video/video_quality_metrics_reporter.h"
+#include "api/stats/rtc_stats.h"
+#include "api/stats/rtcstats_objects.h"
+#include "api/units/data_rate.h"
+#include "api/units/time_delta.h"
+#include "api/units/timestamp.h"
+
namespace webrtc {
namespace webrtc_pc_e2e {
-namespace {
-
-constexpr int kBitsInByte = 8;
-
-} // namespace
void VideoQualityMetricsReporter::Start(absl::string_view test_case_name) {
test_case_name_ = std::string(test_case_name);
+ start_time_ = Now();
}
-// TODO(bugs.webrtc.org/10430): Migrate to the new GetStats as soon as
-// bugs.webrtc.org/10428 is fixed.
void VideoQualityMetricsReporter::OnStatsReports(
- const std::string& pc_label,
- const StatsReports& stats_reports) {
- for (const StatsReport* stats_report : stats_reports) {
- // The only stats collected by this analyzer are present in
- // kStatsReportTypeBwe reports, so all other reports are just ignored.
- if (stats_report->type() != StatsReport::StatsType::kStatsReportTypeBwe) {
+ absl::string_view pc_label,
+ const rtc::scoped_refptr<const RTCStatsReport>& report) {
+ RTC_CHECK(start_time_)
+ << "Please invoke Start(...) method before calling OnStatsReports(...)";
+
+ auto transport_stats = report->GetStatsOfType<RTCTransportStats>();
+ if (transport_stats.size() == 0u ||
+ !transport_stats[0]->selected_candidate_pair_id.is_defined()) {
+ return;
+ }
+ RTC_DCHECK_EQ(transport_stats.size(), 1);
+ std::string selected_ice_id =
+ transport_stats[0]->selected_candidate_pair_id.ValueToString();
+ // Use the selected ICE candidate pair ID to get the appropriate ICE stats.
+ const RTCIceCandidatePairStats ice_candidate_pair_stats =
+ report->Get(selected_ice_id)->cast_to<const RTCIceCandidatePairStats>();
+
+ auto outbound_rtp_stats = report->GetStatsOfType<RTCOutboundRTPStreamStats>();
+ StatsSample sample;
+ for (auto& s : outbound_rtp_stats) {
+ if (!s->media_type.is_defined()) {
+ continue;
+ }
+ if (!(*s->media_type == RTCMediaStreamTrackKind::kVideo)) {
continue;
}
- const webrtc::StatsReport::Value* available_send_bandwidth =
- stats_report->FindValue(
- StatsReport::StatsValueName::kStatsValueNameAvailableSendBandwidth);
- const webrtc::StatsReport::Value* retransmission_bitrate =
- stats_report->FindValue(
- StatsReport::StatsValueName::kStatsValueNameRetransmitBitrate);
- const webrtc::StatsReport::Value* transmission_bitrate =
- stats_report->FindValue(
- StatsReport::StatsValueName::kStatsValueNameTransmitBitrate);
- RTC_CHECK(available_send_bandwidth);
- RTC_CHECK(retransmission_bitrate);
- RTC_CHECK(transmission_bitrate);
-
- rtc::CritScope crit(&video_bwe_stats_lock_);
- VideoBweStats& video_bwe_stats = video_bwe_stats_[pc_label];
+ if (s->timestamp_us() > sample.sample_time.us()) {
+ sample.sample_time = Timestamp::Micros(s->timestamp_us());
+ }
+ sample.retransmitted_bytes_sent +=
+ DataSize::Bytes(s->retransmitted_bytes_sent.ValueOrDefault(0ul));
+ sample.bytes_sent += DataSize::Bytes(s->bytes_sent.ValueOrDefault(0ul));
+ sample.header_bytes_sent +=
+ DataSize::Bytes(s->header_bytes_sent.ValueOrDefault(0ul));
+ }
+
+ rtc::CritScope crit(&video_bwe_stats_lock_);
+ VideoBweStats& video_bwe_stats = video_bwe_stats_[std::string(pc_label)];
+ if (ice_candidate_pair_stats.available_outgoing_bitrate.is_defined()) {
video_bwe_stats.available_send_bandwidth.AddSample(
- available_send_bandwidth->int_val());
- video_bwe_stats.transmission_bitrate.AddSample(
- transmission_bitrate->int_val());
- video_bwe_stats.retransmission_bitrate.AddSample(
- retransmission_bitrate->int_val());
+ DataRate::BitsPerSec(
+ *ice_candidate_pair_stats.available_outgoing_bitrate)
+ .bytes_per_sec());
+ }
+
+ StatsSample prev_sample = last_stats_sample_[std::string(pc_label)];
+ if (prev_sample.sample_time.IsZero()) {
+ prev_sample.sample_time = start_time_.value();
+ }
+ last_stats_sample_[std::string(pc_label)] = sample;
+
+ TimeDelta time_between_samples = sample.sample_time - prev_sample.sample_time;
+ if (time_between_samples.IsZero()) {
+ return;
}
+
+ DataRate retransmission_bitrate =
+ (sample.retransmitted_bytes_sent - prev_sample.retransmitted_bytes_sent) /
+ time_between_samples;
+ video_bwe_stats.retransmission_bitrate.AddSample(
+ retransmission_bitrate.bytes_per_sec());
+ DataRate transmission_bitrate =
+ (sample.bytes_sent + sample.header_bytes_sent - prev_sample.bytes_sent -
+ prev_sample.header_bytes_sent) /
+ time_between_samples;
+ video_bwe_stats.transmission_bitrate.AddSample(
+ transmission_bitrate.bytes_per_sec());
}
void VideoQualityMetricsReporter::StopAndReportResults() {
@@ -73,14 +110,11 @@ void VideoQualityMetricsReporter::ReportVideoBweResults(
const std::string& test_case_name,
const VideoBweStats& video_bwe_stats) {
ReportResult("available_send_bandwidth", test_case_name,
- video_bwe_stats.available_send_bandwidth / kBitsInByte,
- "bytesPerSecond");
+ video_bwe_stats.available_send_bandwidth, "bytesPerSecond");
ReportResult("transmission_bitrate", test_case_name,
- video_bwe_stats.transmission_bitrate / kBitsInByte,
- "bytesPerSecond");
+ video_bwe_stats.transmission_bitrate, "bytesPerSecond");
ReportResult("retransmission_bitrate", test_case_name,
- video_bwe_stats.retransmission_bitrate / kBitsInByte,
- "bytesPerSecond");
+ video_bwe_stats.retransmission_bitrate, "bytesPerSecond");
}
void VideoQualityMetricsReporter::ReportResult(
diff --git a/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.h b/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.h
index 1688a7b6fcf..a6ac9b4fa18 100644
--- a/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.h
+++ b/chromium/third_party/webrtc/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.h
@@ -15,6 +15,8 @@
#include <string>
#include "api/test/peerconnection_quality_test_fixture.h"
+#include "api/units/data_size.h"
+#include "api/units/timestamp.h"
#include "rtc_base/critical_section.h"
#include "rtc_base/numerics/samples_stats_counter.h"
#include "test/testsupport/perf_test.h"
@@ -31,15 +33,24 @@ struct VideoBweStats {
class VideoQualityMetricsReporter
: public PeerConnectionE2EQualityTestFixture::QualityMetricsReporter {
public:
- VideoQualityMetricsReporter() = default;
+ VideoQualityMetricsReporter(Clock* const clock) : clock_(clock) {}
~VideoQualityMetricsReporter() override = default;
void Start(absl::string_view test_case_name) override;
- void OnStatsReports(const std::string& pc_label,
- const StatsReports& reports) override;
+ void OnStatsReports(
+ absl::string_view pc_label,
+ const rtc::scoped_refptr<const RTCStatsReport>& report) override;
void StopAndReportResults() override;
private:
+ struct StatsSample {
+ DataSize bytes_sent = DataSize::Zero();
+ DataSize header_bytes_sent = DataSize::Zero();
+ DataSize retransmitted_bytes_sent = DataSize::Zero();
+
+ Timestamp sample_time = Timestamp::Zero();
+ };
+
std::string GetTestCaseName(const std::string& stream_label) const;
static void ReportVideoBweResults(const std::string& test_case_name,
const VideoBweStats& video_bwe_stats);
@@ -50,14 +61,20 @@ class VideoQualityMetricsReporter
const std::string& unit,
webrtc::test::ImproveDirection improve_direction =
webrtc::test::ImproveDirection::kNone);
+ Timestamp Now() const { return clock_->CurrentTime(); }
+
+ Clock* const clock_;
std::string test_case_name_;
+ absl::optional<Timestamp> start_time_;
rtc::CriticalSection video_bwe_stats_lock_;
// Map between a peer connection label (provided by the framework) and
// its video BWE stats.
std::map<std::string, VideoBweStats> video_bwe_stats_
RTC_GUARDED_BY(video_bwe_stats_lock_);
+ std::map<std::string, StatsSample> last_stats_sample_
+ RTC_GUARDED_BY(video_bwe_stats_lock_);
};
} // namespace webrtc_pc_e2e
diff --git a/chromium/third_party/webrtc/test/pc/e2e/echo/echo_emulation.cc b/chromium/third_party/webrtc/test/pc/e2e/echo/echo_emulation.cc
index 2beaa34cbd8..230e8e3eca9 100644
--- a/chromium/third_party/webrtc/test/pc/e2e/echo/echo_emulation.cc
+++ b/chromium/third_party/webrtc/test/pc/e2e/echo/echo_emulation.cc
@@ -57,17 +57,7 @@ void EchoEmulatingCapturer::OnAudioRendered(
}
queue_input_.assign(data.begin(), data.end());
if (!renderer_queue_.Insert(&queue_input_)) {
- // Test audio device works too slow with sanitizers and on some platforms
- // and can't properly process audio, so when capturer will be stopped
- // renderer will quickly overfill the queue.
- // TODO(crbug.com/webrtc/10850) remove it when test ADM will be fast enough.
-#if defined(THREAD_SANITIZER) || defined(MEMORY_SANITIZER) || \
- defined(ADDRESS_SANITIZER) || defined(WEBRTC_ANDROID) || \
- (defined(_MSC_VER) && !defined(__clang__) && !defined(NDEBUG))
RTC_LOG(WARNING) << "Echo queue is full";
-#else
- RTC_CHECK(false) << "Echo queue is full";
-#endif
}
}
diff --git a/chromium/third_party/webrtc/test/pc/e2e/media/media_helper.cc b/chromium/third_party/webrtc/test/pc/e2e/media/media_helper.cc
index d3fa6ffe039..d1c27838a6e 100644
--- a/chromium/third_party/webrtc/test/pc/e2e/media/media_helper.cc
+++ b/chromium/third_party/webrtc/test/pc/e2e/media/media_helper.cc
@@ -58,7 +58,7 @@ MediaHelper::MaybeAddVideo(TestPeer* peer) {
std::unique_ptr<test::TestVideoCapturer> capturer = CreateVideoCapturer(
video_config, peer->ReleaseVideoSource(i),
video_quality_analyzer_injection_helper_->CreateFramePreprocessor(
- video_config));
+ params->name.value(), video_config));
bool is_screencast =
video_config.content_hint == VideoTrackInterface::ContentHint::kText ||
video_config.content_hint ==
diff --git a/chromium/third_party/webrtc/test/pc/e2e/network_quality_metrics_reporter.cc b/chromium/third_party/webrtc/test/pc/e2e/network_quality_metrics_reporter.cc
index 56f0337037c..3b232fdc710 100644
--- a/chromium/third_party/webrtc/test/pc/e2e/network_quality_metrics_reporter.cc
+++ b/chromium/third_party/webrtc/test/pc/e2e/network_quality_metrics_reporter.cc
@@ -11,7 +11,8 @@
#include <utility>
-#include "api/stats_types.h"
+#include "api/stats/rtc_stats.h"
+#include "api/stats/rtcstats_objects.h"
#include "rtc_base/event.h"
#include "system_wrappers/include/field_trial.h"
#include "test/testsupport/perf_test.h"
@@ -40,28 +41,29 @@ void NetworkQualityMetricsReporter::Start(absl::string_view test_case_name) {
}
void NetworkQualityMetricsReporter::OnStatsReports(
- const std::string& pc_label,
- const StatsReports& reports) {
- rtc::CritScope cs(&lock_);
- int64_t payload_bytes_received = 0;
- int64_t payload_bytes_sent = 0;
- for (const StatsReport* report : reports) {
- if (report->type() == StatsReport::kStatsReportTypeSsrc) {
- const auto* received =
- report->FindValue(StatsReport::kStatsValueNameBytesReceived);
- if (received) {
- payload_bytes_received += received->int64_val();
- }
- const auto* sent =
- report->FindValue(StatsReport::kStatsValueNameBytesSent);
- if (sent) {
- payload_bytes_sent += sent->int64_val();
- }
- }
+ absl::string_view pc_label,
+ const rtc::scoped_refptr<const RTCStatsReport>& report) {
+ DataSize payload_received = DataSize::Zero();
+ DataSize payload_sent = DataSize::Zero();
+
+ auto inbound_stats = report->GetStatsOfType<RTCInboundRTPStreamStats>();
+ for (const auto& stat : inbound_stats) {
+ payload_received +=
+ DataSize::Bytes(stat->bytes_received.ValueOrDefault(0ul) +
+ stat->header_bytes_received.ValueOrDefault(0ul));
+ }
+
+ auto outbound_stats = report->GetStatsOfType<RTCOutboundRTPStreamStats>();
+ for (const auto& stat : outbound_stats) {
+ payload_sent +=
+ DataSize::Bytes(stat->bytes_sent.ValueOrDefault(0ul) +
+ stat->header_bytes_sent.ValueOrDefault(0ul));
}
- PCStats& stats = pc_stats_[pc_label];
- stats.payload_bytes_received = payload_bytes_received;
- stats.payload_bytes_sent = payload_bytes_sent;
+
+ rtc::CritScope cs(&lock_);
+ PCStats& stats = pc_stats_[std::string(pc_label)];
+ stats.payload_received = payload_received;
+ stats.payload_sent = payload_sent;
}
void NetworkQualityMetricsReporter::StopAndReportResults() {
@@ -125,9 +127,9 @@ void NetworkQualityMetricsReporter::ReportStats(
void NetworkQualityMetricsReporter::ReportPCStats(const std::string& pc_label,
const PCStats& stats) {
- ReportResult("payload_bytes_received", pc_label, stats.payload_bytes_received,
- "sizeInBytes");
- ReportResult("payload_bytes_sent", pc_label, stats.payload_bytes_sent,
+ ReportResult("payload_bytes_received", pc_label,
+ stats.payload_received.bytes(), "sizeInBytes");
+ ReportResult("payload_bytes_sent", pc_label, stats.payload_sent.bytes(),
"sizeInBytes");
}
diff --git a/chromium/third_party/webrtc/test/pc/e2e/network_quality_metrics_reporter.h b/chromium/third_party/webrtc/test/pc/e2e/network_quality_metrics_reporter.h
index 6454f175260..932e03140be 100644
--- a/chromium/third_party/webrtc/test/pc/e2e/network_quality_metrics_reporter.h
+++ b/chromium/third_party/webrtc/test/pc/e2e/network_quality_metrics_reporter.h
@@ -15,6 +15,7 @@
#include "api/test/network_emulation_manager.h"
#include "api/test/peerconnection_quality_test_fixture.h"
+#include "api/units/data_size.h"
#include "rtc_base/critical_section.h"
namespace webrtc {
@@ -30,16 +31,17 @@ class NetworkQualityMetricsReporter
// Network stats must be empty when this method will be invoked.
void Start(absl::string_view test_case_name) override;
- void OnStatsReports(const std::string& pc_label,
- const StatsReports& reports) override;
+ void OnStatsReports(
+ absl::string_view pc_label,
+ const rtc::scoped_refptr<const RTCStatsReport>& report) override;
void StopAndReportResults() override;
private:
struct PCStats {
// TODO(nisse): Separate audio and video counters. Depends on standard stat
// counters, enabled by field trial "WebRTC-UseStandardBytesStats".
- int64_t payload_bytes_received = 0;
- int64_t payload_bytes_sent = 0;
+ DataSize payload_received = DataSize::Zero();
+ DataSize payload_sent = DataSize::Zero();
};
static EmulatedNetworkStats PopulateStats(
diff --git a/chromium/third_party/webrtc/test/pc/e2e/peer_configurer.h b/chromium/third_party/webrtc/test/pc/e2e/peer_configurer.h
index 010ddcee82c..7da547bdc31 100644
--- a/chromium/third_party/webrtc/test/pc/e2e/peer_configurer.h
+++ b/chromium/third_party/webrtc/test/pc/e2e/peer_configurer.h
@@ -23,7 +23,6 @@
#include "api/task_queue/task_queue_factory.h"
#include "api/test/create_peer_connection_quality_test_frame_generator.h"
#include "api/test/peerconnection_quality_test_fixture.h"
-#include "api/transport/media/media_transport_interface.h"
#include "api/transport/network_control.h"
#include "api/video_codecs/video_decoder_factory.h"
#include "api/video_codecs/video_encoder_factory.h"
@@ -86,12 +85,6 @@ class PeerConfigurerImpl final
std::move(network_controller_factory);
return this;
}
- PeerConfigurer* SetMediaTransportFactory(
- std::unique_ptr<MediaTransportFactory> media_transport_factory) override {
- components_->pcf_dependencies->media_transport_factory =
- std::move(media_transport_factory);
- return this;
- }
PeerConfigurer* SetVideoEncoderFactory(
std::unique_ptr<VideoEncoderFactory> video_encoder_factory) override {
components_->pcf_dependencies->video_encoder_factory =
@@ -172,7 +165,15 @@ class PeerConfigurerImpl final
}
PeerConfigurer* SetBitrateParameters(
PeerConnectionInterface::BitrateParameters bitrate_params) override {
- params_->bitrate_params = bitrate_params;
+ BitrateSettings bitrate_settings;
+ bitrate_settings.min_bitrate_bps = bitrate_params.min_bitrate_bps;
+ bitrate_settings.start_bitrate_bps = bitrate_params.current_bitrate_bps;
+ bitrate_settings.max_bitrate_bps = bitrate_params.max_bitrate_bps;
+ return SetBitrateSettings(bitrate_settings);
+ }
+ PeerConfigurer* SetBitrateSettings(
+ BitrateSettings bitrate_settings) override {
+ params_->bitrate_settings = bitrate_settings;
return this;
}
diff --git a/chromium/third_party/webrtc/test/pc/e2e/peer_connection_e2e_smoke_test.cc b/chromium/third_party/webrtc/test/pc/e2e/peer_connection_e2e_smoke_test.cc
index 8080d4bb0a5..ab6aaa07313 100644
--- a/chromium/third_party/webrtc/test/pc/e2e/peer_connection_e2e_smoke_test.cc
+++ b/chromium/third_party/webrtc/test/pc/e2e/peer_connection_e2e_smoke_test.cc
@@ -83,7 +83,7 @@ class PeerConnectionE2EQualityTestSmokeTest : public ::testing::Test {
auto fixture = CreatePeerConnectionE2EQualityTestFixture(
test_case_name, /*audio_quality_analyzer=*/nullptr,
std::move(video_quality_analyzer));
- fixture->ExecuteAt(TimeDelta::Seconds(2),
+ fixture->ExecuteAt(TimeDelta::Seconds(1),
[alice_network_behavior_ptr](TimeDelta) {
BuiltInNetworkBehaviorConfig config;
config.loss_percent = 5;
@@ -110,19 +110,20 @@ class PeerConnectionE2EQualityTestSmokeTest : public ::testing::Test {
fixture->Run(run_params);
EXPECT_GE(fixture->GetRealTestDuration(), run_params.run_duration);
- for (auto stream_label : video_analyzer_ptr->GetKnownVideoStreams()) {
+ for (auto stream_key : video_analyzer_ptr->GetKnownVideoStreams()) {
FrameCounters stream_conters =
- video_analyzer_ptr->GetPerStreamCounters().at(stream_label);
+ video_analyzer_ptr->GetPerStreamCounters().at(stream_key);
// On some devices the pipeline can be too slow, so we actually can't
// force real constraints here. Lets just check, that at least 1
// frame passed whole pipeline.
- int64_t expected_min_fps = run_params.run_duration.seconds() * 30;
- EXPECT_GE(stream_conters.captured, expected_min_fps);
- EXPECT_GE(stream_conters.pre_encoded, 1);
- EXPECT_GE(stream_conters.encoded, 1);
- EXPECT_GE(stream_conters.received, 1);
- EXPECT_GE(stream_conters.decoded, 1);
- EXPECT_GE(stream_conters.rendered, 1);
+ int64_t expected_min_fps = run_params.run_duration.seconds() * 15;
+ EXPECT_GE(stream_conters.captured, expected_min_fps)
+ << stream_key.ToString();
+ EXPECT_GE(stream_conters.pre_encoded, 1) << stream_key.ToString();
+ EXPECT_GE(stream_conters.encoded, 1) << stream_key.ToString();
+ EXPECT_GE(stream_conters.received, 1) << stream_key.ToString();
+ EXPECT_GE(stream_conters.decoded, 1) << stream_key.ToString();
+ EXPECT_GE(stream_conters.rendered, 1) << stream_key.ToString();
}
}
};
@@ -148,7 +149,7 @@ TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_Smoke) {
RunTest(
"smoke", run_params,
[](PeerConfigurer* alice) {
- VideoConfig video(640, 360, 30);
+ VideoConfig video(160, 120, 15);
video.stream_label = "alice-video";
video.sync_group = "alice-media";
alice->AddVideoConfig(std::move(video));
@@ -164,23 +165,11 @@ TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_Smoke) {
},
[](PeerConfigurer* charlie) {
charlie->SetName("charlie");
- VideoConfig video(640, 360, 30);
+ VideoConfig video(160, 120, 15);
video.stream_label = "charlie-video";
video.temporal_layers_count = 2;
charlie->AddVideoConfig(std::move(video));
- VideoConfig screenshare(640, 360, 30);
- screenshare.stream_label = "charlie-screenshare";
- screenshare.content_hint = VideoTrackInterface::ContentHint::kText;
- ScreenShareConfig screen_share_config =
- ScreenShareConfig(TimeDelta::Seconds(2));
- screen_share_config.scrolling_params = ScrollingParams(
- TimeDelta::Millis(1800), kDefaultSlidesWidth, kDefaultSlidesHeight);
- auto screen_share_frame_generator =
- CreateScreenShareFrameGenerator(screenshare, screen_share_config);
- charlie->AddVideoConfig(std::move(screenshare),
- std::move(screen_share_frame_generator));
-
AudioConfig audio;
audio.stream_label = "charlie-audio";
audio.mode = AudioConfig::Mode::kFile;
@@ -192,6 +181,35 @@ TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_Smoke) {
// IOS debug builds can be quite slow, disabling to avoid issues with timeouts.
#if defined(WEBRTC_IOS) && defined(WEBRTC_ARCH_ARM64) && !defined(NDEBUG)
+#define MAYBE_Screenshare DISABLED_Screenshare
+#else
+#define MAYBE_Screenshare Screenshare
+#endif
+TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_Screenshare) {
+ RunParams run_params(TimeDelta::Seconds(2));
+ test::ScopedFieldTrials field_trials(
+ std::string(field_trial::GetFieldTrialString()) +
+ "WebRTC-UseStandardBytesStats/Enabled/");
+ RunTest(
+ "screenshare", run_params,
+ [](PeerConfigurer* alice) {
+ VideoConfig screenshare(320, 180, 30);
+ screenshare.stream_label = "alice-screenshare";
+ screenshare.content_hint = VideoTrackInterface::ContentHint::kText;
+ ScreenShareConfig screen_share_config =
+ ScreenShareConfig(TimeDelta::Seconds(2));
+ screen_share_config.scrolling_params = ScrollingParams(
+ TimeDelta::Millis(1800), kDefaultSlidesWidth, kDefaultSlidesHeight);
+ auto screen_share_frame_generator =
+ CreateScreenShareFrameGenerator(screenshare, screen_share_config);
+ alice->AddVideoConfig(std::move(screenshare),
+ std::move(screen_share_frame_generator));
+ },
+ [](PeerConfigurer* charlie) {});
+}
+
+// IOS debug builds can be quite slow, disabling to avoid issues with timeouts.
+#if defined(WEBRTC_IOS) && defined(WEBRTC_ARCH_ARM64) && !defined(NDEBUG)
#define MAYBE_Echo DISABLED_Echo
#else
#define MAYBE_Echo Echo
@@ -232,9 +250,9 @@ TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_Simulcast) {
RunTest(
"simulcast", run_params,
[](PeerConfigurer* alice) {
- VideoConfig simulcast(1280, 720, 30);
+ VideoConfig simulcast(1280, 720, 15);
simulcast.stream_label = "alice-simulcast";
- simulcast.simulcast_config = VideoSimulcastConfig(3, 0);
+ simulcast.simulcast_config = VideoSimulcastConfig(2, 0);
alice->AddVideoConfig(std::move(simulcast));
AudioConfig audio;
@@ -244,18 +262,7 @@ TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_Simulcast) {
test::ResourcePath("pc_quality_smoke_test_alice_source", "wav");
alice->SetAudioConfig(std::move(audio));
},
- [](PeerConfigurer* bob) {
- VideoConfig video(640, 360, 30);
- video.stream_label = "bob-video";
- bob->AddVideoConfig(std::move(video));
-
- AudioConfig audio;
- audio.stream_label = "bob-audio";
- audio.mode = AudioConfig::Mode::kFile;
- audio.input_file_name =
- test::ResourcePath("pc_quality_smoke_test_bob_source", "wav");
- bob->SetAudioConfig(std::move(audio));
- });
+ [](PeerConfigurer* bob) {});
}
// IOS debug builds can be quite slow, disabling to avoid issues with timeouts.
@@ -270,11 +277,11 @@ TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_Svc) {
RunTest(
"simulcast", run_params,
[](PeerConfigurer* alice) {
- VideoConfig simulcast(1280, 720, 30);
+ VideoConfig simulcast(1280, 720, 15);
simulcast.stream_label = "alice-svc";
// Because we have network with packets loss we can analyze only the
// highest spatial layer in SVC mode.
- simulcast.simulcast_config = VideoSimulcastConfig(3, 2);
+ simulcast.simulcast_config = VideoSimulcastConfig(2, 1);
alice->AddVideoConfig(std::move(simulcast));
AudioConfig audio;
@@ -284,18 +291,7 @@ TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_Svc) {
test::ResourcePath("pc_quality_smoke_test_alice_source", "wav");
alice->SetAudioConfig(std::move(audio));
},
- [](PeerConfigurer* bob) {
- VideoConfig video(640, 360, 30);
- video.stream_label = "bob-video";
- bob->AddVideoConfig(std::move(video));
-
- AudioConfig audio;
- audio.stream_label = "bob-audio";
- audio.mode = AudioConfig::Mode::kFile;
- audio.input_file_name =
- test::ResourcePath("pc_quality_smoke_test_bob_source", "wav");
- bob->SetAudioConfig(std::move(audio));
- });
+ [](PeerConfigurer* bob) {});
}
// IOS debug builds can be quite slow, disabling to avoid issues with timeouts.
@@ -312,11 +308,11 @@ TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_HighBitrate) {
RunTest(
"smoke", run_params,
[](PeerConfigurer* alice) {
- PeerConnectionInterface::BitrateParameters bitrate_params;
- bitrate_params.current_bitrate_bps = 3'000'000;
- bitrate_params.max_bitrate_bps = 3'000'000;
- alice->SetBitrateParameters(bitrate_params);
- VideoConfig video(800, 600, 30);
+ BitrateSettings bitrate_settings;
+ bitrate_settings.start_bitrate_bps = 3'000'000;
+ bitrate_settings.max_bitrate_bps = 3'000'000;
+ alice->SetBitrateSettings(bitrate_settings);
+ VideoConfig video(800, 600, 15);
video.stream_label = "alice-video";
video.min_encode_bitrate_bps = 500'000;
video.max_encode_bitrate_bps = 3'000'000;
@@ -330,24 +326,7 @@ TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_HighBitrate) {
audio.sampling_frequency_in_hz = 48000;
alice->SetAudioConfig(std::move(audio));
},
- [](PeerConfigurer* bob) {
- PeerConnectionInterface::BitrateParameters bitrate_params;
- bitrate_params.current_bitrate_bps = 3'000'000;
- bitrate_params.max_bitrate_bps = 3'000'000;
- bob->SetBitrateParameters(bitrate_params);
- VideoConfig video(800, 600, 30);
- video.stream_label = "bob-video";
- video.min_encode_bitrate_bps = 500'000;
- video.max_encode_bitrate_bps = 3'000'000;
- bob->AddVideoConfig(std::move(video));
-
- AudioConfig audio;
- audio.stream_label = "bob-audio";
- audio.mode = AudioConfig::Mode::kFile;
- audio.input_file_name =
- test::ResourcePath("pc_quality_smoke_test_bob_source", "wav");
- bob->SetAudioConfig(std::move(audio));
- });
+ [](PeerConfigurer* bob) {});
}
} // namespace webrtc_pc_e2e
diff --git a/chromium/third_party/webrtc/test/pc/e2e/peer_connection_quality_test.cc b/chromium/third_party/webrtc/test/pc/e2e/peer_connection_quality_test.cc
index 17104a90aa2..a23d2248f3c 100644
--- a/chromium/third_party/webrtc/test/pc/e2e/peer_connection_quality_test.cc
+++ b/chromium/third_party/webrtc/test/pc/e2e/peer_connection_quality_test.cc
@@ -14,6 +14,7 @@
#include <set>
#include <utility>
+#include "absl/strings/string_view.h"
#include "api/jsep.h"
#include "api/media_stream_interface.h"
#include "api/peer_connection_interface.h"
@@ -205,18 +206,21 @@ void PeerConnectionE2EQualityTest::Run(RunParams run_params) {
RemotePeerAudioConfig::Create(bob_configurer->params()->audio_config);
absl::optional<RemotePeerAudioConfig> bob_remote_audio_config =
RemotePeerAudioConfig::Create(alice_configurer->params()->audio_config);
- // Copy Alice and Bob video configs to correctly pass them into lambdas.
+ // Copy Alice and Bob video configs and names to correctly pass them into
+ // lambdas.
std::vector<VideoConfig> alice_video_configs =
alice_configurer->params()->video_configs;
+ std::string alice_name = alice_configurer->params()->name.value();
std::vector<VideoConfig> bob_video_configs =
bob_configurer->params()->video_configs;
+ std::string bob_name = bob_configurer->params()->name.value();
alice_ = TestPeerFactory::CreateTestPeer(
std::move(alice_configurer),
std::make_unique<FixturePeerConnectionObserver>(
- [this, bob_video_configs](
+ [this, bob_video_configs, alice_name](
rtc::scoped_refptr<RtpTransceiverInterface> transceiver) {
- OnTrackCallback(transceiver, bob_video_configs);
+ OnTrackCallback(alice_name, transceiver, bob_video_configs);
},
[this]() { StartVideo(alice_video_sources_); }),
video_quality_analyzer_injection_helper_.get(), signaling_thread.get(),
@@ -225,9 +229,9 @@ void PeerConnectionE2EQualityTest::Run(RunParams run_params) {
bob_ = TestPeerFactory::CreateTestPeer(
std::move(bob_configurer),
std::make_unique<FixturePeerConnectionObserver>(
- [this, alice_video_configs](
- rtc::scoped_refptr<RtpTransceiverInterface> transceiver) {
- OnTrackCallback(transceiver, alice_video_configs);
+ [this, alice_video_configs,
+ bob_name](rtc::scoped_refptr<RtpTransceiverInterface> transceiver) {
+ OnTrackCallback(bob_name, transceiver, alice_video_configs);
},
[this]() { StartVideo(bob_video_sources_); }),
video_quality_analyzer_injection_helper_.get(), signaling_thread.get(),
@@ -246,10 +250,13 @@ void PeerConnectionE2EQualityTest::Run(RunParams run_params) {
std::min(video_analyzer_threads, kMaxVideoAnalyzerThreads);
RTC_LOG(INFO) << "video_analyzer_threads=" << video_analyzer_threads;
quality_metrics_reporters_.push_back(
- std::make_unique<VideoQualityMetricsReporter>());
+ std::make_unique<VideoQualityMetricsReporter>(clock_));
- video_quality_analyzer_injection_helper_->Start(test_case_name_,
- video_analyzer_threads);
+ video_quality_analyzer_injection_helper_->Start(
+ test_case_name_,
+ std::vector<std::string>{alice_->params()->name.value(),
+ bob_->params()->name.value()},
+ video_analyzer_threads);
audio_quality_analyzer_->Start(test_case_name_, &analyzer_helper_);
for (auto& reporter : quality_metrics_reporters_) {
reporter->Start(test_case_name_);
@@ -371,6 +378,7 @@ void PeerConnectionE2EQualityTest::SetupRequiredFieldTrials(
}
void PeerConnectionE2EQualityTest::OnTrackCallback(
+ absl::string_view peer_name,
rtc::scoped_refptr<RtpTransceiverInterface> transceiver,
std::vector<VideoConfig> remote_video_configs) {
const rtc::scoped_refptr<MediaStreamTrackInterface>& track =
@@ -387,7 +395,7 @@ void PeerConnectionE2EQualityTest::OnTrackCallback(
// track->kind() is kVideoKind.
auto* video_track = static_cast<VideoTrackInterface*>(track.get());
std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>> video_sink =
- video_quality_analyzer_injection_helper_->CreateVideoSink();
+ video_quality_analyzer_injection_helper_->CreateVideoSink(peer_name);
video_track->AddOrUpdateSink(video_sink.get(), rtc::VideoSinkWants());
output_video_sinks_.push_back(std::move(video_sink));
}
diff --git a/chromium/third_party/webrtc/test/pc/e2e/peer_connection_quality_test.h b/chromium/third_party/webrtc/test/pc/e2e/peer_connection_quality_test.h
index 2eb7e708c6f..b302e5c51b5 100644
--- a/chromium/third_party/webrtc/test/pc/e2e/peer_connection_quality_test.h
+++ b/chromium/third_party/webrtc/test/pc/e2e/peer_connection_quality_test.h
@@ -15,6 +15,7 @@
#include <string>
#include <vector>
+#include "absl/strings/string_view.h"
#include "api/task_queue/task_queue_factory.h"
#include "api/test/audio_quality_analyzer_interface.h"
#include "api/test/peerconnection_quality_test_fixture.h"
@@ -80,7 +81,8 @@ class PeerConnectionE2EQualityTest
// For some functionality some field trials have to be enabled, so we will
// enable them here.
void SetupRequiredFieldTrials(const RunParams& run_params);
- void OnTrackCallback(rtc::scoped_refptr<RtpTransceiverInterface> transceiver,
+ void OnTrackCallback(absl::string_view peer_name,
+ rtc::scoped_refptr<RtpTransceiverInterface> transceiver,
std::vector<VideoConfig> remote_video_configs);
// Have to be run on the signaling thread.
void SetupCallOnSignalingThread(const RunParams& run_params);
diff --git a/chromium/third_party/webrtc/test/pc/e2e/peer_connection_quality_test_params.h b/chromium/third_party/webrtc/test/pc/e2e/peer_connection_quality_test_params.h
index ccb53492c3a..edefc7a0084 100644
--- a/chromium/third_party/webrtc/test/pc/e2e/peer_connection_quality_test_params.h
+++ b/chromium/third_party/webrtc/test/pc/e2e/peer_connection_quality_test_params.h
@@ -20,7 +20,6 @@
#include "api/rtc_event_log/rtc_event_log_factory_interface.h"
#include "api/task_queue/task_queue_factory.h"
#include "api/test/peerconnection_quality_test_fixture.h"
-#include "api/transport/media/media_transport_interface.h"
#include "api/transport/network_control.h"
#include "api/video_codecs/video_decoder_factory.h"
#include "api/video_codecs/video_encoder_factory.h"
@@ -47,7 +46,6 @@ struct PeerConnectionFactoryComponents {
std::unique_ptr<RtcEventLogFactoryInterface> event_log_factory;
std::unique_ptr<FecControllerFactoryInterface> fec_controller_factory;
std::unique_ptr<NetworkControllerFactoryInterface> network_controller_factory;
- std::unique_ptr<MediaTransportFactory> media_transport_factory;
std::unique_ptr<NetEqFactory> neteq_factory;
// Will be passed to MediaEngineInterface, that will be used in
@@ -114,7 +112,7 @@ struct Params {
absl::optional<std::string> aec_dump_path;
PeerConnectionInterface::RTCConfiguration rtc_configuration;
- PeerConnectionInterface::BitrateParameters bitrate_params;
+ BitrateSettings bitrate_settings;
};
} // namespace webrtc_pc_e2e
diff --git a/chromium/third_party/webrtc/test/pc/e2e/stats_poller.cc b/chromium/third_party/webrtc/test/pc/e2e/stats_poller.cc
index 987f26e7e80..e6973e6af19 100644
--- a/chromium/third_party/webrtc/test/pc/e2e/stats_poller.cc
+++ b/chromium/third_party/webrtc/test/pc/e2e/stats_poller.cc
@@ -18,14 +18,13 @@ namespace webrtc {
namespace webrtc_pc_e2e {
void InternalStatsObserver::PollStats() {
- peer_->pc()->GetStats(this, nullptr,
- webrtc::PeerConnectionInterface::StatsOutputLevel::
- kStatsOutputLevelStandard);
+ peer_->pc()->GetStats(this);
}
-void InternalStatsObserver::OnComplete(const StatsReports& reports) {
+void InternalStatsObserver::OnStatsDelivered(
+ const rtc::scoped_refptr<const RTCStatsReport>& report) {
for (auto* observer : observers_) {
- observer->OnStatsReports(pc_label_, reports);
+ observer->OnStatsReports(pc_label_, report);
}
}
diff --git a/chromium/third_party/webrtc/test/pc/e2e/stats_poller.h b/chromium/third_party/webrtc/test/pc/e2e/stats_poller.h
index 3d0c2d68013..157a1478349 100644
--- a/chromium/third_party/webrtc/test/pc/e2e/stats_poller.h
+++ b/chromium/third_party/webrtc/test/pc/e2e/stats_poller.h
@@ -17,6 +17,7 @@
#include <vector>
#include "api/peer_connection_interface.h"
+#include "api/stats/rtc_stats_collector_callback.h"
#include "api/test/stats_observer_interface.h"
#include "test/pc/e2e/test_peer.h"
@@ -25,7 +26,7 @@ namespace webrtc_pc_e2e {
// Helper class that will notify all the webrtc::test::StatsObserverInterface
// objects subscribed.
-class InternalStatsObserver : public StatsObserver {
+class InternalStatsObserver : public RTCStatsCollectorCallback {
public:
InternalStatsObserver(std::string pc_label,
TestPeer* peer,
@@ -36,7 +37,8 @@ class InternalStatsObserver : public StatsObserver {
void PollStats();
- void OnComplete(const StatsReports& reports) override;
+ void OnStatsDelivered(
+ const rtc::scoped_refptr<const RTCStatsReport>& report) override;
private:
std::string pc_label_;
diff --git a/chromium/third_party/webrtc/test/pc/e2e/test_peer_factory.cc b/chromium/third_party/webrtc/test/pc/e2e/test_peer_factory.cc
index 455337ef3a3..f700f1cb787 100644
--- a/chromium/third_party/webrtc/test/pc/e2e/test_peer_factory.cc
+++ b/chromium/third_party/webrtc/test/pc/e2e/test_peer_factory.cc
@@ -12,6 +12,7 @@
#include <utility>
#include "absl/memory/memory.h"
+#include "absl/strings/string_view.h"
#include "api/task_queue/default_task_queue_factory.h"
#include "api/video_codecs/builtin_video_decoder_factory.h"
#include "api/video_codecs/builtin_video_encoder_factory.h"
@@ -172,6 +173,7 @@ std::unique_ptr<cricket::MediaEngineInterface> CreateMediaEngine(
}
void WrapVideoEncoderFactory(
+ absl::string_view peer_name,
double bitrate_multiplier,
std::map<std::string, absl::optional<int>> stream_required_spatial_index,
PeerConnectionFactoryComponents* pcf_dependencies,
@@ -184,11 +186,12 @@ void WrapVideoEncoderFactory(
}
pcf_dependencies->video_encoder_factory =
video_analyzer_helper->WrapVideoEncoderFactory(
- std::move(video_encoder_factory), bitrate_multiplier,
+ peer_name, std::move(video_encoder_factory), bitrate_multiplier,
std::move(stream_required_spatial_index));
}
void WrapVideoDecoderFactory(
+ absl::string_view peer_name,
PeerConnectionFactoryComponents* pcf_dependencies,
VideoQualityAnalyzerInjectionHelper* video_analyzer_helper) {
std::unique_ptr<VideoDecoderFactory> video_decoder_factory;
@@ -199,7 +202,7 @@ void WrapVideoDecoderFactory(
}
pcf_dependencies->video_decoder_factory =
video_analyzer_helper->WrapVideoDecoderFactory(
- std::move(video_decoder_factory));
+ peer_name, std::move(video_decoder_factory));
}
// Creates PeerConnectionFactoryDependencies objects, providing entities
@@ -226,10 +229,6 @@ PeerConnectionFactoryDependencies CreatePCFDependencies(
pcf_deps.network_controller_factory =
std::move(pcf_dependencies->network_controller_factory);
}
- if (pcf_dependencies->media_transport_factory != nullptr) {
- pcf_deps.media_transport_factory =
- std::move(pcf_dependencies->media_transport_factory);
- }
if (pcf_dependencies->neteq_factory != nullptr) {
pcf_deps.neteq_factory = std::move(pcf_dependencies->neteq_factory);
}
@@ -309,10 +308,11 @@ std::unique_ptr<TestPeer> TestPeerFactory::CreateTestPeer(
params->audio_config, remote_audio_config, echo_emulation_config,
components->pcf_dependencies->task_queue_factory.get());
WrapVideoEncoderFactory(
- bitrate_multiplier,
+ params->name.value(), bitrate_multiplier,
CalculateRequiredSpatialIndexPerStream(params->video_configs),
components->pcf_dependencies.get(), video_analyzer_helper);
- WrapVideoDecoderFactory(components->pcf_dependencies.get(),
+ WrapVideoDecoderFactory(params->name.value(),
+ components->pcf_dependencies.get(),
video_analyzer_helper);
std::unique_ptr<cricket::MediaEngineInterface> media_engine =
CreateMediaEngine(components->pcf_dependencies.get(), audio_device_module,
@@ -329,7 +329,7 @@ std::unique_ptr<TestPeer> TestPeerFactory::CreateTestPeer(
rtc::scoped_refptr<PeerConnectionInterface> peer_connection =
peer_connection_factory->CreatePeerConnection(params->rtc_configuration,
std::move(pc_deps));
- peer_connection->SetBitrate(params->bitrate_params);
+ peer_connection->SetBitrate(params->bitrate_settings);
return absl::WrapUnique(new TestPeer(
peer_connection_factory, peer_connection, std::move(observer),
diff --git a/chromium/third_party/webrtc/test/peer_scenario/BUILD.gn b/chromium/third_party/webrtc/test/peer_scenario/BUILD.gn
index d702cf539f2..bdc77b70c8d 100644
--- a/chromium/third_party/webrtc/test/peer_scenario/BUILD.gn
+++ b/chromium/third_party/webrtc/test/peer_scenario/BUILD.gn
@@ -52,6 +52,8 @@ if (rtc_include_tests) {
"../network:emulated_network",
"../scenario",
"../time_controller",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/flags:flag",
"//third_party/abseil-cpp/absl/memory",
]
diff --git a/chromium/third_party/webrtc/test/run_loop_unittest.cc b/chromium/third_party/webrtc/test/run_loop_unittest.cc
index a356cc265a5..160aba0716a 100644
--- a/chromium/third_party/webrtc/test/run_loop_unittest.cc
+++ b/chromium/third_party/webrtc/test/run_loop_unittest.cc
@@ -17,7 +17,6 @@
namespace webrtc {
TEST(RunLoopTest, TaskQueueOnThread) {
- EXPECT_EQ(TaskQueueBase::Current(), nullptr);
test::RunLoop loop;
EXPECT_EQ(TaskQueueBase::Current(), loop.task_queue());
EXPECT_TRUE(loop.task_queue()->IsCurrent());
diff --git a/chromium/third_party/webrtc/test/scenario/BUILD.gn b/chromium/third_party/webrtc/test/scenario/BUILD.gn
index e2e5f8cef22..33c68a8211d 100644
--- a/chromium/third_party/webrtc/test/scenario/BUILD.gn
+++ b/chromium/third_party/webrtc/test/scenario/BUILD.gn
@@ -141,6 +141,8 @@ if (rtc_include_tests) {
"../logging:log_writer",
"../network:emulated_network",
"../time_controller",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/flags:flag",
"//third_party/abseil-cpp/absl/flags:parse",
"//third_party/abseil-cpp/absl/memory",
diff --git a/chromium/third_party/webrtc/test/scenario/call_client.cc b/chromium/third_party/webrtc/test/scenario/call_client.cc
index fb888df6946..0107497252d 100644
--- a/chromium/third_party/webrtc/test/scenario/call_client.cc
+++ b/chromium/third_party/webrtc/test/scenario/call_client.cc
@@ -54,7 +54,8 @@ Call* CreateCall(TimeController* time_controller,
RtcEventLog* event_log,
CallClientConfig config,
LoggingNetworkControllerFactory* network_controller_factory,
- rtc::scoped_refptr<AudioState> audio_state) {
+ rtc::scoped_refptr<AudioState> audio_state,
+ rtc::scoped_refptr<SharedModuleThread> call_thread) {
CallConfig call_config(event_log);
call_config.bitrate_config.max_bitrate_bps =
config.transport.rates.max_rate.bps_or(-1);
@@ -67,7 +68,7 @@ Call* CreateCall(TimeController* time_controller,
call_config.audio_state = audio_state;
call_config.trials = config.field_trials;
return Call::Create(call_config, time_controller->GetClock(),
- time_controller->CreateProcessThread("CallModules"),
+ std::move(call_thread),
time_controller->CreateProcessThread("Pacer"));
}
@@ -213,9 +214,14 @@ CallClient::CallClient(
event_log_ = CreateEventLog(time_controller_->GetTaskQueueFactory(),
log_writer_factory_.get());
fake_audio_setup_ = InitAudio(time_controller_);
+ RTC_DCHECK(!module_thread_);
+ module_thread_ = SharedModuleThread::Create(
+ time_controller_->CreateProcessThread("CallThread"),
+ [this]() { module_thread_ = nullptr; });
+
call_.reset(CreateCall(time_controller_, event_log_.get(), config,
&network_controller_factory_,
- fake_audio_setup_.audio_state));
+ fake_audio_setup_.audio_state, module_thread_));
transport_ = std::make_unique<NetworkNodeTransport>(clock_, call_.get());
});
}
@@ -223,6 +229,7 @@ CallClient::CallClient(
CallClient::~CallClient() {
SendTask([&] {
call_.reset();
+ RTC_DCHECK(!module_thread_); // Should be set to null in the lambda above.
fake_audio_setup_ = {};
rtc::Event done;
event_log_->StopLogging([&done] { done.Set(); });
diff --git a/chromium/third_party/webrtc/test/scenario/call_client.h b/chromium/third_party/webrtc/test/scenario/call_client.h
index 33fa2765cb5..80814eb1b3b 100644
--- a/chromium/third_party/webrtc/test/scenario/call_client.h
+++ b/chromium/third_party/webrtc/test/scenario/call_client.h
@@ -157,6 +157,8 @@ class CallClient : public EmulatedNetworkReceiverInterface {
// Defined last so it's destroyed first.
TaskQueueForTest task_queue_;
+ rtc::scoped_refptr<SharedModuleThread> module_thread_;
+
const FieldTrialBasedConfig field_trials_;
};
diff --git a/chromium/third_party/webrtc/test/scenario/scenario_unittest.cc b/chromium/third_party/webrtc/test/scenario/scenario_unittest.cc
index 839e6a375e5..f74c1a5bdf3 100644
--- a/chromium/third_party/webrtc/test/scenario/scenario_unittest.cc
+++ b/chromium/third_party/webrtc/test/scenario/scenario_unittest.cc
@@ -119,7 +119,8 @@ TEST(ScenarioTest, MAYBE_RealTimeEncoding) {
}
// Regression tests based on previous runs.
EXPECT_LT(analyzer.stats().lost_count, 2);
- EXPECT_NEAR(analyzer.stats().psnr_with_freeze.Mean(), 38, 10);
+ // This is far below the expected value but ensures that we get something.
+ EXPECT_GT(analyzer.stats().psnr_with_freeze.Mean(), 10);
}
TEST(ScenarioTest, SimTimeFakeing) {
diff --git a/chromium/third_party/webrtc/test/test_main.cc b/chromium/third_party/webrtc/test/test_main.cc
index 8555d5e6dae..50469795486 100644
--- a/chromium/third_party/webrtc/test/test_main.cc
+++ b/chromium/third_party/webrtc/test/test_main.cc
@@ -16,11 +16,10 @@
int main(int argc, char* argv[]) {
// Initialize the symbolizer to get a human-readable stack trace
- // TODO(crbug.com/1050976): Breaks iossim tests, re-enable when fixed.
- // absl::InitializeSymbolizer(argv[0]);
+ absl::InitializeSymbolizer(argv[0]);
- // absl::FailureSignalHandlerOptions options;
- // absl::InstallFailureSignalHandler(options);
+ absl::FailureSignalHandlerOptions options;
+ absl::InstallFailureSignalHandler(options);
std::unique_ptr<webrtc::TestMain> main = webrtc::TestMain::Create();
int err_code = main->Init(&argc, argv);
diff --git a/chromium/third_party/webrtc/test/test_main_lib.cc b/chromium/third_party/webrtc/test/test_main_lib.cc
index 15318b49e15..f5e02341f3f 100644
--- a/chromium/third_party/webrtc/test/test_main_lib.cc
+++ b/chromium/third_party/webrtc/test/test_main_lib.cc
@@ -17,6 +17,7 @@
#include "absl/flags/flag.h"
#include "absl/flags/parse.h"
#include "absl/memory/memory.h"
+#include "absl/strings/match.h"
#include "absl/types/optional.h"
#include "rtc_base/checks.h"
#include "rtc_base/event_tracer.h"
@@ -100,6 +101,62 @@ namespace {
class TestMainImpl : public TestMain {
public:
+ // In order to set up a fresh rtc::Thread state for each test and avoid
+ // accidentally carrying over pending tasks that might be sent from one test
+ // and executed while another test is running, we inject a TestListener
+ // that sets up a new rtc::Thread instance for the main thread, per test.
+ class TestListener : public ::testing::EmptyTestEventListener {
+ public:
+ TestListener() = default;
+
+ private:
+ bool IsDeathTest(const char* test_case_name, const char* test_name) {
+ // Workaround to avoid wrapping the main thread when we run death tests.
+ // The approach we take for detecting death tests is essentially the same
+ // as gtest does internally. Gtest does this:
+ //
+ // static const char kDeathTestCaseFilter[] = "*DeathTest:*DeathTest/*";
+ // ::testing::internal::UnitTestOptions::MatchesFilter(
+ // test_case_name, kDeathTestCaseFilter);
+ //
+ // Our approach is a little more straightforward.
+ if (absl::EndsWith(test_case_name, "DeathTest"))
+ return true;
+
+ return absl::EndsWith(test_name, "DeathTest");
+ }
+
+ void OnTestStart(const ::testing::TestInfo& test_info) override {
+ if (!IsDeathTest(test_info.test_suite_name(), test_info.name())) {
+ // Ensure that main thread gets wrapped as an rtc::Thread.
+ // TODO(bugs.webrtc.org/9714): It might be better to avoid wrapping the
+ // main thread, or leave it to individual tests that need it. But as
+ // long as we have automatic thread wrapping, we need this to avoid that
+ // some other random thread (which one depending on which tests are run)
+ // gets automatically wrapped.
+ thread_ = rtc::Thread::CreateWithSocketServer();
+ thread_->WrapCurrent();
+ RTC_DCHECK_EQ(rtc::Thread::Current(), thread_.get());
+ } else {
+ RTC_LOG(LS_INFO) << "No thread auto wrap for death test.";
+ }
+ }
+
+ void OnTestEnd(const ::testing::TestInfo& test_info) override {
+ // Terminate the message loop. Note that if the test failed to clean
+ // up pending messages, this may execute part of the test. Ideally we
+ // should print a warning message here, or even fail the test if it leaks.
+ if (thread_) {
+ thread_->Quit(); // Signal quit.
+ thread_->Run(); // Flush + process Quit signal.
+ thread_->UnwrapCurrent();
+ thread_ = nullptr;
+ }
+ }
+
+ std::unique_ptr<rtc::Thread> thread_;
+ };
+
int Init(int* argc, char* argv[]) override {
::testing::InitGoogleMock(argc, argv);
absl::ParseCommandLine(*argc, argv);
@@ -134,14 +191,7 @@ class TestMainImpl : public TestMain {
rtc::InitializeSSL();
rtc::SSLStreamAdapter::EnableTimeCallbackForTesting();
- // Ensure that main thread gets wrapped as an rtc::Thread.
- // TODO(bugs.webrt.org/9714): It might be better to avoid wrapping the main
- // thread, or leave it to individual tests that need it. But as long as we
- // have automatic thread wrapping, we need this to avoid that some other
- // random thread (which one depending on which tests are run) gets
- // automatically wrapped.
- rtc::ThreadManager::Instance()->WrapCurrentThread();
- RTC_CHECK(rtc::Thread::Current());
+ ::testing::UnitTest::GetInstance()->listeners().Append(new TestListener());
return 0;
}
diff --git a/chromium/third_party/webrtc/test/testsupport/mock/mock_frame_reader.h b/chromium/third_party/webrtc/test/testsupport/mock/mock_frame_reader.h
index 8da3695d23b..bda6b1ad2dc 100644
--- a/chromium/third_party/webrtc/test/testsupport/mock/mock_frame_reader.h
+++ b/chromium/third_party/webrtc/test/testsupport/mock/mock_frame_reader.h
@@ -19,11 +19,11 @@ namespace test {
class MockFrameReader : public FrameReader {
public:
- MOCK_METHOD0(Init, bool());
- MOCK_METHOD0(ReadFrame, rtc::scoped_refptr<I420Buffer>());
- MOCK_METHOD0(Close, void());
- MOCK_METHOD0(FrameLength, size_t());
- MOCK_METHOD0(NumberOfFrames, int());
+ MOCK_METHOD(bool, Init, (), (override));
+ MOCK_METHOD(rtc::scoped_refptr<I420Buffer>, ReadFrame, (), (override));
+ MOCK_METHOD(void, Close, (), (override));
+ MOCK_METHOD(size_t, FrameLength, (), (override));
+ MOCK_METHOD(int, NumberOfFrames, (), (override));
};
} // namespace test
diff --git a/chromium/third_party/webrtc/test/testsupport/perf_test.h b/chromium/third_party/webrtc/test/testsupport/perf_test.h
index b0a5607d20a..4bb6773336c 100644
--- a/chromium/third_party/webrtc/test/testsupport/perf_test.h
+++ b/chromium/third_party/webrtc/test/testsupport/perf_test.h
@@ -15,7 +15,6 @@
#include <string>
#include <vector>
-#include "absl/flags/flag.h"
#include "api/array_view.h"
#include "rtc_base/numerics/samples_stats_counter.h"
diff --git a/chromium/third_party/webrtc/test/testsupport/resources_dir_flag.cc b/chromium/third_party/webrtc/test/testsupport/resources_dir_flag.cc
index a6ab3b537be..87a449a401d 100644
--- a/chromium/third_party/webrtc/test/testsupport/resources_dir_flag.cc
+++ b/chromium/third_party/webrtc/test/testsupport/resources_dir_flag.cc
@@ -10,6 +10,8 @@
#include "test/testsupport/resources_dir_flag.h"
+#include "absl/flags/flag.h"
+
ABSL_FLAG(std::string,
resources_dir,
"",
diff --git a/chromium/third_party/webrtc/test/testsupport/resources_dir_flag.h b/chromium/third_party/webrtc/test/testsupport/resources_dir_flag.h
index 055cc825467..7d6f192d9bd 100644
--- a/chromium/third_party/webrtc/test/testsupport/resources_dir_flag.h
+++ b/chromium/third_party/webrtc/test/testsupport/resources_dir_flag.h
@@ -13,7 +13,7 @@
#ifndef TEST_TESTSUPPORT_RESOURCES_DIR_FLAG_H__
#define TEST_TESTSUPPORT_RESOURCES_DIR_FLAG_H__
-#include "absl/flags/flag.h"
+#include "absl/flags/declare.h"
ABSL_DECLARE_FLAG(std::string, resources_dir);
diff --git a/chromium/third_party/webrtc/test/testsupport/test_artifacts_unittest.cc b/chromium/third_party/webrtc/test/testsupport/test_artifacts_unittest.cc
index 98de9e4bb83..fb577610fbb 100644
--- a/chromium/third_party/webrtc/test/testsupport/test_artifacts_unittest.cc
+++ b/chromium/third_party/webrtc/test/testsupport/test_artifacts_unittest.cc
@@ -14,6 +14,7 @@
#include <string>
+#include "absl/flags/declare.h"
#include "absl/flags/flag.h"
#include "rtc_base/system/file_wrapper.h"
#include "test/gtest.h"
diff --git a/chromium/third_party/webrtc/test/time_controller/BUILD.gn b/chromium/third_party/webrtc/test/time_controller/BUILD.gn
index 7f77f0afec7..c3d5dc9031b 100644
--- a/chromium/third_party/webrtc/test/time_controller/BUILD.gn
+++ b/chromium/third_party/webrtc/test/time_controller/BUILD.gn
@@ -41,8 +41,8 @@ rtc_library("time_controller") {
"../../rtc_base/synchronization:yield_policy",
"../../rtc_base/task_utils:to_queued_task",
"../../system_wrappers",
- "//third_party/abseil-cpp/absl/strings",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
if (rtc_include_tests) {
diff --git a/chromium/third_party/webrtc/test/time_controller/simulated_time_controller.cc b/chromium/third_party/webrtc/test/time_controller/simulated_time_controller.cc
index a81083b4fb9..769be3ff782 100644
--- a/chromium/third_party/webrtc/test/time_controller/simulated_time_controller.cc
+++ b/chromium/third_party/webrtc/test/time_controller/simulated_time_controller.cc
@@ -57,7 +57,6 @@ SimulatedTimeControllerImpl::CreateTaskQueue(
std::unique_ptr<ProcessThread> SimulatedTimeControllerImpl::CreateProcessThread(
const char* thread_name) {
- rtc::CritScope lock(&lock_);
auto process_thread =
std::make_unique<SimulatedProcessThread>(this, thread_name);
Register(process_thread.get());
@@ -117,10 +116,12 @@ void SimulatedTimeControllerImpl::RunReadyRunners() {
while (!ready_runners_.empty()) {
auto* runner = ready_runners_.front();
ready_runners_.pop_front();
+ lock_.Leave();
// Note that the RunReady function might indirectly cause a call to
- // Unregister() which will recursively grab |lock_| again to remove items
- // from |ready_runners_|.
+ // Unregister() which will grab |lock_| again to remove items from
+ // |ready_runners_|.
runner->RunReady(current_time);
+ lock_.Enter();
}
}
}
@@ -169,6 +170,7 @@ void SimulatedTimeControllerImpl::StartYield(TaskQueueBase* yielding_from) {
void SimulatedTimeControllerImpl::StopYield(TaskQueueBase* yielding_from) {
yielded_.erase(yielding_from);
}
+
} // namespace sim_time_impl
GlobalSimulatedTimeController::GlobalSimulatedTimeController(
diff --git a/chromium/third_party/webrtc/test/time_controller/simulated_time_controller.h b/chromium/third_party/webrtc/test/time_controller/simulated_time_controller.h
index 758f90989e8..48112b3a318 100644
--- a/chromium/third_party/webrtc/test/time_controller/simulated_time_controller.h
+++ b/chromium/third_party/webrtc/test/time_controller/simulated_time_controller.h
@@ -52,32 +52,34 @@ class SimulatedTimeControllerImpl : public TaskQueueFactory,
std::unique_ptr<TaskQueueBase, TaskQueueDeleter> CreateTaskQueue(
absl::string_view name,
- Priority priority) const override;
+ Priority priority) const RTC_LOCKS_EXCLUDED(time_lock_) override;
// Implements the YieldInterface by running ready tasks on all task queues,
// except that if this method is called from a task, the task queue running
// that task is skipped.
- void YieldExecution() override;
+ void YieldExecution() RTC_LOCKS_EXCLUDED(time_lock_, lock_) override;
// Create process thread with the name |thread_name|.
- std::unique_ptr<ProcessThread> CreateProcessThread(const char* thread_name);
+ std::unique_ptr<ProcessThread> CreateProcessThread(const char* thread_name)
+ RTC_LOCKS_EXCLUDED(time_lock_, lock_);
// Create thread using provided |socket_server|.
std::unique_ptr<rtc::Thread> CreateThread(
const std::string& name,
- std::unique_ptr<rtc::SocketServer> socket_server);
+ std::unique_ptr<rtc::SocketServer> socket_server)
+ RTC_LOCKS_EXCLUDED(time_lock_, lock_);
// Runs all runners in |runners_| that has tasks or modules ready for
// execution.
- void RunReadyRunners();
+ void RunReadyRunners() RTC_LOCKS_EXCLUDED(time_lock_, lock_);
// Return |current_time_|.
- Timestamp CurrentTime() const;
+ Timestamp CurrentTime() const RTC_LOCKS_EXCLUDED(time_lock_);
// Return min of runner->GetNextRunTime() for runner in |runners_|.
- Timestamp NextRunTime() const;
+ Timestamp NextRunTime() const RTC_LOCKS_EXCLUDED(lock_);
// Set |current_time_| to |target_time|.
- void AdvanceTime(Timestamp target_time);
+ void AdvanceTime(Timestamp target_time) RTC_LOCKS_EXCLUDED(time_lock_);
// Adds |runner| to |runners_|.
- void Register(SimulatedSequenceRunner* runner);
+ void Register(SimulatedSequenceRunner* runner) RTC_LOCKS_EXCLUDED(lock_);
// Removes |runner| from |runners_|.
- void Unregister(SimulatedSequenceRunner* runner);
+ void Unregister(SimulatedSequenceRunner* runner) RTC_LOCKS_EXCLUDED(lock_);
// Indicates that |yielding_from| is not ready to run.
void StartYield(TaskQueueBase* yielding_from);
diff --git a/chromium/third_party/webrtc/tools_webrtc/get_landmines.py b/chromium/third_party/webrtc/tools_webrtc/get_landmines.py
index ba8ac9c1bf7..399fb0ad72d 100755
--- a/chromium/third_party/webrtc/tools_webrtc/get_landmines.py
+++ b/chromium/third_party/webrtc/tools_webrtc/get_landmines.py
@@ -43,6 +43,7 @@ def print_landmines(): # pylint: disable=invalid-name
'https://codereview.webrtc.org/2786603002')
print ('Clobber due to Win Debug linking errors in '
'https://codereview.webrtc.org/2832063003/')
+ print 'Clobber win x86 bots (issues with isolated files).'
if host_os() == 'mac':
print 'Clobber due to iOS compile errors (crbug.com/694721)'
print 'Clobber to unblock https://codereview.webrtc.org/2709573003'
diff --git a/chromium/third_party/webrtc/tools_webrtc/libs/generate_licenses.py b/chromium/third_party/webrtc/tools_webrtc/libs/generate_licenses.py
index 04d655cba06..0e5a9ef2791 100755
--- a/chromium/third_party/webrtc/tools_webrtc/libs/generate_licenses.py
+++ b/chromium/third_party/webrtc/tools_webrtc/libs/generate_licenses.py
@@ -78,6 +78,7 @@ LIB_TO_LICENSES_DICT = {
# Compile time dependencies, no license needed:
'yasm': [],
'ow2_asm': [],
+ 'jdk': [],
}
# Third_party library _regex_ to licences mapping. Keys are regular expression
diff --git a/chromium/third_party/webrtc/tools_webrtc/mb/mb_config.pyl b/chromium/third_party/webrtc/tools_webrtc/mb/mb_config.pyl
index 059c724194c..9f9fdd2d41d 100644
--- a/chromium/third_party/webrtc/tools_webrtc/mb/mb_config.pyl
+++ b/chromium/third_party/webrtc/tools_webrtc/mb/mb_config.pyl
@@ -404,13 +404,16 @@
'ios', 'release_bot', 'arm64', 'no_ios_code_signing', 'ios_use_goma_rbe'
],
'ios_internal_debug_bot_arm64': [
- 'ios', 'debug_bot', 'arm64', 'ios_use_goma_rbe'
+ 'ios', 'debug_bot', 'arm64', 'ios_use_goma_rbe',
+ 'ios_code_signing_identity_description',
],
'ios_internal_release_bot_arm64': [
- 'ios', 'release_bot', 'arm64', 'ios_use_goma_rbe'
+ 'ios', 'release_bot', 'arm64', 'ios_use_goma_rbe',
+ 'ios_code_signing_identity_description',
],
'ios_internal_pure_release_bot_arm64': [
- 'ios', 'pure_release_bot', 'arm64', 'ios_use_goma_rbe'
+ 'ios', 'pure_release_bot', 'arm64', 'ios_use_goma_rbe',
+ 'ios_code_signing_identity_description',
],
'ios_debug_bot_x64': [
'ios', 'debug_bot', 'x64', 'ios_use_goma_rbe'
@@ -506,6 +509,10 @@
'gn_args': 'use_goma=true',
},
+ 'ios_code_signing_identity_description': {
+ 'gn_args': 'ios_code_signing_identity_description="Apple Development"',
+ },
+
'ios_use_goma_rbe': {
'gn_args': 'ios_use_goma_rbe=true',
},
diff --git a/chromium/third_party/webrtc/tools_webrtc/sancov/README b/chromium/third_party/webrtc/tools_webrtc/sancov/README
deleted file mode 100644
index c9b43e7ae0b..00000000000
--- a/chromium/third_party/webrtc/tools_webrtc/sancov/README
+++ /dev/null
@@ -1,9 +0,0 @@
-Blacklist for llvm's sancov
-
-See http://clang.llvm.org/docs/SanitizerCoverage.html .
-
-Example usage:
-> cd out/Debug
-> UBSAN_OPTIONS="coverage=1" ./peerconnection_unittests
-> sancov -html-report -blacklist ../../tools/sancov/blacklist.txt \
-> peerconnection_unittests peerconnection_unittests.123.sancov
diff --git a/chromium/third_party/webrtc/tools_webrtc/sancov/blacklist.txt b/chromium/third_party/webrtc/tools_webrtc/sancov/blacklist.txt
deleted file mode 100644
index 7043a18ef2c..00000000000
--- a/chromium/third_party/webrtc/tools_webrtc/sancov/blacklist.txt
+++ /dev/null
@@ -1,21 +0,0 @@
-#############################################################################
-# sancov blacklist.
-# Please think twice before you add or remove these rules.
-
-#############################################################################
-# no coverage report for third party
-src:*/chromium/*
-src:*/third_party/*
-
-# OpenH264 triggers some errors that are out of our control.
-src:*/third_party/ffmpeg/libavcodec/*
-src:*/third_party/openh264/*
-
-#############################################################################
-# Ignore system libraries.
-src:*/usr/*
-
-#############################################################################
-# Ignore test source.
-src:*/test/*
-src:*_unittest.cc
diff --git a/chromium/third_party/webrtc/video/BUILD.gn b/chromium/third_party/webrtc/video/BUILD.gn
index bfc3b0695cf..84dce1fdd08 100644
--- a/chromium/third_party/webrtc/video/BUILD.gn
+++ b/chromium/third_party/webrtc/video/BUILD.gn
@@ -34,6 +34,8 @@ rtc_library("video") {
"rtp_streams_synchronizer2.h",
"rtp_video_stream_receiver.cc",
"rtp_video_stream_receiver.h",
+ "rtp_video_stream_receiver2.cc",
+ "rtp_video_stream_receiver2.h",
"rtp_video_stream_receiver_frame_transformer_delegate.cc",
"rtp_video_stream_receiver_frame_transformer_delegate.h",
"send_delay_stats.cc",
@@ -77,7 +79,6 @@ rtc_library("video") {
"../api/crypto:options",
"../api/rtc_event_log",
"../api/task_queue",
- "../api/transport/media:media_transport_interface",
"../api/units:timestamp",
"../api/video:encoded_image",
"../api/video:recordable_encoded_frame",
@@ -111,6 +112,7 @@ rtc_library("video") {
"../modules/video_coding:nack_module",
"../modules/video_coding:video_codec_interface",
"../modules/video_coding:video_coding_utility",
+ "../modules/video_coding/deprecated:nack_module",
"../modules/video_processing",
"../rtc_base:checks",
"../rtc_base:rate_limiter",
@@ -135,6 +137,8 @@ rtc_library("video") {
"../system_wrappers",
"../system_wrappers:field_trial",
"../system_wrappers:metrics",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/base:core_headers",
"//third_party/abseil-cpp/absl/memory",
@@ -166,8 +170,8 @@ rtc_library("video_stream_decoder_impl") {
"../rtc_base:rtc_base_approved",
"../rtc_base:rtc_task_queue",
"../system_wrappers",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("frame_dumping_decoder") {
@@ -209,6 +213,7 @@ rtc_library("video_stream_encoder_impl") {
deps = [
"../api:rtp_parameters",
+ "../api/adaptation:resource_adaptation_api",
"../api/task_queue:task_queue",
"../api/units:data_rate",
"../api/video:encoded_image",
@@ -250,6 +255,8 @@ rtc_library("video_stream_encoder_impl") {
"../system_wrappers",
"../system_wrappers:field_trial",
"adaptation:video_adaptation",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/base:core_headers",
"//third_party/abseil-cpp/absl/types:optional",
@@ -291,7 +298,6 @@ if (rtc_include_tests) {
"../api/rtc_event_log:rtc_event_log_factory",
"../api/task_queue",
"../api/task_queue:default_task_queue_factory",
- "../api/transport/media:media_transport_interface",
"../api/video:builtin_video_bitrate_allocator_factory",
"../api/video:video_bitrate_allocator_factory",
"../api/video:video_frame",
@@ -333,6 +339,8 @@ if (rtc_include_tests) {
"../test:test_support_test_artifacts",
"../test:video_test_common",
"../test:video_test_support",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/flags:flag",
"//third_party/abseil-cpp/absl/flags:parse",
@@ -363,8 +371,11 @@ if (rtc_include_tests) {
"../test:test_common",
"../test:test_support",
"//testing/gtest",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/flags:flag",
"//third_party/abseil-cpp/absl/flags:parse",
+ "//third_party/abseil-cpp/absl/types:optional",
]
}
@@ -415,8 +426,11 @@ if (rtc_include_tests) {
"../test:test_renderer",
"../test:test_support",
"//testing/gtest",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/flags:flag",
"//third_party/abseil-cpp/absl/flags:parse",
+ "//third_party/abseil-cpp/absl/types:optional",
]
}
@@ -485,6 +499,7 @@ if (rtc_include_tests) {
"//testing/gtest",
"//third_party/abseil-cpp/absl/flags:flag",
"//third_party/abseil-cpp/absl/flags:parse",
+ "//third_party/abseil-cpp/absl/types:optional",
]
}
@@ -528,12 +543,14 @@ if (rtc_include_tests) {
"receive_statistics_proxy2_unittest.cc",
"receive_statistics_proxy_unittest.cc",
"report_block_stats_unittest.cc",
+ "rtp_video_stream_receiver2_unittest.cc",
"rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc",
"rtp_video_stream_receiver_unittest.cc",
"send_delay_stats_unittest.cc",
"send_statistics_proxy_unittest.cc",
"stats_counter_unittest.cc",
"stream_synchronization_unittest.cc",
+ "video_receive_stream2_unittest.cc",
"video_receive_stream_unittest.cc",
"video_send_stream_impl_unittest.cc",
"video_send_stream_tests.cc",
@@ -559,6 +576,7 @@ if (rtc_include_tests) {
"../api:scoped_refptr",
"../api:simulated_network_api",
"../api:transport_api",
+ "../api/adaptation:resource_adaptation_api",
"../api/crypto:options",
"../api/rtc_event_log",
"../api/task_queue",
@@ -614,6 +632,7 @@ if (rtc_include_tests) {
"../modules/video_coding:webrtc_vp9",
"../rtc_base",
"../rtc_base:checks",
+ "../rtc_base:gunit_helpers",
"../rtc_base:rate_limiter",
"../rtc_base:rtc_base_approved",
"../rtc_base:rtc_base_tests_utils",
@@ -643,6 +662,8 @@ if (rtc_include_tests) {
"../test/time_controller",
"adaptation:video_adaptation",
"//testing/gtest",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/types:optional",
diff --git a/chromium/third_party/webrtc/video/adaptation/BUILD.gn b/chromium/third_party/webrtc/video/adaptation/BUILD.gn
index 51e6a2d84ec..b88fe92b74c 100644
--- a/chromium/third_party/webrtc/video/adaptation/BUILD.gn
+++ b/chromium/third_party/webrtc/video/adaptation/BUILD.gn
@@ -14,8 +14,12 @@ rtc_library("video_adaptation") {
"encode_usage_resource.h",
"overuse_frame_detector.cc",
"overuse_frame_detector.h",
+ "quality_rampup_experiment_helper.cc",
+ "quality_rampup_experiment_helper.h",
"quality_scaler_resource.cc",
"quality_scaler_resource.h",
+ "video_stream_encoder_resource.cc",
+ "video_stream_encoder_resource.h",
"video_stream_encoder_resource_manager.cc",
"video_stream_encoder_resource_manager.h",
]
@@ -23,7 +27,9 @@ rtc_library("video_adaptation") {
deps = [
"../../api:rtp_parameters",
"../../api:scoped_refptr",
+ "../../api/adaptation:resource_adaptation_api",
"../../api/task_queue:task_queue",
+ "../../api/units:data_rate",
"../../api/video:video_adaptation",
"../../api/video:video_frame",
"../../api/video:video_stream_encoder",
@@ -44,8 +50,11 @@ rtc_library("video_adaptation") {
"../../rtc_base/experiments:quality_scaler_settings",
"../../rtc_base/synchronization:sequence_checker",
"../../rtc_base/task_utils:repeating_task",
+ "../../rtc_base/task_utils:to_queued_task",
"../../system_wrappers:field_trial",
"../../system_wrappers:system_wrappers",
+ ]
+ absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/base:core_headers",
"//third_party/abseil-cpp/absl/types:optional",
@@ -60,7 +69,6 @@ if (rtc_include_tests) {
sources = [
"overuse_frame_detector_unittest.cc",
"quality_scaler_resource_unittest.cc",
- "video_stream_encoder_resource_manager_unittest.cc",
]
deps = [
":video_adaptation",
@@ -85,7 +93,7 @@ if (rtc_include_tests) {
"//test:rtc_expect_death",
"//test:test_support",
"//testing/gtest",
- "//third_party/abseil-cpp/absl/types:optional",
]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
}
diff --git a/chromium/third_party/webrtc/video/adaptation/encode_usage_resource.cc b/chromium/third_party/webrtc/video/adaptation/encode_usage_resource.cc
index 49531a3aa42..d6f2334fa1a 100644
--- a/chromium/third_party/webrtc/video/adaptation/encode_usage_resource.cc
+++ b/chromium/third_party/webrtc/video/adaptation/encode_usage_resource.cc
@@ -15,18 +15,28 @@
#include "api/video/video_adaptation_reason.h"
#include "rtc_base/checks.h"
+#include "rtc_base/ref_counted_object.h"
namespace webrtc {
+// static
+rtc::scoped_refptr<EncodeUsageResource> EncodeUsageResource::Create(
+ std::unique_ptr<OveruseFrameDetector> overuse_detector) {
+ return new rtc::RefCountedObject<EncodeUsageResource>(
+ std::move(overuse_detector));
+}
+
EncodeUsageResource::EncodeUsageResource(
std::unique_ptr<OveruseFrameDetector> overuse_detector)
- : rtc::RefCountedObject<Resource>(),
+ : VideoStreamEncoderResource("EncoderUsageResource"),
overuse_detector_(std::move(overuse_detector)),
is_started_(false),
target_frame_rate_(absl::nullopt) {
RTC_DCHECK(overuse_detector_);
}
+EncodeUsageResource::~EncodeUsageResource() {}
+
bool EncodeUsageResource::is_started() const {
RTC_DCHECK_RUN_ON(encoder_queue());
return is_started_;
@@ -81,7 +91,7 @@ void EncodeUsageResource::AdaptUp() {
RTC_DCHECK_RUN_ON(encoder_queue());
// Reference counting guarantees that this object is still alive by the time
// the task is executed.
- resource_adaptation_queue()->PostTask(
+ MaybePostTaskToResourceAdaptationQueue(
[this_ref = rtc::scoped_refptr<EncodeUsageResource>(this)] {
RTC_DCHECK_RUN_ON(this_ref->resource_adaptation_queue());
this_ref->OnResourceUsageStateMeasured(ResourceUsageState::kUnderuse);
@@ -92,7 +102,7 @@ void EncodeUsageResource::AdaptDown() {
RTC_DCHECK_RUN_ON(encoder_queue());
// Reference counting guarantees that this object is still alive by the time
// the task is executed.
- resource_adaptation_queue()->PostTask(
+ MaybePostTaskToResourceAdaptationQueue(
[this_ref = rtc::scoped_refptr<EncodeUsageResource>(this)] {
RTC_DCHECK_RUN_ON(this_ref->resource_adaptation_queue());
this_ref->OnResourceUsageStateMeasured(ResourceUsageState::kOveruse);
diff --git a/chromium/third_party/webrtc/video/adaptation/encode_usage_resource.h b/chromium/third_party/webrtc/video/adaptation/encode_usage_resource.h
index 3c6f02b2437..257988fa127 100644
--- a/chromium/third_party/webrtc/video/adaptation/encode_usage_resource.h
+++ b/chromium/third_party/webrtc/video/adaptation/encode_usage_resource.h
@@ -15,11 +15,12 @@
#include <string>
#include "absl/types/optional.h"
+#include "api/scoped_refptr.h"
#include "api/video/video_adaptation_reason.h"
-#include "call/adaptation/resource.h"
#include "rtc_base/ref_counted_object.h"
#include "rtc_base/task_queue.h"
#include "video/adaptation/overuse_frame_detector.h"
+#include "video/adaptation/video_stream_encoder_resource.h"
namespace webrtc {
@@ -28,11 +29,15 @@ namespace webrtc {
// indirectly by usage in the ResourceAdaptationProcessor (which is only tested
// because of its usage in VideoStreamEncoder); all tests are currently in
// video_stream_encoder_unittest.cc.
-class EncodeUsageResource : public rtc::RefCountedObject<Resource>,
+class EncodeUsageResource : public VideoStreamEncoderResource,
public OveruseFrameDetectorObserverInterface {
public:
+ static rtc::scoped_refptr<EncodeUsageResource> Create(
+ std::unique_ptr<OveruseFrameDetector> overuse_detector);
+
explicit EncodeUsageResource(
std::unique_ptr<OveruseFrameDetector> overuse_detector);
+ ~EncodeUsageResource() override;
bool is_started() const;
@@ -51,8 +56,6 @@ class EncodeUsageResource : public rtc::RefCountedObject<Resource>,
void AdaptUp() override;
void AdaptDown() override;
- std::string name() const override { return "EncoderUsageResource"; }
-
private:
int TargetFrameRateAsInt();
diff --git a/chromium/third_party/webrtc/video/adaptation/overuse_frame_detector_unittest.cc b/chromium/third_party/webrtc/video/adaptation/overuse_frame_detector_unittest.cc
index bb34224b025..d4bf910faa6 100644
--- a/chromium/third_party/webrtc/video/adaptation/overuse_frame_detector_unittest.cc
+++ b/chromium/third_party/webrtc/video/adaptation/overuse_frame_detector_unittest.cc
@@ -41,8 +41,8 @@ class MockCpuOveruseObserver : public OveruseFrameDetectorObserverInterface {
MockCpuOveruseObserver() {}
virtual ~MockCpuOveruseObserver() {}
- MOCK_METHOD0(AdaptUp, void());
- MOCK_METHOD0(AdaptDown, void());
+ MOCK_METHOD(void, AdaptUp, (), (override));
+ MOCK_METHOD(void, AdaptDown, (), (override));
};
class CpuOveruseObserverImpl : public OveruseFrameDetectorObserverInterface {
diff --git a/chromium/third_party/webrtc/video/adaptation/quality_rampup_experiment_helper.cc b/chromium/third_party/webrtc/video/adaptation/quality_rampup_experiment_helper.cc
new file mode 100644
index 00000000000..6d82503fc6f
--- /dev/null
+++ b/chromium/third_party/webrtc/video/adaptation/quality_rampup_experiment_helper.cc
@@ -0,0 +1,80 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video/adaptation/quality_rampup_experiment_helper.h"
+
+#include <memory>
+#include <utility>
+
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+
+QualityRampUpExperimentHelper::QualityRampUpExperimentHelper(
+ QualityRampUpExperimentListener* experiment_listener,
+ Clock* clock,
+ QualityRampupExperiment experiment)
+ : experiment_listener_(experiment_listener),
+ clock_(clock),
+ quality_rampup_experiment_(std::move(experiment)),
+ cpu_adapted_(false),
+ qp_resolution_adaptations_(0) {
+ RTC_DCHECK(experiment_listener_);
+ RTC_DCHECK(clock_);
+}
+
+std::unique_ptr<QualityRampUpExperimentHelper>
+QualityRampUpExperimentHelper::CreateIfEnabled(
+ QualityRampUpExperimentListener* experiment_listener,
+ Clock* clock) {
+ QualityRampupExperiment experiment = QualityRampupExperiment::ParseSettings();
+ if (experiment.Enabled()) {
+ return std::unique_ptr<QualityRampUpExperimentHelper>(
+ new QualityRampUpExperimentHelper(experiment_listener, clock,
+ experiment));
+ }
+ return nullptr;
+}
+
+void QualityRampUpExperimentHelper::PerformQualityRampupExperiment(
+ rtc::scoped_refptr<QualityScalerResource> quality_scaler_resource,
+ DataRate bandwidth,
+ DataRate encoder_target_bitrate,
+ DataRate max_bitrate,
+ int pixels) {
+ if (!quality_scaler_resource->is_started())
+ return;
+
+ int64_t now_ms = clock_->TimeInMilliseconds();
+ quality_rampup_experiment_.SetMaxBitrate(pixels, max_bitrate.kbps());
+
+ bool try_quality_rampup = false;
+ if (quality_rampup_experiment_.BwHigh(now_ms, bandwidth.kbps())) {
+ // Verify that encoder is at max bitrate and the QP is low.
+ if (encoder_target_bitrate == max_bitrate &&
+ quality_scaler_resource->QpFastFilterLow()) {
+ try_quality_rampup = true;
+ }
+ }
+ if (try_quality_rampup && qp_resolution_adaptations_ > 0 && !cpu_adapted_) {
+ experiment_listener_->OnQualityRampUp();
+ }
+}
+
+void QualityRampUpExperimentHelper::cpu_adapted(bool cpu_adapted) {
+ cpu_adapted_ = cpu_adapted;
+}
+
+void QualityRampUpExperimentHelper::qp_resolution_adaptations(
+ int qp_resolution_adaptations) {
+ qp_resolution_adaptations_ = qp_resolution_adaptations;
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/video/adaptation/quality_rampup_experiment_helper.h b/chromium/third_party/webrtc/video/adaptation/quality_rampup_experiment_helper.h
new file mode 100644
index 00000000000..81be982e7c7
--- /dev/null
+++ b/chromium/third_party/webrtc/video/adaptation/quality_rampup_experiment_helper.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef VIDEO_ADAPTATION_QUALITY_RAMPUP_EXPERIMENT_HELPER_H_
+#define VIDEO_ADAPTATION_QUALITY_RAMPUP_EXPERIMENT_HELPER_H_
+
+#include <memory>
+
+#include "api/scoped_refptr.h"
+#include "api/units/data_rate.h"
+#include "rtc_base/experiments/quality_rampup_experiment.h"
+#include "system_wrappers/include/clock.h"
+#include "video/adaptation/quality_scaler_resource.h"
+
+namespace webrtc {
+
+class QualityRampUpExperimentListener {
+ public:
+ virtual ~QualityRampUpExperimentListener() = default;
+ virtual void OnQualityRampUp() = 0;
+};
+
+// Helper class for orchestrating the WebRTC-Video-QualityRampupSettings
+// experiment.
+class QualityRampUpExperimentHelper {
+ public:
+ // Returns a QualityRampUpExperimentHelper if the experiment is enabled,
+  // a nullptr otherwise.
+ static std::unique_ptr<QualityRampUpExperimentHelper> CreateIfEnabled(
+ QualityRampUpExperimentListener* experiment_listener,
+ Clock* clock);
+
+ QualityRampUpExperimentHelper(const QualityRampUpExperimentHelper&) = delete;
+ QualityRampUpExperimentHelper& operator=(
+ const QualityRampUpExperimentHelper&) = delete;
+
+ void cpu_adapted(bool cpu_adapted);
+ void qp_resolution_adaptations(int qp_adaptations);
+
+ void PerformQualityRampupExperiment(
+ rtc::scoped_refptr<QualityScalerResource> quality_scaler_resource,
+ DataRate bandwidth,
+ DataRate encoder_target_bitrate,
+ DataRate max_bitrate,
+ int pixels);
+
+ private:
+ QualityRampUpExperimentHelper(
+ QualityRampUpExperimentListener* experiment_listener,
+ Clock* clock,
+ QualityRampupExperiment experiment);
+ QualityRampUpExperimentListener* const experiment_listener_;
+ Clock* clock_;
+ QualityRampupExperiment quality_rampup_experiment_;
+ bool cpu_adapted_;
+ int qp_resolution_adaptations_;
+};
+
+} // namespace webrtc
+
+#endif // VIDEO_ADAPTATION_QUALITY_RAMPUP_EXPERIMENT_HELPER_H_
diff --git a/chromium/third_party/webrtc/video/adaptation/quality_scaler_resource.cc b/chromium/third_party/webrtc/video/adaptation/quality_scaler_resource.cc
index 403f6080cac..514a2d765f7 100644
--- a/chromium/third_party/webrtc/video/adaptation/quality_scaler_resource.cc
+++ b/chromium/third_party/webrtc/video/adaptation/quality_scaler_resource.cc
@@ -13,12 +13,27 @@
#include <utility>
#include "rtc_base/experiments/balanced_degradation_settings.h"
+#include "rtc_base/ref_counted_object.h"
+#include "rtc_base/task_utils/to_queued_task.h"
+#include "rtc_base/time_utils.h"
namespace webrtc {
+namespace {
+
+const int64_t kUnderuseDueToDisabledCooldownMs = 1000;
+
+} // namespace
+
+// static
+rtc::scoped_refptr<QualityScalerResource> QualityScalerResource::Create() {
+ return new rtc::RefCountedObject<QualityScalerResource>();
+}
+
QualityScalerResource::QualityScalerResource()
- : rtc::RefCountedObject<Resource>(),
+ : VideoStreamEncoderResource("QualityScalerResource"),
quality_scaler_(nullptr),
+ last_underuse_due_to_disabled_timestamp_ms_(absl::nullopt),
num_handled_callbacks_(0),
pending_callbacks_(),
adaptation_processor_(nullptr),
@@ -82,11 +97,18 @@ void QualityScalerResource::OnEncodeCompleted(const EncodedImage& encoded_image,
// mid call.
// Instead it should be done at a higher layer in the same way for all
// resources.
- resource_adaptation_queue()->PostTask(
- [this_ref = rtc::scoped_refptr<QualityScalerResource>(this)] {
- RTC_DCHECK_RUN_ON(this_ref->resource_adaptation_queue());
- this_ref->OnResourceUsageStateMeasured(ResourceUsageState::kUnderuse);
- });
+ int64_t timestamp_ms = rtc::TimeMillis();
+ if (!last_underuse_due_to_disabled_timestamp_ms_.has_value() ||
+ timestamp_ms - last_underuse_due_to_disabled_timestamp_ms_.value() >=
+ kUnderuseDueToDisabledCooldownMs) {
+ last_underuse_due_to_disabled_timestamp_ms_ = timestamp_ms;
+ MaybePostTaskToResourceAdaptationQueue(
+ [this_ref = rtc::scoped_refptr<QualityScalerResource>(this)] {
+ RTC_DCHECK_RUN_ON(this_ref->resource_adaptation_queue());
+ this_ref->OnResourceUsageStateMeasured(
+ ResourceUsageState::kUnderuse);
+ });
+ }
}
}
@@ -111,7 +133,7 @@ void QualityScalerResource::OnReportQpUsageHigh(
size_t callback_id = QueuePendingCallback(callback);
// Reference counting guarantees that this object is still alive by the time
// the task is executed.
- resource_adaptation_queue()->PostTask(
+ MaybePostTaskToResourceAdaptationQueue(
[this_ref = rtc::scoped_refptr<QualityScalerResource>(this),
callback_id] {
RTC_DCHECK_RUN_ON(this_ref->resource_adaptation_queue());
@@ -131,7 +153,7 @@ void QualityScalerResource::OnReportQpUsageLow(
size_t callback_id = QueuePendingCallback(callback);
// Reference counting guarantees that this object is still alive by the time
// the task is executed.
- resource_adaptation_queue()->PostTask(
+ MaybePostTaskToResourceAdaptationQueue(
[this_ref = rtc::scoped_refptr<QualityScalerResource>(this),
callback_id] {
RTC_DCHECK_RUN_ON(this_ref->resource_adaptation_queue());
@@ -191,8 +213,8 @@ void QualityScalerResource::HandlePendingCallback(size_t callback_id,
// Reference counting guarantees that this object is still alive by the time
// the task is executed.
encoder_queue()->PostTask(
- [this_ref = rtc::scoped_refptr<QualityScalerResource>(this), callback_id,
- clear_qp_samples] {
+ ToQueuedTask([this_ref = rtc::scoped_refptr<QualityScalerResource>(this),
+ callback_id, clear_qp_samples] {
RTC_DCHECK_RUN_ON(this_ref->encoder_queue());
if (this_ref->num_handled_callbacks_ >= callback_id) {
// The callback with this ID has already been handled.
@@ -205,7 +227,7 @@ void QualityScalerResource::HandlePendingCallback(size_t callback_id,
clear_qp_samples);
++this_ref->num_handled_callbacks_;
this_ref->pending_callbacks_.pop();
- });
+ }));
}
void QualityScalerResource::AbortPendingCallbacks() {
diff --git a/chromium/third_party/webrtc/video/adaptation/quality_scaler_resource.h b/chromium/third_party/webrtc/video/adaptation/quality_scaler_resource.h
index 78685823c39..372d0c91b8a 100644
--- a/chromium/third_party/webrtc/video/adaptation/quality_scaler_resource.h
+++ b/chromium/third_party/webrtc/video/adaptation/quality_scaler_resource.h
@@ -15,21 +15,27 @@
#include <queue>
#include <string>
+#include "absl/types/optional.h"
+#include "api/scoped_refptr.h"
#include "api/video/video_adaptation_reason.h"
#include "api/video_codecs/video_encoder.h"
-#include "call/adaptation/resource.h"
+#include "call/adaptation/adaptation_listener.h"
#include "call/adaptation/resource_adaptation_processor_interface.h"
#include "modules/video_coding/utility/quality_scaler.h"
#include "rtc_base/critical_section.h"
#include "rtc_base/ref_counted_object.h"
#include "rtc_base/task_queue.h"
+#include "video/adaptation/video_stream_encoder_resource.h"
namespace webrtc {
// Handles interaction with the QualityScaler.
-class QualityScalerResource : public rtc::RefCountedObject<Resource>,
+class QualityScalerResource : public VideoStreamEncoderResource,
+ public AdaptationListener,
public QualityScalerQpUsageHandlerInterface {
public:
+ static rtc::scoped_refptr<QualityScalerResource> Create();
+
QualityScalerResource();
~QualityScalerResource() override;
@@ -55,9 +61,7 @@ class QualityScalerResource : public rtc::RefCountedObject<Resource>,
rtc::scoped_refptr<QualityScalerQpUsageHandlerCallbackInterface> callback)
override;
- std::string name() const override { return "QualityScalerResource"; }
-
- // Resource implementation.
+ // AdaptationListener implementation.
void OnAdaptationApplied(
const VideoStreamInputState& input_state,
const VideoSourceRestrictions& restrictions_before,
@@ -74,10 +78,15 @@ class QualityScalerResource : public rtc::RefCountedObject<Resource>,
// Members accessed on the encoder queue.
std::unique_ptr<QualityScaler> quality_scaler_
RTC_GUARDED_BY(encoder_queue());
+ // The timestamp of the last time we reported underuse because this resource
+ // was disabled in order to prevent getting stuck with QP adaptations. Used to
+ // make sure underuse reporting is not too spammy.
+ absl::optional<int64_t> last_underuse_due_to_disabled_timestamp_ms_
+ RTC_GUARDED_BY(encoder_queue());
// Every OnReportQpUsageHigh/Low() operation has a callback that MUST be
- // invoked on the |encoder_queue_|. Because usage measurements are reported on
- // the |encoder_queue_| but handled by the processor on the the
- // |resource_adaptation_queue_|, handling a measurement entails a task queue
+ // invoked on the encoder_queue(). Because usage measurements are reported on
+ // the encoder_queue() but handled by the processor on the
+ // resource_adaptation_queue(), handling a measurement entails a task queue
// "ping" round-trip. Multiple callbacks in-flight is thus possible.
size_t num_handled_callbacks_ RTC_GUARDED_BY(encoder_queue());
std::queue<rtc::scoped_refptr<QualityScalerQpUsageHandlerCallbackInterface>>
diff --git a/chromium/third_party/webrtc/video/adaptation/quality_scaler_resource_unittest.cc b/chromium/third_party/webrtc/video/adaptation/quality_scaler_resource_unittest.cc
index 66f4e138700..e2098d71b7a 100644
--- a/chromium/third_party/webrtc/video/adaptation/quality_scaler_resource_unittest.cc
+++ b/chromium/third_party/webrtc/video/adaptation/quality_scaler_resource_unittest.cc
@@ -74,9 +74,10 @@ class QualityScalerResourceTest : public ::testing::Test {
encoder_queue_(task_queue_factory_->CreateTaskQueue(
"EncoderQueue",
TaskQueueFactory::Priority::NORMAL)),
- quality_scaler_resource_(new QualityScalerResource()) {
- quality_scaler_resource_->Initialize(&encoder_queue_,
- &resource_adaptation_queue_);
+ quality_scaler_resource_(QualityScalerResource::Create()) {
+ quality_scaler_resource_->RegisterEncoderTaskQueue(encoder_queue_.Get());
+ quality_scaler_resource_->RegisterAdaptationTaskQueue(
+ resource_adaptation_queue_.Get());
rtc::Event event;
encoder_queue_.PostTask([this, &event] {
quality_scaler_resource_->StartCheckForOveruse(
diff --git a/chromium/third_party/webrtc/video/adaptation/video_stream_encoder_resource.cc b/chromium/third_party/webrtc/video/adaptation/video_stream_encoder_resource.cc
new file mode 100644
index 00000000000..4e99a1dbb32
--- /dev/null
+++ b/chromium/third_party/webrtc/video/adaptation/video_stream_encoder_resource.cc
@@ -0,0 +1,85 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video/adaptation/video_stream_encoder_resource.h"
+
+#include <algorithm>
+#include <utility>
+
+namespace webrtc {
+
+VideoStreamEncoderResource::VideoStreamEncoderResource(std::string name)
+ : lock_(),
+ name_(std::move(name)),
+ encoder_queue_(nullptr),
+ resource_adaptation_queue_(nullptr),
+ listener_(nullptr) {}
+
+VideoStreamEncoderResource::~VideoStreamEncoderResource() {
+ RTC_DCHECK(!listener_)
+ << "There is a listener depending on a VideoStreamEncoderResource being "
+ << "destroyed.";
+}
+
+void VideoStreamEncoderResource::RegisterEncoderTaskQueue(
+ TaskQueueBase* encoder_queue) {
+ RTC_DCHECK(!encoder_queue_);
+ RTC_DCHECK(encoder_queue);
+ encoder_queue_ = encoder_queue;
+}
+
+void VideoStreamEncoderResource::RegisterAdaptationTaskQueue(
+ TaskQueueBase* resource_adaptation_queue) {
+ rtc::CritScope crit(&lock_);
+ RTC_DCHECK(!resource_adaptation_queue_);
+ RTC_DCHECK(resource_adaptation_queue);
+ resource_adaptation_queue_ = resource_adaptation_queue;
+}
+
+void VideoStreamEncoderResource::UnregisterAdaptationTaskQueue() {
+ rtc::CritScope crit(&lock_);
+ RTC_DCHECK(resource_adaptation_queue_);
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
+ resource_adaptation_queue_ = nullptr;
+}
+
+void VideoStreamEncoderResource::SetResourceListener(
+ ResourceListener* listener) {
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue());
+ // If you want to change listener you need to unregister the old listener by
+ // setting it to null first.
+ RTC_DCHECK(!listener_ || !listener) << "A listener is already set";
+ listener_ = listener;
+}
+
+std::string VideoStreamEncoderResource::Name() const {
+ return name_;
+}
+
+void VideoStreamEncoderResource::OnResourceUsageStateMeasured(
+ ResourceUsageState usage_state) {
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue());
+ if (listener_) {
+ listener_->OnResourceUsageStateMeasured(this, usage_state);
+ }
+}
+
+TaskQueueBase* VideoStreamEncoderResource::encoder_queue() const {
+ return encoder_queue_;
+}
+
+TaskQueueBase* VideoStreamEncoderResource::resource_adaptation_queue() const {
+ rtc::CritScope crit(&lock_);
+ RTC_DCHECK(resource_adaptation_queue_);
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
+ return resource_adaptation_queue_;
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/video/adaptation/video_stream_encoder_resource.h b/chromium/third_party/webrtc/video/adaptation/video_stream_encoder_resource.h
new file mode 100644
index 00000000000..739702c3635
--- /dev/null
+++ b/chromium/third_party/webrtc/video/adaptation/video_stream_encoder_resource.h
@@ -0,0 +1,80 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef VIDEO_ADAPTATION_VIDEO_STREAM_ENCODER_RESOURCE_H_
+#define VIDEO_ADAPTATION_VIDEO_STREAM_ENCODER_RESOURCE_H_
+
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/adaptation/resource.h"
+#include "api/task_queue/task_queue_base.h"
+#include "call/adaptation/adaptation_constraint.h"
+#include "call/adaptation/adaptation_listener.h"
+#include "rtc_base/critical_section.h"
+#include "rtc_base/synchronization/sequence_checker.h"
+
+namespace webrtc {
+
+class VideoStreamEncoderResource : public Resource {
+ public:
+ ~VideoStreamEncoderResource() override;
+
+ // Registering task queues must be performed as part of initialization.
+ void RegisterEncoderTaskQueue(TaskQueueBase* encoder_queue);
+
+ // Resource implementation.
+ std::string Name() const override;
+ void SetResourceListener(ResourceListener* listener) override;
+
+ // Provides a pointer to the adaptation task queue. After this call, all
+ // methods defined in this interface, including
+ // UnregisterAdaptationTaskQueue() MUST be invoked on the adaptation task
+ // queue. Registering the adaptation task queue may, however, happen off the
+ // adaptation task queue.
+ void RegisterAdaptationTaskQueue(TaskQueueBase* resource_adaptation_queue);
+ // Signals that the adaptation task queue is no longer safe to use. No
+ // assumptions must be made as to whether or not tasks in-flight will run.
+ void UnregisterAdaptationTaskQueue();
+
+ protected:
+ explicit VideoStreamEncoderResource(std::string name);
+
+ void OnResourceUsageStateMeasured(ResourceUsageState usage_state);
+
+ // The caller is responsible for ensuring the task queue is still valid.
+ TaskQueueBase* encoder_queue() const;
+ // Validity of returned pointer is ensured by only allowing this method to be
+ // called on the adaptation task queue. Designed for use with RTC_GUARDED_BY.
+ // For posting from a different queue, use
+ // MaybePostTaskToResourceAdaptationQueue() instead, which only posts if the
+ // task queue is currently registered.
+ TaskQueueBase* resource_adaptation_queue() const;
+ template <typename Closure>
+ void MaybePostTaskToResourceAdaptationQueue(Closure&& closure) {
+ rtc::CritScope crit(&lock_);
+ if (!resource_adaptation_queue_)
+ return;
+ resource_adaptation_queue_->PostTask(ToQueuedTask(closure));
+ }
+
+ private:
+ rtc::CriticalSection lock_;
+ const std::string name_;
+ // Treated as const after initialization.
+ TaskQueueBase* encoder_queue_;
+ TaskQueueBase* resource_adaptation_queue_ RTC_GUARDED_BY(lock_);
+ ResourceListener* listener_ RTC_GUARDED_BY(resource_adaptation_queue());
+};
+
+} // namespace webrtc
+
+#endif // VIDEO_ADAPTATION_VIDEO_STREAM_ENCODER_RESOURCE_H_
diff --git a/chromium/third_party/webrtc/video/adaptation/video_stream_encoder_resource_manager.cc b/chromium/third_party/webrtc/video/adaptation/video_stream_encoder_resource_manager.cc
index b309dd34559..450b10f8bcd 100644
--- a/chromium/third_party/webrtc/video/adaptation/video_stream_encoder_resource_manager.cc
+++ b/chromium/third_party/webrtc/video/adaptation/video_stream_encoder_resource_manager.cc
@@ -10,22 +10,21 @@
#include "video/adaptation/video_stream_encoder_resource_manager.h"
-#include <algorithm>
#include <cmath>
#include <limits>
#include <memory>
-#include <string>
#include <utility>
#include "absl/algorithm/container.h"
#include "absl/base/macros.h"
+#include "api/adaptation/resource.h"
#include "api/task_queue/task_queue_base.h"
#include "api/video/video_adaptation_reason.h"
#include "api/video/video_source_interface.h"
-#include "call/adaptation/resource.h"
#include "call/adaptation/video_source_restrictions.h"
#include "rtc_base/logging.h"
#include "rtc_base/numerics/safe_conversions.h"
+#include "rtc_base/ref_counted_object.h"
#include "rtc_base/strings/string_builder.h"
#include "rtc_base/time_utils.h"
@@ -55,15 +54,6 @@ std::string ToString(VideoAdaptationReason reason) {
}
}
-VideoAdaptationReason OtherReason(VideoAdaptationReason reason) {
- switch (reason) {
- case VideoAdaptationReason::kQuality:
- return VideoAdaptationReason::kCpu;
- case VideoAdaptationReason::kCpu:
- return VideoAdaptationReason::kQuality;
- }
-}
-
} // namespace
class VideoStreamEncoderResourceManager::InitialFrameDropper {
@@ -138,96 +128,47 @@ class VideoStreamEncoderResourceManager::InitialFrameDropper {
int initial_framedrop_;
};
-VideoStreamEncoderResourceManager::PreventAdaptUpDueToActiveCounts::
- PreventAdaptUpDueToActiveCounts(VideoStreamEncoderResourceManager* manager)
- : rtc::RefCountedObject<Resource>(),
- manager_(manager),
- adaptation_processor_(nullptr) {}
-
-void VideoStreamEncoderResourceManager::PreventAdaptUpDueToActiveCounts::
- SetAdaptationProcessor(
- ResourceAdaptationProcessorInterface* adaptation_processor) {
- RTC_DCHECK_RUN_ON(resource_adaptation_queue());
- adaptation_processor_ = adaptation_processor;
-}
-
-bool VideoStreamEncoderResourceManager::PreventAdaptUpDueToActiveCounts::
- IsAdaptationUpAllowed(const VideoStreamInputState& input_state,
- const VideoSourceRestrictions& restrictions_before,
- const VideoSourceRestrictions& restrictions_after,
- rtc::scoped_refptr<Resource> reason_resource) const {
- RTC_DCHECK_RUN_ON(resource_adaptation_queue());
- RTC_DCHECK(adaptation_processor_);
- VideoAdaptationReason reason =
- manager_->GetReasonFromResource(reason_resource);
- {
- // This is the same as |resource_adaptation_queue_|, but need to
- // RTC_DCHECK_RUN_ON() both to avoid compiler error when accessing
- // |manager_->active_counts_|.
- RTC_DCHECK_RUN_ON(manager_->resource_adaptation_queue_);
- // We can't adapt up if we're already at the highest setting.
- // Note that this only includes counts relevant to the current degradation
- // preference. e.g. we previously adapted resolution, now prefer adpating
- // fps, only count the fps adaptations and not the previous resolution
- // adaptations.
- // TODO(hbos): Why would the reason matter? If a particular resource doesn't
- // want us to go up it should prevent us from doing so itself rather than to
- // have this catch-all reason- and stats-based approach.
- int num_downgrades =
- FilterVideoAdaptationCountersByDegradationPreference(
- manager_->active_counts_[reason],
- adaptation_processor_->effective_degradation_preference())
- .Total();
- RTC_DCHECK_GE(num_downgrades, 0);
- return num_downgrades > 0;
- }
-}
-
-VideoStreamEncoderResourceManager::
- PreventIncreaseResolutionDueToBitrateResource::
- PreventIncreaseResolutionDueToBitrateResource(
- VideoStreamEncoderResourceManager* manager)
- : rtc::RefCountedObject<Resource>(),
- manager_(manager),
+VideoStreamEncoderResourceManager::BitrateConstraint::BitrateConstraint(
+ VideoStreamEncoderResourceManager* manager)
+ : manager_(manager),
+ resource_adaptation_queue_(nullptr),
encoder_settings_(absl::nullopt),
encoder_target_bitrate_bps_(absl::nullopt) {}
-void VideoStreamEncoderResourceManager::
- PreventIncreaseResolutionDueToBitrateResource::OnEncoderSettingsUpdated(
- absl::optional<EncoderSettings> encoder_settings) {
- RTC_DCHECK_RUN_ON(encoder_queue());
- resource_adaptation_queue()->PostTask(
- [this_ref =
- rtc::scoped_refptr<PreventIncreaseResolutionDueToBitrateResource>(
- this),
- encoder_settings] {
- RTC_DCHECK_RUN_ON(this_ref->resource_adaptation_queue());
+void VideoStreamEncoderResourceManager::BitrateConstraint::SetAdaptationQueue(
+ TaskQueueBase* resource_adaptation_queue) {
+ resource_adaptation_queue_ = resource_adaptation_queue;
+}
+
+void VideoStreamEncoderResourceManager::BitrateConstraint::
+ OnEncoderSettingsUpdated(absl::optional<EncoderSettings> encoder_settings) {
+ RTC_DCHECK_RUN_ON(manager_->encoder_queue_);
+ resource_adaptation_queue_->PostTask(
+ ToQueuedTask([this_ref = rtc::scoped_refptr<BitrateConstraint>(this),
+ encoder_settings] {
+ RTC_DCHECK_RUN_ON(this_ref->resource_adaptation_queue_);
this_ref->encoder_settings_ = std::move(encoder_settings);
- });
+ }));
}
-void VideoStreamEncoderResourceManager::
- PreventIncreaseResolutionDueToBitrateResource::
- OnEncoderTargetBitrateUpdated(
- absl::optional<uint32_t> encoder_target_bitrate_bps) {
- RTC_DCHECK_RUN_ON(encoder_queue());
- resource_adaptation_queue()->PostTask(
- [this_ref =
- rtc::scoped_refptr<PreventIncreaseResolutionDueToBitrateResource>(
- this),
- encoder_target_bitrate_bps] {
- RTC_DCHECK_RUN_ON(this_ref->resource_adaptation_queue());
+void VideoStreamEncoderResourceManager::BitrateConstraint::
+ OnEncoderTargetBitrateUpdated(
+ absl::optional<uint32_t> encoder_target_bitrate_bps) {
+ RTC_DCHECK_RUN_ON(manager_->encoder_queue_);
+ resource_adaptation_queue_->PostTask(
+ ToQueuedTask([this_ref = rtc::scoped_refptr<BitrateConstraint>(this),
+ encoder_target_bitrate_bps] {
+ RTC_DCHECK_RUN_ON(this_ref->resource_adaptation_queue_);
this_ref->encoder_target_bitrate_bps_ = encoder_target_bitrate_bps;
- });
+ }));
}
-bool VideoStreamEncoderResourceManager::
- PreventIncreaseResolutionDueToBitrateResource::IsAdaptationUpAllowed(
- const VideoStreamInputState& input_state,
- const VideoSourceRestrictions& restrictions_before,
- const VideoSourceRestrictions& restrictions_after,
- rtc::scoped_refptr<Resource> reason_resource) const {
- RTC_DCHECK_RUN_ON(resource_adaptation_queue());
+bool VideoStreamEncoderResourceManager::BitrateConstraint::
+ IsAdaptationUpAllowed(const VideoStreamInputState& input_state,
+ const VideoSourceRestrictions& restrictions_before,
+ const VideoSourceRestrictions& restrictions_after,
+ rtc::scoped_refptr<Resource> reason_resource) const {
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
VideoAdaptationReason reason =
manager_->GetReasonFromResource(reason_resource);
// If increasing resolution due to kQuality, make sure bitrate limits are not
@@ -256,38 +197,43 @@ bool VideoStreamEncoderResourceManager::
return true;
}
-VideoStreamEncoderResourceManager::PreventAdaptUpInBalancedResource::
- PreventAdaptUpInBalancedResource(VideoStreamEncoderResourceManager* manager)
- : rtc::RefCountedObject<Resource>(),
- manager_(manager),
+VideoStreamEncoderResourceManager::BalancedConstraint::BalancedConstraint(
+ VideoStreamEncoderResourceManager* manager)
+ : manager_(manager),
+ resource_adaptation_queue_(nullptr),
adaptation_processor_(nullptr),
encoder_target_bitrate_bps_(absl::nullopt) {}
-void VideoStreamEncoderResourceManager::PreventAdaptUpInBalancedResource::
+void VideoStreamEncoderResourceManager::BalancedConstraint::SetAdaptationQueue(
+ TaskQueueBase* resource_adaptation_queue) {
+ resource_adaptation_queue_ = resource_adaptation_queue;
+}
+
+void VideoStreamEncoderResourceManager::BalancedConstraint::
SetAdaptationProcessor(
ResourceAdaptationProcessorInterface* adaptation_processor) {
- RTC_DCHECK_RUN_ON(resource_adaptation_queue());
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
adaptation_processor_ = adaptation_processor;
}
-void VideoStreamEncoderResourceManager::PreventAdaptUpInBalancedResource::
+void VideoStreamEncoderResourceManager::BalancedConstraint::
OnEncoderTargetBitrateUpdated(
absl::optional<uint32_t> encoder_target_bitrate_bps) {
- RTC_DCHECK_RUN_ON(encoder_queue());
- resource_adaptation_queue()->PostTask(
- [this_ref = rtc::scoped_refptr<PreventAdaptUpInBalancedResource>(this),
- encoder_target_bitrate_bps] {
- RTC_DCHECK_RUN_ON(this_ref->resource_adaptation_queue());
+ RTC_DCHECK_RUN_ON(manager_->encoder_queue_);
+ resource_adaptation_queue_->PostTask(
+ ToQueuedTask([this_ref = rtc::scoped_refptr<BalancedConstraint>(this),
+ encoder_target_bitrate_bps] {
+ RTC_DCHECK_RUN_ON(this_ref->resource_adaptation_queue_);
this_ref->encoder_target_bitrate_bps_ = encoder_target_bitrate_bps;
- });
+ }));
}
-bool VideoStreamEncoderResourceManager::PreventAdaptUpInBalancedResource::
+bool VideoStreamEncoderResourceManager::BalancedConstraint::
IsAdaptationUpAllowed(const VideoStreamInputState& input_state,
const VideoSourceRestrictions& restrictions_before,
const VideoSourceRestrictions& restrictions_after,
rtc::scoped_refptr<Resource> reason_resource) const {
- RTC_DCHECK_RUN_ON(resource_adaptation_queue());
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
RTC_DCHECK(adaptation_processor_);
VideoAdaptationReason reason =
manager_->GetReasonFromResource(reason_resource);
@@ -321,15 +267,11 @@ VideoStreamEncoderResourceManager::VideoStreamEncoderResourceManager(
Clock* clock,
bool experiment_cpu_load_estimator,
std::unique_ptr<OveruseFrameDetector> overuse_detector)
- : prevent_adapt_up_due_to_active_counts_(
- new PreventAdaptUpDueToActiveCounts(this)),
- prevent_increase_resolution_due_to_bitrate_resource_(
- new PreventIncreaseResolutionDueToBitrateResource(this)),
- prevent_adapt_up_in_balanced_resource_(
- new PreventAdaptUpInBalancedResource(this)),
+ : bitrate_constraint_(new rtc::RefCountedObject<BitrateConstraint>(this)),
+ balanced_constraint_(new rtc::RefCountedObject<BalancedConstraint>(this)),
encode_usage_resource_(
- new EncodeUsageResource(std::move(overuse_detector))),
- quality_scaler_resource_(new QualityScalerResource()),
+ EncodeUsageResource::Create(std::move(overuse_detector))),
+ quality_scaler_resource_(QualityScalerResource::Create()),
encoder_queue_(nullptr),
resource_adaptation_queue_(nullptr),
input_state_provider_(input_state_provider),
@@ -343,17 +285,10 @@ VideoStreamEncoderResourceManager::VideoStreamEncoderResourceManager(
std::make_unique<InitialFrameDropper>(quality_scaler_resource_)),
quality_scaling_experiment_enabled_(QualityScalingExperiment::Enabled()),
encoder_target_bitrate_bps_(absl::nullopt),
- quality_rampup_done_(false),
- quality_rampup_experiment_(QualityRampupExperiment::ParseSettings()),
- encoder_settings_(absl::nullopt),
- active_counts_() {
+ quality_rampup_experiment_(
+ QualityRampUpExperimentHelper::CreateIfEnabled(this, clock_)),
+ encoder_settings_(absl::nullopt) {
RTC_DCHECK(encoder_stats_observer_);
- MapResourceToReason(prevent_adapt_up_due_to_active_counts_,
- VideoAdaptationReason::kQuality);
- MapResourceToReason(prevent_increase_resolution_due_to_bitrate_resource_,
- VideoAdaptationReason::kQuality);
- MapResourceToReason(prevent_adapt_up_in_balanced_resource_,
- VideoAdaptationReason::kQuality);
MapResourceToReason(encode_usage_resource_, VideoAdaptationReason::kCpu);
MapResourceToReason(quality_scaler_resource_,
VideoAdaptationReason::kQuality);
@@ -370,26 +305,21 @@ void VideoStreamEncoderResourceManager::Initialize(
RTC_DCHECK(resource_adaptation_queue);
encoder_queue_ = encoder_queue;
resource_adaptation_queue_ = resource_adaptation_queue;
- prevent_adapt_up_due_to_active_counts_->Initialize(
- encoder_queue_, resource_adaptation_queue_);
- prevent_increase_resolution_due_to_bitrate_resource_->Initialize(
- encoder_queue_, resource_adaptation_queue_);
- prevent_adapt_up_in_balanced_resource_->Initialize(
- encoder_queue_, resource_adaptation_queue_);
- encode_usage_resource_->Initialize(encoder_queue_,
- resource_adaptation_queue_);
- quality_scaler_resource_->Initialize(encoder_queue_,
- resource_adaptation_queue_);
+ bitrate_constraint_->SetAdaptationQueue(resource_adaptation_queue_->Get());
+ balanced_constraint_->SetAdaptationQueue(resource_adaptation_queue_->Get());
+ encode_usage_resource_->RegisterEncoderTaskQueue(encoder_queue_->Get());
+ encode_usage_resource_->RegisterAdaptationTaskQueue(
+ resource_adaptation_queue_->Get());
+ quality_scaler_resource_->RegisterEncoderTaskQueue(encoder_queue_->Get());
+ quality_scaler_resource_->RegisterAdaptationTaskQueue(
+ resource_adaptation_queue_->Get());
}
void VideoStreamEncoderResourceManager::SetAdaptationProcessor(
ResourceAdaptationProcessorInterface* adaptation_processor) {
RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
adaptation_processor_ = adaptation_processor;
- prevent_adapt_up_due_to_active_counts_->SetAdaptationProcessor(
- adaptation_processor);
- prevent_adapt_up_in_balanced_resource_->SetAdaptationProcessor(
- adaptation_processor);
+ balanced_constraint_->SetAdaptationProcessor(adaptation_processor);
quality_scaler_resource_->SetAdaptationProcessor(adaptation_processor);
}
@@ -428,7 +358,7 @@ void VideoStreamEncoderResourceManager::MapResourceToReason(
[resource](const ResourceAndReason& r) {
return r.resource == resource;
}) == resources_.end())
- << "Resource " << resource->name() << " already was inserted";
+ << "Resource " << resource->Name() << " already was inserted";
resources_.emplace_back(resource, reason);
}
@@ -442,6 +372,16 @@ VideoStreamEncoderResourceManager::MappedResources() const {
return resources;
}
+std::vector<AdaptationConstraint*>
+VideoStreamEncoderResourceManager::AdaptationConstraints() const {
+ return {bitrate_constraint_, balanced_constraint_};
+}
+
+std::vector<AdaptationListener*>
+VideoStreamEncoderResourceManager::AdaptationListeners() const {
+ return {quality_scaler_resource_};
+}
+
rtc::scoped_refptr<QualityScalerResource>
VideoStreamEncoderResourceManager::quality_scaler_resource_for_testing() {
rtc::CritScope crit(&resource_lock_);
@@ -452,12 +392,7 @@ void VideoStreamEncoderResourceManager::SetEncoderSettings(
EncoderSettings encoder_settings) {
RTC_DCHECK_RUN_ON(encoder_queue_);
encoder_settings_ = std::move(encoder_settings);
- prevent_increase_resolution_due_to_bitrate_resource_
- ->OnEncoderSettingsUpdated(encoder_settings_);
-
- quality_rampup_experiment_.SetMaxBitrate(
- LastInputFrameSizeOrDefault(),
- encoder_settings_->video_codec().maxBitrate);
+ bitrate_constraint_->OnEncoderSettingsUpdated(encoder_settings_);
MaybeUpdateTargetFrameRate();
}
@@ -466,9 +401,9 @@ void VideoStreamEncoderResourceManager::SetStartBitrate(
RTC_DCHECK_RUN_ON(encoder_queue_);
if (!start_bitrate.IsZero()) {
encoder_target_bitrate_bps_ = start_bitrate.bps();
- prevent_increase_resolution_due_to_bitrate_resource_
- ->OnEncoderTargetBitrateUpdated(encoder_target_bitrate_bps_);
- prevent_adapt_up_in_balanced_resource_->OnEncoderTargetBitrateUpdated(
+ bitrate_constraint_->OnEncoderTargetBitrateUpdated(
+ encoder_target_bitrate_bps_);
+ balanced_constraint_->OnEncoderTargetBitrateUpdated(
encoder_target_bitrate_bps_);
}
initial_frame_dropper_->SetStartBitrate(start_bitrate,
@@ -480,9 +415,9 @@ void VideoStreamEncoderResourceManager::SetTargetBitrate(
RTC_DCHECK_RUN_ON(encoder_queue_);
if (!target_bitrate.IsZero()) {
encoder_target_bitrate_bps_ = target_bitrate.bps();
- prevent_increase_resolution_due_to_bitrate_resource_
- ->OnEncoderTargetBitrateUpdated(encoder_target_bitrate_bps_);
- prevent_adapt_up_in_balanced_resource_->OnEncoderTargetBitrateUpdated(
+ bitrate_constraint_->OnEncoderTargetBitrateUpdated(
+ encoder_target_bitrate_bps_);
+ balanced_constraint_->OnEncoderTargetBitrateUpdated(
encoder_target_bitrate_bps_);
}
initial_frame_dropper_->SetTargetBitrate(target_bitrate,
@@ -551,7 +486,16 @@ bool VideoStreamEncoderResourceManager::DropInitialFrames() const {
void VideoStreamEncoderResourceManager::OnMaybeEncodeFrame() {
RTC_DCHECK_RUN_ON(encoder_queue_);
initial_frame_dropper_->OnMaybeEncodeFrame();
- MaybePerformQualityRampupExperiment();
+ if (quality_rampup_experiment_) {
+ DataRate bandwidth = encoder_rates_.has_value()
+ ? encoder_rates_->bandwidth_allocation
+ : DataRate::Zero();
+ quality_rampup_experiment_->PerformQualityRampupExperiment(
+ quality_scaler_resource_, bandwidth,
+ DataRate::BitsPerSec(encoder_target_bitrate_bps_.value_or(0)),
+ DataRate::KilobitsPerSec(encoder_settings_->video_codec().maxBitrate),
+ LastInputFrameSizeOrDefault());
+ }
}
void VideoStreamEncoderResourceManager::UpdateQualityScalerSettings(
@@ -616,7 +560,7 @@ VideoAdaptationReason VideoStreamEncoderResourceManager::GetReasonFromResource(
return r.resource == resource;
});
RTC_DCHECK(registered_resource != resources_.end())
- << resource->name() << " not found.";
+ << resource->Name() << " not found.";
return registered_resource->reason;
}
@@ -654,29 +598,11 @@ void VideoStreamEncoderResourceManager::OnVideoSourceRestrictionsUpdated(
const VideoAdaptationCounters& adaptation_counters,
rtc::scoped_refptr<Resource> reason) {
RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
- VideoAdaptationCounters previous_adaptation_counters =
- active_counts_[VideoAdaptationReason::kQuality] +
- active_counts_[VideoAdaptationReason::kCpu];
- int adaptation_counters_total_abs_diff = std::abs(
- adaptation_counters.Total() - previous_adaptation_counters.Total());
- if (reason) {
- // A resource signal triggered this adaptation. The adaptation counters have
- // to be updated every time the adaptation counter is incremented or
- // decremented due to a resource.
- RTC_DCHECK_EQ(adaptation_counters_total_abs_diff, 1);
- VideoAdaptationReason reason_type = GetReasonFromResource(reason);
- UpdateAdaptationStats(adaptation_counters, reason_type);
- } else if (adaptation_counters.Total() == 0) {
+ // TODO(bugs.webrtc.org/11553) Remove reason parameter and add reset callback.
+ if (!reason && adaptation_counters.Total() == 0) {
// Adaptation was manually reset - clear the per-reason counters too.
- ResetActiveCounts();
encoder_stats_observer_->ClearAdaptationStats();
- } else {
- // If a reason did not increase or decrease the Total() by 1 and the
- // restrictions were not just reset, the adaptation counters MUST not have
- // been modified and there is nothing to do stats-wise.
- RTC_DCHECK_EQ(adaptation_counters_total_abs_diff, 0);
}
- RTC_LOG(LS_INFO) << ActiveCountsToString();
// The VideoStreamEncoder makes the manager outlive the encoder queue. This
// means that if the task gets executed, |this| has not been freed yet.
@@ -687,6 +613,49 @@ void VideoStreamEncoderResourceManager::OnVideoSourceRestrictionsUpdated(
});
}
+void VideoStreamEncoderResourceManager::OnResourceLimitationChanged(
+ rtc::scoped_refptr<Resource> resource,
+ const std::map<rtc::scoped_refptr<Resource>, VideoAdaptationCounters>&
+ resource_limitations) {
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
+ if (!resource) {
+ encoder_stats_observer_->ClearAdaptationStats();
+ return;
+ }
+
+ std::map<VideoAdaptationReason, VideoAdaptationCounters> limitations;
+ for (auto& resource_counter : resource_limitations) {
+ std::map<VideoAdaptationReason, VideoAdaptationCounters>::iterator it;
+ bool inserted;
+ std::tie(it, inserted) = limitations.emplace(
+ GetReasonFromResource(resource_counter.first), resource_counter.second);
+ if (!inserted && it->second.Total() < resource_counter.second.Total()) {
+ it->second = resource_counter.second;
+ }
+ }
+
+ VideoAdaptationReason adaptation_reason = GetReasonFromResource(resource);
+ encoder_stats_observer_->OnAdaptationChanged(
+ adaptation_reason, limitations[VideoAdaptationReason::kCpu],
+ limitations[VideoAdaptationReason::kQuality]);
+
+ encoder_queue_->PostTask(ToQueuedTask(
+ [cpu_limited = limitations.at(VideoAdaptationReason::kCpu).Total() > 0,
+ qp_resolution_adaptations =
+ limitations.at(VideoAdaptationReason::kQuality)
+ .resolution_adaptations,
+ this]() {
+ RTC_DCHECK_RUN_ON(encoder_queue_);
+ if (quality_rampup_experiment_) {
+ quality_rampup_experiment_->cpu_adapted(cpu_limited);
+ quality_rampup_experiment_->qp_resolution_adaptations(
+ qp_resolution_adaptations);
+ }
+ }));
+
+ RTC_LOG(LS_INFO) << ActiveCountsToString(limitations);
+}
+
void VideoStreamEncoderResourceManager::MaybeUpdateTargetFrameRate() {
RTC_DCHECK_RUN_ON(encoder_queue_);
absl::optional<double> codec_max_frame_rate =
@@ -708,84 +677,6 @@ void VideoStreamEncoderResourceManager::MaybeUpdateTargetFrameRate() {
encode_usage_resource_->SetTargetFrameRate(target_frame_rate);
}
-void VideoStreamEncoderResourceManager::OnAdaptationCountChanged(
- const VideoAdaptationCounters& adaptation_count,
- VideoAdaptationCounters* active_count,
- VideoAdaptationCounters* other_active) {
- RTC_DCHECK(active_count);
- RTC_DCHECK(other_active);
- const int active_total = active_count->Total();
- const int other_total = other_active->Total();
- const VideoAdaptationCounters prev_total = *active_count + *other_active;
- const int delta_resolution_adaptations =
- adaptation_count.resolution_adaptations -
- prev_total.resolution_adaptations;
- const int delta_fps_adaptations =
- adaptation_count.fps_adaptations - prev_total.fps_adaptations;
-
- RTC_DCHECK_EQ(
- std::abs(delta_resolution_adaptations) + std::abs(delta_fps_adaptations),
- 1)
- << "Adaptation took more than one step!";
-
- if (delta_resolution_adaptations > 0) {
- ++active_count->resolution_adaptations;
- } else if (delta_resolution_adaptations < 0) {
- if (active_count->resolution_adaptations == 0) {
- RTC_DCHECK_GT(active_count->fps_adaptations, 0) << "No downgrades left";
- RTC_DCHECK_GT(other_active->resolution_adaptations, 0)
- << "No resolution adaptation to borrow from";
- // Lend an fps adaptation to other and take one resolution adaptation.
- --active_count->fps_adaptations;
- ++other_active->fps_adaptations;
- --other_active->resolution_adaptations;
- } else {
- --active_count->resolution_adaptations;
- }
- }
- if (delta_fps_adaptations > 0) {
- ++active_count->fps_adaptations;
- } else if (delta_fps_adaptations < 0) {
- if (active_count->fps_adaptations == 0) {
- RTC_DCHECK_GT(active_count->resolution_adaptations, 0)
- << "No downgrades left";
- RTC_DCHECK_GT(other_active->fps_adaptations, 0)
- << "No fps adaptation to borrow from";
- // Lend a resolution adaptation to other and take one fps adaptation.
- --active_count->resolution_adaptations;
- ++other_active->resolution_adaptations;
- --other_active->fps_adaptations;
- } else {
- --active_count->fps_adaptations;
- }
- }
-
- RTC_DCHECK(*active_count + *other_active == adaptation_count);
- RTC_DCHECK_EQ(other_active->Total(), other_total);
- RTC_DCHECK_EQ(
- active_count->Total(),
- active_total + delta_resolution_adaptations + delta_fps_adaptations);
- RTC_DCHECK_GE(active_count->resolution_adaptations, 0);
- RTC_DCHECK_GE(active_count->fps_adaptations, 0);
- RTC_DCHECK_GE(other_active->resolution_adaptations, 0);
- RTC_DCHECK_GE(other_active->fps_adaptations, 0);
-}
-
-void VideoStreamEncoderResourceManager::UpdateAdaptationStats(
- const VideoAdaptationCounters& total_counts,
- VideoAdaptationReason reason) {
- RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
- // Update active counts
- VideoAdaptationCounters& active_count = active_counts_[reason];
- VideoAdaptationCounters& other_active = active_counts_[OtherReason(reason)];
-
- OnAdaptationCountChanged(total_counts, &active_count, &other_active);
-
- encoder_stats_observer_->OnAdaptationChanged(
- reason, active_counts_[VideoAdaptationReason::kCpu],
- active_counts_[VideoAdaptationReason::kQuality]);
-}
-
void VideoStreamEncoderResourceManager::UpdateStatsAdaptationSettings() const {
RTC_DCHECK_RUN_ON(encoder_queue_);
VideoStreamEncoderObserver::AdaptationSettings cpu_settings(
@@ -800,76 +691,19 @@ void VideoStreamEncoderResourceManager::UpdateStatsAdaptationSettings() const {
quality_settings);
}
-void VideoStreamEncoderResourceManager::MaybePerformQualityRampupExperiment() {
- RTC_DCHECK_RUN_ON(encoder_queue_);
- if (!quality_scaler_resource_->is_started())
- return;
-
- if (quality_rampup_done_)
- return;
-
- int64_t now_ms = clock_->TimeInMilliseconds();
- uint32_t bw_kbps = encoder_rates_.has_value()
- ? encoder_rates_.value().bandwidth_allocation.kbps()
- : 0;
-
- bool try_quality_rampup = false;
- if (quality_rampup_experiment_.BwHigh(now_ms, bw_kbps)) {
- // Verify that encoder is at max bitrate and the QP is low.
- if (encoder_settings_ &&
- encoder_target_bitrate_bps_.value_or(0) ==
- encoder_settings_->video_codec().maxBitrate * 1000 &&
- quality_scaler_resource_->QpFastFilterLow()) {
- try_quality_rampup = true;
- }
- }
- if (try_quality_rampup) {
- // The VideoStreamEncoder makes the manager outlive the adaptation queue.
- // This means that if the task gets executed, |this| has not been freed yet.
- // TODO(https://crbug.com/webrtc/11565): When the manager no longer outlives
- // the adaptation queue, add logic to prevent use-after-free on |this|.
- resource_adaptation_queue_->PostTask([this] {
- RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
- if (!adaptation_processor_) {
- // The processor nulled before this task had a chance to execute. This
- // happens if the processor is destroyed. No action needed.
- return;
- }
- // TODO(https://crbug.com/webrtc/11392): See if we can rely on the total
- // counts or the stats, and not the active counts.
- const VideoAdaptationCounters& qp_counts =
- active_counts_[VideoAdaptationReason::kQuality];
- const VideoAdaptationCounters& cpu_counts =
- active_counts_[VideoAdaptationReason::kCpu];
- if (!quality_rampup_done_ && qp_counts.resolution_adaptations > 0 &&
- cpu_counts.Total() == 0) {
- RTC_LOG(LS_INFO) << "Reset quality limitations.";
- adaptation_processor_->ResetVideoSourceRestrictions();
- quality_rampup_done_ = true;
- }
- });
- }
-}
-
-void VideoStreamEncoderResourceManager::ResetActiveCounts() {
- RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
- active_counts_.clear();
- active_counts_[VideoAdaptationReason::kCpu] = VideoAdaptationCounters();
- active_counts_[VideoAdaptationReason::kQuality] = VideoAdaptationCounters();
-}
-
-std::string VideoStreamEncoderResourceManager::ActiveCountsToString() const {
- RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
- RTC_DCHECK_EQ(2, active_counts_.size());
+// static
+std::string VideoStreamEncoderResourceManager::ActiveCountsToString(
+ const std::map<VideoAdaptationReason, VideoAdaptationCounters>&
+ active_counts) {
rtc::StringBuilder ss;
ss << "Downgrade counts: fps: {";
- for (auto& reason_count : active_counts_) {
+ for (auto& reason_count : active_counts) {
ss << ToString(reason_count.first) << ":";
ss << reason_count.second.fps_adaptations;
}
ss << "}, resolution {";
- for (auto& reason_count : active_counts_) {
+ for (auto& reason_count : active_counts) {
ss << ToString(reason_count.first) << ":";
ss << reason_count.second.resolution_adaptations;
}
@@ -877,4 +711,23 @@ std::string VideoStreamEncoderResourceManager::ActiveCountsToString() const {
return ss.Release();
}
+
+void VideoStreamEncoderResourceManager::OnQualityRampUp() {
+ RTC_DCHECK_RUN_ON(encoder_queue_);
+ // The VideoStreamEncoder makes the manager outlive the adaptation queue.
+ // This means that if the task gets executed, |this| has not been freed yet.
+ // TODO(https://crbug.com/webrtc/11565): When the manager no longer outlives
+ // the adaptation queue, add logic to prevent use-after-free on |this|.
+ resource_adaptation_queue_->PostTask([this] {
+ RTC_DCHECK_RUN_ON(resource_adaptation_queue_);
+ if (!adaptation_processor_) {
+ // The processor nulled before this task had a chance to execute. This
+ // happens if the processor is destroyed. No action needed.
+ return;
+ }
+ RTC_LOG(LS_INFO) << "Reset quality limitations.";
+ adaptation_processor_->ResetVideoSourceRestrictions();
+ });
+ quality_rampup_experiment_.reset();
+}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/video/adaptation/video_stream_encoder_resource_manager.h b/chromium/third_party/webrtc/video/adaptation/video_stream_encoder_resource_manager.h
index d028e5049a0..61ae29b6bf9 100644
--- a/chromium/third_party/webrtc/video/adaptation/video_stream_encoder_resource_manager.h
+++ b/chromium/third_party/webrtc/video/adaptation/video_stream_encoder_resource_manager.h
@@ -20,8 +20,10 @@
#include <vector>
#include "absl/types/optional.h"
+#include "api/adaptation/resource.h"
#include "api/rtp_parameters.h"
#include "api/scoped_refptr.h"
+#include "api/task_queue/task_queue_base.h"
#include "api/video/video_adaptation_counters.h"
#include "api/video/video_adaptation_reason.h"
#include "api/video/video_frame.h"
@@ -30,19 +32,20 @@
#include "api/video_codecs/video_codec.h"
#include "api/video_codecs/video_encoder.h"
#include "api/video_codecs/video_encoder_config.h"
-#include "call/adaptation/resource.h"
#include "call/adaptation/resource_adaptation_processor_interface.h"
#include "call/adaptation/video_stream_adapter.h"
#include "call/adaptation/video_stream_input_state_provider.h"
#include "rtc_base/critical_section.h"
-#include "rtc_base/experiments/quality_rampup_experiment.h"
#include "rtc_base/experiments/quality_scaler_settings.h"
+#include "rtc_base/ref_count.h"
#include "rtc_base/strings/string_builder.h"
#include "rtc_base/task_queue.h"
#include "system_wrappers/include/clock.h"
#include "video/adaptation/encode_usage_resource.h"
#include "video/adaptation/overuse_frame_detector.h"
+#include "video/adaptation/quality_rampup_experiment_helper.h"
#include "video/adaptation/quality_scaler_resource.h"
+#include "video/adaptation/video_stream_encoder_resource.h"
namespace webrtc {
@@ -61,7 +64,8 @@ extern const int kDefaultInputPixelsHeight;
// The manager is also involved with various mitigations not part of the
// ResourceAdaptationProcessor code such as the inital frame dropping.
class VideoStreamEncoderResourceManager
- : public ResourceAdaptationProcessorListener {
+ : public VideoSourceRestrictionsListener,
+ public QualityRampUpExperimentListener {
public:
VideoStreamEncoderResourceManager(
VideoStreamInputStateProvider* input_state_provider,
@@ -109,37 +113,31 @@ class VideoStreamEncoderResourceManager
void OnFrameDropped(EncodedImageCallback::DropReason reason);
// Resources need to be mapped to an AdaptReason (kCpu or kQuality) in order
- // to be able to update |active_counts_|, which is used...
- // - Legacy getStats() purposes.
- // - Preventing adapting up in some circumstances (which may be questionable).
- // TODO(hbos): Can we get rid of this?
+ // to update legacy getStats().
void MapResourceToReason(rtc::scoped_refptr<Resource> resource,
VideoAdaptationReason reason);
std::vector<rtc::scoped_refptr<Resource>> MappedResources() const;
+ std::vector<AdaptationConstraint*> AdaptationConstraints() const;
+ std::vector<AdaptationListener*> AdaptationListeners() const;
rtc::scoped_refptr<QualityScalerResource>
quality_scaler_resource_for_testing();
// If true, the VideoStreamEncoder should eexecute its logic to maybe drop
// frames baseed on size and bitrate.
bool DropInitialFrames() const;
- // ResourceAdaptationProcessorListener implementation.
- // Updates |video_source_restrictions_| and |active_counts_|.
+ // VideoSourceRestrictionsListener implementation.
+ // Updates |video_source_restrictions_|.
void OnVideoSourceRestrictionsUpdated(
VideoSourceRestrictions restrictions,
const VideoAdaptationCounters& adaptation_counters,
rtc::scoped_refptr<Resource> reason) override;
+ void OnResourceLimitationChanged(
+ rtc::scoped_refptr<Resource> resource,
+ const std::map<rtc::scoped_refptr<Resource>, VideoAdaptationCounters>&
+ resource_limitations) override;
- // For reasons of adaptation and statistics, we not only count the total
- // number of adaptations, but we also count the number of adaptations per
- // reason.
- // This method takes the new total number of adaptations and allocates that to
- // the "active" count - number of adaptations for the current reason.
- // The "other" count is the number of adaptations for the other reason.
- // This must be called for each adaptation step made.
- static void OnAdaptationCountChanged(
- const VideoAdaptationCounters& adaptation_count,
- VideoAdaptationCounters* active_count,
- VideoAdaptationCounters* other_active);
+ // QualityRampUpExperimentListener implementation.
+ void OnQualityRampUp() override;
private:
class InitialFrameDropper;
@@ -158,70 +156,28 @@ class VideoStreamEncoderResourceManager
void UpdateQualityScalerSettings(
absl::optional<VideoEncoder::QpThresholds> qp_thresholds);
- void UpdateAdaptationStats(const VideoAdaptationCounters& total_counts,
- VideoAdaptationReason reason);
void UpdateStatsAdaptationSettings() const;
- // Checks to see if we should execute the quality rampup experiment. The
- // experiment resets all video restrictions at the start of the call in the
- // case the bandwidth estimate is high enough.
- // TODO(https://crbug.com/webrtc/11222) Move experiment details into an inner
- // class.
- void MaybePerformQualityRampupExperiment();
-
- void ResetActiveCounts();
- std::string ActiveCountsToString() const;
-
- // TODO(hbos): Consider moving all of the manager's resources into separate
- // files for testability.
-
- // Does not trigger adaptations, only prevents adapting up based on
- // |active_counts_|.
- class PreventAdaptUpDueToActiveCounts final
- : public rtc::RefCountedObject<Resource> {
- public:
- explicit PreventAdaptUpDueToActiveCounts(
- VideoStreamEncoderResourceManager* manager);
- ~PreventAdaptUpDueToActiveCounts() override = default;
-
- void SetAdaptationProcessor(
- ResourceAdaptationProcessorInterface* adaptation_processor);
-
- // Resource overrides.
- std::string name() const override {
- return "PreventAdaptUpDueToActiveCounts";
- }
- bool IsAdaptationUpAllowed(
- const VideoStreamInputState& input_state,
- const VideoSourceRestrictions& restrictions_before,
- const VideoSourceRestrictions& restrictions_after,
- rtc::scoped_refptr<Resource> reason_resource) const override;
-
- private:
- // The |manager_| must be alive as long as this resource is added to the
- // ResourceAdaptationProcessor, i.e. when IsAdaptationUpAllowed() is called.
- VideoStreamEncoderResourceManager* const manager_;
- ResourceAdaptationProcessorInterface* adaptation_processor_
- RTC_GUARDED_BY(resource_adaptation_queue());
- };
+ static std::string ActiveCountsToString(
+ const std::map<VideoAdaptationReason, VideoAdaptationCounters>&
+ active_counts);
+ // TODO(hbos): Add tests for manager's constraints.
// Does not trigger adaptations, only prevents adapting up resolution.
- class PreventIncreaseResolutionDueToBitrateResource final
- : public rtc::RefCountedObject<Resource> {
+ class BitrateConstraint : public rtc::RefCountInterface,
+ public AdaptationConstraint {
public:
- explicit PreventIncreaseResolutionDueToBitrateResource(
- VideoStreamEncoderResourceManager* manager);
- ~PreventIncreaseResolutionDueToBitrateResource() override = default;
+ explicit BitrateConstraint(VideoStreamEncoderResourceManager* manager);
+ ~BitrateConstraint() override = default;
+ void SetAdaptationQueue(TaskQueueBase* resource_adaptation_queue);
void OnEncoderSettingsUpdated(
absl::optional<EncoderSettings> encoder_settings);
void OnEncoderTargetBitrateUpdated(
absl::optional<uint32_t> encoder_target_bitrate_bps);
- // Resource overrides.
- std::string name() const override {
- return "PreventIncreaseResolutionDueToBitrateResource";
- }
+ // AdaptationConstraint implementation.
+ std::string Name() const override { return "BitrateConstraint"; }
bool IsAdaptationUpAllowed(
const VideoStreamInputState& input_state,
const VideoSourceRestrictions& restrictions_before,
@@ -232,29 +188,28 @@ class VideoStreamEncoderResourceManager
// The |manager_| must be alive as long as this resource is added to the
// ResourceAdaptationProcessor, i.e. when IsAdaptationUpAllowed() is called.
VideoStreamEncoderResourceManager* const manager_;
+ TaskQueueBase* resource_adaptation_queue_;
absl::optional<EncoderSettings> encoder_settings_
- RTC_GUARDED_BY(resource_adaptation_queue());
+ RTC_GUARDED_BY(resource_adaptation_queue_);
absl::optional<uint32_t> encoder_target_bitrate_bps_
- RTC_GUARDED_BY(resource_adaptation_queue());
+ RTC_GUARDED_BY(resource_adaptation_queue_);
};
// Does not trigger adaptations, only prevents adapting up in BALANCED.
- class PreventAdaptUpInBalancedResource final
- : public rtc::RefCountedObject<Resource> {
+ class BalancedConstraint : public rtc::RefCountInterface,
+ public AdaptationConstraint {
public:
- explicit PreventAdaptUpInBalancedResource(
- VideoStreamEncoderResourceManager* manager);
- ~PreventAdaptUpInBalancedResource() override = default;
+ explicit BalancedConstraint(VideoStreamEncoderResourceManager* manager);
+ ~BalancedConstraint() override = default;
+ void SetAdaptationQueue(TaskQueueBase* resource_adaptation_queue);
void SetAdaptationProcessor(
ResourceAdaptationProcessorInterface* adaptation_processor);
void OnEncoderTargetBitrateUpdated(
absl::optional<uint32_t> encoder_target_bitrate_bps);
- // Resource overrides.
- std::string name() const override {
- return "PreventAdaptUpInBalancedResource";
- }
+ // AdaptationConstraint implementation.
+ std::string Name() const override { return "BalancedConstraint"; }
bool IsAdaptationUpAllowed(
const VideoStreamInputState& input_state,
const VideoSourceRestrictions& restrictions_before,
@@ -265,18 +220,15 @@ class VideoStreamEncoderResourceManager
// The |manager_| must be alive as long as this resource is added to the
// ResourceAdaptationProcessor, i.e. when IsAdaptationUpAllowed() is called.
VideoStreamEncoderResourceManager* const manager_;
+ TaskQueueBase* resource_adaptation_queue_;
ResourceAdaptationProcessorInterface* adaptation_processor_
- RTC_GUARDED_BY(resource_adaptation_queue());
+ RTC_GUARDED_BY(resource_adaptation_queue_);
absl::optional<uint32_t> encoder_target_bitrate_bps_
- RTC_GUARDED_BY(resource_adaptation_queue());
+ RTC_GUARDED_BY(resource_adaptation_queue_);
};
- const rtc::scoped_refptr<PreventAdaptUpDueToActiveCounts>
- prevent_adapt_up_due_to_active_counts_;
- const rtc::scoped_refptr<PreventIncreaseResolutionDueToBitrateResource>
- prevent_increase_resolution_due_to_bitrate_resource_;
- const rtc::scoped_refptr<PreventAdaptUpInBalancedResource>
- prevent_adapt_up_in_balanced_resource_;
+ const rtc::scoped_refptr<BitrateConstraint> bitrate_constraint_;
+ const rtc::scoped_refptr<BalancedConstraint> balanced_constraint_;
const rtc::scoped_refptr<EncodeUsageResource> encode_usage_resource_;
const rtc::scoped_refptr<QualityScalerResource> quality_scaler_resource_;
@@ -303,9 +255,7 @@ class VideoStreamEncoderResourceManager
RTC_GUARDED_BY(encoder_queue_);
absl::optional<VideoEncoder::RateControlParameters> encoder_rates_
RTC_GUARDED_BY(encoder_queue_);
- // Used on both the encoder queue and resource adaptation queue.
- std::atomic<bool> quality_rampup_done_;
- QualityRampupExperiment quality_rampup_experiment_
+ std::unique_ptr<QualityRampUpExperimentHelper> quality_rampup_experiment_
RTC_GUARDED_BY(encoder_queue_);
absl::optional<EncoderSettings> encoder_settings_
RTC_GUARDED_BY(encoder_queue_);
@@ -323,15 +273,6 @@ class VideoStreamEncoderResourceManager
};
rtc::CriticalSection resource_lock_;
std::vector<ResourceAndReason> resources_ RTC_GUARDED_BY(&resource_lock_);
- // One AdaptationCounter for each reason, tracking the number of times we have
- // adapted for each reason. The sum of active_counts_ MUST always equal the
- // total adaptation provided by the VideoSourceRestrictions.
- // TODO(https://crbug.com/webrtc/11542): When we have an adaptation queue,
- // guard the activec counts by it instead. The |encoder_stats_observer_| is
- // thread-safe anyway, and active counts are used by
- // PreventAdaptUpDueToActiveCounts to make decisions.
- std::unordered_map<VideoAdaptationReason, VideoAdaptationCounters>
- active_counts_ RTC_GUARDED_BY(resource_adaptation_queue_);
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/video/adaptation/video_stream_encoder_resource_manager_unittest.cc b/chromium/third_party/webrtc/video/adaptation/video_stream_encoder_resource_manager_unittest.cc
deleted file mode 100644
index 38ebba63346..00000000000
--- a/chromium/third_party/webrtc/video/adaptation/video_stream_encoder_resource_manager_unittest.cc
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "video/adaptation/video_stream_encoder_resource_manager.h"
-
-#include "api/video/video_adaptation_counters.h"
-#include "test/gmock.h"
-#include "test/gtest.h"
-
-namespace webrtc {
-
-TEST(VideoStreamEncoderResourceManagerTest, FirstAdaptationDown_Fps) {
- VideoAdaptationCounters cpu;
- VideoAdaptationCounters qp;
- VideoAdaptationCounters total(0, 1);
-
- VideoStreamEncoderResourceManager::OnAdaptationCountChanged(total, &cpu, &qp);
- VideoAdaptationCounters expected_cpu(0, 1);
- VideoAdaptationCounters expected_qp;
- EXPECT_EQ(expected_cpu, cpu);
- EXPECT_EQ(expected_qp, qp);
-}
-
-TEST(VideoStreamEncoderResourceManagerTest, FirstAdaptationDown_Resolution) {
- VideoAdaptationCounters cpu;
- VideoAdaptationCounters qp;
- VideoAdaptationCounters total(1, 0);
-
- VideoStreamEncoderResourceManager::OnAdaptationCountChanged(total, &cpu, &qp);
- VideoAdaptationCounters expected_cpu(1, 0);
- VideoAdaptationCounters expected_qp;
- EXPECT_EQ(expected_cpu, cpu);
- EXPECT_EQ(expected_qp, qp);
-}
-
-TEST(VideoStreamEncoderResourceManagerTest, LastAdaptUp_Fps) {
- VideoAdaptationCounters cpu(0, 1);
- VideoAdaptationCounters qp;
- VideoAdaptationCounters total;
-
- VideoStreamEncoderResourceManager::OnAdaptationCountChanged(total, &cpu, &qp);
- VideoAdaptationCounters expected_cpu;
- VideoAdaptationCounters expected_qp;
- EXPECT_EQ(expected_cpu, cpu);
- EXPECT_EQ(expected_qp, qp);
-}
-
-TEST(VideoStreamEncoderResourceManagerTest, LastAdaptUp_Resolution) {
- VideoAdaptationCounters cpu(1, 0);
- VideoAdaptationCounters qp;
- VideoAdaptationCounters total;
-
- VideoStreamEncoderResourceManager::OnAdaptationCountChanged(total, &cpu, &qp);
- VideoAdaptationCounters expected_cpu;
- VideoAdaptationCounters expected_qp;
- EXPECT_EQ(expected_cpu, cpu);
- EXPECT_EQ(expected_qp, qp);
-}
-
-TEST(VideoStreamEncoderResourceManagerTest, AdaptUpWithBorrow_Resolution) {
- VideoAdaptationCounters cpu(0, 1);
- VideoAdaptationCounters qp(1, 0);
- VideoAdaptationCounters total(0, 1);
-
- // CPU adaptation for resolution, but no resolution adaptation left from CPU.
- // We then borrow the resolution adaptation from qp, and give qp the fps
- // adaptation from CPU.
- VideoStreamEncoderResourceManager::OnAdaptationCountChanged(total, &cpu, &qp);
-
- VideoAdaptationCounters expected_cpu(0, 0);
- VideoAdaptationCounters expected_qp(0, 1);
- EXPECT_EQ(expected_cpu, cpu);
- EXPECT_EQ(expected_qp, qp);
-}
-
-TEST(VideoStreamEncoderResourceManagerTest, AdaptUpWithBorrow_Fps) {
- VideoAdaptationCounters cpu(1, 0);
- VideoAdaptationCounters qp(0, 1);
- VideoAdaptationCounters total(1, 0);
-
- // CPU adaptation for fps, but no fps adaptation left from CPU. We then borrow
- // the fps adaptation from qp, and give qp the resolution adaptation from CPU.
- VideoStreamEncoderResourceManager::OnAdaptationCountChanged(total, &cpu, &qp);
-
- VideoAdaptationCounters expected_cpu(0, 0);
- VideoAdaptationCounters expected_qp(1, 0);
- EXPECT_EQ(expected_cpu, cpu);
- EXPECT_EQ(expected_qp, qp);
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/video/call_stats2_unittest.cc b/chromium/third_party/webrtc/video/call_stats2_unittest.cc
index 73fe4b45ca6..b3d43cb92ab 100644
--- a/chromium/third_party/webrtc/video/call_stats2_unittest.cc
+++ b/chromium/third_party/webrtc/video/call_stats2_unittest.cc
@@ -33,7 +33,7 @@ class MockStatsObserver : public CallStatsObserver {
MockStatsObserver() {}
virtual ~MockStatsObserver() {}
- MOCK_METHOD2(OnRttUpdate, void(int64_t, int64_t));
+ MOCK_METHOD(void, OnRttUpdate, (int64_t, int64_t), (override));
};
class CallStats2Test : public ::testing::Test {
diff --git a/chromium/third_party/webrtc/video/call_stats_unittest.cc b/chromium/third_party/webrtc/video/call_stats_unittest.cc
index c560ccbee6b..e85c4f8c542 100644
--- a/chromium/third_party/webrtc/video/call_stats_unittest.cc
+++ b/chromium/third_party/webrtc/video/call_stats_unittest.cc
@@ -32,7 +32,7 @@ class MockStatsObserver : public CallStatsObserver {
MockStatsObserver() {}
virtual ~MockStatsObserver() {}
- MOCK_METHOD2(OnRttUpdate, void(int64_t, int64_t));
+ MOCK_METHOD(void, OnRttUpdate, (int64_t, int64_t), (override));
};
class CallStatsTest : public ::testing::Test {
diff --git a/chromium/third_party/webrtc/video/end_to_end_tests/bandwidth_tests.cc b/chromium/third_party/webrtc/video/end_to_end_tests/bandwidth_tests.cc
index 16b35d68f8a..19384944066 100644
--- a/chromium/third_party/webrtc/video/end_to_end_tests/bandwidth_tests.cc
+++ b/chromium/third_party/webrtc/video/end_to_end_tests/bandwidth_tests.cc
@@ -16,7 +16,7 @@
#include "api/video/video_bitrate_allocation.h"
#include "call/fake_network_pipe.h"
#include "call/simulated_network.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "rtc_base/rate_limiter.h"
#include "rtc_base/task_queue_for_test.h"
#include "rtc_base/task_utils/to_queued_task.h"
@@ -205,8 +205,9 @@ TEST_F(BandwidthEndToEndTest, RembWithSendSideBwe) {
~BweObserver() override {
// Block until all already posted tasks run to avoid races when such task
- // accesses |this|.
- SendTask(RTC_FROM_HERE, task_queue_, [] {});
+ // accesses |this|. Also make sure we free |rtp_rtcp_| on the correct
+ // thread/task queue.
+ SendTask(RTC_FROM_HERE, task_queue_, [this]() { rtp_rtcp_ = nullptr; });
}
std::unique_ptr<test::PacketTransport> CreateReceiveTransport(
@@ -237,13 +238,13 @@ TEST_F(BandwidthEndToEndTest, RembWithSendSideBwe) {
encoder_config->max_bitrate_bps = 2000000;
ASSERT_EQ(1u, receive_configs->size());
- RtpRtcp::Configuration config;
+ RtpRtcpInterface::Configuration config;
config.receiver_only = true;
config.clock = clock_;
config.outgoing_transport = receive_transport_;
config.retransmission_rate_limiter = &retransmission_rate_limiter_;
config.local_media_ssrc = (*receive_configs)[0].rtp.local_ssrc;
- rtp_rtcp_ = RtpRtcp::Create(config);
+ rtp_rtcp_ = ModuleRtpRtcpImpl2::Create(config);
rtp_rtcp_->SetRemoteSSRC((*receive_configs)[0].rtp.remote_ssrc);
rtp_rtcp_->SetRTCPStatus(RtcpMode::kReducedSize);
}
@@ -302,7 +303,7 @@ TEST_F(BandwidthEndToEndTest, RembWithSendSideBwe) {
Clock* const clock_;
uint32_t sender_ssrc_;
int remb_bitrate_bps_;
- std::unique_ptr<RtpRtcp> rtp_rtcp_;
+ std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp_;
test::PacketTransport* receive_transport_;
TestState state_;
RateLimiter retransmission_rate_limiter_;
diff --git a/chromium/third_party/webrtc/video/end_to_end_tests/codec_tests.cc b/chromium/third_party/webrtc/video/end_to_end_tests/codec_tests.cc
index b73b289ec83..d10e08daf10 100644
--- a/chromium/third_party/webrtc/video/end_to_end_tests/codec_tests.cc
+++ b/chromium/third_party/webrtc/video/end_to_end_tests/codec_tests.cc
@@ -34,18 +34,14 @@ enum : int { // The first valid value is 1.
};
} // namespace
-class CodecEndToEndTest : public test::CallTest,
- public ::testing::WithParamInterface<std::string> {
+class CodecEndToEndTest : public test::CallTest {
public:
- CodecEndToEndTest() : field_trial_(GetParam()) {
+ CodecEndToEndTest() {
RegisterRtpExtension(
RtpExtension(RtpExtension::kColorSpaceUri, kColorSpaceExtensionId));
RegisterRtpExtension(RtpExtension(RtpExtension::kVideoRotationUri,
kVideoRotationExtensionId));
}
-
- private:
- test::ScopedFieldTrials field_trial_;
};
class CodecObserver : public test::EndToEndTest,
@@ -121,13 +117,7 @@ class CodecObserver : public test::EndToEndTest,
int frame_counter_;
};
-INSTANTIATE_TEST_SUITE_P(
- GenericDescriptor,
- CodecEndToEndTest,
- ::testing::Values("WebRTC-GenericDescriptor/Disabled/",
- "WebRTC-GenericDescriptor/Enabled/"));
-
-TEST_P(CodecEndToEndTest, SendsAndReceivesVP8) {
+TEST_F(CodecEndToEndTest, SendsAndReceivesVP8) {
test::FunctionVideoEncoderFactory encoder_factory(
[]() { return VP8Encoder::Create(); });
test::FunctionVideoDecoderFactory decoder_factory(
@@ -137,7 +127,7 @@ TEST_P(CodecEndToEndTest, SendsAndReceivesVP8) {
RunBaseTest(&test);
}
-TEST_P(CodecEndToEndTest, SendsAndReceivesVP8Rotation90) {
+TEST_F(CodecEndToEndTest, SendsAndReceivesVP8Rotation90) {
test::FunctionVideoEncoderFactory encoder_factory(
[]() { return VP8Encoder::Create(); });
test::FunctionVideoDecoderFactory decoder_factory(
@@ -148,7 +138,7 @@ TEST_P(CodecEndToEndTest, SendsAndReceivesVP8Rotation90) {
}
#if defined(RTC_ENABLE_VP9)
-TEST_P(CodecEndToEndTest, SendsAndReceivesVP9) {
+TEST_F(CodecEndToEndTest, SendsAndReceivesVP9) {
test::FunctionVideoEncoderFactory encoder_factory(
[]() { return VP9Encoder::Create(); });
test::FunctionVideoDecoderFactory decoder_factory(
@@ -158,7 +148,7 @@ TEST_P(CodecEndToEndTest, SendsAndReceivesVP9) {
RunBaseTest(&test);
}
-TEST_P(CodecEndToEndTest, SendsAndReceivesVP9VideoRotation90) {
+TEST_F(CodecEndToEndTest, SendsAndReceivesVP9VideoRotation90) {
test::FunctionVideoEncoderFactory encoder_factory(
[]() { return VP9Encoder::Create(); });
test::FunctionVideoDecoderFactory decoder_factory(
@@ -168,7 +158,7 @@ TEST_P(CodecEndToEndTest, SendsAndReceivesVP9VideoRotation90) {
RunBaseTest(&test);
}
-TEST_P(CodecEndToEndTest, SendsAndReceivesVP9ExplicitColorSpace) {
+TEST_F(CodecEndToEndTest, SendsAndReceivesVP9ExplicitColorSpace) {
test::FunctionVideoEncoderFactory encoder_factory(
[]() { return VP9Encoder::Create(); });
test::FunctionVideoDecoderFactory decoder_factory(
@@ -179,7 +169,7 @@ TEST_P(CodecEndToEndTest, SendsAndReceivesVP9ExplicitColorSpace) {
RunBaseTest(&test);
}
-TEST_P(CodecEndToEndTest,
+TEST_F(CodecEndToEndTest,
SendsAndReceivesVP9ExplicitColorSpaceWithHdrMetadata) {
test::FunctionVideoEncoderFactory encoder_factory(
[]() { return VP9Encoder::Create(); });
@@ -192,7 +182,7 @@ TEST_P(CodecEndToEndTest,
}
// Mutiplex tests are using VP9 as the underlying implementation.
-TEST_P(CodecEndToEndTest, SendsAndReceivesMultiplex) {
+TEST_F(CodecEndToEndTest, SendsAndReceivesMultiplex) {
InternalEncoderFactory internal_encoder_factory;
InternalDecoderFactory internal_decoder_factory;
test::FunctionVideoEncoderFactory encoder_factory(
@@ -211,7 +201,7 @@ TEST_P(CodecEndToEndTest, SendsAndReceivesMultiplex) {
RunBaseTest(&test);
}
-TEST_P(CodecEndToEndTest, SendsAndReceivesMultiplexVideoRotation90) {
+TEST_F(CodecEndToEndTest, SendsAndReceivesMultiplexVideoRotation90) {
InternalEncoderFactory internal_encoder_factory;
InternalDecoderFactory internal_decoder_factory;
test::FunctionVideoEncoderFactory encoder_factory(
diff --git a/chromium/third_party/webrtc/video/full_stack_tests.cc b/chromium/third_party/webrtc/video/full_stack_tests.cc
index 7307b462b73..a12b3342301 100644
--- a/chromium/third_party/webrtc/video/full_stack_tests.cc
+++ b/chromium/third_party/webrtc/video/full_stack_tests.cc
@@ -90,26 +90,6 @@ std::string ClipNameToClipPath(const char* clip_name) {
// logs // bool
// };
-class GenericDescriptorTest : public ::testing::TestWithParam<std::string> {
- public:
- GenericDescriptorTest()
- : field_trial_(AppendFieldTrials(GetParam())),
- generic_descriptor_enabled_(
- field_trial::IsEnabled("WebRTC-GenericDescriptor")) {}
-
- std::string GetTestName(std::string base) {
- if (generic_descriptor_enabled_)
- base += "_generic_descriptor";
- return base;
- }
-
- bool GenericDescriptorEnabled() const { return generic_descriptor_enabled_; }
-
- private:
- test::ScopedFieldTrials field_trial_;
- bool generic_descriptor_enabled_;
-};
-
#if defined(RTC_ENABLE_VP9)
TEST(FullStackTest, ForemanCifWithoutPacketLossVp9) {
auto fixture = CreateVideoQualityTestFixture();
@@ -125,7 +105,7 @@ TEST(FullStackTest, ForemanCifWithoutPacketLossVp9) {
fixture->RunWithAnalyzer(foreman_cif);
}
-TEST_P(GenericDescriptorTest, ForemanCifPlr5Vp9) {
+TEST(GenericDescriptorTest, ForemanCifPlr5Vp9) {
auto fixture = CreateVideoQualityTestFixture();
ParamsWithLogging foreman_cif;
foreman_cif.call.send_side_bwe = true;
@@ -134,11 +114,11 @@ TEST_P(GenericDescriptorTest, ForemanCifPlr5Vp9) {
30000, 500000, 2000000, false,
"VP9", 1, 0, 0,
false, false, true, ClipNameToClipPath("foreman_cif")};
- foreman_cif.analyzer = {GetTestName("foreman_cif_delay_50_0_plr_5_VP9"), 0.0,
- 0.0, kFullStackTestDurationSecs};
+ foreman_cif.analyzer = {"foreman_cif_delay_50_0_plr_5_VP9_generic_descriptor",
+ 0.0, 0.0, kFullStackTestDurationSecs};
foreman_cif.config->loss_percent = 5;
foreman_cif.config->queue_delay_ms = 50;
- foreman_cif.call.generic_descriptor = GenericDescriptorEnabled();
+ foreman_cif.call.generic_descriptor = true;
fixture->RunWithAnalyzer(foreman_cif);
}
@@ -217,7 +197,7 @@ TEST(FullStackTest, MAYBE_ParisQcifWithoutPacketLoss) {
fixture->RunWithAnalyzer(paris_qcif);
}
-TEST_P(GenericDescriptorTest, ForemanCifWithoutPacketLoss) {
+TEST(GenericDescriptorTest, ForemanCifWithoutPacketLoss) {
auto fixture = CreateVideoQualityTestFixture();
// TODO(pbos): Decide on psnr/ssim thresholds for foreman_cif.
ParamsWithLogging foreman_cif;
@@ -227,13 +207,13 @@ TEST_P(GenericDescriptorTest, ForemanCifWithoutPacketLoss) {
700000, 700000, 700000, false,
"VP8", 1, 0, 0,
false, false, true, ClipNameToClipPath("foreman_cif")};
- foreman_cif.analyzer = {GetTestName("foreman_cif_net_delay_0_0_plr_0"), 0.0,
- 0.0, kFullStackTestDurationSecs};
- foreman_cif.call.generic_descriptor = GenericDescriptorEnabled();
+ foreman_cif.analyzer = {"foreman_cif_net_delay_0_0_plr_0_generic_descriptor",
+ 0.0, 0.0, kFullStackTestDurationSecs};
+ foreman_cif.call.generic_descriptor = true;
fixture->RunWithAnalyzer(foreman_cif);
}
-TEST_P(GenericDescriptorTest, ForemanCif30kbpsWithoutPacketLoss) {
+TEST(GenericDescriptorTest, ForemanCif30kbpsWithoutPacketLoss) {
auto fixture = CreateVideoQualityTestFixture();
ParamsWithLogging foreman_cif;
foreman_cif.call.send_side_bwe = true;
@@ -242,15 +222,16 @@ TEST_P(GenericDescriptorTest, ForemanCif30kbpsWithoutPacketLoss) {
30000, 30000, 30000, false,
"VP8", 1, 0, 0,
false, false, true, ClipNameToClipPath("foreman_cif")};
- foreman_cif.analyzer = {GetTestName("foreman_cif_30kbps_net_delay_0_0_plr_0"),
- 0.0, 0.0, kFullStackTestDurationSecs};
- foreman_cif.call.generic_descriptor = GenericDescriptorEnabled();
+ foreman_cif.analyzer = {
+ "foreman_cif_30kbps_net_delay_0_0_plr_0_generic_descriptor", 0.0, 0.0,
+ kFullStackTestDurationSecs};
+ foreman_cif.call.generic_descriptor = true;
fixture->RunWithAnalyzer(foreman_cif);
}
// TODO(webrtc:9722): Remove when experiment is cleaned up.
-TEST_P(GenericDescriptorTest,
- ForemanCif30kbpsWithoutPacketLossTrustedRateControl) {
+TEST(GenericDescriptorTest,
+ ForemanCif30kbpsWithoutPacketLossTrustedRateControl) {
test::ScopedFieldTrials override_field_trials(
AppendFieldTrials(kVp8TrustedRateControllerFieldTrial));
auto fixture = CreateVideoQualityTestFixture();
@@ -263,9 +244,10 @@ TEST_P(GenericDescriptorTest,
"VP8", 1, 0, 0,
false, false, true, ClipNameToClipPath("foreman_cif")};
foreman_cif.analyzer = {
- GetTestName("foreman_cif_30kbps_net_delay_0_0_plr_0_trusted_rate_ctrl"),
+ "foreman_cif_30kbps_net_delay_0_0_plr_0_trusted_rate_ctrl_generic_"
+ "descriptor",
0.0, 0.0, kFullStackTestDurationSecs};
- foreman_cif.call.generic_descriptor = GenericDescriptorEnabled();
+ foreman_cif.call.generic_descriptor = true;
fixture->RunWithAnalyzer(foreman_cif);
}
@@ -328,7 +310,7 @@ TEST(FullStackTest, ForemanCifMediaCapacitySmallLossAndQueue) {
fixture->RunWithAnalyzer(foreman_cif);
}
-TEST_P(GenericDescriptorTest, ForemanCifPlr5) {
+TEST(GenericDescriptorTest, ForemanCifPlr5) {
auto fixture = CreateVideoQualityTestFixture();
ParamsWithLogging foreman_cif;
foreman_cif.call.send_side_bwe = true;
@@ -337,15 +319,15 @@ TEST_P(GenericDescriptorTest, ForemanCifPlr5) {
30000, 500000, 2000000, false,
"VP8", 1, 0, 0,
false, false, true, ClipNameToClipPath("foreman_cif")};
- foreman_cif.analyzer = {GetTestName("foreman_cif_delay_50_0_plr_5"), 0.0, 0.0,
- kFullStackTestDurationSecs};
+ foreman_cif.analyzer = {"foreman_cif_delay_50_0_plr_5_generic_descriptor",
+ 0.0, 0.0, kFullStackTestDurationSecs};
foreman_cif.config->loss_percent = 5;
foreman_cif.config->queue_delay_ms = 50;
- foreman_cif.call.generic_descriptor = GenericDescriptorEnabled();
+ foreman_cif.call.generic_descriptor = true;
fixture->RunWithAnalyzer(foreman_cif);
}
-TEST_P(GenericDescriptorTest, ForemanCifPlr5Ulpfec) {
+TEST(GenericDescriptorTest, ForemanCifPlr5Ulpfec) {
auto fixture = CreateVideoQualityTestFixture();
ParamsWithLogging foreman_cif;
foreman_cif.call.send_side_bwe = true;
@@ -354,11 +336,12 @@ TEST_P(GenericDescriptorTest, ForemanCifPlr5Ulpfec) {
30000, 500000, 2000000, false,
"VP8", 1, 0, 0,
true, false, true, ClipNameToClipPath("foreman_cif")};
- foreman_cif.analyzer = {GetTestName("foreman_cif_delay_50_0_plr_5_ulpfec"),
- 0.0, 0.0, kFullStackTestDurationSecs};
+ foreman_cif.analyzer = {
+ "foreman_cif_delay_50_0_plr_5_ulpfec_generic_descriptor", 0.0, 0.0,
+ kFullStackTestDurationSecs};
foreman_cif.config->loss_percent = 5;
foreman_cif.config->queue_delay_ms = 50;
- foreman_cif.call.generic_descriptor = GenericDescriptorEnabled();
+ foreman_cif.call.generic_descriptor = true;
fixture->RunWithAnalyzer(foreman_cif);
}
@@ -442,7 +425,7 @@ TEST(FullStackTest, ForemanCif30kbpsWithoutPacketlossH264) {
fixture->RunWithAnalyzer(foreman_cif);
}
-TEST_P(GenericDescriptorTest, ForemanCifPlr5H264) {
+TEST(GenericDescriptorTest, ForemanCifPlr5H264) {
auto fixture = CreateVideoQualityTestFixture();
ParamsWithLogging foreman_cif;
foreman_cif.call.send_side_bwe = true;
@@ -451,11 +434,12 @@ TEST_P(GenericDescriptorTest, ForemanCifPlr5H264) {
30000, 500000, 2000000, false,
"H264", 1, 0, 0,
false, false, true, ClipNameToClipPath("foreman_cif")};
- foreman_cif.analyzer = {GetTestName("foreman_cif_delay_50_0_plr_5_H264"), 0.0,
- 0.0, kFullStackTestDurationSecs};
+ foreman_cif.analyzer = {
+ "foreman_cif_delay_50_0_plr_5_H264_generic_descriptor", 0.0, 0.0,
+ kFullStackTestDurationSecs};
foreman_cif.config->loss_percent = 5;
foreman_cif.config->queue_delay_ms = 50;
- foreman_cif.call.generic_descriptor = GenericDescriptorEnabled();
+ foreman_cif.call.generic_descriptor = true;
fixture->RunWithAnalyzer(foreman_cif);
}
@@ -565,7 +549,7 @@ TEST(FullStackTest, ForemanCif500kbps100ms) {
fixture->RunWithAnalyzer(foreman_cif);
}
-TEST_P(GenericDescriptorTest, ForemanCif500kbps100msLimitedQueue) {
+TEST(GenericDescriptorTest, ForemanCif500kbps100msLimitedQueue) {
auto fixture = CreateVideoQualityTestFixture();
ParamsWithLogging foreman_cif;
foreman_cif.call.send_side_bwe = true;
@@ -574,12 +558,13 @@ TEST_P(GenericDescriptorTest, ForemanCif500kbps100msLimitedQueue) {
30000, 500000, 2000000, false,
"VP8", 1, 0, 0,
false, false, true, ClipNameToClipPath("foreman_cif")};
- foreman_cif.analyzer = {GetTestName("foreman_cif_500kbps_100ms_32pkts_queue"),
- 0.0, 0.0, kFullStackTestDurationSecs};
+ foreman_cif.analyzer = {
+ "foreman_cif_500kbps_100ms_32pkts_queue_generic_descriptor", 0.0, 0.0,
+ kFullStackTestDurationSecs};
foreman_cif.config->queue_length_packets = 32;
foreman_cif.config->queue_delay_ms = 100;
foreman_cif.config->link_capacity_kbps = 500;
- foreman_cif.call.generic_descriptor = GenericDescriptorEnabled();
+ foreman_cif.call.generic_descriptor = true;
fixture->RunWithAnalyzer(foreman_cif);
}
@@ -666,7 +651,7 @@ TEST(FullStackTest, ConferenceMotionHd1TLModerateLimitsWhitelistVp8) {
fixture->RunWithAnalyzer(conf_motion_hd);
}
-TEST_P(GenericDescriptorTest, ConferenceMotionHd2TLModerateLimits) {
+TEST(GenericDescriptorTest, ConferenceMotionHd2TLModerateLimits) {
auto fixture = CreateVideoQualityTestFixture();
ParamsWithLogging conf_motion_hd;
conf_motion_hd.call.send_side_bwe = true;
@@ -680,13 +665,13 @@ TEST_P(GenericDescriptorTest, ConferenceMotionHd2TLModerateLimits) {
false, false,
false, ClipNameToClipPath("ConferenceMotion_1280_720_50")};
conf_motion_hd.analyzer = {
- GetTestName("conference_motion_hd_2tl_moderate_limits"), 0.0, 0.0,
+ "conference_motion_hd_2tl_moderate_limits_generic_descriptor", 0.0, 0.0,
kFullStackTestDurationSecs};
conf_motion_hd.config->queue_length_packets = 50;
conf_motion_hd.config->loss_percent = 3;
conf_motion_hd.config->queue_delay_ms = 100;
conf_motion_hd.config->link_capacity_kbps = 2000;
- conf_motion_hd.call.generic_descriptor = GenericDescriptorEnabled();
+ conf_motion_hd.call.generic_descriptor = true;
fixture->RunWithAnalyzer(conf_motion_hd);
}
@@ -867,7 +852,7 @@ TEST(FullStackTest, ScreenshareSlidesVP8_2TL_Scroll) {
fixture->RunWithAnalyzer(config);
}
-TEST_P(GenericDescriptorTest, ScreenshareSlidesVP8_2TL_LossyNet) {
+TEST(GenericDescriptorTest, ScreenshareSlidesVP8_2TL_LossyNet) {
auto fixture = CreateVideoQualityTestFixture();
ParamsWithLogging screenshare;
screenshare.call.send_side_bwe = true;
@@ -875,12 +860,12 @@ TEST_P(GenericDescriptorTest, ScreenshareSlidesVP8_2TL_LossyNet) {
1000000, false, "VP8", 2, 1, 400000,
false, false, false, ""};
screenshare.screenshare[0] = {true, false, 10};
- screenshare.analyzer = {GetTestName("screenshare_slides_lossy_net"), 0.0, 0.0,
- kFullStackTestDurationSecs};
+ screenshare.analyzer = {"screenshare_slides_lossy_net_generic_descriptor",
+ 0.0, 0.0, kFullStackTestDurationSecs};
screenshare.config->loss_percent = 5;
screenshare.config->queue_delay_ms = 200;
screenshare.config->link_capacity_kbps = 500;
- screenshare.call.generic_descriptor = GenericDescriptorEnabled();
+ screenshare.call.generic_descriptor = true;
fixture->RunWithAnalyzer(screenshare);
}
@@ -1246,10 +1231,4 @@ TEST(FullStackTest, MAYBE_LargeRoomVP8_50thumb) {
fixture->RunWithAnalyzer(large_room);
}
-INSTANTIATE_TEST_SUITE_P(
- FullStackTest,
- GenericDescriptorTest,
- ::testing::Values("WebRTC-GenericDescriptor/Disabled/",
- "WebRTC-GenericDescriptor/Enabled/"));
-
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/video/pc_full_stack_tests.cc b/chromium/third_party/webrtc/video/pc_full_stack_tests.cc
index 4ec382ef54b..8a6029cee41 100644
--- a/chromium/third_party/webrtc/video/pc_full_stack_tests.cc
+++ b/chromium/third_party/webrtc/video/pc_full_stack_tests.cc
@@ -106,24 +106,6 @@ std::string ClipNameToClipPath(const char* clip_name) {
} // namespace
-class PCGenericDescriptorTest : public ::testing::TestWithParam<std::string> {
- public:
- PCGenericDescriptorTest()
- : field_trial_(AppendFieldTrials(GetParam())),
- generic_descriptor_enabled_(
- field_trial::IsEnabled("WebRTC-GenericDescriptor")) {}
-
- std::string GetTestName(std::string base) {
- if (generic_descriptor_enabled_)
- base += "_generic_descriptor";
- return base;
- }
-
- private:
- test::ScopedFieldTrials field_trial_;
- bool generic_descriptor_enabled_;
-};
-
#if defined(RTC_ENABLE_VP9)
TEST(PCFullStackTest, ForemanCifWithoutPacketLossVp9) {
std::unique_ptr<NetworkEmulationManager> network_emulation_manager =
@@ -149,14 +131,14 @@ TEST(PCFullStackTest, ForemanCifWithoutPacketLossVp9) {
fixture->Run(std::move(run_params));
}
-TEST_P(PCGenericDescriptorTest, ForemanCifPlr5Vp9) {
+TEST(PCGenericDescriptorTest, ForemanCifPlr5Vp9) {
std::unique_ptr<NetworkEmulationManager> network_emulation_manager =
CreateNetworkEmulationManager();
BuiltInNetworkBehaviorConfig config;
config.loss_percent = 5;
config.queue_delay_ms = 50;
auto fixture = CreateTestFixture(
- GetTestName("pc_foreman_cif_delay_50_0_plr_5_VP9"),
+ "pc_foreman_cif_delay_50_0_plr_5_VP9_generic_descriptor",
CreateTwoNetworkLinks(network_emulation_manager.get(), config),
[](PeerConfigurer* alice) {
VideoConfig video(352, 288, 30);
@@ -263,11 +245,11 @@ TEST(PCFullStackTest, ParisQcifWithoutPacketLoss) {
fixture->Run(std::move(run_params));
}
-TEST_P(PCGenericDescriptorTest, ForemanCifWithoutPacketLoss) {
+TEST(PCGenericDescriptorTest, ForemanCifWithoutPacketLoss) {
std::unique_ptr<NetworkEmulationManager> network_emulation_manager =
CreateNetworkEmulationManager();
auto fixture = CreateTestFixture(
- GetTestName("pc_foreman_cif_net_delay_0_0_plr_0"),
+ "pc_foreman_cif_net_delay_0_0_plr_0_generic_descriptor",
CreateTwoNetworkLinks(network_emulation_manager.get(),
BuiltInNetworkBehaviorConfig()),
[](PeerConfigurer* alice) {
@@ -285,12 +267,12 @@ TEST_P(PCGenericDescriptorTest, ForemanCifWithoutPacketLoss) {
fixture->Run(std::move(run_params));
}
-TEST_P(PCGenericDescriptorTest, ForemanCif30kbpsWithoutPacketLoss) {
+TEST(PCGenericDescriptorTest, ForemanCif30kbpsWithoutPacketLoss) {
std::unique_ptr<NetworkEmulationManager> network_emulation_manager =
CreateNetworkEmulationManager();
BuiltInNetworkBehaviorConfig config;
auto fixture = CreateTestFixture(
- GetTestName("pc_foreman_cif_30kbps_net_delay_0_0_plr_0"),
+ "pc_foreman_cif_30kbps_net_delay_0_0_plr_0_generic_descriptor",
CreateTwoNetworkLinks(network_emulation_manager.get(), config),
[](PeerConfigurer* alice) {
VideoConfig video(352, 288, 10);
@@ -299,11 +281,11 @@ TEST_P(PCGenericDescriptorTest, ForemanCif30kbpsWithoutPacketLoss) {
video, ClipNameToClipPath("foreman_cif"));
alice->AddVideoConfig(std::move(video), std::move(frame_generator));
- PeerConnectionInterface::BitrateParameters bitrate_params;
- bitrate_params.min_bitrate_bps = 30000;
- bitrate_params.current_bitrate_bps = 30000;
- bitrate_params.max_bitrate_bps = 30000;
- alice->SetBitrateParameters(bitrate_params);
+ BitrateSettings bitrate_settings;
+ bitrate_settings.min_bitrate_bps = 30000;
+ bitrate_settings.start_bitrate_bps = 30000;
+ bitrate_settings.max_bitrate_bps = 30000;
+ alice->SetBitrateSettings(bitrate_settings);
},
[](PeerConfigurer* bob) {});
RunParams run_params(TimeDelta::Seconds(kTestDurationSec));
@@ -314,16 +296,16 @@ TEST_P(PCGenericDescriptorTest, ForemanCif30kbpsWithoutPacketLoss) {
}
// TODO(webrtc:9722): Remove when experiment is cleaned up.
-TEST_P(PCGenericDescriptorTest,
- ForemanCif30kbpsWithoutPacketLossTrustedRateControl) {
+TEST(PCGenericDescriptorTest,
+ ForemanCif30kbpsWithoutPacketLossTrustedRateControl) {
test::ScopedFieldTrials override_field_trials(
AppendFieldTrials(kVp8TrustedRateControllerFieldTrial));
std::unique_ptr<NetworkEmulationManager> network_emulation_manager =
CreateNetworkEmulationManager();
BuiltInNetworkBehaviorConfig config;
auto fixture = CreateTestFixture(
- GetTestName(
- "pc_foreman_cif_30kbps_net_delay_0_0_plr_0_trusted_rate_ctrl"),
+ "pc_foreman_cif_30kbps_net_delay_0_0_plr_0_trusted_rate_ctrl_generic_"
+ "descriptor",
CreateTwoNetworkLinks(network_emulation_manager.get(), config),
[](PeerConfigurer* alice) {
VideoConfig video(352, 288, 10);
@@ -332,11 +314,11 @@ TEST_P(PCGenericDescriptorTest,
video, ClipNameToClipPath("foreman_cif"));
alice->AddVideoConfig(std::move(video), std::move(frame_generator));
- PeerConnectionInterface::BitrateParameters bitrate_params;
- bitrate_params.min_bitrate_bps = 30000;
- bitrate_params.current_bitrate_bps = 30000;
- bitrate_params.max_bitrate_bps = 30000;
- alice->SetBitrateParameters(bitrate_params);
+ BitrateSettings bitrate_settings;
+ bitrate_settings.min_bitrate_bps = 30000;
+ bitrate_settings.start_bitrate_bps = 30000;
+ bitrate_settings.max_bitrate_bps = 30000;
+ alice->SetBitrateSettings(bitrate_settings);
},
[](PeerConfigurer* bob) {});
RunParams run_params(TimeDelta::Seconds(kTestDurationSec));
@@ -478,14 +460,14 @@ TEST(PCFullStackTest, ForemanCifMediaCapacitySmallLossAndQueue) {
fixture->Run(std::move(run_params));
}
-TEST_P(PCGenericDescriptorTest, ForemanCifPlr5) {
+TEST(PCGenericDescriptorTest, ForemanCifPlr5) {
std::unique_ptr<NetworkEmulationManager> network_emulation_manager =
CreateNetworkEmulationManager();
BuiltInNetworkBehaviorConfig config;
config.loss_percent = 5;
config.queue_delay_ms = 50;
auto fixture = CreateTestFixture(
- GetTestName("pc_foreman_cif_delay_50_0_plr_5"),
+ "pc_foreman_cif_delay_50_0_plr_5_generic_descriptor",
CreateTwoNetworkLinks(network_emulation_manager.get(), config),
[](PeerConfigurer* alice) {
VideoConfig video(352, 288, 30);
@@ -502,14 +484,14 @@ TEST_P(PCGenericDescriptorTest, ForemanCifPlr5) {
fixture->Run(std::move(run_params));
}
-TEST_P(PCGenericDescriptorTest, ForemanCifPlr5Ulpfec) {
+TEST(PCGenericDescriptorTest, ForemanCifPlr5Ulpfec) {
std::unique_ptr<NetworkEmulationManager> network_emulation_manager =
CreateNetworkEmulationManager();
BuiltInNetworkBehaviorConfig config;
config.loss_percent = 5;
config.queue_delay_ms = 50;
auto fixture = CreateTestFixture(
- GetTestName("pc_foreman_cif_delay_50_0_plr_5_ulpfec"),
+ "pc_foreman_cif_delay_50_0_plr_5_ulpfec_generic_descriptor",
CreateTwoNetworkLinks(network_emulation_manager.get(), config),
[](PeerConfigurer* alice) {
VideoConfig video(352, 288, 30);
@@ -637,11 +619,11 @@ TEST(PCFullStackTest, ForemanCif30kbpsWithoutPacketlossH264) {
video, ClipNameToClipPath("foreman_cif"));
alice->AddVideoConfig(std::move(video), std::move(frame_generator));
- PeerConnectionInterface::BitrateParameters bitrate_params;
- bitrate_params.min_bitrate_bps = 30000;
- bitrate_params.current_bitrate_bps = 30000;
- bitrate_params.max_bitrate_bps = 30000;
- alice->SetBitrateParameters(bitrate_params);
+ BitrateSettings bitrate_settings;
+ bitrate_settings.min_bitrate_bps = 30000;
+ bitrate_settings.start_bitrate_bps = 30000;
+ bitrate_settings.max_bitrate_bps = 30000;
+ alice->SetBitrateSettings(bitrate_settings);
},
[](PeerConfigurer* bob) {});
RunParams run_params(TimeDelta::Seconds(kTestDurationSec));
@@ -651,14 +633,14 @@ TEST(PCFullStackTest, ForemanCif30kbpsWithoutPacketlossH264) {
fixture->Run(std::move(run_params));
}
-TEST_P(PCGenericDescriptorTest, ForemanCifPlr5H264) {
+TEST(PCGenericDescriptorTest, ForemanCifPlr5H264) {
std::unique_ptr<NetworkEmulationManager> network_emulation_manager =
CreateNetworkEmulationManager();
BuiltInNetworkBehaviorConfig config;
config.loss_percent = 5;
config.queue_delay_ms = 50;
auto fixture = CreateTestFixture(
- GetTestName("pc_foreman_cif_delay_50_0_plr_5_H264"),
+ "pc_foreman_cif_delay_50_0_plr_5_H264_generic_descriptor",
CreateTwoNetworkLinks(network_emulation_manager.get(), config),
[](PeerConfigurer* alice) {
VideoConfig video(352, 288, 30);
@@ -828,7 +810,7 @@ TEST(PCFullStackTest, ForemanCif500kbps100ms) {
fixture->Run(std::move(run_params));
}
-TEST_P(PCGenericDescriptorTest, ForemanCif500kbps100msLimitedQueue) {
+TEST(PCGenericDescriptorTest, ForemanCif500kbps100msLimitedQueue) {
std::unique_ptr<NetworkEmulationManager> network_emulation_manager =
CreateNetworkEmulationManager();
BuiltInNetworkBehaviorConfig config;
@@ -836,7 +818,7 @@ TEST_P(PCGenericDescriptorTest, ForemanCif500kbps100msLimitedQueue) {
config.queue_delay_ms = 100;
config.link_capacity_kbps = 500;
auto fixture = CreateTestFixture(
- GetTestName("pc_foreman_cif_500kbps_100ms_32pkts_queue"),
+ "pc_foreman_cif_500kbps_100ms_32pkts_queue_generic_descriptor",
CreateTwoNetworkLinks(network_emulation_manager.get(), config),
[](PeerConfigurer* alice) {
VideoConfig video(352, 288, 30);
@@ -956,7 +938,7 @@ TEST(PCFullStackTest, ConferenceMotionHd1TLModerateLimitsWhitelistVp8) {
/*
// TODO(bugs.webrtc.org/10639) requires simulcast/SVC support in PC framework
-TEST_P(PCGenericDescriptorTest, ConferenceMotionHd2TLModerateLimits) {
+TEST(PCGenericDescriptorTest, ConferenceMotionHd2TLModerateLimits) {
auto fixture = CreateVideoQualityTestFixture();
ParamsWithLogging conf_motion_hd;
conf_motion_hd.call.send_side_bwe = true;
@@ -970,7 +952,7 @@ TEST_P(PCGenericDescriptorTest, ConferenceMotionHd2TLModerateLimits) {
false, false,
false, ClipNameToClipPath("ConferenceMotion_1280_720_50")};
conf_motion_hd.analyzer = {
- GetTestName("conference_motion_hd_2tl_moderate_limits"), 0.0, 0.0,
+ "conference_motion_hd_2tl_moderate_limits_generic_descriptor", 0.0, 0.0,
kTestDurationSec};
conf_motion_hd.config->queue_length_packets = 50;
conf_motion_hd.config->loss_percent = 3;
@@ -1295,7 +1277,7 @@ TEST(PCFullStackTest, ScreenshareSlidesVP8_2TL_Scroll) {
}
// TODO(bugs.webrtc.org/10639) requires simulcast/SVC support in PC framework
-TEST_P(PCGenericDescriptorTest, ScreenshareSlidesVP8_2TL_LossyNet) {
+TEST(PCGenericDescriptorTest, ScreenshareSlidesVP8_2TL_LossyNet) {
auto fixture = CreateVideoQualityTestFixture();
ParamsWithLogging screenshare;
screenshare.call.send_side_bwe = true;
@@ -1303,12 +1285,12 @@ TEST_P(PCGenericDescriptorTest, ScreenshareSlidesVP8_2TL_LossyNet) {
1000000, false, "VP8", 2, 1, 400000,
false, false, false, ""};
screenshare.screenshare[0] = {true, false, 10};
- screenshare.analyzer = {GetTestName("screenshare_slides_lossy_net"), 0.0, 0.0,
- kTestDurationSec};
+ screenshare.analyzer = {"screenshare_slides_lossy_net_generic_descriptor",
+ 0.0, 0.0, kTestDurationSec};
screenshare.config->loss_percent = 5;
screenshare.config->queue_delay_ms = 200;
screenshare.config->link_capacity_kbps = 500;
- screenshare.call.generic_descriptor = GenericDescriptorEnabled();
+ screenshare.call.generic_descriptor = true;
fixture->RunWithAnalyzer(screenshare);
}
@@ -1813,12 +1795,6 @@ TEST(PCFullStackTest, MAYBE_LargeRoomVP8_50thumb) {
}
*/
-INSTANTIATE_TEST_SUITE_P(
- PCFullStackTest,
- PCGenericDescriptorTest,
- ::testing::Values("WebRTC-GenericDescriptor/Disabled/",
- "WebRTC-GenericDescriptor/Enabled/"));
-
class PCDualStreamsTest : public ::testing::TestWithParam<int> {};
/*
diff --git a/chromium/third_party/webrtc/video/receive_statistics_proxy2.cc b/chromium/third_party/webrtc/video/receive_statistics_proxy2.cc
index 0ba4d5d44be..3cce3c8ea4e 100644
--- a/chromium/third_party/webrtc/video/receive_statistics_proxy2.cc
+++ b/chromium/third_party/webrtc/video/receive_statistics_proxy2.cc
@@ -782,10 +782,10 @@ void ReceiveStatisticsProxy::RtcpPacketTypesCounterUpdated(
return;
if (!IsCurrentTaskQueueOrThread(worker_thread_)) {
- // RtpRtcp::Configuration has a single RtcpPacketTypeCounterObserver and
- // that same configuration may be used for both receiver and sender
- // (see ModuleRtpRtcpImpl::ModuleRtpRtcpImpl).
- // The RTCPSender implementation currently makes calls to this function on a
+ // RtpRtcpInterface::Configuration has a single
+ // RtcpPacketTypeCounterObserver and that same configuration may be used for
+ // both receiver and sender (see ModuleRtpRtcpImpl::ModuleRtpRtcpImpl). The
+ // RTCPSender implementation currently makes calls to this function on a
// process thread whereas the RTCPReceiver implementation calls back on the
// [main] worker thread.
// So until the sender implementation has been updated, we work around this
@@ -1002,7 +1002,8 @@ void ReceiveStatisticsProxy::OnCompleteFrame(bool is_keyframe,
}
void ReceiveStatisticsProxy::OnDroppedFrames(uint32_t frames_dropped) {
- RTC_DCHECK_RUN_ON(&decode_queue_);
+ // Can be called on either the decode queue or the worker thread
+ // See FrameBuffer2 for more details.
worker_thread_->PostTask(ToQueuedTask(task_safety_, [frames_dropped, this]() {
RTC_DCHECK_RUN_ON(&main_thread_);
stats_.frames_dropped += frames_dropped;
diff --git a/chromium/third_party/webrtc/video/rtp_video_stream_receiver.cc b/chromium/third_party/webrtc/video/rtp_video_stream_receiver.cc
index e1dd736be61..8bbb5866a07 100644
--- a/chromium/third_party/webrtc/video/rtp_video_stream_receiver.cc
+++ b/chromium/third_party/webrtc/video/rtp_video_stream_receiver.cc
@@ -25,7 +25,6 @@
#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "modules/rtp_rtcp/include/receive_statistics.h"
#include "modules/rtp_rtcp/include/rtp_cvo.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
#include "modules/rtp_rtcp/include/ulpfec_receiver.h"
#include "modules/rtp_rtcp/source/create_video_rtp_depacketizer.h"
#include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h"
@@ -35,13 +34,14 @@
#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
#include "modules/rtp_rtcp/source/rtp_rtcp_config.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "modules/rtp_rtcp/source/video_rtp_depacketizer.h"
#include "modules/rtp_rtcp/source/video_rtp_depacketizer_raw.h"
#include "modules/utility/include/process_thread.h"
+#include "modules/video_coding/deprecated/nack_module.h"
#include "modules/video_coding/frame_object.h"
#include "modules/video_coding/h264_sprop_parameter_sets.h"
#include "modules/video_coding/h264_sps_pps_tracker.h"
-#include "modules/video_coding/nack_module.h"
#include "modules/video_coding/packet_buffer.h"
#include "rtc_base/checks.h"
#include "rtc_base/location.h"
@@ -77,8 +77,6 @@ int PacketBufferMaxSize() {
return packet_buffer_max_size;
}
-} // namespace
-
std::unique_ptr<RtpRtcp> CreateRtpRtcpModule(
Clock* clock,
ReceiveStatistics* receive_statistics,
@@ -87,7 +85,7 @@ std::unique_ptr<RtpRtcp> CreateRtpRtcpModule(
RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer,
RtcpCnameCallback* rtcp_cname_callback,
uint32_t local_ssrc) {
- RtpRtcp::Configuration configuration;
+ RtpRtcpInterface::Configuration configuration;
configuration.clock = clock;
configuration.audio = false;
configuration.receiver_only = true;
@@ -99,7 +97,7 @@ std::unique_ptr<RtpRtcp> CreateRtpRtcpModule(
configuration.rtcp_cname_callback = rtcp_cname_callback;
configuration.local_media_ssrc = local_ssrc;
- std::unique_ptr<RtpRtcp> rtp_rtcp = RtpRtcp::Create(configuration);
+ std::unique_ptr<RtpRtcp> rtp_rtcp = RtpRtcp::DEPRECATED_Create(configuration);
rtp_rtcp->SetRTCPStatus(RtcpMode::kCompound);
return rtp_rtcp;
@@ -107,6 +105,8 @@ std::unique_ptr<RtpRtcp> CreateRtpRtcpModule(
static const int kPacketLogIntervalMs = 10000;
+} // namespace
+
RtpVideoStreamReceiver::RtcpFeedbackBuffer::RtcpFeedbackBuffer(
KeyFrameRequestSender* key_frame_request_sender,
NackSender* nack_sender,
@@ -136,7 +136,7 @@ void RtpVideoStreamReceiver::RtcpFeedbackBuffer::SendNack(
if (!buffering_allowed) {
// Note that while *buffering* is not allowed, *batching* is, meaning that
// previously buffered messages may be sent along with the current message.
- SendBufferedRtcpFeedback();
+ SendRtcpFeedback(ConsumeRtcpFeedbackLocked());
}
}
@@ -155,34 +155,44 @@ void RtpVideoStreamReceiver::RtcpFeedbackBuffer::SendLossNotification(
}
void RtpVideoStreamReceiver::RtcpFeedbackBuffer::SendBufferedRtcpFeedback() {
- bool request_key_frame = false;
- std::vector<uint16_t> nack_sequence_numbers;
- absl::optional<LossNotificationState> lntf_state;
+ SendRtcpFeedback(ConsumeRtcpFeedback());
+}
- {
- rtc::CritScope lock(&cs_);
- std::swap(request_key_frame, request_key_frame_);
- std::swap(nack_sequence_numbers, nack_sequence_numbers_);
- std::swap(lntf_state, lntf_state_);
- }
+RtpVideoStreamReceiver::RtcpFeedbackBuffer::ConsumedRtcpFeedback
+RtpVideoStreamReceiver::RtcpFeedbackBuffer::ConsumeRtcpFeedback() {
+ rtc::CritScope lock(&cs_);
+ return ConsumeRtcpFeedbackLocked();
+}
+
+RtpVideoStreamReceiver::RtcpFeedbackBuffer::ConsumedRtcpFeedback
+RtpVideoStreamReceiver::RtcpFeedbackBuffer::ConsumeRtcpFeedbackLocked() {
+ ConsumedRtcpFeedback feedback;
+ std::swap(feedback.request_key_frame, request_key_frame_);
+ std::swap(feedback.nack_sequence_numbers, nack_sequence_numbers_);
+ std::swap(feedback.lntf_state, lntf_state_);
+ return feedback;
+}
- if (lntf_state) {
+void RtpVideoStreamReceiver::RtcpFeedbackBuffer::SendRtcpFeedback(
+ ConsumedRtcpFeedback feedback) {
+ if (feedback.lntf_state) {
// If either a NACK or a key frame request is sent, we should buffer
// the LNTF and wait for them (NACK or key frame request) to trigger
// the compound feedback message.
// Otherwise, the LNTF should be sent out immediately.
const bool buffering_allowed =
- request_key_frame || !nack_sequence_numbers.empty();
+ feedback.request_key_frame || !feedback.nack_sequence_numbers.empty();
loss_notification_sender_->SendLossNotification(
- lntf_state->last_decoded_seq_num, lntf_state->last_received_seq_num,
- lntf_state->decodability_flag, buffering_allowed);
+ feedback.lntf_state->last_decoded_seq_num,
+ feedback.lntf_state->last_received_seq_num,
+ feedback.lntf_state->decodability_flag, buffering_allowed);
}
- if (request_key_frame) {
+ if (feedback.request_key_frame) {
key_frame_request_sender_->RequestKeyFrame();
- } else if (!nack_sequence_numbers.empty()) {
- nack_sender_->SendNack(nack_sequence_numbers, true);
+ } else if (!feedback.nack_sequence_numbers.empty()) {
+ nack_sender_->SendNack(feedback.nack_sequence_numbers, true);
}
}
@@ -305,8 +315,8 @@ RtpVideoStreamReceiver::RtpVideoStreamReceiver(
}
if (config_.rtp.nack.rtp_history_ms != 0) {
- nack_module_ = std::make_unique<NackModule>(clock_, &rtcp_feedback_buffer_,
- &rtcp_feedback_buffer_);
+ nack_module_ = std::make_unique<DEPRECATED_NackModule>(
+ clock_, &rtcp_feedback_buffer_, &rtcp_feedback_buffer_);
process_thread_->RegisterModule(nack_module_.get(), RTC_FROM_HERE);
}
@@ -507,7 +517,6 @@ void RtpVideoStreamReceiver::OnReceivedPayloadData(
video_header.content_type = VideoContentType::UNSPECIFIED;
video_header.video_timing.flags = VideoSendTiming::kInvalid;
video_header.is_last_packet_in_frame |= rtp_packet.Marker();
- video_header.frame_marking.temporal_id = kNoTemporalIdx;
if (const auto* vp9_header =
absl::get_if<RTPVideoHeaderVP9>(&video_header.video_type_header)) {
@@ -525,7 +534,6 @@ void RtpVideoStreamReceiver::OnReceivedPayloadData(
} else {
rtp_packet.GetExtension<PlayoutDelayLimits>(&video_header.playout_delay);
}
- rtp_packet.GetExtension<FrameMarkingExtension>(&video_header.frame_marking);
ParseGenericDependenciesResult generic_descriptor_state =
ParseGenericDependenciesExtension(rtp_packet, &video_header);
diff --git a/chromium/third_party/webrtc/video/rtp_video_stream_receiver.h b/chromium/third_party/webrtc/video/rtp_video_stream_receiver.h
index 0289f23a078..68e23eee53c 100644
--- a/chromium/third_party/webrtc/video/rtp_video_stream_receiver.h
+++ b/chromium/third_party/webrtc/video/rtp_video_stream_receiver.h
@@ -53,7 +53,7 @@
namespace webrtc {
-class NackModule;
+class DEPRECATED_NackModule;
class PacketRouter;
class ProcessThread;
class ReceiveStatistics;
@@ -69,7 +69,8 @@ class RtpVideoStreamReceiver : public LossNotificationSender,
public KeyFrameRequestSender,
public video_coding::OnCompleteFrameCallback,
public OnDecryptedFrameCallback,
- public OnDecryptionStatusChangeCallback {
+ public OnDecryptionStatusChangeCallback,
+ public RtpVideoFrameReceiver {
public:
// DEPRECATED due to dependency on ReceiveStatisticsProxy.
RtpVideoStreamReceiver(
@@ -205,9 +206,11 @@ class RtpVideoStreamReceiver : public LossNotificationSender,
void AddSecondarySink(RtpPacketSinkInterface* sink);
void RemoveSecondarySink(const RtpPacketSinkInterface* sink);
- virtual void ManageFrame(std::unique_ptr<video_coding::RtpFrameObject> frame);
-
private:
+ // Implements RtpVideoFrameReceiver.
+ void ManageFrame(
+ std::unique_ptr<video_coding::RtpFrameObject> frame) override;
+
// Used for buffering RTCP feedback messages and sending them all together.
// Note:
// 1. Key frame requests and NACKs are mutually exclusive, with the
@@ -225,35 +228,23 @@ class RtpVideoStreamReceiver : public LossNotificationSender,
~RtcpFeedbackBuffer() override = default;
// KeyFrameRequestSender implementation.
- void RequestKeyFrame() override;
+ void RequestKeyFrame() RTC_LOCKS_EXCLUDED(cs_) override;
// NackSender implementation.
void SendNack(const std::vector<uint16_t>& sequence_numbers,
- bool buffering_allowed) override;
+ bool buffering_allowed) RTC_LOCKS_EXCLUDED(cs_) override;
// LossNotificationSender implementation.
void SendLossNotification(uint16_t last_decoded_seq_num,
uint16_t last_received_seq_num,
bool decodability_flag,
- bool buffering_allowed) override;
+ bool buffering_allowed)
+ RTC_LOCKS_EXCLUDED(cs_) override;
// Send all RTCP feedback messages buffered thus far.
- void SendBufferedRtcpFeedback();
+ void SendBufferedRtcpFeedback() RTC_LOCKS_EXCLUDED(cs_);
private:
- KeyFrameRequestSender* const key_frame_request_sender_;
- NackSender* const nack_sender_;
- LossNotificationSender* const loss_notification_sender_;
-
- // NACKs are accessible from two threads due to nack_module_ being a module.
- rtc::CriticalSection cs_;
-
- // Key-frame-request-related state.
- bool request_key_frame_ RTC_GUARDED_BY(cs_);
-
- // NACK-related state.
- std::vector<uint16_t> nack_sequence_numbers_ RTC_GUARDED_BY(cs_);
-
// LNTF-related state.
struct LossNotificationState {
LossNotificationState(uint16_t last_decoded_seq_num,
@@ -267,6 +258,31 @@ class RtpVideoStreamReceiver : public LossNotificationSender,
uint16_t last_received_seq_num;
bool decodability_flag;
};
+ struct ConsumedRtcpFeedback {
+ bool request_key_frame = false;
+ std::vector<uint16_t> nack_sequence_numbers;
+ absl::optional<LossNotificationState> lntf_state;
+ };
+
+ ConsumedRtcpFeedback ConsumeRtcpFeedback() RTC_LOCKS_EXCLUDED(cs_);
+ ConsumedRtcpFeedback ConsumeRtcpFeedbackLocked()
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(cs_);
+ // This method is called both with and without cs_ held.
+ void SendRtcpFeedback(ConsumedRtcpFeedback feedback);
+
+ KeyFrameRequestSender* const key_frame_request_sender_;
+ NackSender* const nack_sender_;
+ LossNotificationSender* const loss_notification_sender_;
+
+ // NACKs are accessible from two threads due to nack_module_ being a module.
+ rtc::CriticalSection cs_;
+
+ // Key-frame-request-related state.
+ bool request_key_frame_ RTC_GUARDED_BY(cs_);
+
+ // NACK-related state.
+ std::vector<uint16_t> nack_sequence_numbers_ RTC_GUARDED_BY(cs_);
+
absl::optional<LossNotificationState> lntf_state_ RTC_GUARDED_BY(cs_);
};
enum ParseGenericDependenciesResult {
@@ -317,7 +333,7 @@ class RtpVideoStreamReceiver : public LossNotificationSender,
KeyFrameRequestSender* const keyframe_request_sender_;
RtcpFeedbackBuffer rtcp_feedback_buffer_;
- std::unique_ptr<NackModule> nack_module_;
+ std::unique_ptr<DEPRECATED_NackModule> nack_module_;
std::unique_ptr<LossNotificationController> loss_notification_controller_;
video_coding::PacketBuffer packet_buffer_;
diff --git a/chromium/third_party/webrtc/video/rtp_video_stream_receiver2.cc b/chromium/third_party/webrtc/video/rtp_video_stream_receiver2.cc
new file mode 100644
index 00000000000..3f11bb77c48
--- /dev/null
+++ b/chromium/third_party/webrtc/video/rtp_video_stream_receiver2.cc
@@ -0,0 +1,1154 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video/rtp_video_stream_receiver2.h"
+
+#include <algorithm>
+#include <limits>
+#include <memory>
+#include <utility>
+#include <vector>
+
+#include "absl/algorithm/container.h"
+#include "absl/base/macros.h"
+#include "absl/memory/memory.h"
+#include "absl/types/optional.h"
+#include "media/base/media_constants.h"
+#include "modules/pacing/packet_router.h"
+#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
+#include "modules/rtp_rtcp/include/receive_statistics.h"
+#include "modules/rtp_rtcp/include/rtp_cvo.h"
+#include "modules/rtp_rtcp/include/ulpfec_receiver.h"
+#include "modules/rtp_rtcp/source/create_video_rtp_depacketizer.h"
+#include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h"
+#include "modules/rtp_rtcp/source/rtp_format.h"
+#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h"
+#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h"
+#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
+#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_config.h"
+#include "modules/rtp_rtcp/source/video_rtp_depacketizer.h"
+#include "modules/rtp_rtcp/source/video_rtp_depacketizer_raw.h"
+#include "modules/utility/include/process_thread.h"
+#include "modules/video_coding/frame_object.h"
+#include "modules/video_coding/h264_sprop_parameter_sets.h"
+#include "modules/video_coding/h264_sps_pps_tracker.h"
+#include "modules/video_coding/nack_module2.h"
+#include "modules/video_coding/packet_buffer.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/location.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/strings/string_builder.h"
+#include "system_wrappers/include/field_trial.h"
+#include "system_wrappers/include/metrics.h"
+#include "system_wrappers/include/ntp_time.h"
+#include "video/receive_statistics_proxy2.h"
+
+namespace webrtc {
+
+namespace {
+// TODO(philipel): Change kPacketBufferStartSize back to 32 in M63 see:
+// crbug.com/752886
+constexpr int kPacketBufferStartSize = 512;
+constexpr int kPacketBufferMaxSize = 2048;
+
+int PacketBufferMaxSize() {
+ // The group here must be a positive power of 2, in which case that is used as
+ // size. All other values shall result in the default value being used.
+ const std::string group_name =
+ webrtc::field_trial::FindFullName("WebRTC-PacketBufferMaxSize");
+ int packet_buffer_max_size = kPacketBufferMaxSize;
+ if (!group_name.empty() &&
+ (sscanf(group_name.c_str(), "%d", &packet_buffer_max_size) != 1 ||
+ packet_buffer_max_size <= 0 ||
+ // Verify that the number is a positive power of 2.
+ (packet_buffer_max_size & (packet_buffer_max_size - 1)) != 0)) {
+ RTC_LOG(LS_WARNING) << "Invalid packet buffer max size: " << group_name;
+ packet_buffer_max_size = kPacketBufferMaxSize;
+ }
+ return packet_buffer_max_size;
+}
+
+std::unique_ptr<ModuleRtpRtcpImpl2> CreateRtpRtcpModule(
+ Clock* clock,
+ ReceiveStatistics* receive_statistics,
+ Transport* outgoing_transport,
+ RtcpRttStats* rtt_stats,
+ RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer,
+ RtcpCnameCallback* rtcp_cname_callback,
+ uint32_t local_ssrc) {
+ RtpRtcpInterface::Configuration configuration;
+ configuration.clock = clock;
+ configuration.audio = false;
+ configuration.receiver_only = true;
+ configuration.receive_statistics = receive_statistics;
+ configuration.outgoing_transport = outgoing_transport;
+ configuration.rtt_stats = rtt_stats;
+ configuration.rtcp_packet_type_counter_observer =
+ rtcp_packet_type_counter_observer;
+ configuration.rtcp_cname_callback = rtcp_cname_callback;
+ configuration.local_media_ssrc = local_ssrc;
+
+ std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp =
+ ModuleRtpRtcpImpl2::Create(configuration);
+ rtp_rtcp->SetRTCPStatus(RtcpMode::kCompound);
+
+ return rtp_rtcp;
+}
+
+std::unique_ptr<NackModule2> MaybeConstructNackModule(
+ TaskQueueBase* current_queue,
+ const VideoReceiveStream::Config& config,
+ Clock* clock,
+ NackSender* nack_sender,
+ KeyFrameRequestSender* keyframe_request_sender) {
+ if (config.rtp.nack.rtp_history_ms == 0)
+ return nullptr;
+
+ return std::make_unique<NackModule2>(current_queue, clock, nack_sender,
+ keyframe_request_sender);
+}
+
+static const int kPacketLogIntervalMs = 10000;
+
+} // namespace
+
+RtpVideoStreamReceiver2::RtcpFeedbackBuffer::RtcpFeedbackBuffer(
+ KeyFrameRequestSender* key_frame_request_sender,
+ NackSender* nack_sender,
+ LossNotificationSender* loss_notification_sender)
+ : key_frame_request_sender_(key_frame_request_sender),
+ nack_sender_(nack_sender),
+ loss_notification_sender_(loss_notification_sender),
+ request_key_frame_(false) {
+ RTC_DCHECK(key_frame_request_sender_);
+ RTC_DCHECK(nack_sender_);
+ RTC_DCHECK(loss_notification_sender_);
+}
+
+void RtpVideoStreamReceiver2::RtcpFeedbackBuffer::RequestKeyFrame() {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ request_key_frame_ = true;
+}
+
+void RtpVideoStreamReceiver2::RtcpFeedbackBuffer::SendNack(
+ const std::vector<uint16_t>& sequence_numbers,
+ bool buffering_allowed) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ RTC_DCHECK(!sequence_numbers.empty());
+ nack_sequence_numbers_.insert(nack_sequence_numbers_.end(),
+ sequence_numbers.cbegin(),
+ sequence_numbers.cend());
+ if (!buffering_allowed) {
+ // Note that while *buffering* is not allowed, *batching* is, meaning that
+ // previously buffered messages may be sent along with the current message.
+ SendBufferedRtcpFeedback();
+ }
+}
+
+void RtpVideoStreamReceiver2::RtcpFeedbackBuffer::SendLossNotification(
+ uint16_t last_decoded_seq_num,
+ uint16_t last_received_seq_num,
+ bool decodability_flag,
+ bool buffering_allowed) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ RTC_DCHECK(buffering_allowed);
+ RTC_DCHECK(!lntf_state_)
+ << "SendLossNotification() called twice in a row with no call to "
+ "SendBufferedRtcpFeedback() in between.";
+ lntf_state_ = absl::make_optional<LossNotificationState>(
+ last_decoded_seq_num, last_received_seq_num, decodability_flag);
+}
+
+void RtpVideoStreamReceiver2::RtcpFeedbackBuffer::SendBufferedRtcpFeedback() {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+
+ bool request_key_frame = false;
+ std::vector<uint16_t> nack_sequence_numbers;
+ absl::optional<LossNotificationState> lntf_state;
+
+ std::swap(request_key_frame, request_key_frame_);
+ std::swap(nack_sequence_numbers, nack_sequence_numbers_);
+ std::swap(lntf_state, lntf_state_);
+
+ if (lntf_state) {
+ // If either a NACK or a key frame request is sent, we should buffer
+ // the LNTF and wait for them (NACK or key frame request) to trigger
+ // the compound feedback message.
+ // Otherwise, the LNTF should be sent out immediately.
+ const bool buffering_allowed =
+ request_key_frame || !nack_sequence_numbers.empty();
+
+ loss_notification_sender_->SendLossNotification(
+ lntf_state->last_decoded_seq_num, lntf_state->last_received_seq_num,
+ lntf_state->decodability_flag, buffering_allowed);
+ }
+
+ if (request_key_frame) {
+ key_frame_request_sender_->RequestKeyFrame();
+ } else if (!nack_sequence_numbers.empty()) {
+ nack_sender_->SendNack(nack_sequence_numbers, true);
+ }
+}
+
+RtpVideoStreamReceiver2::RtpVideoStreamReceiver2(
+ TaskQueueBase* current_queue,
+ Clock* clock,
+ Transport* transport,
+ RtcpRttStats* rtt_stats,
+ PacketRouter* packet_router,
+ const VideoReceiveStream::Config* config,
+ ReceiveStatistics* rtp_receive_statistics,
+ RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer,
+ RtcpCnameCallback* rtcp_cname_callback,
+ ProcessThread* process_thread,
+ NackSender* nack_sender,
+ KeyFrameRequestSender* keyframe_request_sender,
+ video_coding::OnCompleteFrameCallback* complete_frame_callback,
+ rtc::scoped_refptr<FrameDecryptorInterface> frame_decryptor,
+ rtc::scoped_refptr<FrameTransformerInterface> frame_transformer)
+ : clock_(clock),
+ config_(*config),
+ packet_router_(packet_router),
+ process_thread_(process_thread),
+ ntp_estimator_(clock),
+ rtp_header_extensions_(config_.rtp.extensions),
+ forced_playout_delay_max_ms_("max_ms", absl::nullopt),
+ forced_playout_delay_min_ms_("min_ms", absl::nullopt),
+ rtp_receive_statistics_(rtp_receive_statistics),
+ ulpfec_receiver_(UlpfecReceiver::Create(config->rtp.remote_ssrc,
+ this,
+ config->rtp.extensions)),
+ receiving_(false),
+ last_packet_log_ms_(-1),
+ rtp_rtcp_(CreateRtpRtcpModule(clock,
+ rtp_receive_statistics_,
+ transport,
+ rtt_stats,
+ rtcp_packet_type_counter_observer,
+ rtcp_cname_callback,
+ config_.rtp.local_ssrc)),
+ complete_frame_callback_(complete_frame_callback),
+ keyframe_request_sender_(keyframe_request_sender),
+ // TODO(bugs.webrtc.org/10336): Let |rtcp_feedback_buffer_| communicate
+ // directly with |rtp_rtcp_|.
+ rtcp_feedback_buffer_(this, nack_sender, this),
+ nack_module_(MaybeConstructNackModule(current_queue,
+ config_,
+ clock_,
+ &rtcp_feedback_buffer_,
+ &rtcp_feedback_buffer_)),
+ packet_buffer_(clock_, kPacketBufferStartSize, PacketBufferMaxSize()),
+ has_received_frame_(false),
+ frames_decryptable_(false),
+ absolute_capture_time_receiver_(clock) {
+ constexpr bool remb_candidate = true;
+ if (packet_router_)
+ packet_router_->AddReceiveRtpModule(rtp_rtcp_.get(), remb_candidate);
+
+ RTC_DCHECK(config_.rtp.rtcp_mode != RtcpMode::kOff)
+ << "A stream should not be configured with RTCP disabled. This value is "
+ "reserved for internal usage.";
+ // TODO(pbos): What's an appropriate local_ssrc for receive-only streams?
+ RTC_DCHECK(config_.rtp.local_ssrc != 0);
+ RTC_DCHECK(config_.rtp.remote_ssrc != config_.rtp.local_ssrc);
+
+ rtp_rtcp_->SetRTCPStatus(config_.rtp.rtcp_mode);
+ rtp_rtcp_->SetRemoteSSRC(config_.rtp.remote_ssrc);
+
+ static const int kMaxPacketAgeToNack = 450;
+ const int max_reordering_threshold = (config_.rtp.nack.rtp_history_ms > 0)
+ ? kMaxPacketAgeToNack
+ : kDefaultMaxReorderingThreshold;
+ rtp_receive_statistics_->SetMaxReorderingThreshold(config_.rtp.remote_ssrc,
+ max_reordering_threshold);
+ // TODO(nisse): For historic reasons, we applied the above
+ // max_reordering_threshold also for RTX stats, which makes little sense since
+ // we don't NACK rtx packets. Consider deleting the below block, and rely on
+ // the default threshold.
+ if (config_.rtp.rtx_ssrc) {
+ rtp_receive_statistics_->SetMaxReorderingThreshold(
+ config_.rtp.rtx_ssrc, max_reordering_threshold);
+ }
+ if (config_.rtp.rtcp_xr.receiver_reference_time_report)
+ rtp_rtcp_->SetRtcpXrRrtrStatus(true);
+
+ ParseFieldTrial(
+ {&forced_playout_delay_max_ms_, &forced_playout_delay_min_ms_},
+ field_trial::FindFullName("WebRTC-ForcePlayoutDelay"));
+
+ process_thread_->RegisterModule(rtp_rtcp_.get(), RTC_FROM_HERE);
+
+ if (config_.rtp.lntf.enabled) {
+ loss_notification_controller_ =
+ std::make_unique<LossNotificationController>(&rtcp_feedback_buffer_,
+ &rtcp_feedback_buffer_);
+ }
+
+ reference_finder_ =
+ std::make_unique<video_coding::RtpFrameReferenceFinder>(this);
+
+ // Only construct the encrypted receiver if frame encryption is enabled.
+ if (config_.crypto_options.sframe.require_frame_encryption) {
+ buffered_frame_decryptor_ =
+ std::make_unique<BufferedFrameDecryptor>(this, this);
+ if (frame_decryptor != nullptr) {
+ buffered_frame_decryptor_->SetFrameDecryptor(std::move(frame_decryptor));
+ }
+ }
+
+ if (frame_transformer) {
+ frame_transformer_delegate_ = new rtc::RefCountedObject<
+ RtpVideoStreamReceiverFrameTransformerDelegate>(
+ this, std::move(frame_transformer), rtc::Thread::Current(),
+ config_.rtp.remote_ssrc);
+ frame_transformer_delegate_->Init();
+ }
+}
+
+RtpVideoStreamReceiver2::~RtpVideoStreamReceiver2() {
+ RTC_DCHECK(secondary_sinks_.empty());
+
+ process_thread_->DeRegisterModule(rtp_rtcp_.get());
+
+ if (packet_router_)
+ packet_router_->RemoveReceiveRtpModule(rtp_rtcp_.get());
+ UpdateHistograms();
+ if (frame_transformer_delegate_)
+ frame_transformer_delegate_->Reset();
+}
+
+void RtpVideoStreamReceiver2::AddReceiveCodec(
+ const VideoCodec& video_codec,
+ const std::map<std::string, std::string>& codec_params,
+ bool raw_payload) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ payload_type_map_.emplace(
+ video_codec.plType,
+ raw_payload ? std::make_unique<VideoRtpDepacketizerRaw>()
+ : CreateVideoRtpDepacketizer(video_codec.codecType));
+ pt_codec_params_.emplace(video_codec.plType, codec_params);
+}
+
+absl::optional<Syncable::Info> RtpVideoStreamReceiver2::GetSyncInfo() const {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ Syncable::Info info;
+ if (rtp_rtcp_->RemoteNTP(&info.capture_time_ntp_secs,
+ &info.capture_time_ntp_frac, nullptr, nullptr,
+ &info.capture_time_source_clock) != 0) {
+ return absl::nullopt;
+ }
+
+ if (!last_received_rtp_timestamp_ || !last_received_rtp_system_time_ms_) {
+ return absl::nullopt;
+ }
+ info.latest_received_capture_timestamp = *last_received_rtp_timestamp_;
+ info.latest_receive_time_ms = *last_received_rtp_system_time_ms_;
+
+ // Leaves info.current_delay_ms uninitialized.
+ return info;
+}
+
+RtpVideoStreamReceiver2::ParseGenericDependenciesResult
+RtpVideoStreamReceiver2::ParseGenericDependenciesExtension(
+ const RtpPacketReceived& rtp_packet,
+ RTPVideoHeader* video_header) {
+ if (rtp_packet.HasExtension<RtpDependencyDescriptorExtension>()) {
+ webrtc::DependencyDescriptor dependency_descriptor;
+ if (!rtp_packet.GetExtension<RtpDependencyDescriptorExtension>(
+ video_structure_.get(), &dependency_descriptor)) {
+ // Descriptor is there, but failed to parse. Either it is invalid,
+ // or too old packet (after relevant video_structure_ changed),
+ // or too new packet (before relevant video_structure_ arrived).
+ // Drop such packet to be on the safe side.
+ // TODO(bugs.webrtc.org/10342): Stash too new packet.
+ RTC_LOG(LS_WARNING) << "ssrc: " << rtp_packet.Ssrc()
+ << " Failed to parse dependency descriptor.";
+ return kDropPacket;
+ }
+ if (dependency_descriptor.attached_structure != nullptr &&
+ !dependency_descriptor.first_packet_in_frame) {
+ RTC_LOG(LS_WARNING) << "ssrc: " << rtp_packet.Ssrc()
+ << "Invalid dependency descriptor: structure "
+ "attached to non first packet of a frame.";
+ return kDropPacket;
+ }
+ video_header->is_first_packet_in_frame =
+ dependency_descriptor.first_packet_in_frame;
+ video_header->is_last_packet_in_frame =
+ dependency_descriptor.last_packet_in_frame;
+
+ int64_t frame_id =
+ frame_id_unwrapper_.Unwrap(dependency_descriptor.frame_number);
+ auto& generic_descriptor_info = video_header->generic.emplace();
+ generic_descriptor_info.frame_id = frame_id;
+ generic_descriptor_info.spatial_index =
+ dependency_descriptor.frame_dependencies.spatial_id;
+ generic_descriptor_info.temporal_index =
+ dependency_descriptor.frame_dependencies.temporal_id;
+ for (int fdiff : dependency_descriptor.frame_dependencies.frame_diffs) {
+ generic_descriptor_info.dependencies.push_back(frame_id - fdiff);
+ }
+ generic_descriptor_info.decode_target_indications =
+ dependency_descriptor.frame_dependencies.decode_target_indications;
+ if (dependency_descriptor.resolution) {
+ video_header->width = dependency_descriptor.resolution->Width();
+ video_header->height = dependency_descriptor.resolution->Height();
+ }
+
+ // FrameDependencyStructure is sent in dependency descriptor of the first
+ // packet of a key frame and required for parsed dependency descriptor in
+ // all the following packets until next key frame.
+ // Save it if there is a (potentially) new structure.
+ if (dependency_descriptor.attached_structure) {
+ RTC_DCHECK(dependency_descriptor.first_packet_in_frame);
+ if (video_structure_frame_id_ > frame_id) {
+ RTC_LOG(LS_WARNING)
+ << "Arrived key frame with id " << frame_id << " and structure id "
+ << dependency_descriptor.attached_structure->structure_id
+ << " is older than the latest received key frame with id "
+ << *video_structure_frame_id_ << " and structure id "
+ << video_structure_->structure_id;
+ return kDropPacket;
+ }
+ video_structure_ = std::move(dependency_descriptor.attached_structure);
+ video_structure_frame_id_ = frame_id;
+ video_header->frame_type = VideoFrameType::kVideoFrameKey;
+ } else {
+ video_header->frame_type = VideoFrameType::kVideoFrameDelta;
+ }
+ return kHasGenericDescriptor;
+ }
+
+ RtpGenericFrameDescriptor generic_frame_descriptor;
+ if (!rtp_packet.GetExtension<RtpGenericFrameDescriptorExtension00>(
+ &generic_frame_descriptor)) {
+ return kNoGenericDescriptor;
+ }
+
+ video_header->is_first_packet_in_frame =
+ generic_frame_descriptor.FirstPacketInSubFrame();
+ video_header->is_last_packet_in_frame =
+ generic_frame_descriptor.LastPacketInSubFrame();
+
+ if (generic_frame_descriptor.FirstPacketInSubFrame()) {
+ video_header->frame_type =
+ generic_frame_descriptor.FrameDependenciesDiffs().empty()
+ ? VideoFrameType::kVideoFrameKey
+ : VideoFrameType::kVideoFrameDelta;
+
+ auto& generic_descriptor_info = video_header->generic.emplace();
+ int64_t frame_id =
+ frame_id_unwrapper_.Unwrap(generic_frame_descriptor.FrameId());
+ generic_descriptor_info.frame_id = frame_id;
+ generic_descriptor_info.spatial_index =
+ generic_frame_descriptor.SpatialLayer();
+ generic_descriptor_info.temporal_index =
+ generic_frame_descriptor.TemporalLayer();
+ for (uint16_t fdiff : generic_frame_descriptor.FrameDependenciesDiffs()) {
+ generic_descriptor_info.dependencies.push_back(frame_id - fdiff);
+ }
+ }
+ video_header->width = generic_frame_descriptor.Width();
+ video_header->height = generic_frame_descriptor.Height();
+ return kHasGenericDescriptor;
+}
+
+void RtpVideoStreamReceiver2::OnReceivedPayloadData(
+ rtc::CopyOnWriteBuffer codec_payload,
+ const RtpPacketReceived& rtp_packet,
+ const RTPVideoHeader& video) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ auto packet = std::make_unique<video_coding::PacketBuffer::Packet>(
+ rtp_packet, video, ntp_estimator_.Estimate(rtp_packet.Timestamp()),
+ clock_->TimeInMilliseconds());
+
+ // Try to extrapolate absolute capture time if it is missing.
+ packet->packet_info.set_absolute_capture_time(
+ absolute_capture_time_receiver_.OnReceivePacket(
+ AbsoluteCaptureTimeReceiver::GetSource(packet->packet_info.ssrc(),
+ packet->packet_info.csrcs()),
+ packet->packet_info.rtp_timestamp(),
+ // Assume frequency is the same one for all video frames.
+ kVideoPayloadTypeFrequency,
+ packet->packet_info.absolute_capture_time()));
+
+ RTPVideoHeader& video_header = packet->video_header;
+ video_header.rotation = kVideoRotation_0;
+ video_header.content_type = VideoContentType::UNSPECIFIED;
+ video_header.video_timing.flags = VideoSendTiming::kInvalid;
+ video_header.is_last_packet_in_frame |= rtp_packet.Marker();
+
+ if (const auto* vp9_header =
+ absl::get_if<RTPVideoHeaderVP9>(&video_header.video_type_header)) {
+ video_header.is_last_packet_in_frame |= vp9_header->end_of_frame;
+ video_header.is_first_packet_in_frame |= vp9_header->beginning_of_frame;
+ }
+
+ rtp_packet.GetExtension<VideoOrientation>(&video_header.rotation);
+ rtp_packet.GetExtension<VideoContentTypeExtension>(
+ &video_header.content_type);
+ rtp_packet.GetExtension<VideoTimingExtension>(&video_header.video_timing);
+ if (forced_playout_delay_max_ms_ && forced_playout_delay_min_ms_) {
+ video_header.playout_delay.max_ms = *forced_playout_delay_max_ms_;
+ video_header.playout_delay.min_ms = *forced_playout_delay_min_ms_;
+ } else {
+ rtp_packet.GetExtension<PlayoutDelayLimits>(&video_header.playout_delay);
+ }
+
+ ParseGenericDependenciesResult generic_descriptor_state =
+ ParseGenericDependenciesExtension(rtp_packet, &video_header);
+ if (generic_descriptor_state == kDropPacket)
+ return;
+
+ // Color space should only be transmitted in the last packet of a frame,
+ // therefore, neglect it otherwise so that last_color_space_ is not reset by
+ // mistake.
+ if (video_header.is_last_packet_in_frame) {
+ video_header.color_space = rtp_packet.GetExtension<ColorSpaceExtension>();
+ if (video_header.color_space ||
+ video_header.frame_type == VideoFrameType::kVideoFrameKey) {
+ // Store color space since it's only transmitted when changed or for key
+ // frames. Color space will be cleared if a key frame is transmitted
+ // without color space information.
+ last_color_space_ = video_header.color_space;
+ } else if (last_color_space_) {
+ video_header.color_space = last_color_space_;
+ }
+ }
+
+ if (loss_notification_controller_) {
+ if (rtp_packet.recovered()) {
+ // TODO(bugs.webrtc.org/10336): Implement support for reordering.
+ RTC_LOG(LS_INFO)
+ << "LossNotificationController does not support reordering.";
+ } else if (generic_descriptor_state == kNoGenericDescriptor) {
+ RTC_LOG(LS_WARNING) << "LossNotificationController requires generic "
+ "frame descriptor, but it is missing.";
+ } else {
+ if (video_header.is_first_packet_in_frame) {
+ RTC_DCHECK(video_header.generic);
+ LossNotificationController::FrameDetails frame;
+ frame.is_keyframe =
+ video_header.frame_type == VideoFrameType::kVideoFrameKey;
+ frame.frame_id = video_header.generic->frame_id;
+ frame.frame_dependencies = video_header.generic->dependencies;
+ loss_notification_controller_->OnReceivedPacket(
+ rtp_packet.SequenceNumber(), &frame);
+ } else {
+ loss_notification_controller_->OnReceivedPacket(
+ rtp_packet.SequenceNumber(), nullptr);
+ }
+ }
+ }
+
+ if (nack_module_) {
+ const bool is_keyframe =
+ video_header.is_first_packet_in_frame &&
+ video_header.frame_type == VideoFrameType::kVideoFrameKey;
+
+ packet->times_nacked = nack_module_->OnReceivedPacket(
+ rtp_packet.SequenceNumber(), is_keyframe, rtp_packet.recovered());
+ } else {
+ packet->times_nacked = -1;
+ }
+
+ if (codec_payload.size() == 0) {
+ NotifyReceiverOfEmptyPacket(packet->seq_num);
+ rtcp_feedback_buffer_.SendBufferedRtcpFeedback();
+ return;
+ }
+
+ if (packet->codec() == kVideoCodecH264) {
+ // Only when we start to receive packets will we know what payload type
+ // that will be used. When we know the payload type insert the correct
+ // sps/pps into the tracker.
+ if (packet->payload_type != last_payload_type_) {
+ last_payload_type_ = packet->payload_type;
+ InsertSpsPpsIntoTracker(packet->payload_type);
+ }
+
+ video_coding::H264SpsPpsTracker::FixedBitstream fixed =
+ tracker_.CopyAndFixBitstream(
+ rtc::MakeArrayView(codec_payload.cdata(), codec_payload.size()),
+ &packet->video_header);
+
+ switch (fixed.action) {
+ case video_coding::H264SpsPpsTracker::kRequestKeyframe:
+ rtcp_feedback_buffer_.RequestKeyFrame();
+ rtcp_feedback_buffer_.SendBufferedRtcpFeedback();
+ ABSL_FALLTHROUGH_INTENDED;
+ case video_coding::H264SpsPpsTracker::kDrop:
+ return;
+ case video_coding::H264SpsPpsTracker::kInsert:
+ packet->video_payload = std::move(fixed.bitstream);
+ break;
+ }
+
+ } else {
+ packet->video_payload = std::move(codec_payload);
+ }
+
+ rtcp_feedback_buffer_.SendBufferedRtcpFeedback();
+ frame_counter_.Add(packet->timestamp);
+ OnInsertedPacket(packet_buffer_.InsertPacket(std::move(packet)));
+}
+
+void RtpVideoStreamReceiver2::OnRecoveredPacket(const uint8_t* rtp_packet,
+ size_t rtp_packet_length) {
+ RtpPacketReceived packet;
+ if (!packet.Parse(rtp_packet, rtp_packet_length))
+ return;
+ if (packet.PayloadType() == config_.rtp.red_payload_type) {
+ RTC_LOG(LS_WARNING) << "Discarding recovered packet with RED encapsulation";
+ return;
+ }
+
+ packet.IdentifyExtensions(rtp_header_extensions_);
+ packet.set_payload_type_frequency(kVideoPayloadTypeFrequency);
+ // TODO(nisse): UlpfecReceiverImpl::ProcessReceivedFec passes both
+ // original (decapsulated) media packets and recovered packets to
+ // this callback. We need a way to distinguish, for setting
+ // packet.recovered() correctly. Ideally, move RED decapsulation out
+ // of the Ulpfec implementation.
+
+ ReceivePacket(packet);
+}
+
+// This method handles both regular RTP packets and packets recovered
+// via FlexFEC.
+void RtpVideoStreamReceiver2::OnRtpPacket(const RtpPacketReceived& packet) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+
+ if (!receiving_) {
+ return;
+ }
+
+ if (!packet.recovered()) {
+ // TODO(nisse): Exclude out-of-order packets?
+ int64_t now_ms = clock_->TimeInMilliseconds();
+
+ last_received_rtp_timestamp_ = packet.Timestamp();
+ last_received_rtp_system_time_ms_ = now_ms;
+
+ // Periodically log the RTP header of incoming packets.
+ if (now_ms - last_packet_log_ms_ > kPacketLogIntervalMs) {
+ rtc::StringBuilder ss;
+ ss << "Packet received on SSRC: " << packet.Ssrc()
+ << " with payload type: " << static_cast<int>(packet.PayloadType())
+ << ", timestamp: " << packet.Timestamp()
+ << ", sequence number: " << packet.SequenceNumber()
+ << ", arrival time: " << packet.arrival_time_ms();
+ int32_t time_offset;
+ if (packet.GetExtension<TransmissionOffset>(&time_offset)) {
+ ss << ", toffset: " << time_offset;
+ }
+ uint32_t send_time;
+ if (packet.GetExtension<AbsoluteSendTime>(&send_time)) {
+ ss << ", abs send time: " << send_time;
+ }
+ RTC_LOG(LS_INFO) << ss.str();
+ last_packet_log_ms_ = now_ms;
+ }
+ }
+
+ ReceivePacket(packet);
+
+ // Update receive statistics after ReceivePacket.
+ // Receive statistics will be reset if the payload type changes (make sure
+ // that the first packet is included in the stats).
+ if (!packet.recovered()) {
+ rtp_receive_statistics_->OnRtpPacket(packet);
+ }
+
+ for (RtpPacketSinkInterface* secondary_sink : secondary_sinks_) {
+ secondary_sink->OnRtpPacket(packet);
+ }
+}
+
+void RtpVideoStreamReceiver2::RequestKeyFrame() {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ // TODO(bugs.webrtc.org/10336): Allow the sender to ignore key frame requests
+ // issued by anything other than the LossNotificationController if it (the
+ // sender) is relying on LNTF alone.
+ if (keyframe_request_sender_) {
+ keyframe_request_sender_->RequestKeyFrame();
+ } else {
+ rtp_rtcp_->SendPictureLossIndication();
+ }
+}
+
+void RtpVideoStreamReceiver2::SendLossNotification(
+ uint16_t last_decoded_seq_num,
+ uint16_t last_received_seq_num,
+ bool decodability_flag,
+ bool buffering_allowed) {
+ RTC_DCHECK(config_.rtp.lntf.enabled);
+ rtp_rtcp_->SendLossNotification(last_decoded_seq_num, last_received_seq_num,
+ decodability_flag, buffering_allowed);
+}
+
+bool RtpVideoStreamReceiver2::IsUlpfecEnabled() const {
+ return config_.rtp.ulpfec_payload_type != -1;
+}
+
+bool RtpVideoStreamReceiver2::IsRetransmissionsEnabled() const {
+ return config_.rtp.nack.rtp_history_ms > 0;
+}
+
+void RtpVideoStreamReceiver2::RequestPacketRetransmit(
+ const std::vector<uint16_t>& sequence_numbers) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ rtp_rtcp_->SendNack(sequence_numbers);
+}
+
+bool RtpVideoStreamReceiver2::IsDecryptable() const {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ return frames_decryptable_;
+}
+
+void RtpVideoStreamReceiver2::OnInsertedPacket(
+ video_coding::PacketBuffer::InsertResult result) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ video_coding::PacketBuffer::Packet* first_packet = nullptr;
+ int max_nack_count;
+ int64_t min_recv_time;
+ int64_t max_recv_time;
+ std::vector<rtc::ArrayView<const uint8_t>> payloads;
+ RtpPacketInfos::vector_type packet_infos;
+
+ bool frame_boundary = true;
+ for (auto& packet : result.packets) {
+    // PacketBuffer promises frame boundaries are correctly set on each
+ // packet. Document that assumption with the DCHECKs.
+ RTC_DCHECK_EQ(frame_boundary, packet->is_first_packet_in_frame());
+ if (packet->is_first_packet_in_frame()) {
+ first_packet = packet.get();
+ max_nack_count = packet->times_nacked;
+ min_recv_time = packet->packet_info.receive_time_ms();
+ max_recv_time = packet->packet_info.receive_time_ms();
+ payloads.clear();
+ packet_infos.clear();
+ } else {
+ max_nack_count = std::max(max_nack_count, packet->times_nacked);
+ min_recv_time =
+ std::min(min_recv_time, packet->packet_info.receive_time_ms());
+ max_recv_time =
+ std::max(max_recv_time, packet->packet_info.receive_time_ms());
+ }
+ payloads.emplace_back(packet->video_payload);
+ packet_infos.push_back(packet->packet_info);
+
+ frame_boundary = packet->is_last_packet_in_frame();
+ if (packet->is_last_packet_in_frame()) {
+ auto depacketizer_it = payload_type_map_.find(first_packet->payload_type);
+ RTC_CHECK(depacketizer_it != payload_type_map_.end());
+
+ rtc::scoped_refptr<EncodedImageBuffer> bitstream =
+ depacketizer_it->second->AssembleFrame(payloads);
+ if (!bitstream) {
+ // Failed to assemble a frame. Discard and continue.
+ continue;
+ }
+
+ const video_coding::PacketBuffer::Packet& last_packet = *packet;
+ OnAssembledFrame(std::make_unique<video_coding::RtpFrameObject>(
+ first_packet->seq_num, //
+ last_packet.seq_num, //
+ last_packet.marker_bit, //
+ max_nack_count, //
+ min_recv_time, //
+ max_recv_time, //
+ first_packet->timestamp, //
+ first_packet->ntp_time_ms, //
+ last_packet.video_header.video_timing, //
+ first_packet->payload_type, //
+ first_packet->codec(), //
+ last_packet.video_header.rotation, //
+ last_packet.video_header.content_type, //
+ first_packet->video_header, //
+ last_packet.video_header.color_space, //
+ RtpPacketInfos(std::move(packet_infos)), //
+ std::move(bitstream)));
+ }
+ }
+ RTC_DCHECK(frame_boundary);
+ if (result.buffer_cleared) {
+ RequestKeyFrame();
+ }
+}
+
+void RtpVideoStreamReceiver2::OnAssembledFrame(
+ std::unique_ptr<video_coding::RtpFrameObject> frame) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ RTC_DCHECK(frame);
+
+ const absl::optional<RTPVideoHeader::GenericDescriptorInfo>& descriptor =
+ frame->GetRtpVideoHeader().generic;
+
+ if (loss_notification_controller_ && descriptor) {
+ loss_notification_controller_->OnAssembledFrame(
+ frame->first_seq_num(), descriptor->frame_id,
+ absl::c_linear_search(descriptor->decode_target_indications,
+ DecodeTargetIndication::kDiscardable),
+ descriptor->dependencies);
+ }
+
+ // If frames arrive before a key frame, they would not be decodable.
+ // In that case, request a key frame ASAP.
+ if (!has_received_frame_) {
+ if (frame->FrameType() != VideoFrameType::kVideoFrameKey) {
+ // |loss_notification_controller_|, if present, would have already
+ // requested a key frame when the first packet for the non-key frame
+ // had arrived, so no need to replicate the request.
+ if (!loss_notification_controller_) {
+ RequestKeyFrame();
+ }
+ }
+ has_received_frame_ = true;
+ }
+
+  // Reset |reference_finder_| if |frame| is new and the codec has changed.
+ if (current_codec_) {
+ bool frame_is_newer =
+ AheadOf(frame->Timestamp(), last_assembled_frame_rtp_timestamp_);
+
+ if (frame->codec_type() != current_codec_) {
+ if (frame_is_newer) {
+ // When we reset the |reference_finder_| we don't want new picture ids
+ // to overlap with old picture ids. To ensure that doesn't happen we
+ // start from the |last_completed_picture_id_| and add an offset in case
+ // of reordering.
+ reference_finder_ =
+ std::make_unique<video_coding::RtpFrameReferenceFinder>(
+ this, last_completed_picture_id_ +
+ std::numeric_limits<uint16_t>::max());
+ current_codec_ = frame->codec_type();
+ } else {
+ // Old frame from before the codec switch, discard it.
+ return;
+ }
+ }
+
+ if (frame_is_newer) {
+ last_assembled_frame_rtp_timestamp_ = frame->Timestamp();
+ }
+ } else {
+ current_codec_ = frame->codec_type();
+ last_assembled_frame_rtp_timestamp_ = frame->Timestamp();
+ }
+
+ if (buffered_frame_decryptor_ != nullptr) {
+ buffered_frame_decryptor_->ManageEncryptedFrame(std::move(frame));
+ } else if (frame_transformer_delegate_) {
+ frame_transformer_delegate_->TransformFrame(std::move(frame));
+ } else {
+ reference_finder_->ManageFrame(std::move(frame));
+ }
+}
+
+void RtpVideoStreamReceiver2::OnCompleteFrame(
+ std::unique_ptr<video_coding::EncodedFrame> frame) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ video_coding::RtpFrameObject* rtp_frame =
+ static_cast<video_coding::RtpFrameObject*>(frame.get());
+ last_seq_num_for_pic_id_[rtp_frame->id.picture_id] =
+ rtp_frame->last_seq_num();
+
+ last_completed_picture_id_ =
+ std::max(last_completed_picture_id_, frame->id.picture_id);
+ complete_frame_callback_->OnCompleteFrame(std::move(frame));
+}
+
+void RtpVideoStreamReceiver2::OnDecryptedFrame(
+ std::unique_ptr<video_coding::RtpFrameObject> frame) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ reference_finder_->ManageFrame(std::move(frame));
+}
+
+void RtpVideoStreamReceiver2::OnDecryptionStatusChange(
+ FrameDecryptorInterface::Status status) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ // Called from BufferedFrameDecryptor::DecryptFrame.
+ frames_decryptable_ =
+ (status == FrameDecryptorInterface::Status::kOk) ||
+ (status == FrameDecryptorInterface::Status::kRecoverable);
+}
+
+void RtpVideoStreamReceiver2::SetFrameDecryptor(
+ rtc::scoped_refptr<FrameDecryptorInterface> frame_decryptor) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ if (buffered_frame_decryptor_ == nullptr) {
+ buffered_frame_decryptor_ =
+ std::make_unique<BufferedFrameDecryptor>(this, this);
+ }
+ buffered_frame_decryptor_->SetFrameDecryptor(std::move(frame_decryptor));
+}
+
+void RtpVideoStreamReceiver2::SetDepacketizerToDecoderFrameTransformer(
+ rtc::scoped_refptr<FrameTransformerInterface> frame_transformer) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ frame_transformer_delegate_ =
+ new rtc::RefCountedObject<RtpVideoStreamReceiverFrameTransformerDelegate>(
+ this, std::move(frame_transformer), rtc::Thread::Current(),
+ config_.rtp.remote_ssrc);
+ frame_transformer_delegate_->Init();
+}
+
+void RtpVideoStreamReceiver2::UpdateRtt(int64_t max_rtt_ms) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ if (nack_module_)
+ nack_module_->UpdateRtt(max_rtt_ms);
+}
+
+absl::optional<int64_t> RtpVideoStreamReceiver2::LastReceivedPacketMs() const {
+ return packet_buffer_.LastReceivedPacketMs();
+}
+
+absl::optional<int64_t> RtpVideoStreamReceiver2::LastReceivedKeyframePacketMs()
+ const {
+ return packet_buffer_.LastReceivedKeyframePacketMs();
+}
+
+void RtpVideoStreamReceiver2::AddSecondarySink(RtpPacketSinkInterface* sink) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ RTC_DCHECK(!absl::c_linear_search(secondary_sinks_, sink));
+ secondary_sinks_.push_back(sink);
+}
+
+void RtpVideoStreamReceiver2::RemoveSecondarySink(
+ const RtpPacketSinkInterface* sink) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ auto it = absl::c_find(secondary_sinks_, sink);
+ if (it == secondary_sinks_.end()) {
+ // We might be rolling-back a call whose setup failed mid-way. In such a
+ // case, it's simpler to remove "everything" rather than remember what
+ // has already been added.
+ RTC_LOG(LS_WARNING) << "Removal of unknown sink.";
+ return;
+ }
+ secondary_sinks_.erase(it);
+}
+
+void RtpVideoStreamReceiver2::ManageFrame(
+ std::unique_ptr<video_coding::RtpFrameObject> frame) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ reference_finder_->ManageFrame(std::move(frame));
+}
+
+void RtpVideoStreamReceiver2::ReceivePacket(const RtpPacketReceived& packet) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ if (packet.payload_size() == 0) {
+ // Padding or keep-alive packet.
+ // TODO(nisse): Could drop empty packets earlier, but need to figure out how
+ // they should be counted in stats.
+ NotifyReceiverOfEmptyPacket(packet.SequenceNumber());
+ return;
+ }
+ if (packet.PayloadType() == config_.rtp.red_payload_type) {
+ ParseAndHandleEncapsulatingHeader(packet);
+ return;
+ }
+
+ const auto type_it = payload_type_map_.find(packet.PayloadType());
+ if (type_it == payload_type_map_.end()) {
+ return;
+ }
+ absl::optional<VideoRtpDepacketizer::ParsedRtpPayload> parsed_payload =
+ type_it->second->Parse(packet.PayloadBuffer());
+ if (parsed_payload == absl::nullopt) {
+ RTC_LOG(LS_WARNING) << "Failed parsing payload.";
+ return;
+ }
+
+ OnReceivedPayloadData(std::move(parsed_payload->video_payload), packet,
+ parsed_payload->video_header);
+}
+
+void RtpVideoStreamReceiver2::ParseAndHandleEncapsulatingHeader(
+ const RtpPacketReceived& packet) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ if (packet.PayloadType() == config_.rtp.red_payload_type &&
+ packet.payload_size() > 0) {
+ if (packet.payload()[0] == config_.rtp.ulpfec_payload_type) {
+ // Notify video_receiver about received FEC packets to avoid NACKing these
+ // packets.
+ NotifyReceiverOfEmptyPacket(packet.SequenceNumber());
+ }
+ if (!ulpfec_receiver_->AddReceivedRedPacket(
+ packet, config_.rtp.ulpfec_payload_type)) {
+ return;
+ }
+ ulpfec_receiver_->ProcessReceivedFec();
+ }
+}
+
+// In the case of a video stream without picture ids and no rtx the
+// RtpFrameReferenceFinder will need to know about padding to
+// correctly calculate frame references.
+void RtpVideoStreamReceiver2::NotifyReceiverOfEmptyPacket(uint16_t seq_num) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+
+ reference_finder_->PaddingReceived(seq_num);
+
+ OnInsertedPacket(packet_buffer_.InsertPadding(seq_num));
+ if (nack_module_) {
+ nack_module_->OnReceivedPacket(seq_num, /* is_keyframe = */ false,
+                                   /* is_recovered = */ false);
+ }
+ if (loss_notification_controller_) {
+ // TODO(bugs.webrtc.org/10336): Handle empty packets.
+ RTC_LOG(LS_WARNING)
+ << "LossNotificationController does not expect empty packets.";
+ }
+}
+
+bool RtpVideoStreamReceiver2::DeliverRtcp(const uint8_t* rtcp_packet,
+ size_t rtcp_packet_length) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+
+ if (!receiving_) {
+ return false;
+ }
+
+ rtp_rtcp_->IncomingRtcpPacket(rtcp_packet, rtcp_packet_length);
+
+ int64_t rtt = 0;
+ rtp_rtcp_->RTT(config_.rtp.remote_ssrc, &rtt, nullptr, nullptr, nullptr);
+ if (rtt == 0) {
+ // Waiting for valid rtt.
+ return true;
+ }
+ uint32_t ntp_secs = 0;
+ uint32_t ntp_frac = 0;
+ uint32_t rtp_timestamp = 0;
+ uint32_t recieved_ntp_secs = 0;
+ uint32_t recieved_ntp_frac = 0;
+ if (rtp_rtcp_->RemoteNTP(&ntp_secs, &ntp_frac, &recieved_ntp_secs,
+ &recieved_ntp_frac, &rtp_timestamp) != 0) {
+ // Waiting for RTCP.
+ return true;
+ }
+ NtpTime recieved_ntp(recieved_ntp_secs, recieved_ntp_frac);
+ int64_t time_since_recieved =
+ clock_->CurrentNtpInMilliseconds() - recieved_ntp.ToMs();
+ // Don't use old SRs to estimate time.
+ if (time_since_recieved <= 1) {
+ ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp);
+ absl::optional<int64_t> remote_to_local_clock_offset_ms =
+ ntp_estimator_.EstimateRemoteToLocalClockOffsetMs();
+ if (remote_to_local_clock_offset_ms.has_value()) {
+ absolute_capture_time_receiver_.SetRemoteToLocalClockOffset(
+ Int64MsToQ32x32(*remote_to_local_clock_offset_ms));
+ }
+ }
+
+ return true;
+}
+
+void RtpVideoStreamReceiver2::FrameContinuous(int64_t picture_id) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ if (!nack_module_)
+ return;
+
+ int seq_num = -1;
+ auto seq_num_it = last_seq_num_for_pic_id_.find(picture_id);
+ if (seq_num_it != last_seq_num_for_pic_id_.end())
+ seq_num = seq_num_it->second;
+ if (seq_num != -1)
+ nack_module_->ClearUpTo(seq_num);
+}
+
+void RtpVideoStreamReceiver2::FrameDecoded(int64_t picture_id) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ // Running on the decoder thread.
+ int seq_num = -1;
+ auto seq_num_it = last_seq_num_for_pic_id_.find(picture_id);
+ if (seq_num_it != last_seq_num_for_pic_id_.end()) {
+ seq_num = seq_num_it->second;
+ last_seq_num_for_pic_id_.erase(last_seq_num_for_pic_id_.begin(),
+ ++seq_num_it);
+ }
+
+ if (seq_num != -1) {
+ packet_buffer_.ClearTo(seq_num);
+ reference_finder_->ClearTo(seq_num);
+ }
+}
+
+void RtpVideoStreamReceiver2::SignalNetworkState(NetworkState state) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ rtp_rtcp_->SetRTCPStatus(state == kNetworkUp ? config_.rtp.rtcp_mode
+ : RtcpMode::kOff);
+}
+
+void RtpVideoStreamReceiver2::StartReceive() {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ receiving_ = true;
+}
+
+void RtpVideoStreamReceiver2::StopReceive() {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ receiving_ = false;
+}
+
+void RtpVideoStreamReceiver2::UpdateHistograms() {
+ FecPacketCounter counter = ulpfec_receiver_->GetPacketCounter();
+ if (counter.first_packet_time_ms == -1)
+ return;
+
+ int64_t elapsed_sec =
+ (clock_->TimeInMilliseconds() - counter.first_packet_time_ms) / 1000;
+ if (elapsed_sec < metrics::kMinRunTimeInSeconds)
+ return;
+
+ if (counter.num_packets > 0) {
+ RTC_HISTOGRAM_PERCENTAGE(
+ "WebRTC.Video.ReceivedFecPacketsInPercent",
+ static_cast<int>(counter.num_fec_packets * 100 / counter.num_packets));
+ }
+ if (counter.num_fec_packets > 0) {
+ RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.RecoveredMediaPacketsInPercentOfFec",
+ static_cast<int>(counter.num_recovered_packets *
+ 100 / counter.num_fec_packets));
+ }
+ if (config_.rtp.ulpfec_payload_type != -1) {
+ RTC_HISTOGRAM_COUNTS_10000(
+ "WebRTC.Video.FecBitrateReceivedInKbps",
+ static_cast<int>(counter.num_bytes * 8 / elapsed_sec / 1000));
+ }
+}
+
+void RtpVideoStreamReceiver2::InsertSpsPpsIntoTracker(uint8_t payload_type) {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+
+ auto codec_params_it = pt_codec_params_.find(payload_type);
+ if (codec_params_it == pt_codec_params_.end())
+ return;
+
+ RTC_LOG(LS_INFO) << "Found out of band supplied codec parameters for"
+ " payload type: "
+ << static_cast<int>(payload_type);
+
+ H264SpropParameterSets sprop_decoder;
+ auto sprop_base64_it =
+ codec_params_it->second.find(cricket::kH264FmtpSpropParameterSets);
+
+ if (sprop_base64_it == codec_params_it->second.end())
+ return;
+
+ if (!sprop_decoder.DecodeSprop(sprop_base64_it->second.c_str()))
+ return;
+
+ tracker_.InsertSpsPpsNalus(sprop_decoder.sps_nalu(),
+ sprop_decoder.pps_nalu());
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/video/rtp_video_stream_receiver2.h b/chromium/third_party/webrtc/video/rtp_video_stream_receiver2.h
new file mode 100644
index 00000000000..d82a7abbfe0
--- /dev/null
+++ b/chromium/third_party/webrtc/video/rtp_video_stream_receiver2.h
@@ -0,0 +1,367 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef VIDEO_RTP_VIDEO_STREAM_RECEIVER2_H_
+#define VIDEO_RTP_VIDEO_STREAM_RECEIVER2_H_
+
+#include <map>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/crypto/frame_decryptor_interface.h"
+#include "api/video/color_space.h"
+#include "api/video_codecs/video_codec.h"
+#include "call/rtp_packet_sink_interface.h"
+#include "call/syncable.h"
+#include "call/video_receive_stream.h"
+#include "modules/rtp_rtcp/include/receive_statistics.h"
+#include "modules/rtp_rtcp/include/remote_ntp_time_estimator.h"
+#include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
+#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "modules/rtp_rtcp/source/absolute_capture_time_receiver.h"
+#include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h"
+#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h"
+#include "modules/rtp_rtcp/source/rtp_video_header.h"
+#include "modules/rtp_rtcp/source/video_rtp_depacketizer.h"
+#include "modules/video_coding/h264_sps_pps_tracker.h"
+#include "modules/video_coding/loss_notification_controller.h"
+#include "modules/video_coding/packet_buffer.h"
+#include "modules/video_coding/rtp_frame_reference_finder.h"
+#include "modules/video_coding/unique_timestamp_counter.h"
+#include "rtc_base/constructor_magic.h"
+#include "rtc_base/experiments/field_trial_parser.h"
+#include "rtc_base/numerics/sequence_number_util.h"
+#include "rtc_base/synchronization/sequence_checker.h"
+#include "rtc_base/thread_annotations.h"
+#include "video/buffered_frame_decryptor.h"
+#include "video/rtp_video_stream_receiver_frame_transformer_delegate.h"
+
+namespace webrtc {
+
+class NackModule2;
+class PacketRouter;
+class ProcessThread;
+class ReceiveStatistics;
+class RtcpRttStats;
+class RtpPacketReceived;
+class Transport;
+class UlpfecReceiver;
+
+class RtpVideoStreamReceiver2 : public LossNotificationSender,
+ public RecoveredPacketReceiver,
+ public RtpPacketSinkInterface,
+ public KeyFrameRequestSender,
+ public video_coding::OnCompleteFrameCallback,
+ public OnDecryptedFrameCallback,
+ public OnDecryptionStatusChangeCallback,
+ public RtpVideoFrameReceiver {
+ public:
+ RtpVideoStreamReceiver2(
+ TaskQueueBase* current_queue,
+ Clock* clock,
+ Transport* transport,
+ RtcpRttStats* rtt_stats,
+ // The packet router is optional; if provided, the RtpRtcp module for this
+ // stream is registered as a candidate for sending REMB and transport
+ // feedback.
+ PacketRouter* packet_router,
+ const VideoReceiveStream::Config* config,
+ ReceiveStatistics* rtp_receive_statistics,
+ RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer,
+ RtcpCnameCallback* rtcp_cname_callback,
+ ProcessThread* process_thread,
+ NackSender* nack_sender,
+ // The KeyFrameRequestSender is optional; if not provided, key frame
+ // requests are sent via the internal RtpRtcp module.
+ KeyFrameRequestSender* keyframe_request_sender,
+ video_coding::OnCompleteFrameCallback* complete_frame_callback,
+ rtc::scoped_refptr<FrameDecryptorInterface> frame_decryptor,
+ rtc::scoped_refptr<FrameTransformerInterface> frame_transformer);
+ ~RtpVideoStreamReceiver2() override;
+
+ void AddReceiveCodec(const VideoCodec& video_codec,
+ const std::map<std::string, std::string>& codec_params,
+ bool raw_payload);
+
+ void StartReceive();
+ void StopReceive();
+
+ // Produces the transport-related timestamps; current_delay_ms is left unset.
+ absl::optional<Syncable::Info> GetSyncInfo() const;
+
+ bool DeliverRtcp(const uint8_t* rtcp_packet, size_t rtcp_packet_length);
+
+ void FrameContinuous(int64_t seq_num);
+
+ void FrameDecoded(int64_t seq_num);
+
+ void SignalNetworkState(NetworkState state);
+
+ // Returns number of different frames seen.
+ int GetUniqueFramesSeen() const {
+ RTC_DCHECK_RUN_ON(&worker_task_checker_);
+ return frame_counter_.GetUniqueSeen();
+ }
+
+ // Implements RtpPacketSinkInterface.
+ void OnRtpPacket(const RtpPacketReceived& packet) override;
+
+ // TODO(philipel): Stop using VCMPacket in the new jitter buffer and then
+ // remove this function. Public only for tests.
+ void OnReceivedPayloadData(rtc::CopyOnWriteBuffer codec_payload,
+ const RtpPacketReceived& rtp_packet,
+ const RTPVideoHeader& video);
+
+ // Implements RecoveredPacketReceiver.
+ void OnRecoveredPacket(const uint8_t* packet, size_t packet_length) override;
+
+ // Send an RTCP keyframe request.
+ void RequestKeyFrame() override;
+
+ // Implements LossNotificationSender.
+ void SendLossNotification(uint16_t last_decoded_seq_num,
+ uint16_t last_received_seq_num,
+ bool decodability_flag,
+ bool buffering_allowed) override;
+
+ bool IsUlpfecEnabled() const;
+ bool IsRetransmissionsEnabled() const;
+
+ // Returns true if a decryptor is attached and frames can be decrypted.
+ // Updated by OnDecryptionStatusChangeCallback. Note this refers to Frame
+ // Decryption not SRTP.
+ bool IsDecryptable() const;
+
+ // Don't use, still experimental.
+ void RequestPacketRetransmit(const std::vector<uint16_t>& sequence_numbers);
+
+ // Implements OnCompleteFrameCallback.
+ void OnCompleteFrame(
+ std::unique_ptr<video_coding::EncodedFrame> frame) override;
+
+ // Implements OnDecryptedFrameCallback.
+ void OnDecryptedFrame(
+ std::unique_ptr<video_coding::RtpFrameObject> frame) override;
+
+ // Implements OnDecryptionStatusChangeCallback.
+ void OnDecryptionStatusChange(
+ FrameDecryptorInterface::Status status) override;
+
+ // Optionally set a frame decryptor after a stream has started. This will not
+ // reset the decoder state.
+ void SetFrameDecryptor(
+ rtc::scoped_refptr<FrameDecryptorInterface> frame_decryptor);
+
+ // Sets a frame transformer after a stream has started, if no transformer
+ // has previously been set. Does not reset the decoder state.
+ void SetDepacketizerToDecoderFrameTransformer(
+ rtc::scoped_refptr<FrameTransformerInterface> frame_transformer);
+
+ // Called by VideoReceiveStream when stats are updated.
+ void UpdateRtt(int64_t max_rtt_ms);
+
+ absl::optional<int64_t> LastReceivedPacketMs() const;
+ absl::optional<int64_t> LastReceivedKeyframePacketMs() const;
+
+ // RtpDemuxer only forwards a given RTP packet to one sink. However, some
+ // sinks, such as FlexFEC, might wish to be informed of all of the packets
+ // a given sink receives (or any set of sinks). They may do so by registering
+ // themselves as secondary sinks.
+ void AddSecondarySink(RtpPacketSinkInterface* sink);
+ void RemoveSecondarySink(const RtpPacketSinkInterface* sink);
+
+ private:
+ // Implements RtpVideoFrameReceiver.
+ void ManageFrame(
+ std::unique_ptr<video_coding::RtpFrameObject> frame) override;
+
+ // Used for buffering RTCP feedback messages and sending them all together.
+ // Note:
+ // 1. Key frame requests and NACKs are mutually exclusive, with the
+ // former taking precedence over the latter.
+ // 2. Loss notifications are orthogonal to either. (That is, may be sent
+ // alongside either.)
+ class RtcpFeedbackBuffer : public KeyFrameRequestSender,
+ public NackSender,
+ public LossNotificationSender {
+ public:
+ RtcpFeedbackBuffer(KeyFrameRequestSender* key_frame_request_sender,
+ NackSender* nack_sender,
+ LossNotificationSender* loss_notification_sender);
+
+ ~RtcpFeedbackBuffer() override = default;
+
+ // KeyFrameRequestSender implementation.
+ void RequestKeyFrame() override;
+
+ // NackSender implementation.
+ void SendNack(const std::vector<uint16_t>& sequence_numbers,
+ bool buffering_allowed) override;
+
+ // LossNotificationSender implementation.
+ void SendLossNotification(uint16_t last_decoded_seq_num,
+ uint16_t last_received_seq_num,
+ bool decodability_flag,
+ bool buffering_allowed) override;
+
+ // Send all RTCP feedback messages buffered thus far.
+ void SendBufferedRtcpFeedback();
+
+ private:
+ // LNTF-related state.
+ struct LossNotificationState {
+ LossNotificationState(uint16_t last_decoded_seq_num,
+ uint16_t last_received_seq_num,
+ bool decodability_flag)
+ : last_decoded_seq_num(last_decoded_seq_num),
+ last_received_seq_num(last_received_seq_num),
+ decodability_flag(decodability_flag) {}
+
+ uint16_t last_decoded_seq_num;
+ uint16_t last_received_seq_num;
+ bool decodability_flag;
+ };
+
+ SequenceChecker worker_task_checker_;
+ KeyFrameRequestSender* const key_frame_request_sender_;
+ NackSender* const nack_sender_;
+ LossNotificationSender* const loss_notification_sender_;
+
+ // Key-frame-request-related state.
+ bool request_key_frame_ RTC_GUARDED_BY(worker_task_checker_);
+
+ // NACK-related state.
+ std::vector<uint16_t> nack_sequence_numbers_
+ RTC_GUARDED_BY(worker_task_checker_);
+
+ absl::optional<LossNotificationState> lntf_state_
+ RTC_GUARDED_BY(worker_task_checker_);
+ };
+ enum ParseGenericDependenciesResult {
+ kDropPacket,
+ kHasGenericDescriptor,
+ kNoGenericDescriptor
+ };
+
+ // Entry point doing non-stats work for a received packet. Called
+ // for the same packet both before and after RED decapsulation.
+ void ReceivePacket(const RtpPacketReceived& packet);
+ // Parses and handles RED headers.
+ // This function assumes that it's being called from only one thread.
+ void ParseAndHandleEncapsulatingHeader(const RtpPacketReceived& packet);
+ void NotifyReceiverOfEmptyPacket(uint16_t seq_num);
+ void UpdateHistograms();
+ bool IsRedEnabled() const;
+ void InsertSpsPpsIntoTracker(uint8_t payload_type);
+ void OnInsertedPacket(video_coding::PacketBuffer::InsertResult result);
+ ParseGenericDependenciesResult ParseGenericDependenciesExtension(
+ const RtpPacketReceived& rtp_packet,
+ RTPVideoHeader* video_header) RTC_RUN_ON(worker_task_checker_);
+ void OnAssembledFrame(std::unique_ptr<video_coding::RtpFrameObject> frame);
+
+ Clock* const clock_;
+ // Ownership of this object lies with VideoReceiveStream, which owns |this|.
+ const VideoReceiveStream::Config& config_;
+ PacketRouter* const packet_router_;
+ ProcessThread* const process_thread_;
+
+ RemoteNtpTimeEstimator ntp_estimator_;
+
+ RtpHeaderExtensionMap rtp_header_extensions_;
+ // Set by the field trial WebRTC-ForcePlayoutDelay to override any playout
+ // delay that is specified in the received packets.
+ FieldTrialOptional<int> forced_playout_delay_max_ms_;
+ FieldTrialOptional<int> forced_playout_delay_min_ms_;
+ ReceiveStatistics* const rtp_receive_statistics_;
+ std::unique_ptr<UlpfecReceiver> ulpfec_receiver_;
+
+ SequenceChecker worker_task_checker_;
+ bool receiving_ RTC_GUARDED_BY(worker_task_checker_);
+ int64_t last_packet_log_ms_ RTC_GUARDED_BY(worker_task_checker_);
+
+ const std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp_;
+
+ video_coding::OnCompleteFrameCallback* complete_frame_callback_;
+ KeyFrameRequestSender* const keyframe_request_sender_;
+
+ RtcpFeedbackBuffer rtcp_feedback_buffer_;
+ const std::unique_ptr<NackModule2> nack_module_;
+ std::unique_ptr<LossNotificationController> loss_notification_controller_;
+
+ video_coding::PacketBuffer packet_buffer_;
+ UniqueTimestampCounter frame_counter_ RTC_GUARDED_BY(worker_task_checker_);
+ SeqNumUnwrapper<uint16_t> frame_id_unwrapper_
+ RTC_GUARDED_BY(worker_task_checker_);
+
+ // Video structure provided in the dependency descriptor in a first packet
+ // of a key frame. It is required to parse dependency descriptor in the
+ // following delta packets.
+ std::unique_ptr<FrameDependencyStructure> video_structure_
+ RTC_GUARDED_BY(worker_task_checker_);
+ // Frame id of the last frame with the attached video structure.
+  // absl::nullopt when `video_structure_ == nullptr`.
+ absl::optional<int64_t> video_structure_frame_id_
+ RTC_GUARDED_BY(worker_task_checker_);
+
+ std::unique_ptr<video_coding::RtpFrameReferenceFinder> reference_finder_
+ RTC_GUARDED_BY(worker_task_checker_);
+ absl::optional<VideoCodecType> current_codec_
+ RTC_GUARDED_BY(worker_task_checker_);
+ uint32_t last_assembled_frame_rtp_timestamp_
+ RTC_GUARDED_BY(worker_task_checker_);
+
+ std::map<int64_t, uint16_t> last_seq_num_for_pic_id_
+ RTC_GUARDED_BY(worker_task_checker_);
+ video_coding::H264SpsPpsTracker tracker_ RTC_GUARDED_BY(worker_task_checker_);
+
+ // Maps payload id to the depacketizer.
+ std::map<uint8_t, std::unique_ptr<VideoRtpDepacketizer>> payload_type_map_
+ RTC_GUARDED_BY(worker_task_checker_);
+
+ // TODO(johan): Remove pt_codec_params_ once
+ // https://bugs.chromium.org/p/webrtc/issues/detail?id=6883 is resolved.
+ // Maps a payload type to a map of out-of-band supplied codec parameters.
+ std::map<uint8_t, std::map<std::string, std::string>> pt_codec_params_
+ RTC_GUARDED_BY(worker_task_checker_);
+ int16_t last_payload_type_ RTC_GUARDED_BY(worker_task_checker_) = -1;
+
+ bool has_received_frame_ RTC_GUARDED_BY(worker_task_checker_);
+
+ std::vector<RtpPacketSinkInterface*> secondary_sinks_
+ RTC_GUARDED_BY(worker_task_checker_);
+
+ absl::optional<uint32_t> last_received_rtp_timestamp_
+ RTC_GUARDED_BY(worker_task_checker_);
+ absl::optional<int64_t> last_received_rtp_system_time_ms_
+ RTC_GUARDED_BY(worker_task_checker_);
+
+ // Handles incoming encrypted frames and forwards them to the
+ // rtp_reference_finder if they are decryptable.
+ std::unique_ptr<BufferedFrameDecryptor> buffered_frame_decryptor_
+ RTC_PT_GUARDED_BY(worker_task_checker_);
+ bool frames_decryptable_ RTC_GUARDED_BY(worker_task_checker_);
+ absl::optional<ColorSpace> last_color_space_;
+
+ AbsoluteCaptureTimeReceiver absolute_capture_time_receiver_
+ RTC_GUARDED_BY(worker_task_checker_);
+
+ int64_t last_completed_picture_id_ = 0;
+
+ rtc::scoped_refptr<RtpVideoStreamReceiverFrameTransformerDelegate>
+ frame_transformer_delegate_;
+};
+
+} // namespace webrtc
+
+#endif // VIDEO_RTP_VIDEO_STREAM_RECEIVER2_H_
diff --git a/chromium/third_party/webrtc/video/rtp_video_stream_receiver2_unittest.cc b/chromium/third_party/webrtc/video/rtp_video_stream_receiver2_unittest.cc
new file mode 100644
index 00000000000..cda0fe5cfaf
--- /dev/null
+++ b/chromium/third_party/webrtc/video/rtp_video_stream_receiver2_unittest.cc
@@ -0,0 +1,1221 @@
+/*
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video/rtp_video_stream_receiver2.h"
+
+#include <memory>
+#include <utility>
+
+#include "api/video/video_codec_type.h"
+#include "api/video/video_frame_type.h"
+#include "common_video/h264/h264_common.h"
+#include "media/base/media_constants.h"
+#include "modules/rtp_rtcp/source/rtp_descriptor_authentication.h"
+#include "modules/rtp_rtcp/source/rtp_format.h"
+#include "modules/rtp_rtcp/source/rtp_format_vp9.h"
+#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h"
+#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h"
+#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
+#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
+#include "modules/utility/include/process_thread.h"
+#include "modules/video_coding/frame_object.h"
+#include "modules/video_coding/include/video_coding_defines.h"
+#include "modules/video_coding/rtp_frame_reference_finder.h"
+#include "rtc_base/byte_buffer.h"
+#include "rtc_base/logging.h"
+#include "system_wrappers/include/clock.h"
+#include "system_wrappers/include/field_trial.h"
+#include "test/field_trial.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/mock_frame_transformer.h"
+#include "test/time_controller/simulated_task_queue.h"
+
+using ::testing::_;
+using ::testing::ElementsAre;
+using ::testing::Invoke;
+using ::testing::SizeIs;
+using ::testing::Values;
+
+namespace webrtc {
+
+namespace {
+
+const uint8_t kH264StartCode[] = {0x00, 0x00, 0x00, 0x01};
+
+std::vector<uint64_t> GetAbsoluteCaptureTimestamps(
+ const video_coding::EncodedFrame* frame) {
+ std::vector<uint64_t> result;
+ for (const auto& packet_info : frame->PacketInfos()) {
+ if (packet_info.absolute_capture_time()) {
+ result.push_back(
+ packet_info.absolute_capture_time()->absolute_capture_timestamp);
+ }
+ }
+ return result;
+}
+
+RTPVideoHeader GetGenericVideoHeader(VideoFrameType frame_type) {
+ RTPVideoHeader video_header;
+ video_header.is_first_packet_in_frame = true;
+ video_header.is_last_packet_in_frame = true;
+ video_header.codec = kVideoCodecGeneric;
+ video_header.frame_type = frame_type;
+ return video_header;
+}
+
+class MockTransport : public Transport {
+ public:
+ MOCK_METHOD(bool,
+ SendRtp,
+ (const uint8_t*, size_t length, const PacketOptions& options),
+ (override));
+ MOCK_METHOD(bool, SendRtcp, (const uint8_t*, size_t length), (override));
+};
+
+class MockNackSender : public NackSender {
+ public:
+ MOCK_METHOD(void,
+ SendNack,
+ (const std::vector<uint16_t>& sequence_numbers,
+ bool buffering_allowed),
+ (override));
+};
+
+class MockKeyFrameRequestSender : public KeyFrameRequestSender {
+ public:
+ MOCK_METHOD(void, RequestKeyFrame, (), (override));
+};
+
+class MockOnCompleteFrameCallback
+ : public video_coding::OnCompleteFrameCallback {
+ public:
+ MOCK_METHOD(void, DoOnCompleteFrame, (video_coding::EncodedFrame*), ());
+ MOCK_METHOD(void,
+ DoOnCompleteFrameFailNullptr,
+ (video_coding::EncodedFrame*),
+ ());
+ MOCK_METHOD(void,
+ DoOnCompleteFrameFailLength,
+ (video_coding::EncodedFrame*),
+ ());
+ MOCK_METHOD(void,
+ DoOnCompleteFrameFailBitstream,
+ (video_coding::EncodedFrame*),
+ ());
+ void OnCompleteFrame(
+ std::unique_ptr<video_coding::EncodedFrame> frame) override {
+ if (!frame) {
+ DoOnCompleteFrameFailNullptr(nullptr);
+ return;
+ }
+ EXPECT_EQ(buffer_.Length(), frame->size());
+ if (buffer_.Length() != frame->size()) {
+ DoOnCompleteFrameFailLength(frame.get());
+ return;
+ }
+ if (frame->size() != buffer_.Length() ||
+ memcmp(buffer_.Data(), frame->data(), buffer_.Length()) != 0) {
+ DoOnCompleteFrameFailBitstream(frame.get());
+ return;
+ }
+ DoOnCompleteFrame(frame.get());
+ }
+
+ void ClearExpectedBitstream() { buffer_.Clear(); }
+
+ void AppendExpectedBitstream(const uint8_t data[], size_t size_in_bytes) {
+ // TODO(Johan): Let rtc::ByteBuffer handle uint8_t* instead of char*.
+ buffer_.WriteBytes(reinterpret_cast<const char*>(data), size_in_bytes);
+ }
+ rtc::ByteBufferWriter buffer_;
+};
+
+class MockRtpPacketSink : public RtpPacketSinkInterface {
+ public:
+ MOCK_METHOD(void, OnRtpPacket, (const RtpPacketReceived&), (override));
+};
+
+constexpr uint32_t kSsrc = 111;
+constexpr uint16_t kSequenceNumber = 222;
+constexpr int kPayloadType = 100;
+constexpr int kRedPayloadType = 125;
+
+std::unique_ptr<RtpPacketReceived> CreateRtpPacketReceived() {
+ auto packet = std::make_unique<RtpPacketReceived>();
+ packet->SetSsrc(kSsrc);
+ packet->SetSequenceNumber(kSequenceNumber);
+ packet->SetPayloadType(kPayloadType);
+ return packet;
+}
+
+MATCHER_P(SamePacketAs, other, "") {
+ return arg.Ssrc() == other.Ssrc() &&
+ arg.SequenceNumber() == other.SequenceNumber();
+}
+
+} // namespace
+
+class RtpVideoStreamReceiver2Test : public ::testing::Test {
+ public:
+ RtpVideoStreamReceiver2Test() : RtpVideoStreamReceiver2Test("") {}
+ explicit RtpVideoStreamReceiver2Test(std::string field_trials)
+ : override_field_trials_(field_trials),
+ config_(CreateConfig()),
+ process_thread_(ProcessThread::Create("TestThread")) {
+ rtp_receive_statistics_ =
+ ReceiveStatistics::Create(Clock::GetRealTimeClock());
+ rtp_video_stream_receiver_ = std::make_unique<RtpVideoStreamReceiver2>(
+ TaskQueueBase::Current(), Clock::GetRealTimeClock(), &mock_transport_,
+ nullptr, nullptr, &config_, rtp_receive_statistics_.get(), nullptr,
+ nullptr, process_thread_.get(), &mock_nack_sender_,
+ &mock_key_frame_request_sender_, &mock_on_complete_frame_callback_,
+ nullptr, nullptr);
+ VideoCodec codec;
+ codec.plType = kPayloadType;
+ codec.codecType = kVideoCodecGeneric;
+ rtp_video_stream_receiver_->AddReceiveCodec(codec, {},
+ /*raw_payload=*/false);
+ }
+
+ RTPVideoHeader GetDefaultH264VideoHeader() {
+ RTPVideoHeader video_header;
+ video_header.codec = kVideoCodecH264;
+ video_header.video_type_header.emplace<RTPVideoHeaderH264>();
+ return video_header;
+ }
+
+ // TODO(Johan): refactor h264_sps_pps_tracker_unittests.cc to avoid duplicate
+ // code.
+ void AddSps(RTPVideoHeader* video_header,
+ uint8_t sps_id,
+ rtc::CopyOnWriteBuffer* data) {
+ NaluInfo info;
+ info.type = H264::NaluType::kSps;
+ info.sps_id = sps_id;
+ info.pps_id = -1;
+ data->AppendData({H264::NaluType::kSps, sps_id});
+ auto& h264 = absl::get<RTPVideoHeaderH264>(video_header->video_type_header);
+ h264.nalus[h264.nalus_length++] = info;
+ }
+
+ void AddPps(RTPVideoHeader* video_header,
+ uint8_t sps_id,
+ uint8_t pps_id,
+ rtc::CopyOnWriteBuffer* data) {
+ NaluInfo info;
+ info.type = H264::NaluType::kPps;
+ info.sps_id = sps_id;
+ info.pps_id = pps_id;
+ data->AppendData({H264::NaluType::kPps, pps_id});
+ auto& h264 = absl::get<RTPVideoHeaderH264>(video_header->video_type_header);
+ h264.nalus[h264.nalus_length++] = info;
+ }
+
+ // Records an IDR NALU referencing |pps_id| in |video_header|'s H264 header.
+ // Unlike AddSps/AddPps, no payload bytes are appended.
+ void AddIdr(RTPVideoHeader* video_header, int pps_id) {
+   NaluInfo nalu;
+   nalu.type = H264::NaluType::kIdr;
+   nalu.sps_id = -1;
+   nalu.pps_id = pps_id;
+   auto& h264 = absl::get<RTPVideoHeaderH264>(video_header->video_type_header);
+   h264.nalus[h264.nalus_length++] = nalu;
+ }
+
+ protected:
+ // Builds the default receive configuration: fixed remote/local SSRCs and
+ // the file-scope RED payload type.
+ static VideoReceiveStream::Config CreateConfig() {
+   VideoReceiveStream::Config cfg(nullptr);
+   cfg.rtp.remote_ssrc = 1111;
+   cfg.rtp.local_ssrc = 2222;
+   cfg.rtp.red_payload_type = kRedPayloadType;
+   return cfg;
+ }
+
+ // Task queue installed as "current" so the receiver believes test code runs
+ // on its worker queue.
+ TokenTaskQueue task_queue_;
+ TokenTaskQueue::CurrentTaskQueueSetter task_queue_setter_{&task_queue_};
+
+ const webrtc::test::ScopedFieldTrials override_field_trials_;
+ VideoReceiveStream::Config config_;
+ MockNackSender mock_nack_sender_;
+ MockKeyFrameRequestSender mock_key_frame_request_sender_;
+ MockTransport mock_transport_;
+ MockOnCompleteFrameCallback mock_on_complete_frame_callback_;
+ std::unique_ptr<ProcessThread> process_thread_;
+ std::unique_ptr<ReceiveStatistics> rtp_receive_statistics_;
+ // Object under test; constructed from the members above in the constructor.
+ std::unique_ptr<RtpVideoStreamReceiver2> rtp_video_stream_receiver_;
+};
+
+TEST_F(RtpVideoStreamReceiver2Test, CacheColorSpaceFromLastPacketOfKeyframe) {
+ // Test that color space is cached from the last packet of a key frame and
+ // that it's not reset by padding packets without color space.
+ constexpr int kVp9PayloadType = 99;
+ const ColorSpace kColorSpace(
+ ColorSpace::PrimaryID::kFILM, ColorSpace::TransferID::kBT2020_12,
+ ColorSpace::MatrixID::kBT2020_NCL, ColorSpace::RangeID::kFull);
+ const std::vector<uint8_t> kKeyFramePayload = {0, 1, 2, 3, 4, 5,
+ 6, 7, 8, 9, 10};
+ const std::vector<uint8_t> kDeltaFramePayload = {0, 1, 2, 3, 4};
+
+ // Anonymous helper class that generates received packets.
+ // It packetizes a VP9 payload and attaches the color-space extension only
+ // to the last packet of a key frame.
+ class {
+ public:
+ void SetPayload(const std::vector<uint8_t>& payload,
+ VideoFrameType video_frame_type) {
+ video_frame_type_ = video_frame_type;
+ RtpPacketizer::PayloadSizeLimits pay_load_size_limits;
+ // Reduce max payload length to make sure the key frame generates two
+ // packets.
+ pay_load_size_limits.max_payload_len = 8;
+ RTPVideoHeaderVP9 rtp_video_header_vp9;
+ rtp_video_header_vp9.InitRTPVideoHeaderVP9();
+ rtp_video_header_vp9.inter_pic_predicted =
+ (video_frame_type == VideoFrameType::kVideoFrameDelta);
+ rtp_packetizer_ = std::make_unique<RtpPacketizerVp9>(
+ payload, pay_load_size_limits, rtp_video_header_vp9);
+ }
+
+ size_t NumPackets() { return rtp_packetizer_->NumPackets(); }
+ void SetColorSpace(const ColorSpace& color_space) {
+ color_space_ = color_space;
+ }
+
+ // Pops the next packet off the packetizer. The color-space extension is
+ // attached only when exactly one packet remains (i.e. the last packet)
+ // and the frame is a key frame.
+ RtpPacketReceived NextPacket() {
+ RtpHeaderExtensionMap extension_map;
+ extension_map.Register<ColorSpaceExtension>(1);
+ RtpPacketToSend packet_to_send(&extension_map);
+ packet_to_send.SetSequenceNumber(sequence_number_++);
+ packet_to_send.SetSsrc(kSsrc);
+ packet_to_send.SetPayloadType(kVp9PayloadType);
+ bool include_color_space =
+ (rtp_packetizer_->NumPackets() == 1u &&
+ video_frame_type_ == VideoFrameType::kVideoFrameKey);
+ if (include_color_space) {
+ EXPECT_TRUE(
+ packet_to_send.SetExtension<ColorSpaceExtension>(color_space_));
+ }
+ rtp_packetizer_->NextPacket(&packet_to_send);
+
+ // Round-trip through serialization to mimic a packet off the wire.
+ RtpPacketReceived received_packet(&extension_map);
+ received_packet.Parse(packet_to_send.data(), packet_to_send.size());
+ return received_packet;
+ }
+
+ private:
+ uint16_t sequence_number_ = 0;
+ VideoFrameType video_frame_type_;
+ ColorSpace color_space_;
+ std::unique_ptr<RtpPacketizer> rtp_packetizer_;
+ } received_packet_generator;
+ received_packet_generator.SetColorSpace(kColorSpace);
+
+ // Prepare the receiver for VP9.
+ VideoCodec codec;
+ codec.plType = kVp9PayloadType;
+ codec.codecType = kVideoCodecVP9;
+ std::map<std::string, std::string> codec_params;
+ rtp_video_stream_receiver_->AddReceiveCodec(codec, codec_params,
+ /*raw_payload=*/false);
+
+ // Generate key frame packets.
+ received_packet_generator.SetPayload(kKeyFramePayload,
+ VideoFrameType::kVideoFrameKey);
+ EXPECT_EQ(received_packet_generator.NumPackets(), 2u);
+ RtpPacketReceived key_frame_packet1 = received_packet_generator.NextPacket();
+ RtpPacketReceived key_frame_packet2 = received_packet_generator.NextPacket();
+
+ // Generate delta frame packet.
+ received_packet_generator.SetPayload(kDeltaFramePayload,
+ VideoFrameType::kVideoFrameDelta);
+ EXPECT_EQ(received_packet_generator.NumPackets(), 1u);
+ RtpPacketReceived delta_frame_packet = received_packet_generator.NextPacket();
+
+ rtp_video_stream_receiver_->StartReceive();
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(
+ kKeyFramePayload.data(), kKeyFramePayload.size());
+
+ // Send the key frame and expect a callback with color space information.
+ EXPECT_FALSE(key_frame_packet1.GetExtension<ColorSpaceExtension>());
+ EXPECT_TRUE(key_frame_packet2.GetExtension<ColorSpaceExtension>());
+ rtp_video_stream_receiver_->OnRtpPacket(key_frame_packet1);
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_))
+ .WillOnce(Invoke([kColorSpace](video_coding::EncodedFrame* frame) {
+ ASSERT_TRUE(frame->EncodedImage().ColorSpace());
+ EXPECT_EQ(*frame->EncodedImage().ColorSpace(), kColorSpace);
+ }));
+ rtp_video_stream_receiver_->OnRtpPacket(key_frame_packet2);
+ // Resend the first key frame packet to simulate padding for example.
+ rtp_video_stream_receiver_->OnRtpPacket(key_frame_packet1);
+
+ mock_on_complete_frame_callback_.ClearExpectedBitstream();
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(
+ kDeltaFramePayload.data(), kDeltaFramePayload.size());
+
+ // Expect delta frame to have color space set even though color space not
+ // included in the RTP packet.
+ EXPECT_FALSE(delta_frame_packet.GetExtension<ColorSpaceExtension>());
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_))
+ .WillOnce(Invoke([kColorSpace](video_coding::EncodedFrame* frame) {
+ ASSERT_TRUE(frame->EncodedImage().ColorSpace());
+ EXPECT_EQ(*frame->EncodedImage().ColorSpace(), kColorSpace);
+ }));
+ rtp_video_stream_receiver_->OnRtpPacket(delta_frame_packet);
+}
+
+TEST_F(RtpVideoStreamReceiver2Test, GenericKeyFrame) {
+  // A single generic key-frame packet must surface as one complete frame
+  // whose bitstream equals the payload.
+  rtc::CopyOnWriteBuffer payload({1, 2, 3, 4});
+  RTPVideoHeader video_header =
+      GetGenericVideoHeader(VideoFrameType::kVideoFrameKey);
+  RtpPacketReceived rtp_packet;
+  rtp_packet.SetPayloadType(kPayloadType);
+  rtp_packet.SetSequenceNumber(1);
+  mock_on_complete_frame_callback_.AppendExpectedBitstream(payload.data(),
+                                                           payload.size());
+  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_));
+  rtp_video_stream_receiver_->OnReceivedPayloadData(payload, rtp_packet,
+                                                    video_header);
+}
+
+TEST_F(RtpVideoStreamReceiver2Test, PacketInfoIsPropagatedIntoVideoFrames) {
+  // The absolute capture time carried on the packet must show up in the
+  // assembled frame's packet infos.
+  constexpr uint64_t kAbsoluteCaptureTimestamp = 12;
+  constexpr int kId0 = 1;
+
+  RtpHeaderExtensionMap extension_map;
+  extension_map.Register<AbsoluteCaptureTimeExtension>(kId0);
+  RtpPacketReceived packet(&extension_map);
+  packet.SetPayloadType(kPayloadType);
+  packet.SetSequenceNumber(1);
+  packet.SetTimestamp(1);
+  packet.SetSsrc(kSsrc);
+  packet.SetExtension<AbsoluteCaptureTimeExtension>(
+      AbsoluteCaptureTime{kAbsoluteCaptureTimestamp,
+                          /*estimated_capture_clock_offset=*/absl::nullopt});
+
+  rtc::CopyOnWriteBuffer payload({1, 2, 3, 4});
+  RTPVideoHeader video_header =
+      GetGenericVideoHeader(VideoFrameType::kVideoFrameKey);
+  mock_on_complete_frame_callback_.AppendExpectedBitstream(payload.data(),
+                                                           payload.size());
+  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_))
+      .WillOnce(Invoke(
+          [kAbsoluteCaptureTimestamp](video_coding::EncodedFrame* frame) {
+            EXPECT_THAT(GetAbsoluteCaptureTimestamps(frame),
+                        ElementsAre(kAbsoluteCaptureTimestamp));
+          }));
+  rtp_video_stream_receiver_->OnReceivedPayloadData(payload, packet,
+                                                    video_header);
+}
+
+TEST_F(RtpVideoStreamReceiver2Test,
+ MissingAbsoluteCaptureTimeIsFilledWithExtrapolatedValue) {
+ constexpr uint64_t kAbsoluteCaptureTimestamp = 12;
+ constexpr int kId0 = 1;
+
+ RtpHeaderExtensionMap extension_map;
+ extension_map.Register<AbsoluteCaptureTimeExtension>(kId0);
+ RtpPacketReceived rtp_packet(&extension_map);
+ rtp_packet.SetPayloadType(kPayloadType);
+
+ rtc::CopyOnWriteBuffer data({1, 2, 3, 4});
+ uint16_t sequence_number = 1;
+ uint32_t rtp_timestamp = 1;
+ rtp_packet.SetSequenceNumber(sequence_number);
+ rtp_packet.SetTimestamp(rtp_timestamp);
+ rtp_packet.SetSsrc(kSsrc);
+ // First packet: carries an explicit absolute capture time, which seeds the
+ // receiver's extrapolator.
+ rtp_packet.SetExtension<AbsoluteCaptureTimeExtension>(
+ AbsoluteCaptureTime{kAbsoluteCaptureTimestamp,
+ /*estimated_capture_clock_offset=*/absl::nullopt});
+
+ RTPVideoHeader video_header =
+ GetGenericVideoHeader(VideoFrameType::kVideoFrameKey);
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(),
+ data.size());
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_));
+ rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
+ video_header);
+
+ // Rtp packet without absolute capture time.
+ rtp_packet = RtpPacketReceived(&extension_map);
+ rtp_packet.SetPayloadType(kPayloadType);
+ rtp_packet.SetSequenceNumber(++sequence_number);
+ rtp_packet.SetTimestamp(++rtp_timestamp);
+ rtp_packet.SetSsrc(kSsrc);
+
+ // There is no absolute capture time in the second packet.
+ // Expect rtp video stream receiver to extrapolate it for the resulting video
+ // frame using absolute capture time from the previous packet.
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_))
+ .WillOnce(Invoke([](video_coding::EncodedFrame* frame) {
+ EXPECT_THAT(GetAbsoluteCaptureTimestamps(frame), SizeIs(1));
+ }))
+ rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
+ video_header);
+}
+
+TEST_F(RtpVideoStreamReceiver2Test,
+       NoInfiniteRecursionOnEncapsulatedRedPacket) {
+  // A RED packet whose payload claims to contain another RED packet must be
+  // handled without recursing indefinitely.
+  const std::vector<uint8_t> packet_bytes({
+      0x80,              // RTP version.
+      kRedPayloadType,   // Payload type.
+      0, 0, 0, 0, 0, 0,  // Don't care.
+      0, 0, 0x4, 0x57,   // SSRC.
+      kRedPayloadType,   // RED header encapsulating RED again.
+      0, 0, 0, 0, 0      // Don't care.
+  });
+  RtpPacketReceived packet;
+  EXPECT_TRUE(packet.Parse(packet_bytes.data(), packet_bytes.size()));
+  rtp_video_stream_receiver_->StartReceive();
+  rtp_video_stream_receiver_->OnRtpPacket(packet);
+}
+
+TEST_F(RtpVideoStreamReceiver2Test,
+ DropsPacketWithRedPayloadTypeAndEmptyPayload) {
+ // NOTE(review): this local constant shadows the file-scope kRedPayloadType
+ // used by CreateConfig(); presumably intentional so the test controls the
+ // exact value — confirm against the file-scope constant.
+ const uint8_t kRedPayloadType = 125;
+ config_.rtp.red_payload_type = kRedPayloadType;
+ SetUp(); // re-create rtp_video_stream_receiver with red payload type.
+ // clang-format off
+ const uint8_t data[] = {
+ 0x80, // RTP version.
+ kRedPayloadType, // Payload type.
+ 0, 0, 0, 0, 0, 0, // Don't care.
+ 0, 0, 0x4, 0x57, // SSRC
+ // Empty rtp payload.
+ };
+ // clang-format on
+ RtpPacketReceived packet;
+ // Manually convert to CopyOnWriteBuffer to be sure capacity == size
+ // and asan bot can catch read buffer overflow.
+ EXPECT_TRUE(packet.Parse(rtc::CopyOnWriteBuffer(data)));
+ rtp_video_stream_receiver_->StartReceive();
+ rtp_video_stream_receiver_->OnRtpPacket(packet);
+ // Expect asan doesn't find anything.
+}
+
+TEST_F(RtpVideoStreamReceiver2Test, GenericKeyFrameBitstreamError) {
+  // The expected bitstream deliberately differs from the sent payload in the
+  // last byte, so the callback must report a bitstream mismatch.
+  rtc::CopyOnWriteBuffer payload({1, 2, 3, 4});
+  RtpPacketReceived rtp_packet;
+  rtp_packet.SetPayloadType(kPayloadType);
+  rtp_packet.SetSequenceNumber(1);
+  RTPVideoHeader video_header =
+      GetGenericVideoHeader(VideoFrameType::kVideoFrameKey);
+  constexpr uint8_t kExpectedBitstream[] = {1, 2, 3, 0xff};
+  mock_on_complete_frame_callback_.AppendExpectedBitstream(
+      kExpectedBitstream, sizeof(kExpectedBitstream));
+  EXPECT_CALL(mock_on_complete_frame_callback_,
+              DoOnCompleteFrameFailBitstream(_));
+  rtp_video_stream_receiver_->OnReceivedPayloadData(payload, rtp_packet,
+                                                    video_header);
+}
+
+// H264-specific fixture, parameterized on a field-trial string that is
+// forwarded to the base fixture's constructor.
+class RtpVideoStreamReceiver2TestH264
+ : public RtpVideoStreamReceiver2Test,
+ public ::testing::WithParamInterface<std::string> {
+ protected:
+ RtpVideoStreamReceiver2TestH264() : RtpVideoStreamReceiver2Test(GetParam()) {}
+};
+
+// Run the H264 tests both with and without the SpsPpsIdrIsH264Keyframe
+// field trial enabled.
+INSTANTIATE_TEST_SUITE_P(SpsPpsIdrIsKeyframe,
+ RtpVideoStreamReceiver2TestH264,
+ Values("", "WebRTC-SpsPpsIdrIsH264Keyframe/Enabled/"));
+
+// Fails on MSAN: https://bugs.chromium.org/p/webrtc/issues/detail?id=11376.
+#if defined(MEMORY_SANITIZER)
+#define MAYBE_InBandSpsPps DISABLED_InBandSpsPps
+#else
+#define MAYBE_InBandSpsPps InBandSpsPps
+#endif
+// Feeds SPS, PPS and IDR packets in-band in order and expects a complete key
+// frame; each NALU is expected in the bitstream prefixed by a start code.
+TEST_P(RtpVideoStreamReceiver2TestH264, MAYBE_InBandSpsPps) {
+ // SPS packet (sequence number 0).
+ rtc::CopyOnWriteBuffer sps_data;
+ RtpPacketReceived rtp_packet;
+ RTPVideoHeader sps_video_header = GetDefaultH264VideoHeader();
+ AddSps(&sps_video_header, 0, &sps_data);
+ rtp_packet.SetSequenceNumber(0);
+ rtp_packet.SetPayloadType(kPayloadType);
+ sps_video_header.is_first_packet_in_frame = true;
+ sps_video_header.frame_type = VideoFrameType::kEmptyFrame;
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(
+ kH264StartCode, sizeof(kH264StartCode));
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(sps_data.data(),
+ sps_data.size());
+ rtp_video_stream_receiver_->OnReceivedPayloadData(sps_data, rtp_packet,
+ sps_video_header);
+
+ // PPS packet (sequence number 1), referencing sps_id 0.
+ rtc::CopyOnWriteBuffer pps_data;
+ RTPVideoHeader pps_video_header = GetDefaultH264VideoHeader();
+ AddPps(&pps_video_header, 0, 1, &pps_data);
+ rtp_packet.SetSequenceNumber(1);
+ pps_video_header.is_first_packet_in_frame = true;
+ pps_video_header.frame_type = VideoFrameType::kEmptyFrame;
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(
+ kH264StartCode, sizeof(kH264StartCode));
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(pps_data.data(),
+ pps_data.size());
+ rtp_video_stream_receiver_->OnReceivedPayloadData(pps_data, rtp_packet,
+ pps_video_header);
+
+ // IDR packet (sequence number 2), referencing pps_id 1; completes the frame.
+ rtc::CopyOnWriteBuffer idr_data;
+ RTPVideoHeader idr_video_header = GetDefaultH264VideoHeader();
+ AddIdr(&idr_video_header, 1);
+ rtp_packet.SetSequenceNumber(2);
+ idr_video_header.is_first_packet_in_frame = true;
+ idr_video_header.is_last_packet_in_frame = true;
+ idr_video_header.frame_type = VideoFrameType::kVideoFrameKey;
+ const uint8_t idr[] = {0x65, 1, 2, 3};
+ idr_data.AppendData(idr);
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(
+ kH264StartCode, sizeof(kH264StartCode));
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(idr_data.data(),
+ idr_data.size());
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_));
+ rtp_video_stream_receiver_->OnReceivedPayloadData(idr_data, rtp_packet,
+ idr_video_header);
+}
+
+// Registers SPS/PPS out-of-band via the sprop-parameter-sets fmtp attribute
+// and expects the receiver to prepend them (with start codes) to the first
+// key frame's bitstream.
+TEST_P(RtpVideoStreamReceiver2TestH264, OutOfBandFmtpSpsPps) {
+ // NOTE(review): shadows the fixture-wide kPayloadType on purpose; a fresh
+ // codec is registered for this payload type below.
+ constexpr int kPayloadType = 99;
+ VideoCodec codec;
+ codec.plType = kPayloadType;
+ std::map<std::string, std::string> codec_params;
+ // Example parameter sets from https://tools.ietf.org/html/rfc3984#section-8.2
+ // .
+ codec_params.insert(
+ {cricket::kH264FmtpSpropParameterSets, "Z0IACpZTBYmI,aMljiA=="});
+ rtp_video_stream_receiver_->AddReceiveCodec(codec, codec_params,
+ /*raw_payload=*/false);
+ // Decoded form of the base64 SPS above.
+ const uint8_t binary_sps[] = {0x67, 0x42, 0x00, 0x0a, 0x96,
+ 0x53, 0x05, 0x89, 0x88};
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(
+ kH264StartCode, sizeof(kH264StartCode));
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(binary_sps,
+ sizeof(binary_sps));
+ // Decoded form of the base64 PPS above.
+ const uint8_t binary_pps[] = {0x68, 0xc9, 0x63, 0x88};
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(
+ kH264StartCode, sizeof(kH264StartCode));
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(binary_pps,
+ sizeof(binary_pps));
+
+ RtpPacketReceived rtp_packet;
+ RTPVideoHeader video_header = GetDefaultH264VideoHeader();
+ AddIdr(&video_header, 0);
+ rtp_packet.SetPayloadType(kPayloadType);
+ rtp_packet.SetSequenceNumber(2);
+ video_header.is_first_packet_in_frame = true;
+ video_header.is_last_packet_in_frame = true;
+ video_header.codec = kVideoCodecH264;
+ video_header.frame_type = VideoFrameType::kVideoFrameKey;
+ rtc::CopyOnWriteBuffer data({1, 2, 3});
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(
+ kH264StartCode, sizeof(kH264StartCode));
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(),
+ data.size());
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_));
+ rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
+ video_header);
+}
+
+// Exercises padding (empty-payload) packets interleaved with media: frames
+// must still complete, including when a later frame (seq 6) has to wait for
+// a reordered padding packet (seq 5) to fill the gap.
+TEST_F(RtpVideoStreamReceiver2Test, PaddingInMediaStream) {
+ RtpPacketReceived rtp_packet;
+ RTPVideoHeader video_header = GetDefaultH264VideoHeader();
+ rtc::CopyOnWriteBuffer data({1, 2, 3});
+ rtp_packet.SetPayloadType(kPayloadType);
+ rtp_packet.SetSequenceNumber(2);
+ video_header.is_first_packet_in_frame = true;
+ video_header.is_last_packet_in_frame = true;
+ video_header.codec = kVideoCodecGeneric;
+ video_header.frame_type = VideoFrameType::kVideoFrameKey;
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(),
+ data.size());
+
+ // Seq 2: key frame completes immediately.
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_));
+ rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
+ video_header);
+
+ // Seq 3: padding (empty payload) — no frame callback expected.
+ rtp_packet.SetSequenceNumber(3);
+ rtp_video_stream_receiver_->OnReceivedPayloadData({}, rtp_packet,
+ video_header);
+
+ // Seq 4: delta frame completes (seq 3 accounted for as padding).
+ rtp_packet.SetSequenceNumber(4);
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_));
+ video_header.frame_type = VideoFrameType::kVideoFrameDelta;
+ rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
+ video_header);
+
+ // Seq 6: delta frame arrives before seq 5 — held back by the gap.
+ rtp_packet.SetSequenceNumber(6);
+ rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
+ video_header);
+
+ // Seq 5: late padding fills the gap, releasing the seq-6 frame.
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_));
+ rtp_packet.SetSequenceNumber(5);
+ rtp_video_stream_receiver_->OnReceivedPayloadData({}, rtp_packet,
+ video_header);
+}
+
+TEST_F(RtpVideoStreamReceiver2Test, RequestKeyframeIfFirstFrameIsDelta) {
+  // A stream beginning with a delta frame is undecodable, so the receiver
+  // must ask the sender for a key frame.
+  rtc::CopyOnWriteBuffer payload({1, 2, 3, 4});
+  RtpPacketReceived rtp_packet;
+  rtp_packet.SetPayloadType(kPayloadType);
+  rtp_packet.SetSequenceNumber(1);
+  RTPVideoHeader video_header =
+      GetGenericVideoHeader(VideoFrameType::kVideoFrameDelta);
+  EXPECT_CALL(mock_key_frame_request_sender_, RequestKeyFrame());
+  rtp_video_stream_receiver_->OnReceivedPayloadData(payload, rtp_packet,
+                                                    video_header);
+}
+
+TEST_F(RtpVideoStreamReceiver2Test, RequestKeyframeWhenPacketBufferGetsFull) {
+  constexpr int kPacketBufferMaxSize = 2048;
+
+  rtc::CopyOnWriteBuffer payload({1, 2, 3, 4});
+  RTPVideoHeader video_header =
+      GetGenericVideoHeader(VideoFrameType::kVideoFrameDelta);
+  // Keep every frame incomplete so packets pile up in the packet buffer.
+  video_header.is_last_packet_in_frame = false;
+  RtpPacketReceived rtp_packet;
+  rtp_packet.SetPayloadType(kPayloadType);
+  const uint16_t start_sequence_number = 1234;
+  // Skip every other sequence number so no frame ever completes.
+  for (rtp_packet.SetSequenceNumber(start_sequence_number);
+       rtp_packet.SequenceNumber() - start_sequence_number <
+       kPacketBufferMaxSize;
+       rtp_packet.SetSequenceNumber(rtp_packet.SequenceNumber() + 2)) {
+    rtp_video_stream_receiver_->OnReceivedPayloadData(payload, rtp_packet,
+                                                      video_header);
+  }
+
+  // Overflowing the buffer must trigger a key frame request.
+  EXPECT_CALL(mock_key_frame_request_sender_, RequestKeyFrame());
+  rtp_video_stream_receiver_->OnReceivedPayloadData(payload, rtp_packet,
+                                                    video_header);
+}
+
+TEST_F(RtpVideoStreamReceiver2Test, SecondarySinksGetRtpNotifications) {
+  // Every registered secondary sink must observe incoming RTP packets.
+  rtp_video_stream_receiver_->StartReceive();
+
+  MockRtpPacketSink sink_a;
+  MockRtpPacketSink sink_b;
+  rtp_video_stream_receiver_->AddSecondarySink(&sink_a);
+  rtp_video_stream_receiver_->AddSecondarySink(&sink_b);
+
+  auto packet = CreateRtpPacketReceived();
+  EXPECT_CALL(sink_a, OnRtpPacket(SamePacketAs(*packet)));
+  EXPECT_CALL(sink_b, OnRtpPacket(SamePacketAs(*packet)));
+  rtp_video_stream_receiver_->OnRtpPacket(*packet);
+
+  // Test tear-down.
+  rtp_video_stream_receiver_->StopReceive();
+  rtp_video_stream_receiver_->RemoveSecondarySink(&sink_a);
+  rtp_video_stream_receiver_->RemoveSecondarySink(&sink_b);
+}
+
+TEST_F(RtpVideoStreamReceiver2Test,
+       RemovedSecondarySinksGetNoRtpNotifications) {
+  // A sink that has been removed must no longer be notified of packets.
+  rtp_video_stream_receiver_->StartReceive();
+
+  MockRtpPacketSink sink;
+  rtp_video_stream_receiver_->AddSecondarySink(&sink);
+  rtp_video_stream_receiver_->RemoveSecondarySink(&sink);
+
+  auto packet = CreateRtpPacketReceived();
+  EXPECT_CALL(sink, OnRtpPacket(_)).Times(0);
+  rtp_video_stream_receiver_->OnRtpPacket(*packet);
+
+  // Test tear-down.
+  rtp_video_stream_receiver_->StopReceive();
+}
+
+TEST_F(RtpVideoStreamReceiver2Test,
+       OnlyRemovedSecondarySinksExcludedFromNotifications) {
+  // Removing one sink must not affect delivery to sinks that remain.
+  rtp_video_stream_receiver_->StartReceive();
+
+  MockRtpPacketSink kept_sink;
+  MockRtpPacketSink removed_sink;
+  rtp_video_stream_receiver_->AddSecondarySink(&kept_sink);
+  rtp_video_stream_receiver_->AddSecondarySink(&removed_sink);
+  rtp_video_stream_receiver_->RemoveSecondarySink(&removed_sink);
+
+  auto packet = CreateRtpPacketReceived();
+  EXPECT_CALL(kept_sink, OnRtpPacket(SamePacketAs(*packet)));
+  rtp_video_stream_receiver_->OnRtpPacket(*packet);
+
+  // Test tear-down.
+  rtp_video_stream_receiver_->StopReceive();
+  rtp_video_stream_receiver_->RemoveSecondarySink(&kept_sink);
+}
+
+TEST_F(RtpVideoStreamReceiver2Test,
+       SecondariesOfNonStartedStreamGetNoNotifications) {
+  // Explicitly showing that the stream is not in the |started| state,
+  // regardless of whether streams start out |started| or |stopped|.
+  rtp_video_stream_receiver_->StopReceive();
+
+  MockRtpPacketSink sink;
+  rtp_video_stream_receiver_->AddSecondarySink(&sink);
+
+  auto packet = CreateRtpPacketReceived();
+  EXPECT_CALL(sink, OnRtpPacket(_)).Times(0);
+  rtp_video_stream_receiver_->OnRtpPacket(*packet);
+
+  // Test tear-down.
+  rtp_video_stream_receiver_->RemoveSecondarySink(&sink);
+}
+
+// A single packet carrying a generic frame descriptor must yield a frame
+// whose references, spatial layer and packet infos match the descriptor.
+TEST_F(RtpVideoStreamReceiver2Test, ParseGenericDescriptorOnePacket) {
+ const std::vector<uint8_t> data = {0, 1, 2, 3, 4};
+ const int kSpatialIndex = 1;
+
+ rtp_video_stream_receiver_->StartReceive();
+
+ RtpHeaderExtensionMap extension_map;
+ extension_map.Register<RtpGenericFrameDescriptorExtension00>(5);
+ RtpPacketReceived rtp_packet(&extension_map);
+ rtp_packet.SetPayloadType(kPayloadType);
+
+ RtpGenericFrameDescriptor generic_descriptor;
+ generic_descriptor.SetFirstPacketInSubFrame(true);
+ generic_descriptor.SetLastPacketInSubFrame(true);
+ generic_descriptor.SetFrameId(100);
+ generic_descriptor.SetSpatialLayersBitmask(1 << kSpatialIndex);
+ // Two backward references: frame ids 10 and 20.
+ generic_descriptor.AddFrameDependencyDiff(90);
+ generic_descriptor.AddFrameDependencyDiff(80);
+ ASSERT_TRUE(rtp_packet.SetExtension<RtpGenericFrameDescriptorExtension00>(
+ generic_descriptor));
+
+ uint8_t* payload = rtp_packet.SetPayloadSize(data.size());
+ memcpy(payload, data.data(), data.size());
+ // The first byte is the header, so we ignore the first byte of |data|.
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data() + 1,
+ data.size() - 1);
+
+ rtp_packet.SetMarker(true);
+ rtp_packet.SetPayloadType(kPayloadType);
+ rtp_packet.SetSequenceNumber(1);
+
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame)
+ .WillOnce(Invoke([kSpatialIndex](video_coding::EncodedFrame* frame) {
+ EXPECT_EQ(frame->num_references, 2U);
+ EXPECT_EQ(frame->references[0], frame->id.picture_id - 90);
+ EXPECT_EQ(frame->references[1], frame->id.picture_id - 80);
+ EXPECT_EQ(frame->id.spatial_layer, kSpatialIndex);
+ EXPECT_THAT(frame->PacketInfos(), SizeIs(1));
+ }));
+
+ rtp_video_stream_receiver_->OnRtpPacket(rtp_packet);
+}
+
+// A frame split across two packets (first/last sub-frame flags) must be
+// reassembled; resolution comes from the first packet's descriptor and the
+// frame carries both packets' infos.
+TEST_F(RtpVideoStreamReceiver2Test, ParseGenericDescriptorTwoPackets) {
+ const std::vector<uint8_t> data = {0, 1, 2, 3, 4};
+ const int kSpatialIndex = 1;
+
+ rtp_video_stream_receiver_->StartReceive();
+
+ RtpHeaderExtensionMap extension_map;
+ extension_map.Register<RtpGenericFrameDescriptorExtension00>(5);
+ RtpPacketReceived first_packet(&extension_map);
+
+ RtpGenericFrameDescriptor first_packet_descriptor;
+ first_packet_descriptor.SetFirstPacketInSubFrame(true);
+ first_packet_descriptor.SetLastPacketInSubFrame(false);
+ first_packet_descriptor.SetFrameId(100);
+ first_packet_descriptor.SetSpatialLayersBitmask(1 << kSpatialIndex);
+ first_packet_descriptor.SetResolution(480, 360);
+ ASSERT_TRUE(first_packet.SetExtension<RtpGenericFrameDescriptorExtension00>(
+ first_packet_descriptor));
+
+ uint8_t* first_packet_payload = first_packet.SetPayloadSize(data.size());
+ memcpy(first_packet_payload, data.data(), data.size());
+ // The first byte is the header, so we ignore the first byte of |data|.
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data() + 1,
+ data.size() - 1);
+
+ first_packet.SetPayloadType(kPayloadType);
+ first_packet.SetSequenceNumber(1);
+ rtp_video_stream_receiver_->OnRtpPacket(first_packet);
+
+ // Second packet only flags itself as last-in-sub-frame; no frame id or
+ // layer info needed.
+ RtpPacketReceived second_packet(&extension_map);
+ RtpGenericFrameDescriptor second_packet_descriptor;
+ second_packet_descriptor.SetFirstPacketInSubFrame(false);
+ second_packet_descriptor.SetLastPacketInSubFrame(true);
+ ASSERT_TRUE(second_packet.SetExtension<RtpGenericFrameDescriptorExtension00>(
+ second_packet_descriptor));
+
+ second_packet.SetMarker(true);
+ second_packet.SetPayloadType(kPayloadType);
+ second_packet.SetSequenceNumber(2);
+
+ uint8_t* second_packet_payload = second_packet.SetPayloadSize(data.size());
+ memcpy(second_packet_payload, data.data(), data.size());
+ // The first byte is the header, so we ignore the first byte of |data|.
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data() + 1,
+ data.size() - 1);
+
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame)
+ .WillOnce(Invoke([kSpatialIndex](video_coding::EncodedFrame* frame) {
+ EXPECT_EQ(frame->num_references, 0U);
+ EXPECT_EQ(frame->id.spatial_layer, kSpatialIndex);
+ EXPECT_EQ(frame->EncodedImage()._encodedWidth, 480u);
+ EXPECT_EQ(frame->EncodedImage()._encodedHeight, 360u);
+ EXPECT_THAT(frame->PacketInfos(), SizeIs(2));
+ }));
+
+ rtp_video_stream_receiver_->OnRtpPacket(second_packet);
+}
+
+TEST_F(RtpVideoStreamReceiver2Test, ParseGenericDescriptorRawPayload) {
+  // With a raw-payload codec the entire payload (no depacketizer header byte)
+  // must appear in the assembled bitstream.
+  const std::vector<uint8_t> kPayload = {0, 1, 2, 3, 4};
+  const int kRawPayloadType = 123;
+
+  VideoCodec codec;
+  codec.plType = kRawPayloadType;
+  rtp_video_stream_receiver_->AddReceiveCodec(codec, {}, /*raw_payload=*/true);
+  rtp_video_stream_receiver_->StartReceive();
+
+  RtpHeaderExtensionMap extension_map;
+  extension_map.Register<RtpGenericFrameDescriptorExtension00>(5);
+  RtpPacketReceived packet(&extension_map);
+
+  RtpGenericFrameDescriptor descriptor;
+  descriptor.SetFirstPacketInSubFrame(true);
+  descriptor.SetLastPacketInSubFrame(true);
+  ASSERT_TRUE(
+      packet.SetExtension<RtpGenericFrameDescriptorExtension00>(descriptor));
+
+  uint8_t* payload = packet.SetPayloadSize(kPayload.size());
+  memcpy(payload, kPayload.data(), kPayload.size());
+  mock_on_complete_frame_callback_.AppendExpectedBitstream(kPayload.data(),
+                                                           kPayload.size());
+
+  packet.SetMarker(true);
+  packet.SetPayloadType(kRawPayloadType);
+  packet.SetSequenceNumber(1);
+
+  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame);
+  rtp_video_stream_receiver_->OnRtpPacket(packet);
+}
+
+// The 16-bit frame id in the generic descriptor must be unwrapped so that
+// 0xffff followed by 0x0002 yields consecutive picture ids 3 apart.
+TEST_F(RtpVideoStreamReceiver2Test, UnwrapsFrameId) {
+ const std::vector<uint8_t> data = {0, 1, 2, 3, 4};
+ // NOTE(review): shadows the fixture-wide kPayloadType; a raw-payload codec
+ // is registered for it below.
+ const int kPayloadType = 123;
+
+ VideoCodec codec;
+ codec.plType = kPayloadType;
+ rtp_video_stream_receiver_->AddReceiveCodec(codec, {}, /*raw_payload=*/true);
+ rtp_video_stream_receiver_->StartReceive();
+ RtpHeaderExtensionMap extension_map;
+ extension_map.Register<RtpGenericFrameDescriptorExtension00>(5);
+
+ uint16_t rtp_sequence_number = 1;
+ // Builds and delivers a single-packet frame carrying |wrapped_frame_id|.
+ auto inject_packet = [&](uint16_t wrapped_frame_id) {
+ RtpPacketReceived rtp_packet(&extension_map);
+
+ RtpGenericFrameDescriptor generic_descriptor;
+ generic_descriptor.SetFirstPacketInSubFrame(true);
+ generic_descriptor.SetLastPacketInSubFrame(true);
+ generic_descriptor.SetFrameId(wrapped_frame_id);
+ ASSERT_TRUE(rtp_packet.SetExtension<RtpGenericFrameDescriptorExtension00>(
+ generic_descriptor));
+
+ uint8_t* payload = rtp_packet.SetPayloadSize(data.size());
+ ASSERT_TRUE(payload);
+ memcpy(payload, data.data(), data.size());
+ mock_on_complete_frame_callback_.ClearExpectedBitstream();
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(),
+ data.size());
+ rtp_packet.SetMarker(true);
+ rtp_packet.SetPayloadType(kPayloadType);
+ rtp_packet.SetSequenceNumber(++rtp_sequence_number);
+ rtp_video_stream_receiver_->OnRtpPacket(rtp_packet);
+ };
+
+ int64_t first_picture_id;
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame)
+ .WillOnce([&](video_coding::EncodedFrame* frame) {
+ first_picture_id = frame->id.picture_id;
+ });
+ inject_packet(/*wrapped_frame_id=*/0xffff);
+
+ // 0x0002 wraps past 0xffff: unwrapped difference is 3.
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame)
+ .WillOnce([&](video_coding::EncodedFrame* frame) {
+ EXPECT_EQ(frame->id.picture_id - first_picture_id, 3);
+ });
+ inject_packet(/*wrapped_frame_id=*/0x0002);
+}
+
+// Fixture for tests exercising the Dependency Descriptor header extension:
+// registers a raw-payload codec, the extension at id 7, and starts receiving.
+class RtpVideoStreamReceiver2DependencyDescriptorTest
+ : public RtpVideoStreamReceiver2Test {
+ public:
+ RtpVideoStreamReceiver2DependencyDescriptorTest() {
+ VideoCodec codec;
+ codec.plType = payload_type_;
+ rtp_video_stream_receiver_->AddReceiveCodec(codec, {},
+ /*raw_payload=*/true);
+ extension_map_.Register<RtpDependencyDescriptorExtension>(7);
+ rtp_video_stream_receiver_->StartReceive();
+ }
+
+ // Returns some valid structure for the DependencyDescriptors.
+ // First template of that structure always fit for a key frame.
+ static FrameDependencyStructure CreateStreamStructure() {
+ FrameDependencyStructure stream_structure;
+ stream_structure.num_decode_targets = 1;
+ stream_structure.templates = {
+ FrameDependencyTemplate().Dtis("S"),
+ FrameDependencyTemplate().Dtis("S").FrameDiffs({1}),
+ };
+ return stream_structure;
+ }
+
+ // Delivers a single marker packet carrying |dependency_descriptor| encoded
+ // against |stream_structure|; resets the expected bitstream to the fixed
+ // 5-byte payload.
+ void InjectPacketWith(const FrameDependencyStructure& stream_structure,
+ const DependencyDescriptor& dependency_descriptor) {
+ const std::vector<uint8_t> data = {0, 1, 2, 3, 4};
+ RtpPacketReceived rtp_packet(&extension_map_);
+ ASSERT_TRUE(rtp_packet.SetExtension<RtpDependencyDescriptorExtension>(
+ stream_structure, dependency_descriptor));
+ uint8_t* payload = rtp_packet.SetPayloadSize(data.size());
+ ASSERT_TRUE(payload);
+ memcpy(payload, data.data(), data.size());
+ mock_on_complete_frame_callback_.ClearExpectedBitstream();
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(),
+ data.size());
+ rtp_packet.SetMarker(true);
+ rtp_packet.SetPayloadType(payload_type_);
+ rtp_packet.SetSequenceNumber(++rtp_sequence_number_);
+ rtp_video_stream_receiver_->OnRtpPacket(rtp_packet);
+ }
+
+ private:
+ const int payload_type_ = 123;
+ RtpHeaderExtensionMap extension_map_;
+ uint16_t rtp_sequence_number_ = 321;
+};
+
+// The dependency descriptor's 16-bit frame number must be unwrapped even when
+// delta frames arrive reordered across the wrap point.
+TEST_F(RtpVideoStreamReceiver2DependencyDescriptorTest, UnwrapsFrameId) {
+  FrameDependencyStructure stream_structure = CreateStreamStructure();
+
+  DependencyDescriptor keyframe_descriptor;
+  keyframe_descriptor.attached_structure =
+      std::make_unique<FrameDependencyStructure>(stream_structure);
+  keyframe_descriptor.frame_dependencies = stream_structure.templates[0];
+  keyframe_descriptor.frame_number = 0xfff0;
+  // DependencyDescriptor doesn't support reordering delta frame before
+  // keyframe. Thus feed a key frame first, then test reordered delta frames.
+  int64_t first_picture_id;
+  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame)
+      .WillOnce([&](video_coding::EncodedFrame* frame) {
+        first_picture_id = frame->id.picture_id;
+      });
+  InjectPacketWith(stream_structure, keyframe_descriptor);
+
+  DependencyDescriptor deltaframe1_descriptor;
+  deltaframe1_descriptor.frame_dependencies = stream_structure.templates[1];
+  deltaframe1_descriptor.frame_number = 0xfffe;
+
+  DependencyDescriptor deltaframe2_descriptor;
+  // Bug fix: this line previously re-assigned deltaframe1_descriptor,
+  // leaving deltaframe2_descriptor with default-constructed dependencies.
+  deltaframe2_descriptor.frame_dependencies = stream_structure.templates[1];
+  deltaframe2_descriptor.frame_number = 0x0002;
+
+  // Parser should unwrap frame ids correctly even if packets were reordered by
+  // the network.
+  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame)
+      .WillOnce([&](video_coding::EncodedFrame* frame) {
+        // 0x0002 - 0xfff0
+        EXPECT_EQ(frame->id.picture_id - first_picture_id, 18);
+      })
+      .WillOnce([&](video_coding::EncodedFrame* frame) {
+        // 0xfffe - 0xfff0
+        EXPECT_EQ(frame->id.picture_id - first_picture_id, 14);
+      });
+  InjectPacketWith(stream_structure, deltaframe2_descriptor);
+  InjectPacketWith(stream_structure, deltaframe1_descriptor);
+}
+
+TEST_F(RtpVideoStreamReceiver2DependencyDescriptorTest,
+ DropsLateDeltaFramePacketWithDependencyDescriptorExtension) {
+ FrameDependencyStructure stream_structure1 = CreateStreamStructure();
+ FrameDependencyStructure stream_structure2 = CreateStreamStructure();
+ // Make sure template ids for these two structures do not collide:
+ // adjust structure_id (that is also used as template id offset).
+ stream_structure1.structure_id = 13;
+ stream_structure2.structure_id =
+ stream_structure1.structure_id + stream_structure1.templates.size();
+
+ DependencyDescriptor keyframe1_descriptor;
+ keyframe1_descriptor.attached_structure =
+ std::make_unique<FrameDependencyStructure>(stream_structure1);
+ keyframe1_descriptor.frame_dependencies = stream_structure1.templates[0];
+ keyframe1_descriptor.frame_number = 1;
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame);
+ InjectPacketWith(stream_structure1, keyframe1_descriptor);
+
+ // Pass in 2nd key frame with different structure.
+ DependencyDescriptor keyframe2_descriptor;
+ keyframe2_descriptor.attached_structure =
+ std::make_unique<FrameDependencyStructure>(stream_structure2);
+ keyframe2_descriptor.frame_dependencies = stream_structure2.templates[0];
+ keyframe2_descriptor.frame_number = 3;
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame);
+ InjectPacketWith(stream_structure2, keyframe2_descriptor);
+
+ // Pass in late delta frame that uses structure of the 1st key frame.
+ DependencyDescriptor deltaframe_descriptor;
+ deltaframe_descriptor.frame_dependencies = stream_structure1.templates[0];
+ deltaframe_descriptor.frame_number = 2;
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame).Times(0);
+ InjectPacketWith(stream_structure1, deltaframe_descriptor);
+}
+
+TEST_F(RtpVideoStreamReceiver2DependencyDescriptorTest,
+ DropsLateKeyFramePacketWithDependencyDescriptorExtension) {
+ FrameDependencyStructure stream_structure1 = CreateStreamStructure();
+ FrameDependencyStructure stream_structure2 = CreateStreamStructure();
+ // Make sure template ids for these two structures do not collide:
+ // adjust structure_id (that is also used as template id offset).
+ stream_structure1.structure_id = 13;
+ stream_structure2.structure_id =
+ stream_structure1.structure_id + stream_structure1.templates.size();
+
+ DependencyDescriptor keyframe1_descriptor;
+ keyframe1_descriptor.attached_structure =
+ std::make_unique<FrameDependencyStructure>(stream_structure1);
+ keyframe1_descriptor.frame_dependencies = stream_structure1.templates[0];
+ keyframe1_descriptor.frame_number = 1;
+
+ DependencyDescriptor keyframe2_descriptor;
+ keyframe2_descriptor.attached_structure =
+ std::make_unique<FrameDependencyStructure>(stream_structure2);
+ keyframe2_descriptor.frame_dependencies = stream_structure2.templates[0];
+ keyframe2_descriptor.frame_number = 3;
+
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame)
+ .WillOnce([&](video_coding::EncodedFrame* frame) {
+ EXPECT_EQ(frame->id.picture_id & 0xFFFF, 3);
+ });
+ InjectPacketWith(stream_structure2, keyframe2_descriptor);
+ InjectPacketWith(stream_structure1, keyframe1_descriptor);
+
+ // Pass in delta frame that uses structure of the 2nd key frame. Late key
+ // frame shouldn't block it.
+ DependencyDescriptor deltaframe_descriptor;
+ deltaframe_descriptor.frame_dependencies = stream_structure2.templates[0];
+ deltaframe_descriptor.frame_number = 4;
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame)
+ .WillOnce([&](video_coding::EncodedFrame* frame) {
+ EXPECT_EQ(frame->id.picture_id & 0xFFFF, 4);
+ });
+ InjectPacketWith(stream_structure2, deltaframe_descriptor);
+}
+
+#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
+using RtpVideoStreamReceiver2DeathTest = RtpVideoStreamReceiver2Test;
+TEST_F(RtpVideoStreamReceiver2DeathTest, RepeatedSecondarySinkDisallowed) {
+ MockRtpPacketSink secondary_sink;
+
+ rtp_video_stream_receiver_->AddSecondarySink(&secondary_sink);
+ EXPECT_DEATH(rtp_video_stream_receiver_->AddSecondarySink(&secondary_sink),
+ "");
+
+ // Test tear-down.
+ rtp_video_stream_receiver_->RemoveSecondarySink(&secondary_sink);
+}
+#endif
+
+TEST_F(RtpVideoStreamReceiver2Test, TransformFrame) {
+ rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer =
+ new rtc::RefCountedObject<testing::NiceMock<MockFrameTransformer>>();
+ EXPECT_CALL(*mock_frame_transformer,
+ RegisterTransformedFrameSinkCallback(_, config_.rtp.remote_ssrc));
+ auto receiver = std::make_unique<RtpVideoStreamReceiver2>(
+ TaskQueueBase::Current(), Clock::GetRealTimeClock(), &mock_transport_,
+ nullptr, nullptr, &config_, rtp_receive_statistics_.get(), nullptr,
+ nullptr, process_thread_.get(), &mock_nack_sender_, nullptr,
+ &mock_on_complete_frame_callback_, nullptr, mock_frame_transformer);
+ VideoCodec video_codec;
+ video_codec.plType = kPayloadType;
+ video_codec.codecType = kVideoCodecGeneric;
+ receiver->AddReceiveCodec(video_codec, {}, /*raw_payload=*/false);
+
+ RtpPacketReceived rtp_packet;
+ rtp_packet.SetPayloadType(kPayloadType);
+ rtc::CopyOnWriteBuffer data({1, 2, 3, 4});
+ rtp_packet.SetSequenceNumber(1);
+ RTPVideoHeader video_header =
+ GetGenericVideoHeader(VideoFrameType::kVideoFrameKey);
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(),
+ data.size());
+ EXPECT_CALL(*mock_frame_transformer, Transform(_));
+ receiver->OnReceivedPayloadData(data, rtp_packet, video_header);
+
+ EXPECT_CALL(*mock_frame_transformer,
+ UnregisterTransformedFrameSinkCallback(config_.rtp.remote_ssrc));
+ receiver = nullptr;
+}
+
+// Test default behavior and when playout delay is overridden by field trial.
+const PlayoutDelay kTransmittedPlayoutDelay = {100, 200};
+const PlayoutDelay kForcedPlayoutDelay = {70, 90};
+struct PlayoutDelayOptions {
+ std::string field_trial;
+ PlayoutDelay expected_delay;
+};
+const PlayoutDelayOptions kDefaultBehavior = {
+ /*field_trial=*/"", /*expected_delay=*/kTransmittedPlayoutDelay};
+const PlayoutDelayOptions kOverridePlayoutDelay = {
+ /*field_trial=*/"WebRTC-ForcePlayoutDelay/min_ms:70,max_ms:90/",
+ /*expected_delay=*/kForcedPlayoutDelay};
+
+class RtpVideoStreamReceiver2TestPlayoutDelay
+ : public RtpVideoStreamReceiver2Test,
+ public ::testing::WithParamInterface<PlayoutDelayOptions> {
+ protected:
+ RtpVideoStreamReceiver2TestPlayoutDelay()
+ : RtpVideoStreamReceiver2Test(GetParam().field_trial) {}
+};
+
+INSTANTIATE_TEST_SUITE_P(PlayoutDelay,
+ RtpVideoStreamReceiver2TestPlayoutDelay,
+ Values(kDefaultBehavior, kOverridePlayoutDelay));
+
+TEST_P(RtpVideoStreamReceiver2TestPlayoutDelay, PlayoutDelay) {
+ rtc::CopyOnWriteBuffer payload_data({1, 2, 3, 4});
+ RtpHeaderExtensionMap extension_map;
+ extension_map.Register<PlayoutDelayLimits>(1);
+ RtpPacketToSend packet_to_send(&extension_map);
+ packet_to_send.SetPayloadType(kPayloadType);
+ packet_to_send.SetSequenceNumber(1);
+
+ // Set playout delay on outgoing packet.
+ EXPECT_TRUE(packet_to_send.SetExtension<PlayoutDelayLimits>(
+ kTransmittedPlayoutDelay));
+ uint8_t* payload = packet_to_send.AllocatePayload(payload_data.size());
+ memcpy(payload, payload_data.data(), payload_data.size());
+
+ RtpPacketReceived received_packet(&extension_map);
+ received_packet.Parse(packet_to_send.data(), packet_to_send.size());
+
+ RTPVideoHeader video_header =
+ GetGenericVideoHeader(VideoFrameType::kVideoFrameKey);
+ mock_on_complete_frame_callback_.AppendExpectedBitstream(payload_data.data(),
+ payload_data.size());
+ // Expect the playout delay of encoded frame to be the same as the transmitted
+ // playout delay unless it was overridden by a field trial.
+ EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_))
+ .WillOnce(Invoke([expected_playout_delay = GetParam().expected_delay](
+ video_coding::EncodedFrame* frame) {
+ EXPECT_EQ(frame->EncodedImage().playout_delay_, expected_playout_delay);
+ }));
+ rtp_video_stream_receiver_->OnReceivedPayloadData(
+ received_packet.PayloadBuffer(), received_packet, video_header);
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/video/rtp_video_stream_receiver_frame_transformer_delegate.cc b/chromium/third_party/webrtc/video/rtp_video_stream_receiver_frame_transformer_delegate.cc
index 8324b191367..31eb344d5b6 100644
--- a/chromium/third_party/webrtc/video/rtp_video_stream_receiver_frame_transformer_delegate.cc
+++ b/chromium/third_party/webrtc/video/rtp_video_stream_receiver_frame_transformer_delegate.cc
@@ -17,7 +17,6 @@
#include "modules/rtp_rtcp/source/rtp_descriptor_authentication.h"
#include "rtc_base/task_utils/to_queued_task.h"
#include "rtc_base/thread.h"
-#include "video/rtp_video_stream_receiver.h"
namespace webrtc {
@@ -28,7 +27,9 @@ class TransformableVideoReceiverFrame
TransformableVideoReceiverFrame(
std::unique_ptr<video_coding::RtpFrameObject> frame,
uint32_t ssrc)
- : frame_(std::move(frame)), ssrc_(ssrc) {}
+ : frame_(std::move(frame)),
+ metadata_(frame_->GetRtpVideoHeader()),
+ ssrc_(ssrc) {}
~TransformableVideoReceiverFrame() override = default;
// Implements TransformableVideoFrameInterface.
@@ -52,19 +53,22 @@ class TransformableVideoReceiverFrame
return RtpDescriptorAuthentication(frame_->GetRtpVideoHeader());
}
+ const VideoFrameMetadata& GetMetadata() const override { return metadata_; }
+
std::unique_ptr<video_coding::RtpFrameObject> ExtractFrame() && {
return std::move(frame_);
}
private:
std::unique_ptr<video_coding::RtpFrameObject> frame_;
+ const VideoFrameMetadata metadata_;
const uint32_t ssrc_;
};
} // namespace
RtpVideoStreamReceiverFrameTransformerDelegate::
RtpVideoStreamReceiverFrameTransformerDelegate(
- RtpVideoStreamReceiver* receiver,
+ RtpVideoFrameReceiver* receiver,
rtc::scoped_refptr<FrameTransformerInterface> frame_transformer,
rtc::Thread* network_thread,
uint32_t ssrc)
diff --git a/chromium/third_party/webrtc/video/rtp_video_stream_receiver_frame_transformer_delegate.h b/chromium/third_party/webrtc/video/rtp_video_stream_receiver_frame_transformer_delegate.h
index 0a106c956ac..e687e7f47b7 100644
--- a/chromium/third_party/webrtc/video/rtp_video_stream_receiver_frame_transformer_delegate.h
+++ b/chromium/third_party/webrtc/video/rtp_video_stream_receiver_frame_transformer_delegate.h
@@ -20,7 +20,16 @@
namespace webrtc {
-class RtpVideoStreamReceiver;
+// Called back by RtpVideoStreamReceiverFrameTransformerDelegate on the network
+// thread after transformation.
+class RtpVideoFrameReceiver {
+ public:
+ virtual void ManageFrame(
+ std::unique_ptr<video_coding::RtpFrameObject> frame) = 0;
+
+ protected:
+ virtual ~RtpVideoFrameReceiver() = default;
+};
// Delegates calls to FrameTransformerInterface to transform frames, and to
// RtpVideoStreamReceiver to manage transformed frames on the |network_thread_|.
@@ -28,7 +37,7 @@ class RtpVideoStreamReceiverFrameTransformerDelegate
: public TransformedFrameCallback {
public:
RtpVideoStreamReceiverFrameTransformerDelegate(
- RtpVideoStreamReceiver* receiver,
+ RtpVideoFrameReceiver* receiver,
rtc::scoped_refptr<FrameTransformerInterface> frame_transformer,
rtc::Thread* network_thread,
uint32_t ssrc);
@@ -44,7 +53,7 @@ class RtpVideoStreamReceiverFrameTransformerDelegate
void OnTransformedFrame(
std::unique_ptr<TransformableFrameInterface> frame) override;
- // Delegates the call to RtpVideoReceiver::ManageFrame on the
+ // Delegates the call to RtpVideoFrameReceiver::ManageFrame on the
// |network_thread_|.
void ManageFrame(std::unique_ptr<TransformableFrameInterface> frame);
@@ -53,7 +62,7 @@ class RtpVideoStreamReceiverFrameTransformerDelegate
private:
SequenceChecker network_sequence_checker_;
- RtpVideoStreamReceiver* receiver_ RTC_GUARDED_BY(network_sequence_checker_);
+ RtpVideoFrameReceiver* receiver_ RTC_GUARDED_BY(network_sequence_checker_);
rtc::scoped_refptr<FrameTransformerInterface> frame_transformer_
RTC_GUARDED_BY(network_sequence_checker_);
rtc::Thread* const network_thread_;
diff --git a/chromium/third_party/webrtc/video/rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc b/chromium/third_party/webrtc/video/rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc
index c481f502a45..a411ca6e9ab 100644
--- a/chromium/third_party/webrtc/video/rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc
+++ b/chromium/third_party/webrtc/video/rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc
@@ -15,95 +15,42 @@
#include <utility>
#include <vector>
+#include "absl/memory/memory.h"
#include "api/call/transport.h"
#include "call/video_receive_stream.h"
#include "modules/rtp_rtcp/source/rtp_descriptor_authentication.h"
#include "modules/utility/include/process_thread.h"
#include "rtc_base/event.h"
+#include "rtc_base/ref_counted_object.h"
#include "rtc_base/task_utils/to_queued_task.h"
#include "test/gmock.h"
#include "test/gtest.h"
#include "test/mock_frame_transformer.h"
-#include "video/rtp_video_stream_receiver.h"
namespace webrtc {
namespace {
using ::testing::_;
+using ::testing::ElementsAre;
using ::testing::NiceMock;
using ::testing::SaveArg;
-std::unique_ptr<video_coding::RtpFrameObject> CreateRtpFrameObject() {
+std::unique_ptr<video_coding::RtpFrameObject> CreateRtpFrameObject(
+ const RTPVideoHeader& video_header) {
return std::make_unique<video_coding::RtpFrameObject>(
- 0, 0, true, 0, 0, 0, 0, 0, VideoSendTiming(), 0, kVideoCodecGeneric,
- kVideoRotation_0, VideoContentType::UNSPECIFIED, RTPVideoHeader(),
+ 0, 0, true, 0, 0, 0, 0, 0, VideoSendTiming(), 0, video_header.codec,
+ kVideoRotation_0, VideoContentType::UNSPECIFIED, video_header,
absl::nullopt, RtpPacketInfos(), EncodedImageBuffer::Create(0));
}
-class FakeTransport : public Transport {
- public:
- bool SendRtp(const uint8_t* packet,
- size_t length,
- const PacketOptions& options) {
- return true;
- }
- bool SendRtcp(const uint8_t* packet, size_t length) { return true; }
-};
-
-class FakeNackSender : public NackSender {
- public:
- void SendNack(const std::vector<uint16_t>& sequence_numbers) {}
- void SendNack(const std::vector<uint16_t>& sequence_numbers,
- bool buffering_allowed) {}
-};
-
-class FakeOnCompleteFrameCallback
- : public video_coding::OnCompleteFrameCallback {
- public:
- void OnCompleteFrame(
- std::unique_ptr<video_coding::EncodedFrame> frame) override {}
-};
-
-class TestRtpVideoStreamReceiverInitializer {
- public:
- TestRtpVideoStreamReceiverInitializer()
- : test_config_(nullptr),
- test_process_thread_(ProcessThread::Create("TestThread")) {
- test_config_.rtp.remote_ssrc = 1111;
- test_config_.rtp.local_ssrc = 2222;
- test_rtp_receive_statistics_ =
- ReceiveStatistics::Create(Clock::GetRealTimeClock());
- }
-
- protected:
- VideoReceiveStream::Config test_config_;
- FakeTransport fake_transport_;
- FakeNackSender fake_nack_sender_;
- FakeOnCompleteFrameCallback fake_on_complete_frame_callback_;
- std::unique_ptr<ProcessThread> test_process_thread_;
- std::unique_ptr<ReceiveStatistics> test_rtp_receive_statistics_;
-};
+std::unique_ptr<video_coding::RtpFrameObject> CreateRtpFrameObject() {
+ return CreateRtpFrameObject(RTPVideoHeader());
+}
-class TestRtpVideoStreamReceiver : public TestRtpVideoStreamReceiverInitializer,
- public RtpVideoStreamReceiver {
+class TestRtpVideoFrameReceiver : public RtpVideoFrameReceiver {
public:
- TestRtpVideoStreamReceiver()
- : TestRtpVideoStreamReceiverInitializer(),
- RtpVideoStreamReceiver(Clock::GetRealTimeClock(),
- &fake_transport_,
- nullptr,
- nullptr,
- &test_config_,
- test_rtp_receive_statistics_.get(),
- nullptr,
- nullptr,
- test_process_thread_.get(),
- &fake_nack_sender_,
- nullptr,
- &fake_on_complete_frame_callback_,
- nullptr,
- nullptr) {}
- ~TestRtpVideoStreamReceiver() override = default;
+ TestRtpVideoFrameReceiver() {}
+ ~TestRtpVideoFrameReceiver() override = default;
MOCK_METHOD(void,
ManageFrame,
@@ -113,7 +60,7 @@ class TestRtpVideoStreamReceiver : public TestRtpVideoStreamReceiverInitializer,
TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest,
RegisterTransformedFrameCallbackSinkOnInit) {
- TestRtpVideoStreamReceiver receiver;
+ TestRtpVideoFrameReceiver receiver;
rtc::scoped_refptr<MockFrameTransformer> frame_transformer(
new rtc::RefCountedObject<MockFrameTransformer>());
rtc::scoped_refptr<RtpVideoStreamReceiverFrameTransformerDelegate> delegate(
@@ -127,7 +74,7 @@ TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest,
TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest,
UnregisterTransformedFrameSinkCallbackOnReset) {
- TestRtpVideoStreamReceiver receiver;
+ TestRtpVideoFrameReceiver receiver;
rtc::scoped_refptr<MockFrameTransformer> frame_transformer(
new rtc::RefCountedObject<MockFrameTransformer>());
rtc::scoped_refptr<RtpVideoStreamReceiverFrameTransformerDelegate> delegate(
@@ -139,7 +86,7 @@ TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest,
}
TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest, TransformFrame) {
- TestRtpVideoStreamReceiver receiver;
+ TestRtpVideoFrameReceiver receiver;
rtc::scoped_refptr<MockFrameTransformer> frame_transformer(
new rtc::RefCountedObject<testing::NiceMock<MockFrameTransformer>>());
rtc::scoped_refptr<RtpVideoStreamReceiverFrameTransformerDelegate> delegate(
@@ -153,7 +100,7 @@ TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest, TransformFrame) {
TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest,
ManageFrameOnTransformedFrame) {
- TestRtpVideoStreamReceiver receiver;
+ TestRtpVideoFrameReceiver receiver;
rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer(
new rtc::RefCountedObject<NiceMock<MockFrameTransformer>>());
rtc::scoped_refptr<RtpVideoStreamReceiverFrameTransformerDelegate> delegate =
@@ -177,5 +124,48 @@ TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest,
rtc::ThreadManager::ProcessAllMessageQueuesForTesting();
}
+TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest,
+ TransformableFrameMetadataHasCorrectValue) {
+ TestRtpVideoFrameReceiver receiver;
+ rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer =
+ new rtc::RefCountedObject<NiceMock<MockFrameTransformer>>();
+ rtc::scoped_refptr<RtpVideoStreamReceiverFrameTransformerDelegate> delegate =
+ new rtc::RefCountedObject<RtpVideoStreamReceiverFrameTransformerDelegate>(
+ &receiver, mock_frame_transformer, rtc::Thread::Current(), 1111);
+ delegate->Init();
+ RTPVideoHeader video_header;
+ video_header.width = 1280u;
+ video_header.height = 720u;
+ RTPVideoHeader::GenericDescriptorInfo& generic =
+ video_header.generic.emplace();
+ generic.frame_id = 10;
+ generic.temporal_index = 3;
+ generic.spatial_index = 2;
+ generic.decode_target_indications = {DecodeTargetIndication::kSwitch};
+ generic.dependencies = {5};
+
+ // Check that the transformable frame passed to the frame transformer has the
+ // correct metadata.
+ EXPECT_CALL(*mock_frame_transformer, Transform)
+ .WillOnce(
+ [](std::unique_ptr<TransformableFrameInterface> transformable_frame) {
+ auto frame =
+ absl::WrapUnique(static_cast<TransformableVideoFrameInterface*>(
+ transformable_frame.release()));
+ ASSERT_TRUE(frame);
+ auto metadata = frame->GetMetadata();
+ EXPECT_EQ(metadata.GetWidth(), 1280u);
+ EXPECT_EQ(metadata.GetHeight(), 720u);
+ EXPECT_EQ(metadata.GetFrameId(), 10);
+ EXPECT_EQ(metadata.GetTemporalIndex(), 3);
+ EXPECT_EQ(metadata.GetSpatialIndex(), 2);
+ EXPECT_THAT(metadata.GetFrameDependencies(), ElementsAre(5));
+ EXPECT_THAT(metadata.GetDecodeTargetIndications(),
+ ElementsAre(DecodeTargetIndication::kSwitch));
+ });
+ // The delegate creates a transformable frame from the RtpFrameObject.
+ delegate->TransformFrame(CreateRtpFrameObject(video_header));
+}
+
} // namespace
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/video/rtp_video_stream_receiver_unittest.cc b/chromium/third_party/webrtc/video/rtp_video_stream_receiver_unittest.cc
index 255de54e8be..20d6ae88ad9 100644
--- a/chromium/third_party/webrtc/video/rtp_video_stream_receiver_unittest.cc
+++ b/chromium/third_party/webrtc/video/rtp_video_stream_receiver_unittest.cc
@@ -73,37 +73,45 @@ RTPVideoHeader GetGenericVideoHeader(VideoFrameType frame_type) {
class MockTransport : public Transport {
public:
- MOCK_METHOD3(SendRtp,
- bool(const uint8_t* packet,
- size_t length,
- const PacketOptions& options));
- MOCK_METHOD2(SendRtcp, bool(const uint8_t* packet, size_t length));
+ MOCK_METHOD(bool,
+ SendRtp,
+ (const uint8_t*, size_t length, const PacketOptions& options),
+ (override));
+ MOCK_METHOD(bool, SendRtcp, (const uint8_t*, size_t length), (override));
};
class MockNackSender : public NackSender {
public:
- MOCK_METHOD1(SendNack, void(const std::vector<uint16_t>& sequence_numbers));
- MOCK_METHOD2(SendNack,
- void(const std::vector<uint16_t>& sequence_numbers,
- bool buffering_allowed));
+ MOCK_METHOD(void,
+ SendNack,
+ (const std::vector<uint16_t>& sequence_numbers,
+ bool buffering_allowed),
+ (override));
};
class MockKeyFrameRequestSender : public KeyFrameRequestSender {
public:
- MOCK_METHOD0(RequestKeyFrame, void());
+ MOCK_METHOD(void, RequestKeyFrame, (), (override));
};
class MockOnCompleteFrameCallback
: public video_coding::OnCompleteFrameCallback {
public:
- MOCK_METHOD1(DoOnCompleteFrame, void(video_coding::EncodedFrame* frame));
- MOCK_METHOD1(DoOnCompleteFrameFailNullptr,
- void(video_coding::EncodedFrame* frame));
- MOCK_METHOD1(DoOnCompleteFrameFailLength,
- void(video_coding::EncodedFrame* frame));
- MOCK_METHOD1(DoOnCompleteFrameFailBitstream,
- void(video_coding::EncodedFrame* frame));
- void OnCompleteFrame(std::unique_ptr<video_coding::EncodedFrame> frame) {
+ MOCK_METHOD(void, DoOnCompleteFrame, (video_coding::EncodedFrame*), ());
+ MOCK_METHOD(void,
+ DoOnCompleteFrameFailNullptr,
+ (video_coding::EncodedFrame*),
+ ());
+ MOCK_METHOD(void,
+ DoOnCompleteFrameFailLength,
+ (video_coding::EncodedFrame*),
+ ());
+ MOCK_METHOD(void,
+ DoOnCompleteFrameFailBitstream,
+ (video_coding::EncodedFrame*),
+ ());
+ void OnCompleteFrame(
+ std::unique_ptr<video_coding::EncodedFrame> frame) override {
if (!frame) {
DoOnCompleteFrameFailNullptr(nullptr);
return;
@@ -132,7 +140,7 @@ class MockOnCompleteFrameCallback
class MockRtpPacketSink : public RtpPacketSinkInterface {
public:
- MOCK_METHOD1(OnRtpPacket, void(const RtpPacketReceived&));
+ MOCK_METHOD(void, OnRtpPacket, (const RtpPacketReceived&), (override));
};
constexpr uint32_t kSsrc = 111;
@@ -955,8 +963,8 @@ class RtpVideoStreamReceiverDependencyDescriptorTest
FrameDependencyStructure stream_structure;
stream_structure.num_decode_targets = 1;
stream_structure.templates = {
- GenericFrameInfo::Builder().Dtis("S").Build(),
- GenericFrameInfo::Builder().Dtis("S").Fdiffs({1}).Build(),
+ FrameDependencyTemplate().Dtis("S"),
+ FrameDependencyTemplate().Dtis("S").FrameDiffs({1}),
};
return stream_structure;
}
@@ -1102,7 +1110,8 @@ TEST_F(RtpVideoStreamReceiverDependencyDescriptorTest,
}
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
-TEST_F(RtpVideoStreamReceiverTest, RepeatedSecondarySinkDisallowed) {
+using RtpVideoStreamReceiverDeathTest = RtpVideoStreamReceiverTest;
+TEST_F(RtpVideoStreamReceiverDeathTest, RepeatedSecondarySinkDisallowed) {
MockRtpPacketSink secondary_sink;
rtp_video_stream_receiver_->AddSecondarySink(&secondary_sink);
diff --git a/chromium/third_party/webrtc/video/send_statistics_proxy.cc b/chromium/third_party/webrtc/video/send_statistics_proxy.cc
index f8d768f9d2d..b5bcbe6bf1d 100644
--- a/chromium/third_party/webrtc/video/send_statistics_proxy.cc
+++ b/chromium/third_party/webrtc/video/send_statistics_proxy.cc
@@ -717,9 +717,11 @@ void SendStatisticsProxy::OnSuspendChange(bool is_suspended) {
uma_container_->quality_adapt_timer_.Stop(now_ms);
} else {
// Start adaptation stats if scaling is enabled.
- if (adaptations_.MaskedCpuCounts().resolution_adaptations.has_value())
+ if (adaptation_limitations_.MaskedCpuCounts()
+ .resolution_adaptations.has_value())
uma_container_->cpu_adapt_timer_.Start(now_ms);
- if (adaptations_.MaskedQualityCounts().resolution_adaptations.has_value())
+ if (adaptation_limitations_.MaskedQualityCounts()
+ .resolution_adaptations.has_value())
uma_container_->quality_adapt_timer_.Start(now_ms);
// Stop pause explicitly for stats that may be zero/not updated for some
// time.
@@ -1021,7 +1023,7 @@ void SendStatisticsProxy::OnSendEncodedImage(
}
absl::optional<int> downscales =
- adaptations_.MaskedQualityCounts().resolution_adaptations;
+ adaptation_limitations_.MaskedQualityCounts().resolution_adaptations;
stats_.bw_limited_resolution |=
(downscales.has_value() && downscales.value() > 0);
@@ -1056,7 +1058,8 @@ void SendStatisticsProxy::OnIncomingFrame(int width, int height) {
uma_container_->input_fps_counter_.Add(1);
uma_container_->input_width_counter_.Add(width);
uma_container_->input_height_counter_.Add(height);
- if (adaptations_.MaskedCpuCounts().resolution_adaptations.has_value()) {
+ if (adaptation_limitations_.MaskedCpuCounts()
+ .resolution_adaptations.has_value()) {
uma_container_->cpu_limited_frame_counter_.Add(
stats_.cpu_limited_resolution);
}
@@ -1090,8 +1093,8 @@ void SendStatisticsProxy::OnFrameDropped(DropReason reason) {
void SendStatisticsProxy::ClearAdaptationStats() {
rtc::CritScope lock(&crit_);
- adaptations_.set_cpu_counts(VideoAdaptationCounters());
- adaptations_.set_quality_counts(VideoAdaptationCounters());
+ adaptation_limitations_.set_cpu_counts(VideoAdaptationCounters());
+ adaptation_limitations_.set_quality_counts(VideoAdaptationCounters());
UpdateAdaptationStats();
}
@@ -1099,10 +1102,10 @@ void SendStatisticsProxy::UpdateAdaptationSettings(
VideoStreamEncoderObserver::AdaptationSettings cpu_settings,
VideoStreamEncoderObserver::AdaptationSettings quality_settings) {
rtc::CritScope lock(&crit_);
- adaptations_.UpdateMaskingSettings(cpu_settings, quality_settings);
- SetAdaptTimer(adaptations_.MaskedCpuCounts(),
+ adaptation_limitations_.UpdateMaskingSettings(cpu_settings, quality_settings);
+ SetAdaptTimer(adaptation_limitations_.MaskedCpuCounts(),
&uma_container_->cpu_adapt_timer_);
- SetAdaptTimer(adaptations_.MaskedQualityCounts(),
+ SetAdaptTimer(adaptation_limitations_.MaskedQualityCounts(),
&uma_container_->quality_adapt_timer_);
UpdateAdaptationStats();
}
@@ -1113,9 +1116,10 @@ void SendStatisticsProxy::OnAdaptationChanged(
const VideoAdaptationCounters& quality_counters) {
rtc::CritScope lock(&crit_);
- MaskedAdaptationCounts receiver = adaptations_.MaskedQualityCounts();
- adaptations_.set_cpu_counts(cpu_counters);
- adaptations_.set_quality_counts(quality_counters);
+ MaskedAdaptationCounts receiver =
+ adaptation_limitations_.MaskedQualityCounts();
+ adaptation_limitations_.set_cpu_counts(cpu_counters);
+ adaptation_limitations_.set_quality_counts(quality_counters);
switch (reason) {
case VideoAdaptationReason::kCpu:
++stats_.number_of_cpu_adapt_changes;
@@ -1123,7 +1127,7 @@ void SendStatisticsProxy::OnAdaptationChanged(
case VideoAdaptationReason::kQuality:
TryUpdateInitialQualityResolutionAdaptUp(
receiver.resolution_adaptations,
- adaptations_.MaskedQualityCounts().resolution_adaptations);
+ adaptation_limitations_.MaskedQualityCounts().resolution_adaptations);
++stats_.number_of_quality_adapt_changes;
break;
}
@@ -1131,8 +1135,8 @@ void SendStatisticsProxy::OnAdaptationChanged(
}
void SendStatisticsProxy::UpdateAdaptationStats() {
- auto cpu_counts = adaptations_.MaskedCpuCounts();
- auto quality_counts = adaptations_.MaskedQualityCounts();
+ auto cpu_counts = adaptation_limitations_.MaskedCpuCounts();
+ auto quality_counts = adaptation_limitations_.MaskedQualityCounts();
bool is_cpu_limited = cpu_counts.resolution_adaptations > 0 ||
cpu_counts.num_framerate_reductions > 0;
@@ -1459,6 +1463,16 @@ void SendStatisticsProxy::Adaptations::set_quality_counts(
const VideoAdaptationCounters& quality_counts) {
quality_counts_ = quality_counts;
}
+
+VideoAdaptationCounters SendStatisticsProxy::Adaptations::cpu_counts() const {
+ return cpu_counts_;
+}
+
+VideoAdaptationCounters SendStatisticsProxy::Adaptations::quality_counts()
+ const {
+ return quality_counts_;
+}
+
void SendStatisticsProxy::Adaptations::UpdateMaskingSettings(
VideoStreamEncoderObserver::AdaptationSettings cpu_settings,
VideoStreamEncoderObserver::AdaptationSettings quality_settings) {
diff --git a/chromium/third_party/webrtc/video/send_statistics_proxy.h b/chromium/third_party/webrtc/video/send_statistics_proxy.h
index 1d2fd21cfa0..ff3b786be93 100644
--- a/chromium/third_party/webrtc/video/send_statistics_proxy.h
+++ b/chromium/third_party/webrtc/video/send_statistics_proxy.h
@@ -240,6 +240,9 @@ class SendStatisticsProxy : public VideoStreamEncoderObserver,
void set_cpu_counts(const VideoAdaptationCounters& cpu_counts);
void set_quality_counts(const VideoAdaptationCounters& quality_counts);
+ VideoAdaptationCounters cpu_counts() const;
+ VideoAdaptationCounters quality_counts() const;
+
void UpdateMaskingSettings(AdaptationSettings cpu_settings,
AdaptationSettings quality_settings);
@@ -299,7 +302,7 @@ class SendStatisticsProxy : public VideoStreamEncoderObserver,
bool bw_limited_layers_ RTC_GUARDED_BY(crit_);
// Indicastes if the encoder internally downscales input image.
bool internal_encoder_scaler_ RTC_GUARDED_BY(crit_);
- Adaptations adaptations_ RTC_GUARDED_BY(crit_);
+ Adaptations adaptation_limitations_ RTC_GUARDED_BY(crit_);
struct EncoderChangeEvent {
std::string previous_encoder_implementation;
diff --git a/chromium/third_party/webrtc/video/test/mock_video_stream_encoder.h b/chromium/third_party/webrtc/video/test/mock_video_stream_encoder.h
index 8e429681b8d..c9efc765985 100644
--- a/chromium/third_party/webrtc/video/test/mock_video_stream_encoder.h
+++ b/chromium/third_party/webrtc/video/test/mock_video_stream_encoder.h
@@ -10,6 +10,8 @@
#ifndef VIDEO_TEST_MOCK_VIDEO_STREAM_ENCODER_H_
#define VIDEO_TEST_MOCK_VIDEO_STREAM_ENCODER_H_
+#include <vector>
+
#include "api/video/video_stream_encoder_interface.h"
#include "test/gmock.h"
@@ -17,22 +19,44 @@ namespace webrtc {
class MockVideoStreamEncoder : public VideoStreamEncoderInterface {
public:
- MOCK_METHOD2(SetSource,
- void(rtc::VideoSourceInterface<VideoFrame>*,
- const DegradationPreference&));
- MOCK_METHOD2(SetSink, void(EncoderSink*, bool));
- MOCK_METHOD1(SetStartBitrate, void(int));
- MOCK_METHOD0(SendKeyFrame, void());
- MOCK_METHOD1(OnLossNotification, void(const VideoEncoder::LossNotification&));
- MOCK_METHOD6(OnBitrateUpdated,
- void(DataRate, DataRate, DataRate, uint8_t, int64_t, double));
- MOCK_METHOD1(OnFrame, void(const VideoFrame&));
- MOCK_METHOD1(SetBitrateAllocationObserver,
- void(VideoBitrateAllocationObserver*));
- MOCK_METHOD1(SetFecControllerOverride, void(FecControllerOverride*));
- MOCK_METHOD0(Stop, void());
+ MOCK_METHOD(void,
+ AddAdaptationResource,
+ (rtc::scoped_refptr<Resource>),
+ (override));
+ MOCK_METHOD(std::vector<rtc::scoped_refptr<Resource>>,
+ GetAdaptationResources,
+ (),
+ (override));
+ MOCK_METHOD(void,
+ SetSource,
+ (rtc::VideoSourceInterface<VideoFrame>*,
+ const DegradationPreference&),
+ (override));
+ MOCK_METHOD(void, SetSink, (EncoderSink*, bool), (override));
+ MOCK_METHOD(void, SetStartBitrate, (int), (override));
+ MOCK_METHOD(void, SendKeyFrame, (), (override));
+ MOCK_METHOD(void,
+ OnLossNotification,
+ (const VideoEncoder::LossNotification&),
+ (override));
+ MOCK_METHOD(void,
+ OnBitrateUpdated,
+ (DataRate, DataRate, DataRate, uint8_t, int64_t, double),
+ (override));
+ MOCK_METHOD(void, OnFrame, (const VideoFrame&), (override));
+ MOCK_METHOD(void,
+ SetBitrateAllocationObserver,
+ (VideoBitrateAllocationObserver*),
+ (override));
+ MOCK_METHOD(void,
+ SetFecControllerOverride,
+ (FecControllerOverride*),
+ (override));
+ MOCK_METHOD(void, Stop, (), (override));
- MOCK_METHOD2(MockedConfigureEncoder, void(const VideoEncoderConfig&, size_t));
+ MOCK_METHOD(void,
+ MockedConfigureEncoder,
+ (const VideoEncoderConfig&, size_t));
// gtest generates implicit copy which is not allowed on VideoEncoderConfig,
// so we can't mock ConfigureEncoder directly.
void ConfigureEncoder(VideoEncoderConfig config,
diff --git a/chromium/third_party/webrtc/video/video_quality_test.cc b/chromium/third_party/webrtc/video/video_quality_test.cc
index 94ce268fa92..88270b4b2e3 100644
--- a/chromium/third_party/webrtc/video/video_quality_test.cc
+++ b/chromium/third_party/webrtc/video/video_quality_test.cc
@@ -815,11 +815,6 @@ void VideoQualityTest::SetupVideo(Transport* send_transport,
}
if (params_.call.generic_descriptor) {
- // The generic descriptor is currently behind a field trial, so it needs
- // to be set for this flag to have any effect.
- // TODO(philipel): Remove this check when the experiment is removed.
- RTC_CHECK(field_trial::IsEnabled("WebRTC-GenericDescriptor"));
-
video_send_configs_[video_idx].rtp.extensions.emplace_back(
RtpExtension::kGenericFrameDescriptorUri00,
kGenericFrameDescriptorExtensionId00);
diff --git a/chromium/third_party/webrtc/video/video_receive_stream.h b/chromium/third_party/webrtc/video/video_receive_stream.h
index c1ebf2b600e..8a5136a4b1d 100644
--- a/chromium/third_party/webrtc/video/video_receive_stream.h
+++ b/chromium/third_party/webrtc/video/video_receive_stream.h
@@ -15,7 +15,6 @@
#include <vector>
#include "api/task_queue/task_queue_factory.h"
-#include "api/transport/media/media_transport_interface.h"
#include "api/video/recordable_encoded_frame.h"
#include "call/rtp_packet_sink_interface.h"
#include "call/syncable.h"
diff --git a/chromium/third_party/webrtc/video/video_receive_stream2.cc b/chromium/third_party/webrtc/video/video_receive_stream2.cc
index b1b482da298..9413b72354f 100644
--- a/chromium/third_party/webrtc/video/video_receive_stream2.cc
+++ b/chromium/third_party/webrtc/video/video_receive_stream2.cc
@@ -49,7 +49,7 @@
#include "system_wrappers/include/field_trial.h"
#include "video/call_stats2.h"
#include "video/frame_dumping_decoder.h"
-#include "video/receive_statistics_proxy.h"
+#include "video/receive_statistics_proxy2.h"
namespace webrtc {
@@ -201,7 +201,8 @@ VideoReceiveStream2::VideoReceiveStream2(
rtp_receive_statistics_(ReceiveStatistics::Create(clock_)),
timing_(timing),
video_receiver_(clock_, timing_.get()),
- rtp_video_stream_receiver_(clock_,
+ rtp_video_stream_receiver_(worker_thread_,
+ clock_,
&transport_adapter_,
call_stats->AsRtcpRttStats(),
packet_router,
@@ -232,7 +233,6 @@ VideoReceiveStream2::VideoReceiveStream2(
RTC_DCHECK(call_stats_);
module_process_sequence_checker_.Detach();
- network_sequence_checker_.Detach();
RTC_DCHECK(!config_.decoders.empty());
std::set<int> decoder_payload_types;
@@ -472,8 +472,6 @@ bool VideoReceiveStream2::SetBaseMinimumPlayoutDelayMs(int delay_ms) {
return false;
}
- // TODO(bugs.webrtc.org/11489): Consider posting to worker.
- rtc::CritScope cs(&playout_delay_lock_);
base_minimum_playout_delay_ms_ = delay_ms;
UpdatePlayoutDelays();
return true;
@@ -481,8 +479,6 @@ bool VideoReceiveStream2::SetBaseMinimumPlayoutDelayMs(int delay_ms) {
int VideoReceiveStream2::GetBaseMinimumPlayoutDelayMs() const {
RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
-
- rtc::CritScope cs(&playout_delay_lock_);
return base_minimum_playout_delay_ms_;
}
@@ -522,18 +518,26 @@ void VideoReceiveStream2::SetDepacketizerToDecoderFrameTransformer(
void VideoReceiveStream2::SendNack(
const std::vector<uint16_t>& sequence_numbers,
bool buffering_allowed) {
+ RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
RTC_DCHECK(buffering_allowed);
rtp_video_stream_receiver_.RequestPacketRetransmit(sequence_numbers);
}
void VideoReceiveStream2::RequestKeyFrame(int64_t timestamp_ms) {
+ // Running on worker_sequence_checker_.
+ // Called from RtpVideoStreamReceiver (rtp_video_stream_receiver_ is
+ // ultimately responsible).
rtp_video_stream_receiver_.RequestKeyFrame();
- last_keyframe_request_ms_ = timestamp_ms;
+ decode_queue_.PostTask([this, timestamp_ms]() {
+ RTC_DCHECK_RUN_ON(&decode_queue_);
+ last_keyframe_request_ms_ = timestamp_ms;
+ });
}
void VideoReceiveStream2::OnCompleteFrame(
std::unique_ptr<video_coding::EncodedFrame> frame) {
- RTC_DCHECK_RUN_ON(&network_sequence_checker_);
+ RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
+
// TODO(https://bugs.webrtc.org/9974): Consider removing this workaround.
int64_t time_now_ms = clock_->TimeInMilliseconds();
if (last_complete_frame_time_ms_ > 0 &&
@@ -542,19 +546,13 @@ void VideoReceiveStream2::OnCompleteFrame(
}
last_complete_frame_time_ms_ = time_now_ms;
- // TODO(bugs.webrtc.org/11489): We grab the playout_delay_lock_ lock
- // potentially twice. Consider checking both min/max and posting to worker if
- // there's a change. If we always update playout delays on the worker, we
- // don't need a lock.
const PlayoutDelay& playout_delay = frame->EncodedImage().playout_delay_;
if (playout_delay.min_ms >= 0) {
- rtc::CritScope cs(&playout_delay_lock_);
frame_minimum_playout_delay_ms_ = playout_delay.min_ms;
UpdatePlayoutDelays();
}
if (playout_delay.max_ms >= 0) {
- rtc::CritScope cs(&playout_delay_lock_);
frame_maximum_playout_delay_ms_ = playout_delay.max_ms;
UpdatePlayoutDelays();
}
@@ -602,22 +600,20 @@ void VideoReceiveStream2::SetEstimatedPlayoutNtpTimestampMs(
void VideoReceiveStream2::SetMinimumPlayoutDelay(int delay_ms) {
RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
- // TODO(bugs.webrtc.org/11489): See if we can't get rid of the
- // |playout_delay_lock_|
- rtc::CritScope cs(&playout_delay_lock_);
syncable_minimum_playout_delay_ms_ = delay_ms;
UpdatePlayoutDelays();
}
-int64_t VideoReceiveStream2::GetWaitMs() const {
+int64_t VideoReceiveStream2::GetMaxWaitMs() const {
return keyframe_required_ ? max_wait_for_keyframe_ms_
: max_wait_for_frame_ms_;
}
void VideoReceiveStream2::StartNextDecode() {
+ // Running on the decode thread.
TRACE_EVENT0("webrtc", "VideoReceiveStream2::StartNextDecode");
frame_buffer_->NextFrame(
- GetWaitMs(), keyframe_required_, &decode_queue_,
+ GetMaxWaitMs(), keyframe_required_, &decode_queue_,
/* encoded frame handler */
[this](std::unique_ptr<EncodedFrame> frame, ReturnReason res) {
RTC_DCHECK_EQ(frame == nullptr, res == ReturnReason::kTimeout);
@@ -629,7 +625,12 @@ void VideoReceiveStream2::StartNextDecode() {
if (frame) {
HandleEncodedFrame(std::move(frame));
} else {
- HandleFrameBufferTimeout();
+ int64_t now_ms = clock_->TimeInMilliseconds();
+ worker_thread_->PostTask(ToQueuedTask(
+ task_safety_, [this, now_ms, wait_ms = GetMaxWaitMs()]() {
+ RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
+ HandleFrameBufferTimeout(now_ms, wait_ms);
+ }));
}
StartNextDecode();
});
@@ -649,25 +650,48 @@ void VideoReceiveStream2::HandleEncodedFrame(
}
}
stats_proxy_.OnPreDecode(frame->CodecSpecific()->codecType, qp);
- HandleKeyFrameGeneration(frame->FrameType() == VideoFrameType::kVideoFrameKey,
- now_ms);
+
+ bool force_request_key_frame = false;
+ int64_t decoded_frame_picture_id = -1;
+
+ const bool keyframe_request_is_due =
+ now_ms >= (last_keyframe_request_ms_ + max_wait_for_keyframe_ms_);
+
int decode_result = video_receiver_.Decode(frame.get());
if (decode_result == WEBRTC_VIDEO_CODEC_OK ||
decode_result == WEBRTC_VIDEO_CODEC_OK_REQUEST_KEYFRAME) {
keyframe_required_ = false;
frame_decoded_ = true;
- rtp_video_stream_receiver_.FrameDecoded(frame->id.picture_id);
+
+ decoded_frame_picture_id = frame->id.picture_id;
if (decode_result == WEBRTC_VIDEO_CODEC_OK_REQUEST_KEYFRAME)
- RequestKeyFrame(now_ms);
+ force_request_key_frame = true;
} else if (!frame_decoded_ || !keyframe_required_ ||
- (last_keyframe_request_ms_ + max_wait_for_keyframe_ms_ < now_ms)) {
+ keyframe_request_is_due) {
keyframe_required_ = true;
// TODO(philipel): Remove this keyframe request when downstream project
// has been fixed.
- RequestKeyFrame(now_ms);
+ force_request_key_frame = true;
}
+ bool received_frame_is_keyframe =
+ frame->FrameType() == VideoFrameType::kVideoFrameKey;
+
+ worker_thread_->PostTask(ToQueuedTask(
+ task_safety_,
+ [this, now_ms, received_frame_is_keyframe, force_request_key_frame,
+ decoded_frame_picture_id, keyframe_request_is_due]() {
+ RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
+
+ if (decoded_frame_picture_id != -1)
+ rtp_video_stream_receiver_.FrameDecoded(decoded_frame_picture_id);
+
+ HandleKeyFrameGeneration(received_frame_is_keyframe, now_ms,
+ force_request_key_frame,
+ keyframe_request_is_due);
+ }));
+
if (encoded_frame_buffer_function_) {
frame->Retain();
encoded_frame_buffer_function_(WebRtcRecordableEncodedFrame(*frame));
@@ -676,48 +700,58 @@ void VideoReceiveStream2::HandleEncodedFrame(
void VideoReceiveStream2::HandleKeyFrameGeneration(
bool received_frame_is_keyframe,
- int64_t now_ms) {
+ int64_t now_ms,
+ bool always_request_key_frame,
+ bool keyframe_request_is_due) {
+ // Running on worker_sequence_checker_.
+
+ bool request_key_frame = always_request_key_frame;
+
// Repeat sending keyframe requests if we've requested a keyframe.
- if (!keyframe_generation_requested_) {
- return;
- }
- if (received_frame_is_keyframe) {
- keyframe_generation_requested_ = false;
- } else if (last_keyframe_request_ms_ + max_wait_for_keyframe_ms_ <= now_ms) {
- if (!IsReceivingKeyFrame(now_ms)) {
- RequestKeyFrame(now_ms);
+ if (keyframe_generation_requested_) {
+ if (received_frame_is_keyframe) {
+ keyframe_generation_requested_ = false;
+ } else if (keyframe_request_is_due) {
+ if (!IsReceivingKeyFrame(now_ms)) {
+ request_key_frame = true;
+ }
+ } else {
+ // It hasn't been long enough since the last keyframe request, do nothing.
}
- } else {
- // It hasn't been long enough since the last keyframe request, do nothing.
+ }
+
+ if (request_key_frame) {
+ // HandleKeyFrameGeneration is initated from the decode thread -
+ // RequestKeyFrame() triggers a call back to the decode thread.
+ // Perhaps there's a way to avoid that.
+ RequestKeyFrame(now_ms);
}
}
-void VideoReceiveStream2::HandleFrameBufferTimeout() {
- // Running on |decode_queue_|.
- int64_t now_ms = clock_->TimeInMilliseconds();
+void VideoReceiveStream2::HandleFrameBufferTimeout(int64_t now_ms,
+ int64_t wait_ms) {
+ // Running on |worker_sequence_checker_|.
absl::optional<int64_t> last_packet_ms =
rtp_video_stream_receiver_.LastReceivedPacketMs();
// To avoid spamming keyframe requests for a stream that is not active we
// check if we have received a packet within the last 5 seconds.
- bool stream_is_active = last_packet_ms && now_ms - *last_packet_ms < 5000;
- if (!stream_is_active) {
- worker_thread_->PostTask(ToQueuedTask(task_safety_, [this]() {
- RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
- stats_proxy_.OnStreamInactive();
- }));
- }
+ const bool stream_is_active =
+ last_packet_ms && now_ms - *last_packet_ms < 5000;
+ if (!stream_is_active)
+ stats_proxy_.OnStreamInactive();
if (stream_is_active && !IsReceivingKeyFrame(now_ms) &&
(!config_.crypto_options.sframe.require_frame_encryption ||
rtp_video_stream_receiver_.IsDecryptable())) {
- RTC_LOG(LS_WARNING) << "No decodable frame in " << GetWaitMs()
+ RTC_LOG(LS_WARNING) << "No decodable frame in " << wait_ms
<< " ms, requesting keyframe.";
RequestKeyFrame(now_ms);
}
}
bool VideoReceiveStream2::IsReceivingKeyFrame(int64_t timestamp_ms) const {
+ // Running on worker_sequence_checker_.
absl::optional<int64_t> last_keyframe_packet_ms =
rtp_video_stream_receiver_.LastReceivedKeyframePacketMs();
@@ -730,6 +764,7 @@ bool VideoReceiveStream2::IsReceivingKeyFrame(int64_t timestamp_ms) const {
}
void VideoReceiveStream2::UpdatePlayoutDelays() const {
+ // Running on worker_sequence_checker_.
const int minimum_delay_ms =
std::max({frame_minimum_playout_delay_ms_, base_minimum_playout_delay_ms_,
syncable_minimum_playout_delay_ms_});
@@ -752,36 +787,43 @@ VideoReceiveStream2::SetAndGetRecordingState(RecordingState state,
bool generate_key_frame) {
RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
rtc::Event event;
+
+ // Save old state, set the new state.
RecordingState old_state;
- decode_queue_.PostTask([this, &event, &old_state, generate_key_frame,
- state = std::move(state)] {
- RTC_DCHECK_RUN_ON(&decode_queue_);
- // Save old state.
- old_state.callback = std::move(encoded_frame_buffer_function_);
- old_state.keyframe_needed = keyframe_generation_requested_;
- old_state.last_keyframe_request_ms = last_keyframe_request_ms_;
-
- // Set new state.
- encoded_frame_buffer_function_ = std::move(state.callback);
- if (generate_key_frame) {
- RequestKeyFrame(clock_->TimeInMilliseconds());
- keyframe_generation_requested_ = true;
- } else {
- keyframe_generation_requested_ = state.keyframe_needed;
- last_keyframe_request_ms_ = state.last_keyframe_request_ms.value_or(0);
- }
- event.Set();
- });
+
+ decode_queue_.PostTask(
+ [this, &event, &old_state, callback = std::move(state.callback),
+ generate_key_frame,
+ last_keyframe_request = state.last_keyframe_request_ms.value_or(0)] {
+ RTC_DCHECK_RUN_ON(&decode_queue_);
+ old_state.callback = std::move(encoded_frame_buffer_function_);
+ encoded_frame_buffer_function_ = std::move(callback);
+
+ old_state.last_keyframe_request_ms = last_keyframe_request_ms_;
+ last_keyframe_request_ms_ = generate_key_frame
+ ? clock_->TimeInMilliseconds()
+ : last_keyframe_request;
+
+ event.Set();
+ });
+
+ old_state.keyframe_needed = keyframe_generation_requested_;
+
+ if (generate_key_frame) {
+ rtp_video_stream_receiver_.RequestKeyFrame();
+ keyframe_generation_requested_ = true;
+ } else {
+ keyframe_generation_requested_ = state.keyframe_needed;
+ }
+
event.Wait(rtc::Event::kForever);
return old_state;
}
void VideoReceiveStream2::GenerateKeyFrame() {
- decode_queue_.PostTask([this]() {
- RTC_DCHECK_RUN_ON(&decode_queue_);
- RequestKeyFrame(clock_->TimeInMilliseconds());
- keyframe_generation_requested_ = true;
- });
+ RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
+ RequestKeyFrame(clock_->TimeInMilliseconds());
+ keyframe_generation_requested_ = true;
}
} // namespace internal
diff --git a/chromium/third_party/webrtc/video/video_receive_stream2.h b/chromium/third_party/webrtc/video/video_receive_stream2.h
index f8cd65dc9db..71b336e587e 100644
--- a/chromium/third_party/webrtc/video/video_receive_stream2.h
+++ b/chromium/third_party/webrtc/video/video_receive_stream2.h
@@ -15,7 +15,6 @@
#include <vector>
#include "api/task_queue/task_queue_factory.h"
-#include "api/transport/media/media_transport_interface.h"
#include "api/units/timestamp.h"
#include "api/video/recordable_encoded_frame.h"
#include "call/rtp_packet_sink_interface.h"
@@ -27,10 +26,11 @@
#include "modules/video_coding/video_receiver2.h"
#include "rtc_base/synchronization/sequence_checker.h"
#include "rtc_base/task_queue.h"
+#include "rtc_base/task_utils/pending_task_safety_flag.h"
#include "system_wrappers/include/clock.h"
#include "video/receive_statistics_proxy2.h"
#include "video/rtp_streams_synchronizer2.h"
-#include "video/rtp_video_stream_receiver.h"
+#include "video/rtp_video_stream_receiver2.h"
#include "video/transport_adapter.h"
#include "video/video_stream_decoder2.h"
@@ -158,24 +158,28 @@ class VideoReceiveStream2 : public webrtc::VideoReceiveStream,
void GenerateKeyFrame() override;
private:
- int64_t GetWaitMs() const;
+ int64_t GetMaxWaitMs() const RTC_RUN_ON(decode_queue_);
void StartNextDecode() RTC_RUN_ON(decode_queue_);
void HandleEncodedFrame(std::unique_ptr<video_coding::EncodedFrame> frame)
RTC_RUN_ON(decode_queue_);
- void HandleFrameBufferTimeout() RTC_RUN_ON(decode_queue_);
+ void HandleFrameBufferTimeout(int64_t now_ms, int64_t wait_ms)
+ RTC_RUN_ON(worker_sequence_checker_);
void UpdatePlayoutDelays() const
- RTC_EXCLUSIVE_LOCKS_REQUIRED(playout_delay_lock_);
- void RequestKeyFrame(int64_t timestamp_ms) RTC_RUN_ON(decode_queue_);
- void HandleKeyFrameGeneration(bool received_frame_is_keyframe, int64_t now_ms)
- RTC_RUN_ON(decode_queue_);
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_sequence_checker_);
+ void RequestKeyFrame(int64_t timestamp_ms)
+ RTC_RUN_ON(worker_sequence_checker_);
+ void HandleKeyFrameGeneration(bool received_frame_is_keyframe,
+ int64_t now_ms,
+ bool always_request_key_frame,
+ bool keyframe_request_is_due)
+ RTC_RUN_ON(worker_sequence_checker_);
bool IsReceivingKeyFrame(int64_t timestamp_ms) const
- RTC_RUN_ON(decode_queue_);
+ RTC_RUN_ON(worker_sequence_checker_);
void UpdateHistograms();
SequenceChecker worker_sequence_checker_;
SequenceChecker module_process_sequence_checker_;
- SequenceChecker network_sequence_checker_;
TaskQueueFactory* const task_queue_factory_;
@@ -199,7 +203,7 @@ class VideoReceiveStream2 : public webrtc::VideoReceiveStream,
std::unique_ptr<VCMTiming> timing_; // Jitter buffer experiment.
VideoReceiver2 video_receiver_;
std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>> incoming_video_stream_;
- RtpVideoStreamReceiver rtp_video_stream_receiver_;
+ RtpVideoStreamReceiver2 rtp_video_stream_receiver_;
std::unique_ptr<VideoStreamDecoder> video_stream_decoder_;
RtpStreamsSynchronizer rtp_stream_sync_;
@@ -216,40 +220,43 @@ class VideoReceiveStream2 : public webrtc::VideoReceiveStream,
// Whenever we are in an undecodable state (stream has just started or due to
// a decoding error) we require a keyframe to restart the stream.
- bool keyframe_required_ = true;
+ bool keyframe_required_ RTC_GUARDED_BY(decode_queue_) = true;
// If we have successfully decoded any frame.
- bool frame_decoded_ = false;
+ bool frame_decoded_ RTC_GUARDED_BY(decode_queue_) = false;
- int64_t last_keyframe_request_ms_ = 0;
- int64_t last_complete_frame_time_ms_ = 0;
+ int64_t last_keyframe_request_ms_ RTC_GUARDED_BY(decode_queue_) = 0;
+ int64_t last_complete_frame_time_ms_
+ RTC_GUARDED_BY(worker_sequence_checker_) = 0;
// Keyframe request intervals are configurable through field trials.
const int max_wait_for_keyframe_ms_;
const int max_wait_for_frame_ms_;
- rtc::CriticalSection playout_delay_lock_;
-
// All of them tries to change current min_playout_delay on |timing_| but
// source of the change request is different in each case. Among them the
// biggest delay is used. -1 means use default value from the |timing_|.
//
// Minimum delay as decided by the RTP playout delay extension.
- int frame_minimum_playout_delay_ms_ RTC_GUARDED_BY(playout_delay_lock_) = -1;
+ int frame_minimum_playout_delay_ms_ RTC_GUARDED_BY(worker_sequence_checker_) =
+ -1;
// Minimum delay as decided by the setLatency function in "webrtc/api".
- int base_minimum_playout_delay_ms_ RTC_GUARDED_BY(playout_delay_lock_) = -1;
- // Minimum delay as decided by the A/V synchronization feature.
- int syncable_minimum_playout_delay_ms_ RTC_GUARDED_BY(playout_delay_lock_) =
+ int base_minimum_playout_delay_ms_ RTC_GUARDED_BY(worker_sequence_checker_) =
-1;
+ // Minimum delay as decided by the A/V synchronization feature.
+ int syncable_minimum_playout_delay_ms_
+ RTC_GUARDED_BY(worker_sequence_checker_) = -1;
// Maximum delay as decided by the RTP playout delay extension.
- int frame_maximum_playout_delay_ms_ RTC_GUARDED_BY(playout_delay_lock_) = -1;
+ int frame_maximum_playout_delay_ms_ RTC_GUARDED_BY(worker_sequence_checker_) =
+ -1;
// Function that is triggered with encoded frames, if not empty.
std::function<void(const RecordableEncodedFrame&)>
encoded_frame_buffer_function_ RTC_GUARDED_BY(decode_queue_);
// Set to true while we're requesting keyframes but not yet received one.
- bool keyframe_generation_requested_ RTC_GUARDED_BY(decode_queue_) = false;
+ bool keyframe_generation_requested_ RTC_GUARDED_BY(worker_sequence_checker_) =
+ false;
// Defined last so they are destroyed before all other members.
rtc::TaskQueue decode_queue_;
diff --git a/chromium/third_party/webrtc/video/video_receive_stream2_unittest.cc b/chromium/third_party/webrtc/video/video_receive_stream2_unittest.cc
new file mode 100644
index 00000000000..a411cec740d
--- /dev/null
+++ b/chromium/third_party/webrtc/video/video_receive_stream2_unittest.cc
@@ -0,0 +1,571 @@
+/*
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video/video_receive_stream2.h"
+
+#include <algorithm>
+#include <memory>
+#include <utility>
+#include <vector>
+
+#include "api/task_queue/default_task_queue_factory.h"
+#include "api/test/video/function_video_decoder_factory.h"
+#include "api/video_codecs/video_decoder.h"
+#include "call/rtp_stream_receiver_controller.h"
+#include "common_video/test/utilities.h"
+#include "media/base/fake_video_renderer.h"
+#include "modules/pacing/packet_router.h"
+#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
+#include "modules/utility/include/process_thread.h"
+#include "modules/video_coding/encoded_frame.h"
+#include "rtc_base/critical_section.h"
+#include "rtc_base/event.h"
+#include "system_wrappers/include/clock.h"
+#include "test/fake_decoder.h"
+#include "test/field_trial.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/run_loop.h"
+#include "test/time_controller/simulated_time_controller.h"
+#include "test/video_decoder_proxy_factory.h"
+#include "video/call_stats2.h"
+
+namespace webrtc {
+namespace {
+
+using ::testing::_;
+using ::testing::ElementsAreArray;
+using ::testing::Invoke;
+using ::testing::IsEmpty;
+using ::testing::SizeIs;
+
+constexpr int kDefaultTimeOutMs = 50;
+
+class MockTransport : public Transport {
+ public:
+ MOCK_METHOD(bool,
+ SendRtp,
+ (const uint8_t*, size_t length, const PacketOptions& options),
+ (override));
+ MOCK_METHOD(bool, SendRtcp, (const uint8_t*, size_t length), (override));
+};
+
+class MockVideoDecoder : public VideoDecoder {
+ public:
+ MOCK_METHOD(int32_t,
+ InitDecode,
+ (const VideoCodec*, int32_t number_of_cores),
+ (override));
+ MOCK_METHOD(int32_t,
+ Decode,
+ (const EncodedImage& input,
+ bool missing_frames,
+ int64_t render_time_ms),
+ (override));
+ MOCK_METHOD(int32_t,
+ RegisterDecodeCompleteCallback,
+ (DecodedImageCallback*),
+ (override));
+ MOCK_METHOD(int32_t, Release, (), (override));
+ const char* ImplementationName() const { return "MockVideoDecoder"; }
+};
+
+class FrameObjectFake : public video_coding::EncodedFrame {
+ public:
+ void SetPayloadType(uint8_t payload_type) { _payloadType = payload_type; }
+
+ void SetRotation(const VideoRotation& rotation) { rotation_ = rotation; }
+
+ void SetNtpTime(int64_t ntp_time_ms) { ntp_time_ms_ = ntp_time_ms; }
+
+ int64_t ReceivedTime() const override { return 0; }
+
+ int64_t RenderTime() const override { return _renderTimeMs; }
+};
+
+} // namespace
+
+class VideoReceiveStream2Test : public ::testing::Test {
+ public:
+ VideoReceiveStream2Test()
+ : process_thread_(ProcessThread::Create("TestThread")),
+ task_queue_factory_(CreateDefaultTaskQueueFactory()),
+ config_(&mock_transport_),
+ call_stats_(Clock::GetRealTimeClock(), loop_.task_queue()),
+ h264_decoder_factory_(&mock_h264_video_decoder_),
+ null_decoder_factory_(&mock_null_video_decoder_) {}
+
+ void SetUp() {
+ constexpr int kDefaultNumCpuCores = 2;
+ config_.rtp.remote_ssrc = 1111;
+ config_.rtp.local_ssrc = 2222;
+ config_.renderer = &fake_renderer_;
+ VideoReceiveStream::Decoder h264_decoder;
+ h264_decoder.payload_type = 99;
+ h264_decoder.video_format = SdpVideoFormat("H264");
+ h264_decoder.video_format.parameters.insert(
+ {"sprop-parameter-sets", "Z0IACpZTBYmI,aMljiA=="});
+ h264_decoder.decoder_factory = &h264_decoder_factory_;
+ config_.decoders.push_back(h264_decoder);
+ VideoReceiveStream::Decoder null_decoder;
+ null_decoder.payload_type = 98;
+ null_decoder.video_format = SdpVideoFormat("null");
+ null_decoder.decoder_factory = &null_decoder_factory_;
+ config_.decoders.push_back(null_decoder);
+
+ clock_ = Clock::GetRealTimeClock();
+ timing_ = new VCMTiming(clock_);
+
+ video_receive_stream_ =
+ std::make_unique<webrtc::internal::VideoReceiveStream2>(
+ task_queue_factory_.get(), loop_.task_queue(),
+ &rtp_stream_receiver_controller_, kDefaultNumCpuCores,
+ &packet_router_, config_.Copy(), process_thread_.get(),
+ &call_stats_, clock_, timing_);
+ }
+
+ protected:
+ test::RunLoop loop_;
+ std::unique_ptr<ProcessThread> process_thread_;
+ const std::unique_ptr<TaskQueueFactory> task_queue_factory_;
+ VideoReceiveStream::Config config_;
+ internal::CallStats call_stats_;
+ MockVideoDecoder mock_h264_video_decoder_;
+ MockVideoDecoder mock_null_video_decoder_;
+ test::VideoDecoderProxyFactory h264_decoder_factory_;
+ test::VideoDecoderProxyFactory null_decoder_factory_;
+ cricket::FakeVideoRenderer fake_renderer_;
+ MockTransport mock_transport_;
+ PacketRouter packet_router_;
+ RtpStreamReceiverController rtp_stream_receiver_controller_;
+ std::unique_ptr<webrtc::internal::VideoReceiveStream2> video_receive_stream_;
+ Clock* clock_;
+ VCMTiming* timing_;
+};
+
+TEST_F(VideoReceiveStream2Test, CreateFrameFromH264FmtpSpropAndIdr) {
+ constexpr uint8_t idr_nalu[] = {0x05, 0xFF, 0xFF, 0xFF};
+ RtpPacketToSend rtppacket(nullptr);
+ uint8_t* payload = rtppacket.AllocatePayload(sizeof(idr_nalu));
+ memcpy(payload, idr_nalu, sizeof(idr_nalu));
+ rtppacket.SetMarker(true);
+ rtppacket.SetSsrc(1111);
+ rtppacket.SetPayloadType(99);
+ rtppacket.SetSequenceNumber(1);
+ rtppacket.SetTimestamp(0);
+ rtc::Event init_decode_event_;
+ EXPECT_CALL(mock_h264_video_decoder_, InitDecode(_, _))
+ .WillOnce(Invoke([&init_decode_event_](const VideoCodec* config,
+ int32_t number_of_cores) {
+ init_decode_event_.Set();
+ return 0;
+ }));
+ EXPECT_CALL(mock_h264_video_decoder_, RegisterDecodeCompleteCallback(_));
+ video_receive_stream_->Start();
+ EXPECT_CALL(mock_h264_video_decoder_, Decode(_, false, _));
+ RtpPacketReceived parsed_packet;
+ ASSERT_TRUE(parsed_packet.Parse(rtppacket.data(), rtppacket.size()));
+ rtp_stream_receiver_controller_.OnRtpPacket(parsed_packet);
+ EXPECT_CALL(mock_h264_video_decoder_, Release());
+ // Make sure the decoder thread had a chance to run.
+ init_decode_event_.Wait(kDefaultTimeOutMs);
+}
+
+TEST_F(VideoReceiveStream2Test, PlayoutDelay) {
+ const PlayoutDelay kPlayoutDelayMs = {123, 321};
+ std::unique_ptr<FrameObjectFake> test_frame(new FrameObjectFake());
+ test_frame->id.picture_id = 0;
+ test_frame->SetPlayoutDelay(kPlayoutDelayMs);
+
+ video_receive_stream_->OnCompleteFrame(std::move(test_frame));
+ EXPECT_EQ(kPlayoutDelayMs.min_ms, timing_->min_playout_delay());
+ EXPECT_EQ(kPlayoutDelayMs.max_ms, timing_->max_playout_delay());
+
+ // Check that the biggest minimum delay is chosen.
+ video_receive_stream_->SetMinimumPlayoutDelay(400);
+ EXPECT_EQ(400, timing_->min_playout_delay());
+
+ // Check base minimum delay validation.
+ EXPECT_FALSE(video_receive_stream_->SetBaseMinimumPlayoutDelayMs(12345));
+ EXPECT_FALSE(video_receive_stream_->SetBaseMinimumPlayoutDelayMs(-1));
+ EXPECT_TRUE(video_receive_stream_->SetBaseMinimumPlayoutDelayMs(500));
+ EXPECT_EQ(500, timing_->min_playout_delay());
+
+ // Check that intermidiate values are remembered and the biggest remembered
+ // is chosen.
+ video_receive_stream_->SetBaseMinimumPlayoutDelayMs(0);
+ EXPECT_EQ(400, timing_->min_playout_delay());
+
+ video_receive_stream_->SetMinimumPlayoutDelay(0);
+ EXPECT_EQ(123, timing_->min_playout_delay());
+}
+
+TEST_F(VideoReceiveStream2Test, PlayoutDelayPreservesDefaultMaxValue) {
+ const int default_max_playout_latency = timing_->max_playout_delay();
+ const PlayoutDelay kPlayoutDelayMs = {123, -1};
+
+ std::unique_ptr<FrameObjectFake> test_frame(new FrameObjectFake());
+ test_frame->id.picture_id = 0;
+ test_frame->SetPlayoutDelay(kPlayoutDelayMs);
+
+ video_receive_stream_->OnCompleteFrame(std::move(test_frame));
+
+ // Ensure that -1 preserves default maximum value from |timing_|.
+ EXPECT_EQ(kPlayoutDelayMs.min_ms, timing_->min_playout_delay());
+ EXPECT_NE(kPlayoutDelayMs.max_ms, timing_->max_playout_delay());
+ EXPECT_EQ(default_max_playout_latency, timing_->max_playout_delay());
+}
+
+TEST_F(VideoReceiveStream2Test, PlayoutDelayPreservesDefaultMinValue) {
+ const int default_min_playout_latency = timing_->min_playout_delay();
+ const PlayoutDelay kPlayoutDelayMs = {-1, 321};
+
+ std::unique_ptr<FrameObjectFake> test_frame(new FrameObjectFake());
+ test_frame->id.picture_id = 0;
+ test_frame->SetPlayoutDelay(kPlayoutDelayMs);
+
+ video_receive_stream_->OnCompleteFrame(std::move(test_frame));
+
+ // Ensure that -1 preserves default minimum value from |timing_|.
+ EXPECT_NE(kPlayoutDelayMs.min_ms, timing_->min_playout_delay());
+ EXPECT_EQ(kPlayoutDelayMs.max_ms, timing_->max_playout_delay());
+ EXPECT_EQ(default_min_playout_latency, timing_->min_playout_delay());
+}
+
+class VideoReceiveStream2TestWithFakeDecoder : public ::testing::Test {
+ public:
+ VideoReceiveStream2TestWithFakeDecoder()
+ : fake_decoder_factory_(
+ []() { return std::make_unique<test::FakeDecoder>(); }),
+ process_thread_(ProcessThread::Create("TestThread")),
+ task_queue_factory_(CreateDefaultTaskQueueFactory()),
+ config_(&mock_transport_),
+ call_stats_(Clock::GetRealTimeClock(), loop_.task_queue()) {}
+
+ void SetUp() {
+ config_.rtp.remote_ssrc = 1111;
+ config_.rtp.local_ssrc = 2222;
+ config_.renderer = &fake_renderer_;
+ VideoReceiveStream::Decoder fake_decoder;
+ fake_decoder.payload_type = 99;
+ fake_decoder.video_format = SdpVideoFormat("VP8");
+ fake_decoder.decoder_factory = &fake_decoder_factory_;
+ config_.decoders.push_back(fake_decoder);
+ clock_ = Clock::GetRealTimeClock();
+ ReCreateReceiveStream(VideoReceiveStream::RecordingState());
+ }
+
+ void ReCreateReceiveStream(VideoReceiveStream::RecordingState state) {
+ constexpr int kDefaultNumCpuCores = 2;
+ video_receive_stream_ = nullptr;
+ timing_ = new VCMTiming(clock_);
+ video_receive_stream_.reset(new webrtc::internal::VideoReceiveStream2(
+ task_queue_factory_.get(), loop_.task_queue(),
+ &rtp_stream_receiver_controller_, kDefaultNumCpuCores, &packet_router_,
+ config_.Copy(), process_thread_.get(), &call_stats_, clock_, timing_));
+ video_receive_stream_->SetAndGetRecordingState(std::move(state), false);
+ }
+
+ protected:
+ test::RunLoop loop_;
+ test::FunctionVideoDecoderFactory fake_decoder_factory_;
+ std::unique_ptr<ProcessThread> process_thread_;
+ const std::unique_ptr<TaskQueueFactory> task_queue_factory_;
+ VideoReceiveStream::Config config_;
+ internal::CallStats call_stats_;
+ cricket::FakeVideoRenderer fake_renderer_;
+ MockTransport mock_transport_;
+ PacketRouter packet_router_;
+ RtpStreamReceiverController rtp_stream_receiver_controller_;
+ std::unique_ptr<webrtc::internal::VideoReceiveStream2> video_receive_stream_;
+ Clock* clock_;
+ VCMTiming* timing_;
+};
+
+TEST_F(VideoReceiveStream2TestWithFakeDecoder, PassesNtpTime) {
+ const int64_t kNtpTimestamp = 12345;
+ auto test_frame = std::make_unique<FrameObjectFake>();
+ test_frame->SetPayloadType(99);
+ test_frame->id.picture_id = 0;
+ test_frame->SetNtpTime(kNtpTimestamp);
+
+ video_receive_stream_->Start();
+ video_receive_stream_->OnCompleteFrame(std::move(test_frame));
+ EXPECT_TRUE(fake_renderer_.WaitForRenderedFrame(kDefaultTimeOutMs));
+ EXPECT_EQ(kNtpTimestamp, fake_renderer_.ntp_time_ms());
+}
+
+TEST_F(VideoReceiveStream2TestWithFakeDecoder, PassesRotation) {
+ const webrtc::VideoRotation kRotation = webrtc::kVideoRotation_180;
+ auto test_frame = std::make_unique<FrameObjectFake>();
+ test_frame->SetPayloadType(99);
+ test_frame->id.picture_id = 0;
+ test_frame->SetRotation(kRotation);
+
+ video_receive_stream_->Start();
+ video_receive_stream_->OnCompleteFrame(std::move(test_frame));
+ EXPECT_TRUE(fake_renderer_.WaitForRenderedFrame(kDefaultTimeOutMs));
+
+ EXPECT_EQ(kRotation, fake_renderer_.rotation());
+}
+
+TEST_F(VideoReceiveStream2TestWithFakeDecoder, PassesPacketInfos) {
+ auto test_frame = std::make_unique<FrameObjectFake>();
+ test_frame->SetPayloadType(99);
+ test_frame->id.picture_id = 0;
+ RtpPacketInfos packet_infos = CreatePacketInfos(3);
+ test_frame->SetPacketInfos(packet_infos);
+
+ video_receive_stream_->Start();
+ video_receive_stream_->OnCompleteFrame(std::move(test_frame));
+ EXPECT_TRUE(fake_renderer_.WaitForRenderedFrame(kDefaultTimeOutMs));
+
+ EXPECT_THAT(fake_renderer_.packet_infos(), ElementsAreArray(packet_infos));
+}
+
+TEST_F(VideoReceiveStream2TestWithFakeDecoder, RenderedFrameUpdatesGetSources) {
+ constexpr uint32_t kSsrc = 1111;
+ constexpr uint32_t kCsrc = 9001;
+ constexpr uint32_t kRtpTimestamp = 12345;
+
+ // Prepare one video frame with per-packet information.
+ auto test_frame = std::make_unique<FrameObjectFake>();
+ test_frame->SetPayloadType(99);
+ test_frame->id.picture_id = 0;
+ RtpPacketInfos packet_infos;
+ {
+ RtpPacketInfos::vector_type infos;
+
+ RtpPacketInfo info;
+ info.set_ssrc(kSsrc);
+ info.set_csrcs({kCsrc});
+ info.set_rtp_timestamp(kRtpTimestamp);
+
+ info.set_receive_time_ms(clock_->TimeInMilliseconds() - 5000);
+ infos.push_back(info);
+
+ info.set_receive_time_ms(clock_->TimeInMilliseconds() - 3000);
+ infos.push_back(info);
+
+ info.set_receive_time_ms(clock_->TimeInMilliseconds() - 2000);
+ infos.push_back(info);
+
+ info.set_receive_time_ms(clock_->TimeInMilliseconds() - 4000);
+ infos.push_back(info);
+
+ packet_infos = RtpPacketInfos(std::move(infos));
+ }
+ test_frame->SetPacketInfos(packet_infos);
+
+ // Start receive stream.
+ video_receive_stream_->Start();
+ EXPECT_THAT(video_receive_stream_->GetSources(), IsEmpty());
+
+ // Render one video frame.
+ int64_t timestamp_ms_min = clock_->TimeInMilliseconds();
+ video_receive_stream_->OnCompleteFrame(std::move(test_frame));
+ EXPECT_TRUE(fake_renderer_.WaitForRenderedFrame(kDefaultTimeOutMs));
+ int64_t timestamp_ms_max = clock_->TimeInMilliseconds();
+
+ // Verify that the per-packet information is passed to the renderer.
+ EXPECT_THAT(fake_renderer_.packet_infos(), ElementsAreArray(packet_infos));
+
+ // Verify that the per-packet information also updates |GetSources()|.
+ std::vector<RtpSource> sources = video_receive_stream_->GetSources();
+ ASSERT_THAT(sources, SizeIs(2));
+ {
+ auto it = std::find_if(sources.begin(), sources.end(),
+ [](const RtpSource& source) {
+ return source.source_type() == RtpSourceType::SSRC;
+ });
+ ASSERT_NE(it, sources.end());
+
+ EXPECT_EQ(it->source_id(), kSsrc);
+ EXPECT_EQ(it->source_type(), RtpSourceType::SSRC);
+ EXPECT_EQ(it->rtp_timestamp(), kRtpTimestamp);
+ EXPECT_GE(it->timestamp_ms(), timestamp_ms_min);
+ EXPECT_LE(it->timestamp_ms(), timestamp_ms_max);
+ }
+ {
+ auto it = std::find_if(sources.begin(), sources.end(),
+ [](const RtpSource& source) {
+ return source.source_type() == RtpSourceType::CSRC;
+ });
+ ASSERT_NE(it, sources.end());
+
+ EXPECT_EQ(it->source_id(), kCsrc);
+ EXPECT_EQ(it->source_type(), RtpSourceType::CSRC);
+ EXPECT_EQ(it->rtp_timestamp(), kRtpTimestamp);
+ EXPECT_GE(it->timestamp_ms(), timestamp_ms_min);
+ EXPECT_LE(it->timestamp_ms(), timestamp_ms_max);
+ }
+}
+
+std::unique_ptr<FrameObjectFake> MakeFrame(VideoFrameType frame_type,
+ int picture_id) {
+ auto frame = std::make_unique<FrameObjectFake>();
+ frame->SetPayloadType(99);
+ frame->id.picture_id = picture_id;
+ frame->SetFrameType(frame_type);
+ return frame;
+}
+
+TEST_F(VideoReceiveStream2TestWithFakeDecoder,
+ PassesFrameWhenEncodedFramesCallbackSet) {
+ testing::MockFunction<void(const RecordableEncodedFrame&)> callback;
+ video_receive_stream_->Start();
+ // Expect a keyframe request to be generated
+ EXPECT_CALL(mock_transport_, SendRtcp);
+ EXPECT_CALL(callback, Call);
+ video_receive_stream_->SetAndGetRecordingState(
+ VideoReceiveStream::RecordingState(callback.AsStdFunction()), true);
+ video_receive_stream_->OnCompleteFrame(
+ MakeFrame(VideoFrameType::kVideoFrameKey, 0));
+ EXPECT_TRUE(fake_renderer_.WaitForRenderedFrame(kDefaultTimeOutMs));
+ video_receive_stream_->Stop();
+}
+
+TEST_F(VideoReceiveStream2TestWithFakeDecoder,
+ MovesEncodedFrameDispatchStateWhenReCreating) {
+ testing::MockFunction<void(const RecordableEncodedFrame&)> callback;
+ video_receive_stream_->Start();
+ // Expect a key frame request over RTCP.
+ EXPECT_CALL(mock_transport_, SendRtcp).Times(1);
+ video_receive_stream_->SetAndGetRecordingState(
+ VideoReceiveStream::RecordingState(callback.AsStdFunction()), true);
+ video_receive_stream_->Stop();
+ VideoReceiveStream::RecordingState old_state =
+ video_receive_stream_->SetAndGetRecordingState(
+ VideoReceiveStream::RecordingState(), false);
+ ReCreateReceiveStream(std::move(old_state));
+ video_receive_stream_->Stop();
+}
+
+class VideoReceiveStream2TestWithSimulatedClock : public ::testing::Test {
+ public:
+ class FakeDecoder2 : public test::FakeDecoder {
+ public:
+ explicit FakeDecoder2(std::function<void()> decode_callback)
+ : callback_(decode_callback) {}
+
+ int32_t Decode(const EncodedImage& input,
+ bool missing_frames,
+ int64_t render_time_ms) override {
+ int32_t result =
+ FakeDecoder::Decode(input, missing_frames, render_time_ms);
+ callback_();
+ return result;
+ }
+
+ private:
+ std::function<void()> callback_;
+ };
+
+ static VideoReceiveStream::Config GetConfig(
+ Transport* transport,
+ VideoDecoderFactory* decoder_factory,
+ rtc::VideoSinkInterface<webrtc::VideoFrame>* renderer) {
+ VideoReceiveStream::Config config(transport);
+ config.rtp.remote_ssrc = 1111;
+ config.rtp.local_ssrc = 2222;
+ config.renderer = renderer;
+ VideoReceiveStream::Decoder fake_decoder;
+ fake_decoder.payload_type = 99;
+ fake_decoder.video_format = SdpVideoFormat("VP8");
+ fake_decoder.decoder_factory = decoder_factory;
+ config.decoders.push_back(fake_decoder);
+ return config;
+ }
+
+ VideoReceiveStream2TestWithSimulatedClock()
+ : time_controller_(Timestamp::Millis(4711)),
+ fake_decoder_factory_([this] {
+ return std::make_unique<FakeDecoder2>([this] { OnFrameDecoded(); });
+ }),
+ process_thread_(time_controller_.CreateProcessThread("ProcessThread")),
+ config_(GetConfig(&mock_transport_,
+ &fake_decoder_factory_,
+ &fake_renderer_)),
+ call_stats_(time_controller_.GetClock(), loop_.task_queue()),
+ video_receive_stream_(time_controller_.GetTaskQueueFactory(),
+ loop_.task_queue(),
+ &rtp_stream_receiver_controller_,
+ /*num_cores=*/2,
+ &packet_router_,
+ config_.Copy(),
+ process_thread_.get(),
+ &call_stats_,
+ time_controller_.GetClock(),
+ new VCMTiming(time_controller_.GetClock())) {
+ video_receive_stream_.Start();
+ }
+
+ void OnFrameDecoded() { event_->Set(); }
+
+ void PassEncodedFrameAndWait(
+ std::unique_ptr<video_coding::EncodedFrame> frame) {
+ event_ = std::make_unique<rtc::Event>();
+ // This call will eventually end up in the Decoded method where the
+ // event is set.
+ video_receive_stream_.OnCompleteFrame(std::move(frame));
+ event_->Wait(rtc::Event::kForever);
+ }
+
+ protected:
+ GlobalSimulatedTimeController time_controller_;
+ test::RunLoop loop_;
+ test::FunctionVideoDecoderFactory fake_decoder_factory_;
+ std::unique_ptr<ProcessThread> process_thread_;
+ MockTransport mock_transport_;
+ cricket::FakeVideoRenderer fake_renderer_;
+ VideoReceiveStream::Config config_;
+ internal::CallStats call_stats_;
+ PacketRouter packet_router_;
+ RtpStreamReceiverController rtp_stream_receiver_controller_;
+ webrtc::internal::VideoReceiveStream2 video_receive_stream_;
+ std::unique_ptr<rtc::Event> event_;
+};
+
+TEST_F(VideoReceiveStream2TestWithSimulatedClock,
+ RequestsKeyFramesUntilKeyFrameReceived) {
+ auto tick = TimeDelta::Millis(
+ internal::VideoReceiveStream2::kMaxWaitForKeyFrameMs / 2);
+ EXPECT_CALL(mock_transport_, SendRtcp).Times(1).WillOnce(Invoke([this]() {
+ loop_.Quit();
+ return 0;
+ }));
+ video_receive_stream_.GenerateKeyFrame();
+ PassEncodedFrameAndWait(MakeFrame(VideoFrameType::kVideoFrameDelta, 0));
+ time_controller_.AdvanceTime(tick);
+ PassEncodedFrameAndWait(MakeFrame(VideoFrameType::kVideoFrameDelta, 1));
+ loop_.Run();
+ testing::Mock::VerifyAndClearExpectations(&mock_transport_);
+
+ // T+200ms: still no key frame received, expect key frame request sent again.
+ EXPECT_CALL(mock_transport_, SendRtcp).Times(1).WillOnce(Invoke([this]() {
+ loop_.Quit();
+ return 0;
+ }));
+ time_controller_.AdvanceTime(tick);
+ PassEncodedFrameAndWait(MakeFrame(VideoFrameType::kVideoFrameDelta, 2));
+ loop_.Run();
+ testing::Mock::VerifyAndClearExpectations(&mock_transport_);
+
+ // T+200ms: now send a key frame - we should not observe new key frame
+ // requests after this.
+ EXPECT_CALL(mock_transport_, SendRtcp).Times(0);
+ PassEncodedFrameAndWait(MakeFrame(VideoFrameType::kVideoFrameKey, 3));
+ time_controller_.AdvanceTime(2 * tick);
+ PassEncodedFrameAndWait(MakeFrame(VideoFrameType::kVideoFrameDelta, 4));
+ loop_.PostTask([this]() { loop_.Quit(); });
+ loop_.Run();
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/video/video_receive_stream_unittest.cc b/chromium/third_party/webrtc/video/video_receive_stream_unittest.cc
index 54896e89d8a..07032fe4684 100644
--- a/chromium/third_party/webrtc/video/video_receive_stream_unittest.cc
+++ b/chromium/third_party/webrtc/video/video_receive_stream_unittest.cc
@@ -49,24 +49,30 @@ constexpr int kDefaultTimeOutMs = 50;
class MockTransport : public Transport {
public:
- MOCK_METHOD3(SendRtp,
- bool(const uint8_t* packet,
- size_t length,
- const PacketOptions& options));
- MOCK_METHOD2(SendRtcp, bool(const uint8_t* packet, size_t length));
+ MOCK_METHOD(bool,
+ SendRtp,
+ (const uint8_t*, size_t length, const PacketOptions& options),
+ (override));
+ MOCK_METHOD(bool, SendRtcp, (const uint8_t*, size_t length), (override));
};
class MockVideoDecoder : public VideoDecoder {
public:
- MOCK_METHOD2(InitDecode,
- int32_t(const VideoCodec* config, int32_t number_of_cores));
- MOCK_METHOD3(Decode,
- int32_t(const EncodedImage& input,
- bool missing_frames,
- int64_t render_time_ms));
- MOCK_METHOD1(RegisterDecodeCompleteCallback,
- int32_t(DecodedImageCallback* callback));
- MOCK_METHOD0(Release, int32_t(void));
+ MOCK_METHOD(int32_t,
+ InitDecode,
+ (const VideoCodec*, int32_t number_of_cores),
+ (override));
+ MOCK_METHOD(int32_t,
+ Decode,
+ (const EncodedImage& input,
+ bool missing_frames,
+ int64_t render_time_ms),
+ (override));
+ MOCK_METHOD(int32_t,
+ RegisterDecodeCompleteCallback,
+ (DecodedImageCallback*),
+ (override));
+ MOCK_METHOD(int32_t, Release, (), (override));
const char* ImplementationName() const { return "MockVideoDecoder"; }
};
diff --git a/chromium/third_party/webrtc/video/video_send_stream.cc b/chromium/third_party/webrtc/video/video_send_stream.cc
index bc9a0cd5f31..30ed86dbd18 100644
--- a/chromium/third_party/webrtc/video/video_send_stream.cc
+++ b/chromium/third_party/webrtc/video/video_send_stream.cc
@@ -179,6 +179,18 @@ void VideoSendStream::Stop() {
worker_queue_->PostTask([send_stream] { send_stream->Stop(); });
}
+void VideoSendStream::AddAdaptationResource(
+ rtc::scoped_refptr<Resource> resource) {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ video_stream_encoder_->AddAdaptationResource(resource);
+}
+
+std::vector<rtc::scoped_refptr<Resource>>
+VideoSendStream::GetAdaptationResources() {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ return video_stream_encoder_->GetAdaptationResources();
+}
+
void VideoSendStream::SetSource(
rtc::VideoSourceInterface<webrtc::VideoFrame>* source,
const DegradationPreference& degradation_preference) {
diff --git a/chromium/third_party/webrtc/video/video_send_stream.h b/chromium/third_party/webrtc/video/video_send_stream.h
index addaee49c25..78d8926e96e 100644
--- a/chromium/third_party/webrtc/video/video_send_stream.h
+++ b/chromium/third_party/webrtc/video/video_send_stream.h
@@ -79,6 +79,9 @@ class VideoSendStream : public webrtc::VideoSendStream {
void Start() override;
void Stop() override;
+ void AddAdaptationResource(rtc::scoped_refptr<Resource> resource) override;
+ std::vector<rtc::scoped_refptr<Resource>> GetAdaptationResources() override;
+
void SetSource(rtc::VideoSourceInterface<webrtc::VideoFrame>* source,
const DegradationPreference& degradation_preference) override;
diff --git a/chromium/third_party/webrtc/video/video_send_stream_impl_unittest.cc b/chromium/third_party/webrtc/video/video_send_stream_impl_unittest.cc
index a0f1201cbd8..bb702ba270d 100644
--- a/chromium/third_party/webrtc/video/video_send_stream_impl_unittest.cc
+++ b/chromium/third_party/webrtc/video/video_send_stream_impl_unittest.cc
@@ -61,33 +61,45 @@ std::string GetAlrProbingExperimentString() {
}
class MockRtpVideoSender : public RtpVideoSenderInterface {
public:
- MOCK_METHOD1(RegisterProcessThread, void(ProcessThread*));
- MOCK_METHOD0(DeRegisterProcessThread, void());
- MOCK_METHOD1(SetActive, void(bool));
- MOCK_METHOD1(SetActiveModules, void(const std::vector<bool>));
- MOCK_METHOD0(IsActive, bool());
- MOCK_METHOD1(OnNetworkAvailability, void(bool));
- MOCK_CONST_METHOD0(GetRtpStates, std::map<uint32_t, RtpState>());
- MOCK_CONST_METHOD0(GetRtpPayloadStates,
- std::map<uint32_t, RtpPayloadState>());
- MOCK_METHOD2(DeliverRtcp, void(const uint8_t*, size_t));
- MOCK_METHOD1(OnBitrateAllocationUpdated, void(const VideoBitrateAllocation&));
- MOCK_METHOD3(OnEncodedImage,
- EncodedImageCallback::Result(const EncodedImage&,
- const CodecSpecificInfo*,
- const RTPFragmentationHeader*));
- MOCK_METHOD1(OnTransportOverheadChanged, void(size_t));
- MOCK_METHOD1(OnOverheadChanged, void(size_t));
- MOCK_METHOD2(OnBitrateUpdated, void(BitrateAllocationUpdate, int));
- MOCK_CONST_METHOD0(GetPayloadBitrateBps, uint32_t());
- MOCK_CONST_METHOD0(GetProtectionBitrateBps, uint32_t());
- MOCK_METHOD3(SetEncodingData, void(size_t, size_t, size_t));
- MOCK_CONST_METHOD2(GetSentRtpPacketInfos,
- std::vector<RtpSequenceNumberMap::Info>(
- uint32_t ssrc,
- rtc::ArrayView<const uint16_t> sequence_numbers));
-
- MOCK_METHOD1(SetFecAllowed, void(bool fec_allowed));
+ MOCK_METHOD(void, RegisterProcessThread, (ProcessThread*), (override));
+ MOCK_METHOD(void, DeRegisterProcessThread, (), (override));
+ MOCK_METHOD(void, SetActive, (bool), (override));
+ MOCK_METHOD(void, SetActiveModules, (const std::vector<bool>), (override));
+ MOCK_METHOD(bool, IsActive, (), (override));
+ MOCK_METHOD(void, OnNetworkAvailability, (bool), (override));
+ MOCK_METHOD((std::map<uint32_t, RtpState>),
+ GetRtpStates,
+ (),
+ (const, override));
+ MOCK_METHOD((std::map<uint32_t, RtpPayloadState>),
+ GetRtpPayloadStates,
+ (),
+ (const, override));
+ MOCK_METHOD(void, DeliverRtcp, (const uint8_t*, size_t), (override));
+ MOCK_METHOD(void,
+ OnBitrateAllocationUpdated,
+ (const VideoBitrateAllocation&),
+ (override));
+ MOCK_METHOD(EncodedImageCallback::Result,
+ OnEncodedImage,
+ (const EncodedImage&,
+ const CodecSpecificInfo*,
+ const RTPFragmentationHeader*),
+ (override));
+ MOCK_METHOD(void, OnTransportOverheadChanged, (size_t), (override));
+ MOCK_METHOD(void,
+ OnBitrateUpdated,
+ (BitrateAllocationUpdate, int),
+ (override));
+ MOCK_METHOD(uint32_t, GetPayloadBitrateBps, (), (const, override));
+ MOCK_METHOD(uint32_t, GetProtectionBitrateBps, (), (const, override));
+ MOCK_METHOD(void, SetEncodingData, (size_t, size_t, size_t), (override));
+ MOCK_METHOD(std::vector<RtpSequenceNumberMap::Info>,
+ GetSentRtpPacketInfos,
+ (uint32_t ssrc, rtc::ArrayView<const uint16_t> sequence_numbers),
+ (const, override));
+
+ MOCK_METHOD(void, SetFecAllowed, (bool fec_allowed), (override));
};
BitrateAllocationUpdate CreateAllocation(int bitrate_bps) {
diff --git a/chromium/third_party/webrtc/video/video_send_stream_tests.cc b/chromium/third_party/webrtc/video/video_send_stream_tests.cc
index e38653831b8..09d7abc062e 100644
--- a/chromium/third_party/webrtc/video/video_send_stream_tests.cc
+++ b/chromium/third_party/webrtc/video/video_send_stream_tests.cc
@@ -25,10 +25,10 @@
#include "call/simulated_network.h"
#include "call/video_send_stream.h"
#include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp.h"
#include "modules/rtp_rtcp/source/rtcp_sender.h"
#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
#include "modules/rtp_rtcp/source/rtp_packet.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.h"
#include "modules/video_coding/codecs/vp8/include/vp8.h"
#include "modules/video_coding/codecs/vp9/include/vp9.h"
@@ -948,7 +948,7 @@ void VideoSendStreamTest::TestNackRetransmission(
non_padding_sequence_numbers_.end() - kNackedPacketsAtOnceCount,
non_padding_sequence_numbers_.end());
- RtpRtcp::Configuration config;
+ RtpRtcpInterface::Configuration config;
config.clock = Clock::GetRealTimeClock();
config.outgoing_transport = transport_adapter_.get();
config.rtcp_report_interval_ms = kRtcpIntervalMs;
@@ -1164,7 +1164,7 @@ void VideoSendStreamTest::TestPacketFragmentationSize(VideoFormat format,
kVideoSendSsrcs[0], rtp_packet.SequenceNumber(),
packets_lost_, // Cumulative lost.
loss_ratio); // Loss percent.
- RtpRtcp::Configuration config;
+ RtpRtcpInterface::Configuration config;
config.clock = Clock::GetRealTimeClock();
config.receive_statistics = &lossy_receive_stats;
config.outgoing_transport = transport_adapter_.get();
@@ -1416,7 +1416,7 @@ TEST_F(VideoSendStreamTest, SuspendBelowMinBitrate) {
RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_) {
FakeReceiveStatistics receive_stats(kVideoSendSsrcs[0],
last_sequence_number_, rtp_count_, 0);
- RtpRtcp::Configuration config;
+ RtpRtcpInterface::Configuration config;
config.clock = clock_;
config.receive_statistics = &receive_stats;
config.outgoing_transport = transport_adapter_.get();
@@ -1627,12 +1627,18 @@ TEST_F(VideoSendStreamTest, MinTransmitBitrateRespectsRemb) {
static const int kRembRespectedBitrateBps = 100000;
class BitrateObserver : public test::SendTest {
public:
- BitrateObserver()
+ explicit BitrateObserver(TaskQueueBase* task_queue)
: SendTest(kDefaultTimeoutMs),
+ task_queue_(task_queue),
retranmission_rate_limiter_(Clock::GetRealTimeClock(), 1000),
stream_(nullptr),
bitrate_capped_(false) {}
+ ~BitrateObserver() override {
+ // Make sure we free |rtp_rtcp_| in the same context as we constructed it.
+ SendTask(RTC_FROM_HERE, task_queue_, [this]() { rtp_rtcp_ = nullptr; });
+ }
+
private:
Action OnSendRtp(const uint8_t* packet, size_t length) override {
if (RtpHeaderParser::IsRtcp(packet, length))
@@ -1667,11 +1673,11 @@ TEST_F(VideoSendStreamTest, MinTransmitBitrateRespectsRemb) {
VideoSendStream* send_stream,
const std::vector<VideoReceiveStream*>& receive_streams) override {
stream_ = send_stream;
- RtpRtcp::Configuration config;
+ RtpRtcpInterface::Configuration config;
config.clock = Clock::GetRealTimeClock();
config.outgoing_transport = feedback_transport_.get();
config.retransmission_rate_limiter = &retranmission_rate_limiter_;
- rtp_rtcp_ = RtpRtcp::Create(config);
+ rtp_rtcp_ = ModuleRtpRtcpImpl2::Create(config);
rtp_rtcp_->SetRTCPStatus(RtcpMode::kReducedSize);
}
@@ -1690,12 +1696,13 @@ TEST_F(VideoSendStreamTest, MinTransmitBitrateRespectsRemb) {
<< "Timeout while waiting for low bitrate stats after REMB.";
}
- std::unique_ptr<RtpRtcp> rtp_rtcp_;
+ TaskQueueBase* const task_queue_;
+ std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp_;
std::unique_ptr<internal::TransportAdapter> feedback_transport_;
RateLimiter retranmission_rate_limiter_;
VideoSendStream* stream_;
bool bitrate_capped_;
- } test;
+ } test(task_queue());
RunBaseTest(&test);
}
@@ -2476,29 +2483,34 @@ TEST_F(VideoSendStreamTest, EncoderIsProperlyInitializedAndDestroyed) {
released_(false),
encoder_factory_(this) {}
- bool IsReleased() {
+ bool IsReleased() RTC_LOCKS_EXCLUDED(crit_) {
rtc::CritScope lock(&crit_);
return released_;
}
- bool IsReadyForEncode() {
+ bool IsReadyForEncode() RTC_LOCKS_EXCLUDED(crit_) {
rtc::CritScope lock(&crit_);
- return initialized_ && callback_registered_;
+ return IsReadyForEncodeLocked();
}
- size_t num_releases() {
+ size_t num_releases() RTC_LOCKS_EXCLUDED(crit_) {
rtc::CritScope lock(&crit_);
return num_releases_;
}
private:
+ bool IsReadyForEncodeLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_) {
+ return initialized_ && callback_registered_;
+ }
+
void SetFecControllerOverride(
FecControllerOverride* fec_controller_override) override {
// Ignored.
}
int32_t InitEncode(const VideoCodec* codecSettings,
- const Settings& settings) override {
+ const Settings& settings) override
+ RTC_LOCKS_EXCLUDED(crit_) {
rtc::CritScope lock(&crit_);
EXPECT_FALSE(initialized_);
initialized_ = true;
@@ -2515,16 +2527,16 @@ TEST_F(VideoSendStreamTest, EncoderIsProperlyInitializedAndDestroyed) {
}
int32_t RegisterEncodeCompleteCallback(
- EncodedImageCallback* callback) override {
+ EncodedImageCallback* callback) override RTC_LOCKS_EXCLUDED(crit_) {
rtc::CritScope lock(&crit_);
EXPECT_TRUE(initialized_);
callback_registered_ = true;
return 0;
}
- int32_t Release() override {
+ int32_t Release() override RTC_LOCKS_EXCLUDED(crit_) {
rtc::CritScope lock(&crit_);
- EXPECT_TRUE(IsReadyForEncode());
+ EXPECT_TRUE(IsReadyForEncodeLocked());
EXPECT_FALSE(released_);
initialized_ = false;
callback_registered_ = false;
diff --git a/chromium/third_party/webrtc/video/video_source_sink_controller.cc b/chromium/third_party/webrtc/video/video_source_sink_controller.cc
index a649adc68c1..7c24eadef58 100644
--- a/chromium/third_party/webrtc/video/video_source_sink_controller.cc
+++ b/chromium/third_party/webrtc/video/video_source_sink_controller.cc
@@ -14,10 +14,28 @@
#include <limits>
#include <utility>
+#include "rtc_base/logging.h"
#include "rtc_base/numerics/safe_conversions.h"
+#include "rtc_base/strings/string_builder.h"
namespace webrtc {
+namespace {
+
+std::string WantsToString(const rtc::VideoSinkWants& wants) {
+ rtc::StringBuilder ss;
+
+ ss << "max_fps=" << wants.max_framerate_fps
+ << " max_pixel_count=" << wants.max_pixel_count << " target_pixel_count="
+ << (wants.target_pixel_count.has_value()
+ ? std::to_string(wants.target_pixel_count.value())
+ : "null");
+
+ return ss.Release();
+}
+
+} // namespace
+
VideoSourceSinkController::VideoSourceSinkController(
rtc::VideoSinkInterface<VideoFrame>* sink,
rtc::VideoSourceInterface<VideoFrame>* source)
@@ -46,7 +64,9 @@ void VideoSourceSinkController::PushSourceSinkSettings() {
rtc::CritScope lock(&crit_);
if (!source_)
return;
- source_->AddOrUpdateSink(sink_, CurrentSettingsToSinkWants());
+ rtc::VideoSinkWants wants = CurrentSettingsToSinkWants();
+ RTC_LOG(INFO) << "Pushing SourceSink restrictions: " << WantsToString(wants);
+ source_->AddOrUpdateSink(sink_, wants);
}
VideoSourceRestrictions VideoSourceSinkController::restrictions() const {
diff --git a/chromium/third_party/webrtc/video/video_source_sink_controller.h b/chromium/third_party/webrtc/video/video_source_sink_controller.h
index 68fef3f0719..665493aa3d1 100644
--- a/chromium/third_party/webrtc/video/video_source_sink_controller.h
+++ b/chromium/third_party/webrtc/video/video_source_sink_controller.h
@@ -11,6 +11,8 @@
#ifndef VIDEO_VIDEO_SOURCE_SINK_CONTROLLER_H_
#define VIDEO_VIDEO_SOURCE_SINK_CONTROLLER_H_
+#include <string>
+
#include "absl/types/optional.h"
#include "api/video/video_frame.h"
#include "api/video/video_sink_interface.h"
diff --git a/chromium/third_party/webrtc/video/video_source_sink_controller_unittest.cc b/chromium/third_party/webrtc/video/video_source_sink_controller_unittest.cc
index c4e2ea11d2f..66881cd0238 100644
--- a/chromium/third_party/webrtc/video/video_source_sink_controller_unittest.cc
+++ b/chromium/third_party/webrtc/video/video_source_sink_controller_unittest.cc
@@ -30,8 +30,8 @@ class MockVideoSinkWithVideoFrame : public rtc::VideoSinkInterface<VideoFrame> {
public:
~MockVideoSinkWithVideoFrame() override {}
- MOCK_METHOD1(OnFrame, void(const VideoFrame& frame));
- MOCK_METHOD0(OnDiscardedFrame, void());
+ MOCK_METHOD(void, OnFrame, (const VideoFrame& frame), (override));
+ MOCK_METHOD(void, OnDiscardedFrame, (), (override));
};
class MockVideoSourceWithVideoFrame
@@ -39,10 +39,15 @@ class MockVideoSourceWithVideoFrame
public:
~MockVideoSourceWithVideoFrame() override {}
- MOCK_METHOD2(AddOrUpdateSink,
- void(rtc::VideoSinkInterface<VideoFrame>*,
- const rtc::VideoSinkWants&));
- MOCK_METHOD1(RemoveSink, void(rtc::VideoSinkInterface<VideoFrame>*));
+ MOCK_METHOD(void,
+ AddOrUpdateSink,
+ (rtc::VideoSinkInterface<VideoFrame>*,
+ const rtc::VideoSinkWants&),
+ (override));
+ MOCK_METHOD(void,
+ RemoveSink,
+ (rtc::VideoSinkInterface<VideoFrame>*),
+ (override));
};
} // namespace
diff --git a/chromium/third_party/webrtc/video/video_stream_decoder_impl_unittest.cc b/chromium/third_party/webrtc/video/video_stream_decoder_impl_unittest.cc
index 44e914001d4..a45a12ccae1 100644
--- a/chromium/third_party/webrtc/video/video_stream_decoder_impl_unittest.cc
+++ b/chromium/third_party/webrtc/video/video_stream_decoder_impl_unittest.cc
@@ -27,21 +27,25 @@ using ::testing::Return;
class MockVideoStreamDecoderCallbacks
: public VideoStreamDecoderInterface::Callbacks {
public:
- MOCK_METHOD0(OnNonDecodableState, void());
- MOCK_METHOD1(OnContinuousUntil,
- void(const video_coding::VideoLayerFrameId& key));
- MOCK_METHOD1(OnEncodedFrame, void(const video_coding::EncodedFrame& frame));
- MOCK_METHOD3(OnDecodedFrame,
- void(VideoFrame decodedImage,
- absl::optional<int> decode_time_ms,
- absl::optional<int> qp));
+ MOCK_METHOD(void, OnNonDecodableState, (), (override));
+ MOCK_METHOD(void,
+ OnContinuousUntil,
+ (const video_coding::VideoLayerFrameId& key),
+ (override));
+ MOCK_METHOD(void,
+ OnDecodedFrame,
+ (VideoFrame decodedImage,
+ absl::optional<int> decode_time_ms,
+ absl::optional<int> qp),
+ (override));
};
class StubVideoDecoder : public VideoDecoder {
public:
- MOCK_METHOD2(InitDecode,
- int32_t(const VideoCodec* codec_settings,
- int32_t number_of_cores));
+ MOCK_METHOD(int32_t,
+ InitDecode,
+ (const VideoCodec*, int32_t number_of_cores),
+ (override));
int32_t Decode(const EncodedImage& input_image,
bool missing_frames,
@@ -57,10 +61,12 @@ class StubVideoDecoder : public VideoDecoder {
return ret_code;
}
- MOCK_METHOD3(DecodeCall,
- int32_t(const EncodedImage& input_image,
- bool missing_frames,
- int64_t render_time_ms));
+ MOCK_METHOD(int32_t,
+ DecodeCall,
+ (const EncodedImage& input_image,
+ bool missing_frames,
+ int64_t render_time_ms),
+ ());
int32_t Release() override { return 0; }
diff --git a/chromium/third_party/webrtc/video/video_stream_encoder.cc b/chromium/third_party/webrtc/video/video_stream_encoder.cc
index 92ab5fc5c33..0ed73a3e636 100644
--- a/chromium/third_party/webrtc/video/video_stream_encoder.cc
+++ b/chromium/third_party/webrtc/video/video_stream_encoder.cc
@@ -251,7 +251,6 @@ VideoStreamEncoder::VideoStreamEncoder(
next_frame_types_(1, VideoFrameType::kVideoFrameDelta),
frame_encode_metadata_writer_(this),
experiment_groups_(GetExperimentGroups()),
- next_frame_id_(0),
encoder_switch_experiment_(ParseEncoderSwitchFieldTrial()),
automatic_animation_detection_experiment_(
ParseAutomatincAnimationDetectionFieldTrial()),
@@ -261,6 +260,8 @@ VideoStreamEncoder::VideoStreamEncoder(
std::make_unique<ResourceAdaptationProcessor>(
&input_state_provider_,
encoder_stats_observer)),
+ adaptation_constraints_(),
+ adaptation_listeners_(),
stream_resource_manager_(&input_state_provider_,
encoder_stats_observer,
clock_,
@@ -283,21 +284,29 @@ VideoStreamEncoder::VideoStreamEncoder(
rtc::Event initialize_processor_event;
resource_adaptation_queue_.PostTask([this, &initialize_processor_event] {
RTC_DCHECK_RUN_ON(&resource_adaptation_queue_);
- resource_adaptation_processor_->InitializeOnResourceAdaptationQueue();
+ resource_adaptation_processor_->SetResourceAdaptationQueue(
+ resource_adaptation_queue_.Get());
stream_resource_manager_.SetAdaptationProcessor(
resource_adaptation_processor_.get());
- resource_adaptation_processor_->AddAdaptationListener(
+ resource_adaptation_processor_->AddRestrictionsListener(
&stream_resource_manager_);
- resource_adaptation_processor_->AddAdaptationListener(this);
+ resource_adaptation_processor_->AddRestrictionsListener(this);
+
// Add the stream resource manager's resources to the processor.
- for (Resource* resource : stream_resource_manager_.MappedResources())
+ adaptation_constraints_ = stream_resource_manager_.AdaptationConstraints();
+ adaptation_listeners_ = stream_resource_manager_.AdaptationListeners();
+ for (auto& resource : stream_resource_manager_.MappedResources()) {
resource_adaptation_processor_->AddResource(resource);
+ }
+ for (auto* constraint : adaptation_constraints_) {
+ resource_adaptation_processor_->AddAdaptationConstraint(constraint);
+ }
+ for (auto* listener : adaptation_listeners_) {
+ resource_adaptation_processor_->AddAdaptationListener(listener);
+ }
initialize_processor_event.Set();
});
initialize_processor_event.Wait(rtc::Event::kForever);
-
- for (auto& state : encoder_buffer_state_)
- state.fill(std::numeric_limits<int64_t>::max());
}
VideoStreamEncoder::~VideoStreamEncoder() {
@@ -315,12 +324,17 @@ void VideoStreamEncoder::Stop() {
&shutdown_adaptation_processor_event] {
RTC_DCHECK_RUN_ON(&resource_adaptation_queue_);
if (resource_adaptation_processor_) {
- resource_adaptation_processor_->StopResourceAdaptation();
- for (Resource* resource : stream_resource_manager_.MappedResources()) {
+ for (auto& resource : stream_resource_manager_.MappedResources()) {
resource_adaptation_processor_->RemoveResource(resource);
}
- resource_adaptation_processor_->RemoveAdaptationListener(this);
- resource_adaptation_processor_->RemoveAdaptationListener(
+ for (auto* constraint : adaptation_constraints_) {
+ resource_adaptation_processor_->RemoveAdaptationConstraint(constraint);
+ }
+ for (auto* listener : adaptation_listeners_) {
+ resource_adaptation_processor_->RemoveAdaptationListener(listener);
+ }
+ resource_adaptation_processor_->RemoveRestrictionsListener(this);
+ resource_adaptation_processor_->RemoveRestrictionsListener(
&stream_resource_manager_);
stream_resource_manager_.SetAdaptationProcessor(nullptr);
resource_adaptation_processor_.reset();
@@ -361,6 +375,53 @@ void VideoStreamEncoder::SetFecControllerOverride(
});
}
+void VideoStreamEncoder::AddAdaptationResource(
+ rtc::scoped_refptr<Resource> resource) {
+ // Map any externally added resources as kCpu for the sake of stats reporting.
+ // TODO(hbos): Make the manager map any unknown resources to kCpu and get rid
+ // of this MapResourceToReason() call.
+ rtc::Event map_resource_event;
+ encoder_queue_.PostTask([this, resource, &map_resource_event] {
+ RTC_DCHECK_RUN_ON(&encoder_queue_);
+ stream_resource_manager_.MapResourceToReason(resource,
+ VideoAdaptationReason::kCpu);
+ map_resource_event.Set();
+ });
+ map_resource_event.Wait(rtc::Event::kForever);
+
+ // Add the resource to the processor.
+ rtc::Event add_resource_event;
+ resource_adaptation_queue_.PostTask([this, resource, &add_resource_event] {
+ RTC_DCHECK_RUN_ON(&resource_adaptation_queue_);
+ if (!resource_adaptation_processor_) {
+ // The VideoStreamEncoder was stopped and the processor destroyed before
+ // this task had a chance to execute. No action needed.
+ return;
+ }
+ resource_adaptation_processor_->AddResource(resource);
+ add_resource_event.Set();
+ });
+ add_resource_event.Wait(rtc::Event::kForever);
+}
+
+std::vector<rtc::scoped_refptr<Resource>>
+VideoStreamEncoder::GetAdaptationResources() {
+ std::vector<rtc::scoped_refptr<Resource>> resources;
+ rtc::Event event;
+ resource_adaptation_queue_.PostTask([this, &resources, &event] {
+ RTC_DCHECK_RUN_ON(&resource_adaptation_queue_);
+ if (!resource_adaptation_processor_) {
+ // The VideoStreamEncoder was stopped and the processor destroyed before
+ // this task had a chance to execute. No action needed.
+ return;
+ }
+ resources = resource_adaptation_processor_->GetResources();
+ event.Set();
+ });
+ event.Wait(rtc::Event::kForever);
+ return resources;
+}
+
void VideoStreamEncoder::SetSource(
rtc::VideoSourceInterface<VideoFrame>* source,
const DegradationPreference& degradation_preference) {
@@ -722,16 +783,6 @@ void VideoStreamEncoder::ReconfigureEncoder() {
// invoked later in this method.)
stream_resource_manager_.StopManagedResources();
stream_resource_manager_.StartEncodeUsageResource();
- resource_adaptation_queue_.PostTask([this] {
- RTC_DCHECK_RUN_ON(&resource_adaptation_queue_);
- if (!resource_adaptation_processor_) {
- // The VideoStreamEncoder was stopped and the processor destroyed before
- // this task had a chance to execute. No action needed.
- return;
- }
- // Ensures started. If already started this is a NO-OP.
- resource_adaptation_processor_->StartResourceAdaptation();
- });
pending_encoder_creation_ = false;
}
@@ -1506,48 +1557,8 @@ EncodedImageCallback::Result VideoStreamEncoder::OnEncodedImage(
simulcast_id = encoded_image.SpatialIndex().value_or(0);
}
- std::unique_ptr<CodecSpecificInfo> codec_info_copy;
- {
- rtc::CritScope cs(&encoded_image_lock_);
-
- if (codec_specific_info && codec_specific_info->generic_frame_info) {
- codec_info_copy =
- std::make_unique<CodecSpecificInfo>(*codec_specific_info);
- GenericFrameInfo& generic_info = *codec_info_copy->generic_frame_info;
- generic_info.frame_id = next_frame_id_++;
-
- if (encoder_buffer_state_.size() <= static_cast<size_t>(simulcast_id)) {
- RTC_LOG(LS_ERROR) << "At most " << encoder_buffer_state_.size()
- << " simulcast streams supported.";
- } else {
- std::array<int64_t, kMaxEncoderBuffers>& state =
- encoder_buffer_state_[simulcast_id];
- for (const CodecBufferUsage& buffer : generic_info.encoder_buffers) {
- if (state.size() <= static_cast<size_t>(buffer.id)) {
- RTC_LOG(LS_ERROR)
- << "At most " << state.size() << " encoder buffers supported.";
- break;
- }
-
- if (buffer.referenced) {
- int64_t diff = generic_info.frame_id - state[buffer.id];
- if (diff <= 0) {
- RTC_LOG(LS_ERROR) << "Invalid frame diff: " << diff << ".";
- } else if (absl::c_find(generic_info.frame_diffs, diff) ==
- generic_info.frame_diffs.end()) {
- generic_info.frame_diffs.push_back(diff);
- }
- }
-
- if (buffer.updated)
- state[buffer.id] = generic_info.frame_id;
- }
- }
- }
- }
-
EncodedImageCallback::Result result = sink_->OnEncodedImage(
- image_copy, codec_info_copy ? codec_info_copy.get() : codec_specific_info,
+ image_copy, codec_specific_info,
fragmentation_copy ? fragmentation_copy.get() : fragmentation);
// We are only interested in propagating the meta-data about the image, not
@@ -1750,6 +1761,9 @@ void VideoStreamEncoder::OnVideoSourceRestrictionsUpdated(
const VideoAdaptationCounters& adaptation_counters,
rtc::scoped_refptr<Resource> reason) {
RTC_DCHECK_RUN_ON(&resource_adaptation_queue_);
+ std::string resource_name = reason ? reason->Name() : "<null>";
+ RTC_LOG(INFO) << "Updating sink restrictions from " << resource_name << " to "
+ << restrictions.ToString();
video_source_sink_controller_.SetRestrictions(std::move(restrictions));
video_source_sink_controller_.PushSourceSinkSettings();
}
@@ -2027,7 +2041,8 @@ void VideoStreamEncoder::InjectAdaptationResource(
});
map_resource_event.Wait(rtc::Event::kForever);
- resource_adaptation_queue_.PostTask([this, resource] {
+ rtc::Event add_resource_event;
+ resource_adaptation_queue_.PostTask([this, resource, &add_resource_event] {
RTC_DCHECK_RUN_ON(&resource_adaptation_queue_);
if (!resource_adaptation_processor_) {
// The VideoStreamEncoder was stopped and the processor destroyed before
@@ -2035,7 +2050,44 @@ void VideoStreamEncoder::InjectAdaptationResource(
return;
}
resource_adaptation_processor_->AddResource(resource);
+ add_resource_event.Set();
});
+ add_resource_event.Wait(rtc::Event::kForever);
+}
+
+void VideoStreamEncoder::InjectAdaptationConstraint(
+ AdaptationConstraint* adaptation_constraint) {
+ rtc::Event event;
+ resource_adaptation_queue_.PostTask([this, adaptation_constraint, &event] {
+ RTC_DCHECK_RUN_ON(&resource_adaptation_queue_);
+ if (!resource_adaptation_processor_) {
+ // The VideoStreamEncoder was stopped and the processor destroyed before
+ // this task had a chance to execute. No action needed.
+ return;
+ }
+ adaptation_constraints_.push_back(adaptation_constraint);
+ resource_adaptation_processor_->AddAdaptationConstraint(
+ adaptation_constraint);
+ event.Set();
+ });
+ event.Wait(rtc::Event::kForever);
+}
+
+void VideoStreamEncoder::InjectAdaptationListener(
+ AdaptationListener* adaptation_listener) {
+ rtc::Event event;
+ resource_adaptation_queue_.PostTask([this, adaptation_listener, &event] {
+ RTC_DCHECK_RUN_ON(&resource_adaptation_queue_);
+ if (!resource_adaptation_processor_) {
+ // The VideoStreamEncoder was stopped and the processor destroyed before
+ // this task had a chance to execute. No action needed.
+ return;
+ }
+ adaptation_listeners_.push_back(adaptation_listener);
+ resource_adaptation_processor_->AddAdaptationListener(adaptation_listener);
+ event.Set();
+ });
+ event.Wait(rtc::Event::kForever);
}
rtc::scoped_refptr<QualityScalerResource>
@@ -2044,26 +2096,27 @@ VideoStreamEncoder::quality_scaler_resource_for_testing() {
return stream_resource_manager_.quality_scaler_resource_for_testing();
}
-void VideoStreamEncoder::AddAdaptationListenerForTesting(
- ResourceAdaptationProcessorListener* adaptation_listener) {
+void VideoStreamEncoder::AddRestrictionsListenerForTesting(
+ VideoSourceRestrictionsListener* restrictions_listener) {
rtc::Event event;
- resource_adaptation_queue_.PostTask([this, adaptation_listener, &event] {
+ resource_adaptation_queue_.PostTask([this, restrictions_listener, &event] {
RTC_DCHECK_RUN_ON(&resource_adaptation_queue_);
RTC_DCHECK(resource_adaptation_processor_);
- resource_adaptation_processor_->AddAdaptationListener(adaptation_listener);
+ resource_adaptation_processor_->AddRestrictionsListener(
+ restrictions_listener);
event.Set();
});
event.Wait(rtc::Event::kForever);
}
-void VideoStreamEncoder::RemoveAdaptationListenerForTesting(
- ResourceAdaptationProcessorListener* adaptation_listener) {
+void VideoStreamEncoder::RemoveRestrictionsListenerForTesting(
+ VideoSourceRestrictionsListener* restrictions_listener) {
rtc::Event event;
- resource_adaptation_queue_.PostTask([this, adaptation_listener, &event] {
+ resource_adaptation_queue_.PostTask([this, restrictions_listener, &event] {
RTC_DCHECK_RUN_ON(&resource_adaptation_queue_);
RTC_DCHECK(resource_adaptation_processor_);
- resource_adaptation_processor_->RemoveAdaptationListener(
- adaptation_listener);
+ resource_adaptation_processor_->RemoveRestrictionsListener(
+ restrictions_listener);
event.Set();
});
event.Wait(rtc::Event::kForever);
diff --git a/chromium/third_party/webrtc/video/video_stream_encoder.h b/chromium/third_party/webrtc/video/video_stream_encoder.h
index 13b2bdf46bd..68b264deac3 100644
--- a/chromium/third_party/webrtc/video/video_stream_encoder.h
+++ b/chromium/third_party/webrtc/video/video_stream_encoder.h
@@ -17,6 +17,7 @@
#include <string>
#include <vector>
+#include "api/adaptation/resource.h"
#include "api/units/data_rate.h"
#include "api/video/video_bitrate_allocator.h"
#include "api/video/video_rotation.h"
@@ -26,6 +27,8 @@
#include "api/video/video_stream_encoder_settings.h"
#include "api/video_codecs/video_codec.h"
#include "api/video_codecs/video_encoder.h"
+#include "call/adaptation/adaptation_constraint.h"
+#include "call/adaptation/adaptation_listener.h"
#include "call/adaptation/resource_adaptation_processor_interface.h"
#include "call/adaptation/video_source_restrictions.h"
#include "call/adaptation/video_stream_input_state_provider.h"
@@ -44,6 +47,7 @@
#include "video/encoder_bitrate_adjuster.h"
#include "video/frame_encode_metadata_writer.h"
#include "video/video_source_sink_controller.h"
+
namespace webrtc {
// VideoStreamEncoder represent a video encoder that accepts raw video frames as
@@ -56,7 +60,7 @@ namespace webrtc {
// Call Stop() when done.
class VideoStreamEncoder : public VideoStreamEncoderInterface,
private EncodedImageCallback,
- public ResourceAdaptationProcessorListener {
+ public VideoSourceRestrictionsListener {
public:
VideoStreamEncoder(Clock* clock,
uint32_t number_of_cores,
@@ -66,6 +70,9 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface,
TaskQueueFactory* task_queue_factory);
~VideoStreamEncoder() override;
+ void AddAdaptationResource(rtc::scoped_refptr<Resource> resource) override;
+ std::vector<rtc::scoped_refptr<Resource>> GetAdaptationResources() override;
+
void SetSource(rtc::VideoSourceInterface<VideoFrame>* source,
const DegradationPreference& degradation_preference) override;
@@ -118,16 +125,17 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface,
// Used for injected test resources.
// TODO(eshr): Move all adaptation tests out of VideoStreamEncoder tests.
void InjectAdaptationResource(rtc::scoped_refptr<Resource> resource,
- VideoAdaptationReason reason)
- RTC_RUN_ON(&encoder_queue_);
+ VideoAdaptationReason reason);
+ void InjectAdaptationConstraint(AdaptationConstraint* adaptation_constraint);
+ void InjectAdaptationListener(AdaptationListener* adaptation_listener);
rtc::scoped_refptr<QualityScalerResource>
quality_scaler_resource_for_testing();
- void AddAdaptationListenerForTesting(
- ResourceAdaptationProcessorListener* adaptation_listener);
- void RemoveAdaptationListenerForTesting(
- ResourceAdaptationProcessorListener* adaptation_listener);
+ void AddRestrictionsListenerForTesting(
+ VideoSourceRestrictionsListener* restrictions_listener);
+ void RemoveRestrictionsListenerForTesting(
+ VideoSourceRestrictionsListener* restrictions_listener);
private:
class VideoFrameInfo {
@@ -341,17 +349,6 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface,
// experiment group numbers incremented by 1.
const std::array<uint8_t, 2> experiment_groups_;
- // TODO(philipel): Remove this lock and run on |encoder_queue_| instead.
- rtc::CriticalSection encoded_image_lock_;
-
- int64_t next_frame_id_ RTC_GUARDED_BY(encoded_image_lock_);
-
- // This array is used as a map from simulcast id to an encoder's buffer
- // state. For every buffer of the encoder we keep track of the last frame id
- // that updated that buffer.
- std::array<std::array<int64_t, kMaxEncoderBuffers>, kMaxSimulcastStreams>
- encoder_buffer_state_ RTC_GUARDED_BY(encoded_image_lock_);
-
struct EncoderSwitchExperiment {
struct Thresholds {
absl::optional<DataRate> bitrate;
@@ -417,6 +414,10 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface,
std::unique_ptr<ResourceAdaptationProcessorInterface>
resource_adaptation_processor_
RTC_GUARDED_BY(&resource_adaptation_queue_);
+ std::vector<AdaptationConstraint*> adaptation_constraints_
+ RTC_GUARDED_BY(&resource_adaptation_queue_);
+ std::vector<AdaptationListener*> adaptation_listeners_
+ RTC_GUARDED_BY(&resource_adaptation_queue_);
// Handles input, output and stats reporting related to VideoStreamEncoder
// specific resources, such as "encode usage percent" measurements and "QP
// scaling". Also involved with various mitigations such as inital frame
diff --git a/chromium/third_party/webrtc/video/video_stream_encoder_unittest.cc b/chromium/third_party/webrtc/video/video_stream_encoder_unittest.cc
index 1c334fc3b3a..e963619e607 100644
--- a/chromium/third_party/webrtc/video/video_stream_encoder_unittest.cc
+++ b/chromium/third_party/webrtc/video/video_stream_encoder_unittest.cc
@@ -26,6 +26,8 @@
#include "api/video_codecs/video_encoder.h"
#include "api/video_codecs/vp8_temporal_layers.h"
#include "api/video_codecs/vp8_temporal_layers_factory.h"
+#include "call/adaptation/test/fake_adaptation_constraint.h"
+#include "call/adaptation/test/fake_adaptation_listener.h"
#include "call/adaptation/test/fake_resource.h"
#include "common_video/h264/h264_common.h"
#include "common_video/include/video_frame_buffer.h"
@@ -34,6 +36,7 @@
#include "modules/video_coding/utility/quality_scaler.h"
#include "modules/video_coding/utility/simulcast_rate_allocator.h"
#include "rtc_base/fake_clock.h"
+#include "rtc_base/gunit.h"
#include "rtc_base/logging.h"
#include "rtc_base/ref_counted_object.h"
#include "system_wrappers/include/field_trial.h"
@@ -52,7 +55,12 @@ namespace webrtc {
using ::testing::_;
using ::testing::AllOf;
+using ::testing::Eq;
using ::testing::Field;
+using ::testing::Ge;
+using ::testing::Gt;
+using ::testing::Le;
+using ::testing::Lt;
using ::testing::Matcher;
using ::testing::NiceMock;
using ::testing::Return;
@@ -180,12 +188,12 @@ class FakeQualityScalerQpUsageHandlerCallback
absl::optional<bool> clear_qp_samples_result_;
};
-class VideoSourceRestrictionsUpdatedListener
- : public ResourceAdaptationProcessorListener {
+class FakeVideoSourceRestrictionsListener
+ : public VideoSourceRestrictionsListener {
public:
- VideoSourceRestrictionsUpdatedListener()
+ FakeVideoSourceRestrictionsListener()
: was_restrictions_updated_(false), restrictions_updated_event_() {}
- ~VideoSourceRestrictionsUpdatedListener() override {
+ ~FakeVideoSourceRestrictionsListener() override {
RTC_DCHECK(was_restrictions_updated_);
}
@@ -193,7 +201,7 @@ class VideoSourceRestrictionsUpdatedListener
return &restrictions_updated_event_;
}
- // ResourceAdaptationProcessorListener implementation.
+ // VideoSourceRestrictionsListener implementation.
void OnVideoSourceRestrictionsUpdated(
VideoSourceRestrictions restrictions,
const VideoAdaptationCounters& adaptation_counters,
@@ -207,6 +215,96 @@ class VideoSourceRestrictionsUpdatedListener
rtc::Event restrictions_updated_event_;
};
+auto WantsFps(Matcher<int> fps_matcher) {
+ return Field("max_framerate_fps", &rtc::VideoSinkWants::max_framerate_fps,
+ fps_matcher);
+}
+
+auto WantsMaxPixels(Matcher<int> max_pixel_matcher) {
+ return Field("max_pixel_count", &rtc::VideoSinkWants::max_pixel_count,
+ AllOf(max_pixel_matcher, Gt(0)));
+}
+
+auto ResolutionMax() {
+ return AllOf(
+ WantsMaxPixels(Eq(std::numeric_limits<int>::max())),
+ Field("target_pixel_count", &rtc::VideoSinkWants::target_pixel_count,
+ Eq(absl::nullopt)));
+}
+
+auto FpsMax() {
+ return WantsFps(Eq(kDefaultFramerate));
+}
+
+auto FpsUnlimited() {
+ return WantsFps(Eq(std::numeric_limits<int>::max()));
+}
+
+auto FpsMatchesResolutionMax(Matcher<int> fps_matcher) {
+ return AllOf(WantsFps(fps_matcher), ResolutionMax());
+}
+
+auto FpsMaxResolutionMatches(Matcher<int> pixel_matcher) {
+ return AllOf(FpsMax(), WantsMaxPixels(pixel_matcher));
+}
+
+auto FpsMaxResolutionMax() {
+ return AllOf(FpsMax(), ResolutionMax());
+}
+
+auto UnlimitedSinkWants() {
+ return AllOf(FpsUnlimited(), ResolutionMax());
+}
+
+auto FpsInRangeForPixelsInBalanced(int last_frame_pixels) {
+ Matcher<int> fps_range_matcher;
+
+ if (last_frame_pixels <= 320 * 240) {
+ fps_range_matcher = AllOf(Ge(7), Le(10));
+ } else if (last_frame_pixels <= 480 * 270) {
+ fps_range_matcher = AllOf(Ge(10), Le(15));
+ } else if (last_frame_pixels <= 640 * 480) {
+ fps_range_matcher = Ge(15);
+ } else {
+ fps_range_matcher = Eq(kDefaultFramerate);
+ }
+ return Field("max_framerate_fps", &rtc::VideoSinkWants::max_framerate_fps,
+ fps_range_matcher);
+}
+
+auto FpsEqResolutionEqTo(const rtc::VideoSinkWants& other_wants) {
+ return AllOf(WantsFps(Eq(other_wants.max_framerate_fps)),
+ WantsMaxPixels(Eq(other_wants.max_pixel_count)));
+}
+
+auto FpsMaxResolutionLt(const rtc::VideoSinkWants& other_wants) {
+ return AllOf(FpsMax(), WantsMaxPixels(Lt(other_wants.max_pixel_count)));
+}
+
+auto FpsMaxResolutionGt(const rtc::VideoSinkWants& other_wants) {
+ return AllOf(FpsMax(), WantsMaxPixels(Gt(other_wants.max_pixel_count)));
+}
+
+auto FpsLtResolutionEq(const rtc::VideoSinkWants& other_wants) {
+ return AllOf(WantsFps(Lt(other_wants.max_framerate_fps)),
+ WantsMaxPixels(Eq(other_wants.max_pixel_count)));
+}
+
+auto FpsGtResolutionEq(const rtc::VideoSinkWants& other_wants) {
+ return AllOf(WantsFps(Gt(other_wants.max_framerate_fps)),
+ WantsMaxPixels(Eq(other_wants.max_pixel_count)));
+}
+
+auto FpsEqResolutionLt(const rtc::VideoSinkWants& other_wants) {
+ return AllOf(WantsFps(Eq(other_wants.max_framerate_fps)),
+ WantsMaxPixels(Lt(other_wants.max_pixel_count)));
+}
+
+auto FpsEqResolutionGt(const rtc::VideoSinkWants& other_wants) {
+ return AllOf(WantsFps(Eq(other_wants.max_framerate_fps)),
+ WantsMaxPixels(Gt(other_wants.max_pixel_count)));
+}
+
class VideoStreamEncoderUnderTest : public VideoStreamEncoder {
public:
VideoStreamEncoderUnderTest(SendStatisticsProxy* stats_proxy,
@@ -220,25 +318,25 @@ class VideoStreamEncoderUnderTest : public VideoStreamEncoder {
overuse_detector_proxy_ =
new CpuOveruseDetectorProxy(stats_proxy)),
task_queue_factory),
- fake_cpu_resource_(new FakeResource("FakeResource[CPU]")),
- fake_quality_resource_(new FakeResource("FakeResource[QP]")) {
- fake_cpu_resource_->Initialize(encoder_queue(),
- resource_adaptation_queue());
- fake_quality_resource_->Initialize(encoder_queue(),
- resource_adaptation_queue());
+ fake_cpu_resource_(FakeResource::Create("FakeResource[CPU]")),
+ fake_quality_resource_(FakeResource::Create("FakeResource[QP]")),
+ fake_adaptation_constraint_("FakeAdaptationConstraint"),
+ fake_adaptation_listener_() {
InjectAdaptationResource(fake_quality_resource_,
VideoAdaptationReason::kQuality);
InjectAdaptationResource(fake_cpu_resource_, VideoAdaptationReason::kCpu);
+ InjectAdaptationConstraint(&fake_adaptation_constraint_);
+ InjectAdaptationListener(&fake_adaptation_listener_);
}
void SetSourceAndWaitForRestrictionsUpdated(
rtc::VideoSourceInterface<VideoFrame>* source,
const DegradationPreference& degradation_preference) {
- VideoSourceRestrictionsUpdatedListener listener;
- AddAdaptationListenerForTesting(&listener);
+ FakeVideoSourceRestrictionsListener listener;
+ AddRestrictionsListenerForTesting(&listener);
SetSource(source, degradation_preference);
listener.restrictions_updated_event()->Wait(5000);
- RemoveAdaptationListenerForTesting(&listener);
+ RemoveRestrictionsListenerForTesting(&listener);
}
void SetSourceAndWaitForFramerateUpdated(
@@ -283,7 +381,7 @@ class VideoStreamEncoderUnderTest : public VideoStreamEncoder {
void TriggerCpuOveruse() {
rtc::Event event;
resource_adaptation_queue()->PostTask([this, &event] {
- fake_cpu_resource_->set_usage_state(ResourceUsageState::kOveruse);
+ fake_cpu_resource_->SetUsageState(ResourceUsageState::kOveruse);
event.Set();
});
ASSERT_TRUE(event.Wait(5000));
@@ -291,7 +389,7 @@ class VideoStreamEncoderUnderTest : public VideoStreamEncoder {
void TriggerCpuUnderuse() {
rtc::Event event;
resource_adaptation_queue()->PostTask([this, &event] {
- fake_cpu_resource_->set_usage_state(ResourceUsageState::kUnderuse);
+ fake_cpu_resource_->SetUsageState(ResourceUsageState::kUnderuse);
event.Set();
});
ASSERT_TRUE(event.Wait(5000));
@@ -301,7 +399,7 @@ class VideoStreamEncoderUnderTest : public VideoStreamEncoder {
void TriggerQualityLow() {
rtc::Event event;
resource_adaptation_queue()->PostTask([this, &event] {
- fake_quality_resource_->set_usage_state(ResourceUsageState::kOveruse);
+ fake_quality_resource_->SetUsageState(ResourceUsageState::kOveruse);
event.Set();
});
ASSERT_TRUE(event.Wait(5000));
@@ -309,7 +407,7 @@ class VideoStreamEncoderUnderTest : public VideoStreamEncoder {
void TriggerQualityHigh() {
rtc::Event event;
resource_adaptation_queue()->PostTask([this, &event] {
- fake_quality_resource_->set_usage_state(ResourceUsageState::kUnderuse);
+ fake_quality_resource_->SetUsageState(ResourceUsageState::kUnderuse);
event.Set();
});
ASSERT_TRUE(event.Wait(5000));
@@ -334,6 +432,8 @@ class VideoStreamEncoderUnderTest : public VideoStreamEncoder {
CpuOveruseDetectorProxy* overuse_detector_proxy_;
rtc::scoped_refptr<FakeResource> fake_cpu_resource_;
rtc::scoped_refptr<FakeResource> fake_quality_resource_;
+ FakeAdaptationConstraint fake_adaptation_constraint_;
+ FakeAdaptationListener fake_adaptation_listener_;
};
class VideoStreamFactory
@@ -458,9 +558,9 @@ class AdaptingFrameForwarder : public test::FrameForwarder {
void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
const rtc::VideoSinkWants& wants) override {
rtc::CritScope cs(&crit_);
- last_wants_ = sink_wants();
+ last_wants_ = sink_wants_locked();
adapter_.OnSinkWants(wants);
- test::FrameForwarder::AddOrUpdateSink(sink, wants);
+ test::FrameForwarder::AddOrUpdateSinkLocked(sink, wants);
}
cricket::VideoAdapter adapter_;
bool adaptation_enabled_ RTC_GUARDED_BY(crit_);
@@ -507,16 +607,24 @@ class MockableSendStatisticsProxy : public SendStatisticsProxy {
class MockBitrateObserver : public VideoBitrateAllocationObserver {
public:
- MOCK_METHOD1(OnBitrateAllocationUpdated, void(const VideoBitrateAllocation&));
+ MOCK_METHOD(void,
+ OnBitrateAllocationUpdated,
+ (const VideoBitrateAllocation&),
+ (override));
};
class MockEncoderSelector
: public VideoEncoderFactory::EncoderSelectorInterface {
public:
- MOCK_METHOD1(OnCurrentEncoder, void(const SdpVideoFormat& format));
- MOCK_METHOD1(OnAvailableBitrate,
- absl::optional<SdpVideoFormat>(const DataRate& rate));
- MOCK_METHOD0(OnEncoderBroken, absl::optional<SdpVideoFormat>());
+ MOCK_METHOD(void,
+ OnCurrentEncoder,
+ (const SdpVideoFormat& format),
+ (override));
+ MOCK_METHOD(absl::optional<SdpVideoFormat>,
+ OnAvailableBitrate,
+ (const DataRate& rate),
+ (override));
+ MOCK_METHOD(absl::optional<SdpVideoFormat>, OnEncoderBroken, (), (override));
};
} // namespace
@@ -689,106 +797,6 @@ class VideoStreamEncoderTest : public ::testing::Test {
WaitForEncodedFrame(1);
}
- void VerifyNoLimitation(const rtc::VideoSinkWants& wants) {
- EXPECT_EQ(std::numeric_limits<int>::max(), wants.max_framerate_fps);
- EXPECT_EQ(std::numeric_limits<int>::max(), wants.max_pixel_count);
- EXPECT_FALSE(wants.target_pixel_count);
- }
-
- void VerifyFpsEqResolutionEq(const rtc::VideoSinkWants& wants1,
- const rtc::VideoSinkWants& wants2) {
- EXPECT_EQ(wants1.max_framerate_fps, wants2.max_framerate_fps);
- EXPECT_EQ(wants1.max_pixel_count, wants2.max_pixel_count);
- }
-
- void VerifyFpsMaxResolutionMax(const rtc::VideoSinkWants& wants) {
- EXPECT_EQ(kDefaultFramerate, wants.max_framerate_fps);
- EXPECT_EQ(std::numeric_limits<int>::max(), wants.max_pixel_count);
- EXPECT_FALSE(wants.target_pixel_count);
- }
-
- void VerifyFpsMaxResolutionLt(const rtc::VideoSinkWants& wants1,
- const rtc::VideoSinkWants& wants2) {
- EXPECT_EQ(kDefaultFramerate, wants1.max_framerate_fps);
- EXPECT_LT(wants1.max_pixel_count, wants2.max_pixel_count);
- EXPECT_GT(wants1.max_pixel_count, 0);
- }
-
- void VerifyFpsMaxResolutionGt(const rtc::VideoSinkWants& wants1,
- const rtc::VideoSinkWants& wants2) {
- EXPECT_EQ(kDefaultFramerate, wants1.max_framerate_fps);
- EXPECT_GT(wants1.max_pixel_count, wants2.max_pixel_count);
- }
-
- void VerifyFpsMaxResolutionEq(const rtc::VideoSinkWants& wants1,
- const rtc::VideoSinkWants& wants2) {
- EXPECT_EQ(kDefaultFramerate, wants1.max_framerate_fps);
- EXPECT_EQ(wants1.max_pixel_count, wants2.max_pixel_count);
- }
-
- void VerifyFpsLtResolutionEq(const rtc::VideoSinkWants& wants1,
- const rtc::VideoSinkWants& wants2) {
- EXPECT_LT(wants1.max_framerate_fps, wants2.max_framerate_fps);
- EXPECT_EQ(wants1.max_pixel_count, wants2.max_pixel_count);
- }
-
- void VerifyFpsGtResolutionEq(const rtc::VideoSinkWants& wants1,
- const rtc::VideoSinkWants& wants2) {
- EXPECT_GT(wants1.max_framerate_fps, wants2.max_framerate_fps);
- EXPECT_EQ(wants1.max_pixel_count, wants2.max_pixel_count);
- }
-
- void VerifyFpsEqResolutionLt(const rtc::VideoSinkWants& wants1,
- const rtc::VideoSinkWants& wants2) {
- EXPECT_EQ(wants1.max_framerate_fps, wants2.max_framerate_fps);
- EXPECT_LT(wants1.max_pixel_count, wants2.max_pixel_count);
- EXPECT_GT(wants1.max_pixel_count, 0);
- }
-
- void VerifyFpsEqResolutionGt(const rtc::VideoSinkWants& wants1,
- const rtc::VideoSinkWants& wants2) {
- EXPECT_EQ(wants1.max_framerate_fps, wants2.max_framerate_fps);
- EXPECT_GT(wants1.max_pixel_count, wants2.max_pixel_count);
- }
-
- void VerifyFpsMaxResolutionLt(const rtc::VideoSinkWants& wants,
- int pixel_count) {
- EXPECT_EQ(kDefaultFramerate, wants.max_framerate_fps);
- EXPECT_LT(wants.max_pixel_count, pixel_count);
- EXPECT_GT(wants.max_pixel_count, 0);
- }
-
- void VerifyFpsLtResolutionMax(const rtc::VideoSinkWants& wants, int fps) {
- EXPECT_LT(wants.max_framerate_fps, fps);
- EXPECT_EQ(std::numeric_limits<int>::max(), wants.max_pixel_count);
- EXPECT_FALSE(wants.target_pixel_count);
- }
-
- void VerifyFpsEqResolutionMax(const rtc::VideoSinkWants& wants,
- int expected_fps) {
- EXPECT_EQ(expected_fps, wants.max_framerate_fps);
- EXPECT_EQ(std::numeric_limits<int>::max(), wants.max_pixel_count);
- EXPECT_FALSE(wants.target_pixel_count);
- }
-
- void VerifyBalancedModeFpsRange(const rtc::VideoSinkWants& wants,
- int last_frame_pixels) {
- // Balanced mode should always scale FPS to the desired range before
- // attempting to scale resolution.
- int fps_limit = wants.max_framerate_fps;
- if (last_frame_pixels <= 320 * 240) {
- EXPECT_LE(7, fps_limit);
- EXPECT_LE(fps_limit, 10);
- } else if (last_frame_pixels <= 480 * 270) {
- EXPECT_LE(10, fps_limit);
- EXPECT_LE(fps_limit, 15);
- } else if (last_frame_pixels <= 640 * 480) {
- EXPECT_LE(15, fps_limit);
- } else {
- EXPECT_EQ(kDefaultFramerate, fps_limit);
- }
- }
-
void WaitForEncodedFrame(int64_t expected_ntp_time) {
sink_.WaitForEncodedFrame(expected_ntp_time);
fake_clock_.AdvanceTime(TimeDelta::Seconds(1) / max_framerate_);
@@ -1864,7 +1872,7 @@ TEST_F(VideoStreamEncoderTest, TestCpuDowngrades_BalancedMode) {
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
video_stream_encoder_->SetSource(&video_source_,
webrtc::DegradationPreference::BALANCED);
- VerifyNoLimitation(video_source_.sink_wants());
+ EXPECT_THAT(video_source_.sink_wants(), UnlimitedSinkWants());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -1889,9 +1897,10 @@ TEST_F(VideoStreamEncoderTest, TestCpuDowngrades_BalancedMode) {
t += frame_interval_ms;
video_stream_encoder_->TriggerCpuOveruse();
- VerifyBalancedModeFpsRange(
+ EXPECT_THAT(
video_source_.sink_wants(),
- *video_source_.last_sent_width() * *video_source_.last_sent_height());
+ FpsInRangeForPixelsInBalanced(*video_source_.last_sent_width() *
+ *video_source_.last_sent_height()));
} while (video_source_.sink_wants().max_pixel_count <
last_wants.max_pixel_count ||
video_source_.sink_wants().max_framerate_fps <
@@ -1924,16 +1933,17 @@ TEST_F(VideoStreamEncoderTest, TestCpuDowngrades_BalancedMode) {
t += frame_interval_ms;
video_stream_encoder_->TriggerCpuUnderuse();
- VerifyBalancedModeFpsRange(
+ EXPECT_THAT(
video_source_.sink_wants(),
- *video_source_.last_sent_width() * *video_source_.last_sent_height());
+ FpsInRangeForPixelsInBalanced(*video_source_.last_sent_width() *
+ *video_source_.last_sent_height()));
EXPECT_TRUE(video_source_.sink_wants().max_pixel_count >
last_wants.max_pixel_count ||
video_source_.sink_wants().max_framerate_fps >
last_wants.max_framerate_fps);
}
- VerifyFpsMaxResolutionMax(video_source_.sink_wants());
+ EXPECT_THAT(video_source_.sink_wants(), FpsMaxResolutionMax());
stats_proxy_->ResetMockStats();
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
@@ -1949,7 +1959,7 @@ TEST_F(VideoStreamEncoderTest,
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
- VerifyNoLimitation(video_source_.sink_wants());
+ EXPECT_THAT(video_source_.sink_wants(), UnlimitedSinkWants());
const int kFrameWidth = 1280;
const int kFrameHeight = 720;
@@ -1966,7 +1976,7 @@ TEST_F(VideoStreamEncoderTest,
video_source_.set_adaptation_enabled(true);
video_stream_encoder_->SetSource(
&video_source_, webrtc::DegradationPreference::MAINTAIN_RESOLUTION);
- VerifyNoLimitation(video_source_.sink_wants());
+ EXPECT_THAT(video_source_.sink_wants(), UnlimitedSinkWants());
video_source_.IncomingCapturedFrame(
CreateFrame(ntp_time, kFrameWidth, kFrameHeight));
sink_.WaitForEncodedFrame(ntp_time);
@@ -1990,7 +2000,7 @@ TEST_F(VideoStreamEncoderTest,
sink_.WaitForEncodedFrame(ntp_time);
ntp_time += 100;
- video_stream_encoder_->SetSource(
+ video_stream_encoder_->SetSourceAndWaitForRestrictionsUpdated(
&video_source_, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
// Give the encoder queue time to process the change in degradation preference
// by waiting for an encoded frame.
@@ -2022,8 +2032,9 @@ TEST_F(VideoStreamEncoderTest,
EXPECT_EQ(video_source_.sink_wants().max_pixel_count, pixel_count);
EXPECT_EQ(video_source_.sink_wants().max_framerate_fps, kInputFps);
- // Change the degradation preference back. CPU underuse should now adapt.
- video_stream_encoder_->SetSource(
+ // Change the degradation preference back. CPU underuse should not adapt since
+ // QP is most limited.
+ video_stream_encoder_->SetSourceAndWaitForRestrictionsUpdated(
&video_source_, webrtc::DegradationPreference::MAINTAIN_RESOLUTION);
video_source_.IncomingCapturedFrame(
CreateFrame(ntp_time, kFrameWidth, kFrameHeight));
@@ -2042,7 +2053,15 @@ TEST_F(VideoStreamEncoderTest,
CreateFrame(ntp_time, kFrameWidth, kFrameHeight));
sink_.WaitForEncodedFrame(ntp_time);
ntp_time += kFrameIntervalMs;
- EXPECT_EQ(video_source_.sink_wants().max_framerate_fps, kInputFps);
+ EXPECT_EQ(video_source_.sink_wants().max_framerate_fps, restricted_fps);
+
+ // Trigger QP underuse, fps should return to normal.
+ video_stream_encoder_->TriggerQualityHigh();
+ video_source_.IncomingCapturedFrame(
+ CreateFrame(ntp_time, kFrameWidth, kFrameHeight));
+ sink_.WaitForEncodedFrame(ntp_time);
+ ntp_time += kFrameIntervalMs;
+ EXPECT_THAT(video_source_.sink_wants(), FpsMax());
video_stream_encoder_->Stop();
}
@@ -2052,7 +2071,7 @@ TEST_F(VideoStreamEncoderTest, SinkWantsStoredByDegradationPreference) {
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps),
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
- VerifyNoLimitation(video_source_.sink_wants());
+ EXPECT_THAT(video_source_.sink_wants(), UnlimitedSinkWants());
const int kFrameWidth = 1280;
const int kFrameHeight = 720;
@@ -2089,7 +2108,7 @@ TEST_F(VideoStreamEncoderTest, SinkWantsStoredByDegradationPreference) {
sink_.WaitForEncodedFrame(frame_timestamp);
frame_timestamp += kFrameIntervalMs;
// Initially no degradation registered.
- VerifyFpsMaxResolutionMax(new_video_source.sink_wants());
+ EXPECT_THAT(new_video_source.sink_wants(), FpsMaxResolutionMax());
// Force an input frame rate to be available, or the adaptation call won't
// know what framerate to adapt form.
@@ -2119,7 +2138,7 @@ TEST_F(VideoStreamEncoderTest, SinkWantsStoredByDegradationPreference) {
CreateFrame(frame_timestamp, kFrameWidth, kFrameWidth));
sink_.WaitForEncodedFrame(frame_timestamp);
frame_timestamp += kFrameIntervalMs;
- VerifyFpsMaxResolutionMax(new_video_source.sink_wants());
+ EXPECT_THAT(new_video_source.sink_wants(), FpsMaxResolutionMax());
video_stream_encoder_->TriggerCpuOveruse();
new_video_source.IncomingCapturedFrame(
@@ -2128,7 +2147,7 @@ TEST_F(VideoStreamEncoderTest, SinkWantsStoredByDegradationPreference) {
frame_timestamp += kFrameIntervalMs;
// Still no degradation.
- VerifyFpsMaxResolutionMax(new_video_source.sink_wants());
+ EXPECT_THAT(new_video_source.sink_wants(), FpsMaxResolutionMax());
// Calling SetSource with resolution scaling enabled apply the old SinkWants.
video_stream_encoder_->SetSourceAndWaitForRestrictionsUpdated(
@@ -2455,7 +2474,7 @@ TEST_F(VideoStreamEncoderTest,
// Set new degradation preference should clear restrictions since we changed
// from BALANCED.
- video_stream_encoder_->SetSource(
+ video_stream_encoder_->SetSourceAndWaitForRestrictionsUpdated(
&source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
source.IncomingCapturedFrame(CreateFrame(sequence, kWidth, kHeight));
WaitForEncodedFrame(sequence++);
@@ -2479,8 +2498,8 @@ TEST_F(VideoStreamEncoderTest,
EXPECT_EQ(2, stats.number_of_cpu_adapt_changes);
// Back to BALANCED, should clear the restrictions again.
- video_stream_encoder_->SetSource(&source,
- webrtc::DegradationPreference::BALANCED);
+ video_stream_encoder_->SetSourceAndWaitForRestrictionsUpdated(
+ &source, webrtc::DegradationPreference::BALANCED);
source.IncomingCapturedFrame(CreateFrame(sequence, kWidth, kHeight));
WaitForEncodedFrame(sequence++);
stats = stats_proxy_->GetStats();
@@ -2638,7 +2657,7 @@ TEST_F(VideoStreamEncoderTest,
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
// Expect no scaling to begin with.
- VerifyNoLimitation(video_source_.sink_wants());
+ EXPECT_THAT(video_source_.sink_wants(), UnlimitedSinkWants());
video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
WaitForEncodedFrame(1);
@@ -2695,13 +2714,14 @@ TEST_F(VideoStreamEncoderTest,
source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
WaitForEncodedFrame(1);
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
// Trigger adapt down, expect scaled down resolution.
video_stream_encoder_->TriggerCpuOveruse();
- VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
+ EXPECT_THAT(source.sink_wants(),
+ FpsMaxResolutionMatches(Lt(kWidth * kHeight)));
const int kLastMaxPixelCount = source.sink_wants().max_pixel_count;
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -2729,11 +2749,12 @@ TEST_F(VideoStreamEncoderTest, SkipsSameOrLargerAdaptDownRequest_BalancedMode) {
webrtc::DegradationPreference::BALANCED);
source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
sink_.WaitForEncodedFrame(1);
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
// Trigger adapt down, expect scaled down resolution.
video_stream_encoder_->TriggerQualityLow();
- VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
+ EXPECT_THAT(source.sink_wants(),
+ FpsMaxResolutionMatches(Lt(kWidth * kHeight)));
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
const int kLastMaxPixelCount = source.sink_wants().max_pixel_count;
@@ -2773,13 +2794,13 @@ TEST_F(VideoStreamEncoderTest,
source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
// Trigger adapt up, expect no change.
video_stream_encoder_->TriggerCpuUnderuse();
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -2802,13 +2823,13 @@ TEST_F(VideoStreamEncoderTest,
source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
// Trigger adapt up, expect no change.
video_stream_encoder_->TriggerCpuUnderuse();
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -2830,14 +2851,14 @@ TEST_F(VideoStreamEncoderTest, NoChangeForInitialNormalUsage_BalancedMode) {
source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
sink_.WaitForEncodedFrame(kWidth, kHeight);
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt up, expect no change.
video_stream_encoder_->TriggerQualityHigh();
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -2860,14 +2881,14 @@ TEST_F(VideoStreamEncoderTest, NoChangeForInitialNormalUsage_DisabledMode) {
source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
sink_.WaitForEncodedFrame(kWidth, kHeight);
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt up, expect no change.
video_stream_encoder_->TriggerQualityHigh();
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -2892,7 +2913,7 @@ TEST_F(VideoStreamEncoderTest,
source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
WaitForEncodedFrame(1);
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -2900,13 +2921,14 @@ TEST_F(VideoStreamEncoderTest,
video_stream_encoder_->TriggerQualityLow();
source.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
WaitForEncodedFrame(2);
- VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
+ EXPECT_THAT(source.sink_wants(),
+ FpsMaxResolutionMatches(Lt(kWidth * kHeight)));
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt up, expect no restriction.
video_stream_encoder_->TriggerQualityHigh();
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -2931,33 +2953,35 @@ TEST_F(VideoStreamEncoderTest,
// Expect no scaling to begin with (preference: MAINTAIN_FRAMERATE).
video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));
sink_.WaitForEncodedFrame(1);
- VerifyFpsMaxResolutionMax(video_source_.sink_wants());
+ EXPECT_THAT(video_source_.sink_wants(), FpsMaxResolutionMax());
// Trigger adapt down, expect scaled down resolution.
video_stream_encoder_->TriggerQualityLow();
video_source_.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight));
sink_.WaitForEncodedFrame(2);
- VerifyFpsMaxResolutionLt(video_source_.sink_wants(), kWidth * kHeight);
+ EXPECT_THAT(video_source_.sink_wants(),
+ FpsMaxResolutionMatches(Lt(kWidth * kHeight)));
// Enable MAINTAIN_RESOLUTION preference.
test::FrameForwarder new_video_source;
- video_stream_encoder_->SetSource(
+ video_stream_encoder_->SetSourceAndWaitForRestrictionsUpdated(
&new_video_source, webrtc::DegradationPreference::MAINTAIN_RESOLUTION);
// Give the encoder queue time to process the change in degradation preference
// by waiting for an encoded frame.
new_video_source.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight));
sink_.WaitForEncodedFrame(3);
- VerifyFpsMaxResolutionMax(new_video_source.sink_wants());
+ EXPECT_THAT(new_video_source.sink_wants(), FpsMaxResolutionMax());
// Trigger adapt down, expect reduced framerate.
video_stream_encoder_->TriggerQualityLow();
new_video_source.IncomingCapturedFrame(CreateFrame(4, kWidth, kHeight));
sink_.WaitForEncodedFrame(4);
- VerifyFpsLtResolutionMax(new_video_source.sink_wants(), kInputFps);
+ EXPECT_THAT(new_video_source.sink_wants(),
+ FpsMatchesResolutionMax(Lt(kInputFps)));
// Trigger adapt up, expect no restriction.
video_stream_encoder_->TriggerQualityHigh();
- VerifyFpsMaxResolutionMax(new_video_source.sink_wants());
+ EXPECT_THAT(new_video_source.sink_wants(), FpsMaxResolutionMax());
video_stream_encoder_->Stop();
}
@@ -3019,7 +3043,7 @@ TEST_F(VideoStreamEncoderTest,
int64_t timestamp_ms = kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -3028,7 +3052,8 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
+ EXPECT_THAT(source.sink_wants(),
+ FpsMaxResolutionMatches(Lt(kWidth * kHeight)));
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -3037,7 +3062,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -3046,7 +3071,8 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
+ EXPECT_THAT(source.sink_wants(),
+ FpsMaxResolutionMatches(Lt(kWidth * kHeight)));
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(3, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -3055,7 +3081,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
sink_.WaitForEncodedFrame(kWidth, kHeight);
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(4, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -3080,7 +3106,7 @@ TEST_F(VideoStreamEncoderTest,
int64_t timestamp_ms = kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
sink_.WaitForEncodedFrame(kWidth, kHeight);
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -3089,7 +3115,8 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
sink_.WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
+ EXPECT_THAT(source.sink_wants(),
+ FpsMaxResolutionMatches(Lt(kWidth * kHeight)));
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -3098,7 +3125,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
sink_.WaitForEncodedFrame(kWidth, kHeight);
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -3107,7 +3134,8 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
sink_.WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
+ EXPECT_THAT(source.sink_wants(),
+ FpsMaxResolutionMatches(Lt(kWidth * kHeight)));
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -3116,7 +3144,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
sink_.WaitForEncodedFrame(kWidth, kHeight);
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -3160,7 +3188,7 @@ TEST_F(VideoStreamEncoderTest, AdaptUpIfBwEstimateIsHigherThanMinBitrate) {
// Trigger adapt up. Higher resolution should not be requested duo to lack
// of bitrate.
video_stream_encoder_->TriggerQualityHigh();
- VerifyFpsMaxResolutionLt(source.sink_wants(), 1280 * 720);
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMatches(Lt(1280 * 720)));
// Increase bitrate.
video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
@@ -3171,7 +3199,7 @@ TEST_F(VideoStreamEncoderTest, AdaptUpIfBwEstimateIsHigherThanMinBitrate) {
// Trigger adapt up. Higher resolution should be requested.
video_stream_encoder_->TriggerQualityHigh();
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
video_stream_encoder_->Stop();
}
@@ -3198,7 +3226,7 @@ TEST_F(VideoStreamEncoderTest, DropFirstFramesIfBwEstimateIsTooLow) {
int64_t timestamp_ms = kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, 1280, 720));
ExpectDroppedFrame();
- VerifyFpsMaxResolutionLt(source.sink_wants(), 1280 * 720);
+ EXPECT_TRUE_WAIT(source.sink_wants().max_pixel_count < 1280 * 720, 5000);
// Insert 720p frame. It should be downscaled and encoded.
timestamp_ms += kFrameIntervalMs;
@@ -3256,7 +3284,7 @@ TEST_F(BalancedDegradationTest, AdaptDownReturnsFalseIfFpsDiffLtThreshold) {
stats_proxy_->SetMockStats(stats);
InsertFrameAndWaitForEncoded();
- VerifyFpsMaxResolutionMax(source_.sink_wants());
+ EXPECT_THAT(source_.sink_wants(), FpsMaxResolutionMax());
// Trigger adapt down, expect scaled down framerate (640x360@24fps).
// Fps diff (input-requested:0) < threshold, expect adapting down not to clear
@@ -3264,7 +3292,7 @@ TEST_F(BalancedDegradationTest, AdaptDownReturnsFalseIfFpsDiffLtThreshold) {
EXPECT_FALSE(
video_stream_encoder_
->TriggerQualityScalerHighQpAndReturnIfQpSamplesShouldBeCleared());
- VerifyFpsEqResolutionMax(source_.sink_wants(), 24);
+ EXPECT_THAT(source_.sink_wants(), FpsMatchesResolutionMax(Eq(24)));
video_stream_encoder_->Stop();
}
@@ -3282,7 +3310,7 @@ TEST_F(BalancedDegradationTest, AdaptDownReturnsTrueIfFpsDiffGeThreshold) {
stats_proxy_->SetMockStats(stats);
InsertFrameAndWaitForEncoded();
- VerifyFpsMaxResolutionMax(source_.sink_wants());
+ EXPECT_THAT(source_.sink_wants(), FpsMaxResolutionMax());
// Trigger adapt down, expect scaled down framerate (640x360@24fps).
// Fps diff (input-requested:1) == threshold, expect adapting down to clear QP
@@ -3290,7 +3318,7 @@ TEST_F(BalancedDegradationTest, AdaptDownReturnsTrueIfFpsDiffGeThreshold) {
EXPECT_TRUE(
video_stream_encoder_
->TriggerQualityScalerHighQpAndReturnIfQpSamplesShouldBeCleared());
- VerifyFpsEqResolutionMax(source_.sink_wants(), 24);
+ EXPECT_THAT(source_.sink_wants(), FpsMatchesResolutionMax(Eq(24)));
video_stream_encoder_->Stop();
}
@@ -3304,11 +3332,11 @@ TEST_F(BalancedDegradationTest, AdaptDownUsesCodecSpecificFps) {
EXPECT_EQ(kVideoCodecVP8, video_encoder_config_.codec_type);
InsertFrameAndWaitForEncoded();
- VerifyFpsMaxResolutionMax(source_.sink_wants());
+ EXPECT_THAT(source_.sink_wants(), FpsMaxResolutionMax());
// Trigger adapt down, expect scaled down framerate (640x360@22fps).
video_stream_encoder_->TriggerQualityLow();
- VerifyFpsEqResolutionMax(source_.sink_wants(), 22);
+ EXPECT_THAT(source_.sink_wants(), FpsMatchesResolutionMax(Eq(22)));
video_stream_encoder_->Stop();
}
@@ -3324,25 +3352,25 @@ TEST_F(BalancedDegradationTest, NoAdaptUpIfBwEstimateIsLessThanMinBitrate) {
OnBitrateUpdated(kTooLowMinBitrateBps);
InsertFrameAndWaitForEncoded();
- VerifyFpsMaxResolutionMax(source_.sink_wants());
+ EXPECT_THAT(source_.sink_wants(), FpsMaxResolutionMax());
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt down, expect scaled down framerate (640x360@14fps).
video_stream_encoder_->TriggerQualityLow();
InsertFrameAndWaitForEncoded();
- VerifyFpsEqResolutionMax(source_.sink_wants(), 14);
+ EXPECT_THAT(source_.sink_wants(), FpsMatchesResolutionMax(Eq(14)));
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt down, expect scaled down resolution (480x270@14fps).
video_stream_encoder_->TriggerQualityLow();
InsertFrameAndWaitForEncoded();
- VerifyFpsEqResolutionLt(source_.sink_wants(), source_.last_wants());
+ EXPECT_THAT(source_.sink_wants(), FpsEqResolutionLt(source_.last_wants()));
EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt down, expect scaled down framerate (480x270@10fps).
video_stream_encoder_->TriggerQualityLow();
InsertFrameAndWaitForEncoded();
- VerifyFpsLtResolutionEq(source_.sink_wants(), source_.last_wants());
+ EXPECT_THAT(source_.sink_wants(), FpsLtResolutionEq(source_.last_wants()));
EXPECT_EQ(source_.sink_wants().max_framerate_fps, 10);
EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -3369,7 +3397,7 @@ TEST_F(BalancedDegradationTest,
SetupTest();
OnBitrateUpdated(kLowTargetBitrateBps);
- VerifyNoLimitation(source_.sink_wants());
+ EXPECT_THAT(source_.sink_wants(), UnlimitedSinkWants());
// Insert frame, expect scaled down:
// framerate (640x360@24fps) -> resolution (480x270@24fps).
@@ -3404,31 +3432,31 @@ TEST_F(BalancedDegradationTest,
OnBitrateUpdated(kTooLowMinResolutionBitrateBps);
InsertFrameAndWaitForEncoded();
- VerifyFpsMaxResolutionMax(source_.sink_wants());
+ EXPECT_THAT(source_.sink_wants(), FpsMaxResolutionMax());
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt down, expect scaled down framerate (640x360@14fps).
video_stream_encoder_->TriggerQualityLow();
InsertFrameAndWaitForEncoded();
- VerifyFpsEqResolutionMax(source_.sink_wants(), 14);
+ EXPECT_THAT(source_.sink_wants(), FpsMatchesResolutionMax(Eq(14)));
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt down, expect scaled down resolution (480x270@14fps).
video_stream_encoder_->TriggerQualityLow();
InsertFrameAndWaitForEncoded();
- VerifyFpsEqResolutionLt(source_.sink_wants(), source_.last_wants());
+ EXPECT_THAT(source_.sink_wants(), FpsEqResolutionLt(source_.last_wants()));
EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt down, expect scaled down framerate (480x270@10fps).
video_stream_encoder_->TriggerQualityLow();
InsertFrameAndWaitForEncoded();
- VerifyFpsLtResolutionEq(source_.sink_wants(), source_.last_wants());
+ EXPECT_THAT(source_.sink_wants(), FpsLtResolutionEq(source_.last_wants()));
EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt up, expect upscaled fps (no bitrate limit) (480x270@14fps).
video_stream_encoder_->TriggerQualityHigh();
InsertFrameAndWaitForEncoded();
- VerifyFpsGtResolutionEq(source_.sink_wants(), source_.last_wants());
+ EXPECT_THAT(source_.sink_wants(), FpsGtResolutionEq(source_.last_wants()));
EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt up, expect no upscale in res (target bitrate < min bitrate).
@@ -3440,7 +3468,7 @@ TEST_F(BalancedDegradationTest,
OnBitrateUpdated(kResolutionMinBitrateBps);
video_stream_encoder_->TriggerQualityHigh();
InsertFrameAndWaitForEncoded();
- VerifyFpsEqResolutionGt(source_.sink_wants(), source_.last_wants());
+ EXPECT_THAT(source_.sink_wants(), FpsEqResolutionGt(source_.last_wants()));
EXPECT_EQ(5, stats_proxy_->GetStats().number_of_quality_adapt_changes);
video_stream_encoder_->Stop();
@@ -3460,25 +3488,25 @@ TEST_F(BalancedDegradationTest,
OnBitrateUpdated(kTooLowMinBitrateBps);
InsertFrameAndWaitForEncoded();
- VerifyFpsMaxResolutionMax(source_.sink_wants());
+ EXPECT_THAT(source_.sink_wants(), FpsMaxResolutionMax());
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt down, expect scaled down framerate (640x360@14fps).
video_stream_encoder_->TriggerQualityLow();
InsertFrameAndWaitForEncoded();
- VerifyFpsEqResolutionMax(source_.sink_wants(), 14);
+ EXPECT_THAT(source_.sink_wants(), FpsMatchesResolutionMax(Eq(14)));
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt down, expect scaled down resolution (480x270@14fps).
video_stream_encoder_->TriggerQualityLow();
InsertFrameAndWaitForEncoded();
- VerifyFpsEqResolutionLt(source_.sink_wants(), source_.last_wants());
+ EXPECT_THAT(source_.sink_wants(), FpsEqResolutionLt(source_.last_wants()));
EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt down, expect scaled down framerate (480x270@10fps).
video_stream_encoder_->TriggerQualityLow();
InsertFrameAndWaitForEncoded();
- VerifyFpsLtResolutionEq(source_.sink_wants(), source_.last_wants());
+ EXPECT_THAT(source_.sink_wants(), FpsLtResolutionEq(source_.last_wants()));
EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt up, expect no upscale (target bitrate < min bitrate).
@@ -3490,7 +3518,7 @@ TEST_F(BalancedDegradationTest,
OnBitrateUpdated(kMinBitrateBps);
video_stream_encoder_->TriggerQualityHigh();
InsertFrameAndWaitForEncoded();
- VerifyFpsGtResolutionEq(source_.sink_wants(), source_.last_wants());
+ EXPECT_THAT(source_.sink_wants(), FpsGtResolutionEq(source_.last_wants()));
EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt up, expect no upscale in res (target bitrate < min bitrate).
@@ -3503,7 +3531,7 @@ TEST_F(BalancedDegradationTest,
OnBitrateUpdated(kResolutionMinBitrateBps);
video_stream_encoder_->TriggerQualityHigh();
InsertFrameAndWaitForEncoded();
- VerifyFpsEqResolutionGt(source_.sink_wants(), source_.last_wants());
+ EXPECT_THAT(source_.sink_wants(), FpsEqResolutionGt(source_.last_wants()));
EXPECT_EQ(5, stats_proxy_->GetStats().number_of_quality_adapt_changes);
video_stream_encoder_->Stop();
@@ -3527,7 +3555,7 @@ TEST_F(VideoStreamEncoderTest,
int64_t timestamp_ms = kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -3538,7 +3566,8 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
+ EXPECT_THAT(source.sink_wants(),
+ FpsMaxResolutionMatches(Lt(kWidth * kHeight)));
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -3549,7 +3578,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionLt(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionLt(source.last_wants()));
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -3560,7 +3589,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionLt(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionLt(source.last_wants()));
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(3, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -3571,7 +3600,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionLt(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionLt(source.last_wants()));
rtc::VideoSinkWants last_wants = source.sink_wants();
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
@@ -3583,68 +3612,74 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionEq(source.sink_wants(), last_wants);
+ EXPECT_THAT(source.sink_wants(), FpsMax());
+ EXPECT_EQ(source.sink_wants().max_pixel_count, last_wants.max_pixel_count);
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(3, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
- // Trigger cpu adapt up, expect upscaled resolution (480x270).
- video_stream_encoder_->TriggerCpuUnderuse();
+ // Trigger quality adapt up, expect upscaled resolution (480x270).
+ video_stream_encoder_->TriggerQualityHigh();
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants()));
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
- EXPECT_EQ(4, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
- EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+ EXPECT_EQ(3, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
+ EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
- // Trigger cpu adapt up, expect upscaled resolution (640x360).
+ // Trigger quality and cpu adapt up since both are most limited, expect
+ // upscaled resolution (640x360).
video_stream_encoder_->TriggerCpuUnderuse();
+ video_stream_encoder_->TriggerQualityHigh();
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants()));
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
- EXPECT_EQ(5, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
- EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+ EXPECT_EQ(4, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
+ EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
- // Trigger cpu adapt up, expect upscaled resolution (960x540).
+ // Trigger quality and cpu adapt up since both are most limited, expect
+ // upscaled resolution (960x540).
video_stream_encoder_->TriggerCpuUnderuse();
+ video_stream_encoder_->TriggerQualityHigh();
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants()));
last_wants = source.sink_wants();
- EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
+ EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
- EXPECT_EQ(6, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
- EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+ EXPECT_EQ(5, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
+ EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes);
- // Trigger cpu adapt up, no cpu downgrades, expect no change (960x540).
+ // Trigger cpu adapt up, expect no change since not most limited (960x540).
+ // However the stats will change since the CPU resource is no longer limited.
video_stream_encoder_->TriggerCpuUnderuse();
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsEqResolutionEq(source.sink_wants(), last_wants);
+ EXPECT_THAT(source.sink_wants(), FpsEqResolutionEqTo(last_wants));
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(6, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
- EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+ EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger quality adapt up, expect no restriction (1280x720).
video_stream_encoder_->TriggerQualityHigh();
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
- VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants());
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants()));
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_EQ(6, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
- EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+ EXPECT_EQ(5, stats_proxy_->GetStats().number_of_quality_adapt_changes);
video_stream_encoder_->Stop();
}
@@ -4015,7 +4050,8 @@ TEST_F(VideoStreamEncoderTest, DropsFramesAndScalesWhenBitrateIsTooLow) {
ExpectDroppedFrame();
// Expect the sink_wants to specify a scaled frame.
- EXPECT_LT(video_source_.sink_wants().max_pixel_count, kWidth * kHeight);
+ EXPECT_TRUE_WAIT(
+ video_source_.sink_wants().max_pixel_count < kWidth * kHeight, 5000);
int last_pixel_count = video_source_.sink_wants().max_pixel_count;
@@ -4026,7 +4062,8 @@ TEST_F(VideoStreamEncoderTest, DropsFramesAndScalesWhenBitrateIsTooLow) {
// Expect to drop this frame, the wait should time out.
ExpectDroppedFrame();
- EXPECT_LT(video_source_.sink_wants().max_pixel_count, last_pixel_count);
+ EXPECT_TRUE_WAIT(
+ video_source_.sink_wants().max_pixel_count < last_pixel_count, 5000);
video_stream_encoder_->Stop();
}
@@ -4141,7 +4178,8 @@ TEST_F(VideoStreamEncoderTest, InitialFrameDropActivatesWhenBweDrops) {
ExpectDroppedFrame();
// Expect the sink_wants to specify a scaled frame.
- EXPECT_LT(video_source_.sink_wants().max_pixel_count, kWidth * kHeight);
+ EXPECT_TRUE_WAIT(
+ video_source_.sink_wants().max_pixel_count < kWidth * kHeight, 5000);
video_stream_encoder_->Stop();
}
@@ -4176,7 +4214,8 @@ TEST_F(VideoStreamEncoderTest, RampsUpInQualityWhenBwIsHigh) {
int64_t timestamp_ms = kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
ExpectDroppedFrame();
- EXPECT_LT(source.sink_wants().max_pixel_count, kWidth * kHeight);
+ EXPECT_TRUE_WAIT(source.sink_wants().max_pixel_count < kWidth * kHeight,
+ 5000);
// Increase bitrate to encoder max.
video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
@@ -4200,7 +4239,7 @@ TEST_F(VideoStreamEncoderTest, RampsUpInQualityWhenBwIsHigh) {
// The ramp-up code involves the adaptation queue, give it time to execute.
// TODO(hbos): Can we await an appropriate event instead?
video_stream_encoder_->WaitUntilAdaptationTaskQueueIsIdle();
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
// Frame should not be adapted.
timestamp_ms += kFrameIntervalMs;
@@ -4224,14 +4263,14 @@ TEST_F(VideoStreamEncoderTest,
test::FrameForwarder source;
video_stream_encoder_->SetSource(
&source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE);
- VerifyNoLimitation(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
// Trigger adapt down, too small frame, expect no change.
source.IncomingCapturedFrame(CreateFrame(1, kTooSmallWidth, kTooSmallHeight));
WaitForEncodedFrame(1);
video_stream_encoder_->TriggerCpuOveruse();
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@@ -4252,7 +4291,7 @@ TEST_F(VideoStreamEncoderTest,
test::FrameForwarder source;
video_stream_encoder_->SetSource(&source,
webrtc::DegradationPreference::BALANCED);
- VerifyNoLimitation(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
@@ -4260,7 +4299,7 @@ TEST_F(VideoStreamEncoderTest,
source.IncomingCapturedFrame(CreateFrame(1, kTooSmallWidth, kTooSmallHeight));
WaitForEncodedFrame(1);
video_stream_encoder_->TriggerQualityLow();
- VerifyFpsEqResolutionMax(source.sink_wants(), kFpsLimit);
+ EXPECT_THAT(source.sink_wants(), FpsMatchesResolutionMax(Eq(kFpsLimit)));
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -4269,7 +4308,7 @@ TEST_F(VideoStreamEncoderTest,
source.IncomingCapturedFrame(CreateFrame(2, kTooSmallWidth, kTooSmallHeight));
WaitForEncodedFrame(2);
video_stream_encoder_->TriggerQualityLow();
- VerifyFpsEqResolutionMax(source.sink_wants(), kFpsLimit);
+ EXPECT_THAT(source.sink_wants(), FpsMatchesResolutionMax(Eq(kFpsLimit)));
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -4465,7 +4504,8 @@ TEST_F(VideoStreamEncoderTest, DoesntAdaptDownPastMinFramerate) {
} while (video_source_.sink_wants().max_framerate_fps <
last_wants.max_framerate_fps);
- VerifyFpsEqResolutionMax(video_source_.sink_wants(), kMinFramerateFps);
+ EXPECT_THAT(video_source_.sink_wants(),
+ FpsMatchesResolutionMax(Eq(kMinFramerateFps)));
video_stream_encoder_->Stop();
}
@@ -4489,7 +4529,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -4499,7 +4539,8 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
+ EXPECT_THAT(source.sink_wants(),
+ FpsMaxResolutionMatches(Lt(kWidth * kHeight)));
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -4509,7 +4550,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionLt(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionLt(source.last_wants()));
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -4519,7 +4560,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsLtResolutionEq(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsLtResolutionEq(source.last_wants()));
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -4529,7 +4570,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsEqResolutionLt(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsEqResolutionLt(source.last_wants()));
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -4539,7 +4580,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsLtResolutionEq(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsLtResolutionEq(source.last_wants()));
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_EQ(5, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -4549,7 +4590,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsEqResolutionLt(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsEqResolutionLt(source.last_wants()));
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_EQ(6, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -4559,7 +4600,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsLtResolutionEq(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsLtResolutionEq(source.last_wants()));
rtc::VideoSinkWants last_wants = source.sink_wants();
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
@@ -4570,17 +4611,17 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsEqResolutionEq(source.sink_wants(), last_wants);
+ EXPECT_THAT(source.sink_wants(), FpsEqResolutionEqTo(last_wants));
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_EQ(7, stats_proxy_->GetStats().number_of_quality_adapt_changes);
- // Trigger adapt down, expect expect increased fps (320x180@10fps).
+ // Trigger adapt up, expect increased fps (320x180@10fps).
video_stream_encoder_->TriggerQualityHigh();
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsGtResolutionEq(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsGtResolutionEq(source.last_wants()));
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_EQ(8, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -4590,7 +4631,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsEqResolutionGt(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsEqResolutionGt(source.last_wants()));
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_EQ(9, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -4600,7 +4641,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsGtResolutionEq(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsGtResolutionEq(source.last_wants()));
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_EQ(10, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -4610,7 +4651,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsEqResolutionGt(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsEqResolutionGt(source.last_wants()));
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_EQ(11, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -4620,7 +4661,9 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionEq(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMax());
+ EXPECT_EQ(source.sink_wants().max_pixel_count,
+ source.last_wants().max_pixel_count);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_EQ(12, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -4630,7 +4673,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants()));
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_EQ(13, stats_proxy_->GetStats().number_of_quality_adapt_changes);
@@ -4640,15 +4683,15 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
- VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants());
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants()));
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_EQ(14, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt up, expect no change.
video_stream_encoder_->TriggerQualityHigh();
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_EQ(14, stats_proxy_->GetStats().number_of_quality_adapt_changes);
video_stream_encoder_->Stop();
@@ -4672,7 +4715,7 @@ TEST_F(VideoStreamEncoderTest, AdaptWithTwoReasonsAndDifferentOrder_Framerate) {
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
@@ -4685,7 +4728,8 @@ TEST_F(VideoStreamEncoderTest, AdaptWithTwoReasonsAndDifferentOrder_Framerate) {
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight);
+ EXPECT_THAT(source.sink_wants(),
+ FpsMaxResolutionMatches(Lt(kWidth * kHeight)));
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
@@ -4698,7 +4742,7 @@ TEST_F(VideoStreamEncoderTest, AdaptWithTwoReasonsAndDifferentOrder_Framerate) {
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionLt(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionLt(source.last_wants()));
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
@@ -4711,59 +4755,77 @@ TEST_F(VideoStreamEncoderTest, AdaptWithTwoReasonsAndDifferentOrder_Framerate) {
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsLtResolutionEq(source.sink_wants(), source.last_wants());
- EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
+ EXPECT_THAT(source.sink_wants(), FpsLtResolutionEq(source.last_wants()));
+ EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
- // Trigger cpu adapt up, expect increased fps (640x360@30fps).
- video_stream_encoder_->TriggerCpuUnderuse();
+ // Trigger cpu adapt up, expect no change since QP is most limited.
+ {
+ // Store current sink wants since we expect no change and if there is no
+ // change then last_wants() is not updated.
+ auto previous_sink_wants = source.sink_wants();
+ video_stream_encoder_->TriggerCpuUnderuse();
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
+ WaitForEncodedFrame(timestamp_ms);
+ EXPECT_THAT(source.sink_wants(), FpsEqResolutionEqTo(previous_sink_wants));
+ EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
+ EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+ }
+
+ // Trigger quality adapt up, expect increased fps (640x360@30fps).
+ video_stream_encoder_->TriggerQualityHigh();
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionEq(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsGtResolutionEq(source.last_wants()));
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
- EXPECT_EQ(3, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
- EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+ EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
+ EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
- // Trigger quality adapt up, expect upscaled resolution (960x540@30fps).
+ // Trigger quality adapt up and Cpu adapt up since both are most limited,
+ // expect increased resolution (960x540@30fps).
video_stream_encoder_->TriggerQualityHigh();
+ video_stream_encoder_->TriggerCpuUnderuse();
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants());
- EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants()));
+ EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
EXPECT_EQ(3, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
- EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+ EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
- // Trigger cpu adapt up, expect no restriction (1280x720fps@30fps).
+ // Trigger quality adapt up and cpu adapt up since both are most limited,
+ // expect no restriction (1280x720@30fps).
+ video_stream_encoder_->TriggerQualityHigh();
video_stream_encoder_->TriggerCpuUnderuse();
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
- VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants());
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants()));
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
EXPECT_EQ(4, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
- EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+ EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt up, expect no change.
video_stream_encoder_->TriggerQualityHigh();
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_EQ(4, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
- EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+ EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes);
video_stream_encoder_->Stop();
}
@@ -4788,7 +4850,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
@@ -4801,7 +4863,7 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsEqResolutionMax(source.sink_wants(), kFpsLimit);
+ EXPECT_THAT(source.sink_wants(), FpsMatchesResolutionMax(Eq(kFpsLimit)));
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
@@ -4814,45 +4876,60 @@ TEST_F(VideoStreamEncoderTest,
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsEqResolutionLt(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsEqResolutionLt(source.last_wants()));
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution);
- EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
+ EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_framerate);
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
- // Trigger cpu adapt up, expect upscaled resolution (640x360@15fps).
- video_stream_encoder_->TriggerCpuUnderuse();
+ // Trigger cpu adapt up, expect no change because quality is most limited.
+ {
+ // Store current sink wants since we expect no change and if there is no
+ // change then last_wants() is not updated.
+ auto previous_sink_wants = source.sink_wants();
+ video_stream_encoder_->TriggerCpuUnderuse();
+ timestamp_ms += kFrameIntervalMs;
+ source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
+ WaitForEncodedFrame(timestamp_ms);
+ EXPECT_THAT(source.sink_wants(), FpsEqResolutionEqTo(previous_sink_wants));
+ EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
+ EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+ }
+
+ // Trigger quality adapt up, expect upscaled resolution (640x360@15fps).
+ video_stream_encoder_->TriggerQualityHigh();
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsEqResolutionGt(source.sink_wants(), source.last_wants());
+ EXPECT_THAT(source.sink_wants(), FpsEqResolutionGt(source.last_wants()));
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
- EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
- EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
- EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+ EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_framerate);
+ EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
+ EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
- // Trigger quality adapt up, expect increased fps (640x360@30fps).
+ // Trigger quality and cpu adapt up, expect increased fps (640x360@30fps).
video_stream_encoder_->TriggerQualityHigh();
+ video_stream_encoder_->TriggerCpuUnderuse();
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
- EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+ EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger adapt up, expect no change.
video_stream_encoder_->TriggerQualityHigh();
- VerifyFpsMaxResolutionMax(source.sink_wants());
+ EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax());
EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
- EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
+ EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes);
video_stream_encoder_->Stop();
}
@@ -5518,10 +5595,12 @@ TEST_F(VideoStreamEncoderTest, EncoderRatesPropagatedOnReconfigure) {
}
struct MockEncoderSwitchRequestCallback : public EncoderSwitchRequestCallback {
- MOCK_METHOD0(RequestEncoderFallback, void());
- MOCK_METHOD1(RequestEncoderSwitch, void(const Config& conf));
- MOCK_METHOD1(RequestEncoderSwitch,
- void(const webrtc::SdpVideoFormat& format));
+ MOCK_METHOD(void, RequestEncoderFallback, (), (override));
+ MOCK_METHOD(void, RequestEncoderSwitch, (const Config& conf), (override));
+ MOCK_METHOD(void,
+ RequestEncoderSwitch,
+ (const webrtc::SdpVideoFormat& format),
+ (override));
};
TEST_F(VideoStreamEncoderTest, BitrateEncoderSwitch) {
@@ -5845,7 +5924,7 @@ TEST_F(VideoStreamEncoderTest, AutomaticAnimationDetection) {
DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);
video_stream_encoder_->SetSource(&video_source_,
webrtc::DegradationPreference::BALANCED);
- VerifyNoLimitation(video_source_.sink_wants());
+ EXPECT_THAT(video_source_.sink_wants(), UnlimitedSinkWants());
VideoFrame frame = CreateFrame(1, kWidth, kHeight);
frame.set_update_rect(VideoFrame::UpdateRect{0, 0, kWidth, kHeight});
@@ -5864,7 +5943,7 @@ TEST_F(VideoStreamEncoderTest, AutomaticAnimationDetection) {
rtc::VideoSinkWants expected;
expected.max_framerate_fps = kFramerateFps;
expected.max_pixel_count = 1280 * 720 + 1;
- VerifyFpsEqResolutionLt(video_source_.sink_wants(), expected);
+ EXPECT_THAT(video_source_.sink_wants(), FpsEqResolutionLt(expected));
// Pass one frame with no known update.
// Resolution cap should be removed immediately.
@@ -5877,7 +5956,8 @@ TEST_F(VideoStreamEncoderTest, AutomaticAnimationDetection) {
WaitForEncodedFrame(timestamp_ms);
// Resolution should be unlimited now.
- VerifyFpsEqResolutionMax(video_source_.sink_wants(), kFramerateFps);
+ EXPECT_THAT(video_source_.sink_wants(),
+ FpsMatchesResolutionMax(Eq(kFramerateFps)));
video_stream_encoder_->Stop();
}
diff --git a/chromium/third_party/webrtc/webrtc.gni b/chromium/third_party/webrtc/webrtc.gni
index 4f1d0017f8e..680762f3a14 100644
--- a/chromium/third_party/webrtc/webrtc.gni
+++ b/chromium/third_party/webrtc/webrtc.gni
@@ -155,6 +155,9 @@ declare_args() {
rtc_use_h264 =
proprietary_codecs && !is_android && !is_ios && !(is_win && !is_clang)
+ # Enable this flag to make webrtc::Mutex be implemented by absl::Mutex.
+ rtc_use_absl_mutex = false
+
# By default, use normal platform audio support or dummy audio, but don't
# use file-based audio playout and record.
rtc_use_dummy_audio_file_devices = false
@@ -323,16 +326,19 @@ set_defaults("rtc_test") {
set_defaults("rtc_library") {
configs = rtc_add_configs
suppressed_configs = []
+ absl_deps = []
}
set_defaults("rtc_source_set") {
configs = rtc_add_configs
suppressed_configs = []
+ absl_deps = []
}
set_defaults("rtc_static_library") {
configs = rtc_add_configs
suppressed_configs = []
+ absl_deps = []
}
set_defaults("rtc_executable") {
@@ -525,6 +531,20 @@ template("rtc_source_set") {
if (defined(invoker.public_configs)) {
public_configs += invoker.public_configs
}
+
+ # If absl_deps is [], no action is needed. If not [], then it needs to be
+ # converted to //third_party/abseil-cpp:absl when build_with_chromium=true
+ # otherwise it just needs to be added to deps.
+ if (absl_deps != []) {
+ if (!defined(deps)) {
+ deps = []
+ }
+ if (build_with_chromium) {
+ deps += [ "//third_party/abseil-cpp:absl" ]
+ } else {
+ deps += absl_deps
+ }
+ }
}
}
@@ -600,6 +620,20 @@ template("rtc_static_library") {
if (defined(invoker.public_configs)) {
public_configs += invoker.public_configs
}
+
+ # If absl_deps is [], no action is needed. If not [], then it needs to be
+ # converted to //third_party/abseil-cpp:absl when build_with_chromium=true
+ # otherwise it just needs to be added to deps.
+ if (absl_deps != []) {
+ if (!defined(deps)) {
+ deps = []
+ }
+ if (build_with_chromium) {
+ deps += [ "//third_party/abseil-cpp:absl" ]
+ } else {
+ deps += absl_deps
+ }
+ }
}
}
@@ -712,6 +746,20 @@ template("rtc_library") {
if (defined(invoker.public_configs)) {
public_configs += invoker.public_configs
}
+
+ # If absl_deps is [], no action is needed. If not [], then it needs to be
+ # converted to //third_party/abseil-cpp:absl when build_with_chromium=true
+ # otherwise it just needs to be added to deps.
+ if (absl_deps != []) {
+ if (!defined(deps)) {
+ deps = []
+ }
+ if (build_with_chromium) {
+ deps += [ "//third_party/abseil-cpp:absl" ]
+ } else {
+ deps += absl_deps
+ }
+ }
}
}
@@ -1002,6 +1050,7 @@ if (is_android) {
}
no_build_hooks = true
+ not_needed([ "android_manifest" ])
}
}
@@ -1020,6 +1069,9 @@ if (is_android) {
errorprone_args = []
errorprone_args += [ "-Werror" ]
+ # Use WebRTC-specific android lint suppressions file.
+ lint_suppressions_file = "//tools_webrtc/android/suppressions.xml"
+
if (!defined(deps)) {
deps = []
}
diff --git a/chromium/third_party/webrtc/whitespace.txt b/chromium/third_party/webrtc/whitespace.txt
index 01f2e1c2c96..c405786815b 100644
--- a/chromium/third_party/webrtc/whitespace.txt
+++ b/chromium/third_party/webrtc/whitespace.txt
@@ -3,4 +3,6 @@ Try to write something funny. And please don't add trailing whitespace.
Once upon a time there was an elephant in Stockholm.
Everyone knew about it, but nobody dared say anything.
+In the end it didn't make a difference since everyone was working from home.
+