author    | Allan Sandfeld Jensen <allan.jensen@qt.io> | 2020-10-12 14:27:29 +0200
committer | Allan Sandfeld Jensen <allan.jensen@qt.io> | 2020-10-13 09:35:20 +0000
commit    | c30a6232df03e1efbd9f3b226777b07e087a1122 (patch)
tree      | e992f45784689f373bcc38d1b79a239ebe17ee23 /chromium/third_party/webrtc/rtc_tools
parent    | 7b5b123ac58f58ffde0f4f6e488bcd09aa4decd3 (diff)
download  | qtwebengine-chromium-85-based.tar.gz
BASELINE: Update Chromium to 85.0.4183.140
Change-Id: Iaa42f4680837c57725b1344f108c0196741f6057
Reviewed-by: Allan Sandfeld Jensen <allan.jensen@qt.io>
Diffstat (limited to 'chromium/third_party/webrtc/rtc_tools')
18 files changed, 1609 insertions, 1472 deletions
diff --git a/chromium/third_party/webrtc/rtc_tools/BUILD.gn b/chromium/third_party/webrtc/rtc_tools/BUILD.gn index f293853f6ef..f33d96eff12 100644 --- a/chromium/third_party/webrtc/rtc_tools/BUILD.gn +++ b/chromium/third_party/webrtc/rtc_tools/BUILD.gn @@ -17,12 +17,12 @@ group("rtc_tools") { deps = [ ":frame_analyzer", ":video_file_reader", - ":video_quality_analysis", ] if (!build_with_chromium) { deps += [ ":psnr_ssim_analyzer", ":rgba_to_i420_converter", + ":video_quality_analysis", ] if (rtc_enable_protobuf) { deps += [ ":chart_proto" ] @@ -60,6 +60,8 @@ rtc_library("video_file_reader") { "../api/video:video_rtp_headers", "../rtc_base:checks", "../rtc_base:rtc_base_approved", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] @@ -77,6 +79,8 @@ rtc_library("video_file_writer") { "../api/video:video_frame_i420", "../api/video:video_rtp_headers", "../rtc_base:rtc_base_approved", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] @@ -107,9 +111,9 @@ rtc_library("video_quality_analysis") { "../rtc_base:checks", "../rtc_base:rtc_base_approved", "../test:perf_test", - "//third_party/abseil-cpp/absl/types:optional", "//third_party/libyuv", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_executable("frame_analyzer") { @@ -319,8 +323,14 @@ if (!build_with_chromium) { rtc_library("event_log_visualizer_utils") { visibility = [ "*" ] sources = [ + "rtc_event_log_visualizer/alerts.cc", + "rtc_event_log_visualizer/alerts.h", + "rtc_event_log_visualizer/analyze_audio.cc", + "rtc_event_log_visualizer/analyze_audio.h", "rtc_event_log_visualizer/analyzer.cc", "rtc_event_log_visualizer/analyzer.h", + "rtc_event_log_visualizer/analyzer_common.cc", + "rtc_event_log_visualizer/analyzer_common.h", "rtc_event_log_visualizer/log_simulation.cc", "rtc_event_log_visualizer/log_simulation.h", "rtc_event_log_visualizer/plot_base.cc", @@ -329,11 +339,11 @@ if (!build_with_chromium) { "rtc_event_log_visualizer/plot_protobuf.h", "rtc_event_log_visualizer/plot_python.cc", "rtc_event_log_visualizer/plot_python.h", - "rtc_event_log_visualizer/triage_notifications.h", ] deps = [ ":chart_proto", "../api:function_view", + "../rtc_base:deprecation", "../rtc_base:ignore_wundef", # TODO(kwiberg): Remove this dependency. 
@@ -360,8 +370,12 @@ if (!build_with_chromium) { "../rtc_base:rtc_base_approved", "../rtc_base:rtc_numerics", "../rtc_base:stringutils", + "../test:explicit_key_value_config", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", ] } } diff --git a/chromium/third_party/webrtc/rtc_tools/network_tester/BUILD.gn b/chromium/third_party/webrtc/rtc_tools/network_tester/BUILD.gn index 47e600aa856..1156bf5dd8f 100644 --- a/chromium/third_party/webrtc/rtc_tools/network_tester/BUILD.gn +++ b/chromium/third_party/webrtc/rtc_tools/network_tester/BUILD.gn @@ -50,8 +50,8 @@ if (rtc_enable_protobuf) { "../../rtc_base:rtc_task_queue", "../../rtc_base/synchronization:sequence_checker", "../../rtc_base/third_party/sigslot", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } network_tester_unittests_resources = [ @@ -115,7 +115,7 @@ if (is_android) { rtc_android_library("NetworkTesterMobile_javalib") { testonly = true - android_manifest_for_lint = "androidapp/AndroidManifest.xml" + android_manifest = "androidapp/AndroidManifest.xml" sources = [ "androidapp/src/com/google/media/networktester/MainActivity.java", @@ -138,11 +138,11 @@ if (is_android) { "androidapp/res/mipmap-xhdpi/ic_launcher.png", "androidapp/res/mipmap-xxhdpi/ic_launcher.png", "androidapp/res/mipmap-xxxhdpi/ic_launcher.png", + "androidapp/res/values-v17/styles.xml", + "androidapp/res/values-w820dp/dimens.xml", "androidapp/res/values/colors.xml", "androidapp/res/values/dimens.xml", "androidapp/res/values/strings.xml", - "androidapp/res/values-v17/styles.xml", - "androidapp/res/values-w820dp/dimens.xml", ] # Needed for Bazel converter. diff --git a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/alerts.cc b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/alerts.cc new file mode 100644 index 00000000000..86372de4cfd --- /dev/null +++ b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/alerts.cc @@ -0,0 +1,227 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "rtc_tools/rtc_event_log_visualizer/alerts.h" + +#include <stdio.h> + +#include <algorithm> +#include <limits> +#include <map> +#include <string> + +#include "logging/rtc_event_log/rtc_event_processor.h" +#include "rtc_base/checks.h" +#include "rtc_base/format_macros.h" +#include "rtc_base/logging.h" +#include "rtc_base/numerics/sequence_number_util.h" +#include "rtc_base/strings/string_builder.h" + +namespace webrtc { + +void TriageHelper::Print(FILE* file) { + fprintf(file, "========== TRIAGE NOTIFICATIONS ==========\n"); + for (const auto& alert : triage_alerts_) { + fprintf(file, "%d %s. First occurrence at %3.3lf\n", alert.second.count, + alert.second.explanation.c_str(), alert.second.first_occurrence); + } + fprintf(file, "========== END TRIAGE NOTIFICATIONS ==========\n"); +} + +void TriageHelper::AnalyzeStreamGaps(const ParsedRtcEventLog& parsed_log, + PacketDirection direction) { + // With 100 packets/s (~800kbps), false positives would require 10 s without + // data. 
+ constexpr int64_t kMaxSeqNumJump = 1000; + // With a 90 kHz clock, false positives would require 10 s without data. + constexpr int64_t kTicksPerMillisec = 90; + constexpr int64_t kCaptureTimeGraceMs = 10000; + + std::string seq_num_explanation = + direction == kIncomingPacket + ? "Incoming RTP sequence number jumps more than 1000. Counter may " + "have been reset or rewritten incorrectly in a group call." + : "Outgoing RTP sequence number jumps more than 1000. Counter may " + "have been reset."; + std::string capture_time_explanation = + direction == kIncomingPacket ? "Incoming capture time jumps more than " + "10s. Clock might have been reset." + : "Outgoing capture time jumps more than " + "10s. Clock might have been reset."; + TriageAlertType seq_num_alert = direction == kIncomingPacket + ? TriageAlertType::kIncomingSeqNumJump + : TriageAlertType::kOutgoingSeqNumJump; + TriageAlertType capture_time_alert = + direction == kIncomingPacket ? TriageAlertType::kIncomingCaptureTimeJump + : TriageAlertType::kOutgoingCaptureTimeJump; + + const int64_t segment_end_us = parsed_log.first_log_segment().stop_time_us(); + + // Check for gaps in sequence numbers and capture timestamps. + for (const auto& stream : parsed_log.rtp_packets_by_ssrc(direction)) { + if (IsRtxSsrc(parsed_log, direction, stream.ssrc)) { + continue; + } + auto packets = stream.packet_view; + if (packets.empty()) { + continue; + } + SeqNumUnwrapper<uint16_t> seq_num_unwrapper; + int64_t last_seq_num = + seq_num_unwrapper.Unwrap(packets[0].header.sequenceNumber); + SeqNumUnwrapper<uint32_t> capture_time_unwrapper; + int64_t last_capture_time = + capture_time_unwrapper.Unwrap(packets[0].header.timestamp); + int64_t last_log_time_ms = packets[0].log_time_ms(); + for (const auto& packet : packets) { + if (packet.log_time_us() > segment_end_us) { + // Only process the first (LOG_START, LOG_END) segment. + break; + } + + int64_t seq_num = seq_num_unwrapper.Unwrap(packet.header.sequenceNumber); + if (std::abs(seq_num - last_seq_num) > kMaxSeqNumJump) { + Alert(seq_num_alert, config_.GetCallTimeSec(packet.log_time_us()), + seq_num_explanation); + } + last_seq_num = seq_num; + + int64_t capture_time = + capture_time_unwrapper.Unwrap(packet.header.timestamp); + if (std::abs(capture_time - last_capture_time) > + kTicksPerMillisec * + (kCaptureTimeGraceMs + packet.log_time_ms() - last_log_time_ms)) { + Alert(capture_time_alert, config_.GetCallTimeSec(packet.log_time_us()), + capture_time_explanation); + } + last_capture_time = capture_time; + } + } +} + +void TriageHelper::AnalyzeTransmissionGaps(const ParsedRtcEventLog& parsed_log, + PacketDirection direction) { + constexpr int64_t kMaxRtpTransmissionGap = 500000; + constexpr int64_t kMaxRtcpTransmissionGap = 3000000; + std::string rtp_explanation = + direction == kIncomingPacket + ? "No RTP packets received for more than 500ms. This indicates a " + "network problem. Temporary video freezes and choppy or robotic " + "audio is unavoidable. Unnecessary BWE drops is a known issue." + : "No RTP packets sent for more than 500 ms. This might be an issue " + "with the pacer."; + std::string rtcp_explanation = + direction == kIncomingPacket + ? "No RTCP packets received for more than 3 s. Could be a longer " + "connection outage" + : "No RTCP packets sent for more than 3 s. This is most likely a " + "bug."; + TriageAlertType rtp_alert = direction == kIncomingPacket + ? 
TriageAlertType::kIncomingRtpGap + : TriageAlertType::kOutgoingRtpGap; + TriageAlertType rtcp_alert = direction == kIncomingPacket + ? TriageAlertType::kIncomingRtcpGap + : TriageAlertType::kOutgoingRtcpGap; + + const int64_t segment_end_us = parsed_log.first_log_segment().stop_time_us(); + + // TODO(terelius): The parser could provide a list of all packets, ordered + // by time, for each direction. + std::multimap<int64_t, const LoggedRtpPacket*> rtp_in_direction; + for (const auto& stream : parsed_log.rtp_packets_by_ssrc(direction)) { + for (const LoggedRtpPacket& rtp_packet : stream.packet_view) + rtp_in_direction.emplace(rtp_packet.log_time_us(), &rtp_packet); + } + absl::optional<int64_t> last_rtp_time; + for (const auto& kv : rtp_in_direction) { + int64_t timestamp = kv.first; + if (timestamp > segment_end_us) { + // Only process the first (LOG_START, LOG_END) segment. + break; + } + int64_t duration = timestamp - last_rtp_time.value_or(0); + if (last_rtp_time.has_value() && duration > kMaxRtpTransmissionGap) { + // No packet sent/received for more than 500 ms. + Alert(rtp_alert, config_.GetCallTimeSec(timestamp), rtp_explanation); + } + last_rtp_time.emplace(timestamp); + } + + absl::optional<int64_t> last_rtcp_time; + if (direction == kIncomingPacket) { + for (const auto& rtcp : parsed_log.incoming_rtcp_packets()) { + if (rtcp.log_time_us() > segment_end_us) { + // Only process the first (LOG_START, LOG_END) segment. + break; + } + int64_t duration = rtcp.log_time_us() - last_rtcp_time.value_or(0); + if (last_rtcp_time.has_value() && duration > kMaxRtcpTransmissionGap) { + // No feedback sent/received for more than 2000 ms. + Alert(rtcp_alert, config_.GetCallTimeSec(rtcp.log_time_us()), + rtcp_explanation); + } + last_rtcp_time.emplace(rtcp.log_time_us()); + } + } else { + for (const auto& rtcp : parsed_log.outgoing_rtcp_packets()) { + if (rtcp.log_time_us() > segment_end_us) { + // Only process the first (LOG_START, LOG_END) segment. + break; + } + int64_t duration = rtcp.log_time_us() - last_rtcp_time.value_or(0); + if (last_rtcp_time.has_value() && duration > kMaxRtcpTransmissionGap) { + // No feedback sent/received for more than 2000 ms. + Alert(rtcp_alert, config_.GetCallTimeSec(rtcp.log_time_us()), + rtcp_explanation); + } + last_rtcp_time.emplace(rtcp.log_time_us()); + } + } +} + +// TODO(terelius): Notifications could possibly be generated by the same code +// that produces the graphs. There is some code duplication that could be +// avoided, but that might be solved anyway when we move functionality from the +// analyzer to the parser. +void TriageHelper::AnalyzeLog(const ParsedRtcEventLog& parsed_log) { + AnalyzeStreamGaps(parsed_log, kIncomingPacket); + AnalyzeStreamGaps(parsed_log, kOutgoingPacket); + AnalyzeTransmissionGaps(parsed_log, kIncomingPacket); + AnalyzeTransmissionGaps(parsed_log, kOutgoingPacket); + + const int64_t segment_end_us = parsed_log.first_log_segment().stop_time_us(); + + int64_t first_occurrence = parsed_log.last_timestamp(); + constexpr double kMaxLossFraction = 0.05; + // Loss feedback + int64_t total_lost_packets = 0; + int64_t total_expected_packets = 0; + for (auto& bwe_update : parsed_log.bwe_loss_updates()) { + if (bwe_update.log_time_us() > segment_end_us) { + // Only process the first (LOG_START, LOG_END) segment. 
+ break; + } + int64_t lost_packets = static_cast<double>(bwe_update.fraction_lost) / 255 * + bwe_update.expected_packets; + total_lost_packets += lost_packets; + total_expected_packets += bwe_update.expected_packets; + if (bwe_update.fraction_lost >= 255 * kMaxLossFraction) { + first_occurrence = std::min(first_occurrence, bwe_update.log_time_us()); + } + } + double avg_outgoing_loss = + static_cast<double>(total_lost_packets) / total_expected_packets; + if (avg_outgoing_loss > kMaxLossFraction) { + Alert(TriageAlertType::kOutgoingHighLoss, first_occurrence, + "More than 5% of outgoing packets lost."); + } +} + +} // namespace webrtc diff --git a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/alerts.h b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/alerts.h new file mode 100644 index 00000000000..7bd9f052706 --- /dev/null +++ b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/alerts.h @@ -0,0 +1,86 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ALERTS_H_ +#define RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ALERTS_H_ + +#include <stdio.h> + +#include <map> +#include <string> +#include <utility> + +#include "absl/strings/string_view.h" +#include "logging/rtc_event_log/rtc_event_log_parser.h" +#include "rtc_base/constructor_magic.h" +#include "rtc_tools/rtc_event_log_visualizer/analyzer_common.h" + +namespace webrtc { + +enum class TriageAlertType { + kUnknown = 0, + kIncomingRtpGap, + kOutgoingRtpGap, + kIncomingRtcpGap, + kOutgoingRtcpGap, + kIncomingSeqNumJump, + kOutgoingSeqNumJump, + kIncomingCaptureTimeJump, + kOutgoingCaptureTimeJump, + kOutgoingHighLoss, + kLast, +}; + +struct TriageAlert { + TriageAlertType type = TriageAlertType::kUnknown; + int count = 0; + float first_occurrence = -1; + std::string explanation; +}; + +class TriageHelper { + public: + explicit TriageHelper(const AnalyzerConfig& config) : config_(config) {} + + void AnalyzeLog(const ParsedRtcEventLog& parsed_log); + + void AnalyzeStreamGaps(const ParsedRtcEventLog& parsed_log, + PacketDirection direction); + void AnalyzeTransmissionGaps(const ParsedRtcEventLog& parsed_log, + PacketDirection direction); + void Print(FILE* file); + + private: + AnalyzerConfig config_; + std::map<TriageAlertType, TriageAlert> triage_alerts_; + + void Alert(TriageAlertType type, + float time_seconds, + absl::string_view explanation) { + std::map<TriageAlertType, TriageAlert>::iterator it = + triage_alerts_.find(type); + + if (it == triage_alerts_.end()) { + TriageAlert alert; + alert.type = type; + alert.first_occurrence = time_seconds; + alert.count = 1; + alert.explanation = std::string(explanation); + triage_alerts_.insert(std::make_pair(type, alert)); + } else { + it->second.count += 1; + } + } + RTC_DISALLOW_COPY_AND_ASSIGN(TriageHelper); +}; + +} // namespace webrtc + +#endif // RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ALERTS_H_ diff --git a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyze_audio.cc b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyze_audio.cc new file mode 100644 index 00000000000..becc0044abb --- /dev/null +++ 
b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyze_audio.cc @@ -0,0 +1,503 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "rtc_tools/rtc_event_log_visualizer/analyze_audio.h" + +#include <memory> +#include <set> +#include <utility> +#include <vector> + +#include "modules/audio_coding/neteq/tools/audio_sink.h" +#include "modules/audio_coding/neteq/tools/fake_decode_from_file.h" +#include "modules/audio_coding/neteq/tools/neteq_delay_analyzer.h" +#include "modules/audio_coding/neteq/tools/neteq_replacement_input.h" +#include "modules/audio_coding/neteq/tools/neteq_test.h" +#include "modules/audio_coding/neteq/tools/resample_input_audio_file.h" +#include "rtc_base/ref_counted_object.h" + +namespace webrtc { + +void CreateAudioEncoderTargetBitrateGraph(const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + Plot* plot) { + TimeSeries time_series("Audio encoder target bitrate", LineStyle::kLine, + PointStyle::kHighlight); + auto GetAnaBitrateBps = [](const LoggedAudioNetworkAdaptationEvent& ana_event) + -> absl::optional<float> { + if (ana_event.config.bitrate_bps) + return absl::optional<float>( + static_cast<float>(*ana_event.config.bitrate_bps)); + return absl::nullopt; + }; + auto ToCallTime = [config](const LoggedAudioNetworkAdaptationEvent& packet) { + return config.GetCallTimeSec(packet.log_time_us()); + }; + ProcessPoints<LoggedAudioNetworkAdaptationEvent>( + ToCallTime, GetAnaBitrateBps, + parsed_log.audio_network_adaptation_events(), &time_series); + plot->AppendTimeSeries(std::move(time_series)); + plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)", + kLeftMargin, kRightMargin); + plot->SetSuggestedYAxis(0, 1, "Bitrate (bps)", kBottomMargin, kTopMargin); + plot->SetTitle("Reported audio encoder target bitrate"); +} + +void CreateAudioEncoderFrameLengthGraph(const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + Plot* plot) { + TimeSeries time_series("Audio encoder frame length", LineStyle::kLine, + PointStyle::kHighlight); + auto GetAnaFrameLengthMs = + [](const LoggedAudioNetworkAdaptationEvent& ana_event) { + if (ana_event.config.frame_length_ms) + return absl::optional<float>( + static_cast<float>(*ana_event.config.frame_length_ms)); + return absl::optional<float>(); + }; + auto ToCallTime = [config](const LoggedAudioNetworkAdaptationEvent& packet) { + return config.GetCallTimeSec(packet.log_time_us()); + }; + ProcessPoints<LoggedAudioNetworkAdaptationEvent>( + ToCallTime, GetAnaFrameLengthMs, + parsed_log.audio_network_adaptation_events(), &time_series); + plot->AppendTimeSeries(std::move(time_series)); + plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)", + kLeftMargin, kRightMargin); + plot->SetSuggestedYAxis(0, 1, "Frame length (ms)", kBottomMargin, kTopMargin); + plot->SetTitle("Reported audio encoder frame length"); +} + +void CreateAudioEncoderPacketLossGraph(const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + Plot* plot) { + TimeSeries time_series("Audio encoder uplink packet loss fraction", + LineStyle::kLine, PointStyle::kHighlight); + auto GetAnaPacketLoss = + [](const 
LoggedAudioNetworkAdaptationEvent& ana_event) { + if (ana_event.config.uplink_packet_loss_fraction) + return absl::optional<float>(static_cast<float>( + *ana_event.config.uplink_packet_loss_fraction)); + return absl::optional<float>(); + }; + auto ToCallTime = [config](const LoggedAudioNetworkAdaptationEvent& packet) { + return config.GetCallTimeSec(packet.log_time_us()); + }; + ProcessPoints<LoggedAudioNetworkAdaptationEvent>( + ToCallTime, GetAnaPacketLoss, + parsed_log.audio_network_adaptation_events(), &time_series); + plot->AppendTimeSeries(std::move(time_series)); + plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)", + kLeftMargin, kRightMargin); + plot->SetSuggestedYAxis(0, 10, "Percent lost packets", kBottomMargin, + kTopMargin); + plot->SetTitle("Reported audio encoder lost packets"); +} + +void CreateAudioEncoderEnableFecGraph(const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + Plot* plot) { + TimeSeries time_series("Audio encoder FEC", LineStyle::kLine, + PointStyle::kHighlight); + auto GetAnaFecEnabled = + [](const LoggedAudioNetworkAdaptationEvent& ana_event) { + if (ana_event.config.enable_fec) + return absl::optional<float>( + static_cast<float>(*ana_event.config.enable_fec)); + return absl::optional<float>(); + }; + auto ToCallTime = [config](const LoggedAudioNetworkAdaptationEvent& packet) { + return config.GetCallTimeSec(packet.log_time_us()); + }; + ProcessPoints<LoggedAudioNetworkAdaptationEvent>( + ToCallTime, GetAnaFecEnabled, + parsed_log.audio_network_adaptation_events(), &time_series); + plot->AppendTimeSeries(std::move(time_series)); + plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)", + kLeftMargin, kRightMargin); + plot->SetSuggestedYAxis(0, 1, "FEC (false/true)", kBottomMargin, kTopMargin); + plot->SetTitle("Reported audio encoder FEC"); +} + +void CreateAudioEncoderEnableDtxGraph(const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + Plot* plot) { + TimeSeries time_series("Audio encoder DTX", LineStyle::kLine, + PointStyle::kHighlight); + auto GetAnaDtxEnabled = + [](const LoggedAudioNetworkAdaptationEvent& ana_event) { + if (ana_event.config.enable_dtx) + return absl::optional<float>( + static_cast<float>(*ana_event.config.enable_dtx)); + return absl::optional<float>(); + }; + auto ToCallTime = [config](const LoggedAudioNetworkAdaptationEvent& packet) { + return config.GetCallTimeSec(packet.log_time_us()); + }; + ProcessPoints<LoggedAudioNetworkAdaptationEvent>( + ToCallTime, GetAnaDtxEnabled, + parsed_log.audio_network_adaptation_events(), &time_series); + plot->AppendTimeSeries(std::move(time_series)); + plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)", + kLeftMargin, kRightMargin); + plot->SetSuggestedYAxis(0, 1, "DTX (false/true)", kBottomMargin, kTopMargin); + plot->SetTitle("Reported audio encoder DTX"); +} + +void CreateAudioEncoderNumChannelsGraph(const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + Plot* plot) { + TimeSeries time_series("Audio encoder number of channels", LineStyle::kLine, + PointStyle::kHighlight); + auto GetAnaNumChannels = + [](const LoggedAudioNetworkAdaptationEvent& ana_event) { + if (ana_event.config.num_channels) + return absl::optional<float>( + static_cast<float>(*ana_event.config.num_channels)); + return absl::optional<float>(); + }; + auto ToCallTime = [config](const LoggedAudioNetworkAdaptationEvent& packet) { + return config.GetCallTimeSec(packet.log_time_us()); + }; + 
ProcessPoints<LoggedAudioNetworkAdaptationEvent>( + ToCallTime, GetAnaNumChannels, + parsed_log.audio_network_adaptation_events(), &time_series); + plot->AppendTimeSeries(std::move(time_series)); + plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)", + kLeftMargin, kRightMargin); + plot->SetSuggestedYAxis(0, 1, "Number of channels (1 (mono)/2 (stereo))", + kBottomMargin, kTopMargin); + plot->SetTitle("Reported audio encoder number of channels"); +} + +class NetEqStreamInput : public test::NetEqInput { + public: + // Does not take any ownership, and all pointers must refer to valid objects + // that outlive the one constructed. + NetEqStreamInput(const std::vector<LoggedRtpPacketIncoming>* packet_stream, + const std::vector<LoggedAudioPlayoutEvent>* output_events, + absl::optional<int64_t> end_time_ms) + : packet_stream_(*packet_stream), + packet_stream_it_(packet_stream_.begin()), + output_events_it_(output_events->begin()), + output_events_end_(output_events->end()), + end_time_ms_(end_time_ms) { + RTC_DCHECK(packet_stream); + RTC_DCHECK(output_events); + } + + absl::optional<int64_t> NextPacketTime() const override { + if (packet_stream_it_ == packet_stream_.end()) { + return absl::nullopt; + } + if (end_time_ms_ && packet_stream_it_->rtp.log_time_ms() > *end_time_ms_) { + return absl::nullopt; + } + return packet_stream_it_->rtp.log_time_ms(); + } + + absl::optional<int64_t> NextOutputEventTime() const override { + if (output_events_it_ == output_events_end_) { + return absl::nullopt; + } + if (end_time_ms_ && output_events_it_->log_time_ms() > *end_time_ms_) { + return absl::nullopt; + } + return output_events_it_->log_time_ms(); + } + + std::unique_ptr<PacketData> PopPacket() override { + if (packet_stream_it_ == packet_stream_.end()) { + return std::unique_ptr<PacketData>(); + } + std::unique_ptr<PacketData> packet_data(new PacketData()); + packet_data->header = packet_stream_it_->rtp.header; + packet_data->time_ms = packet_stream_it_->rtp.log_time_ms(); + + // This is a header-only "dummy" packet. Set the payload to all zeros, with + // length according to the virtual length. + packet_data->payload.SetSize(packet_stream_it_->rtp.total_length - + packet_stream_it_->rtp.header_length); + std::fill_n(packet_data->payload.data(), packet_data->payload.size(), 0); + + ++packet_stream_it_; + return packet_data; + } + + void AdvanceOutputEvent() override { + if (output_events_it_ != output_events_end_) { + ++output_events_it_; + } + } + + bool ended() const override { return !NextEventTime(); } + + absl::optional<RTPHeader> NextHeader() const override { + if (packet_stream_it_ == packet_stream_.end()) { + return absl::nullopt; + } + return packet_stream_it_->rtp.header; + } + + private: + const std::vector<LoggedRtpPacketIncoming>& packet_stream_; + std::vector<LoggedRtpPacketIncoming>::const_iterator packet_stream_it_; + std::vector<LoggedAudioPlayoutEvent>::const_iterator output_events_it_; + const std::vector<LoggedAudioPlayoutEvent>::const_iterator output_events_end_; + const absl::optional<int64_t> end_time_ms_; +}; + +namespace { + +// Factory to create a "replacement decoder" that produces the decoded audio +// by reading from a file rather than from the encoded payloads. 
+class ReplacementAudioDecoderFactory : public AudioDecoderFactory { + public: + ReplacementAudioDecoderFactory(const absl::string_view replacement_file_name, + int file_sample_rate_hz) + : replacement_file_name_(replacement_file_name), + file_sample_rate_hz_(file_sample_rate_hz) {} + + std::vector<AudioCodecSpec> GetSupportedDecoders() override { + RTC_NOTREACHED(); + return {}; + } + + bool IsSupportedDecoder(const SdpAudioFormat& format) override { + return true; + } + + std::unique_ptr<AudioDecoder> MakeAudioDecoder( + const SdpAudioFormat& format, + absl::optional<AudioCodecPairId> codec_pair_id) override { + auto replacement_file = std::make_unique<test::ResampleInputAudioFile>( + replacement_file_name_, file_sample_rate_hz_); + replacement_file->set_output_rate_hz(48000); + return std::make_unique<test::FakeDecodeFromFile>( + std::move(replacement_file), 48000, false); + } + + private: + const std::string replacement_file_name_; + const int file_sample_rate_hz_; +}; + +// Creates a NetEq test object and all necessary input and output helpers. Runs +// the test and returns the NetEqDelayAnalyzer object that was used to +// instrument the test. +std::unique_ptr<test::NetEqStatsGetter> CreateNetEqTestAndRun( + const std::vector<LoggedRtpPacketIncoming>* packet_stream, + const std::vector<LoggedAudioPlayoutEvent>* output_events, + absl::optional<int64_t> end_time_ms, + const std::string& replacement_file_name, + int file_sample_rate_hz) { + std::unique_ptr<test::NetEqInput> input( + new NetEqStreamInput(packet_stream, output_events, end_time_ms)); + + constexpr int kReplacementPt = 127; + std::set<uint8_t> cn_types; + std::set<uint8_t> forbidden_types; + input.reset(new test::NetEqReplacementInput(std::move(input), kReplacementPt, + cn_types, forbidden_types)); + + NetEq::Config config; + config.max_packets_in_buffer = 200; + config.enable_fast_accelerate = true; + + std::unique_ptr<test::VoidAudioSink> output(new test::VoidAudioSink()); + + rtc::scoped_refptr<AudioDecoderFactory> decoder_factory = + new rtc::RefCountedObject<ReplacementAudioDecoderFactory>( + replacement_file_name, file_sample_rate_hz); + + test::NetEqTest::DecoderMap codecs = { + {kReplacementPt, SdpAudioFormat("l16", 48000, 1)}}; + + std::unique_ptr<test::NetEqDelayAnalyzer> delay_cb( + new test::NetEqDelayAnalyzer); + std::unique_ptr<test::NetEqStatsGetter> neteq_stats_getter( + new test::NetEqStatsGetter(std::move(delay_cb))); + test::DefaultNetEqTestErrorCallback error_cb; + test::NetEqTest::Callbacks callbacks; + callbacks.error_callback = &error_cb; + callbacks.post_insert_packet = neteq_stats_getter->delay_analyzer(); + callbacks.get_audio_callback = neteq_stats_getter.get(); + + test::NetEqTest test(config, decoder_factory, codecs, /*text_log=*/nullptr, + /*factory=*/nullptr, std::move(input), std::move(output), + callbacks); + test.Run(); + return neteq_stats_getter; +} +} // namespace + +NetEqStatsGetterMap SimulateNetEq(const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + const std::string& replacement_file_name, + int file_sample_rate_hz) { + NetEqStatsGetterMap neteq_stats; + + for (const auto& stream : parsed_log.incoming_rtp_packets_by_ssrc()) { + const uint32_t ssrc = stream.ssrc; + if (!IsAudioSsrc(parsed_log, kIncomingPacket, ssrc)) + continue; + const std::vector<LoggedRtpPacketIncoming>* audio_packets = + &stream.incoming_packets; + if (audio_packets == nullptr) { + // No incoming audio stream found. 
+ continue; + } + + RTC_DCHECK(neteq_stats.find(ssrc) == neteq_stats.end()); + + std::map<uint32_t, std::vector<LoggedAudioPlayoutEvent>>::const_iterator + output_events_it = parsed_log.audio_playout_events().find(ssrc); + if (output_events_it == parsed_log.audio_playout_events().end()) { + // Could not find output events with SSRC matching the input audio stream. + // Using the first available stream of output events. + output_events_it = parsed_log.audio_playout_events().cbegin(); + } + + int64_t end_time_ms = parsed_log.first_log_segment().stop_time_ms(); + + neteq_stats[ssrc] = CreateNetEqTestAndRun( + audio_packets, &output_events_it->second, end_time_ms, + replacement_file_name, file_sample_rate_hz); + } + + return neteq_stats; +} + +// Given a NetEqStatsGetter and the SSRC that the NetEqStatsGetter was created +// for, this method generates a plot for the jitter buffer delay profile. +void CreateAudioJitterBufferGraph(const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + uint32_t ssrc, + const test::NetEqStatsGetter* stats_getter, + Plot* plot) { + test::NetEqDelayAnalyzer::Delays arrival_delay_ms; + test::NetEqDelayAnalyzer::Delays corrected_arrival_delay_ms; + test::NetEqDelayAnalyzer::Delays playout_delay_ms; + test::NetEqDelayAnalyzer::Delays target_delay_ms; + + stats_getter->delay_analyzer()->CreateGraphs( + &arrival_delay_ms, &corrected_arrival_delay_ms, &playout_delay_ms, + &target_delay_ms); + + TimeSeries time_series_packet_arrival("packet arrival delay", + LineStyle::kLine); + TimeSeries time_series_relative_packet_arrival( + "Relative packet arrival delay", LineStyle::kLine); + TimeSeries time_series_play_time("Playout delay", LineStyle::kLine); + TimeSeries time_series_target_time("Target delay", LineStyle::kLine, + PointStyle::kHighlight); + + for (const auto& data : arrival_delay_ms) { + const float x = config.GetCallTimeSec(data.first * 1000); // ms to us. + const float y = data.second; + time_series_packet_arrival.points.emplace_back(TimeSeriesPoint(x, y)); + } + for (const auto& data : corrected_arrival_delay_ms) { + const float x = config.GetCallTimeSec(data.first * 1000); // ms to us. + const float y = data.second; + time_series_relative_packet_arrival.points.emplace_back( + TimeSeriesPoint(x, y)); + } + for (const auto& data : playout_delay_ms) { + const float x = config.GetCallTimeSec(data.first * 1000); // ms to us. + const float y = data.second; + time_series_play_time.points.emplace_back(TimeSeriesPoint(x, y)); + } + for (const auto& data : target_delay_ms) { + const float x = config.GetCallTimeSec(data.first * 1000); // ms to us. 
+ const float y = data.second; + time_series_target_time.points.emplace_back(TimeSeriesPoint(x, y)); + } + + plot->AppendTimeSeries(std::move(time_series_packet_arrival)); + plot->AppendTimeSeries(std::move(time_series_relative_packet_arrival)); + plot->AppendTimeSeries(std::move(time_series_play_time)); + plot->AppendTimeSeries(std::move(time_series_target_time)); + + plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)", + kLeftMargin, kRightMargin); + plot->SetSuggestedYAxis(0, 1, "Relative delay (ms)", kBottomMargin, + kTopMargin); + plot->SetTitle("NetEq timing for " + + GetStreamName(parsed_log, kIncomingPacket, ssrc)); +} + +template <typename NetEqStatsType> +void CreateNetEqStatsGraphInternal( + const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + const NetEqStatsGetterMap& neteq_stats, + rtc::FunctionView<const std::vector<std::pair<int64_t, NetEqStatsType>>*( + const test::NetEqStatsGetter*)> data_extractor, + rtc::FunctionView<float(const NetEqStatsType&)> stats_extractor, + const std::string& plot_name, + Plot* plot) { + std::map<uint32_t, TimeSeries> time_series; + + for (const auto& st : neteq_stats) { + const uint32_t ssrc = st.first; + const std::vector<std::pair<int64_t, NetEqStatsType>>* data_vector = + data_extractor(st.second.get()); + for (const auto& data : *data_vector) { + const float time = config.GetCallTimeSec(data.first * 1000); // ms to us. + const float value = stats_extractor(data.second); + time_series[ssrc].points.emplace_back(TimeSeriesPoint(time, value)); + } + } + + for (auto& series : time_series) { + series.second.label = + GetStreamName(parsed_log, kIncomingPacket, series.first); + series.second.line_style = LineStyle::kLine; + plot->AppendTimeSeries(std::move(series.second)); + } + + plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)", + kLeftMargin, kRightMargin); + plot->SetSuggestedYAxis(0, 1, plot_name, kBottomMargin, kTopMargin); + plot->SetTitle(plot_name); +} + +void CreateNetEqNetworkStatsGraph( + const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + const NetEqStatsGetterMap& neteq_stats, + rtc::FunctionView<float(const NetEqNetworkStatistics&)> stats_extractor, + const std::string& plot_name, + Plot* plot) { + CreateNetEqStatsGraphInternal<NetEqNetworkStatistics>( + parsed_log, config, neteq_stats, + [](const test::NetEqStatsGetter* stats_getter) { + return stats_getter->stats(); + }, + stats_extractor, plot_name, plot); +} + +void CreateNetEqLifetimeStatsGraph( + const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + const NetEqStatsGetterMap& neteq_stats, + rtc::FunctionView<float(const NetEqLifetimeStatistics&)> stats_extractor, + const std::string& plot_name, + Plot* plot) { + CreateNetEqStatsGraphInternal<NetEqLifetimeStatistics>( + parsed_log, config, neteq_stats, + [](const test::NetEqStatsGetter* stats_getter) { + return stats_getter->lifetime_stats(); + }, + stats_extractor, plot_name, plot); +} + +} // namespace webrtc diff --git a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyze_audio.h b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyze_audio.h new file mode 100644 index 00000000000..726e84492db --- /dev/null +++ b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyze_audio.h @@ -0,0 +1,75 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ANALYZE_AUDIO_H_ +#define RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ANALYZE_AUDIO_H_ + +#include <cstdint> +#include <map> +#include <memory> +#include <string> + +#include "api/function_view.h" +#include "logging/rtc_event_log/rtc_event_log_parser.h" +#include "modules/audio_coding/neteq/tools/neteq_stats_getter.h" +#include "rtc_tools/rtc_event_log_visualizer/analyzer_common.h" +#include "rtc_tools/rtc_event_log_visualizer/plot_base.h" + +namespace webrtc { + +void CreateAudioEncoderTargetBitrateGraph(const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + Plot* plot); +void CreateAudioEncoderFrameLengthGraph(const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + Plot* plot); +void CreateAudioEncoderPacketLossGraph(const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + Plot* plot); +void CreateAudioEncoderEnableFecGraph(const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + Plot* plot); +void CreateAudioEncoderEnableDtxGraph(const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + Plot* plot); +void CreateAudioEncoderNumChannelsGraph(const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + Plot* plot); + +using NetEqStatsGetterMap = + std::map<uint32_t, std::unique_ptr<test::NetEqStatsGetter>>; +NetEqStatsGetterMap SimulateNetEq(const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + const std::string& replacement_file_name, + int file_sample_rate_hz); + +void CreateAudioJitterBufferGraph(const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + uint32_t ssrc, + const test::NetEqStatsGetter* stats_getter, + Plot* plot); +void CreateNetEqNetworkStatsGraph( + const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + const NetEqStatsGetterMap& neteq_stats_getters, + rtc::FunctionView<float(const NetEqNetworkStatistics&)> stats_extractor, + const std::string& plot_name, + Plot* plot); +void CreateNetEqLifetimeStatsGraph( + const ParsedRtcEventLog& parsed_log, + const AnalyzerConfig& config, + const NetEqStatsGetterMap& neteq_stats_getters, + rtc::FunctionView<float(const NetEqLifetimeStatistics&)> stats_extractor, + const std::string& plot_name, + Plot* plot); + +} // namespace webrtc + +#endif // RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ANALYZE_AUDIO_H_ diff --git a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyzer.cc b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyzer.cc index 9fcb510adcb..6d84b1b5ca4 100644 --- a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyzer.cc +++ b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyzer.cc @@ -31,12 +31,6 @@ #include "logging/rtc_event_log/rtc_event_processor.h" #include "logging/rtc_event_log/rtc_stream_config.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor.h" -#include "modules/audio_coding/neteq/tools/audio_sink.h" -#include "modules/audio_coding/neteq/tools/fake_decode_from_file.h" -#include "modules/audio_coding/neteq/tools/neteq_delay_analyzer.h" -#include "modules/audio_coding/neteq/tools/neteq_replacement_input.h" -#include 
"modules/audio_coding/neteq/tools/neteq_test.h" -#include "modules/audio_coding/neteq/tools/resample_input_audio_file.h" #include "modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.h" #include "modules/congestion_controller/goog_cc/bitrate_estimator.h" #include "modules/congestion_controller/goog_cc/delay_based_bwe.h" @@ -45,7 +39,6 @@ #include "modules/pacing/paced_sender.h" #include "modules/pacing/packet_router.h" #include "modules/remote_bitrate_estimator/include/bwe_defines.h" -#include "modules/rtp_rtcp/include/rtp_rtcp.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtcp_packet.h" #include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" @@ -54,6 +47,7 @@ #include "modules/rtp_rtcp/source/rtcp_packet/sender_report.h" #include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "modules/rtp_rtcp/source/rtp_utility.h" #include "rtc_base/checks.h" #include "rtc_base/format_macros.h" @@ -62,17 +56,12 @@ #include "rtc_base/rate_statistics.h" #include "rtc_base/strings/string_builder.h" #include "rtc_tools/rtc_event_log_visualizer/log_simulation.h" - -#ifndef BWE_TEST_LOGGING_COMPILE_TIME_ENABLE -#define BWE_TEST_LOGGING_COMPILE_TIME_ENABLE 0 -#endif // BWE_TEST_LOGGING_COMPILE_TIME_ENABLE +#include "test/explicit_key_value_config.h" namespace webrtc { namespace { -const int kNumMicrosecsPerSec = 1000000; - std::string SsrcToString(uint32_t ssrc) { rtc::StringBuilder ss; ss << "SSRC " << ssrc; @@ -168,11 +157,6 @@ absl::optional<uint32_t> EstimateRtpClockFrequency( return absl::nullopt; } -constexpr float kLeftMargin = 0.01f; -constexpr float kRightMargin = 0.02f; -constexpr float kBottomMargin = 0.02f; -constexpr float kTopMargin = 0.05f; - absl::optional<double> NetworkDelayDiff_AbsSendTime( const LoggedRtpPacketIncoming& old_packet, const LoggedRtpPacketIncoming& new_packet) { @@ -222,99 +206,6 @@ absl::optional<double> NetworkDelayDiff_CaptureTime( return delay_change; } -// For each element in data_view, use |f()| to extract a y-coordinate and -// store the result in a TimeSeries. -template <typename DataType, typename IterableType> -void ProcessPoints(rtc::FunctionView<float(const DataType&)> fx, - rtc::FunctionView<absl::optional<float>(const DataType&)> fy, - const IterableType& data_view, - TimeSeries* result) { - for (size_t i = 0; i < data_view.size(); i++) { - const DataType& elem = data_view[i]; - float x = fx(elem); - absl::optional<float> y = fy(elem); - if (y) - result->points.emplace_back(x, *y); - } -} - -// For each pair of adjacent elements in |data|, use |f()| to extract a -// y-coordinate and store the result in a TimeSeries. Note that the x-coordinate -// will be the time of the second element in the pair. -template <typename DataType, typename ResultType, typename IterableType> -void ProcessPairs( - rtc::FunctionView<float(const DataType&)> fx, - rtc::FunctionView<absl::optional<ResultType>(const DataType&, - const DataType&)> fy, - const IterableType& data, - TimeSeries* result) { - for (size_t i = 1; i < data.size(); i++) { - float x = fx(data[i]); - absl::optional<ResultType> y = fy(data[i - 1], data[i]); - if (y) - result->points.emplace_back(x, static_cast<float>(*y)); - } -} - -// For each pair of adjacent elements in |data|, use |f()| to extract a -// y-coordinate and store the result in a TimeSeries. 
Note that the x-coordinate -// will be the time of the second element in the pair. -template <typename DataType, typename ResultType, typename IterableType> -void AccumulatePairs( - rtc::FunctionView<float(const DataType&)> fx, - rtc::FunctionView<absl::optional<ResultType>(const DataType&, - const DataType&)> fy, - const IterableType& data, - TimeSeries* result) { - ResultType sum = 0; - for (size_t i = 1; i < data.size(); i++) { - float x = fx(data[i]); - absl::optional<ResultType> y = fy(data[i - 1], data[i]); - if (y) { - sum += *y; - result->points.emplace_back(x, static_cast<float>(sum)); - } - } -} - -// Calculates a moving average of |data| and stores the result in a TimeSeries. -// A data point is generated every |step| microseconds from |begin_time| -// to |end_time|. The value of each data point is the average of the data -// during the preceding |window_duration_us| microseconds. -template <typename DataType, typename ResultType, typename IterableType> -void MovingAverage( - rtc::FunctionView<absl::optional<ResultType>(const DataType&)> fy, - const IterableType& data_view, - AnalyzerConfig config, - TimeSeries* result) { - size_t window_index_begin = 0; - size_t window_index_end = 0; - ResultType sum_in_window = 0; - - for (int64_t t = config.begin_time_; t < config.end_time_ + config.step_; - t += config.step_) { - while (window_index_end < data_view.size() && - data_view[window_index_end].log_time_us() < t) { - absl::optional<ResultType> value = fy(data_view[window_index_end]); - if (value) - sum_in_window += *value; - ++window_index_end; - } - while (window_index_begin < data_view.size() && - data_view[window_index_begin].log_time_us() < - t - config.window_duration_) { - absl::optional<ResultType> value = fy(data_view[window_index_begin]); - if (value) - sum_in_window -= *value; - ++window_index_begin; - } - float window_duration_s = - static_cast<float>(config.window_duration_) / kNumMicrosecsPerSec; - float x = config.GetCallTimeSec(t); - float y = sum_in_window / window_duration_s; - result->points.emplace_back(x, y); - } -} template <typename T> TimeSeries CreateRtcpTypeTimeSeries(const std::vector<T>& rtcp_list, @@ -465,32 +356,21 @@ EventLogAnalyzer::EventLogAnalyzer(const ParsedRtcEventLog& log, config_.begin_time_ = config_.end_time_ = 0; } - const auto& log_start_events = parsed_log_.start_log_events(); - const auto& log_end_events = parsed_log_.stop_log_events(); - auto start_iter = log_start_events.begin(); - auto end_iter = log_end_events.begin(); - while (start_iter != log_start_events.end()) { - int64_t start = start_iter->log_time_us(); - ++start_iter; - absl::optional<int64_t> next_start; - if (start_iter != log_start_events.end()) - next_start.emplace(start_iter->log_time_us()); - if (end_iter != log_end_events.end() && - end_iter->log_time_us() <= - next_start.value_or(std::numeric_limits<int64_t>::max())) { - int64_t end = end_iter->log_time_us(); - RTC_DCHECK_LE(start, end); - log_segments_.push_back(std::make_pair(start, end)); - ++end_iter; - } else { - // we're missing an end event. Assume that it occurred just before the - // next start. 
- log_segments_.push_back( - std::make_pair(start, next_start.value_or(config_.end_time_))); - } - } - RTC_LOG(LS_INFO) << "Found " << log_segments_.size() - << " (LOG_START, LOG_END) segments in log."; + RTC_LOG(LS_INFO) << "Log is " + << (parsed_log_.last_timestamp() - + parsed_log_.first_timestamp()) / + 1000000 + << " seconds long."; +} + +EventLogAnalyzer::EventLogAnalyzer(const ParsedRtcEventLog& log, + const AnalyzerConfig& config) + : parsed_log_(log), config_(config) { + RTC_LOG(LS_INFO) << "Log is " + << (parsed_log_.last_timestamp() - + parsed_log_.first_timestamp()) / + 1000000 + << " seconds long."; } class BitrateObserver : public RemoteBitrateObserver { @@ -527,7 +407,7 @@ void EventLogAnalyzer::CreatePacketGraph(PacketDirection direction, continue; } - TimeSeries time_series(GetStreamName(direction, stream.ssrc), + TimeSeries time_series(GetStreamName(parsed_log_, direction, stream.ssrc), LineStyle::kBar); auto GetPacketSize = [](const LoggedRtpPacket& packet) { return absl::optional<float>(packet.total_length); @@ -597,8 +477,8 @@ void EventLogAnalyzer::CreateAccumulatedPacketsGraph(PacketDirection direction, for (const auto& stream : parsed_log_.rtp_packets_by_ssrc(direction)) { if (!MatchingSsrc(stream.ssrc, desired_ssrc_)) continue; - std::string label = - std::string("RTP ") + GetStreamName(direction, stream.ssrc); + std::string label = std::string("RTP ") + + GetStreamName(parsed_log_, direction, stream.ssrc); CreateAccumulatedPacketsTimeSeries(plot, stream.packet_view, label); } std::string label = @@ -627,7 +507,8 @@ void EventLogAnalyzer::CreatePacketRateGraph(PacketDirection direction, continue; } TimeSeries time_series( - std::string("RTP ") + GetStreamName(direction, stream.ssrc), + std::string("RTP ") + + GetStreamName(parsed_log_, direction, stream.ssrc), LineStyle::kLine); MovingAverage<LoggedRtpPacket, double>(CountPackets, stream.packet_view, config_, &time_series); @@ -736,9 +617,9 @@ void EventLogAnalyzer::CreatePlayoutGraph(Plot* plot) { void EventLogAnalyzer::CreateAudioLevelGraph(PacketDirection direction, Plot* plot) { for (const auto& stream : parsed_log_.rtp_packets_by_ssrc(direction)) { - if (!IsAudioSsrc(direction, stream.ssrc)) + if (!IsAudioSsrc(parsed_log_, direction, stream.ssrc)) continue; - TimeSeries time_series(GetStreamName(direction, stream.ssrc), + TimeSeries time_series(GetStreamName(parsed_log_, direction, stream.ssrc), LineStyle::kLine); for (auto& packet : stream.packet_view) { if (packet.header.extension.hasAudioLevel) { @@ -767,8 +648,9 @@ void EventLogAnalyzer::CreateSequenceNumberGraph(Plot* plot) { continue; } - TimeSeries time_series(GetStreamName(kIncomingPacket, stream.ssrc), - LineStyle::kBar); + TimeSeries time_series( + GetStreamName(parsed_log_, kIncomingPacket, stream.ssrc), + LineStyle::kBar); auto GetSequenceNumberDiff = [](const LoggedRtpPacketIncoming& old_packet, const LoggedRtpPacketIncoming& new_packet) { int64_t diff = @@ -801,8 +683,9 @@ void EventLogAnalyzer::CreateIncomingPacketLossGraph(Plot* plot) { continue; } - TimeSeries time_series(GetStreamName(kIncomingPacket, stream.ssrc), - LineStyle::kLine, PointStyle::kHighlight); + TimeSeries time_series( + GetStreamName(parsed_log_, kIncomingPacket, stream.ssrc), + LineStyle::kLine, PointStyle::kHighlight); // TODO(terelius): Should the window and step size be read from the class // instead? 
const int64_t kWindowUs = 1000000; @@ -855,7 +738,7 @@ void EventLogAnalyzer::CreateIncomingDelayGraph(Plot* plot) { for (const auto& stream : parsed_log_.incoming_rtp_packets_by_ssrc()) { // Filter on SSRC. if (!MatchingSsrc(stream.ssrc, desired_ssrc_) || - IsRtxSsrc(kIncomingPacket, stream.ssrc)) { + IsRtxSsrc(parsed_log_, kIncomingPacket, stream.ssrc)) { continue; } @@ -866,15 +749,14 @@ void EventLogAnalyzer::CreateIncomingDelayGraph(Plot* plot) { << packets.size() << " packets in the stream."; continue; } - int64_t end_time_us = log_segments_.empty() - ? std::numeric_limits<int64_t>::max() - : log_segments_.front().second; + int64_t segment_end_us = parsed_log_.first_log_segment().stop_time_us(); absl::optional<uint32_t> estimated_frequency = - EstimateRtpClockFrequency(packets, end_time_us); + EstimateRtpClockFrequency(packets, segment_end_us); if (!estimated_frequency) continue; const double frequency_hz = *estimated_frequency; - if (IsVideoSsrc(kIncomingPacket, stream.ssrc) && frequency_hz != 90000) { + if (IsVideoSsrc(parsed_log_, kIncomingPacket, stream.ssrc) && + frequency_hz != 90000) { RTC_LOG(LS_WARNING) << "Video stream should use a 90 kHz clock but appears to use " << frequency_hz / 1000 << ". Discarding."; @@ -891,14 +773,16 @@ void EventLogAnalyzer::CreateIncomingDelayGraph(Plot* plot) { }; TimeSeries capture_time_data( - GetStreamName(kIncomingPacket, stream.ssrc) + " capture-time", + GetStreamName(parsed_log_, kIncomingPacket, stream.ssrc) + + " capture-time", LineStyle::kLine); AccumulatePairs<LoggedRtpPacketIncoming, double>( ToCallTime, ToNetworkDelay, packets, &capture_time_data); plot->AppendTimeSeries(std::move(capture_time_data)); TimeSeries send_time_data( - GetStreamName(kIncomingPacket, stream.ssrc) + " abs-send-time", + GetStreamName(parsed_log_, kIncomingPacket, stream.ssrc) + + " abs-send-time", LineStyle::kLine); AccumulatePairs<LoggedRtpPacketIncoming, double>( ToCallTime, NetworkDelayDiff_AbsSendTime, packets, &send_time_data); @@ -1191,7 +1075,7 @@ void EventLogAnalyzer::CreateStreamBitrateGraph(PacketDirection direction, continue; } - TimeSeries time_series(GetStreamName(direction, stream.ssrc), + TimeSeries time_series(GetStreamName(parsed_log_, direction, stream.ssrc), LineStyle::kLine); auto GetPacketSizeKilobits = [](const LoggedRtpPacket& packet) { return packet.total_length * 8.0 / 1000.0; @@ -1325,10 +1209,13 @@ void EventLogAnalyzer::CreateSendSideBweSimulationGraph(Plot* plot) { TimeSeries time_series("Delay-based estimate", LineStyle::kStep, PointStyle::kHighlight); - TimeSeries acked_time_series("Acked bitrate", LineStyle::kLine, + TimeSeries acked_time_series("Raw acked bitrate", LineStyle::kLine, PointStyle::kHighlight); - TimeSeries acked_estimate_time_series( - "Acked bitrate estimate", LineStyle::kLine, PointStyle::kHighlight); + TimeSeries robust_time_series("Robust throughput estimate", LineStyle::kLine, + PointStyle::kHighlight); + TimeSeries acked_estimate_time_series("Ackednowledged bitrate estimate", + LineStyle::kLine, + PointStyle::kHighlight); auto rtp_iterator = outgoing_rtp.begin(); auto rtcp_iterator = incoming_rtcp.begin(); @@ -1354,20 +1241,18 @@ void EventLogAnalyzer::CreateSendSideBweSimulationGraph(Plot* plot) { return std::numeric_limits<int64_t>::max(); }; - RateStatistics acked_bitrate(250, 8000); -#if !(BWE_TEST_LOGGING_COMPILE_TIME_ENABLE) - FieldTrialBasedConfig field_trial_config_; - // The event_log_visualizer should normally not be compiled with - // BWE_TEST_LOGGING_COMPILE_TIME_ENABLE since the normal plots 
won't work. - // However, compiling with BWE_TEST_LOGGING, running with --plot=sendside_bwe - // and piping the output to plot_dynamics.py can be used as a hack to get the - // internal state of various BWE components. In this case, it is important - // we don't instantiate the AcknowledgedBitrateEstimator both here and in - // GoogCcNetworkController since that would lead to duplicate outputs. + RateStatistics acked_bitrate(750, 8000); + test::ExplicitKeyValueConfig throughput_config( + "WebRTC-Bwe-RobustThroughputEstimatorSettings/" + "enabled:true,reduce_bias:true,assume_shared_link:false,initial_packets:" + "10,min_packets:25,window_duration:750ms,unacked_weight:0.5/"); + std::unique_ptr<AcknowledgedBitrateEstimatorInterface> + robust_throughput_estimator( + AcknowledgedBitrateEstimatorInterface::Create(&throughput_config)); + FieldTrialBasedConfig field_trial_config; std::unique_ptr<AcknowledgedBitrateEstimatorInterface> acknowledged_bitrate_estimator( - AcknowledgedBitrateEstimatorInterface::Create(&field_trial_config_)); -#endif // !(BWE_TEST_LOGGING_COMPILE_TIME_ENABLE) + AcknowledgedBitrateEstimatorInterface::Create(&field_trial_config)); int64_t time_us = std::min({NextRtpTime(), NextRtcpTime(), NextProcessTime()}); int64_t last_update_us = 0; @@ -1377,24 +1262,40 @@ void EventLogAnalyzer::CreateSendSideBweSimulationGraph(Plot* plot) { RTC_DCHECK_EQ(clock.TimeInMicroseconds(), NextRtpTime()); const RtpPacketType& rtp_packet = *rtp_iterator->second; if (rtp_packet.rtp.header.extension.hasTransportSequenceNumber) { - RTC_DCHECK(rtp_packet.rtp.header.extension.hasTransportSequenceNumber); RtpPacketSendInfo packet_info; packet_info.ssrc = rtp_packet.rtp.header.ssrc; packet_info.transport_sequence_number = rtp_packet.rtp.header.extension.transportSequenceNumber; packet_info.rtp_sequence_number = rtp_packet.rtp.header.sequenceNumber; packet_info.length = rtp_packet.rtp.total_length; + if (IsRtxSsrc(parsed_log_, PacketDirection::kOutgoingPacket, + rtp_packet.rtp.header.ssrc)) { + // Don't set the optional media type as we don't know if it is + // a retransmission, FEC or padding. + } else if (IsVideoSsrc(parsed_log_, PacketDirection::kOutgoingPacket, + rtp_packet.rtp.header.ssrc)) { + packet_info.packet_type = RtpPacketMediaType::kVideo; + } else if (IsAudioSsrc(parsed_log_, PacketDirection::kOutgoingPacket, + rtp_packet.rtp.header.ssrc)) { + packet_info.packet_type = RtpPacketMediaType::kAudio; + } transport_feedback.AddPacket( packet_info, 0u, // Per packet overhead bytes. 
Timestamp::Micros(rtp_packet.rtp.log_time_us())); - rtc::SentPacket sent_packet( - rtp_packet.rtp.header.extension.transportSequenceNumber, - rtp_packet.rtp.log_time_us() / 1000); - auto sent_msg = transport_feedback.ProcessSentPacket(sent_packet); - if (sent_msg) - observer.Update(goog_cc->OnSentPacket(*sent_msg)); } + rtc::SentPacket sent_packet; + sent_packet.send_time_ms = rtp_packet.rtp.log_time_ms(); + sent_packet.info.included_in_allocation = true; + sent_packet.info.packet_size_bytes = rtp_packet.rtp.total_length; + if (rtp_packet.rtp.header.extension.hasTransportSequenceNumber) { + sent_packet.packet_id = + rtp_packet.rtp.header.extension.transportSequenceNumber; + sent_packet.info.included_in_feedback = true; + } + auto sent_msg = transport_feedback.ProcessSentPacket(sent_packet); + if (sent_msg) + observer.Update(goog_cc->OnSentPacket(*sent_msg)); ++rtp_iterator; } if (clock.TimeInMicroseconds() >= NextRtcpTime()) { @@ -1409,13 +1310,13 @@ void EventLogAnalyzer::CreateSendSideBweSimulationGraph(Plot* plot) { std::vector<PacketResult> feedback = feedback_msg->SortedByReceiveTime(); if (!feedback.empty()) { -#if !(BWE_TEST_LOGGING_COMPILE_TIME_ENABLE) acknowledged_bitrate_estimator->IncomingPacketFeedbackVector( feedback); -#endif // !(BWE_TEST_LOGGING_COMPILE_TIME_ENABLE) - for (const PacketResult& packet : feedback) + robust_throughput_estimator->IncomingPacketFeedbackVector(feedback); + for (const PacketResult& packet : feedback) { acked_bitrate.Update(packet.sent_packet.size.bytes(), packet.receive_time.ms()); + } bitrate_bps = acked_bitrate.Rate(feedback.back().receive_time.ms()); } } @@ -1423,12 +1324,14 @@ void EventLogAnalyzer::CreateSendSideBweSimulationGraph(Plot* plot) { float x = config_.GetCallTimeSec(clock.TimeInMicroseconds()); float y = bitrate_bps.value_or(0) / 1000; acked_time_series.points.emplace_back(x, y); -#if !(BWE_TEST_LOGGING_COMPILE_TIME_ENABLE) + y = robust_throughput_estimator->bitrate() + .value_or(DataRate::Zero()) + .kbps(); + robust_time_series.points.emplace_back(x, y); y = acknowledged_bitrate_estimator->bitrate() .value_or(DataRate::Zero()) .kbps(); acked_estimate_time_series.points.emplace_back(x, y); -#endif // !(BWE_TEST_LOGGING_COMPILE_TIME_ENABLE) ++rtcp_iterator; } if (clock.TimeInMicroseconds() >= NextProcessTime()) { @@ -1449,6 +1352,7 @@ void EventLogAnalyzer::CreateSendSideBweSimulationGraph(Plot* plot) { } // Add the data set to the plot. plot->AppendTimeSeries(std::move(time_series)); + plot->AppendTimeSeries(std::move(robust_time_series)); plot->AppendTimeSeries(std::move(acked_time_series)); plot->AppendTimeSeriesIfNotEmpty(std::move(acked_estimate_time_series)); @@ -1476,14 +1380,16 @@ void EventLogAnalyzer::CreateReceiveSideBweSimulationGraph(Plot* plot) { } private: - uint32_t last_bitrate_bps_; - bool bitrate_updated_; + // We don't know the start bitrate, but assume that it is the default 300 + // kbps. 
+ uint32_t last_bitrate_bps_ = 300000; + bool bitrate_updated_ = false; }; std::multimap<int64_t, const RtpPacketType*> incoming_rtp; for (const auto& stream : parsed_log_.incoming_rtp_packets_by_ssrc()) { - if (IsVideoSsrc(kIncomingPacket, stream.ssrc)) { + if (IsVideoSsrc(parsed_log_, kIncomingPacket, stream.ssrc)) { for (const auto& rtp_packet : stream.incoming_packets) incoming_rtp.insert( std::make_pair(rtp_packet.rtp.log_time_us(), &rtp_packet)); @@ -1586,7 +1492,7 @@ void EventLogAnalyzer::CreatePacerDelayGraph(Plot* plot) { const std::vector<LoggedRtpPacketOutgoing>& packets = stream.outgoing_packets; - if (IsRtxSsrc(kOutgoingPacket, stream.ssrc)) { + if (IsRtxSsrc(parsed_log_, kOutgoingPacket, stream.ssrc)) { continue; } @@ -1596,14 +1502,12 @@ void EventLogAnalyzer::CreatePacerDelayGraph(Plot* plot) { "pacer delay with less than 2 packets in the stream"; continue; } - int64_t end_time_us = log_segments_.empty() - ? std::numeric_limits<int64_t>::max() - : log_segments_.front().second; + int64_t segment_end_us = parsed_log_.first_log_segment().stop_time_us(); absl::optional<uint32_t> estimated_frequency = - EstimateRtpClockFrequency(packets, end_time_us); + EstimateRtpClockFrequency(packets, segment_end_us); if (!estimated_frequency) continue; - if (IsVideoSsrc(kOutgoingPacket, stream.ssrc) && + if (IsVideoSsrc(parsed_log_, kOutgoingPacket, stream.ssrc) && *estimated_frequency != 90000) { RTC_LOG(LS_WARNING) << "Video stream should use a 90 kHz clock but appears to use " @@ -1612,7 +1516,7 @@ void EventLogAnalyzer::CreatePacerDelayGraph(Plot* plot) { } TimeSeries pacer_delay_series( - GetStreamName(kOutgoingPacket, stream.ssrc) + "(" + + GetStreamName(parsed_log_, kOutgoingPacket, stream.ssrc) + "(" + std::to_string(*estimated_frequency / 1000) + " kHz)", LineStyle::kLine, PointStyle::kHighlight); SeqNumUnwrapper<uint32_t> timestamp_unwrapper; @@ -1645,7 +1549,7 @@ void EventLogAnalyzer::CreateTimestampGraph(PacketDirection direction, Plot* plot) { for (const auto& stream : parsed_log_.rtp_packets_by_ssrc(direction)) { TimeSeries rtp_timestamps( - GetStreamName(direction, stream.ssrc) + " capture-time", + GetStreamName(parsed_log_, direction, stream.ssrc) + " capture-time", LineStyle::kLine, PointStyle::kHighlight); for (const auto& packet : stream.packet_view) { float x = config_.GetCallTimeSec(packet.log_time_us()); @@ -1655,7 +1559,8 @@ void EventLogAnalyzer::CreateTimestampGraph(PacketDirection direction, plot->AppendTimeSeries(std::move(rtp_timestamps)); TimeSeries rtcp_timestamps( - GetStreamName(direction, stream.ssrc) + " rtcp capture-time", + GetStreamName(parsed_log_, direction, stream.ssrc) + + " rtcp capture-time", LineStyle::kLine, PointStyle::kHighlight); // TODO(terelius): Why only sender reports? 
const auto& sender_reports = parsed_log_.sender_reports(direction); @@ -1692,7 +1597,8 @@ void EventLogAnalyzer::CreateSenderAndReceiverReportPlot( bool inserted; if (sr_report_it == sr_reports_by_ssrc.end()) { std::tie(sr_report_it, inserted) = sr_reports_by_ssrc.emplace( - ssrc, TimeSeries(GetStreamName(direction, ssrc) + " Sender Reports", + ssrc, TimeSeries(GetStreamName(parsed_log_, direction, ssrc) + + " Sender Reports", LineStyle::kLine, PointStyle::kHighlight)); } sr_report_it->second.points.emplace_back(x, y); @@ -1713,9 +1619,9 @@ void EventLogAnalyzer::CreateSenderAndReceiverReportPlot( bool inserted; if (rr_report_it == rr_reports_by_ssrc.end()) { std::tie(rr_report_it, inserted) = rr_reports_by_ssrc.emplace( - ssrc, - TimeSeries(GetStreamName(direction, ssrc) + " Receiver Reports", - LineStyle::kLine, PointStyle::kHighlight)); + ssrc, TimeSeries(GetStreamName(parsed_log_, direction, ssrc) + + " Receiver Reports", + LineStyle::kLine, PointStyle::kHighlight)); } rr_report_it->second.points.emplace_back(x, y); } @@ -1730,463 +1636,6 @@ void EventLogAnalyzer::CreateSenderAndReceiverReportPlot( plot->SetTitle(title); } -void EventLogAnalyzer::CreateAudioEncoderTargetBitrateGraph(Plot* plot) { - TimeSeries time_series("Audio encoder target bitrate", LineStyle::kLine, - PointStyle::kHighlight); - auto GetAnaBitrateBps = [](const LoggedAudioNetworkAdaptationEvent& ana_event) - -> absl::optional<float> { - if (ana_event.config.bitrate_bps) - return absl::optional<float>( - static_cast<float>(*ana_event.config.bitrate_bps)); - return absl::nullopt; - }; - auto ToCallTime = [this](const LoggedAudioNetworkAdaptationEvent& packet) { - return this->config_.GetCallTimeSec(packet.log_time_us()); - }; - ProcessPoints<LoggedAudioNetworkAdaptationEvent>( - ToCallTime, GetAnaBitrateBps, - parsed_log_.audio_network_adaptation_events(), &time_series); - plot->AppendTimeSeries(std::move(time_series)); - plot->SetXAxis(config_.CallBeginTimeSec(), config_.CallEndTimeSec(), - "Time (s)", kLeftMargin, kRightMargin); - plot->SetSuggestedYAxis(0, 1, "Bitrate (bps)", kBottomMargin, kTopMargin); - plot->SetTitle("Reported audio encoder target bitrate"); -} - -void EventLogAnalyzer::CreateAudioEncoderFrameLengthGraph(Plot* plot) { - TimeSeries time_series("Audio encoder frame length", LineStyle::kLine, - PointStyle::kHighlight); - auto GetAnaFrameLengthMs = - [](const LoggedAudioNetworkAdaptationEvent& ana_event) { - if (ana_event.config.frame_length_ms) - return absl::optional<float>( - static_cast<float>(*ana_event.config.frame_length_ms)); - return absl::optional<float>(); - }; - auto ToCallTime = [this](const LoggedAudioNetworkAdaptationEvent& packet) { - return this->config_.GetCallTimeSec(packet.log_time_us()); - }; - ProcessPoints<LoggedAudioNetworkAdaptationEvent>( - ToCallTime, GetAnaFrameLengthMs, - parsed_log_.audio_network_adaptation_events(), &time_series); - plot->AppendTimeSeries(std::move(time_series)); - plot->SetXAxis(config_.CallBeginTimeSec(), config_.CallEndTimeSec(), - "Time (s)", kLeftMargin, kRightMargin); - plot->SetSuggestedYAxis(0, 1, "Frame length (ms)", kBottomMargin, kTopMargin); - plot->SetTitle("Reported audio encoder frame length"); -} - -void EventLogAnalyzer::CreateAudioEncoderPacketLossGraph(Plot* plot) { - TimeSeries time_series("Audio encoder uplink packet loss fraction", - LineStyle::kLine, PointStyle::kHighlight); - auto GetAnaPacketLoss = - [](const LoggedAudioNetworkAdaptationEvent& ana_event) { - if (ana_event.config.uplink_packet_loss_fraction) - return 
absl::optional<float>(static_cast<float>( - *ana_event.config.uplink_packet_loss_fraction)); - return absl::optional<float>(); - }; - auto ToCallTime = [this](const LoggedAudioNetworkAdaptationEvent& packet) { - return this->config_.GetCallTimeSec(packet.log_time_us()); - }; - ProcessPoints<LoggedAudioNetworkAdaptationEvent>( - ToCallTime, GetAnaPacketLoss, - parsed_log_.audio_network_adaptation_events(), &time_series); - plot->AppendTimeSeries(std::move(time_series)); - plot->SetXAxis(config_.CallBeginTimeSec(), config_.CallEndTimeSec(), - "Time (s)", kLeftMargin, kRightMargin); - plot->SetSuggestedYAxis(0, 10, "Percent lost packets", kBottomMargin, - kTopMargin); - plot->SetTitle("Reported audio encoder lost packets"); -} - -void EventLogAnalyzer::CreateAudioEncoderEnableFecGraph(Plot* plot) { - TimeSeries time_series("Audio encoder FEC", LineStyle::kLine, - PointStyle::kHighlight); - auto GetAnaFecEnabled = - [](const LoggedAudioNetworkAdaptationEvent& ana_event) { - if (ana_event.config.enable_fec) - return absl::optional<float>( - static_cast<float>(*ana_event.config.enable_fec)); - return absl::optional<float>(); - }; - auto ToCallTime = [this](const LoggedAudioNetworkAdaptationEvent& packet) { - return this->config_.GetCallTimeSec(packet.log_time_us()); - }; - ProcessPoints<LoggedAudioNetworkAdaptationEvent>( - ToCallTime, GetAnaFecEnabled, - parsed_log_.audio_network_adaptation_events(), &time_series); - plot->AppendTimeSeries(std::move(time_series)); - plot->SetXAxis(config_.CallBeginTimeSec(), config_.CallEndTimeSec(), - "Time (s)", kLeftMargin, kRightMargin); - plot->SetSuggestedYAxis(0, 1, "FEC (false/true)", kBottomMargin, kTopMargin); - plot->SetTitle("Reported audio encoder FEC"); -} - -void EventLogAnalyzer::CreateAudioEncoderEnableDtxGraph(Plot* plot) { - TimeSeries time_series("Audio encoder DTX", LineStyle::kLine, - PointStyle::kHighlight); - auto GetAnaDtxEnabled = - [](const LoggedAudioNetworkAdaptationEvent& ana_event) { - if (ana_event.config.enable_dtx) - return absl::optional<float>( - static_cast<float>(*ana_event.config.enable_dtx)); - return absl::optional<float>(); - }; - auto ToCallTime = [this](const LoggedAudioNetworkAdaptationEvent& packet) { - return this->config_.GetCallTimeSec(packet.log_time_us()); - }; - ProcessPoints<LoggedAudioNetworkAdaptationEvent>( - ToCallTime, GetAnaDtxEnabled, - parsed_log_.audio_network_adaptation_events(), &time_series); - plot->AppendTimeSeries(std::move(time_series)); - plot->SetXAxis(config_.CallBeginTimeSec(), config_.CallEndTimeSec(), - "Time (s)", kLeftMargin, kRightMargin); - plot->SetSuggestedYAxis(0, 1, "DTX (false/true)", kBottomMargin, kTopMargin); - plot->SetTitle("Reported audio encoder DTX"); -} - -void EventLogAnalyzer::CreateAudioEncoderNumChannelsGraph(Plot* plot) { - TimeSeries time_series("Audio encoder number of channels", LineStyle::kLine, - PointStyle::kHighlight); - auto GetAnaNumChannels = - [](const LoggedAudioNetworkAdaptationEvent& ana_event) { - if (ana_event.config.num_channels) - return absl::optional<float>( - static_cast<float>(*ana_event.config.num_channels)); - return absl::optional<float>(); - }; - auto ToCallTime = [this](const LoggedAudioNetworkAdaptationEvent& packet) { - return this->config_.GetCallTimeSec(packet.log_time_us()); - }; - ProcessPoints<LoggedAudioNetworkAdaptationEvent>( - ToCallTime, GetAnaNumChannels, - parsed_log_.audio_network_adaptation_events(), &time_series); - plot->AppendTimeSeries(std::move(time_series)); - plot->SetXAxis(config_.CallBeginTimeSec(), 
config_.CallEndTimeSec(), - "Time (s)", kLeftMargin, kRightMargin); - plot->SetSuggestedYAxis(0, 1, "Number of channels (1 (mono)/2 (stereo))", - kBottomMargin, kTopMargin); - plot->SetTitle("Reported audio encoder number of channels"); -} - -class NetEqStreamInput : public test::NetEqInput { - public: - // Does not take any ownership, and all pointers must refer to valid objects - // that outlive the one constructed. - NetEqStreamInput(const std::vector<LoggedRtpPacketIncoming>* packet_stream, - const std::vector<LoggedAudioPlayoutEvent>* output_events, - absl::optional<int64_t> end_time_ms) - : packet_stream_(*packet_stream), - packet_stream_it_(packet_stream_.begin()), - output_events_it_(output_events->begin()), - output_events_end_(output_events->end()), - end_time_ms_(end_time_ms) { - RTC_DCHECK(packet_stream); - RTC_DCHECK(output_events); - } - - absl::optional<int64_t> NextPacketTime() const override { - if (packet_stream_it_ == packet_stream_.end()) { - return absl::nullopt; - } - if (end_time_ms_ && packet_stream_it_->rtp.log_time_ms() > *end_time_ms_) { - return absl::nullopt; - } - return packet_stream_it_->rtp.log_time_ms(); - } - - absl::optional<int64_t> NextOutputEventTime() const override { - if (output_events_it_ == output_events_end_) { - return absl::nullopt; - } - if (end_time_ms_ && output_events_it_->log_time_ms() > *end_time_ms_) { - return absl::nullopt; - } - return output_events_it_->log_time_ms(); - } - - std::unique_ptr<PacketData> PopPacket() override { - if (packet_stream_it_ == packet_stream_.end()) { - return std::unique_ptr<PacketData>(); - } - std::unique_ptr<PacketData> packet_data(new PacketData()); - packet_data->header = packet_stream_it_->rtp.header; - packet_data->time_ms = packet_stream_it_->rtp.log_time_ms(); - - // This is a header-only "dummy" packet. Set the payload to all zeros, with - // length according to the virtual length. - packet_data->payload.SetSize(packet_stream_it_->rtp.total_length - - packet_stream_it_->rtp.header_length); - std::fill_n(packet_data->payload.data(), packet_data->payload.size(), 0); - - ++packet_stream_it_; - return packet_data; - } - - void AdvanceOutputEvent() override { - if (output_events_it_ != output_events_end_) { - ++output_events_it_; - } - } - - bool ended() const override { return !NextEventTime(); } - - absl::optional<RTPHeader> NextHeader() const override { - if (packet_stream_it_ == packet_stream_.end()) { - return absl::nullopt; - } - return packet_stream_it_->rtp.header; - } - - private: - const std::vector<LoggedRtpPacketIncoming>& packet_stream_; - std::vector<LoggedRtpPacketIncoming>::const_iterator packet_stream_it_; - std::vector<LoggedAudioPlayoutEvent>::const_iterator output_events_it_; - const std::vector<LoggedAudioPlayoutEvent>::const_iterator output_events_end_; - const absl::optional<int64_t> end_time_ms_; -}; - -namespace { - -// Factory to create a "replacement decoder" that produces the decoded audio -// by reading from a file rather than from the encoded payloads. 
-class ReplacementAudioDecoderFactory : public AudioDecoderFactory { - public: - ReplacementAudioDecoderFactory(const absl::string_view replacement_file_name, - int file_sample_rate_hz) - : replacement_file_name_(replacement_file_name), - file_sample_rate_hz_(file_sample_rate_hz) {} - - std::vector<AudioCodecSpec> GetSupportedDecoders() override { - RTC_NOTREACHED(); - return {}; - } - - bool IsSupportedDecoder(const SdpAudioFormat& format) override { - return true; - } - - std::unique_ptr<AudioDecoder> MakeAudioDecoder( - const SdpAudioFormat& format, - absl::optional<AudioCodecPairId> codec_pair_id) override { - auto replacement_file = std::make_unique<test::ResampleInputAudioFile>( - replacement_file_name_, file_sample_rate_hz_); - replacement_file->set_output_rate_hz(48000); - return std::make_unique<test::FakeDecodeFromFile>( - std::move(replacement_file), 48000, false); - } - - private: - const std::string replacement_file_name_; - const int file_sample_rate_hz_; -}; - -// Creates a NetEq test object and all necessary input and output helpers. Runs -// the test and returns the NetEqDelayAnalyzer object that was used to -// instrument the test. -std::unique_ptr<test::NetEqStatsGetter> CreateNetEqTestAndRun( - const std::vector<LoggedRtpPacketIncoming>* packet_stream, - const std::vector<LoggedAudioPlayoutEvent>* output_events, - absl::optional<int64_t> end_time_ms, - const std::string& replacement_file_name, - int file_sample_rate_hz) { - std::unique_ptr<test::NetEqInput> input( - new NetEqStreamInput(packet_stream, output_events, end_time_ms)); - - constexpr int kReplacementPt = 127; - std::set<uint8_t> cn_types; - std::set<uint8_t> forbidden_types; - input.reset(new test::NetEqReplacementInput(std::move(input), kReplacementPt, - cn_types, forbidden_types)); - - NetEq::Config config; - config.max_packets_in_buffer = 200; - config.enable_fast_accelerate = true; - - std::unique_ptr<test::VoidAudioSink> output(new test::VoidAudioSink()); - - rtc::scoped_refptr<AudioDecoderFactory> decoder_factory = - new rtc::RefCountedObject<ReplacementAudioDecoderFactory>( - replacement_file_name, file_sample_rate_hz); - - test::NetEqTest::DecoderMap codecs = { - {kReplacementPt, SdpAudioFormat("l16", 48000, 1)}}; - - std::unique_ptr<test::NetEqDelayAnalyzer> delay_cb( - new test::NetEqDelayAnalyzer); - std::unique_ptr<test::NetEqStatsGetter> neteq_stats_getter( - new test::NetEqStatsGetter(std::move(delay_cb))); - test::DefaultNetEqTestErrorCallback error_cb; - test::NetEqTest::Callbacks callbacks; - callbacks.error_callback = &error_cb; - callbacks.post_insert_packet = neteq_stats_getter->delay_analyzer(); - callbacks.get_audio_callback = neteq_stats_getter.get(); - - test::NetEqTest test(config, decoder_factory, codecs, /*text_log=*/nullptr, - /*factory=*/nullptr, std::move(input), std::move(output), - callbacks); - test.Run(); - return neteq_stats_getter; -} -} // namespace - -EventLogAnalyzer::NetEqStatsGetterMap EventLogAnalyzer::SimulateNetEq( - const std::string& replacement_file_name, - int file_sample_rate_hz) const { - NetEqStatsGetterMap neteq_stats; - - for (const auto& stream : parsed_log_.incoming_rtp_packets_by_ssrc()) { - const uint32_t ssrc = stream.ssrc; - if (!IsAudioSsrc(kIncomingPacket, ssrc)) - continue; - const std::vector<LoggedRtpPacketIncoming>* audio_packets = - &stream.incoming_packets; - if (audio_packets == nullptr) { - // No incoming audio stream found. 
- continue; - } - - RTC_DCHECK(neteq_stats.find(ssrc) == neteq_stats.end()); - - std::map<uint32_t, std::vector<LoggedAudioPlayoutEvent>>::const_iterator - output_events_it = parsed_log_.audio_playout_events().find(ssrc); - if (output_events_it == parsed_log_.audio_playout_events().end()) { - // Could not find output events with SSRC matching the input audio stream. - // Using the first available stream of output events. - output_events_it = parsed_log_.audio_playout_events().cbegin(); - } - - absl::optional<int64_t> end_time_ms = - log_segments_.empty() - ? absl::nullopt - : absl::optional<int64_t>(log_segments_.front().second / 1000); - - neteq_stats[ssrc] = CreateNetEqTestAndRun( - audio_packets, &output_events_it->second, end_time_ms, - replacement_file_name, file_sample_rate_hz); - } - - return neteq_stats; -} - -// Given a NetEqStatsGetter and the SSRC that the NetEqStatsGetter was created -// for, this method generates a plot for the jitter buffer delay profile. -void EventLogAnalyzer::CreateAudioJitterBufferGraph( - uint32_t ssrc, - const test::NetEqStatsGetter* stats_getter, - Plot* plot) const { - test::NetEqDelayAnalyzer::Delays arrival_delay_ms; - test::NetEqDelayAnalyzer::Delays corrected_arrival_delay_ms; - test::NetEqDelayAnalyzer::Delays playout_delay_ms; - test::NetEqDelayAnalyzer::Delays target_delay_ms; - - stats_getter->delay_analyzer()->CreateGraphs( - &arrival_delay_ms, &corrected_arrival_delay_ms, &playout_delay_ms, - &target_delay_ms); - - TimeSeries time_series_packet_arrival("packet arrival delay", - LineStyle::kLine); - TimeSeries time_series_relative_packet_arrival( - "Relative packet arrival delay", LineStyle::kLine); - TimeSeries time_series_play_time("Playout delay", LineStyle::kLine); - TimeSeries time_series_target_time("Target delay", LineStyle::kLine, - PointStyle::kHighlight); - - for (const auto& data : arrival_delay_ms) { - const float x = config_.GetCallTimeSec(data.first * 1000); // ms to us. - const float y = data.second; - time_series_packet_arrival.points.emplace_back(TimeSeriesPoint(x, y)); - } - for (const auto& data : corrected_arrival_delay_ms) { - const float x = config_.GetCallTimeSec(data.first * 1000); // ms to us. - const float y = data.second; - time_series_relative_packet_arrival.points.emplace_back( - TimeSeriesPoint(x, y)); - } - for (const auto& data : playout_delay_ms) { - const float x = config_.GetCallTimeSec(data.first * 1000); // ms to us. - const float y = data.second; - time_series_play_time.points.emplace_back(TimeSeriesPoint(x, y)); - } - for (const auto& data : target_delay_ms) { - const float x = config_.GetCallTimeSec(data.first * 1000); // ms to us. 
- const float y = data.second; - time_series_target_time.points.emplace_back(TimeSeriesPoint(x, y)); - } - - plot->AppendTimeSeries(std::move(time_series_packet_arrival)); - plot->AppendTimeSeries(std::move(time_series_relative_packet_arrival)); - plot->AppendTimeSeries(std::move(time_series_play_time)); - plot->AppendTimeSeries(std::move(time_series_target_time)); - - plot->SetXAxis(config_.CallBeginTimeSec(), config_.CallEndTimeSec(), - "Time (s)", kLeftMargin, kRightMargin); - plot->SetSuggestedYAxis(0, 1, "Relative delay (ms)", kBottomMargin, - kTopMargin); - plot->SetTitle("NetEq timing for " + GetStreamName(kIncomingPacket, ssrc)); -} - -template <typename NetEqStatsType> -void EventLogAnalyzer::CreateNetEqStatsGraphInternal( - const NetEqStatsGetterMap& neteq_stats, - rtc::FunctionView<const std::vector<std::pair<int64_t, NetEqStatsType>>*( - const test::NetEqStatsGetter*)> data_extractor, - rtc::FunctionView<float(const NetEqStatsType&)> stats_extractor, - const std::string& plot_name, - Plot* plot) const { - std::map<uint32_t, TimeSeries> time_series; - - for (const auto& st : neteq_stats) { - const uint32_t ssrc = st.first; - const std::vector<std::pair<int64_t, NetEqStatsType>>* data_vector = - data_extractor(st.second.get()); - for (const auto& data : *data_vector) { - const float time = - config_.GetCallTimeSec(data.first * 1000); // ms to us. - const float value = stats_extractor(data.second); - time_series[ssrc].points.emplace_back(TimeSeriesPoint(time, value)); - } - } - - for (auto& series : time_series) { - series.second.label = GetStreamName(kIncomingPacket, series.first); - series.second.line_style = LineStyle::kLine; - plot->AppendTimeSeries(std::move(series.second)); - } - - plot->SetXAxis(config_.CallBeginTimeSec(), config_.CallEndTimeSec(), - "Time (s)", kLeftMargin, kRightMargin); - plot->SetSuggestedYAxis(0, 1, plot_name, kBottomMargin, kTopMargin); - plot->SetTitle(plot_name); -} - -void EventLogAnalyzer::CreateNetEqNetworkStatsGraph( - const NetEqStatsGetterMap& neteq_stats, - rtc::FunctionView<float(const NetEqNetworkStatistics&)> stats_extractor, - const std::string& plot_name, - Plot* plot) const { - CreateNetEqStatsGraphInternal<NetEqNetworkStatistics>( - neteq_stats, - [](const test::NetEqStatsGetter* stats_getter) { - return stats_getter->stats(); - }, - stats_extractor, plot_name, plot); -} - -void EventLogAnalyzer::CreateNetEqLifetimeStatsGraph( - const NetEqStatsGetterMap& neteq_stats, - rtc::FunctionView<float(const NetEqLifetimeStatistics&)> stats_extractor, - const std::string& plot_name, - Plot* plot) const { - CreateNetEqStatsGraphInternal<NetEqLifetimeStatistics>( - neteq_stats, - [](const test::NetEqStatsGetter* stats_getter) { - return stats_getter->lifetime_stats(); - }, - stats_extractor, plot_name, plot); -} - void EventLogAnalyzer::CreateIceCandidatePairConfigGraph(Plot* plot) { std::map<uint32_t, TimeSeries> configs_by_cp_id; for (const auto& config : parsed_log_.ice_candidate_pair_configs()) { @@ -2326,181 +1775,4 @@ void EventLogAnalyzer::CreateDtlsWritableStateGraph(Plot* plot) { plot->SetTitle("DTLS Writable State"); } -void EventLogAnalyzer::PrintNotifications(FILE* file) { - fprintf(file, "========== TRIAGE NOTIFICATIONS ==========\n"); - for (const auto& alert : incoming_rtp_recv_time_gaps_) { - fprintf(file, "%3.3lf s : %s\n", alert.Time(), alert.ToString().c_str()); - } - for (const auto& alert : incoming_rtcp_recv_time_gaps_) { - fprintf(file, "%3.3lf s : %s\n", alert.Time(), alert.ToString().c_str()); - } - for (const auto& alert 
: outgoing_rtp_send_time_gaps_) { - fprintf(file, "%3.3lf s : %s\n", alert.Time(), alert.ToString().c_str()); - } - for (const auto& alert : outgoing_rtcp_send_time_gaps_) { - fprintf(file, "%3.3lf s : %s\n", alert.Time(), alert.ToString().c_str()); - } - for (const auto& alert : incoming_seq_num_jumps_) { - fprintf(file, "%3.3lf s : %s\n", alert.Time(), alert.ToString().c_str()); - } - for (const auto& alert : incoming_capture_time_jumps_) { - fprintf(file, "%3.3lf s : %s\n", alert.Time(), alert.ToString().c_str()); - } - for (const auto& alert : outgoing_seq_num_jumps_) { - fprintf(file, "%3.3lf s : %s\n", alert.Time(), alert.ToString().c_str()); - } - for (const auto& alert : outgoing_capture_time_jumps_) { - fprintf(file, "%3.3lf s : %s\n", alert.Time(), alert.ToString().c_str()); - } - for (const auto& alert : outgoing_high_loss_alerts_) { - fprintf(file, " : %s\n", alert.ToString().c_str()); - } - fprintf(file, "========== END TRIAGE NOTIFICATIONS ==========\n"); -} - -void EventLogAnalyzer::CreateStreamGapAlerts(PacketDirection direction) { - // With 100 packets/s (~800kbps), false positives would require 10 s without - // data. - constexpr int64_t kMaxSeqNumJump = 1000; - // With a 90 kHz clock, false positives would require 10 s without data. - constexpr int64_t kMaxCaptureTimeJump = 900000; - - int64_t end_time_us = log_segments_.empty() - ? std::numeric_limits<int64_t>::max() - : log_segments_.front().second; - - SeqNumUnwrapper<uint16_t> seq_num_unwrapper; - absl::optional<int64_t> last_seq_num; - SeqNumUnwrapper<uint32_t> capture_time_unwrapper; - absl::optional<int64_t> last_capture_time; - // Check for gaps in sequence numbers and capture timestamps. - for (const auto& stream : parsed_log_.rtp_packets_by_ssrc(direction)) { - for (const auto& packet : stream.packet_view) { - if (packet.log_time_us() > end_time_us) { - // Only process the first (LOG_START, LOG_END) segment. - break; - } - - int64_t seq_num = seq_num_unwrapper.Unwrap(packet.header.sequenceNumber); - if (last_seq_num.has_value() && - std::abs(seq_num - last_seq_num.value()) > kMaxSeqNumJump) { - Alert_SeqNumJump(direction, - config_.GetCallTimeSec(packet.log_time_us()), - packet.header.ssrc); - } - last_seq_num.emplace(seq_num); - - int64_t capture_time = - capture_time_unwrapper.Unwrap(packet.header.timestamp); - if (last_capture_time.has_value() && - std::abs(capture_time - last_capture_time.value()) > - kMaxCaptureTimeJump) { - Alert_CaptureTimeJump(direction, - config_.GetCallTimeSec(packet.log_time_us()), - packet.header.ssrc); - } - last_capture_time.emplace(capture_time); - } - } -} - -void EventLogAnalyzer::CreateTransmissionGapAlerts(PacketDirection direction) { - constexpr int64_t kMaxRtpTransmissionGap = 500000; - constexpr int64_t kMaxRtcpTransmissionGap = 2000000; - int64_t end_time_us = log_segments_.empty() - ? std::numeric_limits<int64_t>::max() - : log_segments_.front().second; - - // TODO(terelius): The parser could provide a list of all packets, ordered - // by time, for each direction. - std::multimap<int64_t, const LoggedRtpPacket*> rtp_in_direction; - for (const auto& stream : parsed_log_.rtp_packets_by_ssrc(direction)) { - for (const LoggedRtpPacket& rtp_packet : stream.packet_view) - rtp_in_direction.emplace(rtp_packet.log_time_us(), &rtp_packet); - } - absl::optional<int64_t> last_rtp_time; - for (const auto& kv : rtp_in_direction) { - int64_t timestamp = kv.first; - if (timestamp > end_time_us) { - // Only process the first (LOG_START, LOG_END) segment. 
- break; - } - int64_t duration = timestamp - last_rtp_time.value_or(0); - if (last_rtp_time.has_value() && duration > kMaxRtpTransmissionGap) { - // No packet sent/received for more than 500 ms. - Alert_RtpLogTimeGap(direction, config_.GetCallTimeSec(timestamp), - duration / 1000); - } - last_rtp_time.emplace(timestamp); - } - - absl::optional<int64_t> last_rtcp_time; - if (direction == kIncomingPacket) { - for (const auto& rtcp : parsed_log_.incoming_rtcp_packets()) { - if (rtcp.log_time_us() > end_time_us) { - // Only process the first (LOG_START, LOG_END) segment. - break; - } - int64_t duration = rtcp.log_time_us() - last_rtcp_time.value_or(0); - if (last_rtcp_time.has_value() && duration > kMaxRtcpTransmissionGap) { - // No feedback sent/received for more than 2000 ms. - Alert_RtcpLogTimeGap(direction, - config_.GetCallTimeSec(rtcp.log_time_us()), - duration / 1000); - } - last_rtcp_time.emplace(rtcp.log_time_us()); - } - } else { - for (const auto& rtcp : parsed_log_.outgoing_rtcp_packets()) { - if (rtcp.log_time_us() > end_time_us) { - // Only process the first (LOG_START, LOG_END) segment. - break; - } - int64_t duration = rtcp.log_time_us() - last_rtcp_time.value_or(0); - if (last_rtcp_time.has_value() && duration > kMaxRtcpTransmissionGap) { - // No feedback sent/received for more than 2000 ms. - Alert_RtcpLogTimeGap(direction, - config_.GetCallTimeSec(rtcp.log_time_us()), - duration / 1000); - } - last_rtcp_time.emplace(rtcp.log_time_us()); - } - } -} - -// TODO(terelius): Notifications could possibly be generated by the same code -// that produces the graphs. There is some code duplication that could be -// avoided, but that might be solved anyway when we move functionality from the -// analyzer to the parser. -void EventLogAnalyzer::CreateTriageNotifications() { - CreateStreamGapAlerts(kIncomingPacket); - CreateStreamGapAlerts(kOutgoingPacket); - CreateTransmissionGapAlerts(kIncomingPacket); - CreateTransmissionGapAlerts(kOutgoingPacket); - - int64_t end_time_us = log_segments_.empty() - ? std::numeric_limits<int64_t>::max() - : log_segments_.front().second; - - constexpr double kMaxLossFraction = 0.05; - // Loss feedback - int64_t total_lost_packets = 0; - int64_t total_expected_packets = 0; - for (auto& bwe_update : parsed_log_.bwe_loss_updates()) { - if (bwe_update.log_time_us() > end_time_us) { - // Only process the first (LOG_START, LOG_END) segment. 
- break; - } - int64_t lost_packets = static_cast<double>(bwe_update.fraction_lost) / 255 * - bwe_update.expected_packets; - total_lost_packets += lost_packets; - total_expected_packets += bwe_update.expected_packets; - } - double avg_outgoing_loss = - static_cast<double>(total_lost_packets) / total_expected_packets; - if (avg_outgoing_loss > kMaxLossFraction) { - Alert_OutgoingHighLoss(avg_outgoing_loss); - } -} - } // namespace webrtc diff --git a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyzer.h b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyzer.h index 1e091099590..4918cf48e1a 100644 --- a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyzer.h +++ b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyzer.h @@ -21,41 +21,18 @@ #include "logging/rtc_event_log/rtc_event_log_parser.h" #include "modules/audio_coding/neteq/tools/neteq_stats_getter.h" #include "rtc_base/strings/string_builder.h" +#include "rtc_tools/rtc_event_log_visualizer/analyzer_common.h" #include "rtc_tools/rtc_event_log_visualizer/plot_base.h" -#include "rtc_tools/rtc_event_log_visualizer/triage_notifications.h" namespace webrtc { -class AnalyzerConfig { - public: - float GetCallTimeSec(int64_t timestamp_us) const { - int64_t offset = normalize_time_ ? begin_time_ : 0; - return static_cast<float>(timestamp_us - offset) / 1000000; - } - - float CallBeginTimeSec() const { return GetCallTimeSec(begin_time_); } - - float CallEndTimeSec() const { return GetCallTimeSec(end_time_); } - - // Window and step size used for calculating moving averages, e.g. bitrate. - // The generated data points will be |step_| microseconds apart. - // Only events occurring at most |window_duration_| microseconds before the - // current data point will be part of the average. - int64_t window_duration_; - int64_t step_; - - // First and last events of the log. - int64_t begin_time_; - int64_t end_time_; - bool normalize_time_; -}; - class EventLogAnalyzer { public: // The EventLogAnalyzer keeps a reference to the ParsedRtcEventLogNew for the // duration of its lifetime. The ParsedRtcEventLogNew must not be destroyed or // modified while the EventLogAnalyzer is being used. 
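Callers of EventLogAnalyzer now pass an explicit AnalyzerConfig instead of a bare normalize_time flag. A minimal migration sketch, mirroring the values the patch hard-codes in main.cc further down (250 ms moving-average window, 10 ms step); parsed_log is assumed to be an already-parsed ParsedRtcEventLog.

    webrtc::AnalyzerConfig config;
    config.window_duration_ = 250000;  // Moving-average window, in microseconds.
    config.step_ = 10000;              // One data point every 10 ms.
    config.normalize_time_ = true;     // Report call time relative to the first event.
    config.begin_time_ = parsed_log.first_timestamp();
    config.end_time_ = parsed_log.last_timestamp();
    webrtc::EventLogAnalyzer analyzer(parsed_log, config);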
EventLogAnalyzer(const ParsedRtcEventLog& log, bool normalize_time); + EventLogAnalyzer(const ParsedRtcEventLog& log, const AnalyzerConfig& config); void CreatePacketGraph(PacketDirection direction, Plot* plot); @@ -102,32 +79,6 @@ class EventLogAnalyzer { std::string yaxis_label, Plot* plot); - void CreateAudioEncoderTargetBitrateGraph(Plot* plot); - void CreateAudioEncoderFrameLengthGraph(Plot* plot); - void CreateAudioEncoderPacketLossGraph(Plot* plot); - void CreateAudioEncoderEnableFecGraph(Plot* plot); - void CreateAudioEncoderEnableDtxGraph(Plot* plot); - void CreateAudioEncoderNumChannelsGraph(Plot* plot); - - using NetEqStatsGetterMap = - std::map<uint32_t, std::unique_ptr<test::NetEqStatsGetter>>; - NetEqStatsGetterMap SimulateNetEq(const std::string& replacement_file_name, - int file_sample_rate_hz) const; - - void CreateAudioJitterBufferGraph(uint32_t ssrc, - const test::NetEqStatsGetter* stats_getter, - Plot* plot) const; - void CreateNetEqNetworkStatsGraph( - const NetEqStatsGetterMap& neteq_stats_getters, - rtc::FunctionView<float(const NetEqNetworkStatistics&)> stats_extractor, - const std::string& plot_name, - Plot* plot) const; - void CreateNetEqLifetimeStatsGraph( - const NetEqStatsGetterMap& neteq_stats_getters, - rtc::FunctionView<float(const NetEqLifetimeStatistics&)> stats_extractor, - const std::string& plot_name, - Plot* plot) const; - void CreateIceCandidatePairConfigGraph(Plot* plot); void CreateIceConnectivityCheckGraph(Plot* plot); @@ -138,145 +89,11 @@ class EventLogAnalyzer { void PrintNotifications(FILE* file); private: - struct LayerDescription { - LayerDescription(uint32_t ssrc, - uint8_t spatial_layer, - uint8_t temporal_layer) - : ssrc(ssrc), - spatial_layer(spatial_layer), - temporal_layer(temporal_layer) {} - bool operator<(const LayerDescription& other) const { - if (ssrc != other.ssrc) - return ssrc < other.ssrc; - if (spatial_layer != other.spatial_layer) - return spatial_layer < other.spatial_layer; - return temporal_layer < other.temporal_layer; - } - uint32_t ssrc; - uint8_t spatial_layer; - uint8_t temporal_layer; - }; - - bool IsRtxSsrc(PacketDirection direction, uint32_t ssrc) const { - if (direction == kIncomingPacket) { - return parsed_log_.incoming_rtx_ssrcs().find(ssrc) != - parsed_log_.incoming_rtx_ssrcs().end(); - } else { - return parsed_log_.outgoing_rtx_ssrcs().find(ssrc) != - parsed_log_.outgoing_rtx_ssrcs().end(); - } - } - - bool IsVideoSsrc(PacketDirection direction, uint32_t ssrc) const { - if (direction == kIncomingPacket) { - return parsed_log_.incoming_video_ssrcs().find(ssrc) != - parsed_log_.incoming_video_ssrcs().end(); - } else { - return parsed_log_.outgoing_video_ssrcs().find(ssrc) != - parsed_log_.outgoing_video_ssrcs().end(); - } - } - - bool IsAudioSsrc(PacketDirection direction, uint32_t ssrc) const { - if (direction == kIncomingPacket) { - return parsed_log_.incoming_audio_ssrcs().find(ssrc) != - parsed_log_.incoming_audio_ssrcs().end(); - } else { - return parsed_log_.outgoing_audio_ssrcs().find(ssrc) != - parsed_log_.outgoing_audio_ssrcs().end(); - } - } - - template <typename NetEqStatsType> - void CreateNetEqStatsGraphInternal( - const NetEqStatsGetterMap& neteq_stats, - rtc::FunctionView<const std::vector<std::pair<int64_t, NetEqStatsType>>*( - const test::NetEqStatsGetter*)> data_extractor, - rtc::FunctionView<float(const NetEqStatsType&)> stats_extractor, - const std::string& plot_name, - Plot* plot) const; - template <typename IterableType> void CreateAccumulatedPacketsTimeSeries(Plot* plot, const 
IterableType& packets, const std::string& label); - void CreateStreamGapAlerts(PacketDirection direction); - void CreateTransmissionGapAlerts(PacketDirection direction); - - std::string GetStreamName(PacketDirection direction, uint32_t ssrc) const { - char buffer[200]; - rtc::SimpleStringBuilder name(buffer); - if (IsAudioSsrc(direction, ssrc)) { - name << "Audio "; - } else if (IsVideoSsrc(direction, ssrc)) { - name << "Video "; - } else { - name << "Unknown "; - } - if (IsRtxSsrc(direction, ssrc)) { - name << "RTX "; - } - if (direction == kIncomingPacket) - name << "(In) "; - else - name << "(Out) "; - name << "SSRC " << ssrc; - return name.str(); - } - - std::string GetLayerName(LayerDescription layer) const { - char buffer[100]; - rtc::SimpleStringBuilder name(buffer); - name << "SSRC " << layer.ssrc << " sl " << layer.spatial_layer << ", tl " - << layer.temporal_layer; - return name.str(); - } - - void Alert_RtpLogTimeGap(PacketDirection direction, - float time_seconds, - int64_t duration) { - if (direction == kIncomingPacket) { - incoming_rtp_recv_time_gaps_.emplace_back(time_seconds, duration); - } else { - outgoing_rtp_send_time_gaps_.emplace_back(time_seconds, duration); - } - } - - void Alert_RtcpLogTimeGap(PacketDirection direction, - float time_seconds, - int64_t duration) { - if (direction == kIncomingPacket) { - incoming_rtcp_recv_time_gaps_.emplace_back(time_seconds, duration); - } else { - outgoing_rtcp_send_time_gaps_.emplace_back(time_seconds, duration); - } - } - - void Alert_SeqNumJump(PacketDirection direction, - float time_seconds, - uint32_t ssrc) { - if (direction == kIncomingPacket) { - incoming_seq_num_jumps_.emplace_back(time_seconds, ssrc); - } else { - outgoing_seq_num_jumps_.emplace_back(time_seconds, ssrc); - } - } - - void Alert_CaptureTimeJump(PacketDirection direction, - float time_seconds, - uint32_t ssrc) { - if (direction == kIncomingPacket) { - incoming_capture_time_jumps_.emplace_back(time_seconds, ssrc); - } else { - outgoing_capture_time_jumps_.emplace_back(time_seconds, ssrc); - } - } - - void Alert_OutgoingHighLoss(double avg_loss_fraction) { - outgoing_high_loss_alerts_.emplace_back(avg_loss_fraction); - } - std::string GetCandidatePairLogDescriptionFromId(uint32_t candidate_pair_id); const ParsedRtcEventLog& parsed_log_; @@ -285,20 +102,6 @@ class EventLogAnalyzer { // If left empty, all SSRCs will be considered relevant. std::vector<uint32_t> desired_ssrc_; - // Stores the timestamps for all log segments, in the form of associated start - // and end events. 
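The SSRC classification and naming helpers removed here reappear as free functions in analyzer_common (see the new analyzer_common.cc below), taking the parsed log explicitly so the alerts and audio modules can share them. A short usage sketch, not part of the patch, with a made-up SSRC:

    uint32_t ssrc = 12345;  // Hypothetical SSRC taken from the parsed log.
    if (webrtc::IsVideoSsrc(parsed_log, webrtc::kOutgoingPacket, ssrc)) {
      // Yields e.g. "Video (Out) SSRC 12345", suitable as a TimeSeries label.
      std::string label =
          webrtc::GetStreamName(parsed_log, webrtc::kOutgoingPacket, ssrc);
    }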
- std::vector<std::pair<int64_t, int64_t>> log_segments_; - - std::vector<IncomingRtpReceiveTimeGap> incoming_rtp_recv_time_gaps_; - std::vector<IncomingRtcpReceiveTimeGap> incoming_rtcp_recv_time_gaps_; - std::vector<OutgoingRtpSendTimeGap> outgoing_rtp_send_time_gaps_; - std::vector<OutgoingRtcpSendTimeGap> outgoing_rtcp_send_time_gaps_; - std::vector<IncomingSeqNumJump> incoming_seq_num_jumps_; - std::vector<IncomingCaptureTimeJump> incoming_capture_time_jumps_; - std::vector<OutgoingSeqNoJump> outgoing_seq_num_jumps_; - std::vector<OutgoingCaptureTimeJump> outgoing_capture_time_jumps_; - std::vector<OutgoingHighLoss> outgoing_high_loss_alerts_; - std::map<uint32_t, std::string> candidate_pair_desc_by_id_; AnalyzerConfig config_; diff --git a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyzer_common.cc b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyzer_common.cc new file mode 100644 index 00000000000..3d3ce5a4aca --- /dev/null +++ b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyzer_common.cc @@ -0,0 +1,83 @@ + +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "rtc_tools/rtc_event_log_visualizer/analyzer_common.h" + +namespace webrtc { + +bool IsRtxSsrc(const ParsedRtcEventLog& parsed_log, + PacketDirection direction, + uint32_t ssrc) { + if (direction == kIncomingPacket) { + return parsed_log.incoming_rtx_ssrcs().find(ssrc) != + parsed_log.incoming_rtx_ssrcs().end(); + } else { + return parsed_log.outgoing_rtx_ssrcs().find(ssrc) != + parsed_log.outgoing_rtx_ssrcs().end(); + } +} + +bool IsVideoSsrc(const ParsedRtcEventLog& parsed_log, + PacketDirection direction, + uint32_t ssrc) { + if (direction == kIncomingPacket) { + return parsed_log.incoming_video_ssrcs().find(ssrc) != + parsed_log.incoming_video_ssrcs().end(); + } else { + return parsed_log.outgoing_video_ssrcs().find(ssrc) != + parsed_log.outgoing_video_ssrcs().end(); + } +} + +bool IsAudioSsrc(const ParsedRtcEventLog& parsed_log, + PacketDirection direction, + uint32_t ssrc) { + if (direction == kIncomingPacket) { + return parsed_log.incoming_audio_ssrcs().find(ssrc) != + parsed_log.incoming_audio_ssrcs().end(); + } else { + return parsed_log.outgoing_audio_ssrcs().find(ssrc) != + parsed_log.outgoing_audio_ssrcs().end(); + } +} + +std::string GetStreamName(const ParsedRtcEventLog& parsed_log, + PacketDirection direction, + uint32_t ssrc) { + char buffer[200]; + rtc::SimpleStringBuilder name(buffer); + if (IsAudioSsrc(parsed_log, direction, ssrc)) { + name << "Audio "; + } else if (IsVideoSsrc(parsed_log, direction, ssrc)) { + name << "Video "; + } else { + name << "Unknown "; + } + if (IsRtxSsrc(parsed_log, direction, ssrc)) { + name << "RTX "; + } + if (direction == kIncomingPacket) + name << "(In) "; + else + name << "(Out) "; + name << "SSRC " << ssrc; + return name.str(); +} + +std::string GetLayerName(LayerDescription layer) { + char buffer[100]; + rtc::SimpleStringBuilder name(buffer); + name << "SSRC " << layer.ssrc << " sl " << layer.spatial_layer << ", tl " + << layer.temporal_layer; + return name.str(); +} + +} // namespace webrtc diff --git 
a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyzer_common.h b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyzer_common.h new file mode 100644 index 00000000000..d5776acf62e --- /dev/null +++ b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/analyzer_common.h @@ -0,0 +1,182 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ANALYZER_COMMON_H_ +#define RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ANALYZER_COMMON_H_ + +#include <cstdint> +#include <string> + +#include "absl/types/optional.h" +#include "api/function_view.h" +#include "logging/rtc_event_log/rtc_event_log_parser.h" +#include "rtc_tools/rtc_event_log_visualizer/plot_base.h" + +namespace webrtc { + +constexpr int kNumMicrosecsPerSec = 1000000; +constexpr float kLeftMargin = 0.01f; +constexpr float kRightMargin = 0.02f; +constexpr float kBottomMargin = 0.02f; +constexpr float kTopMargin = 0.05f; + +class AnalyzerConfig { + public: + float GetCallTimeSec(int64_t timestamp_us) const { + int64_t offset = normalize_time_ ? begin_time_ : 0; + return static_cast<float>(timestamp_us - offset) / 1000000; + } + + float CallBeginTimeSec() const { return GetCallTimeSec(begin_time_); } + + float CallEndTimeSec() const { return GetCallTimeSec(end_time_); } + + // Window and step size used for calculating moving averages, e.g. bitrate. + // The generated data points will be |step_| microseconds apart. + // Only events occurring at most |window_duration_| microseconds before the + // current data point will be part of the average. + int64_t window_duration_; + int64_t step_; + + // First and last events of the log. + int64_t begin_time_; + int64_t end_time_; + bool normalize_time_; +}; + +struct LayerDescription { + LayerDescription(uint32_t ssrc, uint8_t spatial_layer, uint8_t temporal_layer) + : ssrc(ssrc), + spatial_layer(spatial_layer), + temporal_layer(temporal_layer) {} + bool operator<(const LayerDescription& other) const { + if (ssrc != other.ssrc) + return ssrc < other.ssrc; + if (spatial_layer != other.spatial_layer) + return spatial_layer < other.spatial_layer; + return temporal_layer < other.temporal_layer; + } + uint32_t ssrc; + uint8_t spatial_layer; + uint8_t temporal_layer; +}; + +bool IsRtxSsrc(const ParsedRtcEventLog& parsed_log, + PacketDirection direction, + uint32_t ssrc); +bool IsVideoSsrc(const ParsedRtcEventLog& parsed_log, + PacketDirection direction, + uint32_t ssrc); +bool IsAudioSsrc(const ParsedRtcEventLog& parsed_log, + PacketDirection direction, + uint32_t ssrc); + +std::string GetStreamName(const ParsedRtcEventLog& parsed_log, + PacketDirection direction, + uint32_t ssrc); +std::string GetLayerName(LayerDescription layer); + +// For each element in data_view, use |f()| to extract a y-coordinate and +// store the result in a TimeSeries. 
+template <typename DataType, typename IterableType> +void ProcessPoints(rtc::FunctionView<float(const DataType&)> fx, + rtc::FunctionView<absl::optional<float>(const DataType&)> fy, + const IterableType& data_view, + TimeSeries* result) { + for (size_t i = 0; i < data_view.size(); i++) { + const DataType& elem = data_view[i]; + float x = fx(elem); + absl::optional<float> y = fy(elem); + if (y) + result->points.emplace_back(x, *y); + } +} + +// For each pair of adjacent elements in |data|, use |f()| to extract a +// y-coordinate and store the result in a TimeSeries. Note that the x-coordinate +// will be the time of the second element in the pair. +template <typename DataType, typename ResultType, typename IterableType> +void ProcessPairs( + rtc::FunctionView<float(const DataType&)> fx, + rtc::FunctionView<absl::optional<ResultType>(const DataType&, + const DataType&)> fy, + const IterableType& data, + TimeSeries* result) { + for (size_t i = 1; i < data.size(); i++) { + float x = fx(data[i]); + absl::optional<ResultType> y = fy(data[i - 1], data[i]); + if (y) + result->points.emplace_back(x, static_cast<float>(*y)); + } +} + +// For each pair of adjacent elements in |data|, use |f()| to extract a +// y-coordinate and store the result in a TimeSeries. Note that the x-coordinate +// will be the time of the second element in the pair. +template <typename DataType, typename ResultType, typename IterableType> +void AccumulatePairs( + rtc::FunctionView<float(const DataType&)> fx, + rtc::FunctionView<absl::optional<ResultType>(const DataType&, + const DataType&)> fy, + const IterableType& data, + TimeSeries* result) { + ResultType sum = 0; + for (size_t i = 1; i < data.size(); i++) { + float x = fx(data[i]); + absl::optional<ResultType> y = fy(data[i - 1], data[i]); + if (y) { + sum += *y; + result->points.emplace_back(x, static_cast<float>(sum)); + } + } +} + +// Calculates a moving average of |data| and stores the result in a TimeSeries. +// A data point is generated every |step| microseconds from |begin_time| +// to |end_time|. The value of each data point is the average of the data +// during the preceding |window_duration_us| microseconds. 
+template <typename DataType, typename ResultType, typename IterableType> +void MovingAverage( + rtc::FunctionView<absl::optional<ResultType>(const DataType&)> fy, + const IterableType& data_view, + AnalyzerConfig config, + TimeSeries* result) { + size_t window_index_begin = 0; + size_t window_index_end = 0; + ResultType sum_in_window = 0; + + for (int64_t t = config.begin_time_; t < config.end_time_ + config.step_; + t += config.step_) { + while (window_index_end < data_view.size() && + data_view[window_index_end].log_time_us() < t) { + absl::optional<ResultType> value = fy(data_view[window_index_end]); + if (value) + sum_in_window += *value; + ++window_index_end; + } + while (window_index_begin < data_view.size() && + data_view[window_index_begin].log_time_us() < + t - config.window_duration_) { + absl::optional<ResultType> value = fy(data_view[window_index_begin]); + if (value) + sum_in_window -= *value; + ++window_index_begin; + } + float window_duration_s = + static_cast<float>(config.window_duration_) / kNumMicrosecsPerSec; + float x = config.GetCallTimeSec(t); + float y = sum_in_window / window_duration_s; + result->points.emplace_back(x, y); + } +} + +} // namespace webrtc + +#endif // RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ANALYZER_COMMON_H_ diff --git a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/main.cc b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/main.cc index eb36b2679ee..2563338e1a3 100644 --- a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/main.cc +++ b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/main.cc @@ -30,10 +30,10 @@ #include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_tools/rtc_event_log_visualizer/alerts.h" +#include "rtc_tools/rtc_event_log_visualizer/analyze_audio.h" #include "rtc_tools/rtc_event_log_visualizer/analyzer.h" #include "rtc_tools/rtc_event_log_visualizer/plot_base.h" -#include "rtc_tools/rtc_event_log_visualizer/plot_protobuf.h" -#include "rtc_tools/rtc_event_log_visualizer/plot_python.h" #include "system_wrappers/include/field_trial.h" #include "test/field_trial.h" #include "test/testsupport/file_utils.h" @@ -77,7 +77,7 @@ ABSL_FLAG(bool, ABSL_FLAG(bool, print_triage_alerts, - false, + true, "Print triage alerts, i.e. a list of potential problems."); ABSL_FLAG(bool, @@ -194,9 +194,9 @@ int main(int argc, char* argv[]) { "A tool for visualizing WebRTC event logs.\n" "Example usage:\n" "./event_log_visualizer <logfile> | python\n"); - absl::FlagsUsageConfig config; - config.contains_help_flags = &ContainsHelppackageFlags; - absl::SetFlagsUsageConfig(config); + absl::FlagsUsageConfig flag_config; + flag_config.contains_help_flags = &ContainsHelppackageFlags; + absl::SetFlagsUsageConfig(flag_config); std::vector<char*> args = absl::ParseCommandLine(argc, argv); // Print RTC_LOG warnings and errors even in release builds. 
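The ProcessPoints, ProcessPairs, AccumulatePairs and MovingAverage templates moved into analyzer_common.h above used to be private helpers of EventLogAnalyzer; exposing them is what lets the new analyze_audio.cc build its plots. A minimal ProcessPoints sketch, modeled on the audio encoder target-bitrate graph this patch moves out of analyzer.cc (parsed_log, config and plot are assumed to exist as in that code); it is an illustrative fragment, not part of the patch.

    TimeSeries series("Audio encoder target bitrate", LineStyle::kLine,
                      PointStyle::kHighlight);
    auto to_call_time = [&](const LoggedAudioNetworkAdaptationEvent& ev) {
      return config.GetCallTimeSec(ev.log_time_us());
    };
    auto target_bitrate = [](const LoggedAudioNetworkAdaptationEvent& ev)
        -> absl::optional<float> {
      if (ev.config.bitrate_bps)
        return static_cast<float>(*ev.config.bitrate_bps);
      return absl::nullopt;
    };
    ProcessPoints<LoggedAudioNetworkAdaptationEvent>(
        to_call_time, target_bitrate,
        parsed_log.audio_network_adaptation_events(), &series);
    plot->AppendTimeSeries(std::move(series));

MovingAverage() follows the same pattern but additionally relies on the window_duration_ and step_ fields of the AnalyzerConfig, since it emits one averaged point per step_ microseconds.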
@@ -261,16 +261,22 @@ int main(int argc, char* argv[]) { } } - webrtc::EventLogAnalyzer analyzer(parsed_log, - absl::GetFlag(FLAGS_normalize_time)); - std::unique_ptr<webrtc::PlotCollection> collection; - if (absl::GetFlag(FLAGS_protobuf_output)) { - collection.reset(new webrtc::ProtobufPlotCollection()); - } else { - collection.reset( - new webrtc::PythonPlotCollection(absl::GetFlag(FLAGS_shared_xaxis))); + webrtc::AnalyzerConfig config; + config.window_duration_ = 250000; + config.step_ = 10000; + config.normalize_time_ = absl::GetFlag(FLAGS_normalize_time); + config.begin_time_ = parsed_log.first_timestamp(); + config.end_time_ = parsed_log.last_timestamp(); + if (config.end_time_ < config.begin_time_) { + RTC_LOG(LS_WARNING) << "Log end time " << config.end_time_ + << " not after begin time " << config.begin_time_ + << ". Nothing to analyze. Is the log broken?"; + return -1; } + webrtc::EventLogAnalyzer analyzer(parsed_log, config); + webrtc::PlotCollection collection; + PlotMap plots; plots.RegisterPlot("incoming_packet_sizes", [&](Plot* plot) { analyzer.CreatePacketGraph(webrtc::kIncomingPacket, plot); @@ -423,22 +429,22 @@ int main(int argc, char* argv[]) { plots.RegisterPlot("pacer_delay", [&](Plot* plot) { analyzer.CreatePacerDelayGraph(plot); }); plots.RegisterPlot("audio_encoder_bitrate", [&](Plot* plot) { - analyzer.CreateAudioEncoderTargetBitrateGraph(plot); + CreateAudioEncoderTargetBitrateGraph(parsed_log, config, plot); }); plots.RegisterPlot("audio_encoder_frame_length", [&](Plot* plot) { - analyzer.CreateAudioEncoderFrameLengthGraph(plot); + CreateAudioEncoderFrameLengthGraph(parsed_log, config, plot); }); plots.RegisterPlot("audio_encoder_packet_loss", [&](Plot* plot) { - analyzer.CreateAudioEncoderPacketLossGraph(plot); + CreateAudioEncoderPacketLossGraph(parsed_log, config, plot); }); plots.RegisterPlot("audio_encoder_fec", [&](Plot* plot) { - analyzer.CreateAudioEncoderEnableFecGraph(plot); + CreateAudioEncoderEnableFecGraph(parsed_log, config, plot); }); plots.RegisterPlot("audio_encoder_dtx", [&](Plot* plot) { - analyzer.CreateAudioEncoderEnableDtxGraph(plot); + CreateAudioEncoderEnableDtxGraph(parsed_log, config, plot); }); plots.RegisterPlot("audio_encoder_num_channels", [&](Plot* plot) { - analyzer.CreateAudioEncoderNumChannelsGraph(plot); + CreateAudioEncoderNumChannelsGraph(parsed_log, config, plot); }); plots.RegisterPlot("ice_candidate_pair_config", [&](Plot* plot) { @@ -461,14 +467,14 @@ int main(int argc, char* argv[]) { wav_path = webrtc::test::ResourcePath( "audio_processing/conversational_speech/EN_script2_F_sp2_B1", "wav"); } - absl::optional<webrtc::EventLogAnalyzer::NetEqStatsGetterMap> neteq_stats; + absl::optional<webrtc::NetEqStatsGetterMap> neteq_stats; plots.RegisterPlot("simulated_neteq_expand_rate", [&](Plot* plot) { if (!neteq_stats) { - neteq_stats = analyzer.SimulateNetEq(wav_path, 48000); + neteq_stats = webrtc::SimulateNetEq(parsed_log, config, wav_path, 48000); } - analyzer.CreateNetEqNetworkStatsGraph( - *neteq_stats, + webrtc::CreateNetEqNetworkStatsGraph( + parsed_log, config, *neteq_stats, [](const webrtc::NetEqNetworkStatistics& stats) { return stats.expand_rate / 16384.f; }, @@ -477,10 +483,10 @@ int main(int argc, char* argv[]) { plots.RegisterPlot("simulated_neteq_speech_expand_rate", [&](Plot* plot) { if (!neteq_stats) { - neteq_stats = analyzer.SimulateNetEq(wav_path, 48000); + neteq_stats = webrtc::SimulateNetEq(parsed_log, config, wav_path, 48000); } - analyzer.CreateNetEqNetworkStatsGraph( - *neteq_stats, + 
webrtc::CreateNetEqNetworkStatsGraph( + parsed_log, config, *neteq_stats, [](const webrtc::NetEqNetworkStatistics& stats) { return stats.speech_expand_rate / 16384.f; }, @@ -489,10 +495,10 @@ int main(int argc, char* argv[]) { plots.RegisterPlot("simulated_neteq_accelerate_rate", [&](Plot* plot) { if (!neteq_stats) { - neteq_stats = analyzer.SimulateNetEq(wav_path, 48000); + neteq_stats = webrtc::SimulateNetEq(parsed_log, config, wav_path, 48000); } - analyzer.CreateNetEqNetworkStatsGraph( - *neteq_stats, + webrtc::CreateNetEqNetworkStatsGraph( + parsed_log, config, *neteq_stats, [](const webrtc::NetEqNetworkStatistics& stats) { return stats.accelerate_rate / 16384.f; }, @@ -501,10 +507,10 @@ int main(int argc, char* argv[]) { plots.RegisterPlot("simulated_neteq_preemptive_rate", [&](Plot* plot) { if (!neteq_stats) { - neteq_stats = analyzer.SimulateNetEq(wav_path, 48000); + neteq_stats = webrtc::SimulateNetEq(parsed_log, config, wav_path, 48000); } - analyzer.CreateNetEqNetworkStatsGraph( - *neteq_stats, + webrtc::CreateNetEqNetworkStatsGraph( + parsed_log, config, *neteq_stats, [](const webrtc::NetEqNetworkStatistics& stats) { return stats.preemptive_rate / 16384.f; }, @@ -513,10 +519,10 @@ int main(int argc, char* argv[]) { plots.RegisterPlot("simulated_neteq_packet_loss_rate", [&](Plot* plot) { if (!neteq_stats) { - neteq_stats = analyzer.SimulateNetEq(wav_path, 48000); + neteq_stats = webrtc::SimulateNetEq(parsed_log, config, wav_path, 48000); } - analyzer.CreateNetEqNetworkStatsGraph( - *neteq_stats, + webrtc::CreateNetEqNetworkStatsGraph( + parsed_log, config, *neteq_stats, [](const webrtc::NetEqNetworkStatistics& stats) { return stats.packet_loss_rate / 16384.f; }, @@ -525,10 +531,10 @@ int main(int argc, char* argv[]) { plots.RegisterPlot("simulated_neteq_concealment_events", [&](Plot* plot) { if (!neteq_stats) { - neteq_stats = analyzer.SimulateNetEq(wav_path, 48000); + neteq_stats = webrtc::SimulateNetEq(parsed_log, config, wav_path, 48000); } - analyzer.CreateNetEqLifetimeStatsGraph( - *neteq_stats, + webrtc::CreateNetEqLifetimeStatsGraph( + parsed_log, config, *neteq_stats, [](const webrtc::NetEqLifetimeStatistics& stats) { return static_cast<float>(stats.concealment_events); }, @@ -537,10 +543,10 @@ int main(int argc, char* argv[]) { plots.RegisterPlot("simulated_neteq_preferred_buffer_size", [&](Plot* plot) { if (!neteq_stats) { - neteq_stats = analyzer.SimulateNetEq(wav_path, 48000); + neteq_stats = webrtc::SimulateNetEq(parsed_log, config, wav_path, 48000); } - analyzer.CreateNetEqNetworkStatsGraph( - *neteq_stats, + webrtc::CreateNetEqNetworkStatsGraph( + parsed_log, config, *neteq_stats, [](const webrtc::NetEqNetworkStatistics& stats) { return stats.preferred_buffer_size_ms; }, @@ -587,7 +593,7 @@ int main(int argc, char* argv[]) { for (const auto& plot : plots) { if (plot.enabled) { - Plot* output = collection->AppendNewPlot(); + Plot* output = collection.AppendNewPlot(); plot.plot_func(output); output->SetId(plot.label); } @@ -601,21 +607,28 @@ int main(int argc, char* argv[]) { if (absl::c_find(plot_flags, "simulated_neteq_jitter_buffer_delay") != plot_flags.end()) { if (!neteq_stats) { - neteq_stats = analyzer.SimulateNetEq(wav_path, 48000); + neteq_stats = webrtc::SimulateNetEq(parsed_log, config, wav_path, 48000); } - for (webrtc::EventLogAnalyzer::NetEqStatsGetterMap::const_iterator it = - neteq_stats->cbegin(); + for (webrtc::NetEqStatsGetterMap::const_iterator it = neteq_stats->cbegin(); it != neteq_stats->cend(); ++it) { - 
analyzer.CreateAudioJitterBufferGraph(it->first, it->second.get(), - collection->AppendNewPlot()); + webrtc::CreateAudioJitterBufferGraph(parsed_log, config, it->first, + it->second.get(), + collection.AppendNewPlot()); } } - collection->Draw(); + if (absl::GetFlag(FLAGS_protobuf_output)) { + webrtc::analytics::ChartCollection proto_charts; + collection.ExportProtobuf(&proto_charts); + std::cout << proto_charts.SerializeAsString(); + } else { + collection.PrintPythonCode(absl::GetFlag(FLAGS_shared_xaxis)); + } if (absl::GetFlag(FLAGS_print_triage_alerts)) { - analyzer.CreateTriageNotifications(); - analyzer.PrintNotifications(stderr); + webrtc::TriageHelper triage_alerts(config); + triage_alerts.AnalyzeLog(parsed_log); + triage_alerts.Print(stderr); } return 0; diff --git a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_base.cc b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_base.cc index dfcd26fed52..dce601a8329 100644 --- a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_base.cc +++ b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_base.cc @@ -11,6 +11,7 @@ #include "rtc_tools/rtc_event_log_visualizer/plot_base.h" #include <algorithm> +#include <memory> #include "rtc_base/checks.h" @@ -93,4 +94,232 @@ void Plot::AppendTimeSeriesIfNotEmpty(TimeSeries&& time_series) { } } +void Plot::PrintPythonCode() const { + // Write python commands to stdout. Intended program usage is + // ./event_log_visualizer event_log160330.dump | python + + if (!series_list_.empty()) { + printf("color_count = %zu\n", series_list_.size()); + printf( + "hls_colors = [(i*1.0/color_count, 0.25+i*0.5/color_count, 0.8) for i " + "in range(color_count)]\n"); + printf("colors = [colorsys.hls_to_rgb(*hls) for hls in hls_colors]\n"); + + for (size_t i = 0; i < series_list_.size(); i++) { + printf("\n# === Series: %s ===\n", series_list_[i].label.c_str()); + // List x coordinates + printf("x%zu = [", i); + if (!series_list_[i].points.empty()) + printf("%.3f", series_list_[i].points[0].x); + for (size_t j = 1; j < series_list_[i].points.size(); j++) + printf(", %.3f", series_list_[i].points[j].x); + printf("]\n"); + + // List y coordinates + printf("y%zu = [", i); + if (!series_list_[i].points.empty()) + printf("%G", series_list_[i].points[0].y); + for (size_t j = 1; j < series_list_[i].points.size(); j++) + printf(", %G", series_list_[i].points[j].y); + printf("]\n"); + + if (series_list_[i].line_style == LineStyle::kBar) { + // There is a plt.bar function that draws bar plots, + // but it is *way* too slow to be useful. + printf( + "plt.vlines(x%zu, map(lambda t: min(t,0), y%zu), map(lambda t: " + "max(t,0), y%zu), color=colors[%zu], " + "label=\'%s\')\n", + i, i, i, i, series_list_[i].label.c_str()); + if (series_list_[i].point_style == PointStyle::kHighlight) { + printf( + "plt.plot(x%zu, y%zu, color=colors[%zu], " + "marker='.', ls=' ')\n", + i, i, i); + } + } else if (series_list_[i].line_style == LineStyle::kLine) { + if (series_list_[i].point_style == PointStyle::kHighlight) { + printf( + "plt.plot(x%zu, y%zu, color=colors[%zu], label=\'%s\', " + "marker='.')\n", + i, i, i, series_list_[i].label.c_str()); + } else { + printf("plt.plot(x%zu, y%zu, color=colors[%zu], label=\'%s\')\n", i, + i, i, series_list_[i].label.c_str()); + } + } else if (series_list_[i].line_style == LineStyle::kStep) { + // Draw lines from (x[0],y[0]) to (x[1],y[0]) to (x[1],y[1]) and so on + // to illustrate the "steps". 
This can be expressed by duplicating all + // elements except the first in x and the last in y. + printf("xd%zu = [dup for v in x%zu for dup in [v, v]]\n", i, i); + printf("yd%zu = [dup for v in y%zu for dup in [v, v]]\n", i, i); + printf( + "plt.plot(xd%zu[1:], yd%zu[:-1], color=colors[%zu], " + "label=\'%s\')\n", + i, i, i, series_list_[i].label.c_str()); + if (series_list_[i].point_style == PointStyle::kHighlight) { + printf( + "plt.plot(x%zu, y%zu, color=colors[%zu], " + "marker='.', ls=' ')\n", + i, i, i); + } + } else if (series_list_[i].line_style == LineStyle::kNone) { + printf( + "plt.plot(x%zu, y%zu, color=colors[%zu], label=\'%s\', " + "marker='o', ls=' ')\n", + i, i, i, series_list_[i].label.c_str()); + } else { + printf("raise Exception(\"Unknown graph type\")\n"); + } + } + + // IntervalSeries + printf("interval_colors = ['#ff8e82','#5092fc','#c4ffc4','#aaaaaa']\n"); + RTC_CHECK_LE(interval_list_.size(), 4); + // To get the intervals to show up in the legend we have to create patches + // for them. + printf("legend_patches = []\n"); + for (size_t i = 0; i < interval_list_.size(); i++) { + // List intervals + printf("\n# === IntervalSeries: %s ===\n", + interval_list_[i].label.c_str()); + printf("ival%zu = [", i); + if (!interval_list_[i].intervals.empty()) { + printf("(%G, %G)", interval_list_[i].intervals[0].begin, + interval_list_[i].intervals[0].end); + } + for (size_t j = 1; j < interval_list_[i].intervals.size(); j++) { + printf(", (%G, %G)", interval_list_[i].intervals[j].begin, + interval_list_[i].intervals[j].end); + } + printf("]\n"); + + printf("for i in range(0, %zu):\n", interval_list_[i].intervals.size()); + if (interval_list_[i].orientation == IntervalSeries::kVertical) { + printf( + " plt.axhspan(ival%zu[i][0], ival%zu[i][1], " + "facecolor=interval_colors[%zu], " + "alpha=0.3)\n", + i, i, i); + } else { + printf( + " plt.axvspan(ival%zu[i][0], ival%zu[i][1], " + "facecolor=interval_colors[%zu], " + "alpha=0.3)\n", + i, i, i); + } + printf( + "legend_patches.append(mpatches.Patch(ec=\'black\', " + "fc=interval_colors[%zu], label='%s'))\n", + i, interval_list_[i].label.c_str()); + } + } + + printf("plt.xlim(%f, %f)\n", xaxis_min_, xaxis_max_); + printf("plt.ylim(%f, %f)\n", yaxis_min_, yaxis_max_); + printf("plt.xlabel(\'%s\')\n", xaxis_label_.c_str()); + printf("plt.ylabel(\'%s\')\n", yaxis_label_.c_str()); + printf("plt.title(\'%s\')\n", title_.c_str()); + printf("fig = plt.gcf()\n"); + printf("fig.canvas.set_window_title(\'%s\')\n", id_.c_str()); + if (!yaxis_tick_labels_.empty()) { + printf("yaxis_tick_labels = ["); + for (const auto& kv : yaxis_tick_labels_) { + printf("(%f,\"%s\"),", kv.first, kv.second.c_str()); + } + printf("]\n"); + printf("yaxis_tick_labels = list(zip(*yaxis_tick_labels))\n"); + printf("plt.yticks(*yaxis_tick_labels)\n"); + } + if (!series_list_.empty() || !interval_list_.empty()) { + printf("handles, labels = plt.gca().get_legend_handles_labels()\n"); + printf("for lp in legend_patches:\n"); + printf(" handles.append(lp)\n"); + printf(" labels.append(lp.get_label())\n"); + printf("plt.legend(handles, labels, loc=\'best\', fontsize=\'small\')\n"); + } +} + +void Plot::ExportProtobuf(webrtc::analytics::Chart* chart) const { + for (size_t i = 0; i < series_list_.size(); i++) { + webrtc::analytics::DataSet* data_set = chart->add_data_sets(); + for (const auto& point : series_list_[i].points) { + data_set->add_x_values(point.x); + } + for (const auto& point : series_list_[i].points) { + data_set->add_y_values(point.y); + } + + if 
(series_list_[i].line_style == LineStyle::kBar) { + data_set->set_style(webrtc::analytics::ChartStyle::BAR_CHART); + } else if (series_list_[i].line_style == LineStyle::kLine) { + data_set->set_style(webrtc::analytics::ChartStyle::LINE_CHART); + } else if (series_list_[i].line_style == LineStyle::kStep) { + data_set->set_style(webrtc::analytics::ChartStyle::LINE_STEP_CHART); + } else if (series_list_[i].line_style == LineStyle::kNone) { + data_set->set_style(webrtc::analytics::ChartStyle::SCATTER_CHART); + } else { + data_set->set_style(webrtc::analytics::ChartStyle::UNDEFINED); + } + + if (series_list_[i].point_style == PointStyle::kHighlight) + data_set->set_highlight_points(true); + + data_set->set_label(series_list_[i].label); + } + + chart->set_xaxis_min(xaxis_min_); + chart->set_xaxis_max(xaxis_max_); + chart->set_yaxis_min(yaxis_min_); + chart->set_yaxis_max(yaxis_max_); + chart->set_xaxis_label(xaxis_label_); + chart->set_yaxis_label(yaxis_label_); + chart->set_title(title_); + chart->set_id(id_); + + for (const auto& kv : yaxis_tick_labels_) { + webrtc::analytics::TickLabel* tick = chart->add_yaxis_tick_labels(); + tick->set_value(kv.first); + tick->set_label(kv.second); + } +} + +void PlotCollection::PrintPythonCode(bool shared_xaxis) const { + printf("import matplotlib.pyplot as plt\n"); + printf("plt.rcParams.update({'figure.max_open_warning': 0})\n"); + printf("import matplotlib.patches as mpatches\n"); + printf("import matplotlib.patheffects as pe\n"); + printf("import colorsys\n"); + for (size_t i = 0; i < plots_.size(); i++) { + printf("plt.figure(%zu)\n", i); + if (shared_xaxis) { + // Link x-axes across all figures for synchronized zooming. + if (i == 0) { + printf("axis0 = plt.subplot(111)\n"); + } else { + printf("plt.subplot(111, sharex=axis0)\n"); + } + } + plots_[i]->PrintPythonCode(); + } + printf("plt.show()\n"); +} + +void PlotCollection::ExportProtobuf( + webrtc::analytics::ChartCollection* collection) const { + for (const auto& plot : plots_) { + // TODO(terelius): Ensure that there is no way to insert plots other than + // ProtobufPlots in a ProtobufPlotCollection. Needed to safely static_cast + // here. + webrtc::analytics::Chart* protobuf_representation = + collection->add_charts(); + plot->ExportProtobuf(protobuf_representation); + } +} + +Plot* PlotCollection::AppendNewPlot() { + plots_.push_back(std::make_unique<Plot>()); + return plots_.back().get(); +} + } // namespace webrtc diff --git a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_base.h b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_base.h index 5e4ebfa5222..06a206f0315 100644 --- a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_base.h +++ b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_base.h @@ -15,6 +15,13 @@ #include <utility> #include <vector> +#include "rtc_base/deprecation.h" +#include "rtc_base/ignore_wundef.h" + +RTC_PUSH_IGNORING_WUNDEF() +#include "rtc_tools/rtc_event_log_visualizer/proto/chart.pb.h" +RTC_POP_IGNORING_WUNDEF() + namespace webrtc { enum class LineStyle { @@ -94,8 +101,8 @@ class Plot { public: virtual ~Plot() {} - // Overloaded to draw the plot. - virtual void Draw() = 0; + // Deprecated. Use PrintPythonCode() or ExportProtobuf() instead. + RTC_DEPRECATED virtual void Draw() {} // Sets the lower x-axis limit to min_value (if left_margin == 0). // Sets the upper x-axis limit to max_value (if right_margin == 0). 
@@ -158,6 +165,12 @@ class Plot { // Otherwise, the call has no effect and the timeseries is destroyed. void AppendTimeSeriesIfNotEmpty(TimeSeries&& time_series); + // Replaces PythonPlot::Draw() + void PrintPythonCode() const; + + // Replaces ProtobufPlot::Draw() + void ExportProtobuf(webrtc::analytics::Chart* chart) const; + protected: float xaxis_min_; float xaxis_max_; @@ -175,8 +188,17 @@ class Plot { class PlotCollection { public: virtual ~PlotCollection() {} - virtual void Draw() = 0; - virtual Plot* AppendNewPlot() = 0; + + // Deprecated. Use PrintPythonCode() or ExportProtobuf() instead. + RTC_DEPRECATED virtual void Draw() {} + + virtual Plot* AppendNewPlot(); + + // Replaces PythonPlotCollection::Draw() + void PrintPythonCode(bool shared_xaxis) const; + + // Replaces ProtobufPlotCollections::Draw() + void ExportProtobuf(webrtc::analytics::ChartCollection* collection) const; protected: std::vector<std::unique_ptr<Plot>> plots_; diff --git a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_protobuf.cc b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_protobuf.cc index 9e82c01ba63..0f43191e8b0 100644 --- a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_protobuf.cc +++ b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_protobuf.cc @@ -24,49 +24,7 @@ ProtobufPlot::~ProtobufPlot() {} void ProtobufPlot::Draw() {} -void ProtobufPlot::ExportProtobuf(webrtc::analytics::Chart* chart) { - for (size_t i = 0; i < series_list_.size(); i++) { - webrtc::analytics::DataSet* data_set = chart->add_data_sets(); - for (const auto& point : series_list_[i].points) { - data_set->add_x_values(point.x); - } - for (const auto& point : series_list_[i].points) { - data_set->add_y_values(point.y); - } - if (series_list_[i].line_style == LineStyle::kBar) { - data_set->set_style(webrtc::analytics::ChartStyle::BAR_CHART); - } else if (series_list_[i].line_style == LineStyle::kLine) { - data_set->set_style(webrtc::analytics::ChartStyle::LINE_CHART); - } else if (series_list_[i].line_style == LineStyle::kStep) { - data_set->set_style(webrtc::analytics::ChartStyle::LINE_STEP_CHART); - } else if (series_list_[i].line_style == LineStyle::kNone) { - data_set->set_style(webrtc::analytics::ChartStyle::SCATTER_CHART); - } else { - data_set->set_style(webrtc::analytics::ChartStyle::UNDEFINED); - } - - if (series_list_[i].point_style == PointStyle::kHighlight) - data_set->set_highlight_points(true); - - data_set->set_label(series_list_[i].label); - } - - chart->set_xaxis_min(xaxis_min_); - chart->set_xaxis_max(xaxis_max_); - chart->set_yaxis_min(yaxis_min_); - chart->set_yaxis_max(yaxis_max_); - chart->set_xaxis_label(xaxis_label_); - chart->set_yaxis_label(yaxis_label_); - chart->set_title(title_); - chart->set_id(id_); - - for (const auto& kv : yaxis_tick_labels_) { - webrtc::analytics::TickLabel* tick = chart->add_yaxis_tick_labels(); - tick->set_value(kv.first); - tick->set_label(kv.second); - } -} ProtobufPlotCollection::ProtobufPlotCollection() {} @@ -78,19 +36,6 @@ void ProtobufPlotCollection::Draw() { std::cout << collection.SerializeAsString(); } -void ProtobufPlotCollection::ExportProtobuf( - webrtc::analytics::ChartCollection* collection) { - for (const auto& plot : plots_) { - // TODO(terelius): Ensure that there is no way to insert plots other than - // ProtobufPlots in a ProtobufPlotCollection. Needed to safely static_cast - // here. 
- webrtc::analytics::Chart* protobuf_representation = - collection->add_charts(); - static_cast<ProtobufPlot*>(plot.get()) - ->ExportProtobuf(protobuf_representation); - } -} - Plot* ProtobufPlotCollection::AppendNewPlot() { Plot* plot = new ProtobufPlot(); plots_.push_back(std::unique_ptr<Plot>(plot)); diff --git a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_protobuf.h b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_protobuf.h index 738247a3098..0773b58d208 100644 --- a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_protobuf.h +++ b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_protobuf.h @@ -23,16 +23,15 @@ class ProtobufPlot final : public Plot { ProtobufPlot(); ~ProtobufPlot() override; void Draw() override; - void ExportProtobuf(webrtc::analytics::Chart* chart); }; class ProtobufPlotCollection final : public PlotCollection { public: - ProtobufPlotCollection(); + // This class is deprecated. Use PlotCollection and ExportProtobuf() instead. + RTC_DEPRECATED ProtobufPlotCollection(); ~ProtobufPlotCollection() override; void Draw() override; Plot* AppendNewPlot() override; - void ExportProtobuf(webrtc::analytics::ChartCollection* collection); }; } // namespace webrtc diff --git a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_python.cc b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_python.cc index e7cde45f30e..b3708110dfb 100644 --- a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_python.cc +++ b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_python.cc @@ -25,149 +25,7 @@ PythonPlot::PythonPlot() {} PythonPlot::~PythonPlot() {} void PythonPlot::Draw() { - // Write python commands to stdout. Intended program usage is - // ./event_log_visualizer event_log160330.dump | python - - if (!series_list_.empty()) { - printf("color_count = %zu\n", series_list_.size()); - printf( - "hls_colors = [(i*1.0/color_count, 0.25+i*0.5/color_count, 0.8) for i " - "in range(color_count)]\n"); - printf("colors = [colorsys.hls_to_rgb(*hls) for hls in hls_colors]\n"); - - for (size_t i = 0; i < series_list_.size(); i++) { - printf("\n# === Series: %s ===\n", series_list_[i].label.c_str()); - // List x coordinates - printf("x%zu = [", i); - if (!series_list_[i].points.empty()) - printf("%.3f", series_list_[i].points[0].x); - for (size_t j = 1; j < series_list_[i].points.size(); j++) - printf(", %.3f", series_list_[i].points[j].x); - printf("]\n"); - - // List y coordinates - printf("y%zu = [", i); - if (!series_list_[i].points.empty()) - printf("%G", series_list_[i].points[0].y); - for (size_t j = 1; j < series_list_[i].points.size(); j++) - printf(", %G", series_list_[i].points[j].y); - printf("]\n"); - - if (series_list_[i].line_style == LineStyle::kBar) { - // There is a plt.bar function that draws bar plots, - // but it is *way* too slow to be useful. 
- printf( - "plt.vlines(x%zu, map(lambda t: min(t,0), y%zu), map(lambda t: " - "max(t,0), y%zu), color=colors[%zu], " - "label=\'%s\')\n", - i, i, i, i, series_list_[i].label.c_str()); - if (series_list_[i].point_style == PointStyle::kHighlight) { - printf( - "plt.plot(x%zu, y%zu, color=colors[%zu], " - "marker='.', ls=' ')\n", - i, i, i); - } - } else if (series_list_[i].line_style == LineStyle::kLine) { - if (series_list_[i].point_style == PointStyle::kHighlight) { - printf( - "plt.plot(x%zu, y%zu, color=colors[%zu], label=\'%s\', " - "marker='.')\n", - i, i, i, series_list_[i].label.c_str()); - } else { - printf("plt.plot(x%zu, y%zu, color=colors[%zu], label=\'%s\')\n", i, - i, i, series_list_[i].label.c_str()); - } - } else if (series_list_[i].line_style == LineStyle::kStep) { - // Draw lines from (x[0],y[0]) to (x[1],y[0]) to (x[1],y[1]) and so on - // to illustrate the "steps". This can be expressed by duplicating all - // elements except the first in x and the last in y. - printf("xd%zu = [dup for v in x%zu for dup in [v, v]]\n", i, i); - printf("yd%zu = [dup for v in y%zu for dup in [v, v]]\n", i, i); - printf( - "plt.plot(xd%zu[1:], yd%zu[:-1], color=colors[%zu], " - "label=\'%s\')\n", - i, i, i, series_list_[i].label.c_str()); - if (series_list_[i].point_style == PointStyle::kHighlight) { - printf( - "plt.plot(x%zu, y%zu, color=colors[%zu], " - "marker='.', ls=' ')\n", - i, i, i); - } - } else if (series_list_[i].line_style == LineStyle::kNone) { - printf( - "plt.plot(x%zu, y%zu, color=colors[%zu], label=\'%s\', " - "marker='o', ls=' ')\n", - i, i, i, series_list_[i].label.c_str()); - } else { - printf("raise Exception(\"Unknown graph type\")\n"); - } - } - - // IntervalSeries - printf("interval_colors = ['#ff8e82','#5092fc','#c4ffc4','#aaaaaa']\n"); - RTC_CHECK_LE(interval_list_.size(), 4); - // To get the intervals to show up in the legend we have to create patches - // for them. 
- printf("legend_patches = []\n"); - for (size_t i = 0; i < interval_list_.size(); i++) { - // List intervals - printf("\n# === IntervalSeries: %s ===\n", - interval_list_[i].label.c_str()); - printf("ival%zu = [", i); - if (!interval_list_[i].intervals.empty()) { - printf("(%G, %G)", interval_list_[i].intervals[0].begin, - interval_list_[i].intervals[0].end); - } - for (size_t j = 1; j < interval_list_[i].intervals.size(); j++) { - printf(", (%G, %G)", interval_list_[i].intervals[j].begin, - interval_list_[i].intervals[j].end); - } - printf("]\n"); - - printf("for i in range(0, %zu):\n", interval_list_[i].intervals.size()); - if (interval_list_[i].orientation == IntervalSeries::kVertical) { - printf( - " plt.axhspan(ival%zu[i][0], ival%zu[i][1], " - "facecolor=interval_colors[%zu], " - "alpha=0.3)\n", - i, i, i); - } else { - printf( - " plt.axvspan(ival%zu[i][0], ival%zu[i][1], " - "facecolor=interval_colors[%zu], " - "alpha=0.3)\n", - i, i, i); - } - printf( - "legend_patches.append(mpatches.Patch(ec=\'black\', " - "fc=interval_colors[%zu], label='%s'))\n", - i, interval_list_[i].label.c_str()); - } - } - - printf("plt.xlim(%f, %f)\n", xaxis_min_, xaxis_max_); - printf("plt.ylim(%f, %f)\n", yaxis_min_, yaxis_max_); - printf("plt.xlabel(\'%s\')\n", xaxis_label_.c_str()); - printf("plt.ylabel(\'%s\')\n", yaxis_label_.c_str()); - printf("plt.title(\'%s\')\n", title_.c_str()); - printf("fig = plt.gcf()\n"); - printf("fig.canvas.set_window_title(\'%s\')\n", id_.c_str()); - if (!yaxis_tick_labels_.empty()) { - printf("yaxis_tick_labels = ["); - for (const auto& kv : yaxis_tick_labels_) { - printf("(%f,\"%s\"),", kv.first, kv.second.c_str()); - } - printf("]\n"); - printf("yaxis_tick_labels = list(zip(*yaxis_tick_labels))\n"); - printf("plt.yticks(*yaxis_tick_labels)\n"); - } - if (!series_list_.empty() || !interval_list_.empty()) { - printf("handles, labels = plt.gca().get_legend_handles_labels()\n"); - printf("for lp in legend_patches:\n"); - printf(" handles.append(lp)\n"); - printf(" labels.append(lp.get_label())\n"); - printf("plt.legend(handles, labels, loc=\'best\', fontsize=\'small\')\n"); - } + PrintPythonCode(); } PythonPlotCollection::PythonPlotCollection(bool shared_xaxis) @@ -176,24 +34,7 @@ PythonPlotCollection::PythonPlotCollection(bool shared_xaxis) PythonPlotCollection::~PythonPlotCollection() {} void PythonPlotCollection::Draw() { - printf("import matplotlib.pyplot as plt\n"); - printf("plt.rcParams.update({'figure.max_open_warning': 0})\n"); - printf("import matplotlib.patches as mpatches\n"); - printf("import matplotlib.patheffects as pe\n"); - printf("import colorsys\n"); - for (size_t i = 0; i < plots_.size(); i++) { - printf("plt.figure(%zu)\n", i); - if (shared_xaxis_) { - // Link x-axes across all figures for synchronized zooming. 
- if (i == 0) { - printf("axis0 = plt.subplot(111)\n"); - } else { - printf("plt.subplot(111, sharex=axis0)\n"); - } - } - plots_[i]->Draw(); - } - printf("plt.show()\n"); + PrintPythonCode(shared_xaxis_); } Plot* PythonPlotCollection::AppendNewPlot() { diff --git a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_python.h b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_python.h index dcdcf23fcf7..998ed7b2215 100644 --- a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_python.h +++ b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/plot_python.h @@ -23,7 +23,8 @@ class PythonPlot final : public Plot { class PythonPlotCollection final : public PlotCollection { public: - explicit PythonPlotCollection(bool shared_xaxis = false); + // This class is deprecated. Use PlotCollection and PrintPythonCode() instead. + RTC_DEPRECATED explicit PythonPlotCollection(bool shared_xaxis = false); ~PythonPlotCollection() override; void Draw() override; Plot* AppendNewPlot() override; diff --git a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/triage_notifications.h b/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/triage_notifications.h deleted file mode 100644 index 23b31ece421..00000000000 --- a/chromium/third_party/webrtc/rtc_tools/rtc_event_log_visualizer/triage_notifications.h +++ /dev/null @@ -1,158 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_TRIAGE_NOTIFICATIONS_H_ -#define RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_TRIAGE_NOTIFICATIONS_H_ - -#include <string> - -namespace webrtc { - -class IncomingRtpReceiveTimeGap { - public: - IncomingRtpReceiveTimeGap(float time_seconds, int64_t duration) - : time_seconds_(time_seconds), duration_(duration) {} - float Time() const { return time_seconds_; } - std::string ToString() const { - return std::string("No RTP packets received for ") + - std::to_string(duration_) + std::string(" ms"); - } - - private: - float time_seconds_; - int64_t duration_; -}; - -class IncomingRtcpReceiveTimeGap { - public: - IncomingRtcpReceiveTimeGap(float time_seconds, int64_t duration) - : time_seconds_(time_seconds), duration_(duration) {} - float Time() const { return time_seconds_; } - std::string ToString() const { - return std::string("No RTCP packets received for ") + - std::to_string(duration_) + std::string(" ms"); - } - - private: - float time_seconds_; - int64_t duration_; -}; - -class OutgoingRtpSendTimeGap { - public: - OutgoingRtpSendTimeGap(float time_seconds, int64_t duration) - : time_seconds_(time_seconds), duration_(duration) {} - float Time() const { return time_seconds_; } - std::string ToString() const { - return std::string("No RTP packets sent for ") + std::to_string(duration_) + - std::string(" ms"); - } - - private: - float time_seconds_; - int64_t duration_; -}; - -class OutgoingRtcpSendTimeGap { - public: - OutgoingRtcpSendTimeGap(float time_seconds, int64_t duration) - : time_seconds_(time_seconds), duration_(duration) {} - float Time() const { return time_seconds_; } - std::string ToString() const { - return std::string("No RTCP packets sent for ") + - std::to_string(duration_) + std::string(" ms"); - } - - private: - float time_seconds_; - int64_t duration_; -}; - -class IncomingSeqNumJump { - public: - IncomingSeqNumJump(float time_seconds, uint32_t ssrc) - : time_seconds_(time_seconds), ssrc_(ssrc) {} - float Time() const { return time_seconds_; } - std::string ToString() const { - return std::string("Sequence number jumps on incoming SSRC ") + - std::to_string(ssrc_); - } - - private: - float time_seconds_; - - uint32_t ssrc_; -}; - -class IncomingCaptureTimeJump { - public: - IncomingCaptureTimeJump(float time_seconds, uint32_t ssrc) - : time_seconds_(time_seconds), ssrc_(ssrc) {} - float Time() const { return time_seconds_; } - std::string ToString() const { - return std::string("Capture timestamp jumps on incoming SSRC ") + - std::to_string(ssrc_); - } - - private: - float time_seconds_; - - uint32_t ssrc_; -}; - -class OutgoingSeqNoJump { - public: - OutgoingSeqNoJump(float time_seconds, uint32_t ssrc) - : time_seconds_(time_seconds), ssrc_(ssrc) {} - float Time() const { return time_seconds_; } - std::string ToString() const { - return std::string("Sequence number jumps on outgoing SSRC ") + - std::to_string(ssrc_); - } - - private: - float time_seconds_; - - uint32_t ssrc_; -}; - -class OutgoingCaptureTimeJump { - public: - OutgoingCaptureTimeJump(float time_seconds, uint32_t ssrc) - : time_seconds_(time_seconds), ssrc_(ssrc) {} - float Time() const { return time_seconds_; } - std::string ToString() const { - return std::string("Capture timestamp jumps on outgoing SSRC ") + - std::to_string(ssrc_); - } - - private: - float time_seconds_; - - uint32_t ssrc_; -}; - -class OutgoingHighLoss { - public: - explicit OutgoingHighLoss(double avg_loss_fraction) - : avg_loss_fraction_(avg_loss_fraction) {} - std::string ToString() const 
{ - return std::string("High average loss (") + - std::to_string(avg_loss_fraction_ * 100) + - std::string("%) across the call."); - } - - private: - double avg_loss_fraction_; -}; - -} // namespace webrtc - -#endif // RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_TRIAGE_NOTIFICATIONS_H_ |
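For reference, the LineStyle::kStep branch of Plot::PrintPythonCode() above emits a small duplication trick rather than relying on matplotlib's own step plotting: every x and y element is duplicated, then the two lists are offset by one. The sketch below is a standalone illustration of that generated code, not part of the patch; the sample x/y values are invented and matplotlib is assumed to be installed.

import matplotlib.pyplot as plt

# Invented sample data standing in for one TimeSeries.
x = [0.0, 1.0, 2.5, 4.0]
y = [10.0, 30.0, 20.0, 25.0]

# Duplicate every element, as the generated xd<i>/yd<i> lists do.
xd = [dup for v in x for dup in [v, v]]
yd = [dup for v in y for dup in [v, v]]

# Dropping the first duplicated x and the last duplicated y pairs
# (x[i], y[i]) with (x[i+1], y[i]), so each step is drawn as a horizontal
# segment followed by a vertical jump -- the "staircase" the comment describes.
plt.plot(xd[1:], yd[:-1], label='step series')

plt.legend(loc='best', fontsize='small')
plt.show()

The same duplication is what the tool prints when its output is piped to python, e.g. ./event_log_visualizer event_log.dump | python, so the sketch also shows what a LineStyle::kStep series looks like once executed.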