Delete deprecated 'track' and 'stream' metrics from WebRTC.

Track stats are roughly equal in size to the RTP stream stats, which
are the largest objects in the RTCStatsReport and account for the
majority of its size, a size that scales with meeting size. Deleting
track/stream stats cuts the report size roughly in half, which should
reduce performance overhead and unblock code simplifications.
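
For downstream code that still reads these objects, a minimal migration
sketch (assuming `report` is an rtc::scoped_refptr<const webrtc::RTCStatsReport>
obtained from the usual GetStats() callback; the local names are illustrative):

  for (const webrtc::RTCInboundRtpStreamStats* inbound :
       report->GetStatsOfType<webrtc::RTCInboundRtpStreamStats>()) {
    // The kind ("audio"/"video") and the MediaStreamTrack id that used to be
    // reached via RTCRtpStreamStats::track_id and the deprecated track stats
    // are available directly on the RTP stream stats.
    if (inbound->kind.is_defined() && inbound->track_identifier.is_defined()) {
      std::string kind = *inbound->kind;
      std::string track_identifier = *inbound->track_identifier;
    }
  }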

Blocked on:
- https://chromium-review.googlesource.com/c/chromium/src/+/4517530

# Relevant bots already passed
NOTRY=True

Bug: webrtc:14175, webrtc:14419
Change-Id: Ib7bdb84c10459b42b829228d11876498e5227312
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/289043
Reviewed-by: Harald Alvestrand <hta@webrtc.org>
Commit-Queue: Henrik Boström <hbos@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#40129}
Henrik Boström 2023-05-24 11:24:13 +02:00 committed by WebRTC LUCI CQ
parent 54c37a5500
commit 4e231eedbd
12 changed files with 55 additions and 1382 deletions


@@ -57,9 +57,10 @@ struct RTCDtlsTransportState {
static const char* const kFailed;
};
// `RTCMediaStreamTrackStats::kind` is not an enum in the spec but the only
// valid values are "audio" and "video".
// https://w3c.github.io/webrtc-stats/#dom-rtcmediastreamtrackstats-kind
// `RTCRtpStreamStats::kind` is not an enum in the spec but the only valid
// values are "audio" and "video" as it comes from `MediaStreamTrack::kind`.
// https://w3c.github.io/webrtc-stats/#dom-rtcrtpstreamstats-kind
// https://w3c.github.io/mediacapture-main/#dom-mediadeviceinfo-kind
struct RTCMediaStreamTrackKind {
static const char* const kAudio;
static const char* const kVideo;
@@ -279,67 +280,6 @@ class RTC_EXPORT RTCRemoteIceCandidateStats final
const char* type() const override;
};
// TODO(https://crbug.com/webrtc/14419): Delete this class, it's deprecated.
class RTC_EXPORT DEPRECATED_RTCMediaStreamStats final : public RTCStats {
public:
WEBRTC_RTCSTATS_DECL();
DEPRECATED_RTCMediaStreamStats(std::string id, Timestamp timestamp);
DEPRECATED_RTCMediaStreamStats(const DEPRECATED_RTCMediaStreamStats& other);
~DEPRECATED_RTCMediaStreamStats() override;
RTCStatsMember<std::string> stream_identifier;
RTCStatsMember<std::vector<std::string>> track_ids;
};
using RTCMediaStreamStats [[deprecated("bugs.webrtc.org/14419")]] =
DEPRECATED_RTCMediaStreamStats;
// TODO(https://crbug.com/webrtc/14175): Delete this class, it's deprecated.
class RTC_EXPORT DEPRECATED_RTCMediaStreamTrackStats final : public RTCStats {
public:
WEBRTC_RTCSTATS_DECL();
DEPRECATED_RTCMediaStreamTrackStats(std::string id,
Timestamp timestamp,
const char* kind);
DEPRECATED_RTCMediaStreamTrackStats(
const DEPRECATED_RTCMediaStreamTrackStats& other);
~DEPRECATED_RTCMediaStreamTrackStats() override;
RTCStatsMember<std::string> track_identifier;
RTCStatsMember<std::string> media_source_id;
RTCStatsMember<bool> remote_source;
RTCStatsMember<bool> ended;
// TODO(https://crbug.com/webrtc/14173): Remove this obsolete metric.
RTCStatsMember<bool> detached;
// Enum type RTCMediaStreamTrackKind.
RTCStatsMember<std::string> kind;
RTCStatsMember<double> jitter_buffer_delay;
RTCStatsMember<uint64_t> jitter_buffer_emitted_count;
// Video-only members
RTCStatsMember<uint32_t> frame_width;
RTCStatsMember<uint32_t> frame_height;
RTCStatsMember<uint32_t> frames_sent;
RTCStatsMember<uint32_t> huge_frames_sent;
RTCStatsMember<uint32_t> frames_received;
RTCStatsMember<uint32_t> frames_decoded;
RTCStatsMember<uint32_t> frames_dropped;
// Audio-only members
RTCStatsMember<double> audio_level; // Receive-only
RTCStatsMember<double> total_audio_energy; // Receive-only
RTCStatsMember<double> echo_return_loss;
RTCStatsMember<double> echo_return_loss_enhancement;
RTCStatsMember<uint64_t> total_samples_received;
RTCStatsMember<double> total_samples_duration; // Receive-only
RTCStatsMember<uint64_t> concealed_samples;
RTCStatsMember<uint64_t> silent_concealed_samples;
RTCStatsMember<uint64_t> concealment_events;
RTCStatsMember<uint64_t> inserted_samples_for_deceleration;
RTCStatsMember<uint64_t> removed_samples_for_acceleration;
};
using RTCMediaStreamTrackStats [[deprecated("bugs.webrtc.org/14175")]] =
DEPRECATED_RTCMediaStreamTrackStats;
// https://w3c.github.io/webrtc-stats/#pcstats-dict*
class RTC_EXPORT RTCPeerConnectionStats final : public RTCStats {
public:
@@ -363,8 +303,6 @@ class RTC_EXPORT RTCRtpStreamStats : public RTCStats {
RTCStatsMember<uint32_t> ssrc;
RTCStatsMember<std::string> kind;
// Obsolete: track_id
RTCStatsMember<std::string> track_id;
RTCStatsMember<std::string> transport_id;
RTCStatsMember<std::string> codec_id;


@@ -1418,7 +1418,6 @@ TEST_P(PeerConnectionIntegrationTest,
ASSERT_EQ(1U, inbound_stream_stats.size());
ASSERT_TRUE(inbound_stream_stats[0]->bytes_received.is_defined());
ASSERT_GT(*inbound_stream_stats[0]->bytes_received, 0U);
ASSERT_TRUE(inbound_stream_stats[0]->track_id.is_defined());
}
// Same as above but for the legacy stats implementation.
@@ -1469,105 +1468,6 @@ TEST_P(PeerConnectionIntegrationTest,
EXPECT_TRUE(inbound_rtps[index]->audio_level.is_defined());
}
// Helper for test below.
void ModifySsrcs(cricket::SessionDescription* desc) {
for (ContentInfo& content : desc->contents()) {
for (StreamParams& stream :
content.media_description()->mutable_streams()) {
for (uint32_t& ssrc : stream.ssrcs) {
ssrc = rtc::CreateRandomId();
}
}
}
}
// Test that the "DEPRECATED_RTCMediaStreamTrackStats" object is updated
// correctly when SSRCs are unsignaled, and the SSRC of the received (audio)
// stream changes. This should result in two "RTCInboundRtpStreamStats", but
// only one "DEPRECATED_RTCMediaStreamTrackStats", whose counters go up
// continuously rather than being reset to 0 once the SSRC change occurs.
//
// Regression test for this bug:
// https://bugs.chromium.org/p/webrtc/issues/detail?id=8158
//
// The bug causes the track stats to only represent one of the two streams:
// whichever one has the higher SSRC. So with this bug, there was a 50% chance
// that the track stat counters would reset to 0 when the new stream is
// received, and a 50% chance that they'll stop updating (while
// "concealed_samples" continues increasing, due to silence being generated for
// the inactive stream).
TEST_P(PeerConnectionIntegrationTest,
TrackStatsUpdatedCorrectlyWhenUnsignaledSsrcChanges) {
ASSERT_TRUE(CreatePeerConnectionWrappers());
ConnectFakeSignaling();
caller()->AddAudioTrack();
// Remove SSRCs and MSIDs from the received offer SDP, simulating an endpoint
// that doesn't signal SSRCs (from the callee's perspective).
callee()->SetReceivedSdpMunger(RemoveSsrcsAndMsids);
caller()->CreateAndSetAndSignalOffer();
ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
// Wait for 50 audio frames (500ms of audio) to be received by the callee.
{
MediaExpectations media_expectations;
media_expectations.CalleeExpectsSomeAudio(50);
ASSERT_TRUE(ExpectNewFrames(media_expectations));
}
// Some audio frames were received, so we should have nonzero "samples
// received" for the track.
rtc::scoped_refptr<const webrtc::RTCStatsReport> report =
callee()->NewGetStats();
ASSERT_NE(nullptr, report);
auto track_stats =
report->GetStatsOfType<webrtc::DEPRECATED_RTCMediaStreamTrackStats>();
ASSERT_EQ(1U, track_stats.size());
ASSERT_TRUE(track_stats[0]->total_samples_received.is_defined());
ASSERT_GT(*track_stats[0]->total_samples_received, 0U);
// uint64_t prev_samples_received = *track_stats[0]->total_samples_received;
// Create a new offer and munge it to cause the caller to use a new SSRC.
caller()->SetGeneratedSdpMunger(ModifySsrcs);
caller()->CreateAndSetAndSignalOffer();
ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
// Wait for 25 more audio frames (250ms of audio) to be received, from the new
// SSRC.
{
MediaExpectations media_expectations;
media_expectations.CalleeExpectsSomeAudio(25);
ASSERT_TRUE(ExpectNewFrames(media_expectations));
}
report = callee()->NewGetStats();
ASSERT_NE(nullptr, report);
track_stats =
report->GetStatsOfType<webrtc::DEPRECATED_RTCMediaStreamTrackStats>();
ASSERT_EQ(1U, track_stats.size());
ASSERT_TRUE(track_stats[0]->total_samples_received.is_defined());
// The "total samples received" stat should only be greater than it was
// before.
// TODO(deadbeef): Uncomment this assertion once the bug is completely fixed.
// Right now, the new SSRC will cause the counters to reset to 0.
// EXPECT_GT(*track_stats[0]->total_samples_received, prev_samples_received);
// Additionally, the percentage of concealed samples (samples generated to
// conceal packet loss) should be less than 50%. If it's greater, that's a
// good sign that we're seeing stats from the old stream that's no longer
// receiving packets, and is generating concealed samples of silence.
constexpr double kAcceptableConcealedSamplesPercentage = 0.50;
ASSERT_TRUE(track_stats[0]->concealed_samples.is_defined());
EXPECT_LT(*track_stats[0]->concealed_samples,
*track_stats[0]->total_samples_received *
kAcceptableConcealedSamplesPercentage);
// Also ensure that we have two "RTCInboundRtpStreamStats" as expected, as a
// sanity check that the SSRC really changed.
// TODO(deadbeef): This isn't working right now, because we're not returning
// *any* stats for the inactive stream. Uncomment when the bug is completely
// fixed.
// auto inbound_stream_stats =
// report->GetStatsOfType<webrtc::RTCInboundRtpStreamStats>();
// ASSERT_EQ(2U, inbound_stream_stats.size());
}
// Test that DTLS 1.0 is used if both sides only support DTLS 1.0.
TEST_P(PeerConnectionIntegrationTest, EndToEndCallWithDtls10) {
PeerConnectionFactory::Options dtls_10_options;


@@ -105,16 +105,6 @@ std::string RTCIceCandidatePairStatsIDFromConnectionInfo(
return sb.str();
}
// `direction` is either kDirectionInbound or kDirectionOutbound.
std::string DEPRECATED_RTCMediaStreamTrackStatsIDFromDirectionAndAttachment(
const char direction,
int attachment_id) {
char buf[1024];
rtc::SimpleStringBuilder sb(buf);
sb << "DEPRECATED_T" << direction << attachment_id;
return sb.str();
}
std::string RTCTransportStatsIDFromTransportChannel(
const std::string& transport_name,
int channel_component) {
@@ -402,13 +392,6 @@ std::string GetCodecIdAndMaybeCreateCodecStats(
return codec_id;
}
void SetMediaStreamTrackStatsFromMediaStreamTrackInterface(
const MediaStreamTrackInterface& track,
DEPRECATED_RTCMediaStreamTrackStats* track_stats) {
track_stats->track_identifier = track.id();
track_stats->ended = (track.state() == MediaStreamTrackInterface::kEnded);
}
// Provides the media independent counters (both audio and video).
void SetInboundRTPStreamStatsFromMediaReceiverInfo(
const cricket::MediaReceiverInfo& media_receiver_info,
@@ -1030,249 +1013,6 @@ void SetAudioProcessingStats(StatsType* stats,
}
}
std::unique_ptr<DEPRECATED_RTCMediaStreamTrackStats>
ProduceMediaStreamTrackStatsFromVoiceSenderInfo(
Timestamp timestamp,
AudioTrackInterface& audio_track,
const cricket::VoiceSenderInfo& voice_sender_info,
int attachment_id) {
auto audio_track_stats =
std::make_unique<DEPRECATED_RTCMediaStreamTrackStats>(
DEPRECATED_RTCMediaStreamTrackStatsIDFromDirectionAndAttachment(
kDirectionOutbound, attachment_id),
timestamp, RTCMediaStreamTrackKind::kAudio);
SetMediaStreamTrackStatsFromMediaStreamTrackInterface(
audio_track, audio_track_stats.get());
audio_track_stats->media_source_id =
RTCMediaSourceStatsIDFromKindAndAttachment(cricket::MEDIA_TYPE_AUDIO,
attachment_id);
audio_track_stats->remote_source = false;
audio_track_stats->detached = false;
// Audio processor may be attached to either the track or the send
// stream, so look in both places.
SetAudioProcessingStats(audio_track_stats.get(),
voice_sender_info.apm_statistics);
auto audio_processor(audio_track.GetAudioProcessor());
if (audio_processor.get()) {
// The `has_remote_tracks` argument is obsolete; makes no difference if it's
// set to true or false.
AudioProcessorInterface::AudioProcessorStatistics ap_stats =
audio_processor->GetStats(/*has_remote_tracks=*/false);
SetAudioProcessingStats(audio_track_stats.get(), ap_stats.apm_statistics);
}
return audio_track_stats;
}
std::unique_ptr<DEPRECATED_RTCMediaStreamTrackStats>
ProduceMediaStreamTrackStatsFromVoiceReceiverInfo(
Timestamp timestamp,
const AudioTrackInterface& audio_track,
const cricket::VoiceReceiverInfo& voice_receiver_info,
int attachment_id) {
// Since receiver tracks can't be reattached, we use the SSRC as
// an attachment identifier.
auto audio_track_stats =
std::make_unique<DEPRECATED_RTCMediaStreamTrackStats>(
DEPRECATED_RTCMediaStreamTrackStatsIDFromDirectionAndAttachment(
kDirectionInbound, attachment_id),
timestamp, RTCMediaStreamTrackKind::kAudio);
SetMediaStreamTrackStatsFromMediaStreamTrackInterface(
audio_track, audio_track_stats.get());
audio_track_stats->remote_source = true;
audio_track_stats->detached = false;
if (voice_receiver_info.audio_level >= 0) {
audio_track_stats->audio_level =
DoubleAudioLevelFromIntAudioLevel(voice_receiver_info.audio_level);
}
audio_track_stats->jitter_buffer_delay =
voice_receiver_info.jitter_buffer_delay_seconds;
audio_track_stats->jitter_buffer_emitted_count =
voice_receiver_info.jitter_buffer_emitted_count;
audio_track_stats->inserted_samples_for_deceleration =
voice_receiver_info.inserted_samples_for_deceleration;
audio_track_stats->removed_samples_for_acceleration =
voice_receiver_info.removed_samples_for_acceleration;
audio_track_stats->total_audio_energy =
voice_receiver_info.total_output_energy;
audio_track_stats->total_samples_received =
voice_receiver_info.total_samples_received;
audio_track_stats->total_samples_duration =
voice_receiver_info.total_output_duration;
audio_track_stats->concealed_samples = voice_receiver_info.concealed_samples;
audio_track_stats->silent_concealed_samples =
voice_receiver_info.silent_concealed_samples;
audio_track_stats->concealment_events =
voice_receiver_info.concealment_events;
return audio_track_stats;
}
std::unique_ptr<DEPRECATED_RTCMediaStreamTrackStats>
ProduceMediaStreamTrackStatsFromVideoSenderInfo(
Timestamp timestamp,
const VideoTrackInterface& video_track,
const cricket::VideoSenderInfo& video_sender_info,
int attachment_id) {
auto video_track_stats =
std::make_unique<DEPRECATED_RTCMediaStreamTrackStats>(
DEPRECATED_RTCMediaStreamTrackStatsIDFromDirectionAndAttachment(
kDirectionOutbound, attachment_id),
timestamp, RTCMediaStreamTrackKind::kVideo);
SetMediaStreamTrackStatsFromMediaStreamTrackInterface(
video_track, video_track_stats.get());
video_track_stats->media_source_id =
RTCMediaSourceStatsIDFromKindAndAttachment(cricket::MEDIA_TYPE_VIDEO,
attachment_id);
video_track_stats->remote_source = false;
video_track_stats->detached = false;
video_track_stats->frame_width =
static_cast<uint32_t>(video_sender_info.send_frame_width);
video_track_stats->frame_height =
static_cast<uint32_t>(video_sender_info.send_frame_height);
// TODO(hbos): Will reduce this by frames dropped due to congestion control
// when available. https://crbug.com/659137
video_track_stats->frames_sent = video_sender_info.frames_encoded;
video_track_stats->huge_frames_sent = video_sender_info.huge_frames_sent;
return video_track_stats;
}
std::unique_ptr<DEPRECATED_RTCMediaStreamTrackStats>
ProduceMediaStreamTrackStatsFromVideoReceiverInfo(
Timestamp timestamp,
const VideoTrackInterface& video_track,
const cricket::VideoReceiverInfo& video_receiver_info,
int attachment_id) {
auto video_track_stats =
std::make_unique<DEPRECATED_RTCMediaStreamTrackStats>(
DEPRECATED_RTCMediaStreamTrackStatsIDFromDirectionAndAttachment(
kDirectionInbound, attachment_id),
timestamp, RTCMediaStreamTrackKind::kVideo);
SetMediaStreamTrackStatsFromMediaStreamTrackInterface(
video_track, video_track_stats.get());
video_track_stats->remote_source = true;
video_track_stats->detached = false;
if (video_receiver_info.frame_width > 0 &&
video_receiver_info.frame_height > 0) {
video_track_stats->frame_width =
static_cast<uint32_t>(video_receiver_info.frame_width);
video_track_stats->frame_height =
static_cast<uint32_t>(video_receiver_info.frame_height);
}
video_track_stats->jitter_buffer_delay =
video_receiver_info.jitter_buffer_delay_seconds;
video_track_stats->jitter_buffer_emitted_count =
video_receiver_info.jitter_buffer_emitted_count;
video_track_stats->frames_received = video_receiver_info.frames_received;
// TODO(hbos): When we support receiving simulcast, this should be the total
// number of frames correctly decoded, independent of which SSRC it was
// received from. Since we don't support that, this is correct and is the same
// value as "RTCInboundRtpStreamStats.framesDecoded". https://crbug.com/659137
video_track_stats->frames_decoded = video_receiver_info.frames_decoded;
video_track_stats->frames_dropped = video_receiver_info.frames_dropped;
return video_track_stats;
}
void ProduceSenderMediaTrackStats(
Timestamp timestamp,
const TrackMediaInfoMap& track_media_info_map,
std::vector<rtc::scoped_refptr<RtpSenderInternal>> senders,
RTCStatsReport* report) {
// This function iterates over the senders to generate outgoing track stats.
// TODO(https://crbug.com/webrtc/14175): Stop collecting "track" stats,
// they're deprecated.
for (const auto& sender : senders) {
if (sender->media_type() == cricket::MEDIA_TYPE_AUDIO) {
AudioTrackInterface* track =
static_cast<AudioTrackInterface*>(sender->track().get());
if (!track)
continue;
cricket::VoiceSenderInfo null_sender_info;
const cricket::VoiceSenderInfo* voice_sender_info = &null_sender_info;
// TODO(hta): Checking on ssrc is not proper. There should be a way
// to see from a sender whether it's connected or not.
// Related to https://crbug.com/8694 (using ssrc 0 to indicate "none")
if (sender->ssrc() != 0) {
// When pc.close is called, sender info is discarded, so
// we generate zeroes instead. Bug: It should be retained.
// https://crbug.com/807174
const cricket::VoiceSenderInfo* sender_info =
track_media_info_map.GetVoiceSenderInfoBySsrc(sender->ssrc());
if (sender_info) {
voice_sender_info = sender_info;
} else {
RTC_DLOG(LS_INFO)
<< "RTCStatsCollector: No voice sender info for sender with ssrc "
<< sender->ssrc();
}
}
report->AddStats(ProduceMediaStreamTrackStatsFromVoiceSenderInfo(
timestamp, *track, *voice_sender_info, sender->AttachmentId()));
} else if (sender->media_type() == cricket::MEDIA_TYPE_VIDEO) {
VideoTrackInterface* track =
static_cast<VideoTrackInterface*>(sender->track().get());
if (!track)
continue;
cricket::VideoSenderInfo null_sender_info;
const cricket::VideoSenderInfo* video_sender_info = &null_sender_info;
// TODO(hta): Check on state not ssrc when state is available
// Related to https://bugs.webrtc.org/8694 (using ssrc 0 to indicate
// "none")
if (sender->ssrc() != 0) {
// When pc.close is called, sender info is discarded, so
// we generate zeroes instead. Bug: It should be retained.
// https://crbug.com/807174
const cricket::VideoSenderInfo* sender_info =
track_media_info_map.GetVideoSenderInfoBySsrc(sender->ssrc());
if (sender_info) {
video_sender_info = sender_info;
} else {
RTC_DLOG(LS_INFO)
<< "No video sender info for sender with ssrc " << sender->ssrc();
}
}
report->AddStats(ProduceMediaStreamTrackStatsFromVideoSenderInfo(
timestamp, *track, *video_sender_info, sender->AttachmentId()));
} else {
RTC_DCHECK_NOTREACHED();
}
}
}
void ProduceReceiverMediaTrackStats(
Timestamp timestamp,
const TrackMediaInfoMap& track_media_info_map,
std::vector<rtc::scoped_refptr<RtpReceiverInternal>> receivers,
RTCStatsReport* report) {
// This function iterates over the receivers to find the remote tracks.
for (const auto& receiver : receivers) {
if (receiver->media_type() == cricket::MEDIA_TYPE_AUDIO) {
AudioTrackInterface* track =
static_cast<AudioTrackInterface*>(receiver->track().get());
const cricket::VoiceReceiverInfo* voice_receiver_info =
track_media_info_map.GetVoiceReceiverInfo(*track);
if (!voice_receiver_info) {
continue;
}
report->AddStats(ProduceMediaStreamTrackStatsFromVoiceReceiverInfo(
timestamp, *track, *voice_receiver_info, receiver->AttachmentId()));
} else if (receiver->media_type() == cricket::MEDIA_TYPE_VIDEO) {
VideoTrackInterface* track =
static_cast<VideoTrackInterface*>(receiver->track().get());
const cricket::VideoReceiverInfo* video_receiver_info =
track_media_info_map.GetVideoReceiverInfo(*track);
if (!video_receiver_info) {
continue;
}
report->AddStats(ProduceMediaStreamTrackStatsFromVideoReceiverInfo(
timestamp, *track, *video_receiver_info, receiver->AttachmentId()));
} else {
RTC_DCHECK_NOTREACHED();
}
}
}
} // namespace
rtc::scoped_refptr<RTCStatsReport>
@@ -1494,8 +1234,6 @@ void RTCStatsCollector::ProducePartialResultsOnSignalingThreadImpl(
RTC_DCHECK_RUN_ON(signaling_thread_);
rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
ProduceMediaStreamStats_s(timestamp, partial_report);
ProduceMediaStreamTrackStats_s(timestamp, partial_report);
ProduceMediaSourceStats_s(timestamp, partial_report);
ProducePeerConnectionStats_s(timestamp, partial_report);
ProduceAudioPlayoutStats_s(timestamp, partial_report);
@@ -1777,68 +1515,6 @@ void RTCStatsCollector::ProduceIceCandidateAndPairStats_n(
}
}
void RTCStatsCollector::ProduceMediaStreamStats_s(
Timestamp timestamp,
RTCStatsReport* report) const {
RTC_DCHECK_RUN_ON(signaling_thread_);
rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
std::map<std::string, std::vector<std::string>> track_ids;
for (const auto& stats : transceiver_stats_infos_) {
for (const auto& sender : stats.transceiver->senders()) {
std::string track_id =
DEPRECATED_RTCMediaStreamTrackStatsIDFromDirectionAndAttachment(
kDirectionOutbound, sender->internal()->AttachmentId());
for (auto& stream_id : sender->stream_ids()) {
track_ids[stream_id].push_back(track_id);
}
}
for (const auto& receiver : stats.transceiver->receivers()) {
std::string track_id =
DEPRECATED_RTCMediaStreamTrackStatsIDFromDirectionAndAttachment(
kDirectionInbound, receiver->internal()->AttachmentId());
for (auto& stream : receiver->streams()) {
track_ids[stream->id()].push_back(track_id);
}
}
}
// Build stats for each stream ID known.
for (auto& it : track_ids) {
auto stream_stats = std::make_unique<DEPRECATED_RTCMediaStreamStats>(
"DEPRECATED_S" + it.first, timestamp);
stream_stats->stream_identifier = it.first;
stream_stats->track_ids = it.second;
report->AddStats(std::move(stream_stats));
}
}
void RTCStatsCollector::ProduceMediaStreamTrackStats_s(
Timestamp timestamp,
RTCStatsReport* report) const {
RTC_DCHECK_RUN_ON(signaling_thread_);
rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
for (const RtpTransceiverStatsInfo& stats : transceiver_stats_infos_) {
std::vector<rtc::scoped_refptr<RtpSenderInternal>> senders;
for (const auto& sender : stats.transceiver->senders()) {
senders.push_back(
rtc::scoped_refptr<RtpSenderInternal>(sender->internal()));
}
ProduceSenderMediaTrackStats(timestamp, stats.track_media_info_map, senders,
report);
std::vector<rtc::scoped_refptr<RtpReceiverInternal>> receivers;
for (const auto& receiver : stats.transceiver->receivers()) {
receivers.push_back(
rtc::scoped_refptr<RtpReceiverInternal>(receiver->internal()));
}
ProduceReceiverMediaTrackStats(timestamp, stats.track_media_info_map,
receivers, report);
}
}
void RTCStatsCollector::ProduceMediaSourceStats_s(
Timestamp timestamp,
RTCStatsReport* report) const {
@@ -2008,11 +1684,6 @@ void RTCStatsCollector::ProduceAudioRTPStreamStats_n(
rtc::scoped_refptr<AudioTrackInterface> audio_track =
stats.track_media_info_map.GetAudioTrack(voice_receiver_info);
if (audio_track) {
inbound_audio->track_id =
DEPRECATED_RTCMediaStreamTrackStatsIDFromDirectionAndAttachment(
kDirectionInbound, stats.track_media_info_map
.GetAttachmentIdByTrack(audio_track.get())
.value());
inbound_audio->track_identifier = audio_track->id();
}
if (audio_device_stats_ && stats.media_type == cricket::MEDIA_TYPE_AUDIO &&
@@ -2059,9 +1730,6 @@ void RTCStatsCollector::ProduceAudioRTPStreamStats_n(
int attachment_id =
stats.track_media_info_map.GetAttachmentIdByTrack(audio_track.get())
.value();
outbound_audio->track_id =
DEPRECATED_RTCMediaStreamTrackStatsIDFromDirectionAndAttachment(
kDirectionOutbound, attachment_id);
outbound_audio->media_source_id =
RTCMediaSourceStatsIDFromKindAndAttachment(cricket::MEDIA_TYPE_AUDIO,
attachment_id);
@@ -2115,11 +1783,6 @@ void RTCStatsCollector::ProduceVideoRTPStreamStats_n(
rtc::scoped_refptr<VideoTrackInterface> video_track =
stats.track_media_info_map.GetVideoTrack(video_receiver_info);
if (video_track) {
inbound_video->track_id =
DEPRECATED_RTCMediaStreamTrackStatsIDFromDirectionAndAttachment(
kDirectionInbound, stats.track_media_info_map
.GetAttachmentIdByTrack(video_track.get())
.value());
inbound_video->track_identifier = video_track->id();
}
if (!report->TryAddStats(std::move(inbound_video))) {
@@ -2142,9 +1805,6 @@ void RTCStatsCollector::ProduceVideoRTPStreamStats_n(
int attachment_id =
stats.track_media_info_map.GetAttachmentIdByTrack(video_track.get())
.value();
outbound_video->track_id =
DEPRECATED_RTCMediaStreamTrackStatsIDFromDirectionAndAttachment(
kDirectionOutbound, attachment_id);
outbound_video->media_source_id =
RTCMediaSourceStatsIDFromKindAndAttachment(cricket::MEDIA_TYPE_VIDEO,
attachment_id);


@@ -195,12 +195,6 @@ class RTCStatsCollector : public rtc::RefCountInterface {
transport_stats_by_name,
const Call::Stats& call_stats,
RTCStatsReport* report) const;
// Produces `RTCMediaStreamStats`.
void ProduceMediaStreamStats_s(Timestamp timestamp,
RTCStatsReport* report) const;
// Produces `RTCMediaStreamTrackStats`.
void ProduceMediaStreamTrackStats_s(Timestamp timestamp,
RTCStatsReport* report) const;
// Produces RTCMediaSourceStats, including RTCAudioSourceStats and
// RTCVideoSourceStats.
void ProduceMediaSourceStats_s(Timestamp timestamp,


@@ -113,15 +113,6 @@ void PrintTo(const RTCPeerConnectionStats& stats, ::std::ostream* os) {
*os << stats.ToJson();
}
void PrintTo(const DEPRECATED_RTCMediaStreamStats& stats, ::std::ostream* os) {
*os << stats.ToJson();
}
void PrintTo(const DEPRECATED_RTCMediaStreamTrackStats& stats,
::std::ostream* os) {
*os << stats.ToJson();
}
void PrintTo(const RTCInboundRtpStreamStats& stats, ::std::ostream* os) {
*os << stats.ToJson();
}
@@ -716,9 +707,6 @@ class RTCStatsCollectorTest : public ::testing::Test {
std::string inbound_rtp_id;
std::string remote_outbound_rtp_id;
std::string transport_id;
std::string sender_track_id;
std::string receiver_track_id;
std::string remote_stream_id;
std::string peer_connection_id;
std::string media_source_id;
};
@@ -765,17 +753,12 @@ class RTCStatsCollectorTest : public ::testing::Test {
// transport
graph.transport_id = "TTransportName1";
pc_->AddVideoChannel("VideoMid", "TransportName", video_media_info);
// track (sender)
// outbound-rtp's sender
graph.sender = stats_->SetupLocalTrackAndSender(
cricket::MEDIA_TYPE_VIDEO, "LocalVideoTrackID", 3, false, 50);
graph.sender_track_id =
"DEPRECATED_TO" + rtc::ToString(graph.sender->AttachmentId());
// track (receiver) and stream (remote stream)
// inbound-rtp's receiver
graph.receiver = stats_->SetupRemoteTrackAndReceiver(
cricket::MEDIA_TYPE_VIDEO, "RemoteVideoTrackID", "RemoteStreamId", 4);
graph.receiver_track_id =
"DEPRECATED_TI" + rtc::ToString(graph.receiver->AttachmentId());
graph.remote_stream_id = "DEPRECATED_SRemoteStreamId";
// peer-connection
graph.peer_connection_id = "P";
// media-source (kind: video)
@@ -783,38 +766,28 @@ class RTCStatsCollectorTest : public ::testing::Test {
// Expected stats graph:
//
// +--- track (sender)         stream (remote stream) ---> track (receiver)
// |                             ^                            ^
// |                             |                            |
// |   +--------- outbound-rtp   inbound-rtp -----------------+
// |   |            |        |    |       |
// |   |            v        v    v       v
// |   |        codec (send)  transport  codec (recv)    peer-connection
// v   v
// media-source
//  media-source                        peer-connection
//   ^
//   |
//   +--------- outbound-rtp   inbound-rtp
//                |        |    |       |
//                v        v    v       v
//           codec (send)  transport  codec (recv)
// Verify the stats graph is set up correctly.
graph.full_report = stats_->GetStatsReport();
EXPECT_EQ(graph.full_report->size(), 10u);
EXPECT_EQ(graph.full_report->size(), 7u);
EXPECT_TRUE(graph.full_report->Get(graph.send_codec_id));
EXPECT_TRUE(graph.full_report->Get(graph.recv_codec_id));
EXPECT_TRUE(graph.full_report->Get(graph.outbound_rtp_id));
EXPECT_TRUE(graph.full_report->Get(graph.inbound_rtp_id));
EXPECT_TRUE(graph.full_report->Get(graph.transport_id));
EXPECT_TRUE(graph.full_report->Get(graph.sender_track_id));
EXPECT_TRUE(graph.full_report->Get(graph.receiver_track_id));
EXPECT_TRUE(graph.full_report->Get(graph.remote_stream_id));
EXPECT_TRUE(graph.full_report->Get(graph.peer_connection_id));
EXPECT_TRUE(graph.full_report->Get(graph.media_source_id));
const auto& sender_track =
graph.full_report->Get(graph.sender_track_id)
->cast_to<DEPRECATED_RTCMediaStreamTrackStats>();
EXPECT_EQ(*sender_track.media_source_id, graph.media_source_id);
const auto& outbound_rtp = graph.full_report->Get(graph.outbound_rtp_id)
->cast_to<RTCOutboundRtpStreamStats>();
EXPECT_EQ(*outbound_rtp.media_source_id, graph.media_source_id);
EXPECT_EQ(*outbound_rtp.codec_id, graph.send_codec_id);
EXPECT_EQ(*outbound_rtp.track_id, graph.sender_track_id);
EXPECT_EQ(*outbound_rtp.transport_id, graph.transport_id);
EXPECT_TRUE(graph.full_report->Get(graph.inbound_rtp_id));
// We can't use an ASSERT in a function returning non-void, so just return.
@@ -824,7 +797,6 @@ class RTCStatsCollectorTest : public ::testing::Test {
const auto& inbound_rtp = graph.full_report->Get(graph.inbound_rtp_id)
->cast_to<RTCInboundRtpStreamStats>();
EXPECT_EQ(*inbound_rtp.codec_id, graph.recv_codec_id);
EXPECT_EQ(*inbound_rtp.track_id, graph.receiver_track_id);
EXPECT_EQ(*inbound_rtp.transport_id, graph.transport_id);
return graph;
@@ -879,22 +851,16 @@ class RTCStatsCollectorTest : public ::testing::Test {
media_info.receivers[0].sender_reports_reports_count =
kRemoteOutboundStatsReportsCount;
}
// transport
graph.transport_id = "TTransportName1";
pc_->AddVoiceChannel("VoiceMid", "TransportName", media_info);
// track (sender)
// outbound-rtp's sender
graph.sender = stats_->SetupLocalTrackAndSender(
cricket::MEDIA_TYPE_AUDIO, "LocalAudioTrackID", kLocalSsrc, false, 50);
graph.sender_track_id =
"DEPRECATED_TO" + rtc::ToString(graph.sender->AttachmentId());
// track (receiver) and stream (remote stream)
// inbound-rtp's receiver
graph.receiver = stats_->SetupRemoteTrackAndReceiver(
cricket::MEDIA_TYPE_AUDIO, "RemoteAudioTrackID", "RemoteStreamId",
kRemoteSsrc);
graph.receiver_track_id =
"DEPRECATED_TI" + rtc::ToString(graph.receiver->AttachmentId());
graph.remote_stream_id = "DEPRECATED_SRemoteStreamId";
// peer-connection
graph.peer_connection_id = "P";
// media-source (kind: video)
@@ -902,41 +868,31 @@ class RTCStatsCollectorTest : public ::testing::Test {
// Expected stats graph:
//
// +--- track (sender)         stream (remote stream) ---> track (receiver)
// |                             ^                            ^
// |                             |                            |
// |   +--------- outbound-rtp   inbound-rtp -----------------+
// |   |            |        |    |       |
// |   |            v        v    v       v
// |   |        codec (send)  transport  codec (recv)    peer-connection
// v   v
// media-source
//  media-source                        peer-connection
//   ^
//   |
//   +--------- outbound-rtp   inbound-rtp
//                |        |    |       |
//                v        v    v       v
//           codec (send)  transport  codec (recv)
// Verify the stats graph is set up correctly.
graph.full_report = stats_->GetStatsReport();
EXPECT_EQ(graph.full_report->size(), add_remote_outbound_stats ? 11u : 10u);
EXPECT_EQ(graph.full_report->size(), add_remote_outbound_stats ? 8u : 7u);
EXPECT_TRUE(graph.full_report->Get(graph.send_codec_id));
EXPECT_TRUE(graph.full_report->Get(graph.recv_codec_id));
EXPECT_TRUE(graph.full_report->Get(graph.outbound_rtp_id));
EXPECT_TRUE(graph.full_report->Get(graph.inbound_rtp_id));
EXPECT_TRUE(graph.full_report->Get(graph.transport_id));
EXPECT_TRUE(graph.full_report->Get(graph.sender_track_id));
EXPECT_TRUE(graph.full_report->Get(graph.receiver_track_id));
EXPECT_TRUE(graph.full_report->Get(graph.remote_stream_id));
EXPECT_TRUE(graph.full_report->Get(graph.peer_connection_id));
EXPECT_TRUE(graph.full_report->Get(graph.media_source_id));
// `graph.remote_outbound_rtp_id` is omitted on purpose so that expectations
// can be added by the caller depending on what value it sets for the
// `add_remote_outbound_stats` argument.
const auto& sender_track =
graph.full_report->Get(graph.sender_track_id)
->cast_to<DEPRECATED_RTCMediaStreamTrackStats>();
EXPECT_EQ(*sender_track.media_source_id, graph.media_source_id);
const auto& outbound_rtp = graph.full_report->Get(graph.outbound_rtp_id)
->cast_to<RTCOutboundRtpStreamStats>();
EXPECT_EQ(*outbound_rtp.media_source_id, graph.media_source_id);
EXPECT_EQ(*outbound_rtp.codec_id, graph.send_codec_id);
EXPECT_EQ(*outbound_rtp.track_id, graph.sender_track_id);
EXPECT_EQ(*outbound_rtp.transport_id, graph.transport_id);
EXPECT_TRUE(graph.full_report->Get(graph.inbound_rtp_id));
// We can't use ASSERT in a function with a return value.
@@ -946,7 +902,6 @@ class RTCStatsCollectorTest : public ::testing::Test {
const auto& inbound_rtp = graph.full_report->Get(graph.inbound_rtp_id)
->cast_to<RTCInboundRtpStreamStats>();
EXPECT_EQ(*inbound_rtp.codec_id, graph.recv_codec_id);
EXPECT_EQ(*inbound_rtp.track_id, graph.receiver_track_id);
EXPECT_EQ(*inbound_rtp.transport_id, graph.transport_id);
return graph;
@@ -2193,277 +2148,6 @@ TEST_F(RTCStatsCollectorTest, CollectRTCPeerConnectionStats) {
}
}
TEST_F(RTCStatsCollectorTest,
CollectLocalRTCMediaStreamStatsAndRTCMediaStreamTrackStats_Audio) {
rtc::scoped_refptr<MediaStream> local_stream =
MediaStream::Create("LocalStreamId");
pc_->mutable_local_streams()->AddStream(local_stream);
// Local audio track
rtc::scoped_refptr<MediaStreamTrackInterface> local_audio_track =
CreateFakeTrack(cricket::MEDIA_TYPE_AUDIO, "LocalAudioTrackID",
MediaStreamTrackInterface::kEnded);
local_stream->AddTrack(rtc::scoped_refptr<AudioTrackInterface>(
static_cast<AudioTrackInterface*>(local_audio_track.get())));
cricket::VoiceSenderInfo voice_sender_info_ssrc1;
voice_sender_info_ssrc1.local_stats.push_back(cricket::SsrcSenderInfo());
voice_sender_info_ssrc1.local_stats[0].ssrc = 1;
voice_sender_info_ssrc1.apm_statistics.echo_return_loss = 42.0;
voice_sender_info_ssrc1.apm_statistics.echo_return_loss_enhancement = 52.0;
stats_->CreateMockRtpSendersReceiversAndChannels(
{std::make_pair(local_audio_track.get(), voice_sender_info_ssrc1)}, {},
{}, {}, {local_stream->id()}, {});
rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
DEPRECATED_RTCMediaStreamStats expected_local_stream(
IdForType<DEPRECATED_RTCMediaStreamStats>(report.get()),
report->timestamp());
expected_local_stream.stream_identifier = local_stream->id();
expected_local_stream.track_ids = {
IdForType<DEPRECATED_RTCMediaStreamTrackStats>(report.get())};
ASSERT_TRUE(report->Get(expected_local_stream.id()))
<< "Did not find " << expected_local_stream.id() << " in "
<< report->ToJson();
EXPECT_EQ(expected_local_stream,
report->Get(expected_local_stream.id())
->cast_to<DEPRECATED_RTCMediaStreamStats>());
DEPRECATED_RTCMediaStreamTrackStats expected_local_audio_track_ssrc1(
IdForType<DEPRECATED_RTCMediaStreamTrackStats>(report.get()),
report->timestamp(), RTCMediaStreamTrackKind::kAudio);
expected_local_audio_track_ssrc1.track_identifier = local_audio_track->id();
expected_local_audio_track_ssrc1.media_source_id =
"SA11"; // Attachment ID = SSRC + 10
expected_local_audio_track_ssrc1.remote_source = false;
expected_local_audio_track_ssrc1.ended = true;
expected_local_audio_track_ssrc1.detached = false;
expected_local_audio_track_ssrc1.echo_return_loss = 42.0;
expected_local_audio_track_ssrc1.echo_return_loss_enhancement = 52.0;
ASSERT_TRUE(report->Get(expected_local_audio_track_ssrc1.id()))
<< "Did not find " << expected_local_audio_track_ssrc1.id() << " in "
<< report->ToJson();
EXPECT_EQ(expected_local_audio_track_ssrc1,
report->Get(expected_local_audio_track_ssrc1.id())
->cast_to<DEPRECATED_RTCMediaStreamTrackStats>());
}
TEST_F(RTCStatsCollectorTest,
CollectRemoteRTCMediaStreamStatsAndRTCMediaStreamTrackStats_Audio) {
rtc::scoped_refptr<MediaStream> remote_stream =
MediaStream::Create("RemoteStreamId");
pc_->mutable_remote_streams()->AddStream(remote_stream);
// Remote audio track
rtc::scoped_refptr<MediaStreamTrackInterface> remote_audio_track =
CreateFakeTrack(cricket::MEDIA_TYPE_AUDIO, "RemoteAudioTrackID",
MediaStreamTrackInterface::kLive);
remote_stream->AddTrack(rtc::scoped_refptr<AudioTrackInterface>(
static_cast<AudioTrackInterface*>(remote_audio_track.get())));
cricket::VoiceReceiverInfo voice_receiver_info;
voice_receiver_info.local_stats.push_back(cricket::SsrcReceiverInfo());
voice_receiver_info.local_stats[0].ssrc = 3;
voice_receiver_info.audio_level = 16383; // [0,32767]
voice_receiver_info.total_output_energy = 0.125;
voice_receiver_info.total_samples_received = 4567;
voice_receiver_info.total_output_duration = 0.25;
voice_receiver_info.concealed_samples = 123;
voice_receiver_info.concealment_events = 12;
voice_receiver_info.inserted_samples_for_deceleration = 987;
voice_receiver_info.removed_samples_for_acceleration = 876;
voice_receiver_info.silent_concealed_samples = 765;
voice_receiver_info.jitter_buffer_delay_seconds = 3.456;
voice_receiver_info.jitter_buffer_emitted_count = 13;
stats_->CreateMockRtpSendersReceiversAndChannels(
{}, {std::make_pair(remote_audio_track.get(), voice_receiver_info)}, {},
{}, {}, {remote_stream});
rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
DEPRECATED_RTCMediaStreamStats expected_remote_stream(
IdForType<DEPRECATED_RTCMediaStreamStats>(report.get()),
report->timestamp());
expected_remote_stream.stream_identifier = remote_stream->id();
expected_remote_stream.track_ids = std::vector<std::string>(
{IdForType<DEPRECATED_RTCMediaStreamTrackStats>(report.get())});
ASSERT_TRUE(report->Get(expected_remote_stream.id()))
<< "Did not find " << expected_remote_stream.id() << " in "
<< report->ToJson();
EXPECT_EQ(expected_remote_stream,
report->Get(expected_remote_stream.id())
->cast_to<DEPRECATED_RTCMediaStreamStats>());
DEPRECATED_RTCMediaStreamTrackStats expected_remote_audio_track(
IdForType<DEPRECATED_RTCMediaStreamTrackStats>(report.get()),
report->timestamp(), RTCMediaStreamTrackKind::kAudio);
expected_remote_audio_track.track_identifier = remote_audio_track->id();
// `expected_remote_audio_track.media_source_id` should be undefined
// because the track is remote.
expected_remote_audio_track.remote_source = true;
expected_remote_audio_track.ended = false;
expected_remote_audio_track.detached = false;
expected_remote_audio_track.audio_level = 16383.0 / 32767.0; // [0,1]
expected_remote_audio_track.total_audio_energy = 0.125;
expected_remote_audio_track.total_samples_received = 4567;
expected_remote_audio_track.total_samples_duration = 0.25;
expected_remote_audio_track.concealed_samples = 123;
expected_remote_audio_track.concealment_events = 12;
expected_remote_audio_track.inserted_samples_for_deceleration = 987;
expected_remote_audio_track.removed_samples_for_acceleration = 876;
expected_remote_audio_track.silent_concealed_samples = 765;
expected_remote_audio_track.jitter_buffer_delay = 3.456;
expected_remote_audio_track.jitter_buffer_emitted_count = 13;
ASSERT_TRUE(report->Get(expected_remote_audio_track.id()));
EXPECT_EQ(expected_remote_audio_track,
report->Get(expected_remote_audio_track.id())
->cast_to<DEPRECATED_RTCMediaStreamTrackStats>());
}
TEST_F(RTCStatsCollectorTest,
CollectLocalRTCMediaStreamStatsAndRTCMediaStreamTrackStats_Video) {
rtc::scoped_refptr<MediaStream> local_stream =
MediaStream::Create("LocalStreamId");
pc_->mutable_local_streams()->AddStream(local_stream);
// Local video track
rtc::scoped_refptr<MediaStreamTrackInterface> local_video_track =
CreateFakeTrack(cricket::MEDIA_TYPE_VIDEO, "LocalVideoTrackID",
MediaStreamTrackInterface::kLive);
local_stream->AddTrack(rtc::scoped_refptr<VideoTrackInterface>(
static_cast<VideoTrackInterface*>(local_video_track.get())));
cricket::VideoSenderInfo video_sender_info_ssrc1;
video_sender_info_ssrc1.local_stats.push_back(cricket::SsrcSenderInfo());
video_sender_info_ssrc1.local_stats[0].ssrc = 1;
video_sender_info_ssrc1.send_frame_width = 1234;
video_sender_info_ssrc1.send_frame_height = 4321;
video_sender_info_ssrc1.frames_encoded = 11;
video_sender_info_ssrc1.huge_frames_sent = 1;
stats_->CreateMockRtpSendersReceiversAndChannels(
{}, {},
{std::make_pair(local_video_track.get(), video_sender_info_ssrc1)}, {},
{local_stream->id()}, {});
rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
auto stats_of_my_type =
report->GetStatsOfType<DEPRECATED_RTCMediaStreamStats>();
ASSERT_EQ(1U, stats_of_my_type.size()) << "No stream in " << report->ToJson();
auto stats_of_track_type =
report->GetStatsOfType<DEPRECATED_RTCMediaStreamTrackStats>();
ASSERT_EQ(1U, stats_of_track_type.size())
<< "Wrong number of tracks in " << report->ToJson();
DEPRECATED_RTCMediaStreamStats expected_local_stream(
stats_of_my_type[0]->id(), report->timestamp());
expected_local_stream.stream_identifier = local_stream->id();
expected_local_stream.track_ids =
std::vector<std::string>({stats_of_track_type[0]->id()});
ASSERT_TRUE(report->Get(expected_local_stream.id()));
EXPECT_EQ(expected_local_stream,
report->Get(expected_local_stream.id())
->cast_to<DEPRECATED_RTCMediaStreamStats>());
DEPRECATED_RTCMediaStreamTrackStats expected_local_video_track_ssrc1(
stats_of_track_type[0]->id(), report->timestamp(),
RTCMediaStreamTrackKind::kVideo);
expected_local_video_track_ssrc1.track_identifier = local_video_track->id();
expected_local_video_track_ssrc1.media_source_id =
"SV11"; // Attachment ID = SSRC + 10
expected_local_video_track_ssrc1.remote_source = false;
expected_local_video_track_ssrc1.ended = false;
expected_local_video_track_ssrc1.detached = false;
expected_local_video_track_ssrc1.frame_width = 1234;
expected_local_video_track_ssrc1.frame_height = 4321;
expected_local_video_track_ssrc1.frames_sent = 11;
expected_local_video_track_ssrc1.huge_frames_sent = 1;
ASSERT_TRUE(report->Get(expected_local_video_track_ssrc1.id()));
EXPECT_EQ(expected_local_video_track_ssrc1,
report->Get(expected_local_video_track_ssrc1.id())
->cast_to<DEPRECATED_RTCMediaStreamTrackStats>());
}
TEST_F(RTCStatsCollectorTest,
CollectRemoteRTCMediaStreamStatsAndRTCMediaStreamTrackStats_Video) {
rtc::scoped_refptr<MediaStream> remote_stream =
MediaStream::Create("RemoteStreamId");
pc_->mutable_remote_streams()->AddStream(remote_stream);
// Remote video track with values
rtc::scoped_refptr<MediaStreamTrackInterface> remote_video_track_ssrc3 =
CreateFakeTrack(cricket::MEDIA_TYPE_VIDEO, "RemoteVideoTrackID3",
MediaStreamTrackInterface::kEnded);
remote_stream->AddTrack(rtc::scoped_refptr<VideoTrackInterface>(
static_cast<VideoTrackInterface*>(remote_video_track_ssrc3.get())));
cricket::VideoReceiverInfo video_receiver_info_ssrc3;
video_receiver_info_ssrc3.local_stats.push_back(cricket::SsrcReceiverInfo());
video_receiver_info_ssrc3.local_stats[0].ssrc = 3;
video_receiver_info_ssrc3.frame_width = 6789;
video_receiver_info_ssrc3.frame_height = 9876;
video_receiver_info_ssrc3.jitter_buffer_delay_seconds = 2.5;
video_receiver_info_ssrc3.jitter_buffer_emitted_count = 25;
video_receiver_info_ssrc3.frames_received = 1000;
video_receiver_info_ssrc3.frames_decoded = 995;
video_receiver_info_ssrc3.frames_dropped = 10;
video_receiver_info_ssrc3.frames_rendered = 990;
stats_->CreateMockRtpSendersReceiversAndChannels(
{}, {}, {},
{std::make_pair(remote_video_track_ssrc3.get(),
video_receiver_info_ssrc3)},
{}, {remote_stream});
rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
auto stats_of_my_type =
report->GetStatsOfType<DEPRECATED_RTCMediaStreamStats>();
ASSERT_EQ(1U, stats_of_my_type.size()) << "No stream in " << report->ToJson();
auto stats_of_track_type =
report->GetStatsOfType<DEPRECATED_RTCMediaStreamTrackStats>();
ASSERT_EQ(1U, stats_of_track_type.size())
<< "Wrong number of tracks in " << report->ToJson();
ASSERT_TRUE(*(stats_of_track_type[0]->remote_source));
DEPRECATED_RTCMediaStreamStats expected_remote_stream(
stats_of_my_type[0]->id(), report->timestamp());
expected_remote_stream.stream_identifier = remote_stream->id();
expected_remote_stream.track_ids =
std::vector<std::string>({stats_of_track_type[0]->id()});
ASSERT_TRUE(report->Get(expected_remote_stream.id()));
EXPECT_EQ(expected_remote_stream,
report->Get(expected_remote_stream.id())
->cast_to<DEPRECATED_RTCMediaStreamStats>());
DEPRECATED_RTCMediaStreamTrackStats expected_remote_video_track_ssrc3(
stats_of_track_type[0]->id(), report->timestamp(),
RTCMediaStreamTrackKind::kVideo);
expected_remote_video_track_ssrc3.track_identifier =
remote_video_track_ssrc3->id();
// `expected_remote_video_track_ssrc3.media_source_id` should be undefined
// because the track is remote.
expected_remote_video_track_ssrc3.remote_source = true;
expected_remote_video_track_ssrc3.ended = true;
expected_remote_video_track_ssrc3.detached = false;
expected_remote_video_track_ssrc3.frame_width = 6789;
expected_remote_video_track_ssrc3.frame_height = 9876;
expected_remote_video_track_ssrc3.jitter_buffer_delay = 2.5;
expected_remote_video_track_ssrc3.jitter_buffer_emitted_count = 25;
expected_remote_video_track_ssrc3.frames_received = 1000;
expected_remote_video_track_ssrc3.frames_decoded = 995;
expected_remote_video_track_ssrc3.frames_dropped = 10;
ASSERT_TRUE(report->Get(expected_remote_video_track_ssrc3.id()));
EXPECT_EQ(expected_remote_video_track_ssrc3,
report->Get(expected_remote_video_track_ssrc3.id())
->cast_to<DEPRECATED_RTCMediaStreamTrackStats>());
}
TEST_F(RTCStatsCollectorTest, CollectRTCInboundRtpStreamStats_Audio) {
cricket::VoiceMediaInfo voice_media_info;
@@ -2523,17 +2207,12 @@ TEST_F(RTCStatsCollectorTest, CollectRTCInboundRtpStreamStats_Audio) {
rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
auto stats_of_track_type =
report->GetStatsOfType<DEPRECATED_RTCMediaStreamTrackStats>();
ASSERT_EQ(1U, stats_of_track_type.size());
RTCInboundRtpStreamStats expected_audio("ITTransportName1A1",
report->timestamp());
expected_audio.ssrc = 1;
expected_audio.kind = "audio";
expected_audio.track_identifier = "RemoteAudioTrackID";
expected_audio.mid = "AudioMid";
expected_audio.track_id = stats_of_track_type[0]->id();
expected_audio.transport_id = "TTransportName1";
expected_audio.codec_id = "CITTransportName1_42";
expected_audio.packets_received = 2;
@@ -2585,7 +2264,6 @@ TEST_F(RTCStatsCollectorTest, CollectRTCInboundRtpStreamStats_Audio) {
EXPECT_EQ(
report->Get(expected_audio.id())->cast_to<RTCInboundRtpStreamStats>(),
expected_audio);
EXPECT_TRUE(report->Get(*expected_audio.track_id));
EXPECT_TRUE(report->Get(*expected_audio.transport_id));
EXPECT_TRUE(report->Get(*expected_audio.codec_id));
}
@@ -2703,8 +2381,6 @@ TEST_F(RTCStatsCollectorTest, CollectRTCInboundRtpStreamStats_Video) {
expected_video.kind = "video";
expected_video.track_identifier = "RemoteVideoTrackID";
expected_video.mid = "VideoMid";
expected_video.track_id =
IdForType<DEPRECATED_RTCMediaStreamTrackStats>(report.get());
expected_video.transport_id = "TTransportName1";
expected_video.codec_id = "CITTransportName1_42";
expected_video.fir_count = 5;
@@ -2770,7 +2446,6 @@ TEST_F(RTCStatsCollectorTest, CollectRTCInboundRtpStreamStats_Video) {
EXPECT_EQ(
report->Get(expected_video.id())->cast_to<RTCInboundRtpStreamStats>(),
expected_video);
EXPECT_TRUE(report->Get(*expected_video.track_id));
EXPECT_TRUE(report->Get(*expected_video.transport_id));
EXPECT_TRUE(report->Get(*expected_video.codec_id));
}
@@ -2876,8 +2551,6 @@ TEST_F(RTCStatsCollectorTest, CollectRTCOutboundRtpStreamStats_Audio) {
expected_audio.mid = "AudioMid";
expected_audio.ssrc = 1;
expected_audio.kind = "audio";
expected_audio.track_id =
IdForType<DEPRECATED_RTCMediaStreamTrackStats>(report.get());
expected_audio.transport_id = "TTransportName1";
expected_audio.codec_id = "COTTransportName1_42";
expected_audio.packets_sent = 2;
@@ -2899,7 +2572,6 @@ TEST_F(RTCStatsCollectorTest, CollectRTCOutboundRtpStreamStats_Audio) {
EXPECT_EQ(
report->Get(expected_audio.id())->cast_to<RTCOutboundRtpStreamStats>(),
expected_audio);
EXPECT_TRUE(report->Get(*expected_audio.track_id));
EXPECT_TRUE(report->Get(*expected_audio.transport_id));
EXPECT_TRUE(report->Get(*expected_audio.codec_id));
}
@@ -2960,9 +2632,6 @@ TEST_F(RTCStatsCollectorTest, CollectRTCOutboundRtpStreamStats_Video) {
auto stats_of_my_type = report->GetStatsOfType<RTCOutboundRtpStreamStats>();
ASSERT_EQ(1U, stats_of_my_type.size());
auto stats_of_track_type =
report->GetStatsOfType<DEPRECATED_RTCMediaStreamTrackStats>();
ASSERT_EQ(1U, stats_of_track_type.size());
RTCOutboundRtpStreamStats expected_video(stats_of_my_type[0]->id(),
report->timestamp());
@ -2971,7 +2640,6 @@ TEST_F(RTCStatsCollectorTest, CollectRTCOutboundRtpStreamStats_Video) {
expected_video.mid = "VideoMid"; expected_video.mid = "VideoMid";
expected_video.ssrc = 1; expected_video.ssrc = 1;
expected_video.kind = "video"; expected_video.kind = "video";
expected_video.track_id = stats_of_track_type[0]->id();
expected_video.transport_id = "TTransportName1"; expected_video.transport_id = "TTransportName1";
expected_video.codec_id = "COTTransportName1_42"; expected_video.codec_id = "COTTransportName1_42";
expected_video.fir_count = 2; expected_video.fir_count = 2;
@ -3028,7 +2696,6 @@ TEST_F(RTCStatsCollectorTest, CollectRTCOutboundRtpStreamStats_Video) {
EXPECT_EQ( EXPECT_EQ(
report->Get(expected_video.id())->cast_to<RTCOutboundRtpStreamStats>(), report->Get(expected_video.id())->cast_to<RTCOutboundRtpStreamStats>(),
expected_video); expected_video);
EXPECT_TRUE(report->Get(*expected_video.track_id));
EXPECT_TRUE(report->Get(*expected_video.transport_id)); EXPECT_TRUE(report->Get(*expected_video.transport_id));
EXPECT_TRUE(report->Get(*expected_video.codec_id)); EXPECT_TRUE(report->Get(*expected_video.codec_id));
} }
@ -3316,8 +2983,6 @@ TEST_F(RTCStatsCollectorTest, CollectNoStreamRTCOutboundRtpStreamStats_Audio) {
expected_audio.mid = "AudioMid"; expected_audio.mid = "AudioMid";
expected_audio.ssrc = 1; expected_audio.ssrc = 1;
expected_audio.kind = "audio"; expected_audio.kind = "audio";
expected_audio.track_id =
IdForType<DEPRECATED_RTCMediaStreamTrackStats>(report.get());
expected_audio.transport_id = "TTransportName1"; expected_audio.transport_id = "TTransportName1";
expected_audio.codec_id = "COTTransportName1_42"; expected_audio.codec_id = "COTTransportName1_42";
expected_audio.packets_sent = 2; expected_audio.packets_sent = 2;
@ -3333,7 +2998,6 @@ TEST_F(RTCStatsCollectorTest, CollectNoStreamRTCOutboundRtpStreamStats_Audio) {
EXPECT_EQ( EXPECT_EQ(
report->Get(expected_audio.id())->cast_to<RTCOutboundRtpStreamStats>(), report->Get(expected_audio.id())->cast_to<RTCOutboundRtpStreamStats>(),
expected_audio); expected_audio);
EXPECT_TRUE(report->Get(*expected_audio.track_id));
EXPECT_TRUE(report->Get(*expected_audio.transport_id)); EXPECT_TRUE(report->Get(*expected_audio.transport_id));
EXPECT_TRUE(report->Get(*expected_audio.codec_id)); EXPECT_TRUE(report->Get(*expected_audio.codec_id));
} }
@ -3880,24 +3544,6 @@ TEST_F(RTCStatsCollectorTest, CollectEchoReturnLossFromTrackAudioProcessor) {
rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport(); rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
DEPRECATED_RTCMediaStreamTrackStats expected_local_audio_track_ssrc1(
IdForType<DEPRECATED_RTCMediaStreamTrackStats>(report.get()),
report->timestamp(), RTCMediaStreamTrackKind::kAudio);
expected_local_audio_track_ssrc1.track_identifier = local_audio_track->id();
expected_local_audio_track_ssrc1.media_source_id =
"SA11"; // Attachment ID = SSRC + 10
expected_local_audio_track_ssrc1.remote_source = false;
expected_local_audio_track_ssrc1.ended = true;
expected_local_audio_track_ssrc1.detached = false;
expected_local_audio_track_ssrc1.echo_return_loss = 2.0;
expected_local_audio_track_ssrc1.echo_return_loss_enhancement = 3.0;
ASSERT_TRUE(report->Get(expected_local_audio_track_ssrc1.id()))
<< "Did not find " << expected_local_audio_track_ssrc1.id() << " in "
<< report->ToJson();
EXPECT_EQ(expected_local_audio_track_ssrc1,
report->Get(expected_local_audio_track_ssrc1.id())
->cast_to<DEPRECATED_RTCMediaStreamTrackStats>());
RTCAudioSourceStats expected_audio("SA11", report->timestamp()); RTCAudioSourceStats expected_audio("SA11", report->timestamp());
expected_audio.track_identifier = "LocalAudioTrackID"; expected_audio.track_identifier = "LocalAudioTrackID";
expected_audio.kind = "audio"; expected_audio.kind = "audio";
@ -3916,28 +3562,23 @@ TEST_F(RTCStatsCollectorTest, GetStatsWithSenderSelector) {
ExampleStatsGraph graph = SetupExampleStatsGraphForSelectorTests(); ExampleStatsGraph graph = SetupExampleStatsGraphForSelectorTests();
// Expected stats graph when filtered by sender: // Expected stats graph when filtered by sender:
// //
// +--- track (sender)
// | ^
// | |
// | +--------- outbound-rtp
// | | | |
// | | v v
// | | codec (send) transport
// v v
// media-source // media-source
// ^
// |
// +--------- outbound-rtp
// | |
// v v
// codec (send) transport
rtc::scoped_refptr<const RTCStatsReport> sender_report = rtc::scoped_refptr<const RTCStatsReport> sender_report =
stats_->GetStatsReportWithSenderSelector(graph.sender); stats_->GetStatsReportWithSenderSelector(graph.sender);
EXPECT_TRUE(sender_report); EXPECT_TRUE(sender_report);
EXPECT_EQ(sender_report->timestamp(), graph.full_report->timestamp()); EXPECT_EQ(sender_report->timestamp(), graph.full_report->timestamp());
EXPECT_EQ(sender_report->size(), 5u); EXPECT_EQ(sender_report->size(), 4u);
EXPECT_TRUE(sender_report->Get(graph.send_codec_id)); EXPECT_TRUE(sender_report->Get(graph.send_codec_id));
EXPECT_FALSE(sender_report->Get(graph.recv_codec_id)); EXPECT_FALSE(sender_report->Get(graph.recv_codec_id));
EXPECT_TRUE(sender_report->Get(graph.outbound_rtp_id)); EXPECT_TRUE(sender_report->Get(graph.outbound_rtp_id));
EXPECT_FALSE(sender_report->Get(graph.inbound_rtp_id)); EXPECT_FALSE(sender_report->Get(graph.inbound_rtp_id));
EXPECT_TRUE(sender_report->Get(graph.transport_id)); EXPECT_TRUE(sender_report->Get(graph.transport_id));
EXPECT_TRUE(sender_report->Get(graph.sender_track_id));
EXPECT_FALSE(sender_report->Get(graph.receiver_track_id));
EXPECT_FALSE(sender_report->Get(graph.remote_stream_id));
EXPECT_FALSE(sender_report->Get(graph.peer_connection_id)); EXPECT_FALSE(sender_report->Get(graph.peer_connection_id));
EXPECT_TRUE(sender_report->Get(graph.media_source_id)); EXPECT_TRUE(sender_report->Get(graph.media_source_id));
} }
@ -3946,26 +3587,22 @@ TEST_F(RTCStatsCollectorTest, GetStatsWithReceiverSelector) {
ExampleStatsGraph graph = SetupExampleStatsGraphForSelectorTests(); ExampleStatsGraph graph = SetupExampleStatsGraphForSelectorTests();
// Expected stats graph when filtered by receiver: // Expected stats graph when filtered by receiver:
// //
// track (receiver) //
// ^ //
// | // inbound-rtp
// inbound-rtp ---------------+
// | | // | |
// v v // v v
// transport codec (recv) // transport codec (recv)
rtc::scoped_refptr<const RTCStatsReport> receiver_report = rtc::scoped_refptr<const RTCStatsReport> receiver_report =
stats_->GetStatsReportWithReceiverSelector(graph.receiver); stats_->GetStatsReportWithReceiverSelector(graph.receiver);
EXPECT_TRUE(receiver_report); EXPECT_TRUE(receiver_report);
EXPECT_EQ(receiver_report->size(), 4u); EXPECT_EQ(receiver_report->size(), 3u);
EXPECT_EQ(receiver_report->timestamp(), graph.full_report->timestamp()); EXPECT_EQ(receiver_report->timestamp(), graph.full_report->timestamp());
EXPECT_FALSE(receiver_report->Get(graph.send_codec_id)); EXPECT_FALSE(receiver_report->Get(graph.send_codec_id));
EXPECT_TRUE(receiver_report->Get(graph.recv_codec_id)); EXPECT_TRUE(receiver_report->Get(graph.recv_codec_id));
EXPECT_FALSE(receiver_report->Get(graph.outbound_rtp_id)); EXPECT_FALSE(receiver_report->Get(graph.outbound_rtp_id));
EXPECT_TRUE(receiver_report->Get(graph.inbound_rtp_id)); EXPECT_TRUE(receiver_report->Get(graph.inbound_rtp_id));
EXPECT_TRUE(receiver_report->Get(graph.transport_id)); EXPECT_TRUE(receiver_report->Get(graph.transport_id));
EXPECT_FALSE(receiver_report->Get(graph.sender_track_id));
EXPECT_TRUE(receiver_report->Get(graph.receiver_track_id));
EXPECT_FALSE(receiver_report->Get(graph.remote_stream_id));
EXPECT_FALSE(receiver_report->Get(graph.peer_connection_id)); EXPECT_FALSE(receiver_report->Get(graph.peer_connection_id));
EXPECT_FALSE(receiver_report->Get(graph.media_source_id)); EXPECT_FALSE(receiver_report->Get(graph.media_source_id));
} }
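The two selector tests above mirror the sender/receiver-scoped getStats() exposed on the public API, which now returns the smaller graphs drawn in the comments. A rough sketch of that public path; `pc` and `sender` are assumed to be live objects obtained elsewhere, and `LogStatsCallback` / `RequestSenderStats` are illustrative names:

#include "api/make_ref_counted.h"
#include "api/peer_connection_interface.h"
#include "api/stats/rtc_stats_collector_callback.h"
#include "rtc_base/logging.h"

// Sketch only: request a report scoped to one sender.
class LogStatsCallback : public webrtc::RTCStatsCollectorCallback {
 public:
  void OnStatsDelivered(
      const rtc::scoped_refptr<const webrtc::RTCStatsReport>& report) override {
    // Only stats reachable from the selected sender are present, i.e.
    // outbound-rtp, codec (send), transport and media-source.
    RTC_LOG(LS_INFO) << report->ToJson();
  }
};

void RequestSenderStats(webrtc::PeerConnectionInterface* pc,
                        rtc::scoped_refptr<webrtc::RtpSenderInterface> sender) {
  pc->GetStats(sender, rtc::make_ref_counted<LogStatsCallback>());
}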
@ -4001,8 +3638,6 @@ TEST_F(RTCStatsCollectorTest, RtpIsMissingWhileSsrcIsZero) {
rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport(); rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
auto tracks = report->GetStatsOfType<DEPRECATED_RTCMediaStreamTrackStats>();
EXPECT_EQ(1U, tracks.size());
auto outbound_rtps = report->GetStatsOfType<RTCOutboundRtpStreamStats>(); auto outbound_rtps = report->GetStatsOfType<RTCOutboundRtpStreamStats>();
EXPECT_TRUE(outbound_rtps.empty()); EXPECT_TRUE(outbound_rtps.empty());
} }
@ -4021,9 +3656,6 @@ TEST_F(RTCStatsCollectorTest, DoNotCrashIfSsrcIsKnownButInfosAreStillMissing) {
// We do not generate any matching voice_sender_info stats. // We do not generate any matching voice_sender_info stats.
rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport(); rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
std::vector<const DEPRECATED_RTCMediaStreamTrackStats*> track_stats =
report->GetStatsOfType<DEPRECATED_RTCMediaStreamTrackStats>();
EXPECT_EQ(1U, track_stats.size());
auto outbound_rtps = report->GetStatsOfType<RTCOutboundRtpStreamStats>(); auto outbound_rtps = report->GetStatsOfType<RTCOutboundRtpStreamStats>();
EXPECT_TRUE(outbound_rtps.empty()); EXPECT_TRUE(outbound_rtps.empty());
} }

View file

@ -339,8 +339,6 @@ class RTCStatsReportVerifier {
stats_types.insert(RTCIceCandidatePairStats::kType); stats_types.insert(RTCIceCandidatePairStats::kType);
stats_types.insert(RTCLocalIceCandidateStats::kType); stats_types.insert(RTCLocalIceCandidateStats::kType);
stats_types.insert(RTCRemoteIceCandidateStats::kType); stats_types.insert(RTCRemoteIceCandidateStats::kType);
stats_types.insert(DEPRECATED_RTCMediaStreamStats::kType);
stats_types.insert(DEPRECATED_RTCMediaStreamTrackStats::kType);
stats_types.insert(RTCPeerConnectionStats::kType); stats_types.insert(RTCPeerConnectionStats::kType);
stats_types.insert(RTCInboundRtpStreamStats::kType); stats_types.insert(RTCInboundRtpStreamStats::kType);
stats_types.insert(RTCOutboundRtpStreamStats::kType); stats_types.insert(RTCOutboundRtpStreamStats::kType);
@ -380,12 +378,6 @@ class RTCStatsReportVerifier {
} else if (stats.type() == RTCRemoteIceCandidateStats::kType) { } else if (stats.type() == RTCRemoteIceCandidateStats::kType) {
verify_successful &= VerifyRTCRemoteIceCandidateStats( verify_successful &= VerifyRTCRemoteIceCandidateStats(
stats.cast_to<RTCRemoteIceCandidateStats>()); stats.cast_to<RTCRemoteIceCandidateStats>());
} else if (stats.type() == DEPRECATED_RTCMediaStreamStats::kType) {
verify_successful &= DEPRECATED_VerifyRTCMediaStreamStats(
stats.cast_to<DEPRECATED_RTCMediaStreamStats>());
} else if (stats.type() == DEPRECATED_RTCMediaStreamTrackStats::kType) {
verify_successful &= DEPRECATED_VerifyRTCMediaStreamTrackStats(
stats.cast_to<DEPRECATED_RTCMediaStreamTrackStats>());
} else if (stats.type() == RTCPeerConnectionStats::kType) { } else if (stats.type() == RTCPeerConnectionStats::kType) {
verify_successful &= VerifyRTCPeerConnectionStats( verify_successful &= VerifyRTCPeerConnectionStats(
stats.cast_to<RTCPeerConnectionStats>()); stats.cast_to<RTCPeerConnectionStats>());
@ -565,146 +557,6 @@ class RTCStatsReportVerifier {
return VerifyRTCIceCandidateStats(remote_candidate); return VerifyRTCIceCandidateStats(remote_candidate);
} }
bool DEPRECATED_VerifyRTCMediaStreamStats(
const DEPRECATED_RTCMediaStreamStats& media_stream) {
RTCStatsVerifier verifier(report_.get(), &media_stream);
verifier.TestMemberIsDefined(media_stream.stream_identifier);
verifier.TestMemberIsIDReference(
media_stream.track_ids, DEPRECATED_RTCMediaStreamTrackStats::kType);
return verifier.ExpectAllMembersSuccessfullyTested();
}
bool DEPRECATED_VerifyRTCMediaStreamTrackStats(
const DEPRECATED_RTCMediaStreamTrackStats& media_stream_track) {
RTCStatsVerifier verifier(report_.get(), &media_stream_track);
verifier.TestMemberIsDefined(media_stream_track.track_identifier);
verifier.TestMemberIsDefined(media_stream_track.remote_source);
verifier.TestMemberIsDefined(media_stream_track.ended);
verifier.TestMemberIsDefined(media_stream_track.detached);
verifier.TestMemberIsDefined(media_stream_track.kind);
RTC_DCHECK(media_stream_track.remote_source.is_defined());
// Video or audio media stream track?
if (*media_stream_track.kind == RTCMediaStreamTrackKind::kVideo) {
// The type of the referenced media source depends on kind.
if (*media_stream_track.remote_source) {
verifier.TestMemberIsUndefined(media_stream_track.media_source_id);
verifier.TestMemberIsNonNegative<double>(
media_stream_track.jitter_buffer_delay);
verifier.TestMemberIsNonNegative<uint64_t>(
media_stream_track.jitter_buffer_emitted_count);
verifier.TestMemberIsUndefined(media_stream_track.frames_sent);
verifier.TestMemberIsUndefined(media_stream_track.huge_frames_sent);
verifier.TestMemberIsNonNegative<uint32_t>(
media_stream_track.frames_received);
verifier.TestMemberIsNonNegative<uint32_t>(
media_stream_track.frames_decoded);
verifier.TestMemberIsNonNegative<uint32_t>(
media_stream_track.frames_dropped);
} else {
verifier.TestMemberIsIDReference(media_stream_track.media_source_id,
RTCVideoSourceStats::kType);
// Local tracks have no jitter buffer.
verifier.TestMemberIsUndefined(media_stream_track.jitter_buffer_delay);
verifier.TestMemberIsUndefined(
media_stream_track.jitter_buffer_emitted_count);
verifier.TestMemberIsNonNegative<uint32_t>(
media_stream_track.frames_sent);
verifier.TestMemberIsNonNegative<uint32_t>(
media_stream_track.huge_frames_sent);
verifier.TestMemberIsUndefined(media_stream_track.frames_received);
verifier.TestMemberIsUndefined(media_stream_track.frames_decoded);
verifier.TestMemberIsUndefined(media_stream_track.frames_dropped);
}
// Video-only members
verifier.TestMemberIsNonNegative<uint32_t>(
media_stream_track.frame_width);
verifier.TestMemberIsNonNegative<uint32_t>(
media_stream_track.frame_height);
// Audio-only members should be undefined
verifier.TestMemberIsUndefined(media_stream_track.audio_level);
verifier.TestMemberIsUndefined(media_stream_track.echo_return_loss);
verifier.TestMemberIsUndefined(
media_stream_track.echo_return_loss_enhancement);
verifier.TestMemberIsUndefined(media_stream_track.total_audio_energy);
verifier.TestMemberIsUndefined(media_stream_track.total_samples_duration);
verifier.TestMemberIsUndefined(media_stream_track.total_samples_received);
verifier.TestMemberIsUndefined(media_stream_track.concealed_samples);
verifier.TestMemberIsUndefined(
media_stream_track.silent_concealed_samples);
verifier.TestMemberIsUndefined(media_stream_track.concealment_events);
verifier.TestMemberIsUndefined(
media_stream_track.inserted_samples_for_deceleration);
verifier.TestMemberIsUndefined(
media_stream_track.removed_samples_for_acceleration);
} else {
RTC_DCHECK_EQ(*media_stream_track.kind, RTCMediaStreamTrackKind::kAudio);
// The type of the referenced media source depends on kind.
if (*media_stream_track.remote_source) {
// Remote tracks don't have media source stats.
verifier.TestMemberIsUndefined(media_stream_track.media_source_id);
verifier.TestMemberIsNonNegative<double>(
media_stream_track.jitter_buffer_delay);
verifier.TestMemberIsNonNegative<uint64_t>(
media_stream_track.jitter_buffer_emitted_count);
verifier.TestMemberIsPositive<double>(media_stream_track.audio_level);
verifier.TestMemberIsPositive<double>(
media_stream_track.total_audio_energy);
verifier.TestMemberIsPositive<uint64_t>(
media_stream_track.total_samples_received);
verifier.TestMemberIsPositive<double>(
media_stream_track.total_samples_duration);
verifier.TestMemberIsNonNegative<uint64_t>(
media_stream_track.concealed_samples);
verifier.TestMemberIsNonNegative<uint64_t>(
media_stream_track.silent_concealed_samples);
verifier.TestMemberIsNonNegative<uint64_t>(
media_stream_track.concealment_events);
verifier.TestMemberIsNonNegative<uint64_t>(
media_stream_track.inserted_samples_for_deceleration);
verifier.TestMemberIsNonNegative<uint64_t>(
media_stream_track.removed_samples_for_acceleration);
} else {
verifier.TestMemberIsIDReference(media_stream_track.media_source_id,
RTCAudioSourceStats::kType);
// Local audio tracks have no jitter buffer.
verifier.TestMemberIsUndefined(media_stream_track.jitter_buffer_delay);
verifier.TestMemberIsUndefined(
media_stream_track.jitter_buffer_emitted_count);
verifier.TestMemberIsUndefined(media_stream_track.audio_level);
verifier.TestMemberIsUndefined(media_stream_track.total_audio_energy);
verifier.TestMemberIsUndefined(
media_stream_track.total_samples_received);
verifier.TestMemberIsUndefined(
media_stream_track.total_samples_duration);
verifier.TestMemberIsUndefined(media_stream_track.concealed_samples);
verifier.TestMemberIsUndefined(
media_stream_track.silent_concealed_samples);
verifier.TestMemberIsUndefined(media_stream_track.concealment_events);
verifier.TestMemberIsUndefined(
media_stream_track.inserted_samples_for_deceleration);
verifier.TestMemberIsUndefined(
media_stream_track.removed_samples_for_acceleration);
}
// Video-only members should be undefined
verifier.TestMemberIsUndefined(media_stream_track.frame_width);
verifier.TestMemberIsUndefined(media_stream_track.frame_height);
verifier.TestMemberIsUndefined(media_stream_track.frames_sent);
verifier.TestMemberIsUndefined(media_stream_track.huge_frames_sent);
verifier.TestMemberIsUndefined(media_stream_track.frames_received);
verifier.TestMemberIsUndefined(media_stream_track.frames_decoded);
verifier.TestMemberIsUndefined(media_stream_track.frames_dropped);
// Audio-only members
// TODO(hbos): `echo_return_loss` and `echo_return_loss_enhancement` are
// flaky on msan bot (sometimes defined, sometimes undefined). Should the
// test run until available or is there a way to have it always be
// defined? crbug.com/627816
verifier.MarkMemberTested(media_stream_track.echo_return_loss, true);
verifier.MarkMemberTested(media_stream_track.echo_return_loss_enhancement,
true);
}
return verifier.ExpectAllMembersSuccessfullyTested();
}
bool VerifyRTCPeerConnectionStats( bool VerifyRTCPeerConnectionStats(
const RTCPeerConnectionStats& peer_connection) { const RTCPeerConnectionStats& peer_connection) {
RTCStatsVerifier verifier(report_.get(), &peer_connection); RTCStatsVerifier verifier(report_.get(), &peer_connection);
@ -719,15 +571,6 @@ class RTCStatsReportVerifier {
RTCStatsVerifier& verifier) { RTCStatsVerifier& verifier) {
verifier.TestMemberIsDefined(stream.ssrc); verifier.TestMemberIsDefined(stream.ssrc);
verifier.TestMemberIsDefined(stream.kind); verifier.TestMemberIsDefined(stream.kind);
// Some legacy metrics are only defined for some of the RTP types in the
// hierarchy.
if (stream.type() == RTCInboundRtpStreamStats::kType ||
stream.type() == RTCOutboundRtpStreamStats::kType) {
verifier.TestMemberIsIDReference(
stream.track_id, DEPRECATED_RTCMediaStreamTrackStats::kType);
} else {
verifier.TestMemberIsUndefined(stream.track_id);
}
verifier.TestMemberIsIDReference(stream.transport_id, verifier.TestMemberIsIDReference(stream.transport_id,
RTCTransportStats::kType); RTCTransportStats::kType);
verifier.TestMemberIsIDReference(stream.codec_id, RTCCodecStats::kType); verifier.TestMemberIsIDReference(stream.codec_id, RTCCodecStats::kType);
@ -1210,7 +1053,6 @@ TEST_F(RTCStatsIntegrationTest, GetStatsWithSenderSelector) {
// TODO(hbos): Include RTCRtpContributingSourceStats when implemented. // TODO(hbos): Include RTCRtpContributingSourceStats when implemented.
RTCInboundRtpStreamStats::kType, RTCInboundRtpStreamStats::kType,
RTCPeerConnectionStats::kType, RTCPeerConnectionStats::kType,
DEPRECATED_RTCMediaStreamStats::kType,
RTCDataChannelStats::kType, RTCDataChannelStats::kType,
}; };
RTCStatsReportVerifier(report.get()).VerifyReport(allowed_missing_stats); RTCStatsReportVerifier(report.get()).VerifyReport(allowed_missing_stats);
@ -1229,7 +1071,6 @@ TEST_F(RTCStatsIntegrationTest, GetStatsWithReceiverSelector) {
// TODO(hbos): Include RTCRtpContributingSourceStats when implemented. // TODO(hbos): Include RTCRtpContributingSourceStats when implemented.
RTCOutboundRtpStreamStats::kType, RTCOutboundRtpStreamStats::kType,
RTCPeerConnectionStats::kType, RTCPeerConnectionStats::kType,
DEPRECATED_RTCMediaStreamStats::kType,
RTCDataChannelStats::kType, RTCDataChannelStats::kType,
}; };
RTCStatsReportVerifier(report.get()).VerifyReport(allowed_missing_stats); RTCStatsReportVerifier(report.get()).VerifyReport(allowed_missing_stats);

View file

@ -48,14 +48,6 @@ void AddIdIfDefined(const RTCStatsMember<std::string>& id,
neighbor_ids->push_back(&(*id)); neighbor_ids->push_back(&(*id));
} }
void AddIdsIfDefined(const RTCStatsMember<std::vector<std::string>>& ids,
std::vector<const std::string*>* neighbor_ids) {
if (ids.is_defined()) {
for (const std::string& id : *ids)
neighbor_ids->push_back(&id);
}
}
} // namespace } // namespace
rtc::scoped_refptr<RTCStatsReport> TakeReferencedStats( rtc::scoped_refptr<RTCStatsReport> TakeReferencedStats(
@ -91,21 +83,12 @@ std::vector<const std::string*> GetStatsReferencedIds(const RTCStats& stats) {
const auto& local_or_remote_candidate = const auto& local_or_remote_candidate =
static_cast<const RTCIceCandidateStats&>(stats); static_cast<const RTCIceCandidateStats&>(stats);
AddIdIfDefined(local_or_remote_candidate.transport_id, &neighbor_ids); AddIdIfDefined(local_or_remote_candidate.transport_id, &neighbor_ids);
} else if (type == DEPRECATED_RTCMediaStreamStats::kType) {
const auto& stream =
static_cast<const DEPRECATED_RTCMediaStreamStats&>(stats);
AddIdsIfDefined(stream.track_ids, &neighbor_ids);
} else if (type == DEPRECATED_RTCMediaStreamTrackStats::kType) {
const auto& track =
static_cast<const DEPRECATED_RTCMediaStreamTrackStats&>(stats);
AddIdIfDefined(track.media_source_id, &neighbor_ids);
} else if (type == RTCPeerConnectionStats::kType) { } else if (type == RTCPeerConnectionStats::kType) {
// RTCPeerConnectionStats does not have any neighbor references. // RTCPeerConnectionStats does not have any neighbor references.
} else if (type == RTCInboundRtpStreamStats::kType) { } else if (type == RTCInboundRtpStreamStats::kType) {
const auto& inbound_rtp = const auto& inbound_rtp =
static_cast<const RTCInboundRtpStreamStats&>(stats); static_cast<const RTCInboundRtpStreamStats&>(stats);
AddIdIfDefined(inbound_rtp.remote_id, &neighbor_ids); AddIdIfDefined(inbound_rtp.remote_id, &neighbor_ids);
AddIdIfDefined(inbound_rtp.track_id, &neighbor_ids);
AddIdIfDefined(inbound_rtp.transport_id, &neighbor_ids); AddIdIfDefined(inbound_rtp.transport_id, &neighbor_ids);
AddIdIfDefined(inbound_rtp.codec_id, &neighbor_ids); AddIdIfDefined(inbound_rtp.codec_id, &neighbor_ids);
AddIdIfDefined(inbound_rtp.playout_id, &neighbor_ids); AddIdIfDefined(inbound_rtp.playout_id, &neighbor_ids);
@ -113,7 +96,6 @@ std::vector<const std::string*> GetStatsReferencedIds(const RTCStats& stats) {
const auto& outbound_rtp = const auto& outbound_rtp =
static_cast<const RTCOutboundRtpStreamStats&>(stats); static_cast<const RTCOutboundRtpStreamStats&>(stats);
AddIdIfDefined(outbound_rtp.remote_id, &neighbor_ids); AddIdIfDefined(outbound_rtp.remote_id, &neighbor_ids);
AddIdIfDefined(outbound_rtp.track_id, &neighbor_ids);
AddIdIfDefined(outbound_rtp.transport_id, &neighbor_ids); AddIdIfDefined(outbound_rtp.transport_id, &neighbor_ids);
AddIdIfDefined(outbound_rtp.codec_id, &neighbor_ids); AddIdIfDefined(outbound_rtp.codec_id, &neighbor_ids);
AddIdIfDefined(outbound_rtp.media_source_id, &neighbor_ids); AddIdIfDefined(outbound_rtp.media_source_id, &neighbor_ids);
@ -127,7 +109,6 @@ std::vector<const std::string*> GetStatsReferencedIds(const RTCStats& stats) {
const auto& remote_outbound_rtp = const auto& remote_outbound_rtp =
static_cast<const RTCRemoteOutboundRtpStreamStats&>(stats); static_cast<const RTCRemoteOutboundRtpStreamStats&>(stats);
// Inherited from `RTCRTPStreamStats`. // Inherited from `RTCRTPStreamStats`.
AddIdIfDefined(remote_outbound_rtp.track_id, &neighbor_ids);
AddIdIfDefined(remote_outbound_rtp.transport_id, &neighbor_ids); AddIdIfDefined(remote_outbound_rtp.transport_id, &neighbor_ids);
AddIdIfDefined(remote_outbound_rtp.codec_id, &neighbor_ids); AddIdIfDefined(remote_outbound_rtp.codec_id, &neighbor_ids);
// Direct members of `RTCRemoteOutboundRtpStreamStats`. // Direct members of `RTCRemoteOutboundRtpStreamStats`.
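These traversal helpers are what the selector filtering relies on; after this change an RTP stream's neighbors are limited to its remote counterpart, transport, codec, media-source and playout references. A small sketch of walking those references for one report entry, assuming the internal `pc/rtc_stats_traversal.h` header path and an already-collected `report`; `PrintNeighbors` is an illustrative name:

#include <cstdio>
#include <string>

#include "api/stats/rtc_stats_report.h"
#include "pc/rtc_stats_traversal.h"

// Sketch only: list the ids one stats object points at.
void PrintNeighbors(
    const rtc::scoped_refptr<const webrtc::RTCStatsReport>& report,
    const std::string& stats_id) {
  const webrtc::RTCStats* stats = report->Get(stats_id);
  if (!stats)
    return;
  for (const std::string* neighbor_id : webrtc::GetStatsReferencedIds(*stats)) {
    // Each id names another object in the same report, e.g. the transport or
    // codec that an outbound-rtp entry references.
    std::printf("%s references %s\n", stats_id.c_str(), neighbor_id->c_str());
  }
}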

View file

@ -651,7 +651,6 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver,
received_stats->GetStatsOfType<webrtc::RTCInboundRtpStreamStats>()[0]; received_stats->GetStatsOfType<webrtc::RTCInboundRtpStreamStats>()[0];
ASSERT_TRUE(rtp_stats->relative_packet_arrival_delay.is_defined()); ASSERT_TRUE(rtp_stats->relative_packet_arrival_delay.is_defined());
ASSERT_TRUE(rtp_stats->packets_received.is_defined()); ASSERT_TRUE(rtp_stats->packets_received.is_defined());
ASSERT_TRUE(rtp_stats->track_id.is_defined());
rtp_stats_id_ = rtp_stats->id(); rtp_stats_id_ = rtp_stats->id();
audio_packets_stat_ = *rtp_stats->packets_received; audio_packets_stat_ = *rtp_stats->packets_received;
audio_delay_stat_ = *rtp_stats->relative_packet_arrival_delay; audio_delay_stat_ = *rtp_stats->relative_packet_arrival_delay;

View file

@ -144,7 +144,6 @@ void TrackMediaInfoMap::Initialize(
// One sender is associated with at most one track. // One sender is associated with at most one track.
// One track may be associated with multiple senders. // One track may be associated with multiple senders.
audio_track_by_sender_info_[&sender_info] = associated_track; audio_track_by_sender_info_[&sender_info] = associated_track;
voice_infos_by_local_track_[associated_track].push_back(&sender_info);
} }
if (sender_info.ssrc() == 0) if (sender_info.ssrc() == 0)
continue; // Unconnected SSRC. bugs.webrtc.org/8673 continue; // Unconnected SSRC. bugs.webrtc.org/8673
@ -159,12 +158,8 @@ void TrackMediaInfoMap::Initialize(
// One receiver is associated with at most one track, which is uniquely // One receiver is associated with at most one track, which is uniquely
// associated with that receiver. // associated with that receiver.
audio_track_by_receiver_info_[&receiver_info] = associated_track; audio_track_by_receiver_info_[&receiver_info] = associated_track;
RTC_DCHECK(voice_info_by_remote_track_.find(associated_track) ==
voice_info_by_remote_track_.end());
voice_info_by_remote_track_[associated_track] = &receiver_info;
} else if (unsignaled_audio_track) { } else if (unsignaled_audio_track) {
audio_track_by_receiver_info_[&receiver_info] = unsignaled_audio_track; audio_track_by_receiver_info_[&receiver_info] = unsignaled_audio_track;
voice_info_by_remote_track_[unsignaled_audio_track] = &receiver_info;
} }
RTC_CHECK(voice_info_by_receiver_ssrc_.count(receiver_info.ssrc()) == 0) RTC_CHECK(voice_info_by_receiver_ssrc_.count(receiver_info.ssrc()) == 0)
<< "Duplicate voice receiver SSRC: " << receiver_info.ssrc(); << "Duplicate voice receiver SSRC: " << receiver_info.ssrc();
@ -187,7 +182,6 @@ void TrackMediaInfoMap::Initialize(
// One sender is associated with at most one track. // One sender is associated with at most one track.
// One track may be associated with multiple senders. // One track may be associated with multiple senders.
video_track_by_sender_info_[&sender_info] = associated_track; video_track_by_sender_info_[&sender_info] = associated_track;
video_infos_by_local_track_[associated_track].push_back(&sender_info);
break; break;
} }
} }
@ -211,12 +205,8 @@ void TrackMediaInfoMap::Initialize(
// One receiver is associated with at most one track, which is uniquely // One receiver is associated with at most one track, which is uniquely
// associated with that receiver. // associated with that receiver.
video_track_by_receiver_info_[&receiver_info] = associated_track; video_track_by_receiver_info_[&receiver_info] = associated_track;
RTC_DCHECK(video_info_by_remote_track_.find(associated_track) ==
video_info_by_remote_track_.end());
video_info_by_remote_track_[associated_track] = &receiver_info;
} else if (unsignaled_video_track) { } else if (unsignaled_video_track) {
video_track_by_receiver_info_[&receiver_info] = unsignaled_video_track; video_track_by_receiver_info_[&receiver_info] = unsignaled_video_track;
video_info_by_remote_track_[unsignaled_video_track] = &receiver_info;
} }
RTC_DCHECK(video_info_by_receiver_ssrc_.count(receiver_info.ssrc()) == 0) RTC_DCHECK(video_info_by_receiver_ssrc_.count(receiver_info.ssrc()) == 0)
<< "Duplicate video receiver SSRC: " << receiver_info.ssrc(); << "Duplicate video receiver SSRC: " << receiver_info.ssrc();
@ -225,32 +215,6 @@ void TrackMediaInfoMap::Initialize(
} }
} }
const std::vector<cricket::VoiceSenderInfo*>*
TrackMediaInfoMap::GetVoiceSenderInfos(
const AudioTrackInterface& local_audio_track) const {
RTC_DCHECK(is_initialized_);
return FindAddressOrNull(voice_infos_by_local_track_, &local_audio_track);
}
const cricket::VoiceReceiverInfo* TrackMediaInfoMap::GetVoiceReceiverInfo(
const AudioTrackInterface& remote_audio_track) const {
RTC_DCHECK(is_initialized_);
return FindValueOrNull(voice_info_by_remote_track_, &remote_audio_track);
}
const std::vector<cricket::VideoSenderInfo*>*
TrackMediaInfoMap::GetVideoSenderInfos(
const VideoTrackInterface& local_video_track) const {
RTC_DCHECK(is_initialized_);
return FindAddressOrNull(video_infos_by_local_track_, &local_video_track);
}
const cricket::VideoReceiverInfo* TrackMediaInfoMap::GetVideoReceiverInfo(
const VideoTrackInterface& remote_video_track) const {
RTC_DCHECK(is_initialized_);
return FindValueOrNull(video_info_by_remote_track_, &remote_video_track);
}
const cricket::VoiceSenderInfo* TrackMediaInfoMap::GetVoiceSenderInfoBySsrc( const cricket::VoiceSenderInfo* TrackMediaInfoMap::GetVoiceSenderInfoBySsrc(
uint32_t ssrc) const { uint32_t ssrc) const {
RTC_DCHECK(is_initialized_); RTC_DCHECK(is_initialized_);

View file

@ -31,22 +31,8 @@ namespace webrtc {
// Audio/video tracks and sender/receiver statistical information are associated // Audio/video tracks and sender/receiver statistical information are associated
// with each other based on attachments to RTP senders/receivers. This class // with each other based on attachments to RTP senders/receivers. This class
// maps that relationship, in both directions, so that stats about a track can // maps that relationship so that "infos" can be obtained from SSRCs and tracks
// be retrieved on a per-attachment basis. // can be obtained from "infos".
//
// An RTP sender/receiver sends or receives media for a set of SSRCs. The media
// comes from an audio/video track that is attached to it.
// |[Voice/Video][Sender/Receiver]Info| has statistical information for a set of
// SSRCs. Looking at the RTP senders and receivers uncovers the track <-> info
// relationships, which this class does.
//
// In the spec, "track" attachment stats have been made obsolete, and in Unified
// Plan there is just one sender and one receiver per transceiver, so we may be
// able to simplify/delete this class.
// TODO(https://crbug.com/webrtc/14175): Simplify or delete this class when
// "track" stats have been deleted.
// TODO(https://crbug.com/webrtc/13528): Simplify or delete this class when
// Plan B is gone from the native library (already gone for Chrome).
class TrackMediaInfoMap { class TrackMediaInfoMap {
public: public:
TrackMediaInfoMap(); TrackMediaInfoMap();
@ -69,15 +55,6 @@ class TrackMediaInfoMap {
return video_media_info_; return video_media_info_;
} }
const std::vector<cricket::VoiceSenderInfo*>* GetVoiceSenderInfos(
const AudioTrackInterface& local_audio_track) const;
const cricket::VoiceReceiverInfo* GetVoiceReceiverInfo(
const AudioTrackInterface& remote_audio_track) const;
const std::vector<cricket::VideoSenderInfo*>* GetVideoSenderInfos(
const VideoTrackInterface& local_video_track) const;
const cricket::VideoReceiverInfo* GetVideoReceiverInfo(
const VideoTrackInterface& remote_video_track) const;
const cricket::VoiceSenderInfo* GetVoiceSenderInfoBySsrc(uint32_t ssrc) const; const cricket::VoiceSenderInfo* GetVoiceSenderInfoBySsrc(uint32_t ssrc) const;
const cricket::VoiceReceiverInfo* GetVoiceReceiverInfoBySsrc( const cricket::VoiceReceiverInfo* GetVoiceReceiverInfoBySsrc(
uint32_t ssrc) const; uint32_t ssrc) const;
@ -105,18 +82,6 @@ class TrackMediaInfoMap {
bool is_initialized_ = false; bool is_initialized_ = false;
absl::optional<cricket::VoiceMediaInfo> voice_media_info_; absl::optional<cricket::VoiceMediaInfo> voice_media_info_;
absl::optional<cricket::VideoMediaInfo> video_media_info_; absl::optional<cricket::VideoMediaInfo> video_media_info_;
// These maps map tracks (identified by a pointer) to their corresponding info
// object of the correct kind. One track can map to multiple info objects.
// Known tracks are guaranteed to be alive because they are also stored as
// entries in the reverse maps below.
std::map<const AudioTrackInterface*, std::vector<cricket::VoiceSenderInfo*>>
voice_infos_by_local_track_;
std::map<const AudioTrackInterface*, cricket::VoiceReceiverInfo*>
voice_info_by_remote_track_;
std::map<const VideoTrackInterface*, std::vector<cricket::VideoSenderInfo*>>
video_infos_by_local_track_;
std::map<const VideoTrackInterface*, cricket::VideoReceiverInfo*>
video_info_by_remote_track_;
// These maps map info objects to their corresponding tracks. They are always // These maps map info objects to their corresponding tracks. They are always
// the inverse of the maps above. One info object always maps to only one // the inverse of the maps above. One info object always maps to only one
// track. The use of scoped_refptr<> here ensures the tracks outlive // track. The use of scoped_refptr<> here ensures the tracks outlive

View file

@ -202,47 +202,20 @@ TEST_F(TrackMediaInfoMapTest, SingleSenderReceiverPerTrackWithOneSsrc) {
AddRtpSenderWithSsrcs({3}, local_video_track_.get()); AddRtpSenderWithSsrcs({3}, local_video_track_.get());
AddRtpReceiverWithSsrcs({4}, remote_video_track_.get()); AddRtpReceiverWithSsrcs({4}, remote_video_track_.get());
InitializeMap(); InitializeMap();
// RTP audio sender -> local audio track
// Local audio track <-> RTP audio sender
ASSERT_TRUE(map_.GetVoiceSenderInfos(*local_audio_track_));
EXPECT_THAT(*map_.GetVoiceSenderInfos(*local_audio_track_),
ElementsAre(&map_.voice_media_info()->senders[0]));
EXPECT_EQ(map_.GetAudioTrack(map_.voice_media_info()->senders[0]), EXPECT_EQ(map_.GetAudioTrack(map_.voice_media_info()->senders[0]),
local_audio_track_.get()); local_audio_track_.get());
// RTP audio receiver -> remote audio track
// Remote audio track <-> RTP audio receiver
EXPECT_EQ(map_.GetVoiceReceiverInfo(*remote_audio_track_),
&map_.voice_media_info()->receivers[0]);
EXPECT_EQ(map_.GetAudioTrack(map_.voice_media_info()->receivers[0]), EXPECT_EQ(map_.GetAudioTrack(map_.voice_media_info()->receivers[0]),
remote_audio_track_.get()); remote_audio_track_.get());
// RTP video sender -> local video track
// Local video track <-> RTP video sender
ASSERT_TRUE(map_.GetVideoSenderInfos(*local_video_track_));
EXPECT_THAT(*map_.GetVideoSenderInfos(*local_video_track_),
ElementsAre(&map_.video_media_info()->senders[0]));
EXPECT_EQ(map_.GetVideoTrack(map_.video_media_info()->senders[0]), EXPECT_EQ(map_.GetVideoTrack(map_.video_media_info()->senders[0]),
local_video_track_.get()); local_video_track_.get());
// RTP video receiver -> remote video track
// Remote video track <-> RTP video receiver
EXPECT_EQ(map_.GetVideoReceiverInfo(*remote_video_track_),
&map_.video_media_info()->receivers[0]);
EXPECT_EQ(map_.GetVideoTrack(map_.video_media_info()->receivers[0]), EXPECT_EQ(map_.GetVideoTrack(map_.video_media_info()->receivers[0]),
remote_video_track_.get()); remote_video_track_.get());
} }
TEST_F(TrackMediaInfoMapTest, SingleSenderReceiverPerTrackWithMissingSsrc) {
AddRtpSenderWithSsrcs({}, local_audio_track_.get());
AddRtpSenderWithSsrcs({}, local_video_track_.get());
AddRtpReceiverWithSsrcs({}, remote_audio_track_.get());
AddRtpReceiverWithSsrcs({}, remote_video_track_.get());
InitializeMap();
EXPECT_FALSE(map_.GetVoiceSenderInfos(*local_audio_track_));
EXPECT_FALSE(map_.GetVideoSenderInfos(*local_video_track_));
EXPECT_FALSE(map_.GetVoiceReceiverInfo(*remote_audio_track_));
EXPECT_FALSE(map_.GetVideoReceiverInfo(*remote_video_track_));
}
TEST_F(TrackMediaInfoMapTest, TEST_F(TrackMediaInfoMapTest,
SingleSenderReceiverPerTrackWithAudioAndVideoUseSameSsrc) { SingleSenderReceiverPerTrackWithAudioAndVideoUseSameSsrc) {
AddRtpSenderWithSsrcs({1}, local_audio_track_.get()); AddRtpSenderWithSsrcs({1}, local_audio_track_.get());
@ -250,30 +223,16 @@ TEST_F(TrackMediaInfoMapTest,
AddRtpSenderWithSsrcs({1}, local_video_track_.get()); AddRtpSenderWithSsrcs({1}, local_video_track_.get());
AddRtpReceiverWithSsrcs({2}, remote_video_track_.get()); AddRtpReceiverWithSsrcs({2}, remote_video_track_.get());
InitializeMap(); InitializeMap();
// RTP audio sender -> local audio track
// Local audio track <-> RTP audio sender
ASSERT_TRUE(map_.GetVoiceSenderInfos(*local_audio_track_));
EXPECT_THAT(*map_.GetVoiceSenderInfos(*local_audio_track_),
ElementsAre(&map_.voice_media_info()->senders[0]));
EXPECT_EQ(map_.GetAudioTrack(map_.voice_media_info()->senders[0]), EXPECT_EQ(map_.GetAudioTrack(map_.voice_media_info()->senders[0]),
local_audio_track_.get()); local_audio_track_.get());
// RTP audio receiver -> remote audio track
// Remote audio track <-> RTP audio receiver
EXPECT_EQ(map_.GetVoiceReceiverInfo(*remote_audio_track_),
&map_.voice_media_info()->receivers[0]);
EXPECT_EQ(map_.GetAudioTrack(map_.voice_media_info()->receivers[0]), EXPECT_EQ(map_.GetAudioTrack(map_.voice_media_info()->receivers[0]),
remote_audio_track_.get()); remote_audio_track_.get());
// RTP video sender -> local video track
// Local video track <-> RTP video sender
ASSERT_TRUE(map_.GetVideoSenderInfos(*local_video_track_));
EXPECT_THAT(*map_.GetVideoSenderInfos(*local_video_track_),
ElementsAre(&map_.video_media_info()->senders[0]));
EXPECT_EQ(map_.GetVideoTrack(map_.video_media_info()->senders[0]), EXPECT_EQ(map_.GetVideoTrack(map_.video_media_info()->senders[0]),
local_video_track_.get()); local_video_track_.get());
// RTP video receiver -> remote video track
// Remote video track <-> RTP video receiver
EXPECT_EQ(map_.GetVideoReceiverInfo(*remote_video_track_),
&map_.video_media_info()->receivers[0]);
EXPECT_EQ(map_.GetVideoTrack(map_.video_media_info()->receivers[0]), EXPECT_EQ(map_.GetVideoTrack(map_.video_media_info()->receivers[0]),
remote_video_track_.get()); remote_video_track_.get());
} }
@ -282,18 +241,10 @@ TEST_F(TrackMediaInfoMapTest, SingleMultiSsrcSenderPerTrack) {
AddRtpSenderWithSsrcs({1, 2}, local_audio_track_.get()); AddRtpSenderWithSsrcs({1, 2}, local_audio_track_.get());
AddRtpSenderWithSsrcs({3, 4}, local_video_track_.get()); AddRtpSenderWithSsrcs({3, 4}, local_video_track_.get());
InitializeMap(); InitializeMap();
// RTP audio senders -> local audio track
// Local audio track <-> RTP audio senders
ASSERT_TRUE(map_.GetVoiceSenderInfos(*local_audio_track_));
EXPECT_THAT(*map_.GetVoiceSenderInfos(*local_audio_track_),
ElementsAre(&map_.voice_media_info()->senders[0]));
EXPECT_EQ(map_.GetAudioTrack(map_.voice_media_info()->senders[0]), EXPECT_EQ(map_.GetAudioTrack(map_.voice_media_info()->senders[0]),
local_audio_track_.get()); local_audio_track_.get());
// RTP video senders -> local video track
// Local video track <-> RTP video senders
ASSERT_TRUE(map_.GetVideoSenderInfos(*local_video_track_));
EXPECT_THAT(*map_.GetVideoSenderInfos(*local_video_track_),
ElementsAre(&map_.video_media_info()->senders[0]));
EXPECT_EQ(map_.GetVideoTrack(map_.video_media_info()->senders[0]), EXPECT_EQ(map_.GetVideoTrack(map_.video_media_info()->senders[0]),
local_video_track_.get()); local_video_track_.get());
} }
@ -304,22 +255,12 @@ TEST_F(TrackMediaInfoMapTest, MultipleOneSsrcSendersPerTrack) {
AddRtpSenderWithSsrcs({3}, local_video_track_.get()); AddRtpSenderWithSsrcs({3}, local_video_track_.get());
AddRtpSenderWithSsrcs({4}, local_video_track_.get()); AddRtpSenderWithSsrcs({4}, local_video_track_.get());
InitializeMap(); InitializeMap();
// RTP audio senders -> local audio track
// Local audio track <-> RTP audio senders
ASSERT_TRUE(map_.GetVoiceSenderInfos(*local_audio_track_));
EXPECT_THAT(*map_.GetVoiceSenderInfos(*local_audio_track_),
ElementsAre(&map_.voice_media_info()->senders[0],
&map_.voice_media_info()->senders[1]));
EXPECT_EQ(map_.GetAudioTrack(map_.voice_media_info()->senders[0]), EXPECT_EQ(map_.GetAudioTrack(map_.voice_media_info()->senders[0]),
local_audio_track_.get()); local_audio_track_.get());
EXPECT_EQ(map_.GetAudioTrack(map_.voice_media_info()->senders[1]), EXPECT_EQ(map_.GetAudioTrack(map_.voice_media_info()->senders[1]),
local_audio_track_.get()); local_audio_track_.get());
// RTP video senders -> local video track
// Local video track <-> RTP video senders
ASSERT_TRUE(map_.GetVideoSenderInfos(*local_video_track_));
EXPECT_THAT(*map_.GetVideoSenderInfos(*local_video_track_),
ElementsAre(&map_.video_media_info()->senders[0],
&map_.video_media_info()->senders[1]));
EXPECT_EQ(map_.GetVideoTrack(map_.video_media_info()->senders[0]), EXPECT_EQ(map_.GetVideoTrack(map_.video_media_info()->senders[0]),
local_video_track_.get()); local_video_track_.get());
EXPECT_EQ(map_.GetVideoTrack(map_.video_media_info()->senders[1]), EXPECT_EQ(map_.GetVideoTrack(map_.video_media_info()->senders[1]),
@ -332,22 +273,12 @@ TEST_F(TrackMediaInfoMapTest, MultipleMultiSsrcSendersPerTrack) {
AddRtpSenderWithSsrcs({5, 6}, local_video_track_.get()); AddRtpSenderWithSsrcs({5, 6}, local_video_track_.get());
AddRtpSenderWithSsrcs({7, 8}, local_video_track_.get()); AddRtpSenderWithSsrcs({7, 8}, local_video_track_.get());
InitializeMap(); InitializeMap();
// RTP audio senders -> local audio track
// Local audio track <-> RTP audio senders
ASSERT_TRUE(map_.GetVoiceSenderInfos(*local_audio_track_));
EXPECT_THAT(*map_.GetVoiceSenderInfos(*local_audio_track_),
ElementsAre(&map_.voice_media_info()->senders[0],
&map_.voice_media_info()->senders[1]));
EXPECT_EQ(map_.GetAudioTrack(map_.voice_media_info()->senders[0]), EXPECT_EQ(map_.GetAudioTrack(map_.voice_media_info()->senders[0]),
local_audio_track_.get()); local_audio_track_.get());
EXPECT_EQ(map_.GetAudioTrack(map_.voice_media_info()->senders[1]), EXPECT_EQ(map_.GetAudioTrack(map_.voice_media_info()->senders[1]),
local_audio_track_.get()); local_audio_track_.get());
// RTP video senders -> local video track
// Local video track <-> RTP video senders
ASSERT_TRUE(map_.GetVideoSenderInfos(*local_video_track_));
EXPECT_THAT(*map_.GetVideoSenderInfos(*local_video_track_),
ElementsAre(&map_.video_media_info()->senders[0],
&map_.video_media_info()->senders[1]));
EXPECT_EQ(map_.GetVideoTrack(map_.video_media_info()->senders[0]), EXPECT_EQ(map_.GetVideoTrack(map_.video_media_info()->senders[0]),
local_video_track_.get()); local_video_track_.get());
EXPECT_EQ(map_.GetVideoTrack(map_.video_media_info()->senders[1]), EXPECT_EQ(map_.GetVideoTrack(map_.video_media_info()->senders[1]),
@ -361,30 +292,16 @@ TEST_F(TrackMediaInfoMapTest, SingleSenderReceiverPerTrackWithSsrcNotUnique) {
AddRtpSenderWithSsrcs({2}, local_video_track_.get()); AddRtpSenderWithSsrcs({2}, local_video_track_.get());
AddRtpReceiverWithSsrcs({2}, remote_video_track_.get()); AddRtpReceiverWithSsrcs({2}, remote_video_track_.get());
InitializeMap(); InitializeMap();
// RTP audio senders -> local audio track
// Local audio track <-> RTP audio senders
ASSERT_TRUE(map_.GetVoiceSenderInfos(*local_audio_track_));
EXPECT_THAT(*map_.GetVoiceSenderInfos(*local_audio_track_),
ElementsAre(&map_.voice_media_info()->senders[0]));
EXPECT_EQ(map_.GetAudioTrack(map_.voice_media_info()->senders[0]), EXPECT_EQ(map_.GetAudioTrack(map_.voice_media_info()->senders[0]),
local_audio_track_.get()); local_audio_track_.get());
// RTP audio receiver -> remote audio track
// Remote audio track <-> RTP audio receiver
EXPECT_EQ(map_.GetVoiceReceiverInfo(*remote_audio_track_),
&map_.voice_media_info()->receivers[0]);
EXPECT_EQ(map_.GetAudioTrack(map_.voice_media_info()->receivers[0]), EXPECT_EQ(map_.GetAudioTrack(map_.voice_media_info()->receivers[0]),
remote_audio_track_.get()); remote_audio_track_.get());
// RTP video senders -> local video track
// Local video track <-> RTP video senders
ASSERT_TRUE(map_.GetVideoSenderInfos(*local_video_track_));
EXPECT_THAT(*map_.GetVideoSenderInfos(*local_video_track_),
ElementsAre(&map_.video_media_info()->senders[0]));
EXPECT_EQ(map_.GetVideoTrack(map_.video_media_info()->senders[0]), EXPECT_EQ(map_.GetVideoTrack(map_.video_media_info()->senders[0]),
local_video_track_.get()); local_video_track_.get());
// RTP video receiver -> remote video track
// Remote video track <-> RTP video receiver
EXPECT_EQ(map_.GetVideoReceiverInfo(*remote_video_track_),
&map_.video_media_info()->receivers[0]);
EXPECT_EQ(map_.GetVideoTrack(map_.video_media_info()->receivers[0]), EXPECT_EQ(map_.GetVideoTrack(map_.video_media_info()->receivers[0]),
remote_video_track_.get()); remote_video_track_.get());
} }
@ -412,32 +329,4 @@ TEST_F(TrackMediaInfoMapTest, GetAttachmentIdByTrack) {
map_.GetAttachmentIdByTrack(local_video_track_.get())); map_.GetAttachmentIdByTrack(local_video_track_.get()));
} }
// Death tests.
// Disabled on Android because death tests misbehave on Android, see
// base/test/gtest_util.h.
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
class TrackMediaInfoMapDeathTest : public TrackMediaInfoMapTest {
public:
TrackMediaInfoMapDeathTest() : TrackMediaInfoMapTest(false) {}
};
TEST_F(TrackMediaInfoMapDeathTest, MultipleOneSsrcReceiversPerTrack) {
AddRtpReceiverWithSsrcs({1}, remote_audio_track_.get());
AddRtpReceiverWithSsrcs({2}, remote_audio_track_.get());
AddRtpReceiverWithSsrcs({3}, remote_video_track_.get());
AddRtpReceiverWithSsrcs({4}, remote_video_track_.get());
EXPECT_DEATH(InitializeMap(), "");
}
TEST_F(TrackMediaInfoMapDeathTest, MultipleMultiSsrcReceiversPerTrack) {
AddRtpReceiverWithSsrcs({1, 2}, remote_audio_track_.get());
AddRtpReceiverWithSsrcs({3, 4}, remote_audio_track_.get());
AddRtpReceiverWithSsrcs({5, 6}, remote_video_track_.get());
AddRtpReceiverWithSsrcs({7, 8}, remote_video_track_.get());
EXPECT_DEATH(InitializeMap(), "");
}
#endif // RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
} // namespace webrtc } // namespace webrtc
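With the per-track getters deleted, the tests above only exercise the SSRC- and info-keyed lookups. A minimal sketch of what a caller of the trimmed map can still do; `DescribeSsrc` is an illustrative name, `map` is assumed to be already initialized, and the return types follow the usage in the tests rather than an authoritative signature:

#include <cstdint>
#include <cstdio>

#include "pc/track_media_info_map.h"

// Sketch only: SSRC -> info and info -> track lookups survive the cleanup;
// the deleted track -> info getters (GetVoiceSenderInfos etc.) do not.
void DescribeSsrc(const webrtc::TrackMediaInfoMap& map, uint32_t ssrc) {
  const cricket::VoiceSenderInfo* info = map.GetVoiceSenderInfoBySsrc(ssrc);
  if (!info)
    return;
  auto track = map.GetAudioTrack(*info);
  if (track) {
    std::printf("ssrc %u is sent by audio track %s\n", ssrc,
                track->id().c_str());
  }
}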

View file

@ -290,94 +290,6 @@ const char* RTCRemoteIceCandidateStats::type() const {
return kType; return kType;
} }
// clang-format off
WEBRTC_RTCSTATS_IMPL(DEPRECATED_RTCMediaStreamStats, RTCStats, "stream",
&stream_identifier,
&track_ids)
// clang-format on
DEPRECATED_RTCMediaStreamStats::DEPRECATED_RTCMediaStreamStats(
std::string id,
Timestamp timestamp)
: RTCStats(std::move(id), timestamp),
stream_identifier("streamIdentifier"),
track_ids("trackIds") {}
DEPRECATED_RTCMediaStreamStats::DEPRECATED_RTCMediaStreamStats(
const DEPRECATED_RTCMediaStreamStats& other) = default;
DEPRECATED_RTCMediaStreamStats::~DEPRECATED_RTCMediaStreamStats() {}
// clang-format off
WEBRTC_RTCSTATS_IMPL(DEPRECATED_RTCMediaStreamTrackStats, RTCStats, "track",
&track_identifier,
&media_source_id,
&remote_source,
&ended,
&detached,
&kind,
&jitter_buffer_delay,
&jitter_buffer_emitted_count,
&frame_width,
&frame_height,
&frames_sent,
&huge_frames_sent,
&frames_received,
&frames_decoded,
&frames_dropped,
&audio_level,
&total_audio_energy,
&echo_return_loss,
&echo_return_loss_enhancement,
&total_samples_received,
&total_samples_duration,
&concealed_samples,
&silent_concealed_samples,
&concealment_events,
&inserted_samples_for_deceleration,
&removed_samples_for_acceleration)
// clang-format on
DEPRECATED_RTCMediaStreamTrackStats::DEPRECATED_RTCMediaStreamTrackStats(
std::string id,
Timestamp timestamp,
const char* kind)
: RTCStats(std::move(id), timestamp),
track_identifier("trackIdentifier"),
media_source_id("mediaSourceId"),
remote_source("remoteSource"),
ended("ended"),
detached("detached"),
kind("kind", kind),
jitter_buffer_delay("jitterBufferDelay"),
jitter_buffer_emitted_count("jitterBufferEmittedCount"),
frame_width("frameWidth"),
frame_height("frameHeight"),
frames_sent("framesSent"),
huge_frames_sent("hugeFramesSent"),
frames_received("framesReceived"),
frames_decoded("framesDecoded"),
frames_dropped("framesDropped"),
audio_level("audioLevel"),
total_audio_energy("totalAudioEnergy"),
echo_return_loss("echoReturnLoss"),
echo_return_loss_enhancement("echoReturnLossEnhancement"),
total_samples_received("totalSamplesReceived"),
total_samples_duration("totalSamplesDuration"),
concealed_samples("concealedSamples"),
silent_concealed_samples("silentConcealedSamples"),
concealment_events("concealmentEvents"),
inserted_samples_for_deceleration("insertedSamplesForDeceleration"),
removed_samples_for_acceleration("removedSamplesForAcceleration") {
RTC_DCHECK(kind == RTCMediaStreamTrackKind::kAudio ||
kind == RTCMediaStreamTrackKind::kVideo);
}
DEPRECATED_RTCMediaStreamTrackStats::DEPRECATED_RTCMediaStreamTrackStats(
const DEPRECATED_RTCMediaStreamTrackStats& other) = default;
DEPRECATED_RTCMediaStreamTrackStats::~DEPRECATED_RTCMediaStreamTrackStats() {}
// clang-format off // clang-format off
WEBRTC_RTCSTATS_IMPL(RTCPeerConnectionStats, RTCStats, "peer-connection", WEBRTC_RTCSTATS_IMPL(RTCPeerConnectionStats, RTCStats, "peer-connection",
&data_channels_opened, &data_channels_opened,
@ -399,7 +311,6 @@ RTCPeerConnectionStats::~RTCPeerConnectionStats() {}
WEBRTC_RTCSTATS_IMPL(RTCRtpStreamStats, RTCStats, "rtp", WEBRTC_RTCSTATS_IMPL(RTCRtpStreamStats, RTCStats, "rtp",
&ssrc, &ssrc,
&kind, &kind,
&track_id,
&transport_id, &transport_id,
&codec_id) &codec_id)
// clang-format on // clang-format on
@ -408,7 +319,6 @@ RTCRtpStreamStats::RTCRtpStreamStats(std::string id, Timestamp timestamp)
: RTCStats(std::move(id), timestamp), : RTCStats(std::move(id), timestamp),
ssrc("ssrc"), ssrc("ssrc"),
kind("kind"), kind("kind"),
track_id("trackId"),
transport_id("transportId"), transport_id("transportId"),
codec_id("codecId") {} codec_id("codecId") {}