
This CL introduces two new macros that affect the WebRTC Objective-C API symbols:

- RTC_OBJC_TYPE_PREFIX: macro used to prepend a prefix to the API types that are exported with RTC_OBJC_EXPORT. Clients can patch the definition of this macro locally and build WebRTC.framework with their own prefix in case symbol clashing is a problem. This macro must only be defined by changing the value in sdk/objc/base/RTCMacros.h, not via a compiler flag, to ensure it has a unique value.
- RTC_OBJC_TYPE: macro used internally to reference API types. Declaring an API type without this macro excludes it from the set of types affected by the configurable RTC_OBJC_TYPE_PREFIX.

Manual changes: https://webrtc-review.googlesource.com/c/src/+/173781/5..10
The auto-generated changes in PS#5 were made with https://webrtc-review.googlesource.com/c/src/+/174061.

Bug: None
Change-Id: I0d54ca94db764fb3b6cb4365873f79e14cd879b8
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/173781
Commit-Queue: Mirko Bonadei <mbonadei@webrtc.org>
Reviewed-by: Karl Wiberg <kwiberg@webrtc.org>
Reviewed-by: Kári Helgason <kthelgason@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#31153}
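A minimal sketch of how the prefixing described above can be implemented in sdk/objc/base/RTCMacros.h is shown below; the token-pasting helper names are illustrative assumptions for this sketch, not necessarily the verbatim upstream definitions:

// Sketch of sdk/objc/base/RTCMacros.h (illustrative, not verbatim upstream).
// RTC_OBJC_TYPE_PREFIX is empty by default; clients patch this value locally
// (e.g. to "MyApp") before building WebRTC.framework to rename exported types.
#define RTC_OBJC_TYPE_PREFIX

// Two-level concatenation so the prefix macro is expanded before pasting
// (helper names are assumptions for this sketch).
#define RTC_SYMBOL_CONCAT_HELPER(a, b) a##b
#define RTC_SYMBOL_CONCAT(a, b) RTC_SYMBOL_CONCAT_HELPER(a, b)

// Every exported API type is referenced through RTC_OBJC_TYPE so the prefix is
// applied consistently; RTC_OBJC_TYPE(RTCLegacyStatsReport) expands to
// RTCLegacyStatsReport with the default empty prefix, or to
// MyAppRTCLegacyStatsReport with a "MyApp" prefix.
#define RTC_OBJC_TYPE(type_name) RTC_SYMBOL_CONCAT(RTC_OBJC_TYPE_PREFIX, type_name)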
/*
 * Copyright 2015 The WebRTC Project Authors. All rights reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#import "ARDStatsBuilder.h"

#import <WebRTC/RTCLegacyStatsReport.h>
#import <WebRTC/RTCMacros.h>

#import "ARDBitrateTracker.h"
#import "ARDUtilities.h"

@implementation ARDStatsBuilder {
  // Connection stats.
  NSString *_connRecvBitrate;
  NSString *_connRtt;
  NSString *_connSendBitrate;
  NSString *_localCandType;
  NSString *_remoteCandType;
  NSString *_transportType;

  // BWE stats.
  NSString *_actualEncBitrate;
  NSString *_availableRecvBw;
  NSString *_availableSendBw;
  NSString *_targetEncBitrate;

  // Video send stats.
  NSString *_videoEncodeMs;
  NSString *_videoInputFps;
  NSString *_videoInputHeight;
  NSString *_videoInputWidth;
  NSString *_videoSendCodec;
  NSString *_videoSendBitrate;
  NSString *_videoSendFps;
  NSString *_videoSendHeight;
  NSString *_videoSendWidth;

  // QP stats.
  int _videoQPSum;
  int _framesEncoded;
  int _oldVideoQPSum;
  int _oldFramesEncoded;

  // Video receive stats.
  NSString *_videoDecodeMs;
  NSString *_videoDecodedFps;
  NSString *_videoOutputFps;
  NSString *_videoRecvBitrate;
  NSString *_videoRecvFps;
  NSString *_videoRecvHeight;
  NSString *_videoRecvWidth;

  // Audio send stats.
  NSString *_audioSendBitrate;
  NSString *_audioSendCodec;

  // Audio receive stats.
  NSString *_audioCurrentDelay;
  NSString *_audioExpandRate;
  NSString *_audioRecvBitrate;
  NSString *_audioRecvCodec;

  // Bitrate trackers.
  ARDBitrateTracker *_audioRecvBitrateTracker;
  ARDBitrateTracker *_audioSendBitrateTracker;
  ARDBitrateTracker *_connRecvBitrateTracker;
  ARDBitrateTracker *_connSendBitrateTracker;
  ARDBitrateTracker *_videoRecvBitrateTracker;
  ARDBitrateTracker *_videoSendBitrateTracker;
}

- (instancetype)init {
  if (self = [super init]) {
    _audioSendBitrateTracker = [[ARDBitrateTracker alloc] init];
    _audioRecvBitrateTracker = [[ARDBitrateTracker alloc] init];
    _connSendBitrateTracker = [[ARDBitrateTracker alloc] init];
    _connRecvBitrateTracker = [[ARDBitrateTracker alloc] init];
    _videoSendBitrateTracker = [[ARDBitrateTracker alloc] init];
    _videoRecvBitrateTracker = [[ARDBitrateTracker alloc] init];
    _videoQPSum = 0;
    _framesEncoded = 0;
  }
  return self;
}

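// Builds a multi-line, human-readable summary of the most recently parsed
// reports: CPU usage, connection, video send/receive and audio send/receive.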
- (NSString *)statsString {
  NSMutableString *result = [NSMutableString string];
  NSString *systemStatsFormat = @"(cpu)%ld%%\n";
  [result appendString:[NSString stringWithFormat:systemStatsFormat,
                                                  (long)ARDGetCpuUsagePercentage()]];

  // Connection stats.
  NSString *connStatsFormat = @"CN %@ms | %@->%@/%@ | (s)%@ | (r)%@\n";
  [result appendString:[NSString stringWithFormat:connStatsFormat,
                                                  _connRtt,
                                                  _localCandType, _remoteCandType, _transportType,
                                                  _connSendBitrate, _connRecvBitrate]];

  // Video send stats.
  NSString *videoSendFormat = @"VS (input) %@x%@@%@fps | (sent) %@x%@@%@fps\n"
                               "VS (enc) %@/%@ | (sent) %@/%@ | %@ms | %@\n"
                               "AvgQP (past %d encoded frames) = %d\n ";
  int avgqp = [self calculateAvgQP];

  [result appendString:[NSString stringWithFormat:videoSendFormat,
                                                  _videoInputWidth, _videoInputHeight, _videoInputFps,
                                                  _videoSendWidth, _videoSendHeight, _videoSendFps,
                                                  _actualEncBitrate, _targetEncBitrate,
                                                  _videoSendBitrate, _availableSendBw,
                                                  _videoEncodeMs,
                                                  _videoSendCodec,
                                                  _framesEncoded - _oldFramesEncoded, avgqp]];

  // Video receive stats.
  NSString *videoReceiveFormat =
      @"VR (recv) %@x%@@%@fps | (decoded)%@ | (output)%@fps | %@/%@ | %@ms\n";
  [result appendString:[NSString stringWithFormat:videoReceiveFormat,
                                                  _videoRecvWidth, _videoRecvHeight, _videoRecvFps,
                                                  _videoDecodedFps,
                                                  _videoOutputFps,
                                                  _videoRecvBitrate, _availableRecvBw,
                                                  _videoDecodeMs]];

  // Audio send stats.
  NSString *audioSendFormat = @"AS %@ | %@\n";
  [result appendString:[NSString stringWithFormat:audioSendFormat,
                                                  _audioSendBitrate, _audioSendCodec]];

  // Audio receive stats.
  NSString *audioReceiveFormat = @"AR %@ | %@ | %@ms | (expandrate)%@";
  [result appendString:[NSString stringWithFormat:audioReceiveFormat,
                                                  _audioRecvBitrate, _audioRecvCodec, _audioCurrentDelay,
                                                  _audioExpandRate]];

  return result;
}

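// Dispatches a legacy (goog-prefixed) stats report to the matching parser
// based on its type and reportId: per-SSRC send/receive reports, bandwidth
// estimation ("VideoBwe") reports and candidate-pair ("googCandidatePair") reports.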
- (void)parseStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
  NSString *reportType = statsReport.type;
  if ([reportType isEqualToString:@"ssrc"] &&
      [statsReport.reportId rangeOfString:@"ssrc"].location != NSNotFound) {
    if ([statsReport.reportId rangeOfString:@"send"].location != NSNotFound) {
      [self parseSendSsrcStatsReport:statsReport];
    }
    if ([statsReport.reportId rangeOfString:@"recv"].location != NSNotFound) {
      [self parseRecvSsrcStatsReport:statsReport];
    }
  } else if ([reportType isEqualToString:@"VideoBwe"]) {
    [self parseBweStatsReport:statsReport];
  } else if ([reportType isEqualToString:@"googCandidatePair"]) {
    [self parseConnectionStatsReport:statsReport];
  }
}

#pragma mark - Private

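// Average QP over the frames encoded since the last qpSum/framesEncoded
// update; returns 0 if no new frames were encoded.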
- (int)calculateAvgQP {
  int deltaFramesEncoded = _framesEncoded - _oldFramesEncoded;
  int deltaQPSum = _videoQPSum - _oldVideoQPSum;

  return deltaFramesEncoded != 0 ? deltaQPSum / deltaFramesEncoded : 0;
}

- (void)updateBweStatOfKey:(NSString *)key value:(NSString *)value {
  if ([key isEqualToString:@"googAvailableSendBandwidth"]) {
    _availableSendBw = [ARDBitrateTracker bitrateStringForBitrate:value.doubleValue];
  } else if ([key isEqualToString:@"googAvailableReceiveBandwidth"]) {
    _availableRecvBw = [ARDBitrateTracker bitrateStringForBitrate:value.doubleValue];
  } else if ([key isEqualToString:@"googActualEncBitrate"]) {
    _actualEncBitrate = [ARDBitrateTracker bitrateStringForBitrate:value.doubleValue];
  } else if ([key isEqualToString:@"googTargetEncBitrate"]) {
    _targetEncBitrate = [ARDBitrateTracker bitrateStringForBitrate:value.doubleValue];
  }
}

- (void)parseBweStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
  [statsReport.values
      enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
        [self updateBweStatOfKey:key value:value];
      }];
}

- (void)updateConnectionStatOfKey:(NSString *)key value:(NSString *)value {
  if ([key isEqualToString:@"googRtt"]) {
    _connRtt = value;
  } else if ([key isEqualToString:@"googLocalCandidateType"]) {
    _localCandType = value;
  } else if ([key isEqualToString:@"googRemoteCandidateType"]) {
    _remoteCandType = value;
  } else if ([key isEqualToString:@"googTransportType"]) {
    _transportType = value;
  } else if ([key isEqualToString:@"bytesReceived"]) {
    NSInteger byteCount = value.integerValue;
    [_connRecvBitrateTracker updateBitrateWithCurrentByteCount:byteCount];
    _connRecvBitrate = _connRecvBitrateTracker.bitrateString;
  } else if ([key isEqualToString:@"bytesSent"]) {
    NSInteger byteCount = value.integerValue;
    [_connSendBitrateTracker updateBitrateWithCurrentByteCount:byteCount];
    _connSendBitrate = _connSendBitrateTracker.bitrateString;
  }
}

- (void)parseConnectionStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
  NSString *activeConnection = statsReport.values[@"googActiveConnection"];
  if (![activeConnection isEqualToString:@"true"]) {
    return;
  }
  [statsReport.values
      enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
        [self updateConnectionStatOfKey:key value:value];
      }];
}

- (void)parseSendSsrcStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
  NSDictionary *values = statsReport.values;
  if ([values objectForKey:@"googFrameRateSent"]) {
    // Video track.
    [self parseVideoSendStatsReport:statsReport];
  } else if ([values objectForKey:@"audioInputLevel"]) {
    // Audio track.
    [self parseAudioSendStatsReport:statsReport];
  }
}

- (void)updateAudioSendStatOfKey:(NSString *)key value:(NSString *)value {
  if ([key isEqualToString:@"googCodecName"]) {
    _audioSendCodec = value;
  } else if ([key isEqualToString:@"bytesSent"]) {
    NSInteger byteCount = value.integerValue;
    [_audioSendBitrateTracker updateBitrateWithCurrentByteCount:byteCount];
    _audioSendBitrate = _audioSendBitrateTracker.bitrateString;
  }
}

- (void)parseAudioSendStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
  [statsReport.values
      enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
        [self updateAudioSendStatOfKey:key value:value];
      }];
}

- (void)updateVideoSendStatOfKey:(NSString *)key value:(NSString *)value {
  if ([key isEqualToString:@"googCodecName"]) {
    _videoSendCodec = value;
  } else if ([key isEqualToString:@"googFrameHeightInput"]) {
    _videoInputHeight = value;
  } else if ([key isEqualToString:@"googFrameWidthInput"]) {
    _videoInputWidth = value;
  } else if ([key isEqualToString:@"googFrameRateInput"]) {
    _videoInputFps = value;
  } else if ([key isEqualToString:@"googFrameHeightSent"]) {
    _videoSendHeight = value;
  } else if ([key isEqualToString:@"googFrameWidthSent"]) {
    _videoSendWidth = value;
  } else if ([key isEqualToString:@"googFrameRateSent"]) {
    _videoSendFps = value;
  } else if ([key isEqualToString:@"googAvgEncodeMs"]) {
    _videoEncodeMs = value;
  } else if ([key isEqualToString:@"bytesSent"]) {
    NSInteger byteCount = value.integerValue;
    [_videoSendBitrateTracker updateBitrateWithCurrentByteCount:byteCount];
    _videoSendBitrate = _videoSendBitrateTracker.bitrateString;
  } else if ([key isEqualToString:@"qpSum"]) {
    _oldVideoQPSum = _videoQPSum;
    _videoQPSum = value.integerValue;
  } else if ([key isEqualToString:@"framesEncoded"]) {
    _oldFramesEncoded = _framesEncoded;
    _framesEncoded = value.integerValue;
  }
}

- (void)parseVideoSendStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
  [statsReport.values
      enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
        [self updateVideoSendStatOfKey:key value:value];
      }];
}

- (void)parseRecvSsrcStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
  NSDictionary *values = statsReport.values;
  if ([values objectForKey:@"googFrameWidthReceived"]) {
    // Video track.
    [self parseVideoRecvStatsReport:statsReport];
  } else if ([values objectForKey:@"audioOutputLevel"]) {
    // Audio track.
    [self parseAudioRecvStatsReport:statsReport];
  }
}

- (void)updateAudioRecvStatOfKey:(NSString *)key value:(NSString *)value {
  if ([key isEqualToString:@"googCodecName"]) {
    _audioRecvCodec = value;
  } else if ([key isEqualToString:@"bytesReceived"]) {
    NSInteger byteCount = value.integerValue;
    [_audioRecvBitrateTracker updateBitrateWithCurrentByteCount:byteCount];
    _audioRecvBitrate = _audioRecvBitrateTracker.bitrateString;
  } else if ([key isEqualToString:@"googSpeechExpandRate"]) {
    _audioExpandRate = value;
  } else if ([key isEqualToString:@"googCurrentDelayMs"]) {
    _audioCurrentDelay = value;
  }
}

- (void)parseAudioRecvStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
  [statsReport.values
      enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
        [self updateAudioRecvStatOfKey:key value:value];
      }];
}

- (void)updateVideoRecvStatOfKey:(NSString *)key value:(NSString *)value {
  if ([key isEqualToString:@"googFrameHeightReceived"]) {
    _videoRecvHeight = value;
  } else if ([key isEqualToString:@"googFrameWidthReceived"]) {
    _videoRecvWidth = value;
  } else if ([key isEqualToString:@"googFrameRateReceived"]) {
    _videoRecvFps = value;
  } else if ([key isEqualToString:@"googFrameRateDecoded"]) {
    _videoDecodedFps = value;
  } else if ([key isEqualToString:@"googFrameRateOutput"]) {
    _videoOutputFps = value;
  } else if ([key isEqualToString:@"googDecodeMs"]) {
    _videoDecodeMs = value;
  } else if ([key isEqualToString:@"bytesReceived"]) {
    NSInteger byteCount = value.integerValue;
    [_videoRecvBitrateTracker updateBitrateWithCurrentByteCount:byteCount];
    _videoRecvBitrate = _videoRecvBitrateTracker.bitrateString;
  }
}

- (void)parseVideoRecvStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
  [statsReport.values
      enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
        [self updateVideoRecvStatOfKey:key value:value];
      }];
}

@end
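For context, a minimal usage sketch of how this builder is typically driven, assuming the legacy statsForTrack:statsOutputLevel:completionHandler: selector on the peer connection and caller-owned peerConnection and statsBuilder objects (both hypothetical here, not part of this file):

// Hypothetical call site: request legacy stats, feed every report into the
// builder, then log the aggregate display string.
[peerConnection statsForTrack:nil
             statsOutputLevel:RTCStatsOutputLevelDebug
            completionHandler:^(NSArray<RTC_OBJC_TYPE(RTCLegacyStatsReport) *> *stats) {
              for (RTC_OBJC_TYPE(RTCLegacyStatsReport) *report in stats) {
                [statsBuilder parseStatsReport:report];
              }
              NSLog(@"%@", [statsBuilder statsString]);
            }];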