mirror of
https://github.com/mollyim/webrtc.git
synced 2025-05-14 06:10:40 +01:00
Wrap WebRTC OBJC API types with RTC_OBJC_TYPE.
This CL introduces two new macros that affect the WebRTC OBJC API symbols:

- RTC_OBJC_TYPE_PREFIX: macro used to prepend a prefix to the API types that are exported with RTC_OBJC_EXPORT. Clients can patch the definition of this macro locally and build WebRTC.framework with their own prefix in case symbol clashing is a problem. This macro must only be defined by changing the value in sdk/objc/base/RTCMacros.h, not via a compiler flag, to ensure it has a unique value.
- RTC_OBJC_TYPE: macro used internally to reference API types. Declaring an API type without using this macro leaves the declared type out of the set of types affected by the configurable RTC_OBJC_TYPE_PREFIX.

Manual changes: https://webrtc-review.googlesource.com/c/src/+/173781/5..10
The auto-generated changes in PS#5 were made with: https://webrtc-review.googlesource.com/c/src/+/174061

Bug: None
Change-Id: I0d54ca94db764fb3b6cb4365873f79e14cd879b8
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/173781
Commit-Queue: Mirko Bonadei <mbonadei@webrtc.org>
Reviewed-by: Karl Wiberg <kwiberg@webrtc.org>
Reviewed-by: Kári Helgason <kthelgason@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#31153}
This commit is contained in:
parent ce1320cc4d
commit a81e9c82fc
303 changed files with 2534 additions and 2189 deletions
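For illustration, here is a minimal sketch of how the two macros described in the commit message could fit together; the token-pasting helpers and the example prefix are hypothetical, see sdk/objc/base/RTCMacros.h for the actual definitions:

  // RTCMacros.h (illustrative sketch, not the verbatim file contents).
  // Default prefix is empty; a client that needs to avoid symbol clashes can
  // patch this single definition and rebuild WebRTC.framework.
  #define RTC_OBJC_TYPE_PREFIX

  // Hypothetical concatenation helpers: expand the prefix first, then paste.
  #define RTC_SYMBOL_CONCAT_HELPER(a, b) a##b
  #define RTC_SYMBOL_CONCAT(a, b) RTC_SYMBOL_CONCAT_HELPER(a, b)

  // Every exported Objective-C API type is referenced through this macro.
  #define RTC_OBJC_TYPE(type_name) RTC_SYMBOL_CONCAT(RTC_OBJC_TYPE_PREFIX, type_name)

  // With the default (empty) prefix this declares "RTCVideoTrack"; with a
  // patched prefix such as "MyApp" it would declare "MyAppRTCVideoTrack".
  @class RTC_OBJC_TYPE(RTCVideoTrack);

This is why the diff below rewrites declarations such as RTCVideoTrack * into RTC_OBJC_TYPE(RTCVideoTrack) *: only types referenced through the macro pick up the configurable prefix.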
@@ -16,9 +16,9 @@
 #import "ARDSignalingChannel.h"
 #import "ARDTURNClient.h"
 
-@class RTCPeerConnectionFactory;
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactory);
 
-@interface ARDAppClient () <ARDSignalingChannelDelegate, RTCPeerConnectionDelegate>
+@interface ARDAppClient () <ARDSignalingChannelDelegate, RTC_OBJC_TYPE (RTCPeerConnectionDelegate)>
 
 // All properties should only be mutated from the main queue.
 @property(nonatomic, strong) id<ARDRoomServerClient> roomServerClient;

@@ -26,8 +26,8 @@
 @property(nonatomic, strong) id<ARDSignalingChannel> loopbackChannel;
 @property(nonatomic, strong) id<ARDTURNClient> turnClient;
 
-@property(nonatomic, strong) RTCPeerConnection *peerConnection;
-@property(nonatomic, strong) RTCPeerConnectionFactory *factory;
+@property(nonatomic, strong) RTC_OBJC_TYPE(RTCPeerConnection) * peerConnection;
+@property(nonatomic, strong) RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
 @property(nonatomic, strong) NSMutableArray *messageQueue;
 
 @property(nonatomic, assign) BOOL isTurnComplete;

@@ -42,7 +42,7 @@
 @property(nonatomic, strong) NSURL *webSocketRestURL;
 @property(nonatomic, readonly) BOOL isLoopback;
 
-@property(nonatomic, strong) RTCMediaConstraints *defaultPeerConnectionConstraints;
+@property(nonatomic, strong) RTC_OBJC_TYPE(RTCMediaConstraints) * defaultPeerConnectionConstraints;
 
 - (instancetype)initWithRoomServerClient:(id<ARDRoomServerClient>)rsClient
                         signalingChannel:(id<ARDSignalingChannel>)channel
@@ -24,9 +24,9 @@ typedef NS_ENUM(NSInteger, ARDAppClientState) {
 @class ARDAppClient;
 @class ARDSettingsModel;
 @class ARDExternalSampleCapturer;
-@class RTCMediaConstraints;
-@class RTCCameraVideoCapturer;
-@class RTCFileVideoCapturer;
+@class RTC_OBJC_TYPE(RTCMediaConstraints);
+@class RTC_OBJC_TYPE(RTCCameraVideoCapturer);
+@class RTC_OBJC_TYPE(RTCFileVideoCapturer);
 
 // The delegate is informed of pertinent events and will be called on the
 // main queue.

@@ -37,12 +37,13 @@ typedef NS_ENUM(NSInteger, ARDAppClientState) {
 - (void)appClient:(ARDAppClient *)client didChangeConnectionState:(RTCIceConnectionState)state;
 
 - (void)appClient:(ARDAppClient *)client
-    didCreateLocalCapturer:(RTCCameraVideoCapturer *)localCapturer;
+    didCreateLocalCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer;
 
-- (void)appClient:(ARDAppClient *)client didReceiveLocalVideoTrack:(RTCVideoTrack *)localVideoTrack;
+- (void)appClient:(ARDAppClient *)client
+    didReceiveLocalVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)localVideoTrack;
 
 - (void)appClient:(ARDAppClient *)client
-    didReceiveRemoteVideoTrack:(RTCVideoTrack *)remoteVideoTrack;
+    didReceiveRemoteVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack;
 
 - (void)appClient:(ARDAppClient *)client didError:(NSError *)error;
 

@@ -50,7 +51,7 @@ typedef NS_ENUM(NSInteger, ARDAppClientState) {
 
 @optional
 - (void)appClient:(ARDAppClient *)client
-    didCreateLocalFileCapturer:(RTCFileVideoCapturer *)fileCapturer;
+    didCreateLocalFileCapturer:(RTC_OBJC_TYPE(RTCFileVideoCapturer) *)fileCapturer;
 
 - (void)appClient:(ARDAppClient *)client
     didCreateLocalExternalSampleCapturer:(ARDExternalSampleCapturer *)externalSampleCapturer;
@@ -105,10 +105,10 @@ static int const kKbpsMultiplier = 1000;
 @end
 
 @implementation ARDAppClient {
-  RTCFileLogger *_fileLogger;
+  RTC_OBJC_TYPE(RTCFileLogger) * _fileLogger;
   ARDTimerProxy *_statsTimer;
   ARDSettingsModel *_settings;
-  RTCVideoTrack *_localVideoTrack;
+  RTC_OBJC_TYPE(RTCVideoTrack) * _localVideoTrack;
 }
 
 @synthesize shouldGetStats = _shouldGetStats;

@@ -172,7 +172,7 @@ static int const kKbpsMultiplier = 1000;
 - (void)configure {
   _messageQueue = [NSMutableArray array];
   _iceServers = [NSMutableArray array];
-  _fileLogger = [[RTCFileLogger alloc] init];
+  _fileLogger = [[RTC_OBJC_TYPE(RTCFileLogger) alloc] init];
   [_fileLogger start];
 }
 

@@ -224,11 +224,14 @@ static int const kKbpsMultiplier = 1000;
   _isLoopback = isLoopback;
   self.state = kARDAppClientStateConnecting;
 
-  RTCDefaultVideoDecoderFactory *decoderFactory = [[RTCDefaultVideoDecoderFactory alloc] init];
-  RTCDefaultVideoEncoderFactory *encoderFactory = [[RTCDefaultVideoEncoderFactory alloc] init];
+  RTC_OBJC_TYPE(RTCDefaultVideoDecoderFactory) *decoderFactory =
+      [[RTC_OBJC_TYPE(RTCDefaultVideoDecoderFactory) alloc] init];
+  RTC_OBJC_TYPE(RTCDefaultVideoEncoderFactory) *encoderFactory =
+      [[RTC_OBJC_TYPE(RTCDefaultVideoEncoderFactory) alloc] init];
   encoderFactory.preferredCodec = [settings currentVideoCodecSettingFromStore];
-  _factory = [[RTCPeerConnectionFactory alloc] initWithEncoderFactory:encoderFactory
-                                                       decoderFactory:decoderFactory];
+  _factory =
+      [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] initWithEncoderFactory:encoderFactory
+                                                                decoderFactory:decoderFactory];
 
 #if defined(WEBRTC_IOS)
   if (kARDAppClientEnableTracing) {
@@ -365,38 +368,38 @@ static int const kKbpsMultiplier = 1000;
   }
 }
 
-#pragma mark - RTCPeerConnectionDelegate
+#pragma mark - RTC_OBJC_TYPE(RTCPeerConnectionDelegate)
 // Callbacks for this delegate occur on non-main thread and need to be
 // dispatched back to main queue as needed.
 
-- (void)peerConnection:(RTCPeerConnection *)peerConnection
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
     didChangeSignalingState:(RTCSignalingState)stateChanged {
   RTCLog(@"Signaling state changed: %ld", (long)stateChanged);
 }
 
-- (void)peerConnection:(RTCPeerConnection *)peerConnection
-          didAddStream:(RTCMediaStream *)stream {
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+          didAddStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream {
   RTCLog(@"Stream with %lu video tracks and %lu audio tracks was added.",
          (unsigned long)stream.videoTracks.count,
          (unsigned long)stream.audioTracks.count);
 }
 
-- (void)peerConnection:(RTCPeerConnection *)peerConnection
-    didStartReceivingOnTransceiver:(RTCRtpTransceiver *)transceiver {
-  RTCMediaStreamTrack *track = transceiver.receiver.track;
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+    didStartReceivingOnTransceiver:(RTC_OBJC_TYPE(RTCRtpTransceiver) *)transceiver {
+  RTC_OBJC_TYPE(RTCMediaStreamTrack) *track = transceiver.receiver.track;
   RTCLog(@"Now receiving %@ on track %@.", track.kind, track.trackId);
 }
 
-- (void)peerConnection:(RTCPeerConnection *)peerConnection
-       didRemoveStream:(RTCMediaStream *)stream {
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+       didRemoveStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream {
   RTCLog(@"Stream was removed.");
 }
 
-- (void)peerConnectionShouldNegotiate:(RTCPeerConnection *)peerConnection {
+- (void)peerConnectionShouldNegotiate:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection {
   RTCLog(@"WARNING: Renegotiation needed but unimplemented.");
 }
 
-- (void)peerConnection:(RTCPeerConnection *)peerConnection
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
     didChangeIceConnectionState:(RTCIceConnectionState)newState {
   RTCLog(@"ICE state changed: %ld", (long)newState);
   dispatch_async(dispatch_get_main_queue(), ^{

@@ -404,18 +407,18 @@ static int const kKbpsMultiplier = 1000;
   });
 }
 
-- (void)peerConnection:(RTCPeerConnection *)peerConnection
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
     didChangeConnectionState:(RTCPeerConnectionState)newState {
   RTCLog(@"ICE+DTLS state changed: %ld", (long)newState);
 }
 
-- (void)peerConnection:(RTCPeerConnection *)peerConnection
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
     didChangeIceGatheringState:(RTCIceGatheringState)newState {
   RTCLog(@"ICE gathering state changed: %ld", (long)newState);
 }
 
-- (void)peerConnection:(RTCPeerConnection *)peerConnection
-    didGenerateIceCandidate:(RTCIceCandidate *)candidate {
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+    didGenerateIceCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)candidate {
   dispatch_async(dispatch_get_main_queue(), ^{
     ARDICECandidateMessage *message =
         [[ARDICECandidateMessage alloc] initWithCandidate:candidate];

@@ -423,8 +426,8 @@ static int const kKbpsMultiplier = 1000;
   });
 }
 
-- (void)peerConnection:(RTCPeerConnection *)peerConnection
-    didRemoveIceCandidates:(NSArray<RTCIceCandidate *> *)candidates {
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+    didRemoveIceCandidates:(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates {
   dispatch_async(dispatch_get_main_queue(), ^{
     ARDICECandidateRemovalMessage *message =
         [[ARDICECandidateRemovalMessage alloc]
@@ -433,24 +436,24 @@ static int const kKbpsMultiplier = 1000;
   });
 }
 
-- (void)peerConnection:(RTCPeerConnection *)peerConnection
-    didChangeLocalCandidate:(RTCIceCandidate *)local
-    didChangeRemoteCandidate:(RTCIceCandidate *)remote
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+    didChangeLocalCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)local
+    didChangeRemoteCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)remote
              lastReceivedMs:(int)lastDataReceivedMs
               didHaveReason:(NSString *)reason {
   RTCLog(@"ICE candidate pair changed because: %@", reason);
 }
 
-- (void)peerConnection:(RTCPeerConnection *)peerConnection
-    didOpenDataChannel:(RTCDataChannel *)dataChannel {
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+    didOpenDataChannel:(RTC_OBJC_TYPE(RTCDataChannel) *)dataChannel {
 }
 
 #pragma mark - RTCSessionDescriptionDelegate
 // Callbacks for this delegate occur on non-main thread and need to be
 // dispatched back to main queue as needed.
 
-- (void)peerConnection:(RTCPeerConnection *)peerConnection
-    didCreateSessionDescription:(RTCSessionDescription *)sdp
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+    didCreateSessionDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)sdp
                           error:(NSError *)error {
   dispatch_async(dispatch_get_main_queue(), ^{
     if (error) {

@@ -480,7 +483,7 @@ static int const kKbpsMultiplier = 1000;
   });
 }
 
-- (void)peerConnection:(RTCPeerConnection *)peerConnection
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
     didSetSessionDescriptionWithError:(NSError *)error {
   dispatch_async(dispatch_get_main_queue(), ^{
     if (error) {

@@ -499,15 +502,16 @@ static int const kKbpsMultiplier = 1000;
     // If we're answering and we've just set the remote offer we need to create
     // an answer and set the local description.
     if (!self.isInitiator && !self.peerConnection.localDescription) {
-      RTCMediaConstraints *constraints = [self defaultAnswerConstraints];
+      RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = [self defaultAnswerConstraints];
       __weak ARDAppClient *weakSelf = self;
-      [self.peerConnection answerForConstraints:constraints
-                               completionHandler:^(RTCSessionDescription *sdp, NSError *error) {
-                                 ARDAppClient *strongSelf = weakSelf;
-                                 [strongSelf peerConnection:strongSelf.peerConnection
-                                     didCreateSessionDescription:sdp
-                                                            error:error];
-                               }];
+      [self.peerConnection
+          answerForConstraints:constraints
+             completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) * sdp, NSError * error) {
+               ARDAppClient *strongSelf = weakSelf;
+               [strongSelf peerConnection:strongSelf.peerConnection
+                   didCreateSessionDescription:sdp
+                                          error:error];
+             }];
     }
   });
 }
@@ -544,12 +548,10 @@ static int const kKbpsMultiplier = 1000;
   self.state = kARDAppClientStateConnected;
 
   // Create peer connection.
-  RTCMediaConstraints *constraints = [self defaultPeerConnectionConstraints];
-  RTCConfiguration *config = [[RTCConfiguration alloc] init];
-  RTCCertificate *pcert = [RTCCertificate generateCertificateWithParams:@{
-    @"expires" : @100000,
-    @"name" : @"RSASSA-PKCS1-v1_5"
-  }];
+  RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = [self defaultPeerConnectionConstraints];
+  RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+  RTC_OBJC_TYPE(RTCCertificate) *pcert = [RTC_OBJC_TYPE(RTCCertificate)
+      generateCertificateWithParams:@{@"expires" : @100000, @"name" : @"RSASSA-PKCS1-v1_5"}];
   config.iceServers = _iceServers;
   config.sdpSemantics = RTCSdpSemanticsUnifiedPlan;
   config.certificate = pcert;

@@ -562,14 +564,14 @@ static int const kKbpsMultiplier = 1000;
   if (_isInitiator) {
     // Send offer.
     __weak ARDAppClient *weakSelf = self;
-    [_peerConnection offerForConstraints:[self defaultOfferConstraints]
-                       completionHandler:^(RTCSessionDescription *sdp,
-                                           NSError *error) {
+    [_peerConnection
+        offerForConstraints:[self defaultOfferConstraints]
+          completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) * sdp, NSError * error) {
             ARDAppClient *strongSelf = weakSelf;
             [strongSelf peerConnection:strongSelf.peerConnection
                 didCreateSessionDescription:sdp
                                        error:error];
           }];
   } else {
     // Check if we've received an offer.
     [self drainMessageQueueIfReady];

@@ -619,7 +621,7 @@ static int const kKbpsMultiplier = 1000;
     case kARDSignalingMessageTypeAnswer: {
       ARDSessionDescriptionMessage *sdpMessage =
           (ARDSessionDescriptionMessage *)message;
-      RTCSessionDescription *description = sdpMessage.sessionDescription;
+      RTC_OBJC_TYPE(RTCSessionDescription) *description = sdpMessage.sessionDescription;
       __weak ARDAppClient *weakSelf = self;
       [_peerConnection setRemoteDescription:description
                           completionHandler:^(NSError *error) {

@@ -679,7 +681,7 @@ static int const kKbpsMultiplier = 1000;
 }
 
 - (void)setMaxBitrateForPeerConnectionVideoSender {
-  for (RTCRtpSender *sender in _peerConnection.senders) {
+  for (RTC_OBJC_TYPE(RTCRtpSender) * sender in _peerConnection.senders) {
     if (sender.track != nil) {
       if ([sender.track.kind isEqualToString:kARDVideoTrackKind]) {
         [self setMaxBitrate:[_settings currentMaxBitrateSettingFromStore] forVideoSender:sender];
@@ -688,20 +690,20 @@ static int const kKbpsMultiplier = 1000;
   }
 }
 
-- (void)setMaxBitrate:(NSNumber *)maxBitrate forVideoSender:(RTCRtpSender *)sender {
+- (void)setMaxBitrate:(NSNumber *)maxBitrate forVideoSender:(RTC_OBJC_TYPE(RTCRtpSender) *)sender {
   if (maxBitrate.intValue <= 0) {
     return;
   }
 
-  RTCRtpParameters *parametersToModify = sender.parameters;
-  for (RTCRtpEncodingParameters *encoding in parametersToModify.encodings) {
+  RTC_OBJC_TYPE(RTCRtpParameters) *parametersToModify = sender.parameters;
+  for (RTC_OBJC_TYPE(RTCRtpEncodingParameters) * encoding in parametersToModify.encodings) {
     encoding.maxBitrateBps = @(maxBitrate.intValue * kKbpsMultiplier);
   }
   [sender setParameters:parametersToModify];
 }
 
-- (RTCRtpTransceiver *)videoTransceiver {
-  for (RTCRtpTransceiver *transceiver in _peerConnection.transceivers) {
+- (RTC_OBJC_TYPE(RTCRtpTransceiver) *)videoTransceiver {
+  for (RTC_OBJC_TYPE(RTCRtpTransceiver) * transceiver in _peerConnection.transceivers) {
     if (transceiver.mediaType == RTCRtpMediaTypeVideo) {
       return transceiver;
     }

@@ -710,29 +712,30 @@ static int const kKbpsMultiplier = 1000;
 }
 
 - (void)createMediaSenders {
-  RTCMediaConstraints *constraints = [self defaultMediaAudioConstraints];
-  RTCAudioSource *source = [_factory audioSourceWithConstraints:constraints];
-  RTCAudioTrack *track = [_factory audioTrackWithSource:source
+  RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = [self defaultMediaAudioConstraints];
+  RTC_OBJC_TYPE(RTCAudioSource) *source = [_factory audioSourceWithConstraints:constraints];
+  RTC_OBJC_TYPE(RTCAudioTrack) *track = [_factory audioTrackWithSource:source
                                                                 trackId:kARDAudioTrackId];
   [_peerConnection addTrack:track streamIds:@[ kARDMediaStreamId ]];
   _localVideoTrack = [self createLocalVideoTrack];
   if (_localVideoTrack) {
     [_peerConnection addTrack:_localVideoTrack streamIds:@[ kARDMediaStreamId ]];
     [_delegate appClient:self didReceiveLocalVideoTrack:_localVideoTrack];
     // We can set up rendering for the remote track right away since the transceiver already has an
-    // RTCRtpReceiver with a track. The track will automatically get unmuted and produce frames
-    // once RTP is received.
-    RTCVideoTrack *track = (RTCVideoTrack *)([self videoTransceiver].receiver.track);
+    // RTC_OBJC_TYPE(RTCRtpReceiver) with a track. The track will automatically get unmuted and
+    // produce frames once RTP is received.
+    RTC_OBJC_TYPE(RTCVideoTrack) *track =
+        (RTC_OBJC_TYPE(RTCVideoTrack) *)([self videoTransceiver].receiver.track);
     [_delegate appClient:self didReceiveRemoteVideoTrack:track];
   }
 }
 
-- (RTCVideoTrack *)createLocalVideoTrack {
+- (RTC_OBJC_TYPE(RTCVideoTrack) *)createLocalVideoTrack {
   if ([_settings currentAudioOnlySettingFromStore]) {
     return nil;
   }
 
-  RTCVideoSource *source = [_factory videoSource];
+  RTC_OBJC_TYPE(RTCVideoSource) *source = [_factory videoSource];
 
 #if !TARGET_IPHONE_SIMULATOR
   if (self.isBroadcast) {

@@ -740,13 +743,15 @@ static int const kKbpsMultiplier = 1000;
         [[ARDExternalSampleCapturer alloc] initWithDelegate:source];
     [_delegate appClient:self didCreateLocalExternalSampleCapturer:capturer];
   } else {
-    RTCCameraVideoCapturer *capturer = [[RTCCameraVideoCapturer alloc] initWithDelegate:source];
+    RTC_OBJC_TYPE(RTCCameraVideoCapturer) *capturer =
+        [[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc] initWithDelegate:source];
     [_delegate appClient:self didCreateLocalCapturer:capturer];
   }
 #else
 #if defined(__IPHONE_11_0) && (__IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0)
   if (@available(iOS 10, *)) {
-    RTCFileVideoCapturer *fileCapturer = [[RTCFileVideoCapturer alloc] initWithDelegate:source];
+    RTC_OBJC_TYPE(RTCFileVideoCapturer) *fileCapturer =
+        [[RTC_OBJC_TYPE(RTCFileVideoCapturer) alloc] initWithDelegate:source];
     [_delegate appClient:self didCreateLocalFileCapturer:fileCapturer];
   }
 #endif
@@ -781,40 +786,38 @@ static int const kKbpsMultiplier = 1000;
 
 #pragma mark - Defaults
 
-- (RTCMediaConstraints *)defaultMediaAudioConstraints {
+- (RTC_OBJC_TYPE(RTCMediaConstraints) *)defaultMediaAudioConstraints {
   NSDictionary *mandatoryConstraints = @{};
-  RTCMediaConstraints *constraints =
-      [[RTCMediaConstraints alloc] initWithMandatoryConstraints:mandatoryConstraints
+  RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
+      [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:mandatoryConstraints
                                                             optionalConstraints:nil];
   return constraints;
 }
 
-- (RTCMediaConstraints *)defaultAnswerConstraints {
+- (RTC_OBJC_TYPE(RTCMediaConstraints) *)defaultAnswerConstraints {
   return [self defaultOfferConstraints];
 }
 
-- (RTCMediaConstraints *)defaultOfferConstraints {
+- (RTC_OBJC_TYPE(RTCMediaConstraints) *)defaultOfferConstraints {
   NSDictionary *mandatoryConstraints = @{
     @"OfferToReceiveAudio" : @"true",
     @"OfferToReceiveVideo" : @"true"
   };
-  RTCMediaConstraints* constraints =
-      [[RTCMediaConstraints alloc]
-          initWithMandatoryConstraints:mandatoryConstraints
-                   optionalConstraints:nil];
+  RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
+      [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:mandatoryConstraints
+                                                            optionalConstraints:nil];
   return constraints;
 }
 
-- (RTCMediaConstraints *)defaultPeerConnectionConstraints {
+- (RTC_OBJC_TYPE(RTCMediaConstraints) *)defaultPeerConnectionConstraints {
   if (_defaultPeerConnectionConstraints) {
     return _defaultPeerConnectionConstraints;
   }
   NSString *value = _isLoopback ? @"false" : @"true";
   NSDictionary *optionalConstraints = @{ @"DtlsSrtpKeyAgreement" : value };
-  RTCMediaConstraints* constraints =
-      [[RTCMediaConstraints alloc]
-          initWithMandatoryConstraints:nil
-                   optionalConstraints:optionalConstraints];
+  RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
+      [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:nil
+                                                            optionalConstraints:optionalConstraints];
   return constraints;
 }
 
@@ -15,7 +15,7 @@
 // Controls the camera. Handles starting the capture, switching cameras etc.
 @interface ARDCaptureController : NSObject
 
-- (instancetype)initWithCapturer:(RTCCameraVideoCapturer *)capturer
+- (instancetype)initWithCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)capturer
                         settings:(ARDSettingsModel *)settings;
 - (void)startCapture;
 - (void)stopCapture;
@@ -17,12 +17,12 @@
 const Float64 kFramerateLimit = 30.0;
 
 @implementation ARDCaptureController {
-  RTCCameraVideoCapturer *_capturer;
+  RTC_OBJC_TYPE(RTCCameraVideoCapturer) * _capturer;
   ARDSettingsModel *_settings;
   BOOL _usingFrontCamera;
 }
 
-- (instancetype)initWithCapturer:(RTCCameraVideoCapturer *)capturer
+- (instancetype)initWithCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)capturer
                         settings:(ARDSettingsModel *)settings {
   if (self = [super init]) {
     _capturer = capturer;

@@ -63,7 +63,8 @@ const Float64 kFramerateLimit = 30.0;
 #pragma mark - Private
 
 - (AVCaptureDevice *)findDeviceForPosition:(AVCaptureDevicePosition)position {
-  NSArray<AVCaptureDevice *> *captureDevices = [RTCCameraVideoCapturer captureDevices];
+  NSArray<AVCaptureDevice *> *captureDevices =
+      [RTC_OBJC_TYPE(RTCCameraVideoCapturer) captureDevices];
   for (AVCaptureDevice *device in captureDevices) {
     if (device.position == position) {
       return device;

@@ -74,7 +75,7 @@
 
 - (AVCaptureDeviceFormat *)selectFormatForDevice:(AVCaptureDevice *)device {
   NSArray<AVCaptureDeviceFormat *> *formats =
-      [RTCCameraVideoCapturer supportedFormatsForDevice:device];
+      [RTC_OBJC_TYPE(RTCCameraVideoCapturer) supportedFormatsForDevice:device];
   int targetWidth = [_settings currentVideoResolutionWidthFromStore];
   int targetHeight = [_settings currentVideoResolutionHeightFromStore];
   AVCaptureDeviceFormat *selectedFormat = nil;
@@ -14,5 +14,5 @@
 - (void)didCaptureSampleBuffer:(CMSampleBufferRef)sampleBuffer;
 @end
 
-@interface ARDExternalSampleCapturer : RTCVideoCapturer <ARDExternalSampleDelegate>
-@end
+@interface ARDExternalSampleCapturer : RTC_OBJC_TYPE
+(RTCVideoCapturer)<ARDExternalSampleDelegate> @end
@@ -15,7 +15,7 @@
 
 @implementation ARDExternalSampleCapturer
 
-- (instancetype)initWithDelegate:(__weak id<RTCVideoCapturerDelegate>)delegate {
+- (instancetype)initWithDelegate:(__weak id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate {
   return [super initWithDelegate:delegate];
 }
 

@@ -32,12 +32,14 @@
     return;
   }
 
-  RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer];
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
+      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer];
   int64_t timeStampNs =
       CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * NSEC_PER_SEC;
-  RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer
-                                                           rotation:RTCVideoRotation_0
-                                                        timeStampNs:timeStampNs];
+  RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame =
+      [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcPixelBuffer
+                                                   rotation:RTCVideoRotation_0
+                                                timeStampNs:timeStampNs];
   [self.delegate capturer:self didCaptureVideoFrame:videoFrame];
 }
 
@@ -53,12 +53,12 @@ NS_ASSUME_NONNULL_BEGIN
 /**
  * Returns array of available video codecs.
  */
-- (NSArray<RTCVideoCodecInfo *> *)availableVideoCodecs;
+- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)availableVideoCodecs;
 
 /**
  * Returns current video codec setting from store if present or default (H264) otherwise.
  */
-- (RTCVideoCodecInfo *)currentVideoCodecSettingFromStore;
+- (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)currentVideoCodecSettingFromStore;
 
 /**
  * Stores the provided video codec setting into the store.

@@ -68,7 +68,7 @@ NS_ASSUME_NONNULL_BEGIN
  * @param video codec settings the string to be stored.
  * @return YES/NO depending on success.
  */
-- (BOOL)storeVideoCodecSetting:(RTCVideoCodecInfo *)videoCodec;
+- (BOOL)storeVideoCodecSetting:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)videoCodec;
 
 /**
  * Returns current max bitrate setting from store if present.
@@ -27,9 +27,9 @@ NS_ASSUME_NONNULL_BEGIN
 - (NSArray<NSString *> *)availableVideoResolutions {
   NSMutableSet<NSArray<NSNumber *> *> *resolutions =
       [[NSMutableSet<NSArray<NSNumber *> *> alloc] init];
-  for (AVCaptureDevice *device in [RTCCameraVideoCapturer captureDevices]) {
+  for (AVCaptureDevice *device in [RTC_OBJC_TYPE(RTCCameraVideoCapturer) captureDevices]) {
     for (AVCaptureDeviceFormat *format in
-         [RTCCameraVideoCapturer supportedFormatsForDevice:device]) {
+         [RTC_OBJC_TYPE(RTCCameraVideoCapturer) supportedFormatsForDevice:device]) {
       CMVideoDimensions resolution =
           CMVideoFormatDescriptionGetDimensions(format.formatDescription);
       NSArray<NSNumber *> *resolutionObject = @[ @(resolution.width), @(resolution.height) ];

@@ -70,17 +70,17 @@ NS_ASSUME_NONNULL_BEGIN
   return YES;
 }
 
-- (NSArray<RTCVideoCodecInfo *> *)availableVideoCodecs {
-  return [RTCDefaultVideoEncoderFactory supportedCodecs];
+- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)availableVideoCodecs {
+  return [RTC_OBJC_TYPE(RTCDefaultVideoEncoderFactory) supportedCodecs];
 }
 
-- (RTCVideoCodecInfo *)currentVideoCodecSettingFromStore {
+- (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)currentVideoCodecSettingFromStore {
   [self registerStoreDefaults];
   NSData *codecData = [[self settingsStore] videoCodec];
   return [NSKeyedUnarchiver unarchiveObjectWithData:codecData];
 }
 
-- (BOOL)storeVideoCodecSetting:(RTCVideoCodecInfo *)videoCodec {
+- (BOOL)storeVideoCodecSetting:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)videoCodec {
   if (![[self availableVideoCodecs] containsObject:videoCodec]) {
     return NO;
   }

@@ -149,7 +149,7 @@ NS_ASSUME_NONNULL_BEGIN
   return [self availableVideoResolutions].firstObject;
 }
 
-- (RTCVideoCodecInfo *)defaultVideoCodecSetting {
+- (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)defaultVideoCodecSetting {
   return [self availableVideoCodecs].firstObject;
 }
 
@@ -32,25 +32,25 @@ typedef enum {
 
 @interface ARDICECandidateMessage : ARDSignalingMessage
 
-@property(nonatomic, readonly) RTCIceCandidate *candidate;
+@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCIceCandidate) * candidate;
 
-- (instancetype)initWithCandidate:(RTCIceCandidate *)candidate;
+- (instancetype)initWithCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)candidate;
 
 @end
 
 @interface ARDICECandidateRemovalMessage : ARDSignalingMessage
 
-@property(nonatomic, readonly) NSArray<RTCIceCandidate *> *candidates;
+@property(nonatomic, readonly) NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *candidates;
 
-- (instancetype)initWithRemovedCandidates:(NSArray<RTCIceCandidate *> *)candidates;
+- (instancetype)initWithRemovedCandidates:(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates;
 
 @end
 
 @interface ARDSessionDescriptionMessage : ARDSignalingMessage
 
-@property(nonatomic, readonly) RTCSessionDescription *sessionDescription;
+@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCSessionDescription) * sessionDescription;
 
-- (instancetype)initWithDescription:(RTCSessionDescription *)description;
+- (instancetype)initWithDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)description;
 
 @end
 
@@ -45,19 +45,19 @@ static NSString * const kARDTypeValueRemoveCandidates = @"remove-candidates";
   NSString *typeString = values[kARDSignalingMessageTypeKey];
   ARDSignalingMessage *message = nil;
   if ([typeString isEqualToString:@"candidate"]) {
-    RTCIceCandidate *candidate =
-        [RTCIceCandidate candidateFromJSONDictionary:values];
+    RTC_OBJC_TYPE(RTCIceCandidate) *candidate =
+        [RTC_OBJC_TYPE(RTCIceCandidate) candidateFromJSONDictionary:values];
     message = [[ARDICECandidateMessage alloc] initWithCandidate:candidate];
   } else if ([typeString isEqualToString:kARDTypeValueRemoveCandidates]) {
     RTCLogInfo(@"Received remove-candidates message");
-    NSArray<RTCIceCandidate *> *candidates =
-        [RTCIceCandidate candidatesFromJSONDictionary:values];
+    NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *candidates =
+        [RTC_OBJC_TYPE(RTCIceCandidate) candidatesFromJSONDictionary:values];
     message = [[ARDICECandidateRemovalMessage alloc]
         initWithRemovedCandidates:candidates];
   } else if ([typeString isEqualToString:@"offer"] ||
              [typeString isEqualToString:@"answer"]) {
-    RTCSessionDescription *description =
-        [RTCSessionDescription descriptionFromJSONDictionary:values];
+    RTC_OBJC_TYPE(RTCSessionDescription) *description =
+        [RTC_OBJC_TYPE(RTCSessionDescription) descriptionFromJSONDictionary:values];
     message =
         [[ARDSessionDescriptionMessage alloc] initWithDescription:description];
   } else if ([typeString isEqualToString:@"bye"]) {

@@ -78,7 +78,7 @@ static NSString * const kARDTypeValueRemoveCandidates = @"remove-candidates";
 
 @synthesize candidate = _candidate;
 
-- (instancetype)initWithCandidate:(RTCIceCandidate *)candidate {
+- (instancetype)initWithCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)candidate {
   if (self = [super initWithType:kARDSignalingMessageTypeCandidate]) {
     _candidate = candidate;
   }

@@ -95,8 +95,7 @@ static NSString * const kARDTypeValueRemoveCandidates = @"remove-candidates";
 
 @synthesize candidates = _candidates;
 
-- (instancetype)initWithRemovedCandidates:(
-    NSArray<RTCIceCandidate *> *)candidates {
+- (instancetype)initWithRemovedCandidates:(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates {
   NSParameterAssert(candidates.count);
   if (self = [super initWithType:kARDSignalingMessageTypeCandidateRemoval]) {
     _candidates = candidates;

@@ -105,9 +104,8 @@ static NSString * const kARDTypeValueRemoveCandidates = @"remove-candidates";
 }
 
 - (NSData *)JSONData {
-  return
-      [RTCIceCandidate JSONDataForIceCandidates:_candidates
-                                       withType:kARDTypeValueRemoveCandidates];
+  return [RTC_OBJC_TYPE(RTCIceCandidate) JSONDataForIceCandidates:_candidates
+                                                          withType:kARDTypeValueRemoveCandidates];
 }
 
 @end

@@ -116,7 +114,7 @@ static NSString * const kARDTypeValueRemoveCandidates = @"remove-candidates";
 
 @synthesize sessionDescription = _sessionDescription;
 
-- (instancetype)initWithDescription:(RTCSessionDescription *)description {
+- (instancetype)initWithDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)description {
   ARDSignalingMessageType messageType = kARDSignalingMessageTypeOffer;
   RTCSdpType sdpType = description.type;
   switch (sdpType) {

@@ -127,8 +125,8 @@ static NSString * const kARDTypeValueRemoveCandidates = @"remove-candidates";
       messageType = kARDSignalingMessageTypeAnswer;
       break;
     case RTCSdpTypePrAnswer:
-      NSAssert(NO, @"Unexpected type: %@",
-               [RTCSessionDescription stringForType:sdpType]);
+      NSAssert(
+          NO, @"Unexpected type: %@", [RTC_OBJC_TYPE(RTCSessionDescription) stringForType:sdpType]);
       break;
   }
   if (self = [super initWithType:messageType]) {
@@ -10,7 +10,9 @@
 
 #import <Foundation/Foundation.h>
 
-@class RTCLegacyStatsReport;
+#import <WebRTC/RTCMacros.h>
+
+@class RTC_OBJC_TYPE(RTCLegacyStatsReport);
 
 /** Class used to accumulate stats information into a single displayable string.
  */

@@ -24,6 +26,6 @@
 /** Parses the information in the stats report into an appropriate internal
  * format used to generate the stats string.
  */
-- (void)parseStatsReport:(RTCLegacyStatsReport *)statsReport;
+- (void)parseStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport;
 
 @end
@@ -11,6 +11,7 @@
 #import "ARDStatsBuilder.h"
 
 #import <WebRTC/RTCLegacyStatsReport.h>
+#import <WebRTC/RTCMacros.h>
 
 #import "ARDBitrateTracker.h"
 #import "ARDUtilities.h"

@@ -141,7 +142,7 @@
   return result;
 }
 
-- (void)parseStatsReport:(RTCLegacyStatsReport *)statsReport {
+- (void)parseStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
   NSString *reportType = statsReport.type;
   if ([reportType isEqualToString:@"ssrc"] &&
       [statsReport.reportId rangeOfString:@"ssrc"].location != NSNotFound) {

@@ -179,7 +180,7 @@
   }
 }
 
-- (void)parseBweStatsReport:(RTCLegacyStatsReport *)statsReport {
+- (void)parseBweStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
   [statsReport.values
       enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
         [self updateBweStatOfKey:key value:value];

@@ -206,7 +207,7 @@
   }
 }
 
-- (void)parseConnectionStatsReport:(RTCLegacyStatsReport *)statsReport {
+- (void)parseConnectionStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
   NSString *activeConnection = statsReport.values[@"googActiveConnection"];
   if (![activeConnection isEqualToString:@"true"]) {
     return;

@@ -217,7 +218,7 @@
   }];
 }
 
-- (void)parseSendSsrcStatsReport:(RTCLegacyStatsReport *)statsReport {
+- (void)parseSendSsrcStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
   NSDictionary *values = statsReport.values;
   if ([values objectForKey:@"googFrameRateSent"]) {
     // Video track.

@@ -238,7 +239,7 @@
   }
 }
 
-- (void)parseAudioSendStatsReport:(RTCLegacyStatsReport *)statsReport {
+- (void)parseAudioSendStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
   [statsReport.values
       enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
         [self updateAudioSendStatOfKey:key value:value];

@@ -275,14 +276,14 @@
   }
 }
 
-- (void)parseVideoSendStatsReport:(RTCLegacyStatsReport *)statsReport {
+- (void)parseVideoSendStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
   [statsReport.values
       enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
         [self updateVideoSendStatOfKey:key value:value];
       }];
 }
 
-- (void)parseRecvSsrcStatsReport:(RTCLegacyStatsReport *)statsReport {
+- (void)parseRecvSsrcStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
   NSDictionary *values = statsReport.values;
   if ([values objectForKey:@"googFrameWidthReceived"]) {
     // Video track.

@@ -307,7 +308,7 @@
   }
 }
 
-- (void)parseAudioRecvStatsReport:(RTCLegacyStatsReport *)statsReport {
+- (void)parseAudioRecvStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
   [statsReport.values
       enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
         [self updateAudioRecvStatOfKey:key value:value];

@@ -334,7 +335,7 @@
   }
 }
 
-- (void)parseVideoRecvStatsReport:(RTCLegacyStatsReport *)statsReport {
+- (void)parseVideoRecvStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
   [statsReport.values
       enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
         [self updateVideoRecvStatOfKey:key value:value];
@@ -10,7 +10,9 @@
 
 #import <Foundation/Foundation.h>
 
-@class RTCIceServer;
+#import <WebRTC/RTCMacros.h>
+
+@class RTC_OBJC_TYPE(RTCIceServer);
 
 @protocol ARDTURNClient <NSObject>
 
@@ -65,10 +65,10 @@ static NSInteger kARDTURNClientErrorBadResponse = -1;
   }
   NSDictionary *turnResponseDict = [NSDictionary dictionaryWithJSONData:data];
   NSMutableArray *turnServers = [NSMutableArray array];
-  [turnResponseDict[@"iceServers"] enumerateObjectsUsingBlock:
-      ^(NSDictionary *obj, NSUInteger idx, BOOL *stop){
-        [turnServers addObject:[RTCIceServer serverFromJSONDictionary:obj]];
+  [turnResponseDict[@"iceServers"]
+      enumerateObjectsUsingBlock:^(NSDictionary *obj, NSUInteger idx, BOOL *stop) {
+        [turnServers addObject:[RTC_OBJC_TYPE(RTCIceServer) serverFromJSONDictionary:obj]];
   }];
   if (!turnServers) {
     NSError *responseError =
         [[NSError alloc] initWithDomain:kARDTURNClientErrorDomain
@@ -217,12 +217,12 @@ static NSString const *kARDWSSMessagePayloadKey = @"msg";
       // Change message to answer, send back to server.
       ARDSessionDescriptionMessage *sdpMessage =
          (ARDSessionDescriptionMessage *)message;
-      RTCSessionDescription *description = sdpMessage.sessionDescription;
+      RTC_OBJC_TYPE(RTCSessionDescription) *description = sdpMessage.sessionDescription;
      NSString *dsc = description.sdp;
      dsc = [dsc stringByReplacingOccurrencesOfString:@"offer"
                                           withString:@"answer"];
-      RTCSessionDescription *answerDescription =
-          [[RTCSessionDescription alloc] initWithType:RTCSdpTypeAnswer sdp:dsc];
+      RTC_OBJC_TYPE(RTCSessionDescription) *answerDescription =
+          [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithType:RTCSdpTypeAnswer sdp:dsc];
      ARDSignalingMessage *answer =
          [[ARDSessionDescriptionMessage alloc]
              initWithDescription:answerDescription];
@@ -10,11 +10,13 @@
 
 #import <WebRTC/RTCIceCandidate.h>
 
-@interface RTCIceCandidate (JSON)
+@interface RTC_OBJC_TYPE (RTCIceCandidate)
+(JSON)
 
-+ (RTCIceCandidate *)candidateFromJSONDictionary:(NSDictionary *)dictionary;
-+ (NSArray<RTCIceCandidate *> *)candidatesFromJSONDictionary:(NSDictionary *)dictionary;
-+ (NSData *)JSONDataForIceCandidates:(NSArray<RTCIceCandidate *> *)candidates
++ (RTC_OBJC_TYPE(RTCIceCandidate) *)candidateFromJSONDictionary : (NSDictionary *)dictionary;
++ (NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidatesFromJSONDictionary:
+    (NSDictionary *)dictionary;
++ (NSData *)JSONDataForIceCandidates:(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates
                             withType:(NSString *)typeValue;
 - (NSData *)JSONData;
 
@ -19,24 +19,24 @@ static NSString const *kRTCICECandidateMLineIndexKey = @"label";
|
||||||
static NSString const *kRTCICECandidateSdpKey = @"candidate";
|
static NSString const *kRTCICECandidateSdpKey = @"candidate";
|
||||||
static NSString const *kRTCICECandidatesTypeKey = @"candidates";
|
static NSString const *kRTCICECandidatesTypeKey = @"candidates";
|
||||||
|
|
||||||
|
@implementation RTC_OBJC_TYPE (RTCIceCandidate)
|
||||||
|
(JSON)
|
||||||
|
|
||||||
@implementation RTCIceCandidate (JSON)
|
+ (RTC_OBJC_TYPE(RTCIceCandidate) *)candidateFromJSONDictionary : (NSDictionary *)dictionary {
|
||||||
|
|
||||||
+ (RTCIceCandidate *)candidateFromJSONDictionary:(NSDictionary *)dictionary {
|
|
||||||
NSString *mid = dictionary[kRTCICECandidateMidKey];
|
NSString *mid = dictionary[kRTCICECandidateMidKey];
|
||||||
NSString *sdp = dictionary[kRTCICECandidateSdpKey];
|
NSString *sdp = dictionary[kRTCICECandidateSdpKey];
|
||||||
NSNumber *num = dictionary[kRTCICECandidateMLineIndexKey];
|
NSNumber *num = dictionary[kRTCICECandidateMLineIndexKey];
|
||||||
NSInteger mLineIndex = [num integerValue];
|
NSInteger mLineIndex = [num integerValue];
|
||||||
return [[RTCIceCandidate alloc] initWithSdp:sdp
|
return [[RTC_OBJC_TYPE(RTCIceCandidate) alloc] initWithSdp:sdp
|
||||||
sdpMLineIndex:mLineIndex
|
sdpMLineIndex:mLineIndex
|
||||||
sdpMid:mid];
|
sdpMid:mid];
|
||||||
}
|
}
|
||||||
|
|
||||||
+ (NSData *)JSONDataForIceCandidates:(NSArray<RTCIceCandidate *> *)candidates
|
+ (NSData *)JSONDataForIceCandidates:(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates
|
||||||
withType:(NSString *)typeValue {
|
withType:(NSString *)typeValue {
|
||||||
NSMutableArray *jsonCandidates =
|
NSMutableArray *jsonCandidates =
|
||||||
[NSMutableArray arrayWithCapacity:candidates.count];
|
[NSMutableArray arrayWithCapacity:candidates.count];
|
||||||
for (RTCIceCandidate *candidate in candidates) {
|
for (RTC_OBJC_TYPE(RTCIceCandidate) * candidate in candidates) {
|
||||||
NSDictionary *jsonCandidate = [candidate JSONDictionary];
|
NSDictionary *jsonCandidate = [candidate JSONDictionary];
|
||||||
[jsonCandidates addObject:jsonCandidate];
|
[jsonCandidates addObject:jsonCandidate];
|
||||||
}
|
}
|
||||||
|
@ -56,14 +56,14 @@ static NSString const *kRTCICECandidatesTypeKey = @"candidates";
|
||||||
return data;
|
return data;
|
||||||
}
|
}
|
||||||
|
|
||||||
+ (NSArray<RTCIceCandidate *> *)candidatesFromJSONDictionary:
|
+ (NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidatesFromJSONDictionary:
|
||||||
(NSDictionary *)dictionary {
|
(NSDictionary *)dictionary {
|
||||||
NSArray *jsonCandidates = dictionary[kRTCICECandidatesTypeKey];
|
NSArray *jsonCandidates = dictionary[kRTCICECandidatesTypeKey];
|
||||||
NSMutableArray<RTCIceCandidate *> *candidates =
|
NSMutableArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *candidates =
|
||||||
[NSMutableArray arrayWithCapacity:jsonCandidates.count];
|
[NSMutableArray arrayWithCapacity:jsonCandidates.count];
|
||||||
for (NSDictionary *jsonCandidate in jsonCandidates) {
|
for (NSDictionary *jsonCandidate in jsonCandidates) {
|
||||||
RTCIceCandidate *candidate =
|
RTC_OBJC_TYPE(RTCIceCandidate) *candidate =
|
||||||
[RTCIceCandidate candidateFromJSONDictionary:jsonCandidate];
|
[RTC_OBJC_TYPE(RTCIceCandidate) candidateFromJSONDictionary:jsonCandidate];
|
||||||
[candidates addObject:candidate];
|
[candidates addObject:candidate];
|
||||||
}
|
}
|
||||||
return candidates;
|
return candidates;
|
||||||
|
|
|
@@ -10,8 +10,9 @@

 #import <WebRTC/RTCIceServer.h>

-@interface RTCIceServer (JSON)
+@interface RTC_OBJC_TYPE (RTCIceServer)
+(JSON)

-+ (RTCIceServer *)serverFromJSONDictionary:(NSDictionary *)dictionary;
++ (RTC_OBJC_TYPE(RTCIceServer) *)serverFromJSONDictionary : (NSDictionary *)dictionary;

 @end
@@ -10,15 +10,16 @@

 #import "RTCIceServer+JSON.h"

-@implementation RTCIceServer (JSON)
+@implementation RTC_OBJC_TYPE (RTCIceServer)
+(JSON)

-+ (RTCIceServer *)serverFromJSONDictionary:(NSDictionary *)dictionary {
++ (RTC_OBJC_TYPE(RTCIceServer) *)serverFromJSONDictionary : (NSDictionary *)dictionary {
   NSArray *turnUrls = dictionary[@"urls"];
   NSString *username = dictionary[@"username"] ?: @"";
   NSString *credential = dictionary[@"credential"] ?: @"";
-  return [[RTCIceServer alloc] initWithURLStrings:turnUrls
+  return [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:turnUrls
                                          username:username
                                        credential:credential];
 }

 @end
@@ -10,9 +10,11 @@

 #import <WebRTC/RTCSessionDescription.h>

-@interface RTCSessionDescription (JSON)
+@interface RTC_OBJC_TYPE (RTCSessionDescription)
+(JSON)

-+ (RTCSessionDescription *)descriptionFromJSONDictionary:(NSDictionary *)dictionary;
++ (RTC_OBJC_TYPE(RTCSessionDescription) *)descriptionFromJSONDictionary
+    : (NSDictionary *)dictionary;
 - (NSData *)JSONData;

 @end
@@ -13,14 +13,15 @@
 static NSString const *kRTCSessionDescriptionTypeKey = @"type";
 static NSString const *kRTCSessionDescriptionSdpKey = @"sdp";

-@implementation RTCSessionDescription (JSON)
+@implementation RTC_OBJC_TYPE (RTCSessionDescription)
+(JSON)

-+ (RTCSessionDescription *)descriptionFromJSONDictionary:
-    (NSDictionary *)dictionary {
++ (RTC_OBJC_TYPE(RTCSessionDescription) *)descriptionFromJSONDictionary
+    : (NSDictionary *)dictionary {
   NSString *typeString = dictionary[kRTCSessionDescriptionTypeKey];
   RTCSdpType type = [[self class] typeForString:typeString];
   NSString *sdp = dictionary[kRTCSessionDescriptionSdpKey];
-  return [[RTCSessionDescription alloc] initWithType:type sdp:sdp];
+  return [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithType:type sdp:sdp];
 }

 - (NSData *)JSONData {
@@ -10,7 +10,9 @@

 #import <Foundation/Foundation.h>

-@class RTCFileVideoCapturer;
+#import <WebRTC/RTCMacros.h>
+
+@class RTC_OBJC_TYPE(RTCFileVideoCapturer);

 /**
  * Controls a file capturer.
@@ -23,7 +25,7 @@ NS_CLASS_AVAILABLE_IOS(10)
  *
  * @param capturer The capturer to be controlled.
  */
-- (instancetype)initWithCapturer:(RTCFileVideoCapturer *)capturer;
+- (instancetype)initWithCapturer:(RTC_OBJC_TYPE(RTCFileVideoCapturer) *)capturer;

 /**
  * Starts the file capturer.
@@ -14,14 +14,14 @@

 @interface ARDFileCaptureController ()

-@property(nonatomic, strong) RTCFileVideoCapturer *fileCapturer;
+@property(nonatomic, strong) RTC_OBJC_TYPE(RTCFileVideoCapturer) * fileCapturer;

 @end

 @implementation ARDFileCaptureController
 @synthesize fileCapturer = _fileCapturer;

-- (instancetype)initWithCapturer:(RTCFileVideoCapturer *)capturer {
+- (instancetype)initWithCapturer:(RTC_OBJC_TYPE(RTCFileVideoCapturer) *)capturer {
   if (self = [super init]) {
     _fileCapturer = capturer;
   }
@@ -28,10 +28,9 @@ static NSString *const barButtonImageString = @"ic_settings_black_24dp.png";
 // Launch argument to be passed to indicate that the app should start loopback immediatly
 static NSString *const loopbackLaunchProcessArgument = @"loopback";

-@interface ARDMainViewController () <
-    ARDMainViewDelegate,
-    ARDVideoCallViewControllerDelegate,
-    RTCAudioSessionDelegate>
+@interface ARDMainViewController () <ARDMainViewDelegate,
+                                     ARDVideoCallViewControllerDelegate,
+                                     RTC_OBJC_TYPE (RTCAudioSessionDelegate)>
 @property(nonatomic, strong) ARDMainView *mainView;
 @property(nonatomic, strong) AVAudioPlayer *audioPlayer;
 @end
@@ -57,13 +56,13 @@ static NSString *const loopbackLaunchProcessArgument = @"loopback";
   self.view = _mainView;
   [self addSettingsBarButton];

-  RTCAudioSessionConfiguration *webRTCConfig =
-      [RTCAudioSessionConfiguration webRTCConfiguration];
+  RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *webRTCConfig =
+      [RTC_OBJC_TYPE(RTCAudioSessionConfiguration) webRTCConfiguration];
   webRTCConfig.categoryOptions = webRTCConfig.categoryOptions |
                                  AVAudioSessionCategoryOptionDefaultToSpeaker;
-  [RTCAudioSessionConfiguration setWebRTCConfiguration:webRTCConfig];
+  [RTC_OBJC_TYPE(RTCAudioSessionConfiguration) setWebRTCConfiguration:webRTCConfig];

-  RTCAudioSession *session = [RTCAudioSession sharedInstance];
+  RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
   [session addDelegate:self];

   [self configureAudioSession];
@@ -124,7 +123,7 @@ static NSString *const loopbackLaunchProcessArgument = @"loopback";

   ARDSettingsModel *settingsModel = [[ARDSettingsModel alloc] init];

-  RTCAudioSession *session = [RTCAudioSession sharedInstance];
+  RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
   session.useManualAudio = [settingsModel currentUseManualAudioConfigSettingFromStore];
   session.isAudioEnabled = NO;

@@ -158,32 +157,33 @@ static NSString *const loopbackLaunchProcessArgument = @"loopback";
         [self restartAudioPlayerIfNeeded];
       }];
   }
-  RTCAudioSession *session = [RTCAudioSession sharedInstance];
+  RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
   session.isAudioEnabled = NO;
 }

-#pragma mark - RTCAudioSessionDelegate
+#pragma mark - RTC_OBJC_TYPE(RTCAudioSessionDelegate)

-- (void)audioSessionDidStartPlayOrRecord:(RTCAudioSession *)session {
+- (void)audioSessionDidStartPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
   // Stop playback on main queue and then configure WebRTC.
-  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeMain
-                               block:^{
-                                 if (self.mainView.isAudioLoopPlaying) {
-                                   RTCLog(@"Stopping audio loop due to WebRTC start.");
-                                   [self.audioPlayer stop];
-                                 }
-                                 RTCLog(@"Setting isAudioEnabled to YES.");
-                                 session.isAudioEnabled = YES;
-                               }];
+  [RTC_OBJC_TYPE(RTCDispatcher)
+      dispatchAsyncOnType:RTCDispatcherTypeMain
+                    block:^{
+                      if (self.mainView.isAudioLoopPlaying) {
+                        RTCLog(@"Stopping audio loop due to WebRTC start.");
+                        [self.audioPlayer stop];
+                      }
+                      RTCLog(@"Setting isAudioEnabled to YES.");
+                      session.isAudioEnabled = YES;
+                    }];
 }

-- (void)audioSessionDidStopPlayOrRecord:(RTCAudioSession *)session {
+- (void)audioSessionDidStopPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
   // WebRTC is done with the audio session. Restart playback.
-  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeMain
+  [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeMain
                                block:^{
                                  RTCLog(@"audioSessionDidStopPlayOrRecord");
                                  [self restartAudioPlayerIfNeeded];
                                }];
 }

 #pragma mark - Private
@@ -202,13 +202,13 @@ static NSString *const loopbackLaunchProcessArgument = @"loopback";
 }

 - (void)configureAudioSession {
-  RTCAudioSessionConfiguration *configuration =
-      [[RTCAudioSessionConfiguration alloc] init];
+  RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *configuration =
+      [[RTC_OBJC_TYPE(RTCAudioSessionConfiguration) alloc] init];
   configuration.category = AVAudioSessionCategoryAmbient;
   configuration.categoryOptions = AVAudioSessionCategoryOptionDuckOthers;
   configuration.mode = AVAudioSessionModeDefault;

-  RTCAudioSession *session = [RTCAudioSession sharedInstance];
+  RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
   [session lockForConfiguration];
   BOOL hasSucceeded = NO;
   NSError *error = nil;
@@ -62,7 +62,7 @@ typedef NS_ENUM(int, ARDAudioSettingsOptions) {
   return [_settingsModel availableVideoResolutions];
 }

-- (NSArray<RTCVideoCodecInfo *> *)videoCodecArray {
+- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)videoCodecArray {
   return [_settingsModel availableVideoCodecs];
 }

@@ -214,7 +214,7 @@ updateListSelectionAtIndexPath:(NSIndexPath *)indexPath
     cell = [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault
                                   reuseIdentifier:dequeueIdentifier];
   }
-  RTCVideoCodecInfo *codec = self.videoCodecArray[indexPath.row];
+  RTC_OBJC_TYPE(RTCVideoCodecInfo) *codec = self.videoCodecArray[indexPath.row];
   cell.textLabel.text = [codec humanReadableDescription];
   if ([codec isEqualToCodecInfo:[_settingsModel currentVideoCodecSettingFromStore]]) {
     cell.accessoryType = UITableViewCellAccessoryCheckmark;
@@ -231,7 +231,7 @@ updateListSelectionAtIndexPath:(NSIndexPath *)indexPath
   updateListSelectionAtIndexPath:indexPath
                        inSection:ARDSettingsSectionVideoCodec];

-  RTCVideoCodecInfo *videoCodec = self.videoCodecArray[indexPath.row];
+  RTC_OBJC_TYPE(RTCVideoCodecInfo) *videoCodec = self.videoCodecArray[indexPath.row];
   [_settingsModel storeVideoCodecSetting:videoCodec];
 }

@@ -35,7 +35,7 @@
 }

 - (void)setStats:(NSArray *)stats {
-  for (RTCLegacyStatsReport *report in stats) {
+  for (RTC_OBJC_TYPE(RTCLegacyStatsReport) * report in stats) {
     [_statsBuilder parseStatsReport:report];
   }
   _statsLabel.text = _statsBuilder.statsString;
@@ -37,8 +37,8 @@
 @interface ARDVideoCallView : UIView

 @property(nonatomic, readonly) UILabel *statusLabel;
-@property(nonatomic, readonly) RTCCameraPreviewView *localVideoView;
-@property(nonatomic, readonly) __kindof UIView<RTCVideoRenderer> *remoteVideoView;
+@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCCameraPreviewView) * localVideoView;
+@property(nonatomic, readonly) __kindof UIView<RTC_OBJC_TYPE(RTCVideoRenderer)> *remoteVideoView;
 @property(nonatomic, readonly) ARDStatsView *statsView;
 @property(nonatomic, weak) id<ARDVideoCallViewDelegate> delegate;

@@ -25,7 +25,7 @@ static CGFloat const kLocalVideoViewSize = 120;
 static CGFloat const kLocalVideoViewPadding = 8;
 static CGFloat const kStatusBarHeight = 20;

-@interface ARDVideoCallView () <RTCVideoViewDelegate>
+@interface ARDVideoCallView () <RTC_OBJC_TYPE (RTCVideoViewDelegate)>
 @end

 @implementation ARDVideoCallView {
@@ -45,16 +45,17 @@ static CGFloat const kStatusBarHeight = 20;
   if (self = [super initWithFrame:frame]) {

 #if defined(RTC_SUPPORTS_METAL)
-    _remoteVideoView = [[RTCMTLVideoView alloc] initWithFrame:CGRectZero];
+    _remoteVideoView = [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectZero];
 #else
-    RTCEAGLVideoView *remoteView = [[RTCEAGLVideoView alloc] initWithFrame:CGRectZero];
+    RTC_OBJC_TYPE(RTCEAGLVideoView) *remoteView =
+        [[RTC_OBJC_TYPE(RTCEAGLVideoView) alloc] initWithFrame:CGRectZero];
     remoteView.delegate = self;
     _remoteVideoView = remoteView;
 #endif

     [self addSubview:_remoteVideoView];

-    _localVideoView = [[RTCCameraPreviewView alloc] initWithFrame:CGRectZero];
+    _localVideoView = [[RTC_OBJC_TYPE(RTCCameraPreviewView) alloc] initWithFrame:CGRectZero];
     [self addSubview:_localVideoView];

     _statsView = [[ARDStatsView alloc] initWithFrame:CGRectZero];
@@ -175,9 +176,9 @@ static CGFloat const kStatusBarHeight = 20;
       CGPointMake(CGRectGetMidX(bounds), CGRectGetMidY(bounds));
 }

-#pragma mark - RTCVideoViewDelegate
+#pragma mark - RTC_OBJC_TYPE(RTCVideoViewDelegate)

-- (void)videoView:(id<RTCVideoRenderer>)videoView didChangeVideoSize:(CGSize)size {
+- (void)videoView:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)videoView didChangeVideoSize:(CGSize)size {
   if (videoView == _remoteVideoView) {
     _remoteVideoSize = size;
   }
@@ -24,15 +24,15 @@

 @interface ARDVideoCallViewController () <ARDAppClientDelegate,
                                           ARDVideoCallViewDelegate,
-                                          RTCAudioSessionDelegate>
-@property(nonatomic, strong) RTCVideoTrack *remoteVideoTrack;
+                                          RTC_OBJC_TYPE (RTCAudioSessionDelegate)>
+@property(nonatomic, strong) RTC_OBJC_TYPE(RTCVideoTrack) * remoteVideoTrack;
 @property(nonatomic, readonly) ARDVideoCallView *videoCallView;
 @property(nonatomic, assign) AVAudioSessionPortOverride portOverride;
 @end

 @implementation ARDVideoCallViewController {
   ARDAppClient *_client;
-  RTCVideoTrack *_remoteVideoTrack;
+  RTC_OBJC_TYPE(RTCVideoTrack) * _remoteVideoTrack;
   ARDCaptureController *_captureController;
   ARDFileCaptureController *_fileCaptureController NS_AVAILABLE_IOS(10);
 }
@@ -62,7 +62,7 @@
       [self statusTextForState:RTCIceConnectionStateNew];
   self.view = _videoCallView;

-  RTCAudioSession *session = [RTCAudioSession sharedInstance];
+  RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
   [session addDelegate:self];
 }

@@ -100,7 +100,7 @@
 }

 - (void)appClient:(ARDAppClient *)client
-    didCreateLocalCapturer:(RTCCameraVideoCapturer *)localCapturer {
+    didCreateLocalCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer {
   _videoCallView.localVideoView.captureSession = localCapturer.captureSession;
   ARDSettingsModel *settingsModel = [[ARDSettingsModel alloc] init];
   _captureController =
@@ -109,7 +109,7 @@
 }

 - (void)appClient:(ARDAppClient *)client
-    didCreateLocalFileCapturer:(RTCFileVideoCapturer *)fileCapturer {
+    didCreateLocalFileCapturer:(RTC_OBJC_TYPE(RTCFileVideoCapturer) *)fileCapturer {
 #if defined(__IPHONE_11_0) && (__IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0)
   if (@available(iOS 10, *)) {
     _fileCaptureController = [[ARDFileCaptureController alloc] initWithCapturer:fileCapturer];
@@ -119,11 +119,11 @@
 }

 - (void)appClient:(ARDAppClient *)client
-    didReceiveLocalVideoTrack:(RTCVideoTrack *)localVideoTrack {
+    didReceiveLocalVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)localVideoTrack {
 }

 - (void)appClient:(ARDAppClient *)client
-    didReceiveRemoteVideoTrack:(RTCVideoTrack *)remoteVideoTrack {
+    didReceiveRemoteVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack {
   self.remoteVideoTrack = remoteVideoTrack;
   __weak ARDVideoCallViewController *weakSelf = self;
   dispatch_async(dispatch_get_main_queue(), ^{
@@ -163,19 +163,21 @@
   if (_portOverride == AVAudioSessionPortOverrideNone) {
     override = AVAudioSessionPortOverrideSpeaker;
   }
-  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeAudioSession
-                               block:^{
-                                 RTCAudioSession *session = [RTCAudioSession sharedInstance];
-                                 [session lockForConfiguration];
-                                 NSError *error = nil;
-                                 if ([session overrideOutputAudioPort:override error:&error]) {
-                                   self.portOverride = override;
-                                 } else {
-                                   RTCLogError(@"Error overriding output port: %@",
-                                               error.localizedDescription);
-                                 }
-                                 [session unlockForConfiguration];
-                               }];
+  [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeAudioSession
+                                              block:^{
+                                                RTC_OBJC_TYPE(RTCAudioSession) *session =
+                                                    [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+                                                [session lockForConfiguration];
+                                                NSError *error = nil;
+                                                if ([session overrideOutputAudioPort:override
+                                                                               error:&error]) {
+                                                  self.portOverride = override;
+                                                } else {
+                                                  RTCLogError(@"Error overriding output port: %@",
+                                                              error.localizedDescription);
+                                                }
+                                                [session unlockForConfiguration];
+                                              }];
 }

 - (void)videoCallViewDidEnableStats:(ARDVideoCallView *)view {
@@ -183,16 +185,16 @@
   _videoCallView.statsView.hidden = NO;
 }

-#pragma mark - RTCAudioSessionDelegate
+#pragma mark - RTC_OBJC_TYPE(RTCAudioSessionDelegate)

-- (void)audioSession:(RTCAudioSession *)audioSession
+- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession
     didDetectPlayoutGlitch:(int64_t)totalNumberOfGlitches {
   RTCLog(@"Audio session detected glitch, total: %lld", totalNumberOfGlitches);
 }

 #pragma mark - Private

-- (void)setRemoteVideoTrack:(RTCVideoTrack *)remoteVideoTrack {
+- (void)setRemoteVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack {
   if (_remoteVideoTrack == remoteVideoTrack) {
     return;
   }
@@ -10,8 +10,9 @@

 #import <WebRTC/RTCVideoCodecInfo.h>

-@interface RTCVideoCodecInfo (HumanReadable)
+@interface RTC_OBJC_TYPE (RTCVideoCodecInfo)
+(HumanReadable)

 - (NSString *)humanReadableDescription;

 @end
@@ -12,13 +12,14 @@

 #import <WebRTC/RTCH264ProfileLevelId.h>

-@implementation RTCVideoCodecInfo (HumanReadable)
+@implementation RTC_OBJC_TYPE (RTCVideoCodecInfo)
+(HumanReadable)

 - (NSString *)humanReadableDescription {
   if ([self.name isEqualToString:@"H264"]) {
     NSString *profileId = self.parameters[@"profile-level-id"];
-    RTCH264ProfileLevelId *profileLevelId =
-        [[RTCH264ProfileLevelId alloc] initWithHexString:profileId];
+    RTC_OBJC_TYPE(RTCH264ProfileLevelId) *profileLevelId =
+        [[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc] initWithHexString:profileId];
     if (profileLevelId.profile == RTCH264ProfileConstrainedHigh ||
         profileLevelId.profile == RTCH264ProfileHigh) {
       return @"H264 (High)";
@@ -20,14 +20,14 @@

 @implementation ARDBroadcastSampleHandler {
   ARDAppClient *_client;
-  RTCCallbackLogger *_callbackLogger;
+  RTC_OBJC_TYPE(RTCCallbackLogger) * _callbackLogger;
 }

 @synthesize capturer = _capturer;

 - (instancetype)init {
   if (self = [super init]) {
-    _callbackLogger = [[RTCCallbackLogger alloc] init];
+    _callbackLogger = [[RTC_OBJC_TYPE(RTCCallbackLogger) alloc] init];
     os_log_t rtc_os_log = os_log_create("com.google.AppRTCMobile", "RTCLog");
     [_callbackLogger start:^(NSString *logMessage) {
       os_log(rtc_os_log, "%{public}s", [logMessage cStringUsingEncoding:NSUTF8StringEncoding]);
@@ -104,7 +104,7 @@
 }

 - (void)appClient:(ARDAppClient *)client
-    didCreateLocalCapturer:(RTCCameraVideoCapturer *)localCapturer {
+    didCreateLocalCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer {
 }

 - (void)appClient:(ARDAppClient *)client
@@ -113,11 +113,11 @@
 }

 - (void)appClient:(ARDAppClient *)client
-    didReceiveLocalVideoTrack:(RTCVideoTrack *)localVideoTrack {
+    didReceiveLocalVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)localVideoTrack {
 }

 - (void)appClient:(ARDAppClient *)client
-    didReceiveRemoteVideoTrack:(RTCVideoTrack *)remoteVideoTrack {
+    didReceiveRemoteVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack {
 }

 - (void)appClient:(ARDAppClient *)client didGetStats:(NSArray *)stats {
@@ -37,15 +37,15 @@ static NSUInteger const kBottomViewHeight = 200;
 @interface APPRTCMainView : NSView

 @property(nonatomic, weak) id<APPRTCMainViewDelegate> delegate;
-@property(nonatomic, readonly) NSView<RTCVideoRenderer>* localVideoView;
-@property(nonatomic, readonly) NSView<RTCVideoRenderer>* remoteVideoView;
+@property(nonatomic, readonly) NSView<RTC_OBJC_TYPE(RTCVideoRenderer)>* localVideoView;
+@property(nonatomic, readonly) NSView<RTC_OBJC_TYPE(RTCVideoRenderer)>* remoteVideoView;
 @property(nonatomic, readonly) NSTextView* logView;

 - (void)displayLogMessage:(NSString*)message;

 @end

-@interface APPRTCMainView () <NSTextFieldDelegate, RTCNSGLVideoViewDelegate>
+@interface APPRTCMainView () <NSTextFieldDelegate, RTC_OBJC_TYPE (RTCNSGLVideoViewDelegate)>
 @end
 @implementation APPRTCMainView {
   NSScrollView* _scrollView;
@@ -178,10 +178,9 @@ static NSUInteger const kBottomViewHeight = 200;
   [self setNeedsUpdateConstraints:YES];
 }

-#pragma mark - RTCNSGLVideoViewDelegate
+#pragma mark - RTC_OBJC_TYPE(RTCNSGLVideoViewDelegate)

-- (void)videoView:(RTCNSGLVideoView*)videoView
-    didChangeVideoSize:(NSSize)size {
+- (void)videoView:(RTC_OBJC_TYPE(RTCNSGLVideoView) *)videoView didChangeVideoSize:(NSSize)size {
   if (videoView == _remoteVideoView) {
     _remoteVideoSize = size;
   } else if (videoView == _localVideoView) {
@@ -222,9 +221,10 @@ static NSUInteger const kBottomViewHeight = 200;
   // If not we're providing sensible default.
 #pragma clang diagnostic push
 #pragma clang diagnostic ignored "-Wpartial-availability"
-  if ([RTCMTLNSVideoView class] && [RTCMTLNSVideoView isMetalAvailable]) {
-    _remoteVideoView = [[RTCMTLNSVideoView alloc] initWithFrame:NSZeroRect];
-    _localVideoView = [[RTCMTLNSVideoView alloc] initWithFrame:NSZeroRect];
+  if ([RTC_OBJC_TYPE(RTCMTLNSVideoView) class] &&
+      [RTC_OBJC_TYPE(RTCMTLNSVideoView) isMetalAvailable]) {
+    _remoteVideoView = [[RTC_OBJC_TYPE(RTCMTLNSVideoView) alloc] initWithFrame:NSZeroRect];
+    _localVideoView = [[RTC_OBJC_TYPE(RTCMTLNSVideoView) alloc] initWithFrame:NSZeroRect];
   }
 #pragma clang diagnostic pop
   if (_remoteVideoView == nil) {
@@ -238,13 +238,13 @@ static NSUInteger const kBottomViewHeight = 200;
     NSOpenGLPixelFormat* pixelFormat =
         [[NSOpenGLPixelFormat alloc] initWithAttributes:attributes];

-    RTCNSGLVideoView* remote =
-        [[RTCNSGLVideoView alloc] initWithFrame:NSZeroRect pixelFormat:pixelFormat];
+    RTC_OBJC_TYPE(RTCNSGLVideoView)* remote =
+        [[RTC_OBJC_TYPE(RTCNSGLVideoView) alloc] initWithFrame:NSZeroRect pixelFormat:pixelFormat];
     remote.delegate = self;
     _remoteVideoView = remote;

-    RTCNSGLVideoView* local =
-        [[RTCNSGLVideoView alloc] initWithFrame:NSZeroRect pixelFormat:pixelFormat];
+    RTC_OBJC_TYPE(RTCNSGLVideoView)* local =
+        [[RTC_OBJC_TYPE(RTCNSGLVideoView) alloc] initWithFrame:NSZeroRect pixelFormat:pixelFormat];
     local.delegate = self;
     _localVideoView = local;
   }
@@ -299,8 +299,8 @@ static NSUInteger const kBottomViewHeight = 200;

 @implementation APPRTCViewController {
   ARDAppClient* _client;
-  RTCVideoTrack* _localVideoTrack;
-  RTCVideoTrack* _remoteVideoTrack;
+  RTC_OBJC_TYPE(RTCVideoTrack) * _localVideoTrack;
+  RTC_OBJC_TYPE(RTCVideoTrack) * _remoteVideoTrack;
   ARDCaptureController* _captureController;
 }

@@ -357,21 +357,21 @@ static NSUInteger const kBottomViewHeight = 200;
 }

 - (void)appClient:(ARDAppClient*)client
-    didCreateLocalCapturer:(RTCCameraVideoCapturer*)localCapturer {
+    didCreateLocalCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer {
   _captureController =
       [[ARDCaptureController alloc] initWithCapturer:localCapturer
                                             settings:[[ARDSettingsModel alloc] init]];
   [_captureController startCapture];
 }

-- (void)appClient:(ARDAppClient *)client
-    didReceiveLocalVideoTrack:(RTCVideoTrack *)localVideoTrack {
+- (void)appClient:(ARDAppClient*)client
+    didReceiveLocalVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)localVideoTrack {
   _localVideoTrack = localVideoTrack;
   [_localVideoTrack addRenderer:self.mainView.localVideoView];
 }

-- (void)appClient:(ARDAppClient *)client
-    didReceiveRemoteVideoTrack:(RTCVideoTrack *)remoteVideoTrack {
+- (void)appClient:(ARDAppClient*)client
+    didReceiveRemoteVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack {
   _remoteVideoTrack = remoteVideoTrack;
   [_remoteVideoTrack addRenderer:self.mainView.remoteVideoView];
 }

@@ -196,8 +196,8 @@
   // TODO(tkchin): Figure out why DTLS-SRTP constraint causes thread assertion
   // crash in Debug.
   caller.defaultPeerConnectionConstraints =
-      [[RTCMediaConstraints alloc] initWithMandatoryConstraints:nil
+      [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:nil
                                              optionalConstraints:nil];
   weakCaller = caller;

   answerer = [self createAppClientForRoomId:roomId
@@ -214,8 +214,8 @@
   // TODO(tkchin): Figure out why DTLS-SRTP constraint causes thread assertion
   // crash in Debug.
   answerer.defaultPeerConnectionConstraints =
-      [[RTCMediaConstraints alloc] initWithMandatoryConstraints:nil
+      [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:nil
                                              optionalConstraints:nil];
   weakAnswerer = answerer;

   // Kick off connection.
@@ -248,8 +248,8 @@
                 connectedHandler:^{}
           localVideoTrackHandler:^{ [localVideoTrackExpectation fulfill]; }];
   caller.defaultPeerConnectionConstraints =
-      [[RTCMediaConstraints alloc] initWithMandatoryConstraints:nil
+      [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:nil
                                              optionalConstraints:nil];

   // Kick off connection.
   [caller connectToRoomWithId:roomId
@@ -31,7 +31,7 @@ NS_CLASS_AVAILABLE_IOS(10)

 - (void)setUp {
   [super setUp];
-  self.fileCapturerMock = OCMClassMock([RTCFileVideoCapturer class]);
+  self.fileCapturerMock = OCMClassMock([RTC_OBJC_TYPE(RTCFileVideoCapturer) class]);
   self.fileCaptureController =
       [[ARDFileCaptureController alloc] initWithCapturer:self.fileCapturerMock];
 }
@@ -24,9 +24,9 @@

 @interface NADViewController ()

-@property(nonatomic) RTCCameraVideoCapturer *capturer;
-@property(nonatomic) RTCCameraPreviewView *localVideoView;
-@property(nonatomic) __kindof UIView<RTCVideoRenderer> *remoteVideoView;
+@property(nonatomic) RTC_OBJC_TYPE(RTCCameraVideoCapturer) * capturer;
+@property(nonatomic) RTC_OBJC_TYPE(RTCCameraPreviewView) * localVideoView;
+@property(nonatomic) __kindof UIView<RTC_OBJC_TYPE(RTCVideoRenderer)> *remoteVideoView;
 @property(nonatomic) UIButton *callButton;
 @property(nonatomic) UIButton *hangUpButton;

@@ -50,14 +50,14 @@
   _view = [[UIView alloc] initWithFrame:CGRectZero];

 #if defined(RTC_SUPPORTS_METAL)
-  _remoteVideoView = [[RTCMTLVideoView alloc] initWithFrame:CGRectZero];
+  _remoteVideoView = [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectZero];
 #else
-  _remoteVideoView = [[RTCEAGLVideoView alloc] initWithFrame:CGRectZero];
+  _remoteVideoView = [[RTC_OBJC_TYPE(RTCEAGLVideoView) alloc] initWithFrame:CGRectZero];
 #endif
   _remoteVideoView.translatesAutoresizingMaskIntoConstraints = NO;
   [_view addSubview:_remoteVideoView];

-  _localVideoView = [[RTCCameraPreviewView alloc] initWithFrame:CGRectZero];
+  _localVideoView = [[RTC_OBJC_TYPE(RTCCameraPreviewView) alloc] initWithFrame:CGRectZero];
   _localVideoView.translatesAutoresizingMaskIntoConstraints = NO;
   [_view addSubview:_localVideoView];

@@ -106,14 +106,15 @@
 - (void)viewDidLoad {
   [super viewDidLoad];

-  self.capturer = [[RTCCameraVideoCapturer alloc] init];
+  self.capturer = [[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc] init];
   self.localVideoView.captureSession = self.capturer.captureSession;

   _call_client.reset(new webrtc_examples::ObjCCallClient());

   // Start capturer.
   AVCaptureDevice *selectedDevice = nil;
-  NSArray<AVCaptureDevice *> *captureDevices = [RTCCameraVideoCapturer captureDevices];
+  NSArray<AVCaptureDevice *> *captureDevices =
+      [RTC_OBJC_TYPE(RTCCameraVideoCapturer) captureDevices];
   for (AVCaptureDevice *device in captureDevices) {
     if (device.position == AVCaptureDevicePositionFront) {
       selectedDevice = device;
@@ -126,7 +127,7 @@
   int targetHeight = 480;
   int currentDiff = INT_MAX;
   NSArray<AVCaptureDeviceFormat *> *formats =
-      [RTCCameraVideoCapturer supportedFormatsForDevice:selectedDevice];
+      [RTC_OBJC_TYPE(RTCCameraVideoCapturer) supportedFormatsForDevice:selectedDevice];
   for (AVCaptureDeviceFormat *format in formats) {
     CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
     FourCharCode pixelFormat = CMFormatDescriptionGetMediaSubType(format.formatDescription);
@@ -14,13 +14,16 @@
 #include <memory>
 #include <string>

+#import "sdk/objc/base/RTCMacros.h"
+
 #include "api/peer_connection_interface.h"
 #include "api/scoped_refptr.h"
 #include "rtc_base/critical_section.h"
 #include "rtc_base/thread_checker.h"

-@class RTCVideoCapturer;
-@protocol RTCVideoRenderer;
+@class RTC_OBJC_TYPE(RTCVideoCapturer);
+@protocol RTC_OBJC_TYPE
+(RTCVideoRenderer);

 namespace webrtc_examples {

@@ -28,7 +31,8 @@ class ObjCCallClient {
  public:
   ObjCCallClient();

-  void Call(RTCVideoCapturer* capturer, id<RTCVideoRenderer> remote_renderer);
+  void Call(RTC_OBJC_TYPE(RTCVideoCapturer) * capturer,
+            id<RTC_OBJC_TYPE(RTCVideoRenderer)> remote_renderer);
   void Hangup();

  private:
@@ -64,7 +64,8 @@ ObjCCallClient::ObjCCallClient()
   CreatePeerConnectionFactory();
 }

-void ObjCCallClient::Call(RTCVideoCapturer* capturer, id<RTCVideoRenderer> remote_renderer) {
+void ObjCCallClient::Call(RTC_OBJC_TYPE(RTCVideoCapturer) * capturer,
+                          id<RTC_OBJC_TYPE(RTCVideoRenderer)> remote_renderer) {
   RTC_DCHECK_RUN_ON(&thread_checker_);

   rtc::CritScope lock(&pc_mutex_);
@@ -122,10 +123,10 @@ void ObjCCallClient::CreatePeerConnectionFactory() {
   media_deps.task_queue_factory = dependencies.task_queue_factory.get();
   media_deps.audio_encoder_factory = webrtc::CreateBuiltinAudioEncoderFactory();
   media_deps.audio_decoder_factory = webrtc::CreateBuiltinAudioDecoderFactory();
-  media_deps.video_encoder_factory =
-      webrtc::ObjCToNativeVideoEncoderFactory([[RTCDefaultVideoEncoderFactory alloc] init]);
-  media_deps.video_decoder_factory =
-      webrtc::ObjCToNativeVideoDecoderFactory([[RTCDefaultVideoDecoderFactory alloc] init]);
+  media_deps.video_encoder_factory = webrtc::ObjCToNativeVideoEncoderFactory(
+      [[RTC_OBJC_TYPE(RTCDefaultVideoEncoderFactory) alloc] init]);
+  media_deps.video_decoder_factory = webrtc::ObjCToNativeVideoDecoderFactory(
+      [[RTC_OBJC_TYPE(RTCDefaultVideoDecoderFactory) alloc] init]);
   media_deps.audio_processing = webrtc::AudioProcessingBuilder().Create();
   dependencies.media_engine = cricket::CreateMediaEngine(std::move(media_deps));
   RTC_LOG(LS_INFO) << "Media engine created: " << dependencies.media_engine.get();
@ -1,14 +1,12 @@
|
||||||
%
|
% % Copyright(c) 2011 The WebRTC project authors.All Rights Reserved.%
|
||||||
% Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
% Use of this source code is governed by a BSD
|
||||||
%
|
-
|
||||||
% Use of this source code is governed by a BSD-style license
|
style license % that can be found in the LICENSE file in the root of the source
|
||||||
% that can be found in the LICENSE file in the root of the source
|
% tree.An additional intellectual property rights grant can be found
|
||||||
% tree. An additional intellectual property rights grant can be found
|
% in the file PATENTS.All contributing project authors may
|
||||||
% in the file PATENTS. All contributing project authors may
|
% be found in the AUTHORS file in the root of the source tree.%
|
||||||
% be found in the AUTHORS file in the root of the source tree.
|
|
||||||
%
|
|
||||||
|
|
||||||
clear;
|
clear;
|
||||||
pack;
|
pack;
|
||||||
%
|
%
|
||||||
% Enter the path to YOUR executable and remember to define the perprocessor
|
% Enter the path to YOUR executable and remember to define the perprocessor
|
||||||
|
@ -56,4 +54,4 @@ result
|
||||||
% Compute maximum complexity for a single frame (enc/dec separately and together)
|
% Compute maximum complexity for a single frame (enc/dec separately and together)
|
||||||
maxEncComplexityInAFrame = (max(sum(new(1:size(new,1)/2,:),2))/0.03)/1000000
|
maxEncComplexityInAFrame = (max(sum(new(1:size(new,1)/2,:),2))/0.03)/1000000
|
||||||
maxDecComplexityInAFrame = (max(sum(new(size(new,1)/2+1:end,:),2))/0.03)/1000000
|
maxDecComplexityInAFrame = (max(sum(new(size(new,1)/2+1:end,:),2))/0.03)/1000000
|
||||||
totalComplexity = maxEncComplexityInAFrame + maxDecComplexityInAFrame
|
totalComplexity = maxEncComplexityInAFrame + maxDecComplexityInAFrame
|
||||||
|
|
|
@@ -19,11 +19,11 @@ namespace webrtc {
 namespace test {

 std::unique_ptr<VideoEncoderFactory> CreateObjCEncoderFactory() {
-  return ObjCToNativeVideoEncoderFactory([[RTCVideoEncoderFactoryH264 alloc] init]);
+  return ObjCToNativeVideoEncoderFactory([[RTC_OBJC_TYPE(RTCVideoEncoderFactoryH264) alloc] init]);
 }

 std::unique_ptr<VideoDecoderFactory> CreateObjCDecoderFactory() {
-  return ObjCToNativeVideoDecoderFactory([[RTCVideoDecoderFactoryH264 alloc] init]);
+  return ObjCToNativeVideoDecoderFactory([[RTC_OBJC_TYPE(RTCVideoDecoderFactoryH264) alloc] init]);
 }

 } // namespace test
@@ -19,4 +19,4 @@ dispatch_queue_t RTCDispatchQueueCreateWithTarget(const char *label,
   dispatch_queue_t queue = dispatch_queue_create(label, attr);
   dispatch_set_target_queue(queue, target);
   return queue;
 }
@@ -23,7 +23,7 @@ NS_ASSUME_NONNULL_BEGIN
  * Calls made to the webrtc::VideoRenderInterface will be adapted and passed to
  * this video renderer.
  */
-@property(nonatomic, readonly) id<RTCVideoRenderer> videoRenderer;
+@property(nonatomic, readonly) id<RTC_OBJC_TYPE(RTCVideoRenderer)> videoRenderer;

 /**
  * The native VideoSinkInterface surface exposed by this adapter. Calls made
@@ -33,7 +33,7 @@ NS_ASSUME_NONNULL_BEGIN
 @property(nonatomic, readonly) rtc::VideoSinkInterface<webrtc::VideoFrame> *nativeVideoRenderer;

 /** Initialize an RTCVideoRendererAdapter with an RTCVideoRenderer. */
-- (instancetype)initWithNativeRenderer:(id<RTCVideoRenderer>)videoRenderer
+- (instancetype)initWithNativeRenderer:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)videoRenderer
     NS_DESIGNATED_INITIALIZER;

 @end
@@ -26,7 +26,7 @@ class VideoRendererAdapter
   }

   void OnFrame(const webrtc::VideoFrame& nativeVideoFrame) override {
-    RTCVideoFrame* videoFrame = NativeToObjCVideoFrame(nativeVideoFrame);
+    RTC_OBJC_TYPE(RTCVideoFrame)* videoFrame = NativeToObjCVideoFrame(nativeVideoFrame);

     CGSize current_size = (videoFrame.rotation % 180 == 0)
                               ? CGSizeMake(videoFrame.width, videoFrame.height)
@@ -51,7 +51,7 @@ class VideoRendererAdapter

 @synthesize videoRenderer = _videoRenderer;

-- (instancetype)initWithNativeRenderer:(id<RTCVideoRenderer>)videoRenderer {
+- (instancetype)initWithNativeRenderer:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)videoRenderer {
   NSParameterAssert(videoRenderer);
   if (self = [super init]) {
     _videoRenderer = videoRenderer;
@@ -22,7 +22,7 @@ typedef void (^RTCCallbackLoggerMessageAndSeverityHandler)(NSString *message,
 // This class intercepts WebRTC logs and forwards them to a registered block.
 // This class is not threadsafe.
 RTC_OBJC_EXPORT
-@interface RTCCallbackLogger : NSObject
+@interface RTC_OBJC_TYPE (RTCCallbackLogger) : NSObject

 // The severity level to capture. The default is kRTCLoggingSeverityInfo.
 @property(nonatomic, assign) RTCLoggingSeverity severity;
@@ -64,7 +64,7 @@ class CallbackWithSeverityLogSink : public rtc::LogSink {
   RTCCallbackLoggerMessageAndSeverityHandler callback_handler_;
 };

-@implementation RTCCallbackLogger {
+@implementation RTC_OBJC_TYPE (RTCCallbackLogger) {
   BOOL _hasStarted;
   std::unique_ptr<rtc::LogSink> _logSink;
 }
@@ -12,20 +12,22 @@

 #import "RTCMediaSource+Private.h"

-@interface RTCAudioSource ()
+@interface RTC_OBJC_TYPE (RTCAudioSource)
+()

 /**
 * The AudioSourceInterface object passed to this RTCAudioSource during
 * construction.
 */
-@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::AudioSourceInterface> nativeAudioSource;
+@property(nonatomic,
+readonly) rtc::scoped_refptr<webrtc::AudioSourceInterface> nativeAudioSource;

 /** Initialize an RTCAudioSource from a native AudioSourceInterface. */
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory*)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
 nativeAudioSource:(rtc::scoped_refptr<webrtc::AudioSourceInterface>)nativeAudioSource
 NS_DESIGNATED_INITIALIZER;

-- (instancetype)initWithFactory:(RTCPeerConnectionFactory*)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
 nativeMediaSource:(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
 type:(RTCMediaSourceType)type NS_UNAVAILABLE;
@@ -16,7 +16,7 @@
 NS_ASSUME_NONNULL_BEGIN

 RTC_OBJC_EXPORT
-@interface RTCAudioSource : RTCMediaSource
+@interface RTC_OBJC_TYPE (RTCAudioSource) : RTC_OBJC_TYPE(RTCMediaSource)

 - (instancetype)init NS_UNAVAILABLE;
@@ -12,13 +12,13 @@

 #include "rtc_base/checks.h"

-@implementation RTCAudioSource {
+@implementation RTC_OBJC_TYPE (RTCAudioSource) {
 }

 @synthesize volume = _volume;
 @synthesize nativeAudioSource = _nativeAudioSource;

-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
 nativeAudioSource:
 (rtc::scoped_refptr<webrtc::AudioSourceInterface>)nativeAudioSource {
 RTC_DCHECK(factory);
@@ -32,7 +32,7 @@
 return self;
 }

-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
 nativeMediaSource:(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
 type:(RTCMediaSourceType)type {
 RTC_NOTREACHED();
@@ -41,7 +41,7 @@

 - (NSString *)description {
 NSString *stateString = [[self class] stringForState:self.state];
-return [NSString stringWithFormat:@"RTCAudioSource( %p ): %@", self, stateString];
+return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCAudioSource)( %p ): %@", self, stateString];
 }

 - (void)setVolume:(double)volume {
@@ -14,15 +14,16 @@

 NS_ASSUME_NONNULL_BEGIN

-@class RTCPeerConnectionFactory;
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactory);
-@interface RTCAudioTrack ()
+@interface RTC_OBJC_TYPE (RTCAudioTrack)
+()

 /** AudioTrackInterface created or passed in at construction. */
 @property(nonatomic, readonly) rtc::scoped_refptr<webrtc::AudioTrackInterface> nativeAudioTrack;

 /** Initialize an RTCAudioTrack with an id. */
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
-source:(RTCAudioSource *)source
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+source:(RTC_OBJC_TYPE(RTCAudioSource) *)source
 trackId:(NSString *)trackId;

 @end
@@ -13,15 +13,15 @@

 NS_ASSUME_NONNULL_BEGIN

-@class RTCAudioSource;
+@class RTC_OBJC_TYPE(RTCAudioSource);

 RTC_OBJC_EXPORT
-@interface RTCAudioTrack : RTCMediaStreamTrack
+@interface RTC_OBJC_TYPE (RTCAudioTrack) : RTC_OBJC_TYPE(RTCMediaStreamTrack)

 - (instancetype)init NS_UNAVAILABLE;

 /** The audio source for this audio track. */
-@property(nonatomic, readonly) RTCAudioSource *source;
+@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCAudioSource) * source;

 @end
@@ -17,12 +17,12 @@

 #include "rtc_base/checks.h"

-@implementation RTCAudioTrack
+@implementation RTC_OBJC_TYPE (RTCAudioTrack)

 @synthesize source = _source;

-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
-source:(RTCAudioSource *)source
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+source:(RTC_OBJC_TYPE(RTCAudioSource) *)source
 trackId:(NSString *)trackId {
 RTC_DCHECK(factory);
 RTC_DCHECK(source);
@@ -37,7 +37,7 @@
 return self;
 }

-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
 nativeTrack:(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
 type:(RTCMediaStreamTrackType)type {
 NSParameterAssert(factory);
@@ -46,14 +46,13 @@
 return [super initWithFactory:factory nativeTrack:nativeTrack type:type];
 }

-- (RTCAudioSource *)source {
+- (RTC_OBJC_TYPE(RTCAudioSource) *)source {
 if (!_source) {
 rtc::scoped_refptr<webrtc::AudioSourceInterface> source =
 self.nativeAudioTrack->GetSource();
 if (source) {
-_source =
-[[RTCAudioSource alloc] initWithFactory:self.factory nativeAudioSource:source.get()];
+_source = [[RTC_OBJC_TYPE(RTCAudioSource) alloc] initWithFactory:self.factory
+nativeAudioSource:source.get()];
 }
 }
 return _source;
@@ -15,7 +15,7 @@
 NS_ASSUME_NONNULL_BEGIN

 RTC_OBJC_EXPORT
-@interface RTCCertificate : NSObject <NSCopying>
+@interface RTC_OBJC_TYPE (RTCCertificate) : NSObject <NSCopying>

 /** Private key in PEM. */
 @property(nonatomic, readonly, copy) NSString *private_key;
@@ -37,7 +37,7 @@ RTC_OBJC_EXPORT
 * provided.
 * - name: "ECDSA" or "RSASSA-PKCS1-v1_5"
 */
-+ (nullable RTCCertificate *)generateCertificateWithParams:(NSDictionary *)params;
++ (nullable RTC_OBJC_TYPE(RTCCertificate) *)generateCertificateWithParams:(NSDictionary *)params;

 @end
@@ -16,7 +16,7 @@
 #include "rtc_base/rtc_certificate_generator.h"
 #include "rtc_base/ssl_identity.h"

-@implementation RTCCertificate
+@implementation RTC_OBJC_TYPE (RTCCertificate)

 @synthesize private_key = _private_key;
 @synthesize certificate = _certificate;
@@ -35,7 +35,7 @@
 return self;
 }

-+ (nullable RTCCertificate *)generateCertificateWithParams:(NSDictionary *)params {
++ (nullable RTC_OBJC_TYPE(RTCCertificate) *)generateCertificateWithParams:(NSDictionary *)params {
 rtc::KeyType keyType = rtc::KT_ECDSA;
 NSString *keyTypeString = [params valueForKey:@"name"];
 if (keyTypeString && [keyTypeString isEqualToString:@"RSASSA-PKCS1-v1_5"]) {
@@ -63,8 +63,9 @@
 RTC_LOG(LS_INFO) << "CERT PEM ";
 RTC_LOG(LS_INFO) << pem_certificate;

-RTCCertificate *cert = [[RTCCertificate alloc] initWithPrivateKey:@(pem_private_key.c_str())
-certificate:@(pem_certificate.c_str())];
+RTC_OBJC_TYPE(RTCCertificate) *cert =
+[[RTC_OBJC_TYPE(RTCCertificate) alloc] initWithPrivateKey:@(pem_private_key.c_str())
+certificate:@(pem_certificate.c_str())];
 return cert;
 }
@@ -14,14 +14,15 @@

 NS_ASSUME_NONNULL_BEGIN

-@interface RTCConfiguration ()
+@interface RTC_OBJC_TYPE (RTCConfiguration)
+()

 /** Optional TurnCustomizer.
 * With this class one can modify outgoing TURN messages.
 * The object passed in must remain valid until PeerConnection::Close() is
 * called.
 */
 @property(nonatomic, nullable) webrtc::TurnCustomizer* turnCustomizer;

 @end
@@ -14,10 +14,11 @@

 NS_ASSUME_NONNULL_BEGIN

-@interface RTCConfiguration ()
+@interface RTC_OBJC_TYPE (RTCConfiguration)
+()

-+ (webrtc::PeerConnectionInterface::IceTransportsType)nativeTransportsTypeForTransportPolicy:
-(RTCIceTransportPolicy)policy;
++ (webrtc::PeerConnectionInterface::IceTransportsType)nativeTransportsTypeForTransportPolicy
+: (RTCIceTransportPolicy)policy;

 + (RTCIceTransportPolicy)transportPolicyForTransportsType:
 (webrtc::PeerConnectionInterface::IceTransportsType)nativeType;
@@ -65,8 +66,8 @@ NS_ASSUME_NONNULL_BEGIN
 + (NSString *)stringForSdpSemantics:(RTCSdpSemantics)sdpSemantics;

 /**
- * RTCConfiguration struct representation of this RTCConfiguration. This is
- * needed to pass to the underlying C++ APIs.
+ * RTCConfiguration struct representation of this RTCConfiguration.
+ * This is needed to pass to the underlying C++ APIs.
 */
 - (nullable webrtc::PeerConnectionInterface::RTCConfiguration *)createNativeConfiguration;
@@ -14,7 +14,7 @@
 #import "RTCCryptoOptions.h"
 #import "RTCMacros.h"

-@class RTCIceServer;
+@class RTC_OBJC_TYPE(RTCIceServer);

 /**
 * Represents the ice transport policy. This exposes the same states in C++,
@@ -70,7 +70,7 @@ typedef NS_ENUM(NSInteger, RTCSdpSemantics) {
 NS_ASSUME_NONNULL_BEGIN

 RTC_OBJC_EXPORT
-@interface RTCConfiguration : NSObject
+@interface RTC_OBJC_TYPE (RTCConfiguration) : NSObject

 /** If true, allows DSCP codes to be set on outgoing packets, configured using
 * networkPriority field of RTCRtpEncodingParameters. Defaults to false.
@@ -78,10 +78,10 @@ RTC_OBJC_EXPORT
 @property(nonatomic, assign) BOOL enableDscp;

 /** An array of Ice Servers available to be used by ICE. */
-@property(nonatomic, copy) NSArray<RTCIceServer *> *iceServers;
+@property(nonatomic, copy) NSArray<RTC_OBJC_TYPE(RTCIceServer) *> *iceServers;

 /** An RTCCertificate for 're' use. */
-@property(nonatomic, nullable) RTCCertificate *certificate;
+@property(nonatomic, nullable) RTC_OBJC_TYPE(RTCCertificate) * certificate;

 /** Which candidates the ICE agent is allowed to use. The W3C calls it
 * |iceTransportPolicy|, while in C++ it is called |type|. */
@@ -173,9 +173,9 @@ RTC_OBJC_EXPORT
 *
 * UnifiedPlan will cause RTCPeerConnection to create offers and answers with
 * multiple m= sections where each m= section maps to one RTCRtpSender and one
- * RTCRtpReceiver (an RTCRtpTransceiver), either both audio or both video. This
- * will also cause RTCPeerConnection to ignore all but the first a=ssrc lines
- * that form a Plan B stream.
+ * RTCRtpReceiver (an RTCRtpTransceiver), either both audio or both
+ * video. This will also cause RTCPeerConnection) to ignore all but the first a=ssrc
+ * lines that form a Plan B stream.
 *
 * For users who wish to send multiple audio/video streams and need to stay
 * interoperable with legacy WebRTC implementations or use legacy APIs,
@@ -214,7 +214,7 @@ RTC_OBJC_EXPORT
 * frame encryption for native WebRTC. Setting this will overwrite any
 * options set through the PeerConnectionFactory (which is deprecated).
 */
-@property(nonatomic, nullable) RTCCryptoOptions *cryptoOptions;
+@property(nonatomic, nullable) RTC_OBJC_TYPE(RTCCryptoOptions) * cryptoOptions;

 /**
 * Time interval between audio RTCP reports.
@@ -20,7 +20,7 @@
 #include "rtc_base/rtc_certificate_generator.h"
 #include "rtc_base/ssl_identity.h"

-@implementation RTCConfiguration
+@implementation RTC_OBJC_TYPE (RTCConfiguration)

 @synthesize enableDscp = _enableDscp;
 @synthesize iceServers = _iceServers;
@@ -70,7 +70,8 @@
 _enableDscp = config.dscp();
 NSMutableArray *iceServers = [NSMutableArray array];
 for (const webrtc::PeerConnectionInterface::IceServer& server : config.servers) {
-RTCIceServer *iceServer = [[RTCIceServer alloc] initWithNativeServer:server];
+RTC_OBJC_TYPE(RTCIceServer) *iceServer =
+[[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithNativeServer:server];
 [iceServers addObject:iceServer];
 }
 _iceServers = iceServers;
@@ -78,9 +79,9 @@
 rtc::scoped_refptr<rtc::RTCCertificate> native_cert;
 native_cert = config.certificates[0];
 rtc::RTCCertificatePEM native_pem = native_cert->ToPEM();
-_certificate =
-[[RTCCertificate alloc] initWithPrivateKey:@(native_pem.private_key().c_str())
+_certificate = [[RTC_OBJC_TYPE(RTCCertificate) alloc]
+initWithPrivateKey:@(native_pem.private_key().c_str())
 certificate:@(native_pem.certificate().c_str())];
 }
 _iceTransportPolicy =
 [[self class] transportPolicyForTransportsType:config.type];
@@ -122,7 +123,7 @@
 _turnCustomizer = config.turn_customizer;
 _activeResetSrtpParams = config.active_reset_srtp_params;
 if (config.crypto_options) {
-_cryptoOptions = [[RTCCryptoOptions alloc]
+_cryptoOptions = [[RTC_OBJC_TYPE(RTCCryptoOptions) alloc]
 initWithSrtpEnableGcmCryptoSuites:config.crypto_options->srtp
 .enable_gcm_crypto_suites
 srtpEnableAes128Sha1_32CryptoCipher:config.crypto_options->srtp
@@ -140,7 +141,7 @@
 }

 - (NSString *)description {
-static NSString *formatString = @"RTCConfiguration: "
+static NSString *formatString = @"RTC_OBJC_TYPE(RTCConfiguration): "
 @"{\n%@\n%@\n%@\n%@\n%@\n%@\n%@\n%@\n%d\n%d\n%d\n%d\n%d\n%d\n"
 @"%d\n%@\n%d\n%d\n%d\n%d\n%d\n%@\n%d\n}\n";

@@ -181,7 +182,7 @@
 webrtc::PeerConnectionInterface::RTCConfigurationType::kAggressive));

 nativeConfig->set_dscp(_enableDscp);
-for (RTCIceServer *iceServer in _iceServers) {
+for (RTC_OBJC_TYPE(RTCIceServer) * iceServer in _iceServers) {
 nativeConfig->servers.push_back(iceServer.nativeServer);
 }
 nativeConfig->type =
@@ -19,7 +19,7 @@ NS_ASSUME_NONNULL_BEGIN
 * as Objective-C doesn't support nested structures.
 */
 RTC_OBJC_EXPORT
-@interface RTCCryptoOptions : NSObject
+@interface RTC_OBJC_TYPE (RTCCryptoOptions) : NSObject

 /**
 * Enable GCM crypto suites from RFC 7714 for SRTP. GCM will only be used
@@ -10,7 +10,7 @@

 #import "RTCCryptoOptions.h"

-@implementation RTCCryptoOptions
+@implementation RTC_OBJC_TYPE (RTCCryptoOptions)

 @synthesize srtpEnableGcmCryptoSuites = _srtpEnableGcmCryptoSuites;
 @synthesize srtpEnableAes128Sha1_32CryptoCipher = _srtpEnableAes128Sha1_32CryptoCipher;
@@ -15,27 +15,29 @@

 NS_ASSUME_NONNULL_BEGIN

-@class RTCPeerConnectionFactory;
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactory);

-@interface RTCDataBuffer ()
+@interface RTC_OBJC_TYPE (RTCDataBuffer)
+()

 /**
 * The native DataBuffer representation of this RTCDatabuffer object. This is
 * needed to pass to the underlying C++ APIs.
 */
 @property(nonatomic, readonly) const webrtc::DataBuffer *nativeDataBuffer;

 /** Initialize an RTCDataBuffer from a native DataBuffer. */
 - (instancetype)initWithNativeBuffer:(const webrtc::DataBuffer &)nativeBuffer;

 @end

-@interface RTCDataChannel ()
+@interface RTC_OBJC_TYPE (RTCDataChannel)
+()

 /** Initialize an RTCDataChannel from a native DataChannelInterface. */
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
-nativeDataChannel:(rtc::scoped_refptr<webrtc::DataChannelInterface>)nativeDataChannel
-NS_DESIGNATED_INITIALIZER;
+- (instancetype)initWithFactory
+: (RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeDataChannel
+: (rtc::scoped_refptr<webrtc::DataChannelInterface>)nativeDataChannel NS_DESIGNATED_INITIALIZER;

 + (webrtc::DataChannelInterface::DataState)nativeDataChannelStateForState:
 (RTCDataChannelState)state;
@@ -16,7 +16,7 @@
 NS_ASSUME_NONNULL_BEGIN

 RTC_OBJC_EXPORT
-@interface RTCDataBuffer : NSObject
+@interface RTC_OBJC_TYPE (RTCDataBuffer) : NSObject

 /** NSData representation of the underlying buffer. */
 @property(nonatomic, readonly) NSData *data;
@@ -34,20 +34,22 @@ RTC_OBJC_EXPORT

 @end

-@class RTCDataChannel;
+@class RTC_OBJC_TYPE(RTCDataChannel);
 RTC_OBJC_EXPORT
-@protocol RTCDataChannelDelegate <NSObject>
+@protocol RTC_OBJC_TYPE
+(RTCDataChannelDelegate)<NSObject>

 /** The data channel state changed. */
-- (void)dataChannelDidChangeState:(RTCDataChannel *)dataChannel;
+- (void)dataChannelDidChangeState : (RTC_OBJC_TYPE(RTCDataChannel) *)dataChannel;

 /** The data channel successfully received a data buffer. */
-- (void)dataChannel:(RTCDataChannel *)dataChannel
-didReceiveMessageWithBuffer:(RTCDataBuffer *)buffer;
+- (void)dataChannel:(RTC_OBJC_TYPE(RTCDataChannel) *)dataChannel
+didReceiveMessageWithBuffer:(RTC_OBJC_TYPE(RTCDataBuffer) *)buffer;

 @optional
 /** The data channel's |bufferedAmount| changed. */
-- (void)dataChannel:(RTCDataChannel *)dataChannel didChangeBufferedAmount:(uint64_t)amount;
+- (void)dataChannel:(RTC_OBJC_TYPE(RTCDataChannel) *)dataChannel
+didChangeBufferedAmount:(uint64_t)amount;

 @end

@@ -60,7 +62,7 @@ typedef NS_ENUM(NSInteger, RTCDataChannelState) {
 };

 RTC_OBJC_EXPORT
-@interface RTCDataChannel : NSObject
+@interface RTC_OBJC_TYPE (RTCDataChannel) : NSObject

 /**
 * A label that can be used to distinguish this data channel from other data
@@ -115,7 +117,7 @@ RTC_OBJC_EXPORT
 @property(nonatomic, readonly) uint64_t bufferedAmount;

 /** The delegate for this data channel. */
-@property(nonatomic, weak) id<RTCDataChannelDelegate> delegate;
+@property(nonatomic, weak) id<RTC_OBJC_TYPE(RTCDataChannelDelegate)> delegate;

 - (instancetype)init NS_UNAVAILABLE;

@@ -123,7 +125,7 @@ RTC_OBJC_EXPORT
 - (void)close;

 /** Attempt to send |data| on this data channel's underlying data transport. */
-- (BOOL)sendData:(RTCDataBuffer *)data;
+- (BOOL)sendData:(RTC_OBJC_TYPE(RTCDataBuffer) *)data;

 @end
@@ -18,21 +18,21 @@ namespace webrtc {

 class DataChannelDelegateAdapter : public DataChannelObserver {
 public:
-DataChannelDelegateAdapter(RTCDataChannel *channel) { channel_ = channel; }
+DataChannelDelegateAdapter(RTC_OBJC_TYPE(RTCDataChannel) * channel) { channel_ = channel; }

 void OnStateChange() override {
 [channel_.delegate dataChannelDidChangeState:channel_];
 }

 void OnMessage(const DataBuffer& buffer) override {
-RTCDataBuffer *data_buffer =
-[[RTCDataBuffer alloc] initWithNativeBuffer:buffer];
+RTC_OBJC_TYPE(RTCDataBuffer) *data_buffer =
+[[RTC_OBJC_TYPE(RTCDataBuffer) alloc] initWithNativeBuffer:buffer];
 [channel_.delegate dataChannel:channel_
 didReceiveMessageWithBuffer:data_buffer];
 }

 void OnBufferedAmountChange(uint64_t previousAmount) override {
-id<RTCDataChannelDelegate> delegate = channel_.delegate;
+id<RTC_OBJC_TYPE(RTCDataChannelDelegate)> delegate = channel_.delegate;
 SEL sel = @selector(dataChannel:didChangeBufferedAmount:);
 if ([delegate respondsToSelector:sel]) {
 [delegate dataChannel:channel_ didChangeBufferedAmount:previousAmount];
@@ -40,12 +40,11 @@ class DataChannelDelegateAdapter : public DataChannelObserver {
 }

 private:
-__weak RTCDataChannel *channel_;
+__weak RTC_OBJC_TYPE(RTCDataChannel) * channel_;
 };
 }

-@implementation RTCDataBuffer {
+@implementation RTC_OBJC_TYPE (RTCDataBuffer) {
 std::unique_ptr<webrtc::DataBuffer> _dataBuffer;
 }

@@ -83,9 +82,8 @@ class DataChannelDelegateAdapter : public DataChannelObserver {

 @end

-@implementation RTCDataChannel {
-RTCPeerConnectionFactory *_factory;
+@implementation RTC_OBJC_TYPE (RTCDataChannel) {
+RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory;
 rtc::scoped_refptr<webrtc::DataChannelInterface> _nativeDataChannel;
 std::unique_ptr<webrtc::DataChannelDelegateAdapter> _observer;
 BOOL _isObserverRegistered;
@@ -152,21 +150,20 @@ class DataChannelDelegateAdapter : public DataChannelObserver {
 _nativeDataChannel->Close();
 }

-- (BOOL)sendData:(RTCDataBuffer *)data {
+- (BOOL)sendData:(RTC_OBJC_TYPE(RTCDataBuffer) *)data {
 return _nativeDataChannel->Send(*data.nativeDataBuffer);
 }

 - (NSString *)description {
-return [NSString stringWithFormat:@"RTCDataChannel:\n%ld\n%@\n%@",
+return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCDataChannel):\n%ld\n%@\n%@",
 (long)self.channelId,
 self.label,
-[[self class]
-stringForState:self.readyState]];
+[[self class] stringForState:self.readyState]];
 }

 #pragma mark - Private

-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
 nativeDataChannel:
 (rtc::scoped_refptr<webrtc::DataChannelInterface>)nativeDataChannel {
 NSParameterAssert(nativeDataChannel);
@@ -14,9 +14,10 @@

 NS_ASSUME_NONNULL_BEGIN

-@interface RTCDataChannelConfiguration ()
+@interface RTC_OBJC_TYPE (RTCDataChannelConfiguration)
+()

 @property(nonatomic, readonly) webrtc::DataChannelInit nativeDataChannelInit;

 @end
@@ -16,7 +16,7 @@
 NS_ASSUME_NONNULL_BEGIN

 RTC_OBJC_EXPORT
-@interface RTCDataChannelConfiguration : NSObject
+@interface RTC_OBJC_TYPE (RTCDataChannelConfiguration) : NSObject

 /** Set to YES if ordered delivery is required. */
 @property(nonatomic, assign) BOOL isOrdered;
@@ -12,7 +12,7 @@

 #import "helpers/NSString+StdString.h"

-@implementation RTCDataChannelConfiguration
+@implementation RTC_OBJC_TYPE (RTCDataChannelConfiguration)

 @synthesize nativeDataChannelInit = _nativeDataChannelInit;
@@ -14,7 +14,7 @@

 NS_ASSUME_NONNULL_BEGIN

-@interface RTCDtmfSender : NSObject <RTCDtmfSender>
+@interface RTC_OBJC_TYPE (RTCDtmfSender) : NSObject <RTC_OBJC_TYPE(RTCDtmfSender)>

 @property(nonatomic, readonly) rtc::scoped_refptr<webrtc::DtmfSenderInterface> nativeDtmfSender;
@@ -15,14 +15,15 @@
 NS_ASSUME_NONNULL_BEGIN

 RTC_OBJC_EXPORT
-@protocol RTCDtmfSender <NSObject>
+@protocol RTC_OBJC_TYPE
+(RTCDtmfSender)<NSObject>

 /**
 * Returns true if this RTCDtmfSender is capable of sending DTMF. Otherwise
 * returns false. To be able to send DTMF, the associated RTCRtpSender must be
 * able to send packets, and a "telephone-event" codec must be negotiated.
 */
 @property(nonatomic, readonly) BOOL canInsertDtmf;

 /**
 * Queues a task that sends the DTMF tones. The tones parameter is treated
@@ -15,7 +15,7 @@

 #include "rtc_base/time_utils.h"

-@implementation RTCDtmfSender {
+@implementation RTC_OBJC_TYPE (RTCDtmfSender) {
 rtc::scoped_refptr<webrtc::DtmfSenderInterface> _nativeDtmfSender;
 }

@@ -48,12 +48,11 @@
 }

 - (NSString *)description {
-return [NSString
-stringWithFormat:
-@"RTCDtmfSender {\n remainingTones: %@\n duration: %f sec\n interToneGap: %f sec\n}",
-[self remainingTones],
-[self duration],
-[self interToneGap]];
+return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCDtmfSender) {\n remainingTones: %@\n "
+@"duration: %f sec\n interToneGap: %f sec\n}",
+[self remainingTones],
+[self duration],
+[self interToneGap]];
 }

 #pragma mark - Private
@@ -67,7 +66,8 @@
 NSParameterAssert(nativeDtmfSender);
 if (self = [super init]) {
 _nativeDtmfSender = nativeDtmfSender;
-RTCLogInfo(@"RTCDtmfSender(%p): created DTMF sender: %@", self, self.description);
+RTCLogInfo(
+@"RTC_OBJC_TYPE(RTCDtmfSender)(%p): created DTMF sender: %@", self, self.description);
 }
 return self;
 }
@@ -15,9 +15,10 @@
 NS_ASSUME_NONNULL_BEGIN

 /* Interfaces for converting to/from internal C++ formats. */
-@interface RTCEncodedImage (Private)
+@interface RTC_OBJC_TYPE (RTCEncodedImage)
+(Private)

-- (instancetype)initWithNativeEncodedImage:(const webrtc::EncodedImage &)encodedImage;
+- (instancetype)initWithNativeEncodedImage : (const webrtc::EncodedImage &)encodedImage;
 - (webrtc::EncodedImage)nativeEncodedImage;

 @end
@@ -56,9 +56,10 @@ class ObjCEncodedImageBuffer : public webrtc::EncodedImageBufferInterface {
 }
 @end

-@implementation RTCEncodedImage (Private)
+@implementation RTC_OBJC_TYPE (RTCEncodedImage)
+(Private)

 - (rtc::scoped_refptr<webrtc::EncodedImageBufferInterface>)encodedData {
 RTCWrappedEncodedImageBuffer *wrappedBuffer =
 objc_getAssociatedObject(self, @selector(encodedData));
 return wrappedBuffer.buffer;
@@ -34,7 +34,7 @@ NS_ASSUME_NONNULL_BEGIN
 // For kRTCFileLoggerTypeApp, the oldest log is overwritten.
 // This class is not threadsafe.
 RTC_OBJC_EXPORT
-@interface RTCFileLogger : NSObject
+@interface RTC_OBJC_TYPE (RTCFileLogger) : NSObject

 // The severity level to capture. The default is kRTCFileLoggerSeverityInfo.
 @property(nonatomic, assign) RTCFileLoggerSeverity severity;
@@ -21,7 +21,7 @@ NSString *const kDefaultLogDirName = @"webrtc_logs";
 NSUInteger const kDefaultMaxFileSize = 10 * 1024 * 1024;  // 10MB.
 const char *kRTCFileLoggerRotatingLogPrefix = "rotating_log";

-@implementation RTCFileLogger {
+@implementation RTC_OBJC_TYPE (RTCFileLogger) {
 BOOL _hasStarted;
 NSString *_dirPath;
 NSUInteger _maxFileSize;
@@ -16,13 +16,14 @@

 NS_ASSUME_NONNULL_BEGIN

-@interface RTCIceCandidate ()
+@interface RTC_OBJC_TYPE (RTCIceCandidate)
+()

 /**
 * The native IceCandidateInterface representation of this RTCIceCandidate
 * object. This is needed to pass to the underlying C++ APIs.
 */
 @property(nonatomic, readonly) std::unique_ptr<webrtc::IceCandidateInterface> nativeCandidate;

 /**
 * Initialize an RTCIceCandidate from a native IceCandidateInterface. No
@@ -15,7 +15,7 @@
 NS_ASSUME_NONNULL_BEGIN

 RTC_OBJC_EXPORT
-@interface RTCIceCandidate : NSObject
+@interface RTC_OBJC_TYPE (RTCIceCandidate) : NSObject

 /**
 * If present, the identifier of the "media stream identification" for the media
@@ -15,7 +15,7 @@
 #import "base/RTCLogging.h"
 #import "helpers/NSString+StdString.h"

-@implementation RTCIceCandidate
+@implementation RTC_OBJC_TYPE (RTCIceCandidate)

 @synthesize sdpMid = _sdpMid;
 @synthesize sdpMLineIndex = _sdpMLineIndex;
@@ -35,7 +35,7 @@
 }

 - (NSString *)description {
-return [NSString stringWithFormat:@"RTCIceCandidate:\n%@\n%d\n%@\n%@",
+return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCIceCandidate):\n%@\n%d\n%@\n%@",
 _sdpMid,
 _sdpMLineIndex,
 _sdp,
@@ -50,7 +50,7 @@
 std::string sdp;
 candidate->ToString(&sdp);

-RTCIceCandidate *rtcCandidate =
+RTC_OBJC_TYPE(RTCIceCandidate) *rtcCandidate =
 [self initWithSdp:[NSString stringForStdString:sdp]
 sdpMLineIndex:candidate->sdp_mline_index()
 sdpMid:[NSString stringForStdString:candidate->sdp_mid()]];
@@ -14,13 +14,14 @@

 NS_ASSUME_NONNULL_BEGIN

-@interface RTCIceServer ()
+@interface RTC_OBJC_TYPE (RTCIceServer)
+()

 /**
 * IceServer struct representation of this RTCIceServer object's data.
 * This is needed to pass to the underlying C++ APIs.
 */
 @property(nonatomic, readonly) webrtc::PeerConnectionInterface::IceServer nativeServer;

 /** Initialize an RTCIceServer from a native IceServer. */
 - (instancetype)initWithNativeServer:(webrtc::PeerConnectionInterface::IceServer)nativeServer;
@@ -20,7 +20,7 @@ typedef NS_ENUM(NSUInteger, RTCTlsCertPolicy) {
 NS_ASSUME_NONNULL_BEGIN

 RTC_OBJC_EXPORT
-@interface RTCIceServer : NSObject
+@interface RTC_OBJC_TYPE (RTCIceServer) : NSObject

 /** URI(s) for this server represented as NSStrings. */
 @property(nonatomic, readonly) NSArray<NSString *> *urlStrings;
@@ -12,7 +12,7 @@

 #import "helpers/NSString+StdString.h"

-@implementation RTCIceServer
+@implementation RTC_OBJC_TYPE (RTCIceServer)

 @synthesize urlStrings = _urlStrings;
 @synthesize username = _username;
@@ -97,7 +97,7 @@
 }

 - (NSString *)description {
-return [NSString stringWithFormat:@"RTCIceServer:\n%@\n%@\n%@\n%@\n%@\n%@\n%@",
+return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCIceServer):\n%@\n%@\n%@\n%@\n%@\n%@\n%@",
 _urlStrings,
 _username,
 _credential,
@@ -14,10 +14,11 @@

 NS_ASSUME_NONNULL_BEGIN

-@interface RTCLegacyStatsReport ()
+@interface RTC_OBJC_TYPE (RTCLegacyStatsReport)
+()

 /** Initialize an RTCLegacyStatsReport object from a native StatsReport. */
-- (instancetype)initWithNativeReport:(const webrtc::StatsReport &)nativeReport;
+- (instancetype)initWithNativeReport : (const webrtc::StatsReport &)nativeReport;

 @end
@@ -16,7 +16,7 @@ NS_ASSUME_NONNULL_BEGIN

 /** This does not currently conform to the spec. */
 RTC_OBJC_EXPORT
-@interface RTCLegacyStatsReport : NSObject
+@interface RTC_OBJC_TYPE (RTCLegacyStatsReport) : NSObject

 /** Time since 1970-01-01T00:00:00Z in milliseconds. */
 @property(nonatomic, readonly) CFTimeInterval timestamp;
@@ -15,7 +15,7 @@

 #include "rtc_base/checks.h"

-@implementation RTCLegacyStatsReport
+@implementation RTC_OBJC_TYPE (RTCLegacyStatsReport)

 @synthesize timestamp = _timestamp;
 @synthesize type = _type;
@@ -23,7 +23,7 @@
 @synthesize values = _values;

 - (NSString *)description {
-return [NSString stringWithFormat:@"RTCLegacyStatsReport:\n%@\n%@\n%f\n%@",
+return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCLegacyStatsReport):\n%@\n%@\n%f\n%@",
 _reportId,
 _type,
 _timestamp,
@@ -16,13 +16,14 @@

 NS_ASSUME_NONNULL_BEGIN

-@interface RTCMediaConstraints ()
+@interface RTC_OBJC_TYPE (RTCMediaConstraints)
+()

 /**
 * A MediaConstraints representation of this RTCMediaConstraints object. This is
 * needed to pass to the underlying C++ APIs.
 */
 - (std::unique_ptr<webrtc::MediaConstraints>)nativeConstraints;

 /** Return a native Constraints object representing these constraints */
 + (webrtc::MediaConstraints::Constraints)nativeConstraintsForConstraints:
@@ -31,7 +31,7 @@ RTC_EXTERN NSString *const kRTCMediaConstraintsValueTrue;
 RTC_EXTERN NSString *const kRTCMediaConstraintsValueFalse;

 RTC_OBJC_EXPORT
-@interface RTCMediaConstraints : NSObject
+@interface RTC_OBJC_TYPE (RTCMediaConstraints) : NSObject

 - (instancetype)init NS_UNAVAILABLE;
@@ -28,7 +28,7 @@ NSString *const kRTCMediaConstraintsVoiceActivityDetection =
 NSString *const kRTCMediaConstraintsValueTrue = @(webrtc::MediaConstraints::kValueTrue);
 NSString *const kRTCMediaConstraintsValueFalse = @(webrtc::MediaConstraints::kValueFalse);

-@implementation RTCMediaConstraints {
+@implementation RTC_OBJC_TYPE (RTCMediaConstraints) {
 NSDictionary<NSString *, NSString *> *_mandatory;
 NSDictionary<NSString *, NSString *> *_optional;
 }
@@ -47,9 +47,8 @@ NSString *const kRTCMediaConstraintsValueFalse = @(webrtc::MediaConstraints::kVa
 }

 - (NSString *)description {
-return [NSString stringWithFormat:@"RTCMediaConstraints:\n%@\n%@",
-_mandatory,
-_optional];
+return [NSString
+stringWithFormat:@"RTC_OBJC_TYPE(RTCMediaConstraints):\n%@\n%@", _mandatory, _optional];
 }

 #pragma mark - Private
@@ -14,18 +14,20 @@

 NS_ASSUME_NONNULL_BEGIN

-@class RTCPeerConnectionFactory;
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactory);

 typedef NS_ENUM(NSInteger, RTCMediaSourceType) {
 RTCMediaSourceTypeAudio,
 RTCMediaSourceTypeVideo,
 };

-@interface RTCMediaSource ()
+@interface RTC_OBJC_TYPE (RTCMediaSource)
+()

-@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::MediaSourceInterface> nativeMediaSource;
+@property(nonatomic,
+readonly) rtc::scoped_refptr<webrtc::MediaSourceInterface> nativeMediaSource;

-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
 nativeMediaSource:(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
 type:(RTCMediaSourceType)type NS_DESIGNATED_INITIALIZER;
@@ -22,7 +22,7 @@ typedef NS_ENUM(NSInteger, RTCSourceState) {
 NS_ASSUME_NONNULL_BEGIN

 RTC_OBJC_EXPORT
-@interface RTCMediaSource : NSObject
+@interface RTC_OBJC_TYPE (RTCMediaSource) : NSObject

 /** The current state of the RTCMediaSource. */
 @property(nonatomic, readonly) RTCSourceState state;
@@ -12,14 +12,14 @@

 #include "rtc_base/checks.h"

-@implementation RTCMediaSource {
-RTCPeerConnectionFactory *_factory;
+@implementation RTC_OBJC_TYPE (RTCMediaSource) {
+RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory;
 RTCMediaSourceType _type;
 }

 @synthesize nativeMediaSource = _nativeMediaSource;

-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
 nativeMediaSource:(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
 type:(RTCMediaSourceType)type {
 RTC_DCHECK(factory);
@@ -14,19 +14,22 @@

 NS_ASSUME_NONNULL_BEGIN

-@interface RTCMediaStream ()
+@interface RTC_OBJC_TYPE (RTCMediaStream)
+()

 /**
 * MediaStreamInterface representation of this RTCMediaStream object. This is
 * needed to pass to the underlying C++ APIs.
 */
-@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::MediaStreamInterface> nativeMediaStream;
+@property(nonatomic,
+readonly) rtc::scoped_refptr<webrtc::MediaStreamInterface> nativeMediaStream;

 /** Initialize an RTCMediaStream with an id. */
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory streamId:(NSString *)streamId;
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+streamId:(NSString *)streamId;

 /** Initialize an RTCMediaStream from a native MediaStreamInterface. */
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
 nativeMediaStream:(rtc::scoped_refptr<webrtc::MediaStreamInterface>)nativeMediaStream;

 @end
@@ -14,18 +14,18 @@

 NS_ASSUME_NONNULL_BEGIN

-@class RTCAudioTrack;
-@class RTCPeerConnectionFactory;
-@class RTCVideoTrack;
+@class RTC_OBJC_TYPE(RTCAudioTrack);
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactory);
+@class RTC_OBJC_TYPE(RTCVideoTrack);

 RTC_OBJC_EXPORT
-@interface RTCMediaStream : NSObject
+@interface RTC_OBJC_TYPE (RTCMediaStream) : NSObject

 /** The audio tracks in this stream. */
-@property(nonatomic, strong, readonly) NSArray<RTCAudioTrack *> *audioTracks;
+@property(nonatomic, strong, readonly) NSArray<RTC_OBJC_TYPE(RTCAudioTrack) *> *audioTracks;

 /** The video tracks in this stream. */
-@property(nonatomic, strong, readonly) NSArray<RTCVideoTrack *> *videoTracks;
+@property(nonatomic, strong, readonly) NSArray<RTC_OBJC_TYPE(RTCVideoTrack) *> *videoTracks;

 /** An identifier for this media stream. */
 @property(nonatomic, readonly) NSString *streamId;
@@ -33,16 +33,16 @@ RTC_OBJC_EXPORT
 - (instancetype)init NS_UNAVAILABLE;

 /** Adds the given audio track to this media stream. */
-- (void)addAudioTrack:(RTCAudioTrack *)audioTrack;
+- (void)addAudioTrack:(RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrack;

 /** Adds the given video track to this media stream. */
-- (void)addVideoTrack:(RTCVideoTrack *)videoTrack;
+- (void)addVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrack;

 /** Removes the given audio track to this media stream. */
-- (void)removeAudioTrack:(RTCAudioTrack *)audioTrack;
+- (void)removeAudioTrack:(RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrack;

 /** Removes the given video track to this media stream. */
-- (void)removeVideoTrack:(RTCVideoTrack *)videoTrack;
+- (void)removeVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrack;

 @end
@@ -18,14 +18,14 @@
 #import "RTCVideoTrack+Private.h"
 #import "helpers/NSString+StdString.h"

-@implementation RTCMediaStream {
-  RTCPeerConnectionFactory *_factory;
+@implementation RTC_OBJC_TYPE (RTCMediaStream) {
+  RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory;
   NSMutableArray *_audioTracks;
   NSMutableArray *_videoTracks;
   rtc::scoped_refptr<webrtc::MediaStreamInterface> _nativeMediaStream;
 }

-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
                        streamId:(NSString *)streamId {
   NSParameterAssert(factory);
   NSParameterAssert(streamId.length);
@@ -35,11 +35,11 @@
   return [self initWithFactory:factory nativeMediaStream:stream];
 }

-- (NSArray<RTCAudioTrack *> *)audioTracks {
+- (NSArray<RTC_OBJC_TYPE(RTCAudioTrack) *> *)audioTracks {
   return [_audioTracks copy];
 }

-- (NSArray<RTCVideoTrack *> *)videoTracks {
+- (NSArray<RTC_OBJC_TYPE(RTCVideoTrack) *> *)videoTracks {
   return [_videoTracks copy];
 }

@@ -47,32 +47,32 @@
   return [NSString stringForStdString:_nativeMediaStream->id()];
 }

-- (void)addAudioTrack:(RTCAudioTrack *)audioTrack {
+- (void)addAudioTrack:(RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrack {
   if (_nativeMediaStream->AddTrack(audioTrack.nativeAudioTrack)) {
     [_audioTracks addObject:audioTrack];
   }
 }

-- (void)addVideoTrack:(RTCVideoTrack *)videoTrack {
+- (void)addVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrack {
   if (_nativeMediaStream->AddTrack(videoTrack.nativeVideoTrack)) {
     [_videoTracks addObject:videoTrack];
   }
 }

-- (void)removeAudioTrack:(RTCAudioTrack *)audioTrack {
+- (void)removeAudioTrack:(RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrack {
   NSUInteger index = [_audioTracks indexOfObjectIdenticalTo:audioTrack];
   NSAssert(index != NSNotFound,
-           @"|removeAudioTrack| called on unexpected RTCAudioTrack");
+           @"|removeAudioTrack| called on unexpected RTC_OBJC_TYPE(RTCAudioTrack)");
   if (index != NSNotFound &&
       _nativeMediaStream->RemoveTrack(audioTrack.nativeAudioTrack)) {
     [_audioTracks removeObjectAtIndex:index];
   }
 }

-- (void)removeVideoTrack:(RTCVideoTrack *)videoTrack {
+- (void)removeVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrack {
   NSUInteger index = [_videoTracks indexOfObjectIdenticalTo:videoTrack];
   NSAssert(index != NSNotFound,
-           @"|removeVideoTrack| called on unexpected RTCVideoTrack");
+           @"|removeVideoTrack| called on unexpected RTC_OBJC_TYPE(RTCVideoTrack)");
   if (index != NSNotFound &&
       _nativeMediaStream->RemoveTrack(videoTrack.nativeVideoTrack)) {
     [_videoTracks removeObjectAtIndex:index];
@@ -80,7 +80,7 @@
 }

 - (NSString *)description {
-  return [NSString stringWithFormat:@"RTCMediaStream:\n%@\nA=%lu\nV=%lu",
+  return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCMediaStream):\n%@\nA=%lu\nV=%lu",
                                     self.streamId,
                                     (unsigned long)self.audioTracks.count,
                                     (unsigned long)self.videoTracks.count];
@@ -92,7 +92,7 @@
   return _nativeMediaStream;
 }

-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
              nativeMediaStream:
                  (rtc::scoped_refptr<webrtc::MediaStreamInterface>)nativeMediaStream {
   NSParameterAssert(nativeMediaStream);
@@ -108,15 +108,19 @@

   for (auto &track : audioTracks) {
     RTCMediaStreamTrackType type = RTCMediaStreamTrackTypeAudio;
-    RTCAudioTrack *audioTrack =
-        [[RTCAudioTrack alloc] initWithFactory:_factory nativeTrack:track type:type];
+    RTC_OBJC_TYPE(RTCAudioTrack) *audioTrack =
+        [[RTC_OBJC_TYPE(RTCAudioTrack) alloc] initWithFactory:_factory
+                                                   nativeTrack:track
+                                                          type:type];
     [_audioTracks addObject:audioTrack];
   }

   for (auto &track : videoTracks) {
     RTCMediaStreamTrackType type = RTCMediaStreamTrackTypeVideo;
-    RTCVideoTrack *videoTrack =
-        [[RTCVideoTrack alloc] initWithFactory:_factory nativeTrack:track type:type];
+    RTC_OBJC_TYPE(RTCVideoTrack) *videoTrack =
+        [[RTC_OBJC_TYPE(RTCVideoTrack) alloc] initWithFactory:_factory
+                                                   nativeTrack:track
+                                                          type:type];
     [_videoTracks addObject:videoTrack];
   }
 }

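For orientation, a hedged usage sketch of the wrapped stream API at a call site. The identifiers factory, mic, and localStream are invented for the example, and it assumes the stock RTCPeerConnectionFactory convenience methods audioTrackWithTrackId: and mediaStreamWithStreamId::

// Illustrative only; the track and stream ids are arbitrary strings.
RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory =
    [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
RTC_OBJC_TYPE(RTCAudioTrack) *mic = [factory audioTrackWithTrackId:@"audio0"];
RTC_OBJC_TYPE(RTCMediaStream) *localStream = [factory mediaStreamWithStreamId:@"stream0"];
[localStream addAudioTrack:mic];
NSLog(@"stream %@ has %lu audio track(s)",
      localStream.streamId, (unsigned long)localStream.audioTracks.count);
[localStream removeAudioTrack:mic];
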
@@ -19,11 +19,13 @@ typedef NS_ENUM(NSInteger, RTCMediaStreamTrackType) {

 NS_ASSUME_NONNULL_BEGIN

-@class RTCPeerConnectionFactory;
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactory);

-@interface RTCMediaStreamTrack ()
+@interface RTC_OBJC_TYPE (RTCMediaStreamTrack)
+()

-@property(nonatomic, readonly) RTCPeerConnectionFactory *factory;
+@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCPeerConnectionFactory) *
+    factory;

 /**
  * The native MediaStreamTrackInterface passed in or created during
@@ -34,14 +36,14 @@ NS_ASSUME_NONNULL_BEGIN
 /**
  * Initialize an RTCMediaStreamTrack from a native MediaStreamTrackInterface.
  */
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
                     nativeTrack:(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
                            type:(RTCMediaStreamTrackType)type NS_DESIGNATED_INITIALIZER;

-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
                     nativeTrack:(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack;

-- (BOOL)isEqualToTrack:(RTCMediaStreamTrack *)track;
+- (BOOL)isEqualToTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track;

 + (webrtc::MediaStreamTrackInterface::TrackState)nativeTrackStateForState:
     (RTCMediaStreamTrackState)state;
@@ -51,9 +53,9 @@ NS_ASSUME_NONNULL_BEGIN

 + (NSString *)stringForState:(RTCMediaStreamTrackState)state;

-+ (RTCMediaStreamTrack *)mediaTrackForNativeTrack:
-    (rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
-                    factory:(RTCPeerConnectionFactory *)factory;
++ (RTC_OBJC_TYPE(RTCMediaStreamTrack) *)
+    mediaTrackForNativeTrack:(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
+                      factory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory;

 @end

@@ -26,7 +26,7 @@ RTC_EXTERN NSString *const kRTCMediaStreamTrackKindAudio;
 RTC_EXTERN NSString *const kRTCMediaStreamTrackKindVideo;

 RTC_OBJC_EXPORT
-@interface RTCMediaStreamTrack : NSObject
+@interface RTC_OBJC_TYPE (RTCMediaStreamTrack) : NSObject

 /**
  * The kind of track. For example, "audio" if this track represents an audio

@@ -19,8 +19,8 @@ NSString * const kRTCMediaStreamTrackKindAudio =
 NSString * const kRTCMediaStreamTrackKindVideo =
     @(webrtc::MediaStreamTrackInterface::kVideoKind);

-@implementation RTCMediaStreamTrack {
-  RTCPeerConnectionFactory *_factory;
+@implementation RTC_OBJC_TYPE (RTCMediaStreamTrack) {
+  RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory;
   rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> _nativeTrack;
   RTCMediaStreamTrackType _type;
 }
@@ -47,7 +47,7 @@ NSString * const kRTCMediaStreamTrackKindVideo =

 - (NSString *)description {
   NSString *readyState = [[self class] stringForState:self.readyState];
-  return [NSString stringWithFormat:@"RTCMediaStreamTrack:\n%@\n%@\n%@\n%@",
+  return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCMediaStreamTrack):\n%@\n%@\n%@\n%@",
                                     self.kind,
                                     self.trackId,
                                     self.isEnabled ? @"enabled" : @"disabled",
@@ -61,7 +61,7 @@ NSString * const kRTCMediaStreamTrackKindVideo =
   if (![object isMemberOfClass:[self class]]) {
     return NO;
   }
-  return [self isEqualToTrack:(RTCMediaStreamTrack *)object];
+  return [self isEqualToTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)object];
 }

 - (NSUInteger)hash {
@@ -76,7 +76,7 @@ NSString * const kRTCMediaStreamTrackKindVideo =

 @synthesize factory = _factory;

-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
                     nativeTrack:(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
                            type:(RTCMediaStreamTrackType)type {
   NSParameterAssert(nativeTrack);
@@ -89,7 +89,7 @@ NSString * const kRTCMediaStreamTrackKindVideo =
   return self;
 }

-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
                     nativeTrack:(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack {
   NSParameterAssert(nativeTrack);
   if (nativeTrack->kind() ==
@@ -103,7 +103,7 @@ NSString * const kRTCMediaStreamTrackKindVideo =
   return nil;
 }

-- (BOOL)isEqualToTrack:(RTCMediaStreamTrack *)track {
+- (BOOL)isEqualToTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track {
   if (!track) {
     return NO;
   }
@@ -139,21 +139,22 @@ NSString * const kRTCMediaStreamTrackKindVideo =
   }
 }

-+ (RTCMediaStreamTrack *)mediaTrackForNativeTrack:
-    (rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
-                    factory:(RTCPeerConnectionFactory *)factory {
++ (RTC_OBJC_TYPE(RTCMediaStreamTrack) *)
+    mediaTrackForNativeTrack:(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
+                      factory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory {
   NSParameterAssert(nativeTrack);
   NSParameterAssert(factory);
   if (nativeTrack->kind() == webrtc::MediaStreamTrackInterface::kAudioKind) {
-    return [[RTCAudioTrack alloc] initWithFactory:factory
+    return [[RTC_OBJC_TYPE(RTCAudioTrack) alloc] initWithFactory:factory
                                        nativeTrack:nativeTrack
                                               type:RTCMediaStreamTrackTypeAudio];
   } else if (nativeTrack->kind() == webrtc::MediaStreamTrackInterface::kVideoKind) {
-    return [[RTCVideoTrack alloc] initWithFactory:factory
+    return [[RTC_OBJC_TYPE(RTCVideoTrack) alloc] initWithFactory:factory
                                        nativeTrack:nativeTrack
                                               type:RTCMediaStreamTrackTypeVideo];
   } else {
-    return [[RTCMediaStreamTrack alloc] initWithFactory:factory nativeTrack:nativeTrack];
+    return [[RTC_OBJC_TYPE(RTCMediaStreamTrack) alloc] initWithFactory:factory
+                                                           nativeTrack:nativeTrack];
   }
 }

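A similarly hedged look at the public track surface after wrapping; localStream is the invented stream from the sketch above, and the snippet simply mutes its first audio track:

// Illustrative only; assumes localStream already holds at least one audio track.
RTC_OBJC_TYPE(RTCMediaStreamTrack) *track = localStream.audioTracks.firstObject;
if (track && [track.kind isEqualToString:kRTCMediaStreamTrackKindAudio]) {
  track.isEnabled = NO;  // disable (mute) the track without removing it from the stream
}
NSLog(@"%@ is %@", track.trackId, track.isEnabled ? @"enabled" : @"disabled");
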
@@ -20,4 +20,4 @@
 RTC_EXTERN void RTCEnableMetrics(void);

 /** Gets and clears native histograms. */
-RTC_EXTERN NSArray<RTCMetricsSampleInfo*>* RTCGetAndResetMetrics(void);
+RTC_EXTERN NSArray<RTC_OBJC_TYPE(RTCMetricsSampleInfo) *>* RTCGetAndResetMetrics(void);

@@ -16,7 +16,7 @@ void RTCEnableMetrics(void) {
   webrtc::metrics::Enable();
 }

-NSArray<RTCMetricsSampleInfo *> *RTCGetAndResetMetrics(void) {
+NSArray<RTC_OBJC_TYPE(RTCMetricsSampleInfo) *> *RTCGetAndResetMetrics(void) {
   std::map<std::string, std::unique_ptr<webrtc::metrics::SampleInfo>>
       histograms;
   webrtc::metrics::GetAndReset(&histograms);
@@ -24,8 +24,8 @@ NSArray<RTCMetricsSampleInfo *> *RTCGetAndResetMetrics(void) {
   NSMutableArray *metrics =
       [NSMutableArray arrayWithCapacity:histograms.size()];
   for (auto const &histogram : histograms) {
-    RTCMetricsSampleInfo *metric = [[RTCMetricsSampleInfo alloc]
-        initWithNativeSampleInfo:*histogram.second];
+    RTC_OBJC_TYPE(RTCMetricsSampleInfo) *metric =
+        [[RTC_OBJC_TYPE(RTCMetricsSampleInfo) alloc] initWithNativeSampleInfo:*histogram.second];
     [metrics addObject:metric];
   }
   return metrics;

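A hedged sketch of the wrapped metrics API in use; the property names (name, min, max) follow RTCMetricsSampleInfo.h, and when histograms actually get recorded is left as an assumption:

RTCEnableMetrics();
// ... run some WebRTC calls so that native histograms are recorded ...
NSArray<RTC_OBJC_TYPE(RTCMetricsSampleInfo) *> *samples = RTCGetAndResetMetrics();
for (RTC_OBJC_TYPE(RTCMetricsSampleInfo) *info in samples) {
  NSLog(@"%@ min=%d max=%d", info.name, info.min, info.max);
}
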
@@ -14,10 +14,11 @@

 NS_ASSUME_NONNULL_BEGIN

-@interface RTCMetricsSampleInfo ()
+@interface RTC_OBJC_TYPE (RTCMetricsSampleInfo)
+()

 /** Initialize an RTCMetricsSampleInfo object from native SampleInfo. */
-- (instancetype)initWithNativeSampleInfo:(const webrtc::metrics::SampleInfo &)info;
+- (instancetype)initWithNativeSampleInfo : (const webrtc::metrics::SampleInfo &)info;

 @end

@@ -15,7 +15,7 @@
 NS_ASSUME_NONNULL_BEGIN

 RTC_OBJC_EXPORT
-@interface RTCMetricsSampleInfo : NSObject
+@interface RTC_OBJC_TYPE (RTCMetricsSampleInfo) : NSObject

 /**
  * Example of RTCMetricsSampleInfo:

@@ -12,7 +12,7 @@

 #import "helpers/NSString+StdString.h"

-@implementation RTCMetricsSampleInfo
+@implementation RTC_OBJC_TYPE (RTCMetricsSampleInfo)

 @synthesize name = _name;
 @synthesize min = _min;

@@ -14,10 +14,12 @@
 #import "RTCDataChannelConfiguration+Private.h"
 #import "helpers/NSString+StdString.h"

-@implementation RTCPeerConnection (DataChannel)
+@implementation RTC_OBJC_TYPE (RTCPeerConnection)
+(DataChannel)

-- (nullable RTCDataChannel *)dataChannelForLabel:(NSString *)label
-                                    configuration:(RTCDataChannelConfiguration *)configuration {
+- (nullable RTC_OBJC_TYPE(RTCDataChannel) *)dataChannelForLabel
+    : (NSString *)label configuration
+    : (RTC_OBJC_TYPE(RTCDataChannelConfiguration) *)configuration {
   std::string labelString = [NSString stdStringForString:label];
   const webrtc::DataChannelInit nativeInit =
       configuration.nativeDataChannelInit;
@@ -27,7 +29,8 @@
   if (!dataChannel) {
     return nil;
   }
-  return [[RTCDataChannel alloc] initWithFactory:self.factory nativeDataChannel:dataChannel];
+  return [[RTC_OBJC_TYPE(RTCDataChannel) alloc] initWithFactory:self.factory
+                                               nativeDataChannel:dataChannel];
 }

 @end

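Finally, a hedged sketch of this category in use; peerConnection is assumed to be an already configured RTC_OBJC_TYPE(RTCPeerConnection), and the label "chat" is arbitrary:

RTC_OBJC_TYPE(RTCDataChannelConfiguration) *config =
    [[RTC_OBJC_TYPE(RTCDataChannelConfiguration) alloc] init];
config.isOrdered = YES;  // deliver messages in order
RTC_OBJC_TYPE(RTCDataChannel) *channel =
    [peerConnection dataChannelForLabel:@"chat" configuration:config];
if (!channel) {
  NSLog(@"dataChannelForLabel:configuration: returned nil");
}
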
Some files were not shown because too many files have changed in this diff.