Add an example app for iOS native API.

Demonstrates how to use the iOS native API to wrap components into
C++ classes.

This CL also introduces a native API wrapper for the capturer.

The C++ code is forked from the corresponding CL for Android at
https://webrtc-review.googlesource.com/c/src/+/60540

Bug: webrtc:8832
Change-Id: I12d9f30e701c0222628e329218f6d5bfca26e6e0
Reviewed-on: https://webrtc-review.googlesource.com/61422
Commit-Queue: Anders Carlsson <andersc@webrtc.org>
Reviewed-by: Kári Helgason <kthelgason@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#22484}
This commit is contained in:
Anders Carlsson 2018-03-15 09:41:03 +01:00 committed by Commit Bot
parent 8cf0a87bc3
commit 7311918269
18 changed files with 817 additions and 5 deletions

View file

@ -392,6 +392,66 @@ if (is_ios || (is_mac && target_cpu != "x86")) {
"{{bundle_resources_dir}}/{{source_file_part}}", "{{bundle_resources_dir}}/{{source_file_part}}",
] ]
} }
# Static library with the demo's application code: the UIKit app
# delegate/view controller and the C++ ObjCCallClient that wraps
# Objective-C components via the native API.
rtc_static_library("ObjCNativeAPIDemo_lib") {
  testonly = true
  sources = [
    "objcnativeapi/objc/NADAppDelegate.h",
    "objcnativeapi/objc/NADAppDelegate.m",
    "objcnativeapi/objc/NADViewController.h",
    "objcnativeapi/objc/NADViewController.mm",
    "objcnativeapi/objc/objccallclient.h",
    "objcnativeapi/objc/objccallclient.mm",
  ]
  if (!build_with_chromium && is_clang) {
    # Suppress warnings from the Chromium Clang plugin (bugs.webrtc.org/163).
    suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
  }
  deps = [
    "../api:libjingle_peerconnection_api",
    "../api/audio_codecs:builtin_audio_decoder_factory",
    "../api/audio_codecs:builtin_audio_encoder_factory",
    "../logging:rtc_event_log_impl_base",
    "../media:rtc_audio_video",
    "../modules/audio_processing:audio_processing",
    "../pc:libjingle_peerconnection",
    "../rtc_base:rtc_base",
    "../sdk:default_codec_factory_objc",
    "../sdk:framework_objc",
    "../sdk:native_api",
    "../sdk:ui_objc",
    "../sdk:videotoolbox_objc",
    "../system_wrappers:field_trial_default",
    "../system_wrappers:metrics_default",
    "../system_wrappers:runtime_enabled_features_default",
  ]
  # Metal-backed rendering is only built for 64-bit ARM devices.
  if (current_cpu == "arm64") {
    deps += [ "../sdk:metal_objc" ]
  }
}
# The demo application bundle: main.m entry point plus the library
# target above, packaged with the example's Info.plist.
ios_app_bundle("ObjCNativeAPIDemo") {
  testonly = true
  sources = [
    "objcnativeapi/objc/main.m",
  ]
  info_plist = "objcnativeapi/Info.plist"
  configs += [ "..:common_config" ]
  public_configs = [ "..:common_inherited_config" ]
  deps = [
    ":ObjCNativeAPIDemo_lib",
  ]
  # Simulator (x86) builds also need the iossim runner tool.
  if (target_cpu == "x86") {
    deps += [ "//testing/iossim:iossim" ]
  }
}
} }
if (is_mac) { if (is_mac) {

View file

@ -8,5 +8,6 @@ include_rules = [
"+modules/audio_processing", "+modules/audio_processing",
"+p2p", "+p2p",
"+pc", "+pc",
"+sdk/objc/Framework/Native/api",
"+third_party/libyuv", "+third_party/libyuv",
] ]

View file

@ -0,0 +1,45 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<!-- Info.plist for the ObjCNativeAPIDemo example app. -->
<plist version="1.0">
<dict>
	<key>CFBundleDevelopmentRegion</key>
	<string>en</string>
	<key>CFBundleExecutable</key>
	<string>$(EXECUTABLE_NAME)</string>
	<key>CFBundleIdentifier</key>
	<string>com.google.ObjCNativeAPIDemo</string>
	<key>CFBundleInfoDictionaryVersion</key>
	<string>6.0</string>
	<key>CFBundleName</key>
	<string>ObjCNativeAPIDemo</string>
	<key>CFBundlePackageType</key>
	<string>APPL</string>
	<key>CFBundleShortVersionString</key>
	<string>1.0</string>
	<key>CFBundleVersion</key>
	<string>1</string>
	<key>LSRequiresIPhoneOS</key>
	<true/>
	<key>UIRequiredDeviceCapabilities</key>
	<array>
		<string>armv7</string>
	</array>
	<key>UISupportedInterfaceOrientations</key>
	<array>
		<string>UIInterfaceOrientationPortrait</string>
		<string>UIInterfaceOrientationLandscapeLeft</string>
		<string>UIInterfaceOrientationLandscapeRight</string>
	</array>
	<key>UISupportedInterfaceOrientations~ipad</key>
	<array>
		<string>UIInterfaceOrientationPortrait</string>
		<string>UIInterfaceOrientationPortraitUpsideDown</string>
		<string>UIInterfaceOrientationLandscapeLeft</string>
		<string>UIInterfaceOrientationLandscapeRight</string>
	</array>
	<!-- Usage descriptions are mandatory on iOS 10+; the app is killed
	     at runtime if it accesses the camera/mic without them. -->
	<key>NSCameraUsageDescription</key>
	<string>Camera access needed for video calling</string>
	<key>NSMicrophoneUsageDescription</key>
	<string>Microphone access needed for video calling</string>
</dict>
</plist>

View file

@ -0,0 +1,17 @@
/*
* Copyright 2018 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <UIKit/UIKit.h>
// Application delegate for the ObjCNativeAPIDemo app. Creates the key
// window and installs the root view controller at launch (see
// -application:didFinishLaunchingWithOptions: in the .m file).
@interface NADAppDelegate : UIResponder<UIApplicationDelegate>

// The app's main window, created at launch.
@property(strong, nonatomic) UIWindow* window;

@end

View file

@ -0,0 +1,63 @@
/*
* Copyright 2018 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "NADAppDelegate.h"
#import "NADViewController.h"
// Private class extension (no private members yet).
@interface NADAppDelegate ()
@end

@implementation NADAppDelegate

@synthesize window = _window;

// Creates the main window and installs NADViewController as its root.
// The root view controller is assigned BEFORE the window is made key
// and visible, which is the order UIKit expects at launch.
- (BOOL)application:(UIApplication *)application
    didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
  _window = [[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]];

  NADViewController *viewController = [[NADViewController alloc] init];
  _window.rootViewController = viewController;
  [_window makeKeyAndVisible];

  return YES;
}

- (void)applicationWillResignActive:(UIApplication *)application {
  // Sent when the application is about to move from active to inactive state. This can occur for
  // certain types of temporary interruptions (such as an incoming phone call or SMS message) or
  // when the user quits the application and it begins the transition to the background state. Use
  // this method to pause ongoing tasks, disable timers, and invalidate graphics rendering
  // callbacks. Games should use this method to pause the game.
}

- (void)applicationDidEnterBackground:(UIApplication *)application {
  // Use this method to release shared resources, save user data, invalidate timers, and store
  // enough application state information to restore your application to its current state in case
  // it is terminated later. If your application supports background execution, this method is
  // called instead of applicationWillTerminate: when the user quits.
}

- (void)applicationWillEnterForeground:(UIApplication *)application {
  // Called as part of the transition from the background to the active state; here you can undo
  // many of the changes made on entering the background.
}

- (void)applicationDidBecomeActive:(UIApplication *)application {
  // Restart any tasks that were paused (or not yet started) while the application was inactive. If
  // the application was previously in the background, optionally refresh the user interface.
}

- (void)applicationWillTerminate:(UIApplication *)application {
  // Called when the application is about to terminate. Save data if appropriate. See also
  // applicationDidEnterBackground:.
}

@end

View file

@ -0,0 +1,15 @@
/*
* Copyright 2018 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <UIKit/UIKit.h>
// View controller hosting the demo call UI: local camera preview,
// remote video view, and Call / Hang up buttons (see the .mm file).
@interface NADViewController : UIViewController
@end

View file

@ -0,0 +1,157 @@
/*
* Copyright 2018 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "NADViewController.h"
#import <WebRTC/RTCCameraPreviewView.h>
#import <WebRTC/RTCCameraVideoCapturer.h>
#import <WebRTC/RTCEAGLVideoView.h>
#import <WebRTC/RTCMTLVideoView.h>
#import <WebRTC/RTCVideoRenderer.h>
#include <memory>
#include "objccallclient.h"
@interface NADViewController ()

// Camera capturer; its capture session also feeds the local preview.
@property(nonatomic) RTCCameraVideoCapturer *capturer;
// Shows the capturer's AVCaptureSession as the local video preview.
@property(nonatomic) RTCCameraPreviewView *localVideoView;
// Renders remote video; Metal- or OpenGL-backed depending on build
// (chosen in -loadView via RTC_SUPPORTS_METAL).
@property(nonatomic) __kindof UIView<RTCVideoRenderer> *remoteVideoView;
// Starts the loopback call (-call:).
@property(nonatomic) UIButton *callButton;
// Ends the call (-hangUp:).
@property(nonatomic) UIButton *hangUpButton;

@end
@implementation NADViewController {
  // C++ call client; owns the PeerConnection machinery.
  std::unique_ptr<webrtc_examples::ObjCCallClient> _call_client;
  UIView *_view;
}

@synthesize capturer = _capturer;
@synthesize localVideoView = _localVideoView;
@synthesize remoteVideoView = _remoteVideoView;
@synthesize callButton = _callButton;
@synthesize hangUpButton = _hangUpButton;

#pragma mark - View controller lifecycle

// Builds the view hierarchy programmatically: remote video fills the
// margins, a small local preview sits top-left, and the two buttons
// sit along the bottom edge.
- (void)loadView {
  _view = [[UIView alloc] initWithFrame:CGRectZero];

#if defined(RTC_SUPPORTS_METAL)
  _remoteVideoView = [[RTCMTLVideoView alloc] initWithFrame:CGRectZero];
#else
  _remoteVideoView = [[RTCEAGLVideoView alloc] initWithFrame:CGRectZero];
#endif
  _remoteVideoView.translatesAutoresizingMaskIntoConstraints = NO;
  [_view addSubview:_remoteVideoView];

  _localVideoView = [[RTCCameraPreviewView alloc] initWithFrame:CGRectZero];
  _localVideoView.translatesAutoresizingMaskIntoConstraints = NO;
  [_view addSubview:_localVideoView];

  _callButton = [UIButton buttonWithType:UIButtonTypeSystem];
  _callButton.translatesAutoresizingMaskIntoConstraints = NO;
  [_callButton setTitle:@"Call" forState:UIControlStateNormal];
  [_callButton addTarget:self action:@selector(call:) forControlEvents:UIControlEventTouchUpInside];
  [_view addSubview:_callButton];

  _hangUpButton = [UIButton buttonWithType:UIButtonTypeSystem];
  _hangUpButton.translatesAutoresizingMaskIntoConstraints = NO;
  [_hangUpButton setTitle:@"Hang up" forState:UIControlStateNormal];
  [_hangUpButton addTarget:self
                    action:@selector(hangUp:)
          forControlEvents:UIControlEventTouchUpInside];
  [_view addSubview:_hangUpButton];

  UILayoutGuide *margin = _view.layoutMarginsGuide;
  [_remoteVideoView.leadingAnchor constraintEqualToAnchor:margin.leadingAnchor].active = YES;
  [_remoteVideoView.topAnchor constraintEqualToAnchor:margin.topAnchor].active = YES;
  [_remoteVideoView.trailingAnchor constraintEqualToAnchor:margin.trailingAnchor].active = YES;
  [_remoteVideoView.bottomAnchor constraintEqualToAnchor:margin.bottomAnchor].active = YES;

  [_localVideoView.leadingAnchor constraintEqualToAnchor:margin.leadingAnchor constant:8.0].active =
      YES;
  [_localVideoView.topAnchor constraintEqualToAnchor:margin.topAnchor constant:8.0].active = YES;
  [_localVideoView.widthAnchor constraintEqualToConstant:60].active = YES;
  [_localVideoView.heightAnchor constraintEqualToConstant:60].active = YES;

  [_callButton.leadingAnchor constraintEqualToAnchor:margin.leadingAnchor constant:8.0].active =
      YES;
  [_callButton.bottomAnchor constraintEqualToAnchor:margin.bottomAnchor constant:8.0].active = YES;
  [_callButton.widthAnchor constraintEqualToConstant:100].active = YES;
  [_callButton.heightAnchor constraintEqualToConstant:40].active = YES;

  [_hangUpButton.trailingAnchor constraintEqualToAnchor:margin.trailingAnchor constant:8.0].active =
      YES;
  [_hangUpButton.bottomAnchor constraintEqualToAnchor:margin.bottomAnchor constant:8.0].active =
      YES;
  [_hangUpButton.widthAnchor constraintEqualToConstant:100].active = YES;
  [_hangUpButton.heightAnchor constraintEqualToConstant:40].active = YES;

  self.view = _view;
}

// Creates the capturer and call client, then starts capturing from the
// front camera using the format closest to 640x480.
- (void)viewDidLoad {
  [super viewDidLoad];

  self.capturer = [[RTCCameraVideoCapturer alloc] init];
  self.localVideoView.captureSession = self.capturer.captureSession;

  _call_client.reset(new webrtc_examples::ObjCCallClient());

  // Start capturer.
  AVCaptureDevice *selectedDevice = nil;
  NSArray<AVCaptureDevice *> *captureDevices = [RTCCameraVideoCapturer captureDevices];
  for (AVCaptureDevice *device in captureDevices) {
    if (device.position == AVCaptureDevicePositionFront) {
      selectedDevice = device;
      break;
    }
  }
  if (selectedDevice == nil) {
    // No front-facing camera (e.g. simulator); don't start capture.
    NSLog(@"No front-facing capture device found; video capture not started.");
    return;
  }

  // Pick the format whose dimensions are closest to 640x480, breaking
  // ties in favor of the capturer's preferred pixel format.
  AVCaptureDeviceFormat *selectedFormat = nil;
  int targetWidth = 640;
  int targetHeight = 480;
  int currentDiff = INT_MAX;
  NSArray<AVCaptureDeviceFormat *> *formats =
      [RTCCameraVideoCapturer supportedFormatsForDevice:selectedDevice];
  for (AVCaptureDeviceFormat *format in formats) {
    CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
    FourCharCode pixelFormat = CMFormatDescriptionGetMediaSubType(format.formatDescription);
    int diff = abs(targetWidth - dimension.width) + abs(targetHeight - dimension.height);
    if (diff < currentDiff) {
      selectedFormat = format;
      currentDiff = diff;
    } else if (diff == currentDiff && pixelFormat == [_capturer preferredOutputPixelFormat]) {
      selectedFormat = format;
    }
  }
  if (selectedFormat == nil) {
    NSLog(@"No supported capture format found; video capture not started.");
    return;
  }

  [self.capturer startCaptureWithDevice:selectedDevice format:selectedFormat fps:30];
}

- (void)didReceiveMemoryWarning {
  [super didReceiveMemoryWarning];
  // Dispose of any resources that can be recreated.
}

#pragma mark - Actions

// Starts a loopback call, feeding the capturer in and rendering the
// remote track into the remote video view.
- (IBAction)call:(id)sender {
  _call_client->Call(self.capturer, self.remoteVideoView);
}

// Tears down the current call (the capturer keeps running).
- (IBAction)hangUp:(id)sender {
  _call_client->Hangup();
}

@end

View file

@ -0,0 +1,18 @@
/*
* Copyright 2018 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <UIKit/UIKit.h>
#import "NADAppDelegate.h"
// App entry point: hands control to UIKit, naming NADAppDelegate as
// the application delegate class.
int main(int argc, char* argv[]) {
  @autoreleasepool {
    return UIApplicationMain(argc, argv, nil, NSStringFromClass([NADAppDelegate class]));
  }
}

View file

@ -0,0 +1,84 @@
/*
* Copyright 2018 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef EXAMPLES_OBJCNATIVEAPI_OBJCCALLCLIENT_H_
#define EXAMPLES_OBJCNATIVEAPI_OBJCCALLCLIENT_H_
#include <memory>
#include <string>
#include "api/peerconnectioninterface.h"
#include "rtc_base/criticalsection.h"
#include "rtc_base/scoped_ref_ptr.h"
#include "rtc_base/thread_checker.h"
@class RTCVideoCapturer;
@protocol RTCVideoRenderer;
namespace webrtc_examples {

// Example client that wraps Objective-C components (capturer,
// renderer) into the WebRTC C++ native API and runs a loopback call.
// Public methods must all be invoked on the same thread; this is
// enforced with |thread_checker_|.
class ObjCCallClient {
 public:
  ObjCCallClient();

  // Starts a loopback call: |capturer| supplies the local video and
  // the remote track is rendered into |remote_renderer|. Logs a
  // warning and returns if a call is already in progress.
  void Call(RTCVideoCapturer* capturer, id<RTCVideoRenderer> remote_renderer);
  // Closes and releases the PeerConnection and drops the sink/source
  // references. Safe to call when no call is active.
  void Hangup();

 private:
  // Receives PeerConnection callbacks; mostly logs them, and feeds
  // locally generated ICE candidates back into the same connection
  // (loopback).
  class PCObserver : public webrtc::PeerConnectionObserver {
   public:
    explicit PCObserver(ObjCCallClient* client);

    void OnSignalingChange(
        webrtc::PeerConnectionInterface::SignalingState new_state) override;
    void OnDataChannel(
        rtc::scoped_refptr<webrtc::DataChannelInterface> data_channel) override;
    void OnRenegotiationNeeded() override;
    void OnIceConnectionChange(
        webrtc::PeerConnectionInterface::IceConnectionState new_state) override;
    void OnIceGatheringChange(
        webrtc::PeerConnectionInterface::IceGatheringState new_state) override;
    void OnIceCandidate(
        const webrtc::IceCandidateInterface* candidate) override;

   private:
    // Back-pointer to the owning client; not owned.
    const ObjCCallClient* client_;
  };

  void CreatePeerConnectionFactory() RTC_RUN_ON(thread_checker_);
  void CreatePeerConnection() RTC_RUN_ON(thread_checker_);
  void Connect() RTC_RUN_ON(thread_checker_);

  rtc::ThreadChecker thread_checker_;

  // True while a call is in progress (between Call() and Hangup()).
  bool call_started_ RTC_GUARDED_BY(thread_checker_);

  const std::unique_ptr<PCObserver> pc_observer_;

  rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> pcf_
      RTC_GUARDED_BY(thread_checker_);
  std::unique_ptr<rtc::Thread> network_thread_ RTC_GUARDED_BY(thread_checker_);
  std::unique_ptr<rtc::Thread> worker_thread_ RTC_GUARDED_BY(thread_checker_);
  std::unique_ptr<rtc::Thread> signaling_thread_
      RTC_GUARDED_BY(thread_checker_);

  // Sink wrapping the ObjC remote renderer.
  std::unique_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> remote_sink_
      RTC_GUARDED_BY(thread_checker_);
  // Source wrapping the ObjC capturer.
  rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> video_source_
      RTC_GUARDED_BY(thread_checker_);

  // Protects |pc_|, which is also touched from observer callbacks.
  rtc::CriticalSection pc_mutex_;
  rtc::scoped_refptr<webrtc::PeerConnectionInterface> pc_
      RTC_GUARDED_BY(pc_mutex_);
};

}  // namespace webrtc_examples
#endif // EXAMPLES_OBJCNATIVEAPI_OBJCCALLCLIENT_H_

View file

@ -0,0 +1,237 @@
/*
* Copyright 2018 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "examples/objcnativeapi/objc/objccallclient.h"
#include <utility>
#import <WebRTC/RTCCameraPreviewView.h>
#import <WebRTC/RTCVideoCodecFactory.h>
#import <WebRTC/RTCVideoRenderer.h>
#include "api/audio_codecs/builtin_audio_decoder_factory.h"
#include "api/audio_codecs/builtin_audio_encoder_factory.h"
#include "api/peerconnectioninterface.h"
#include "media/engine/webrtcmediaengine.h"
#include "modules/audio_processing/include/audio_processing.h"
#include "rtc_base/ptr_util.h"
#include "sdk/objc/Framework/Native/api/video_capturer.h"
#include "sdk/objc/Framework/Native/api/video_decoder_factory.h"
#include "sdk/objc/Framework/Native/api/video_encoder_factory.h"
#include "sdk/objc/Framework/Native/api/video_renderer.h"
namespace webrtc_examples {

namespace {

// Receives the result of CreateOffer(); on success it sets the local
// description and fabricates a matching remote answer (loopback).
class CreateOfferObserver : public webrtc::CreateSessionDescriptionObserver {
 public:
  explicit CreateOfferObserver(rtc::scoped_refptr<webrtc::PeerConnectionInterface> pc);

  void OnSuccess(webrtc::SessionDescriptionInterface* desc) override;
  void OnFailure(const std::string& error) override;

 private:
  const rtc::scoped_refptr<webrtc::PeerConnectionInterface> pc_;
};

// Logs the outcome of SetRemoteDescription().
class SetRemoteSessionDescriptionObserver : public webrtc::SetRemoteDescriptionObserverInterface {
 public:
  void OnSetRemoteDescriptionComplete(webrtc::RTCError error) override;
};

// Logs the outcome of SetLocalDescription().
class SetLocalSessionDescriptionObserver : public webrtc::SetSessionDescriptionObserver {
 public:
  void OnSuccess() override;
  void OnFailure(const std::string& error) override;
};

}  // namespace

// Detach the thread checker so it binds to whichever thread makes the
// first checked call, then eagerly build the PeerConnectionFactory.
ObjCCallClient::ObjCCallClient()
    : call_started_(false), pc_observer_(rtc::MakeUnique<PCObserver>(this)) {
  thread_checker_.DetachFromThread();
  CreatePeerConnectionFactory();
}

// Wraps the ObjC capturer/renderer into native source/sink objects and
// starts a loopback call. No-op (with a warning) if already in a call.
void ObjCCallClient::Call(RTCVideoCapturer* capturer, id<RTCVideoRenderer> remote_renderer) {
  RTC_DCHECK_RUN_ON(&thread_checker_);

  rtc::CritScope lock(&pc_mutex_);
  if (call_started_) {
    RTC_LOG(LS_WARNING) << "Call already started.";
    return;
  }
  call_started_ = true;

  remote_sink_ = webrtc::ObjCToNativeVideoRenderer(remote_renderer);
  video_source_ =
      webrtc::ObjCToNativeVideoCapturer(capturer, signaling_thread_.get(), worker_thread_.get());

  CreatePeerConnection();
  Connect();
}

// Closes the PeerConnection (under |pc_mutex_| since observer
// callbacks also touch |pc_|) and drops the sink/source references.
void ObjCCallClient::Hangup() {
  RTC_DCHECK_RUN_ON(&thread_checker_);

  call_started_ = false;

  {
    rtc::CritScope lock(&pc_mutex_);
    if (pc_ != nullptr) {
      pc_->Close();
      pc_ = nullptr;
    }
  }

  remote_sink_ = nullptr;
  video_source_ = nullptr;
}

// Spins up the network/worker/signaling threads, builds codec
// factories from the ObjC default factories via the native API
// wrappers, and assembles the modular PeerConnectionFactory.
void ObjCCallClient::CreatePeerConnectionFactory() {
  network_thread_ = rtc::Thread::CreateWithSocketServer();
  network_thread_->SetName("network_thread", nullptr);
  RTC_CHECK(network_thread_->Start()) << "Failed to start thread";

  worker_thread_ = rtc::Thread::Create();
  worker_thread_->SetName("worker_thread", nullptr);
  RTC_CHECK(worker_thread_->Start()) << "Failed to start thread";

  signaling_thread_ = rtc::Thread::Create();
  signaling_thread_->SetName("signaling_thread", nullptr);
  RTC_CHECK(signaling_thread_->Start()) << "Failed to start thread";

  std::unique_ptr<webrtc::VideoDecoderFactory> videoDecoderFactory =
      webrtc::ObjCToNativeVideoDecoderFactory([[RTCDefaultVideoDecoderFactory alloc] init]);
  std::unique_ptr<webrtc::VideoEncoderFactory> videoEncoderFactory =
      webrtc::ObjCToNativeVideoEncoderFactory([[RTCDefaultVideoEncoderFactory alloc] init]);

  std::unique_ptr<cricket::MediaEngineInterface> media_engine =
      cricket::WebRtcMediaEngineFactory::Create(nullptr /* adm */,
                                                webrtc::CreateBuiltinAudioEncoderFactory(),
                                                webrtc::CreateBuiltinAudioDecoderFactory(),
                                                std::move(videoEncoderFactory),
                                                std::move(videoDecoderFactory),
                                                nullptr /* audio_mixer */,
                                                webrtc::AudioProcessingBuilder().Create());
  RTC_LOG(LS_INFO) << "Media engine created: " << media_engine.get();

  pcf_ = webrtc::CreateModularPeerConnectionFactory(network_thread_.get(),
                                                    worker_thread_.get(),
                                                    signaling_thread_.get(),
                                                    std::move(media_engine),
                                                    webrtc::CreateCallFactory(),
                                                    webrtc::CreateRtcEventLogFactory());
  RTC_LOG(LS_INFO) << "PeerConnectionFactory created: " << pcf_;
}

// Creates the PeerConnection, adds the local video track from
// |video_source_|, and attaches |remote_sink_| to the first remote
// video track exposed by the transceivers.
void ObjCCallClient::CreatePeerConnection() {
  rtc::CritScope lock(&pc_mutex_);
  webrtc::PeerConnectionInterface::RTCConfiguration config;
  config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan;
  // DTLS SRTP has to be disabled for loopback to work.
  config.enable_dtls_srtp = false;
  pc_ = pcf_->CreatePeerConnection(
      config, nullptr /* port_allocator */, nullptr /* cert_generator */, pc_observer_.get());
  RTC_LOG(LS_INFO) << "PeerConnection created: " << pc_;

  rtc::scoped_refptr<webrtc::VideoTrackInterface> local_video_track =
      pcf_->CreateVideoTrack("video", video_source_);
  pc_->AddTransceiver(local_video_track);
  RTC_LOG(LS_INFO) << "Local video sink set up: " << local_video_track;

  for (const rtc::scoped_refptr<webrtc::RtpTransceiverInterface>& tranceiver :
       pc_->GetTransceivers()) {
    rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> track = tranceiver->receiver()->track();
    if (track && track->kind() == webrtc::MediaStreamTrackInterface::kVideoKind) {
      static_cast<webrtc::VideoTrackInterface*>(track.get())
          ->AddOrUpdateSink(remote_sink_.get(), rtc::VideoSinkWants());
      RTC_LOG(LS_INFO) << "Remote video sink set up: " << track;
      break;
    }
  }
}

// Kicks off offer creation; CreateOfferObserver completes the
// loopback handshake when the offer arrives.
void ObjCCallClient::Connect() {
  rtc::CritScope lock(&pc_mutex_);
  pc_->CreateOffer(new rtc::RefCountedObject<CreateOfferObserver>(pc_),
                   webrtc::PeerConnectionInterface::RTCOfferAnswerOptions());
}

ObjCCallClient::PCObserver::PCObserver(ObjCCallClient* client) : client_(client) {}

void ObjCCallClient::PCObserver::OnSignalingChange(
    webrtc::PeerConnectionInterface::SignalingState new_state) {
  RTC_LOG(LS_INFO) << "OnSignalingChange: " << new_state;
}

void ObjCCallClient::PCObserver::OnDataChannel(
    rtc::scoped_refptr<webrtc::DataChannelInterface> data_channel) {
  RTC_LOG(LS_INFO) << "OnDataChannel";
}

void ObjCCallClient::PCObserver::OnRenegotiationNeeded() {
  RTC_LOG(LS_INFO) << "OnRenegotiationNeeded";
}

void ObjCCallClient::PCObserver::OnIceConnectionChange(
    webrtc::PeerConnectionInterface::IceConnectionState new_state) {
  RTC_LOG(LS_INFO) << "OnIceConnectionChange: " << new_state;
}

void ObjCCallClient::PCObserver::OnIceGatheringChange(
    webrtc::PeerConnectionInterface::IceGatheringState new_state) {
  RTC_LOG(LS_INFO) << "OnIceGatheringChange: " << new_state;
}

// Loopback: each locally gathered candidate is added straight back to
// the same PeerConnection.
void ObjCCallClient::PCObserver::OnIceCandidate(const webrtc::IceCandidateInterface* candidate) {
  RTC_LOG(LS_INFO) << "OnIceCandidate: " << candidate->server_url();
  rtc::CritScope lock(&client_->pc_mutex_);
  RTC_DCHECK(client_->pc_ != nullptr);
  client_->pc_->AddIceCandidate(candidate);
}

CreateOfferObserver::CreateOfferObserver(rtc::scoped_refptr<webrtc::PeerConnectionInterface> pc)
    : pc_(pc) {}

// Sets the offer as the local description, then builds a fake answer
// from the same SDP and sets it as the remote description (loopback).
void CreateOfferObserver::OnSuccess(webrtc::SessionDescriptionInterface* desc) {
  std::string sdp;
  desc->ToString(&sdp);
  RTC_LOG(LS_INFO) << "Created offer: " << sdp;

  // Ownership of desc was transferred to us, now we transfer it forward.
  pc_->SetLocalDescription(new rtc::RefCountedObject<SetLocalSessionDescriptionObserver>(), desc);

  // Generate a fake answer.
  std::unique_ptr<webrtc::SessionDescriptionInterface> answer(
      webrtc::CreateSessionDescription(webrtc::SdpType::kAnswer, sdp));
  pc_->SetRemoteDescription(std::move(answer),
                            new rtc::RefCountedObject<SetRemoteSessionDescriptionObserver>());
}

void CreateOfferObserver::OnFailure(const std::string& error) {
  RTC_LOG(LS_INFO) << "Failed to create offer: " << error;
}

void SetRemoteSessionDescriptionObserver::OnSetRemoteDescriptionComplete(webrtc::RTCError error) {
  RTC_LOG(LS_INFO) << "Set remote description: " << error.message();
}

void SetLocalSessionDescriptionObserver::OnSuccess() {
  RTC_LOG(LS_INFO) << "Set local description success!";
}

void SetLocalSessionDescriptionObserver::OnFailure(const std::string& error) {
  RTC_LOG(LS_INFO) << "Set local description failure: " << error;
}

}  // namespace webrtc_examples

View file

@ -296,13 +296,29 @@ if (is_ios || is_mac) {
} }
} }
rtc_static_library("videocapturebase_objc") {
visibility = [ "*" ]
sources = [
"objc/Framework/Classes/PeerConnection/RTCVideoCapturer.m",
"objc/Framework/Headers/WebRTC/RTCVideoCapturer.h",
]
libs = [ "AVFoundation.framework" ]
configs += [ "..:common_objc" ]
public_configs = [ ":common_config_objc" ]
deps = [
":common_objc",
":videoframebuffer_objc",
]
}
rtc_static_library("videocapture_objc") { rtc_static_library("videocapture_objc") {
visibility = [ "*" ] visibility = [ "*" ]
sources = [ sources = [
"objc/Framework/Classes/PeerConnection/RTCCameraVideoCapturer.m", "objc/Framework/Classes/PeerConnection/RTCCameraVideoCapturer.m",
"objc/Framework/Classes/PeerConnection/RTCVideoCapturer.m",
"objc/Framework/Headers/WebRTC/RTCCameraVideoCapturer.h", "objc/Framework/Headers/WebRTC/RTCCameraVideoCapturer.h",
"objc/Framework/Headers/WebRTC/RTCVideoCapturer.h",
] ]
if (is_ios) { if (is_ios) {
sources += [ sources += [
@ -325,6 +341,7 @@ if (is_ios || is_mac) {
deps = [ deps = [
":common_objc", ":common_objc",
":video_objc", ":video_objc",
":videocapturebase_objc",
":videoframebuffer_objc", ":videoframebuffer_objc",
] ]
} }
@ -922,6 +939,8 @@ if (is_ios || is_mac) {
rtc_static_library("native_api") { rtc_static_library("native_api") {
visibility = [ "*" ] visibility = [ "*" ]
sources = [ sources = [
"objc/Framework/Native/api/video_capturer.h",
"objc/Framework/Native/api/video_capturer.mm",
"objc/Framework/Native/api/video_decoder_factory.h", "objc/Framework/Native/api/video_decoder_factory.h",
"objc/Framework/Native/api/video_decoder_factory.mm", "objc/Framework/Native/api/video_decoder_factory.mm",
"objc/Framework/Native/api/video_encoder_factory.h", "objc/Framework/Native/api/video_encoder_factory.h",
@ -946,9 +965,11 @@ if (is_ios || is_mac) {
deps = [ deps = [
":native_video", ":native_video",
":videocapturebase_objc",
":videocodec_objc", ":videocodec_objc",
":videoframebuffer_objc", ":videoframebuffer_objc",
":videorenderer_objc", ":videorenderer_objc",
"../api:libjingle_peerconnection_api",
"../api:video_frame_api", "../api:video_frame_api",
"../api/video_codecs:video_codecs_api", "../api/video_codecs:video_codecs_api",
"../common_video", "../common_video",
@ -984,6 +1005,7 @@ if (is_ios || is_mac) {
deps = [ deps = [
":common_objc", ":common_objc",
":videocapturebase_objc",
":videocodec_objc", ":videocodec_objc",
":videoframebuffer_objc", ":videoframebuffer_objc",
":videorenderer_objc", ":videorenderer_objc",

View file

@ -46,6 +46,10 @@ const int64_t kNanosecondsPerSecond = 1000000000;
@synthesize frameQueue = _frameQueue; @synthesize frameQueue = _frameQueue;
@synthesize captureSession = _captureSession; @synthesize captureSession = _captureSession;
// Convenience initializer: no delegate and a freshly created
// AVCaptureSession (used by the native API, which installs its own
// delegate later via the writable |delegate| property).
- (instancetype)init {
  return [self initWithDelegate:nil captureSession:[[AVCaptureSession alloc] init]];
}
- (instancetype)initWithDelegate:(__weak id<RTCVideoCapturerDelegate>)delegate { - (instancetype)initWithDelegate:(__weak id<RTCVideoCapturerDelegate>)delegate {
return [self initWithDelegate:delegate captureSession:[[AVCaptureSession alloc] init]]; return [self initWithDelegate:delegate captureSession:[[AVCaptureSession alloc] init]];
} }

View file

@ -15,7 +15,6 @@
@synthesize delegate = _delegate; @synthesize delegate = _delegate;
- (instancetype)initWithDelegate:(id<RTCVideoCapturerDelegate>)delegate { - (instancetype)initWithDelegate:(id<RTCVideoCapturerDelegate>)delegate {
NSAssert(delegate != nil, @"delegate cannot be nil");
if (self = [super init]) { if (self = [super init]) {
_delegate = delegate; _delegate = delegate;
} }

View file

@ -22,7 +22,7 @@ RTC_EXPORT
RTC_EXPORT RTC_EXPORT
@interface RTCVideoCapturer : NSObject @interface RTCVideoCapturer : NSObject
@property(nonatomic, readonly, weak) id<RTCVideoCapturerDelegate> delegate; @property(nonatomic, weak) id<RTCVideoCapturerDelegate> delegate;
- (instancetype)initWithDelegate:(id<RTCVideoCapturerDelegate>)delegate; - (instancetype)initWithDelegate:(id<RTCVideoCapturerDelegate>)delegate;

View file

@ -0,0 +1,28 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef SDK_OBJC_FRAMEWORK_NATIVE_API_VIDEO_CAPTURER_H_
#define SDK_OBJC_FRAMEWORK_NATIVE_API_VIDEO_CAPTURER_H_
#import "WebRTC/RTCVideoCapturer.h"
#include "api/mediastreaminterface.h"
#include "rtc_base/scoped_ref_ptr.h"
namespace webrtc {

// Wraps |objc_video_capturer| into a native VideoTrackSourceInterface.
// Frames delivered by the capturer are forwarded into the returned
// source; the source is proxied onto |signaling_thread|/|worker_thread|.
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> ObjCToNativeVideoCapturer(
    RTCVideoCapturer* objc_video_capturer,
    rtc::Thread* signaling_thread,
    rtc::Thread* worker_thread);

}  // namespace webrtc
#endif // SDK_OBJC_FRAMEWORK_NATIVE_API_VIDEO_CAPTURER_H_

View file

@ -0,0 +1,35 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "sdk/objc/Framework/Native/api/video_capturer.h"
#include "api/videosourceproxy.h"
#include "rtc_base/ptr_util.h"
#include "sdk/objc/Framework/Native/src/objc_video_track_source.h"
namespace webrtc {

rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> ObjCToNativeVideoCapturer(
    RTCVideoCapturer *objc_video_capturer,
    rtc::Thread *signaling_thread,
    rtc::Thread *worker_thread) {
  // The adapter bridges the capturer's ObjC delegate callbacks into
  // the C++ track source; the track source retains the adapter.
  RTCObjCVideoSourceAdapter *adapter = [[RTCObjCVideoSourceAdapter alloc] init];
  rtc::scoped_refptr<webrtc::ObjCVideoTrackSource> objc_video_track_source(
      new rtc::RefCountedObject<webrtc::ObjCVideoTrackSource>(adapter));
  // Proxy the source so its methods run on the right threads.
  rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> video_source =
      webrtc::VideoTrackSourceProxy::Create(
          signaling_thread, worker_thread, objc_video_track_source);

  // Hook the capturer up to the adapter so frames start flowing into
  // the native source. NOTE(review): the capturer's |delegate| is a
  // weak property, so the adapter stays alive only via the track
  // source's reference — verify lifetime assumptions at call sites.
  objc_video_capturer.delegate = adapter;

  return video_source;
}

}  // namespace webrtc

View file

@ -11,17 +11,23 @@
#ifndef SDK_OBJC_FRAMEWORK_CLASSES_VIDEO_OBJCVIDEOTRACKSOURCE_H_ #ifndef SDK_OBJC_FRAMEWORK_CLASSES_VIDEO_OBJCVIDEOTRACKSOURCE_H_
#define SDK_OBJC_FRAMEWORK_CLASSES_VIDEO_OBJCVIDEOTRACKSOURCE_H_ #define SDK_OBJC_FRAMEWORK_CLASSES_VIDEO_OBJCVIDEOTRACKSOURCE_H_
#import "WebRTC/RTCVideoCapturer.h"
#include "WebRTC/RTCMacros.h" #include "WebRTC/RTCMacros.h"
#include "media/base/adaptedvideotracksource.h" #include "media/base/adaptedvideotracksource.h"
#include "rtc_base/timestampaligner.h" #include "rtc_base/timestampaligner.h"
RTC_FWD_DECL_OBJC_CLASS(RTCVideoFrame); RTC_FWD_DECL_OBJC_CLASS(RTCVideoFrame);
@interface RTCObjCVideoSourceAdapter : NSObject<RTCVideoCapturerDelegate>
@end
namespace webrtc { namespace webrtc {
class ObjCVideoTrackSource : public rtc::AdaptedVideoTrackSource { class ObjCVideoTrackSource : public rtc::AdaptedVideoTrackSource {
public: public:
ObjCVideoTrackSource(); ObjCVideoTrackSource();
explicit ObjCVideoTrackSource(RTCObjCVideoSourceAdapter* adapter);
// This class can not be used for implementing screen casting. Hopefully, this // This class can not be used for implementing screen casting. Hopefully, this
// function will be removed before we add that to iOS/Mac. // function will be removed before we add that to iOS/Mac.
@ -36,13 +42,16 @@ class ObjCVideoTrackSource : public rtc::AdaptedVideoTrackSource {
bool remote() const override { return false; } bool remote() const override { return false; }
// Called by RTCVideoSource.
void OnCapturedFrame(RTCVideoFrame* frame); void OnCapturedFrame(RTCVideoFrame* frame);
// Called by RTCVideoSource.
void OnOutputFormatRequest(int width, int height, int fps); void OnOutputFormatRequest(int width, int height, int fps);
private: private:
rtc::VideoBroadcaster broadcaster_; rtc::VideoBroadcaster broadcaster_;
rtc::TimestampAligner timestamp_aligner_; rtc::TimestampAligner timestamp_aligner_;
RTCObjCVideoSourceAdapter* adapter_;
}; };
} // namespace webrtc } // namespace webrtc

View file

@ -16,10 +16,28 @@
#include "api/video/i420_buffer.h" #include "api/video/i420_buffer.h"
#include "sdk/objc/Framework/Native/src/objc_frame_buffer.h" #include "sdk/objc/Framework/Native/src/objc_frame_buffer.h"
@interface RTCObjCVideoSourceAdapter ()
// Raw back-pointer to the C++ track source that consumes frames; set
// by ObjCVideoTrackSource's constructor. Not owned — the track source
// outlives the adapter it retains.
@property(nonatomic) webrtc::ObjCVideoTrackSource *objCVideoTrackSource;
@end

@implementation RTCObjCVideoSourceAdapter

@synthesize objCVideoTrackSource = _objCVideoTrackSource;

// RTCVideoCapturerDelegate: forwards each captured frame into the C++
// track source.
- (void)capturer:(RTCVideoCapturer *)capturer didCaptureVideoFrame:(RTCVideoFrame *)frame {
  _objCVideoTrackSource->OnCapturedFrame(frame);
}

@end
namespace webrtc { namespace webrtc {
ObjCVideoTrackSource::ObjCVideoTrackSource() {} ObjCVideoTrackSource::ObjCVideoTrackSource() {}
// Constructs a track source fed by |adapter|, registering this object
// as the adapter's frame destination so delegate callbacks reach
// OnCapturedFrame().
ObjCVideoTrackSource::ObjCVideoTrackSource(RTCObjCVideoSourceAdapter *adapter) : adapter_(adapter) {
  adapter_.objCVideoTrackSource = this;
}
void ObjCVideoTrackSource::OnOutputFormatRequest(int width, int height, int fps) { void ObjCVideoTrackSource::OnOutputFormatRequest(int width, int height, int fps) {
cricket::VideoFormat format(width, height, cricket::VideoFormat::FpsToInterval(fps), 0); cricket::VideoFormat format(width, height, cricket::VideoFormat::FpsToInterval(fps), 0);
video_adapter()->OnOutputFormatRequest(format); video_adapter()->OnOutputFormatRequest(format);