From ac0d18341d3bcc13e15dd89170d04aa09f987cfe Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Niels=20M=C3=B6ller?=
Date: Mon, 17 Jan 2022 15:26:54 +0100
Subject: [PATCH] Prepare for deleting implicit conversion from raw pointer to scoped_refptr.

Updates all webrtc code, to have a small followup cl to just add the
"explicit" keyword.

Patchset #24 passed all webrtc tests, with explicit.

Bug: webrtc:13464
Change-Id: I39863d3752f73209b531120f66916dc9177bf63a
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/242363
Reviewed-by: Tomas Gunnarsson
Commit-Queue: Niels Moller
Cr-Commit-Position: refs/heads/main@{#35718}
---
 api/scoped_refptr.h                           |  2 ++
 api/video/i444_buffer.cc                      |  6 ++---
 common_video/video_frame_buffer_pool.cc       |  2 +-
 .../jni/android_call_client.cc                |  2 +-
 .../objcnativeapi/objc/objc_call_client.mm    |  2 +-
 examples/peerconnection/client/conductor.cc   |  3 +--
 .../unityplugin/simple_peer_connection.cc     |  2 +-
 .../desktop_capture_options.cc                | 11 +++++++---
 .../linux/x11/shared_x_display.cc             |  4 ++--
 .../desktop_capture/shared_desktop_frame.cc   |  4 ++--
 .../test/video_capture_unittest.cc            |  2 +-
 .../codecs/vp9/libvpx_vp9_decoder.cc          |  4 ++--
 pc/media_stream.cc                            |  4 ++--
 pc/media_stream_unittest.cc                   |  2 +-
 sdk/android/src/jni/pc/peer_connection.cc     |  6 +++--
 .../src/jni/pc/peer_connection_factory.cc     |  7 +++---
 sdk/android/src/jni/pc/rtp_receiver.cc        |  5 +++--
 sdk/android/src/jni/pc/rtp_sender.cc          |  5 +++--
 sdk/objc/api/peerconnection/RTCAudioTrack.mm  |  8 +++----
 .../peerconnection/RTCEncodedImage+Private.mm |  2 +-
 .../peerconnection/RTCPeerConnection+Stats.mm | 17 +++++++-------
 .../api/peerconnection/RTCPeerConnection.mm   | 22 +++++++++----------
 sdk/objc/api/peerconnection/RTCVideoSource.mm |  4 ++--
 sdk/objc/api/peerconnection/RTCVideoTrack.mm  |  9 ++++----
 sdk/objc/native/api/audio_device_module.mm    |  2 +-
 sdk/objc/native/api/video_capturer.mm         |  4 ++--
 sdk/objc/native/api/video_frame_buffer.mm     |  2 +-
 sdk/objc/native/src/objc_frame_buffer.mm      |  2 +-
 .../native/src/objc_video_decoder_factory.mm  |  2 +-
 .../native/src/objc_video_track_source.mm     |  6 ++---
 .../unittests/ObjCVideoTrackSource_xctest.mm  |  2 +-
 .../objc_video_encoder_factory_tests.mm       |  4 ++--
 test/mac_capturer.mm                          |  2 +-
 33 files changed, 85 insertions(+), 76 deletions(-)

diff --git a/api/scoped_refptr.h b/api/scoped_refptr.h
index 5b3a08541e..164fefec14 100644
--- a/api/scoped_refptr.h
+++ b/api/scoped_refptr.h
@@ -75,6 +75,8 @@ class scoped_refptr {
 
   scoped_refptr() : ptr_(nullptr) {}
 
+  // TODO(bugs.webrtc.org/13464): Implicit construction is deprecated. Mark
+  // explicit, and add a new implicit constructor accepting a nullptr_t.
   scoped_refptr(T* p) : ptr_(p) {  // NOLINT(runtime/explicit)
     if (ptr_)
       ptr_->AddRef();
diff --git a/api/video/i444_buffer.cc b/api/video/i444_buffer.cc
index 92f1662f43..8bf9f76625 100644
--- a/api/video/i444_buffer.cc
+++ b/api/video/i444_buffer.cc
@@ -61,7 +61,7 @@ I444Buffer::~I444Buffer() {}
 
 // static
 rtc::scoped_refptr I444Buffer::Create(int width, int height) {
-  return new rtc::RefCountedObject(width, height);
+  return rtc::make_ref_counted(width, height);
 }
 
 // static
@@ -70,8 +70,8 @@ rtc::scoped_refptr I444Buffer::Create(int width,
                                       int stride_y,
                                       int stride_u,
                                       int stride_v) {
-  return new rtc::RefCountedObject(width, height, stride_y,
-                                   stride_u, stride_v);
+  return rtc::make_ref_counted(width, height, stride_y, stride_u,
+                               stride_v);
 }
 
 // static
diff --git a/common_video/video_frame_buffer_pool.cc b/common_video/video_frame_buffer_pool.cc
index e95eac3027..267cab1a71 100644
--- a/common_video/video_frame_buffer_pool.cc
+++ b/common_video/video_frame_buffer_pool.cc
@@ -143,7 +143,7 @@ rtc::scoped_refptr VideoFrameBufferPool::CreateI444Buffer(
     return nullptr;
   // Allocate new buffer.
   rtc::scoped_refptr buffer =
-      new rtc::RefCountedObject(width, height);
+      rtc::make_ref_counted(width, height);
 
   if (zero_initialize_)
     buffer->InitializeData();
diff --git a/examples/androidnativeapi/jni/android_call_client.cc b/examples/androidnativeapi/jni/android_call_client.cc
index 81782730db..3c7c86d7ef 100644
--- a/examples/androidnativeapi/jni/android_call_client.cc
+++ b/examples/androidnativeapi/jni/android_call_client.cc
@@ -265,7 +265,7 @@ void CreateOfferObserver::OnSuccess(webrtc::SessionDescriptionInterface* desc) {
       webrtc::CreateSessionDescription(webrtc::SdpType::kAnswer, sdp));
   pc_->SetRemoteDescription(
       std::move(answer),
-      new rtc::RefCountedObject());
+      rtc::make_ref_counted());
 }
 
 void CreateOfferObserver::OnFailure(webrtc::RTCError error) {
diff --git a/examples/objcnativeapi/objc/objc_call_client.mm b/examples/objcnativeapi/objc/objc_call_client.mm
index 09d3d4afdd..c1d84f7ce7 100644
--- a/examples/objcnativeapi/objc/objc_call_client.mm
+++ b/examples/objcnativeapi/objc/objc_call_client.mm
@@ -220,7 +220,7 @@ void CreateOfferObserver::OnSuccess(webrtc::SessionDescriptionInterface* desc) {
   std::unique_ptr answer(
       webrtc::CreateSessionDescription(webrtc::SdpType::kAnswer, sdp));
   pc_->SetRemoteDescription(std::move(answer),
-                            new rtc::RefCountedObject());
+                            rtc::make_ref_counted());
 }
 
 void CreateOfferObserver::OnFailure(webrtc::RTCError error) {
diff --git a/examples/peerconnection/client/conductor.cc b/examples/peerconnection/client/conductor.cc
index d9adffb07b..65958a445d 100644
--- a/examples/peerconnection/client/conductor.cc
+++ b/examples/peerconnection/client/conductor.cc
@@ -85,8 +85,7 @@ class CapturerTrackSource : public webrtc::VideoTrackSource {
       capturer = absl::WrapUnique(
           webrtc::test::VcmCapturer::Create(kWidth, kHeight, kFps, i));
       if (capturer) {
-        return new rtc::RefCountedObject(
-            std::move(capturer));
+        return rtc::make_ref_counted(std::move(capturer));
       }
     }
 
diff --git a/examples/unityplugin/simple_peer_connection.cc b/examples/unityplugin/simple_peer_connection.cc
index 8a82718dc8..34abbe294e 100644
--- a/examples/unityplugin/simple_peer_connection.cc
+++ b/examples/unityplugin/simple_peer_connection.cc
@@ -61,7 +61,7 @@ class CapturerTrackSource : public webrtc::VideoTrackSource {
     if (!capturer) {
       return nullptr;
     }
-    return new rtc::RefCountedObject(std::move(capturer));
+    return rtc::make_ref_counted(std::move(capturer));
   }
 
  protected:
diff --git a/modules/desktop_capture/desktop_capture_options.cc b/modules/desktop_capture/desktop_capture_options.cc
index c89896d5fd..fc0340af9e 100644
--- a/modules/desktop_capture/desktop_capture_options.cc
+++ b/modules/desktop_capture/desktop_capture_options.cc
@@ -15,6 +15,8 @@
 #include "modules/desktop_capture/win/full_screen_win_application_handler.h"
 #endif
 
+#include "rtc_base/ref_counted_object.h"
+
 namespace webrtc {
 
 DesktopCaptureOptions::DesktopCaptureOptions() {}
@@ -36,12 +38,15 @@ DesktopCaptureOptions DesktopCaptureOptions::CreateDefault() {
   result.set_x_display(SharedXDisplay::CreateDefault());
 #endif
 #if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
-  result.set_configuration_monitor(new DesktopConfigurationMonitor());
+  result.set_configuration_monitor(
+      rtc::make_ref_counted());
   result.set_full_screen_window_detector(
-      new FullScreenWindowDetector(CreateFullScreenMacApplicationHandler));
+      rtc::make_ref_counted(
+          CreateFullScreenMacApplicationHandler));
 #elif defined(WEBRTC_WIN)
   result.set_full_screen_window_detector(
-      new FullScreenWindowDetector(CreateFullScreenWinApplicationHandler));
+      rtc::make_ref_counted(
+          CreateFullScreenWinApplicationHandler));
 #endif
   return result;
 }
diff --git a/modules/desktop_capture/linux/x11/shared_x_display.cc b/modules/desktop_capture/linux/x11/shared_x_display.cc
index ca084d4bf5..ad2e04374d 100644
--- a/modules/desktop_capture/linux/x11/shared_x_display.cc
+++ b/modules/desktop_capture/linux/x11/shared_x_display.cc
@@ -36,9 +36,9 @@ rtc::scoped_refptr SharedXDisplay::Create(
       XOpenDisplay(display_name.empty() ? NULL : display_name.c_str());
   if (!display) {
     RTC_LOG(LS_ERROR) << "Unable to open display";
-    return NULL;
+    return nullptr;
   }
 
-  return new SharedXDisplay(display);
+  return rtc::scoped_refptr(new SharedXDisplay(display));
 }
 
 // static
diff --git a/modules/desktop_capture/shared_desktop_frame.cc b/modules/desktop_capture/shared_desktop_frame.cc
index 2ded145478..e374038cbc 100644
--- a/modules/desktop_capture/shared_desktop_frame.cc
+++ b/modules/desktop_capture/shared_desktop_frame.cc
@@ -21,8 +21,8 @@ SharedDesktopFrame::~SharedDesktopFrame() {}
 // static
 std::unique_ptr SharedDesktopFrame::Wrap(
     std::unique_ptr desktop_frame) {
-  return std::unique_ptr(
-      new SharedDesktopFrame(new Core(std::move(desktop_frame))));
+  return std::unique_ptr(new SharedDesktopFrame(
+      rtc::scoped_refptr(new Core(std::move(desktop_frame)))));
 }
 
 SharedDesktopFrame* SharedDesktopFrame::Wrap(DesktopFrame* desktop_frame) {
diff --git a/modules/video_capture/test/video_capture_unittest.cc b/modules/video_capture/test/video_capture_unittest.cc
index 098d60da0f..4cf3d5931c 100644
--- a/modules/video_capture/test/video_capture_unittest.cc
+++ b/modules/video_capture/test/video_capture_unittest.cc
@@ -168,7 +168,7 @@ class VideoCaptureTest : public ::testing::Test {
     rtc::scoped_refptr module(
         VideoCaptureFactory::Create(unique_name));
     if (module.get() == NULL)
-      return NULL;
+      return nullptr;
 
     EXPECT_FALSE(module->CaptureStarted());
diff --git a/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.cc b/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.cc
index 5d0f380af9..669dc55a4b 100644
--- a/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.cc
+++ b/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.cc
@@ -275,8 +275,8 @@ int LibvpxVp9Decoder::ReturnFrame(
   // This buffer contains all of `img`'s image data, a reference counted
   // Vp9FrameBuffer. (libvpx is done with the buffers after a few
   // vpx_codec_decode calls or vpx_codec_destroy).
-  rtc::scoped_refptr img_buffer =
-      static_cast(img->fb_priv);
+  rtc::scoped_refptr img_buffer(
+      static_cast(img->fb_priv));
 
   // The buffer can be used directly by the VideoFrame (without copy) by
   // using a Wrapped*Buffer.
diff --git a/pc/media_stream.cc b/pc/media_stream.cc
index 6cf84ac637..6fe308827c 100644
--- a/pc/media_stream.cc
+++ b/pc/media_stream.cc
@@ -59,7 +59,7 @@ rtc::scoped_refptr MediaStream::FindAudioTrack(
     const std::string& track_id) {
   AudioTrackVector::iterator it = FindTrack(&audio_tracks_, track_id);
   if (it == audio_tracks_.end())
-    return NULL;
+    return nullptr;
   return *it;
 }
 
@@ -67,7 +67,7 @@ rtc::scoped_refptr MediaStream::FindVideoTrack(
     const std::string& track_id) {
   VideoTrackVector::iterator it = FindTrack(&video_tracks_, track_id);
   if (it == video_tracks_.end())
-    return NULL;
+    return nullptr;
   return *it;
 }
diff --git a/pc/media_stream_unittest.cc b/pc/media_stream_unittest.cc
index 55226992e0..6ce8de9a1a 100644
--- a/pc/media_stream_unittest.cc
+++ b/pc/media_stream_unittest.cc
@@ -63,7 +63,7 @@ class MediaStreamTest : public ::testing::Test {
     ASSERT_TRUE(video_track_.get() != NULL);
     EXPECT_EQ(MediaStreamTrackInterface::kLive, video_track_->state());
 
-    audio_track_ = AudioTrack::Create(kAudioTrackId, NULL);
+    audio_track_ = AudioTrack::Create(kAudioTrackId, nullptr);
 
     ASSERT_TRUE(audio_track_.get() != NULL);
     EXPECT_EQ(MediaStreamTrackInterface::kLive, audio_track_->state());
diff --git a/sdk/android/src/jni/pc/peer_connection.cc b/sdk/android/src/jni/pc/peer_connection.cc
index 6bcaef4552..12de839ba0 100644
--- a/sdk/android/src/jni/pc/peer_connection.cc
+++ b/sdk/android/src/jni/pc/peer_connection.cc
@@ -764,7 +764,8 @@ static ScopedJavaLocalRef JNI_PeerConnection_AddTrack(
     const JavaParamRef& j_stream_labels) {
   RTCErrorOr> result =
       ExtractNativePC(jni, j_pc)->AddTrack(
-          reinterpret_cast(native_track),
+          rtc::scoped_refptr(
+              reinterpret_cast(native_track)),
           JavaListToNativeVector(jni, j_stream_labels, &JavaToNativeString));
 
   if (!result.ok()) {
@@ -792,7 +793,8 @@ static ScopedJavaLocalRef JNI_PeerConnection_AddTransceiverWithTrack(
     const JavaParamRef& j_init) {
   RTCErrorOr> result =
       ExtractNativePC(jni, j_pc)->AddTransceiver(
-          reinterpret_cast(native_track),
+          rtc::scoped_refptr(
+              reinterpret_cast(native_track)),
           JavaToNativeRtpTransceiverInit(jni, j_init));
   if (!result.ok()) {
     RTC_LOG(LS_ERROR) << "Failed to add transceiver: "
diff --git a/sdk/android/src/jni/pc/peer_connection_factory.cc b/sdk/android/src/jni/pc/peer_connection_factory.cc
index 5330cbd638..08af07a5e2 100644
--- a/sdk/android/src/jni/pc/peer_connection_factory.cc
+++ b/sdk/android/src/jni/pc/peer_connection_factory.cc
@@ -351,11 +351,12 @@ JNI_PeerConnectionFactory_CreatePeerConnectionFactory(
     jlong native_network_controller_factory,
    jlong native_network_state_predictor_factory,
    jlong native_neteq_factory) {
-  rtc::scoped_refptr audio_processor =
-      reinterpret_cast(native_audio_processor);
+  rtc::scoped_refptr audio_processor(
+      reinterpret_cast(native_audio_processor));
   return CreatePeerConnectionFactoryForJava(
       jni, jcontext, joptions,
-      reinterpret_cast(native_audio_device_module),
+      rtc::scoped_refptr(
+          reinterpret_cast(native_audio_device_module)),
       TakeOwnershipOfRefPtr(native_audio_encoder_factory),
       TakeOwnershipOfRefPtr(native_audio_decoder_factory),
       jencoder_factory, jdecoder_factory,
diff --git a/sdk/android/src/jni/pc/rtp_receiver.cc b/sdk/android/src/jni/pc/rtp_receiver.cc
index 4d7e954872..7a3600b424 100644
--- a/sdk/android/src/jni/pc/rtp_receiver.cc
+++ b/sdk/android/src/jni/pc/rtp_receiver.cc
@@ -118,8 +118,9 @@ static void JNI_RtpReceiver_SetFrameDecryptor(JNIEnv* jni,
                                               jlong j_rtp_sender_pointer,
                                               jlong j_frame_decryptor_pointer) {
   reinterpret_cast(j_rtp_sender_pointer)
-      ->SetFrameDecryptor(reinterpret_cast(
-          j_frame_decryptor_pointer));
+      ->SetFrameDecryptor(rtc::scoped_refptr(
+          reinterpret_cast(
+              j_frame_decryptor_pointer)));
 }
 
 }  // namespace jni
diff --git a/sdk/android/src/jni/pc/rtp_sender.cc b/sdk/android/src/jni/pc/rtp_sender.cc
index 411e5dc8c5..233a353654 100644
--- a/sdk/android/src/jni/pc/rtp_sender.cc
+++ b/sdk/android/src/jni/pc/rtp_sender.cc
@@ -105,8 +105,9 @@ static void JNI_RtpSender_SetFrameEncryptor(JNIEnv* jni,
                                             jlong j_rtp_sender_pointer,
                                             jlong j_frame_encryptor_pointer) {
   reinterpret_cast(j_rtp_sender_pointer)
-      ->SetFrameEncryptor(reinterpret_cast(
-          j_frame_encryptor_pointer));
+      ->SetFrameEncryptor(rtc::scoped_refptr(
+          reinterpret_cast(
+              j_frame_encryptor_pointer)));
 }
 
 }  // namespace jni
diff --git a/sdk/objc/api/peerconnection/RTCAudioTrack.mm b/sdk/objc/api/peerconnection/RTCAudioTrack.mm
index 6a97f46eaa..73ec98f2aa 100644
--- a/sdk/objc/api/peerconnection/RTCAudioTrack.mm
+++ b/sdk/objc/api/peerconnection/RTCAudioTrack.mm
@@ -48,11 +48,10 @@
 
 - (RTC_OBJC_TYPE(RTCAudioSource) *)source {
   if (!_source) {
-    rtc::scoped_refptr source =
-        self.nativeAudioTrack->GetSource();
+    rtc::scoped_refptr source(self.nativeAudioTrack->GetSource());
     if (source) {
       _source = [[RTC_OBJC_TYPE(RTCAudioSource) alloc] initWithFactory:self.factory
-                                                      nativeAudioSource:source.get()];
+                                                      nativeAudioSource:source];
     }
   }
   return _source;
@@ -61,7 +60,8 @@
 #pragma mark - Private
 
 - (rtc::scoped_refptr)nativeAudioTrack {
-  return static_cast(self.nativeTrack.get());
+  return rtc::scoped_refptr(
+      static_cast(self.nativeTrack.get()));
 }
 
 @end
diff --git a/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm b/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm
index b3e0a7bb67..56177b48dd 100644
--- a/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm
+++ b/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm
@@ -20,7 +20,7 @@ namespace {
 class ObjCEncodedImageBuffer : public webrtc::EncodedImageBufferInterface {
  public:
   static rtc::scoped_refptr Create(NSData *data) {
-    return new rtc::RefCountedObject(data);
+    return rtc::make_ref_counted(data);
   }
   const uint8_t *data() const override { return static_cast(data_.bytes); }
   // TODO(bugs.webrtc.org/9378): delete this non-const data method.
diff --git a/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm b/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm
index 8ded55200e..7f8b123cc1 100644
--- a/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm
+++ b/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm
@@ -69,21 +69,21 @@ class StatsObserverAdapter : public StatsObserver {
 
 - (void)statisticsForSender : (RTC_OBJC_TYPE(RTCRtpSender) *)sender
     completionHandler : (RTCStatisticsCompletionHandler)completionHandler {
-  rtc::scoped_refptr collector(
-      new rtc::RefCountedObject(completionHandler));
+  rtc::scoped_refptr collector =
+      rtc::make_ref_counted(completionHandler);
   self.nativePeerConnection->GetStats(sender.nativeRtpSender, collector);
 }
 
 - (void)statisticsForReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)receiver
             completionHandler:(RTCStatisticsCompletionHandler)completionHandler {
-  rtc::scoped_refptr collector(
-      new rtc::RefCountedObject(completionHandler));
+  rtc::scoped_refptr collector =
+      rtc::make_ref_counted(completionHandler);
   self.nativePeerConnection->GetStats(receiver.nativeRtpReceiver, collector);
 }
 
 - (void)statisticsWithCompletionHandler:(RTCStatisticsCompletionHandler)completionHandler {
-  rtc::scoped_refptr collector(
-      new rtc::RefCountedObject(completionHandler));
+  rtc::scoped_refptr collector =
+      rtc::make_ref_counted(completionHandler);
   self.nativePeerConnection->GetStats(collector);
 }
 
@@ -91,9 +91,8 @@ class StatsObserverAdapter : public StatsObserver {
             statsOutputLevel:(RTCStatsOutputLevel)statsOutputLevel
            completionHandler:
                (void (^)(NSArray *stats))completionHandler {
-  rtc::scoped_refptr observer(
-      new rtc::RefCountedObject
-          (completionHandler));
+  rtc::scoped_refptr observer =
+      rtc::make_ref_counted(completionHandler);
   webrtc::PeerConnectionInterface::StatsOutputLevel nativeOutputLevel =
       [[self class] nativeStatsOutputLevelForLevel:statsOutputLevel];
   self.nativePeerConnection->GetStats(
diff --git a/sdk/objc/api/peerconnection/RTCPeerConnection.mm b/sdk/objc/api/peerconnection/RTCPeerConnection.mm
index 4a31a5460c..7db986ce1d 100644
--- a/sdk/objc/api/peerconnection/RTCPeerConnection.mm
+++ b/sdk/objc/api/peerconnection/RTCPeerConnection.mm
@@ -572,9 +572,8 @@ void PeerConnectionDelegateAdapter::OnRemoveTrack(
 
 - (void)offerForConstraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
           completionHandler:(RTCCreateSessionDescriptionCompletionHandler)completionHandler {
   RTC_DCHECK(completionHandler != nil);
-  rtc::scoped_refptr
-      observer(new rtc::RefCountedObject
-                   (completionHandler));
+  rtc::scoped_refptr observer =
+      rtc::make_ref_counted(completionHandler);
   webrtc::PeerConnectionInterface::RTCOfferAnswerOptions options;
   CopyConstraintsIntoOfferAnswerOptions(constraints.nativeConstraints.get(), &options);
@@ -584,9 +583,8 @@
 
 - (void)answerForConstraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
           completionHandler:(RTCCreateSessionDescriptionCompletionHandler)completionHandler {
   RTC_DCHECK(completionHandler != nil);
-  rtc::scoped_refptr
-      observer(new rtc::RefCountedObject
-                   (completionHandler));
+  rtc::scoped_refptr observer =
+      rtc::make_ref_counted(completionHandler);
   webrtc::PeerConnectionInterface::RTCOfferAnswerOptions options;
   CopyConstraintsIntoOfferAnswerOptions(constraints.nativeConstraints.get(), &options);
@@ -596,24 +594,24 @@
 
 - (void)setLocalDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)sdp
           completionHandler:(RTCSetSessionDescriptionCompletionHandler)completionHandler {
   RTC_DCHECK(completionHandler != nil);
-  rtc::scoped_refptr observer(
-      new rtc::RefCountedObject<::SetSessionDescriptionObserver>(completionHandler));
+  rtc::scoped_refptr observer =
+      rtc::make_ref_counted<::SetSessionDescriptionObserver>(completionHandler);
   _peerConnection->SetLocalDescription(sdp.nativeDescription, observer);
 }
 
 - (void)setLocalDescriptionWithCompletionHandler:
     (RTCSetSessionDescriptionCompletionHandler)completionHandler {
   RTC_DCHECK(completionHandler != nil);
-  rtc::scoped_refptr observer(
-      new rtc::RefCountedObject<::SetSessionDescriptionObserver>(completionHandler));
+  rtc::scoped_refptr observer =
+      rtc::make_ref_counted<::SetSessionDescriptionObserver>(completionHandler);
   _peerConnection->SetLocalDescription(observer);
 }
 
 - (void)setRemoteDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)sdp
            completionHandler:(RTCSetSessionDescriptionCompletionHandler)completionHandler {
   RTC_DCHECK(completionHandler != nil);
-  rtc::scoped_refptr observer(
-      new rtc::RefCountedObject<::SetSessionDescriptionObserver>(completionHandler));
+  rtc::scoped_refptr observer =
+      rtc::make_ref_counted<::SetSessionDescriptionObserver>(completionHandler);
   _peerConnection->SetRemoteDescription(sdp.nativeDescription, observer);
 }
 
diff --git a/sdk/objc/api/peerconnection/RTCVideoSource.mm b/sdk/objc/api/peerconnection/RTCVideoSource.mm
index 3211f181d9..486ca93771 100644
--- a/sdk/objc/api/peerconnection/RTCVideoSource.mm
+++ b/sdk/objc/api/peerconnection/RTCVideoSource.mm
@@ -61,8 +61,8 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(
             signalingThread:(rtc::Thread *)signalingThread
                workerThread:(rtc::Thread *)workerThread
                isScreenCast:(BOOL)isScreenCast {
-  rtc::scoped_refptr objCVideoTrackSource(
-      new rtc::RefCountedObject(isScreenCast));
+  rtc::scoped_refptr objCVideoTrackSource =
+      rtc::make_ref_counted(isScreenCast);
   return [self initWithFactory:factory
              nativeVideoSource:webrtc::VideoTrackSourceProxy::Create(
diff --git a/sdk/objc/api/peerconnection/RTCVideoTrack.mm b/sdk/objc/api/peerconnection/RTCVideoTrack.mm
index 3f38dd51a9..d9eddde407 100644
--- a/sdk/objc/api/peerconnection/RTCVideoTrack.mm
+++ b/sdk/objc/api/peerconnection/RTCVideoTrack.mm
@@ -59,11 +59,11 @@
 
 - (RTC_OBJC_TYPE(RTCVideoSource) *)source {
   if (!_source) {
-    rtc::scoped_refptr source =
-        self.nativeVideoTrack->GetSource();
+    rtc::scoped_refptr source(
+        self.nativeVideoTrack->GetSource());
     if (source) {
       _source = [[RTC_OBJC_TYPE(RTCVideoSource) alloc] initWithFactory:self.factory
-                                                      nativeVideoSource:source.get()];
+                                                      nativeVideoSource:source];
     }
   }
   return _source;
@@ -107,7 +107,8 @@
 #pragma mark - Private
 
 - (rtc::scoped_refptr)nativeVideoTrack {
-  return static_cast(self.nativeTrack.get());
+  return rtc::scoped_refptr(
+      static_cast(self.nativeTrack.get()));
 }
 
 @end
diff --git a/sdk/objc/native/api/audio_device_module.mm b/sdk/objc/native/api/audio_device_module.mm
index 3c2790e38d..55ea7e3f34 100644
--- a/sdk/objc/native/api/audio_device_module.mm
+++ b/sdk/objc/native/api/audio_device_module.mm
@@ -20,7 +20,7 @@ namespace webrtc {
 rtc::scoped_refptr CreateAudioDeviceModule(bool bypass_voice_processing) {
   RTC_DLOG(LS_INFO) << __FUNCTION__;
 #if defined(WEBRTC_IOS)
-  return new rtc::RefCountedObject(bypass_voice_processing);
+  return rtc::make_ref_counted(bypass_voice_processing);
 #else
   RTC_LOG(LS_ERROR) << "current platform is not supported => this module will self destruct!";
   return nullptr;
diff --git a/sdk/objc/native/api/video_capturer.mm b/sdk/objc/native/api/video_capturer.mm
index cae7a50318..0101c79a1b 100644
--- a/sdk/objc/native/api/video_capturer.mm
+++ b/sdk/objc/native/api/video_capturer.mm
@@ -22,8 +22,8 @@ rtc::scoped_refptr ObjCToNativeVideoCapturer(
     rtc::Thread *signaling_thread,
     rtc::Thread *worker_thread) {
   RTCObjCVideoSourceAdapter *adapter = [[RTCObjCVideoSourceAdapter alloc] init];
-  rtc::scoped_refptr objc_video_track_source(
-      new rtc::RefCountedObject(adapter));
+  rtc::scoped_refptr objc_video_track_source =
+      rtc::make_ref_counted(adapter);
   rtc::scoped_refptr video_source =
       webrtc::CreateVideoTrackSourceProxy(signaling_thread, worker_thread, objc_video_track_source);
diff --git a/sdk/objc/native/api/video_frame_buffer.mm b/sdk/objc/native/api/video_frame_buffer.mm
index 6dc99756a6..e1d8aad02e 100644
--- a/sdk/objc/native/api/video_frame_buffer.mm
+++ b/sdk/objc/native/api/video_frame_buffer.mm
@@ -16,7 +16,7 @@ namespace webrtc {
 rtc::scoped_refptr ObjCToNativeVideoFrameBuffer(
     id objc_video_frame_buffer) {
-  return new rtc::RefCountedObject(objc_video_frame_buffer);
+  return rtc::make_ref_counted(objc_video_frame_buffer);
 }
 
 id NativeToObjCVideoFrameBuffer(
diff --git a/sdk/objc/native/src/objc_frame_buffer.mm b/sdk/objc/native/src/objc_frame_buffer.mm
index deb38a7a74..34d5d4e429 100644
--- a/sdk/objc/native/src/objc_frame_buffer.mm
+++ b/sdk/objc/native/src/objc_frame_buffer.mm
@@ -67,7 +67,7 @@ int ObjCFrameBuffer::height() const {
 
 rtc::scoped_refptr ObjCFrameBuffer::ToI420() {
   rtc::scoped_refptr buffer =
-      new rtc::RefCountedObject([frame_buffer_ toI420]);
+      rtc::make_ref_counted([frame_buffer_ toI420]);
 
   return buffer;
 }
diff --git a/sdk/objc/native/src/objc_video_decoder_factory.mm b/sdk/objc/native/src/objc_video_decoder_factory.mm
index d005d0eda4..e144fda54f 100644
--- a/sdk/objc/native/src/objc_video_decoder_factory.mm
+++ b/sdk/objc/native/src/objc_video_decoder_factory.mm
@@ -57,7 +57,7 @@ class ObjCVideoDecoder : public VideoDecoder {
   int32_t RegisterDecodeCompleteCallback(DecodedImageCallback *callback) override {
     [decoder_ setCallback:^(RTC_OBJC_TYPE(RTCVideoFrame) * frame) {
       const rtc::scoped_refptr buffer =
-          new rtc::RefCountedObject(frame.buffer);
+          rtc::make_ref_counted(frame.buffer);
       VideoFrame videoFrame = VideoFrame::Builder()
                                   .set_video_frame_buffer(buffer)
diff --git a/sdk/objc/native/src/objc_video_track_source.mm b/sdk/objc/native/src/objc_video_track_source.mm
index a97323732a..7937e90505 100644
--- a/sdk/objc/native/src/objc_video_track_source.mm
+++ b/sdk/objc/native/src/objc_video_track_source.mm
@@ -91,12 +91,12 @@ void ObjCVideoTrackSource::OnCapturedFrame(RTC_OBJC_TYPE(RTCVideoFrame) * frame)
   rtc::scoped_refptr buffer;
   if (adapted_width == frame.width && adapted_height == frame.height) {
     // No adaption - optimized path.
-    buffer = new rtc::RefCountedObject(frame.buffer);
+    buffer = rtc::make_ref_counted(frame.buffer);
   } else if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) {
     // Adapted CVPixelBuffer frame.
     RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
-    buffer = new rtc::RefCountedObject([[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc]
+    buffer = rtc::make_ref_counted([[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc]
         initWithPixelBuffer:rtcPixelBuffer.pixelBuffer
                adaptedWidth:adapted_width
              adaptedHeight:adapted_height
@@ -108,7 +108,7 @@ void ObjCVideoTrackSource::OnCapturedFrame(RTC_OBJC_TYPE(RTCVideoFrame) * frame)
     // Adapted I420 frame.
     // TODO(magjed): Optimize this I420 path.
     rtc::scoped_refptr i420_buffer = I420Buffer::Create(adapted_width, adapted_height);
-    buffer = new rtc::RefCountedObject(frame.buffer);
+    buffer = rtc::make_ref_counted(frame.buffer);
     i420_buffer->CropAndScaleFrom(*buffer->ToI420(), crop_x, crop_y, crop_width, crop_height);
     buffer = i420_buffer;
   }
diff --git a/sdk/objc/unittests/ObjCVideoTrackSource_xctest.mm b/sdk/objc/unittests/ObjCVideoTrackSource_xctest.mm
index ca3d67293f..8bd1f1a2fe 100644
--- a/sdk/objc/unittests/ObjCVideoTrackSource_xctest.mm
+++ b/sdk/objc/unittests/ObjCVideoTrackSource_xctest.mm
@@ -51,7 +51,7 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface
 }
 
 - (void)setUp {
-  _video_source = new rtc::RefCountedObject();
+  _video_source = rtc::make_ref_counted();
 }
 
 - (void)tearDown {
diff --git a/sdk/objc/unittests/objc_video_encoder_factory_tests.mm b/sdk/objc/unittests/objc_video_encoder_factory_tests.mm
index 7c1594a109..cd97c81f15 100644
--- a/sdk/objc/unittests/objc_video_encoder_factory_tests.mm
+++ b/sdk/objc/unittests/objc_video_encoder_factory_tests.mm
@@ -83,7 +83,7 @@ TEST(ObjCVideoEncoderFactoryTest, EncodeReturnsOKOnSuccess) {
   CVPixelBufferRef pixel_buffer;
   CVPixelBufferCreate(kCFAllocatorDefault, 640, 480, kCVPixelFormatType_32ARGB, nil, &pixel_buffer);
   rtc::scoped_refptr buffer =
-      new rtc::RefCountedObject(
+      rtc::make_ref_counted(
           [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixel_buffer]);
   webrtc::VideoFrame frame = webrtc::VideoFrame::Builder()
                                  .set_video_frame_buffer(buffer)
@@ -101,7 +101,7 @@ TEST(ObjCVideoEncoderFactoryTest, EncodeReturnsErrorOnFail) {
   CVPixelBufferRef pixel_buffer;
   CVPixelBufferCreate(kCFAllocatorDefault, 640, 480, kCVPixelFormatType_32ARGB, nil, &pixel_buffer);
   rtc::scoped_refptr buffer =
-      new rtc::RefCountedObject(
+      rtc::make_ref_counted(
          [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixel_buffer]);
   webrtc::VideoFrame frame = webrtc::VideoFrame::Builder()
                                  .set_video_frame_buffer(buffer)
diff --git a/test/mac_capturer.mm b/test/mac_capturer.mm
index 1f84c1bb96..da8e9b76b6 100644
--- a/test/mac_capturer.mm
+++ b/test/mac_capturer.mm
@@ -26,7 +26,7 @@ didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
   const int64_t timestamp_us = frame.timeStampNs / rtc::kNumNanosecsPerMicrosec;
   rtc::scoped_refptr buffer =
-      new rtc::RefCountedObject(frame.buffer);
+      rtc::make_ref_counted(frame.buffer);
   _capturer->OnFrame(webrtc::VideoFrame::Builder()
                          .set_video_frame_buffer(buffer)
                          .set_rotation(webrtc::kVideoRotation_0)
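
The call sites above all move away from the implicit raw-pointer-to-scoped_refptr conversion that the TODO in api/scoped_refptr.h deprecates: allocation sites switch to rtc::make_ref_counted(), and places that only have a raw pointer (for example the reinterpret_cast JNI call sites) wrap it in an explicit rtc::scoped_refptr(...) so that the constructor can later be marked explicit, with a separate nullptr_t overload keeping plain `return nullptr;` working. The sketch below is not part of the patch; MyFrameBuffer is a hypothetical class used only to illustrate the two target patterns under that assumption.

#include "api/scoped_refptr.h"
#include "rtc_base/ref_count.h"
#include "rtc_base/ref_counted_object.h"

namespace example {

// Hypothetical ref-counted class, for illustration only.
class MyFrameBuffer : public rtc::RefCountInterface {
 public:
  explicit MyFrameBuffer(int width) : width_(width) {}
  int width() const { return width_; }

 private:
  const int width_;
};

// Deprecated pattern this CL migrates away from: an implicit conversion
// from the raw pointer returned by new.
//   rtc::scoped_refptr<MyFrameBuffer> buffer =
//       new rtc::RefCountedObject<MyFrameBuffer>(640);

// Preferred: allocate and wrap in one step.
rtc::scoped_refptr<MyFrameBuffer> CreateBuffer() {
  return rtc::make_ref_counted<MyFrameBuffer>(640);
}

// When only a raw pointer is available, construct the scoped_refptr
// explicitly; this keeps compiling once the constructor becomes explicit.
rtc::scoped_refptr<MyFrameBuffer> AdoptBuffer(MyFrameBuffer* raw) {
  return rtc::scoped_refptr<MyFrameBuffer>(raw);
}

}  // namespace example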