Prepare for deleting the implicit conversion from raw pointer to scoped_refptr.

Updates all WebRTC code so that a small follow-up CL can add just the
"explicit" keyword. Patchset #24 passed all WebRTC tests with explicit
applied.

Bug: webrtc:13464
Change-Id: I39863d3752f73209b531120f66916dc9177bf63a
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/242363
Reviewed-by: Tomas Gunnarsson <tommi@webrtc.org>
Commit-Queue: Niels Moller <nisse@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#35718}
Niels Möller <nisse@webrtc.org>, 2022-01-17 15:26:54 +01:00, committed by WebRTC LUCI CQ
parent 9609a825eb
commit ac0d18341d
33 changed files with 85 additions and 76 deletions
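The change is mechanical throughout: call sites that relied on the implicit T*-to-scoped_refptr conversion, typically by returning `new rtc::RefCountedObject<T>(...)`, now construct the scoped_refptr directly, either with `rtc::make_ref_counted<T>(...)` or with an explicit `rtc::scoped_refptr<T>(p)` wrap. A minimal sketch of both patterns follows; the `Counter` class and the two functions are hypothetical, invented only to illustrate, and assume WebRTC's rtc_base headers:

#include "api/scoped_refptr.h"
#include "rtc_base/ref_count.h"
#include "rtc_base/ref_counted_object.h"

// Hypothetical ref-counted class, used only to illustrate the pattern.
class Counter : public rtc::RefCountInterface {
 public:
  explicit Counter(int start) : count_(start) {}
  int count() const { return count_; }

 private:
  int count_;
};

rtc::scoped_refptr<Counter> CreateCounter(int start) {
  // Before this CL: `return new rtc::RefCountedObject<Counter>(start);`,
  // relying on the implicit raw-pointer constructor of scoped_refptr.
  // After: construct the scoped_refptr directly.
  return rtc::make_ref_counted<Counter>(start);
}

void AdoptRawPointer(Counter* raw) {
  // Where a raw pointer is unavoidable (e.g. it crossed a C or JNI
  // boundary), wrap it explicitly; the constructor calls AddRef().
  rtc::scoped_refptr<Counter> counter(raw);
}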

@@ -75,6 +75,8 @@ class scoped_refptr {
   scoped_refptr() : ptr_(nullptr) {}
+  // TODO(bugs.webrtc.org/13464): Implicit construction is deprecated. Mark
+  // explicit, and add a new implicit constructor accepting a nullptr_t.
   scoped_refptr(T* p) : ptr_(p) {  // NOLINT(runtime/explicit)
     if (ptr_)
       ptr_->AddRef();
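The TODO above describes the end state. A rough sketch of the two constructors after the follow-up CL, an illustration of the plan rather than the actual patch:

// Sketch only; details may differ in the actual follow-up CL.
// Raw-pointer construction becomes explicit...
explicit scoped_refptr(T* p) : ptr_(p) {
  if (ptr_)
    ptr_->AddRef();
}

// ...while a new implicit std::nullptr_t constructor keeps plain
// `return nullptr;` at call sites compiling.
scoped_refptr(std::nullptr_t) : ptr_(nullptr) {}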

@@ -61,7 +61,7 @@ I444Buffer::~I444Buffer() {}
 // static
 rtc::scoped_refptr<I444Buffer> I444Buffer::Create(int width, int height) {
-  return new rtc::RefCountedObject<I444Buffer>(width, height);
+  return rtc::make_ref_counted<I444Buffer>(width, height);
 }
 // static
@@ -70,8 +70,8 @@ rtc::scoped_refptr<I444Buffer> I444Buffer::Create(int width,
                                                   int stride_y,
                                                   int stride_u,
                                                   int stride_v) {
-  return new rtc::RefCountedObject<I444Buffer>(width, height, stride_y,
-                                               stride_u, stride_v);
+  return rtc::make_ref_counted<I444Buffer>(width, height, stride_y, stride_u,
+                                           stride_v);
 }
 // static

@@ -143,7 +143,7 @@ rtc::scoped_refptr<I444Buffer> VideoFrameBufferPool::CreateI444Buffer(
     return nullptr;
   // Allocate new buffer.
   rtc::scoped_refptr<I444Buffer> buffer =
-      new rtc::RefCountedObject<I444Buffer>(width, height);
+      rtc::make_ref_counted<I444Buffer>(width, height);
   if (zero_initialize_)
     buffer->InitializeData();

@@ -265,7 +265,7 @@ void CreateOfferObserver::OnSuccess(webrtc::SessionDescriptionInterface* desc) {
       webrtc::CreateSessionDescription(webrtc::SdpType::kAnswer, sdp));
   pc_->SetRemoteDescription(
       std::move(answer),
-      new rtc::RefCountedObject<SetRemoteSessionDescriptionObserver>());
+      rtc::make_ref_counted<SetRemoteSessionDescriptionObserver>());
 }
 void CreateOfferObserver::OnFailure(webrtc::RTCError error) {

@@ -220,7 +220,7 @@ void CreateOfferObserver::OnSuccess(webrtc::SessionDescriptionInterface* desc) {
   std::unique_ptr<webrtc::SessionDescriptionInterface> answer(
       webrtc::CreateSessionDescription(webrtc::SdpType::kAnswer, sdp));
   pc_->SetRemoteDescription(std::move(answer),
-      new rtc::RefCountedObject<SetRemoteSessionDescriptionObserver>());
+      rtc::make_ref_counted<SetRemoteSessionDescriptionObserver>());
 }
 void CreateOfferObserver::OnFailure(webrtc::RTCError error) {

@@ -85,8 +85,7 @@ class CapturerTrackSource : public webrtc::VideoTrackSource {
       capturer = absl::WrapUnique(
           webrtc::test::VcmCapturer::Create(kWidth, kHeight, kFps, i));
       if (capturer) {
-        return new rtc::RefCountedObject<CapturerTrackSource>(
-            std::move(capturer));
+        return rtc::make_ref_counted<CapturerTrackSource>(std::move(capturer));
       }
     }

@@ -61,7 +61,7 @@ class CapturerTrackSource : public webrtc::VideoTrackSource {
     if (!capturer) {
       return nullptr;
     }
-    return new rtc::RefCountedObject<CapturerTrackSource>(std::move(capturer));
+    return rtc::make_ref_counted<CapturerTrackSource>(std::move(capturer));
   }
  protected:

@@ -15,6 +15,8 @@
 #include "modules/desktop_capture/win/full_screen_win_application_handler.h"
 #endif
+#include "rtc_base/ref_counted_object.h"
 namespace webrtc {
 DesktopCaptureOptions::DesktopCaptureOptions() {}
@@ -36,12 +38,15 @@ DesktopCaptureOptions DesktopCaptureOptions::CreateDefault() {
   result.set_x_display(SharedXDisplay::CreateDefault());
 #endif
 #if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
-  result.set_configuration_monitor(new DesktopConfigurationMonitor());
+  result.set_configuration_monitor(
+      rtc::make_ref_counted<DesktopConfigurationMonitor>());
   result.set_full_screen_window_detector(
-      new FullScreenWindowDetector(CreateFullScreenMacApplicationHandler));
+      rtc::make_ref_counted<FullScreenWindowDetector>(
+          CreateFullScreenMacApplicationHandler));
 #elif defined(WEBRTC_WIN)
   result.set_full_screen_window_detector(
-      new FullScreenWindowDetector(CreateFullScreenWinApplicationHandler));
+      rtc::make_ref_counted<FullScreenWindowDetector>(
+          CreateFullScreenWinApplicationHandler));
 #endif
   return result;
 }

@@ -36,9 +36,9 @@ rtc::scoped_refptr<SharedXDisplay> SharedXDisplay::Create(
       XOpenDisplay(display_name.empty() ? NULL : display_name.c_str());
   if (!display) {
     RTC_LOG(LS_ERROR) << "Unable to open display";
-    return NULL;
+    return nullptr;
   }
-  return new SharedXDisplay(display);
+  return rtc::scoped_refptr<SharedXDisplay>(new SharedXDisplay(display));
 }
 // static
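Note that this call site wraps the raw `new` result in an explicit scoped_refptr instead of using `rtc::make_ref_counted`, presumably because SharedXDisplay manages its own construction and reference counting. A hypothetical sketch of that shape (the `Display` class is invented for illustration):

// Invented class: construction goes through a factory and the object
// counts its own references, so rtc::make_ref_counted is not used.
class Display {
 public:
  static rtc::scoped_refptr<Display> Open() {
    // Explicit wrap; this spelling keeps compiling once the
    // scoped_refptr(T*) constructor is marked explicit.
    return rtc::scoped_refptr<Display>(new Display());
  }

  void AddRef() const { ++ref_count_; }
  void Release() const {
    if (--ref_count_ == 0)
      delete this;
  }

 private:
  Display() = default;
  ~Display() = default;
  // Sketch only: a real implementation would use an atomic counter.
  mutable int ref_count_ = 0;
};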

@@ -21,8 +21,8 @@ SharedDesktopFrame::~SharedDesktopFrame() {}
 // static
 std::unique_ptr<SharedDesktopFrame> SharedDesktopFrame::Wrap(
     std::unique_ptr<DesktopFrame> desktop_frame) {
-  return std::unique_ptr<SharedDesktopFrame>(
-      new SharedDesktopFrame(new Core(std::move(desktop_frame))));
+  return std::unique_ptr<SharedDesktopFrame>(new SharedDesktopFrame(
+      rtc::scoped_refptr<Core>(new Core(std::move(desktop_frame)))));
 }
 SharedDesktopFrame* SharedDesktopFrame::Wrap(DesktopFrame* desktop_frame) {

@@ -168,7 +168,7 @@ class VideoCaptureTest : public ::testing::Test {
     rtc::scoped_refptr<VideoCaptureModule> module(
         VideoCaptureFactory::Create(unique_name));
     if (module.get() == NULL)
-      return NULL;
+      return nullptr;
     EXPECT_FALSE(module->CaptureStarted());

@@ -275,8 +275,8 @@ int LibvpxVp9Decoder::ReturnFrame(
   // This buffer contains all of `img`'s image data, a reference counted
   // Vp9FrameBuffer. (libvpx is done with the buffers after a few
   // vpx_codec_decode calls or vpx_codec_destroy).
-  rtc::scoped_refptr<Vp9FrameBufferPool::Vp9FrameBuffer> img_buffer =
-      static_cast<Vp9FrameBufferPool::Vp9FrameBuffer*>(img->fb_priv);
+  rtc::scoped_refptr<Vp9FrameBufferPool::Vp9FrameBuffer> img_buffer(
+      static_cast<Vp9FrameBufferPool::Vp9FrameBuffer*>(img->fb_priv));
   // The buffer can be used directly by the VideoFrame (without copy) by
   // using a Wrapped*Buffer.

@@ -59,7 +59,7 @@ rtc::scoped_refptr<AudioTrackInterface> MediaStream::FindAudioTrack(
     const std::string& track_id) {
   AudioTrackVector::iterator it = FindTrack(&audio_tracks_, track_id);
   if (it == audio_tracks_.end())
-    return NULL;
+    return nullptr;
   return *it;
 }
@@ -67,7 +67,7 @@ rtc::scoped_refptr<VideoTrackInterface> MediaStream::FindVideoTrack(
     const std::string& track_id) {
   VideoTrackVector::iterator it = FindTrack(&video_tracks_, track_id);
   if (it == video_tracks_.end())
-    return NULL;
+    return nullptr;
   return *it;
 }

@@ -63,7 +63,7 @@ class MediaStreamTest : public ::testing::Test {
     ASSERT_TRUE(video_track_.get() != NULL);
     EXPECT_EQ(MediaStreamTrackInterface::kLive, video_track_->state());
-    audio_track_ = AudioTrack::Create(kAudioTrackId, NULL);
+    audio_track_ = AudioTrack::Create(kAudioTrackId, nullptr);
     ASSERT_TRUE(audio_track_.get() != NULL);
     EXPECT_EQ(MediaStreamTrackInterface::kLive, audio_track_->state());

@@ -764,7 +764,8 @@ static ScopedJavaLocalRef<jobject> JNI_PeerConnection_AddTrack(
     const JavaParamRef<jobject>& j_stream_labels) {
   RTCErrorOr<rtc::scoped_refptr<RtpSenderInterface>> result =
       ExtractNativePC(jni, j_pc)->AddTrack(
-          reinterpret_cast<MediaStreamTrackInterface*>(native_track),
+          rtc::scoped_refptr<MediaStreamTrackInterface>(
+              reinterpret_cast<MediaStreamTrackInterface*>(native_track)),
           JavaListToNativeVector<std::string, jstring>(jni, j_stream_labels,
                                                        &JavaToNativeString));
   if (!result.ok()) {
@@ -792,7 +793,8 @@ static ScopedJavaLocalRef<jobject> JNI_PeerConnection_AddTransceiverWithTrack(
     const JavaParamRef<jobject>& j_init) {
   RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>> result =
       ExtractNativePC(jni, j_pc)->AddTransceiver(
-          reinterpret_cast<MediaStreamTrackInterface*>(native_track),
+          rtc::scoped_refptr<MediaStreamTrackInterface>(
+              reinterpret_cast<MediaStreamTrackInterface*>(native_track)),
           JavaToNativeRtpTransceiverInit(jni, j_init));
   if (!result.ok()) {
     RTC_LOG(LS_ERROR) << "Failed to add transceiver: "

@@ -351,11 +351,12 @@ JNI_PeerConnectionFactory_CreatePeerConnectionFactory(
     jlong native_network_controller_factory,
     jlong native_network_state_predictor_factory,
     jlong native_neteq_factory) {
-  rtc::scoped_refptr<AudioProcessing> audio_processor =
-      reinterpret_cast<AudioProcessing*>(native_audio_processor);
+  rtc::scoped_refptr<AudioProcessing> audio_processor(
+      reinterpret_cast<AudioProcessing*>(native_audio_processor));
   return CreatePeerConnectionFactoryForJava(
       jni, jcontext, joptions,
-      reinterpret_cast<AudioDeviceModule*>(native_audio_device_module),
+      rtc::scoped_refptr<AudioDeviceModule>(
+          reinterpret_cast<AudioDeviceModule*>(native_audio_device_module)),
       TakeOwnershipOfRefPtr<AudioEncoderFactory>(native_audio_encoder_factory),
       TakeOwnershipOfRefPtr<AudioDecoderFactory>(native_audio_decoder_factory),
       jencoder_factory, jdecoder_factory,

@@ -118,8 +118,9 @@ static void JNI_RtpReceiver_SetFrameDecryptor(JNIEnv* jni,
                                               jlong j_rtp_sender_pointer,
                                               jlong j_frame_decryptor_pointer) {
   reinterpret_cast<RtpReceiverInterface*>(j_rtp_sender_pointer)
-      ->SetFrameDecryptor(reinterpret_cast<FrameDecryptorInterface*>(
-          j_frame_decryptor_pointer));
+      ->SetFrameDecryptor(rtc::scoped_refptr<FrameDecryptorInterface>(
+          reinterpret_cast<FrameDecryptorInterface*>(
+              j_frame_decryptor_pointer)));
 }
 }  // namespace jni

@@ -105,8 +105,9 @@ static void JNI_RtpSender_SetFrameEncryptor(JNIEnv* jni,
                                             jlong j_rtp_sender_pointer,
                                             jlong j_frame_encryptor_pointer) {
   reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)
-      ->SetFrameEncryptor(reinterpret_cast<FrameEncryptorInterface*>(
-          j_frame_encryptor_pointer));
+      ->SetFrameEncryptor(rtc::scoped_refptr<FrameEncryptorInterface>(
+          reinterpret_cast<FrameEncryptorInterface*>(
+              j_frame_encryptor_pointer)));
 }
 }  // namespace jni

@@ -48,11 +48,10 @@
 - (RTC_OBJC_TYPE(RTCAudioSource) *)source {
   if (!_source) {
-    rtc::scoped_refptr<webrtc::AudioSourceInterface> source =
-        self.nativeAudioTrack->GetSource();
+    rtc::scoped_refptr<webrtc::AudioSourceInterface> source(self.nativeAudioTrack->GetSource());
     if (source) {
       _source = [[RTC_OBJC_TYPE(RTCAudioSource) alloc] initWithFactory:self.factory
-                                                      nativeAudioSource:source.get()];
+                                                      nativeAudioSource:source];
     }
   }
   return _source;
@@ -61,7 +60,8 @@
 #pragma mark - Private
 - (rtc::scoped_refptr<webrtc::AudioTrackInterface>)nativeAudioTrack {
-  return static_cast<webrtc::AudioTrackInterface *>(self.nativeTrack.get());
+  return rtc::scoped_refptr<webrtc::AudioTrackInterface>(
+      static_cast<webrtc::AudioTrackInterface *>(self.nativeTrack.get()));
 }
 @end

@@ -20,7 +20,7 @@ namespace {
 class ObjCEncodedImageBuffer : public webrtc::EncodedImageBufferInterface {
  public:
   static rtc::scoped_refptr<ObjCEncodedImageBuffer> Create(NSData *data) {
-    return new rtc::RefCountedObject<ObjCEncodedImageBuffer>(data);
+    return rtc::make_ref_counted<ObjCEncodedImageBuffer>(data);
   }
   const uint8_t *data() const override { return static_cast<const uint8_t *>(data_.bytes); }
   // TODO(bugs.webrtc.org/9378): delete this non-const data method.

@@ -69,21 +69,21 @@ class StatsObserverAdapter : public StatsObserver {
 - (void)statisticsForSender : (RTC_OBJC_TYPE(RTCRtpSender) *)sender completionHandler
     : (RTCStatisticsCompletionHandler)completionHandler {
-  rtc::scoped_refptr<webrtc::StatsCollectorCallbackAdapter> collector(
-      new rtc::RefCountedObject<webrtc::StatsCollectorCallbackAdapter>(completionHandler));
+  rtc::scoped_refptr<webrtc::StatsCollectorCallbackAdapter> collector =
+      rtc::make_ref_counted<webrtc::StatsCollectorCallbackAdapter>(completionHandler);
   self.nativePeerConnection->GetStats(sender.nativeRtpSender, collector);
 }
 - (void)statisticsForReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)receiver
             completionHandler:(RTCStatisticsCompletionHandler)completionHandler {
-  rtc::scoped_refptr<webrtc::StatsCollectorCallbackAdapter> collector(
-      new rtc::RefCountedObject<webrtc::StatsCollectorCallbackAdapter>(completionHandler));
+  rtc::scoped_refptr<webrtc::StatsCollectorCallbackAdapter> collector =
+      rtc::make_ref_counted<webrtc::StatsCollectorCallbackAdapter>(completionHandler);
   self.nativePeerConnection->GetStats(receiver.nativeRtpReceiver, collector);
 }
 - (void)statisticsWithCompletionHandler:(RTCStatisticsCompletionHandler)completionHandler {
-  rtc::scoped_refptr<webrtc::StatsCollectorCallbackAdapter> collector(
-      new rtc::RefCountedObject<webrtc::StatsCollectorCallbackAdapter>(completionHandler));
+  rtc::scoped_refptr<webrtc::StatsCollectorCallbackAdapter> collector =
+      rtc::make_ref_counted<webrtc::StatsCollectorCallbackAdapter>(completionHandler);
   self.nativePeerConnection->GetStats(collector);
 }
@@ -91,9 +91,8 @@ class StatsObserverAdapter : public StatsObserver {
                statsOutputLevel:(RTCStatsOutputLevel)statsOutputLevel
               completionHandler:
                   (void (^)(NSArray<RTC_OBJC_TYPE(RTCLegacyStatsReport) *> *stats))completionHandler {
-  rtc::scoped_refptr<webrtc::StatsObserverAdapter> observer(
-      new rtc::RefCountedObject<webrtc::StatsObserverAdapter>
-      (completionHandler));
+  rtc::scoped_refptr<webrtc::StatsObserverAdapter> observer =
+      rtc::make_ref_counted<webrtc::StatsObserverAdapter>(completionHandler);
   webrtc::PeerConnectionInterface::StatsOutputLevel nativeOutputLevel =
       [[self class] nativeStatsOutputLevelForLevel:statsOutputLevel];
   self.nativePeerConnection->GetStats(

@@ -572,9 +572,8 @@ void PeerConnectionDelegateAdapter::OnRemoveTrack(
 - (void)offerForConstraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
           completionHandler:(RTCCreateSessionDescriptionCompletionHandler)completionHandler {
   RTC_DCHECK(completionHandler != nil);
-  rtc::scoped_refptr<webrtc::CreateSessionDescriptionObserverAdapter>
-      observer(new rtc::RefCountedObject
-      <webrtc::CreateSessionDescriptionObserverAdapter>(completionHandler));
+  rtc::scoped_refptr<webrtc::CreateSessionDescriptionObserverAdapter> observer =
+      rtc::make_ref_counted<webrtc::CreateSessionDescriptionObserverAdapter>(completionHandler);
   webrtc::PeerConnectionInterface::RTCOfferAnswerOptions options;
   CopyConstraintsIntoOfferAnswerOptions(constraints.nativeConstraints.get(), &options);
@@ -584,9 +583,8 @@ void PeerConnectionDelegateAdapter::OnRemoveTrack(
 - (void)answerForConstraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
           completionHandler:(RTCCreateSessionDescriptionCompletionHandler)completionHandler {
   RTC_DCHECK(completionHandler != nil);
-  rtc::scoped_refptr<webrtc::CreateSessionDescriptionObserverAdapter>
-      observer(new rtc::RefCountedObject
-      <webrtc::CreateSessionDescriptionObserverAdapter>(completionHandler));
+  rtc::scoped_refptr<webrtc::CreateSessionDescriptionObserverAdapter> observer =
+      rtc::make_ref_counted<webrtc::CreateSessionDescriptionObserverAdapter>(completionHandler);
   webrtc::PeerConnectionInterface::RTCOfferAnswerOptions options;
   CopyConstraintsIntoOfferAnswerOptions(constraints.nativeConstraints.get(), &options);
@@ -596,24 +594,24 @@ void PeerConnectionDelegateAdapter::OnRemoveTrack(
 - (void)setLocalDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)sdp
           completionHandler:(RTCSetSessionDescriptionCompletionHandler)completionHandler {
   RTC_DCHECK(completionHandler != nil);
-  rtc::scoped_refptr<webrtc::SetLocalDescriptionObserverInterface> observer(
-      new rtc::RefCountedObject<::SetSessionDescriptionObserver>(completionHandler));
+  rtc::scoped_refptr<webrtc::SetLocalDescriptionObserverInterface> observer =
+      rtc::make_ref_counted<::SetSessionDescriptionObserver>(completionHandler);
   _peerConnection->SetLocalDescription(sdp.nativeDescription, observer);
 }
 - (void)setLocalDescriptionWithCompletionHandler:
     (RTCSetSessionDescriptionCompletionHandler)completionHandler {
   RTC_DCHECK(completionHandler != nil);
-  rtc::scoped_refptr<webrtc::SetLocalDescriptionObserverInterface> observer(
-      new rtc::RefCountedObject<::SetSessionDescriptionObserver>(completionHandler));
+  rtc::scoped_refptr<webrtc::SetLocalDescriptionObserverInterface> observer =
+      rtc::make_ref_counted<::SetSessionDescriptionObserver>(completionHandler);
   _peerConnection->SetLocalDescription(observer);
 }
 - (void)setRemoteDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)sdp
            completionHandler:(RTCSetSessionDescriptionCompletionHandler)completionHandler {
   RTC_DCHECK(completionHandler != nil);
-  rtc::scoped_refptr<webrtc::SetRemoteDescriptionObserverInterface> observer(
-      new rtc::RefCountedObject<::SetSessionDescriptionObserver>(completionHandler));
+  rtc::scoped_refptr<webrtc::SetRemoteDescriptionObserverInterface> observer =
+      rtc::make_ref_counted<::SetSessionDescriptionObserver>(completionHandler);
   _peerConnection->SetRemoteDescription(sdp.nativeDescription, observer);
 }

@@ -61,8 +61,8 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(
                signalingThread:(rtc::Thread *)signalingThread
                   workerThread:(rtc::Thread *)workerThread
                   isScreenCast:(BOOL)isScreenCast {
-  rtc::scoped_refptr<webrtc::ObjCVideoTrackSource> objCVideoTrackSource(
-      new rtc::RefCountedObject<webrtc::ObjCVideoTrackSource>(isScreenCast));
+  rtc::scoped_refptr<webrtc::ObjCVideoTrackSource> objCVideoTrackSource =
+      rtc::make_ref_counted<webrtc::ObjCVideoTrackSource>(isScreenCast);
   return [self initWithFactory:factory
              nativeVideoSource:webrtc::VideoTrackSourceProxy::Create(

@@ -59,11 +59,11 @@
 - (RTC_OBJC_TYPE(RTCVideoSource) *)source {
   if (!_source) {
-    rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source =
-        self.nativeVideoTrack->GetSource();
+    rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source(
+        self.nativeVideoTrack->GetSource());
     if (source) {
       _source = [[RTC_OBJC_TYPE(RTCVideoSource) alloc] initWithFactory:self.factory
-                                                      nativeVideoSource:source.get()];
+                                                      nativeVideoSource:source];
     }
   }
   return _source;
@@ -107,7 +107,8 @@
 #pragma mark - Private
 - (rtc::scoped_refptr<webrtc::VideoTrackInterface>)nativeVideoTrack {
-  return static_cast<webrtc::VideoTrackInterface *>(self.nativeTrack.get());
+  return rtc::scoped_refptr<webrtc::VideoTrackInterface>(
+      static_cast<webrtc::VideoTrackInterface *>(self.nativeTrack.get()));
 }
 @end

@@ -20,7 +20,7 @@ namespace webrtc {
 rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceModule(bool bypass_voice_processing) {
   RTC_DLOG(LS_INFO) << __FUNCTION__;
 #if defined(WEBRTC_IOS)
-  return new rtc::RefCountedObject<ios_adm::AudioDeviceModuleIOS>(bypass_voice_processing);
+  return rtc::make_ref_counted<ios_adm::AudioDeviceModuleIOS>(bypass_voice_processing);
 #else
   RTC_LOG(LS_ERROR) << "current platform is not supported => this module will self destruct!";
   return nullptr;

@@ -22,8 +22,8 @@ rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> ObjCToNativeVideoCapturer(
     rtc::Thread *signaling_thread,
     rtc::Thread *worker_thread) {
   RTCObjCVideoSourceAdapter *adapter = [[RTCObjCVideoSourceAdapter alloc] init];
-  rtc::scoped_refptr<webrtc::ObjCVideoTrackSource> objc_video_track_source(
-      new rtc::RefCountedObject<webrtc::ObjCVideoTrackSource>(adapter));
+  rtc::scoped_refptr<webrtc::ObjCVideoTrackSource> objc_video_track_source =
+      rtc::make_ref_counted<webrtc::ObjCVideoTrackSource>(adapter);
   rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> video_source =
       webrtc::CreateVideoTrackSourceProxy(signaling_thread, worker_thread, objc_video_track_source);

@@ -16,7 +16,7 @@ namespace webrtc {
 rtc::scoped_refptr<VideoFrameBuffer> ObjCToNativeVideoFrameBuffer(
     id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> objc_video_frame_buffer) {
-  return new rtc::RefCountedObject<ObjCFrameBuffer>(objc_video_frame_buffer);
+  return rtc::make_ref_counted<ObjCFrameBuffer>(objc_video_frame_buffer);
 }
 id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> NativeToObjCVideoFrameBuffer(

@@ -67,7 +67,7 @@ int ObjCFrameBuffer::height() const {
 rtc::scoped_refptr<I420BufferInterface> ObjCFrameBuffer::ToI420() {
   rtc::scoped_refptr<I420BufferInterface> buffer =
-      new rtc::RefCountedObject<ObjCI420FrameBuffer>([frame_buffer_ toI420]);
+      rtc::make_ref_counted<ObjCI420FrameBuffer>([frame_buffer_ toI420]);
   return buffer;
 }

@@ -57,7 +57,7 @@ class ObjCVideoDecoder : public VideoDecoder {
   int32_t RegisterDecodeCompleteCallback(DecodedImageCallback *callback) override {
     [decoder_ setCallback:^(RTC_OBJC_TYPE(RTCVideoFrame) * frame) {
       const rtc::scoped_refptr<VideoFrameBuffer> buffer =
-          new rtc::RefCountedObject<ObjCFrameBuffer>(frame.buffer);
+          rtc::make_ref_counted<ObjCFrameBuffer>(frame.buffer);
       VideoFrame videoFrame =
           VideoFrame::Builder()
               .set_video_frame_buffer(buffer)

@@ -91,12 +91,12 @@ void ObjCVideoTrackSource::OnCapturedFrame(RTC_OBJC_TYPE(RTCVideoFrame) * frame)
   rtc::scoped_refptr<VideoFrameBuffer> buffer;
   if (adapted_width == frame.width && adapted_height == frame.height) {
     // No adaption - optimized path.
-    buffer = new rtc::RefCountedObject<ObjCFrameBuffer>(frame.buffer);
+    buffer = rtc::make_ref_counted<ObjCFrameBuffer>(frame.buffer);
   } else if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) {
     // Adapted CVPixelBuffer frame.
     RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
         (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
-    buffer = new rtc::RefCountedObject<ObjCFrameBuffer>([[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc]
+    buffer = rtc::make_ref_counted<ObjCFrameBuffer>([[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc]
         initWithPixelBuffer:rtcPixelBuffer.pixelBuffer
                adaptedWidth:adapted_width
              adaptedHeight:adapted_height
@@ -108,7 +108,7 @@ void ObjCVideoTrackSource::OnCapturedFrame(RTC_OBJC_TYPE(RTCVideoFrame) * frame)
     // Adapted I420 frame.
     // TODO(magjed): Optimize this I420 path.
     rtc::scoped_refptr<I420Buffer> i420_buffer = I420Buffer::Create(adapted_width, adapted_height);
-    buffer = new rtc::RefCountedObject<ObjCFrameBuffer>(frame.buffer);
+    buffer = rtc::make_ref_counted<ObjCFrameBuffer>(frame.buffer);
     i420_buffer->CropAndScaleFrom(*buffer->ToI420(), crop_x, crop_y, crop_width, crop_height);
     buffer = i420_buffer;
   }

@@ -51,7 +51,7 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface<webrtc::VideoFrame>
 }
 - (void)setUp {
-  _video_source = new rtc::RefCountedObject<webrtc::ObjCVideoTrackSource>();
+  _video_source = rtc::make_ref_counted<webrtc::ObjCVideoTrackSource>();
 }
 - (void)tearDown {

@@ -83,7 +83,7 @@ TEST(ObjCVideoEncoderFactoryTest, EncodeReturnsOKOnSuccess) {
   CVPixelBufferRef pixel_buffer;
   CVPixelBufferCreate(kCFAllocatorDefault, 640, 480, kCVPixelFormatType_32ARGB, nil, &pixel_buffer);
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
-      new rtc::RefCountedObject<webrtc::ObjCFrameBuffer>(
+      rtc::make_ref_counted<webrtc::ObjCFrameBuffer>(
          [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixel_buffer]);
   webrtc::VideoFrame frame = webrtc::VideoFrame::Builder()
                                  .set_video_frame_buffer(buffer)
@@ -101,7 +101,7 @@ TEST(ObjCVideoEncoderFactoryTest, EncodeReturnsErrorOnFail) {
   CVPixelBufferRef pixel_buffer;
   CVPixelBufferCreate(kCFAllocatorDefault, 640, 480, kCVPixelFormatType_32ARGB, nil, &pixel_buffer);
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
-      new rtc::RefCountedObject<webrtc::ObjCFrameBuffer>(
+      rtc::make_ref_counted<webrtc::ObjCFrameBuffer>(
          [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixel_buffer]);
   webrtc::VideoFrame frame = webrtc::VideoFrame::Builder()
                                  .set_video_frame_buffer(buffer)

@@ -26,7 +26,7 @@
     didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
   const int64_t timestamp_us = frame.timeStampNs / rtc::kNumNanosecsPerMicrosec;
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
-      new rtc::RefCountedObject<webrtc::ObjCFrameBuffer>(frame.buffer);
+      rtc::make_ref_counted<webrtc::ObjCFrameBuffer>(frame.buffer);
   _capturer->OnFrame(webrtc::VideoFrame::Builder()
                          .set_video_frame_buffer(buffer)
                          .set_rotation(webrtc::kVideoRotation_0)