Update test/ to not use implicit conversion from scoped_refptr<T> to T*.

Bug: webrtc:13464
Change-Id: I55750dc842adf0d854bbc45e593c0e251064f9d6
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/259771
Reviewed-by: Artem Titov <titovartem@webrtc.org>
Commit-Queue: Niels Moller <nisse@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#36623}
This commit is contained in:
Niels Möller 2022-04-22 11:14:34 +02:00 committed by WebRTC LUCI CQ
parent 58cc468d16
commit 3c4f9c13f5
8 changed files with 21 additions and 16 deletions

View file

@@ -49,7 +49,7 @@ int32_t FakeDecoder::Decode(const EncodedImage& input,
} }
rtc::scoped_refptr<I420Buffer> buffer = I420Buffer::Create(width_, height_); rtc::scoped_refptr<I420Buffer> buffer = I420Buffer::Create(width_, height_);
I420Buffer::SetBlack(buffer); I420Buffer::SetBlack(buffer.get());
VideoFrame frame = VideoFrame::Builder() VideoFrame frame = VideoFrame::Builder()
.set_video_frame_buffer(buffer) .set_video_frame_buffer(buffer)
.set_rotation(webrtc::kVideoRotation_0) .set_rotation(webrtc::kVideoRotation_0)

View file

@@ -44,7 +44,7 @@ int FakeNativeBuffer::height() const {
rtc::scoped_refptr<I420BufferInterface> FakeNativeBuffer::ToI420() { rtc::scoped_refptr<I420BufferInterface> FakeNativeBuffer::ToI420() {
rtc::scoped_refptr<I420Buffer> buffer = I420Buffer::Create(width_, height_); rtc::scoped_refptr<I420Buffer> buffer = I420Buffer::Create(width_, height_);
I420Buffer::SetBlack(buffer); I420Buffer::SetBlack(buffer.get());
return buffer; return buffer;
} }

View file

@@ -163,7 +163,7 @@ MappableNativeBuffer::GetOrCreateMappedBuffer(int width, int height) {
case VideoFrameBuffer::Type::kI420: { case VideoFrameBuffer::Type::kI420: {
rtc::scoped_refptr<I420Buffer> i420_buffer = rtc::scoped_refptr<I420Buffer> i420_buffer =
I420Buffer::Create(width, height); I420Buffer::Create(width, height);
I420Buffer::SetBlack(i420_buffer); I420Buffer::SetBlack(i420_buffer.get());
mapped_buffer = i420_buffer; mapped_buffer = i420_buffer;
break; break;
} }

View file

@@ -167,7 +167,7 @@ TEST(NetworkEmulationManagerPCTest, Run) {
rtc::scoped_refptr<webrtc::AudioSourceInterface> source = rtc::scoped_refptr<webrtc::AudioSourceInterface> source =
alice_pcf->CreateAudioSource(cricket::AudioOptions()); alice_pcf->CreateAudioSource(cricket::AudioOptions());
rtc::scoped_refptr<AudioTrackInterface> track = rtc::scoped_refptr<AudioTrackInterface> track =
alice_pcf->CreateAudioTrack("audio", source); alice_pcf->CreateAudioTrack("audio", source.get());
alice->AddTransceiver(track); alice->AddTransceiver(track);
// Connect peers. // Connect peers.
@@ -276,7 +276,7 @@ TEST(NetworkEmulationManagerPCTest, RunTURN) {
rtc::scoped_refptr<webrtc::AudioSourceInterface> source = rtc::scoped_refptr<webrtc::AudioSourceInterface> source =
alice_pcf->CreateAudioSource(cricket::AudioOptions()); alice_pcf->CreateAudioSource(cricket::AudioOptions());
rtc::scoped_refptr<AudioTrackInterface> track = rtc::scoped_refptr<AudioTrackInterface> track =
alice_pcf->CreateAudioTrack("audio", source); alice_pcf->CreateAudioTrack("audio", source.get());
alice->AddTransceiver(track); alice->AddTransceiver(track);
// Connect peers. // Connect peers.

View file

@@ -40,7 +40,8 @@ void MediaHelper::MaybeAddAudio(TestPeer* peer) {
rtc::scoped_refptr<webrtc::AudioSourceInterface> source = rtc::scoped_refptr<webrtc::AudioSourceInterface> source =
peer->pc_factory()->CreateAudioSource(audio_config.audio_options); peer->pc_factory()->CreateAudioSource(audio_config.audio_options);
rtc::scoped_refptr<AudioTrackInterface> track = rtc::scoped_refptr<AudioTrackInterface> track =
peer->pc_factory()->CreateAudioTrack(*audio_config.stream_label, source); peer->pc_factory()->CreateAudioTrack(*audio_config.stream_label,
source.get());
std::string sync_group = audio_config.sync_group std::string sync_group = audio_config.sync_group
? audio_config.sync_group.value() ? audio_config.sync_group.value()
: audio_config.stream_label.value(); : audio_config.stream_label.value();
@@ -71,7 +72,7 @@ MediaHelper::MaybeAddVideo(TestPeer* peer) {
<< video_config.stream_label.value(); << video_config.stream_label.value();
rtc::scoped_refptr<VideoTrackInterface> track = rtc::scoped_refptr<VideoTrackInterface> track =
peer->pc_factory()->CreateVideoTrack(video_config.stream_label.value(), peer->pc_factory()->CreateVideoTrack(video_config.stream_label.value(),
source); source.get());
if (video_config.content_hint.has_value()) { if (video_config.content_hint.has_value()) {
track->set_content_hint(video_config.content_hint.value()); track->set_content_hint(video_config.content_hint.value());
} }

View file

@@ -306,7 +306,7 @@ PeerScenarioClient::AudioSendTrack PeerScenarioClient::CreateAudio(
RTC_DCHECK_RUN_ON(signaling_thread_); RTC_DCHECK_RUN_ON(signaling_thread_);
AudioSendTrack res; AudioSendTrack res;
auto source = pc_factory_->CreateAudioSource(options); auto source = pc_factory_->CreateAudioSource(options);
auto track = pc_factory_->CreateAudioTrack(track_id, source); auto track = pc_factory_->CreateAudioTrack(track_id, source.get());
res.track = track; res.track = track;
res.sender = peer_connection_->AddTrack(track, {kCommonStreamId}).value(); res.sender = peer_connection_->AddTrack(track, {kCommonStreamId}).value();
return res; return res;
@@ -323,9 +323,10 @@ PeerScenarioClient::VideoSendTrack PeerScenarioClient::CreateVideo(
capturer->Init(); capturer->Init();
res.source = rtc::make_ref_counted<FrameGeneratorCapturerVideoTrackSource>( res.source = rtc::make_ref_counted<FrameGeneratorCapturerVideoTrackSource>(
std::move(capturer), config.screencast); std::move(capturer), config.screencast);
auto track = pc_factory_->CreateVideoTrack(track_id, res.source); auto track = pc_factory_->CreateVideoTrack(track_id, res.source.get());
res.track = track; res.track = track.get();
res.sender = peer_connection_->AddTrack(track, {kCommonStreamId}).MoveValue(); res.sender =
peer_connection_->AddTrack(track, {kCommonStreamId}).MoveValue().get();
return res; return res;
} }
@@ -355,7 +356,8 @@ void PeerScenarioClient::CreateAndSetSdp(
[sdp_offer, offer_handler](RTCError) { [sdp_offer, offer_handler](RTCError) {
offer_handler(sdp_offer); offer_handler(sdp_offer);
})); }));
}), })
.get(),
PeerConnectionInterface::RTCOfferAnswerOptions()); PeerConnectionInterface::RTCOfferAnswerOptions());
} }
@@ -385,7 +387,8 @@ void PeerScenarioClient::SetSdpOfferAndGetAnswer(
[answer_handler, sdp_answer](RTCError) { [answer_handler, sdp_answer](RTCError) {
answer_handler(sdp_answer); answer_handler(sdp_answer);
})); }));
}), })
.get(),
PeerConnectionInterface::RTCOfferAnswerOptions()); PeerConnectionInterface::RTCOfferAnswerOptions());
})); }));
} }

View file

@@ -29,7 +29,7 @@ TEST(PeerScenarioQualityTest, MAYBE_PsnrIsCollected) {
video_conf.generator.squares_video->framerate = 20; video_conf.generator.squares_video->framerate = 20;
auto video = caller->CreateVideo("VIDEO", video_conf); auto video = caller->CreateVideo("VIDEO", video_conf);
auto link_builder = s.net()->NodeBuilder().delay_ms(100).capacity_kbps(600); auto link_builder = s.net()->NodeBuilder().delay_ms(100).capacity_kbps(600);
s.AttachVideoQualityAnalyzer(&analyzer, video.track, callee); s.AttachVideoQualityAnalyzer(&analyzer, video.track.get(), callee);
s.SimpleConnection(caller, callee, {link_builder.Build().node}, s.SimpleConnection(caller, callee, {link_builder.Build().node},
{link_builder.Build().node}); {link_builder.Build().node});
s.ProcessMessages(TimeDelta::Seconds(2)); s.ProcessMessages(TimeDelta::Seconds(2));

View file

@@ -194,8 +194,9 @@ void D3dRenderer::OnFrame(const webrtc::VideoFrame& frame) {
d3d_device_->BeginScene(); d3d_device_->BeginScene();
d3d_device_->SetFVF(D3DFVF_CUSTOMVERTEX); d3d_device_->SetFVF(D3DFVF_CUSTOMVERTEX);
d3d_device_->SetStreamSource(0, vertex_buffer_, 0, sizeof(D3dCustomVertex)); d3d_device_->SetStreamSource(0, vertex_buffer_.get(), 0,
d3d_device_->SetTexture(0, texture_); sizeof(D3dCustomVertex));
d3d_device_->SetTexture(0, texture_.get());
d3d_device_->DrawPrimitive(D3DPT_TRIANGLESTRIP, 0, 2); d3d_device_->DrawPrimitive(D3DPT_TRIANGLESTRIP, 0, 2);
d3d_device_->EndScene(); d3d_device_->EndScene();