Concatenate string literals at compile time.

This CL was generated by running:
git ls-files | grep ".cc" | xargs perl -i -ne 'BEGIN {undef $/}; s/("[\s\n]*<<[\s\n]*")/" "/g; print;'; git cl format

After that I manually edited modules/audio_processing/gain_controller2.cc to preserve its original
formatting.

The primary benefit of this change is a small reduction in binary size.

Bug: None
Change-Id: I689fa7ba9c717c314bb167e5d592c3c4e0871e29
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/165961
Reviewed-by: Alessio Bazzica <alessiob@webrtc.org>
Reviewed-by: Karl Wiberg <kwiberg@webrtc.org>
Commit-Queue: Jonas Olsson <jonasolsson@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#30251}
Authored by Jonas Olsson on 2020-01-14 12:11:31 +01:00, committed by Commit Bot
parent 6153e15d31
commit b2b2031457
98 changed files with 544 additions and 353 deletions

View file

@ -112,18 +112,21 @@ std::string VideoEncoder::EncoderInfo::ToString() const {
rtc::SimpleStringBuilder oss(string_buf);
oss << "EncoderInfo { "
<< "ScalingSettings { ";
"ScalingSettings { ";
if (scaling_settings.thresholds) {
oss << "Thresholds { "
<< "low = " << scaling_settings.thresholds->low
"low = "
<< scaling_settings.thresholds->low
<< ", high = " << scaling_settings.thresholds->high << "}, ";
}
oss << "min_pixels_per_frame = " << scaling_settings.min_pixels_per_frame
<< " }";
oss << ", requested_resolution_alignment = " << requested_resolution_alignment
<< ", supports_native_handle = " << supports_native_handle
<< ", implementation_name = '" << implementation_name << "'"
<< ", has_trusted_rate_controller = " << has_trusted_rate_controller
<< ", implementation_name = '" << implementation_name
<< "'"
", has_trusted_rate_controller = "
<< has_trusted_rate_controller
<< ", is_hardware_accelerated = " << is_hardware_accelerated
<< ", has_internal_source = " << has_internal_source
<< ", fps_allocation = [";
@ -154,13 +157,15 @@ std::string VideoEncoder::EncoderInfo::ToString() const {
}
ResolutionBitrateLimits l = resolution_bitrate_limits[i];
oss << "Limits { "
<< "frame_size_pixels = " << l.frame_size_pixels
"frame_size_pixels = "
<< l.frame_size_pixels
<< ", min_start_bitrate_bps = " << l.min_start_bitrate_bps
<< ", min_bitrate_bps = " << l.min_bitrate_bps
<< ", max_bitrate_bps = " << l.max_bitrate_bps << "} ";
}
oss << "] "
<< ", supports_simulcast = " << supports_simulcast << "}";
", supports_simulcast = "
<< supports_simulcast << "}";
return oss.str();
}

View file

@ -834,7 +834,7 @@ void ChannelReceive::UpdatePlayoutTimestamp(bool rtcp, int64_t now_ms) {
if (_audioDeviceModulePtr->PlayoutDelay(&delay_ms) == -1) {
RTC_DLOG(LS_WARNING)
<< "ChannelReceive::UpdatePlayoutTimestamp() failed to read"
<< " playout delay from the ADM";
" playout delay from the ADM";
return;
}

View file

@ -420,7 +420,7 @@ int32_t ChannelSend::SendRtpAudio(AudioFrameType frameType,
payload = encrypted_audio_payload;
} else if (crypto_options_.sframe.require_frame_encryption) {
RTC_DLOG(LS_ERROR) << "Channel::SendData() failed sending audio payload: "
<< "A frame encryptor is required but one is not set.";
"A frame encryptor is required but one is not set.";
return -1;
}
}

View file

@ -87,7 +87,7 @@ std::unique_ptr<FlexfecReceiver> MaybeCreateFlexfecReceiver(
if (config.payload_type < 0) {
RTC_LOG(LS_WARNING)
<< "Invalid FlexFEC payload type given. "
<< "This FlexfecReceiveStream will therefore be useless.";
"This FlexfecReceiveStream will therefore be useless.";
return nullptr;
}
RTC_DCHECK_GE(config.payload_type, 0);
@ -95,13 +95,13 @@ std::unique_ptr<FlexfecReceiver> MaybeCreateFlexfecReceiver(
if (config.remote_ssrc == 0) {
RTC_LOG(LS_WARNING)
<< "Invalid FlexFEC SSRC given. "
<< "This FlexfecReceiveStream will therefore be useless.";
"This FlexfecReceiveStream will therefore be useless.";
return nullptr;
}
if (config.protected_media_ssrcs.empty()) {
RTC_LOG(LS_WARNING)
<< "No protected media SSRC supplied. "
<< "This FlexfecReceiveStream will therefore be useless.";
"This FlexfecReceiveStream will therefore be useless.";
return nullptr;
}

View file

@ -25,7 +25,8 @@ RtpStreamReceiverController::Receiver::Receiver(
if (!sink_added) {
RTC_LOG(LS_ERROR)
<< "RtpStreamReceiverController::Receiver::Receiver: Sink "
<< "could not be added for SSRC=" << ssrc << ".";
"could not be added for SSRC="
<< ssrc << ".";
}
}

View file

@ -390,7 +390,7 @@ void RtpTransportControllerSend::SetSdpBitrateParameters(
} else {
RTC_LOG(LS_VERBOSE)
<< "WebRTC.RtpTransportControllerSend.SetSdpBitrateParameters: "
<< "nothing to update";
"nothing to update";
}
}
@ -411,7 +411,7 @@ void RtpTransportControllerSend::SetClientBitratePreferences(
} else {
RTC_LOG(LS_VERBOSE)
<< "WebRTC.RtpTransportControllerSend.SetClientBitratePreferences: "
<< "nothing to update";
"nothing to update";
}
}

View file

@ -100,9 +100,10 @@ void SimulatedNetwork::SetConfig(const Config& config) {
int min_avg_burst_loss_length = std::ceil(prob_loss / (1 - prob_loss));
RTC_CHECK_GT(avg_burst_loss_length, min_avg_burst_loss_length)
<< "For a total packet loss of " << config.loss_percent << "%% then"
<< " avg_burst_loss_length must be " << min_avg_burst_loss_length + 1
<< " or higher.";
<< "For a total packet loss of " << config.loss_percent
<< "%% then"
" avg_burst_loss_length must be "
<< min_avg_burst_loss_length + 1 << " or higher.";
config_state_.prob_loss_bursting = (1.0 - 1.0 / avg_burst_loss_length);
config_state_.prob_start_bursting =

View file

@ -140,7 +140,7 @@ void BitrateAdjuster::UpdateBitrate(uint32_t current_time_ms) {
float last_adjusted_bitrate_bps = adjusted_bitrate_bps_;
if (adjusted_bitrate_bps != last_adjusted_bitrate_bps) {
RTC_LOG(LS_VERBOSE) << "Adjusting encoder bitrate:"
<< "\n target_bitrate:"
"\n target_bitrate:"
<< static_cast<uint32_t>(target_bitrate_bps)
<< "\n estimated_bitrate:"
<< static_cast<uint32_t>(estimated_bitrate_bps)

View file

@ -345,7 +345,8 @@ void Conductor::OnMessageFromPeer(int peer_id, const std::string& message) {
webrtc::CreateSessionDescription(type, sdp, &error);
if (!session_description) {
RTC_LOG(WARNING) << "Can't parse received session description message. "
<< "SdpParseError was: " << error.description;
"SdpParseError was: "
<< error.description;
return;
}
RTC_LOG(INFO) << " Received session description :" << message;
@ -373,7 +374,8 @@ void Conductor::OnMessageFromPeer(int peer_id, const std::string& message) {
webrtc::CreateIceCandidate(sdp_mid, sdp_mlineindex, sdp, &error));
if (!candidate.get()) {
RTC_LOG(WARNING) << "Can't parse received candidate message. "
<< "SdpParseError was: " << error.description;
"SdpParseError was: "
<< error.description;
return;
}
if (!peer_connection_->AddIceCandidate(candidate.get())) {

View file

@ -342,7 +342,8 @@ bool SimplePeerConnection::SetRemoteDescription(const char* type,
webrtc::CreateSessionDescription(sdp_type, remote_desc, &error));
if (!session_description) {
RTC_LOG(WARNING) << "Can't parse received session description message. "
<< "SdpParseError was: " << error.description;
"SdpParseError was: "
<< error.description;
return false;
}
RTC_LOG(INFO) << " Received session description :" << remote_desc;
@ -363,7 +364,8 @@ bool SimplePeerConnection::AddIceCandidate(const char* candidate,
webrtc::CreateIceCandidate(sdp_mid, sdp_mlineindex, candidate, &error));
if (!ice_candidate.get()) {
RTC_LOG(WARNING) << "Can't parse received candidate message. "
<< "SdpParseError was: " << error.description;
"SdpParseError was: "
<< error.description;
return false;
}
if (!peer_connection_->AddIceCandidate(ice_candidate.get())) {

View file

@ -674,7 +674,8 @@ std::string RtcEventLogEncoderLegacy::EncodeVideoSendStreamConfig(
if (event.config().codecs.size() > 1) {
RTC_LOG(WARNING)
<< "LogVideoSendStreamConfig currently only supports one "
<< "codec. Logging codec :" << codec.payload_name;
"codec. Logging codec :"
<< codec.payload_name;
break;
}
}

View file

@ -249,7 +249,9 @@ int main(int argc, char* argv[]) {
event_processor.ProcessEventsInOrder();
std::cout << "Wrote " << rtp_counter << (header_only ? " header-only" : "")
<< " RTP packets and " << rtcp_counter << " RTCP packets to the "
<< "output file." << std::endl;
<< " RTP packets and " << rtcp_counter
<< " RTCP packets to the "
"output file."
<< std::endl;
return 0;
}

View file

@ -93,7 +93,8 @@ bool RtcEventLogImpl::StartLogging(std::unique_ptr<RtcEventLogOutput> output,
const int64_t timestamp_us = rtc::TimeMicros();
const int64_t utc_time_us = rtc::TimeUTCMicros();
RTC_LOG(LS_INFO) << "Starting WebRTC event log. (Timestamp, UTC) = "
<< "(" << timestamp_us << ", " << utc_time_us << ").";
"("
<< timestamp_us << ", " << utc_time_us << ").";
RTC_DCHECK_RUN_ON(&logging_state_checker_);
logging_state_started_ = true;

View file

@ -319,8 +319,8 @@ bool RtpDataMediaChannel::SendData(const SendDataParams& params,
packet.AppendData(payload);
RTC_LOG(LS_VERBOSE) << "Sent RTP data packet: "
<< " stream=" << found_stream->id
<< " ssrc=" << header.ssrc
" stream="
<< found_stream->id << " ssrc=" << header.ssrc
<< ", seqnum=" << header.seq_num
<< ", timestamp=" << header.timestamp
<< ", len=" << payload.size();

View file

@ -735,7 +735,7 @@ void WebRtcVideoChannel::RequestEncoderSwitch(
if (!allow_codec_switching_) {
RTC_LOG(LS_INFO) << "Encoder switch requested but codec switching has"
<< " not been enabled yet.";
" not been enabled yet.";
requested_encoder_switch_ = conf;
return;
}
@ -857,7 +857,8 @@ webrtc::RtpParameters WebRtcVideoChannel::GetRtpSendParameters(
auto it = send_streams_.find(ssrc);
if (it == send_streams_.end()) {
RTC_LOG(LS_WARNING) << "Attempting to get RTP send parameters for stream "
<< "with ssrc " << ssrc << " which doesn't exist.";
"with ssrc "
<< ssrc << " which doesn't exist.";
return webrtc::RtpParameters();
}
@ -878,7 +879,8 @@ webrtc::RTCError WebRtcVideoChannel::SetRtpSendParameters(
auto it = send_streams_.find(ssrc);
if (it == send_streams_.end()) {
RTC_LOG(LS_ERROR) << "Attempting to set RTP send parameters for stream "
<< "with ssrc " << ssrc << " which doesn't exist.";
"with ssrc "
<< ssrc << " which doesn't exist.";
return webrtc::RTCError(webrtc::RTCErrorType::INTERNAL_ERROR);
}
@ -887,7 +889,7 @@ webrtc::RTCError WebRtcVideoChannel::SetRtpSendParameters(
webrtc::RtpParameters current_parameters = GetRtpSendParameters(ssrc);
if (current_parameters.codecs != parameters.codecs) {
RTC_DLOG(LS_ERROR) << "Using SetParameters to change the set of codecs "
<< "is not currently supported.";
"is not currently supported.";
return webrtc::RTCError(webrtc::RTCErrorType::INTERNAL_ERROR);
}
@ -922,7 +924,8 @@ webrtc::RtpParameters WebRtcVideoChannel::GetRtpReceiveParameters(
if (it == receive_streams_.end()) {
RTC_LOG(LS_WARNING)
<< "Attempting to get RTP receive parameters for stream "
<< "with SSRC " << ssrc << " which doesn't exist.";
"with SSRC "
<< ssrc << " which doesn't exist.";
return webrtc::RtpParameters();
}
rtp_params = it->second->GetRtpParameters();
@ -2736,7 +2739,8 @@ void WebRtcVideoChannel::WebRtcVideoReceiveStream::SetFrameDecryptor(
if (stream_) {
RTC_LOG(LS_INFO)
<< "Setting FrameDecryptor (recv) because of SetFrameDecryptor, "
<< "remote_ssrc=" << config_.rtp.remote_ssrc;
"remote_ssrc="
<< config_.rtp.remote_ssrc;
stream_->SetFrameDecryptor(frame_decryptor);
}
}

View file

@ -170,9 +170,10 @@ absl::optional<int> ComputeSendBitrate(int max_send_bitrate_bps,
// fail. If codec is not multi-rate and |bps| exceeds or equal the fixed
// bitrate then ignore.
RTC_LOG(LS_ERROR) << "Failed to set codec " << spec.format.name
<< " to bitrate " << bps << " bps"
<< ", requires at least " << spec.info.min_bitrate_bps
<< " bps.";
<< " to bitrate " << bps
<< " bps"
", requires at least "
<< spec.info.min_bitrate_bps << " bps.";
return absl::nullopt;
}
@ -1181,7 +1182,7 @@ class WebRtcVoiceMediaChannel::WebRtcAudioReceiveStream {
return true;
} else {
RTC_LOG(LS_ERROR) << "Failed to SetBaseMinimumPlayoutDelayMs"
<< " on AudioReceiveStream on SSRC="
" on AudioReceiveStream on SSRC="
<< config_.rtp.remote_ssrc
<< " with delay_ms=" << delay_ms;
return false;
@ -1351,7 +1352,8 @@ webrtc::RtpParameters WebRtcVoiceMediaChannel::GetRtpSendParameters(
auto it = send_streams_.find(ssrc);
if (it == send_streams_.end()) {
RTC_LOG(LS_WARNING) << "Attempting to get RTP send parameters for stream "
<< "with ssrc " << ssrc << " which doesn't exist.";
"with ssrc "
<< ssrc << " which doesn't exist.";
return webrtc::RtpParameters();
}
@ -1371,7 +1373,8 @@ webrtc::RTCError WebRtcVoiceMediaChannel::SetRtpSendParameters(
auto it = send_streams_.find(ssrc);
if (it == send_streams_.end()) {
RTC_LOG(LS_WARNING) << "Attempting to set RTP send parameters for stream "
<< "with ssrc " << ssrc << " which doesn't exist.";
"with ssrc "
<< ssrc << " which doesn't exist.";
return webrtc::RTCError(webrtc::RTCErrorType::INTERNAL_ERROR);
}
@ -1380,7 +1383,7 @@ webrtc::RTCError WebRtcVoiceMediaChannel::SetRtpSendParameters(
webrtc::RtpParameters current_parameters = GetRtpSendParameters(ssrc);
if (current_parameters.codecs != parameters.codecs) {
RTC_DLOG(LS_ERROR) << "Using SetParameters to change the set of codecs "
<< "is not currently supported.";
"is not currently supported.";
return webrtc::RTCError(webrtc::RTCErrorType::UNSUPPORTED_PARAMETER);
}
@ -1426,7 +1429,8 @@ webrtc::RtpParameters WebRtcVoiceMediaChannel::GetRtpReceiveParameters(
if (it == recv_streams_.end()) {
RTC_LOG(LS_WARNING)
<< "Attempting to get RTP receive parameters for stream "
<< "with ssrc " << ssrc << " which doesn't exist.";
"with ssrc "
<< ssrc << " which doesn't exist.";
return webrtc::RtpParameters();
}
rtp_params = it->second->GetRtpParameters();

View file

@ -284,7 +284,8 @@ class SctpTransport::UsrSctpWrapper {
uint8_t set_df) {
SctpTransport* transport = static_cast<SctpTransport*>(addr);
RTC_LOG(LS_VERBOSE) << "global OnSctpOutboundPacket():"
<< "addr: " << addr << "; length: " << length
"addr: "
<< addr << "; length: " << length
<< "; tos: " << rtc::ToHex(tos)
<< "; set_df: " << rtc::ToHex(set_df);
@ -511,9 +512,11 @@ bool SctpTransport::Start(int local_sctp_port,
bool SctpTransport::OpenStream(int sid) {
RTC_DCHECK_RUN_ON(network_thread_);
if (sid > kMaxSctpSid) {
RTC_LOG(LS_WARNING) << debug_name_ << "->OpenStream(...): "
<< "Not adding data stream "
<< "with sid=" << sid << " because sid is too high.";
RTC_LOG(LS_WARNING) << debug_name_
<< "->OpenStream(...): "
"Not adding data stream "
"with sid="
<< sid << " because sid is too high.";
return false;
}
auto it = stream_status_by_sid_.find(sid);
@ -522,16 +525,18 @@ bool SctpTransport::OpenStream(int sid) {
return true;
}
if (it->second.is_open()) {
RTC_LOG(LS_WARNING) << debug_name_ << "->OpenStream(...): "
<< "Not adding data stream "
<< "with sid=" << sid
<< " because stream is already open.";
RTC_LOG(LS_WARNING) << debug_name_
<< "->OpenStream(...): "
"Not adding data stream "
"with sid="
<< sid << " because stream is already open.";
return false;
} else {
RTC_LOG(LS_WARNING) << debug_name_ << "->OpenStream(...): "
<< "Not adding data stream "
<< " with sid=" << sid
<< " because stream is still closing.";
RTC_LOG(LS_WARNING) << debug_name_
<< "->OpenStream(...): "
"Not adding data stream "
" with sid="
<< sid << " because stream is still closing.";
return false;
}
}
@ -546,8 +551,9 @@ bool SctpTransport::ResetStream(int sid) {
return false;
}
RTC_LOG(LS_VERBOSE) << debug_name_ << "->ResetStream(" << sid << "): "
<< "Queuing RE-CONFIG chunk.";
RTC_LOG(LS_VERBOSE) << debug_name_ << "->ResetStream(" << sid
<< "): "
"Queuing RE-CONFIG chunk.";
it->second.closure_initiated = true;
// Signal our stream-reset logic that it should try to send now, if it can.
@ -597,8 +603,9 @@ bool SctpTransport::SendData(const SendDataParams& params,
SendDataResult SctpTransport::SendMessageInternal(OutgoingMessage* message) {
RTC_DCHECK_RUN_ON(network_thread_);
if (!sock_) {
RTC_LOG(LS_WARNING) << debug_name_ << "->SendMessageInternal(...): "
<< "Not sending packet with sid="
RTC_LOG(LS_WARNING) << debug_name_
<< "->SendMessageInternal(...): "
"Not sending packet with sid="
<< message->send_params().sid
<< " len=" << message->size() << " before Start().";
return SDR_ERROR;
@ -607,8 +614,9 @@ SendDataResult SctpTransport::SendMessageInternal(OutgoingMessage* message) {
auto it = stream_status_by_sid_.find(message->send_params().sid);
if (it == stream_status_by_sid_.end() || !it->second.is_open()) {
RTC_LOG(LS_WARNING)
<< debug_name_ << "->SendMessageInternal(...): "
<< "Not sending data because sid is unknown or closing: "
<< debug_name_
<< "->SendMessageInternal(...): "
"Not sending data because sid is unknown or closing: "
<< message->send_params().sid;
return SDR_ERROR;
}
@ -636,9 +644,9 @@ SendDataResult SctpTransport::SendMessageInternal(OutgoingMessage* message) {
return SDR_BLOCK;
}
RTC_LOG_ERRNO(LS_ERROR)
<< "ERROR:" << debug_name_ << "->SendMessageInternal(...): "
<< " usrsctp_sendv: ";
RTC_LOG_ERRNO(LS_ERROR) << "ERROR:" << debug_name_
<< "->SendMessageInternal(...): "
" usrsctp_sendv: ";
return SDR_ERROR;
}
@ -711,9 +719,10 @@ bool SctpTransport::Connect() {
int connect_result = usrsctp_connect(
sock_, reinterpret_cast<sockaddr*>(&remote_sconn), sizeof(remote_sconn));
if (connect_result < 0 && errno != SCTP_EINPROGRESS) {
RTC_LOG_ERRNO(LS_ERROR) << debug_name_ << "->Connect(): "
<< "Failed usrsctp_connect. got errno=" << errno
<< ", but wanted " << SCTP_EINPROGRESS;
RTC_LOG_ERRNO(LS_ERROR) << debug_name_
<< "->Connect(): "
"Failed usrsctp_connect. got errno="
<< errno << ", but wanted " << SCTP_EINPROGRESS;
CloseSctpSocket();
return false;
}
@ -727,8 +736,9 @@ bool SctpTransport::Connect() {
params.spp_pathmtu = kSctpMtu - sizeof(struct sctp_common_header);
if (usrsctp_setsockopt(sock_, IPPROTO_SCTP, SCTP_PEER_ADDR_PARAMS, &params,
sizeof(params))) {
RTC_LOG_ERRNO(LS_ERROR) << debug_name_ << "->Connect(): "
<< "Failed to set SCTP_PEER_ADDR_PARAMS.";
RTC_LOG_ERRNO(LS_ERROR) << debug_name_
<< "->Connect(): "
"Failed to set SCTP_PEER_ADDR_PARAMS.";
}
// Since this is a fresh SCTP association, we'll always start out with empty
// queues, so "ReadyToSendData" should be true.
@ -739,8 +749,9 @@ bool SctpTransport::Connect() {
bool SctpTransport::OpenSctpSocket() {
RTC_DCHECK_RUN_ON(network_thread_);
if (sock_) {
RTC_LOG(LS_WARNING) << debug_name_ << "->OpenSctpSocket(): "
<< "Ignoring attempt to re-create existing socket.";
RTC_LOG(LS_WARNING) << debug_name_
<< "->OpenSctpSocket(): "
"Ignoring attempt to re-create existing socket.";
return false;
}
@ -755,8 +766,9 @@ bool SctpTransport::OpenSctpSocket() {
AF_CONN, SOCK_STREAM, IPPROTO_SCTP, &UsrSctpWrapper::OnSctpInboundPacket,
&UsrSctpWrapper::SendThresholdCallback, kSendThreshold, this);
if (!sock_) {
RTC_LOG_ERRNO(LS_ERROR) << debug_name_ << "->OpenSctpSocket(): "
<< "Failed to create SCTP socket.";
RTC_LOG_ERRNO(LS_ERROR) << debug_name_
<< "->OpenSctpSocket(): "
"Failed to create SCTP socket.";
UsrSctpWrapper::DecrementUsrSctpUsageCount();
return false;
}
@ -779,8 +791,9 @@ bool SctpTransport::ConfigureSctpSocket() {
// Make the socket non-blocking. Connect, close, shutdown etc will not block
// the thread waiting for the socket operation to complete.
if (usrsctp_set_non_blocking(sock_, 1) < 0) {
RTC_LOG_ERRNO(LS_ERROR) << debug_name_ << "->ConfigureSctpSocket(): "
<< "Failed to set SCTP to non blocking.";
RTC_LOG_ERRNO(LS_ERROR) << debug_name_
<< "->ConfigureSctpSocket(): "
"Failed to set SCTP to non blocking.";
return false;
}
@ -792,8 +805,9 @@ bool SctpTransport::ConfigureSctpSocket() {
linger_opt.l_linger = 0;
if (usrsctp_setsockopt(sock_, SOL_SOCKET, SO_LINGER, &linger_opt,
sizeof(linger_opt))) {
RTC_LOG_ERRNO(LS_ERROR) << debug_name_ << "->ConfigureSctpSocket(): "
<< "Failed to set SO_LINGER.";
RTC_LOG_ERRNO(LS_ERROR) << debug_name_
<< "->ConfigureSctpSocket(): "
"Failed to set SO_LINGER.";
return false;
}
@ -803,9 +817,9 @@ bool SctpTransport::ConfigureSctpSocket() {
stream_rst.assoc_value = 1;
if (usrsctp_setsockopt(sock_, IPPROTO_SCTP, SCTP_ENABLE_STREAM_RESET,
&stream_rst, sizeof(stream_rst))) {
RTC_LOG_ERRNO(LS_ERROR) << debug_name_ << "->ConfigureSctpSocket(): "
<< "Failed to set SCTP_ENABLE_STREAM_RESET.";
RTC_LOG_ERRNO(LS_ERROR) << debug_name_
<< "->ConfigureSctpSocket(): "
"Failed to set SCTP_ENABLE_STREAM_RESET.";
return false;
}
@ -813,8 +827,9 @@ bool SctpTransport::ConfigureSctpSocket() {
uint32_t nodelay = 1;
if (usrsctp_setsockopt(sock_, IPPROTO_SCTP, SCTP_NODELAY, &nodelay,
sizeof(nodelay))) {
RTC_LOG_ERRNO(LS_ERROR) << debug_name_ << "->ConfigureSctpSocket(): "
<< "Failed to set SCTP_NODELAY.";
RTC_LOG_ERRNO(LS_ERROR) << debug_name_
<< "->ConfigureSctpSocket(): "
"Failed to set SCTP_NODELAY.";
return false;
}
@ -822,8 +837,9 @@ bool SctpTransport::ConfigureSctpSocket() {
uint32_t eor = 1;
if (usrsctp_setsockopt(sock_, IPPROTO_SCTP, SCTP_EXPLICIT_EOR, &eor,
sizeof(eor))) {
RTC_LOG_ERRNO(LS_ERROR) << debug_name_ << "->ConfigureSctpSocket(): "
<< "Failed to set SCTP_EXPLICIT_EOR.";
RTC_LOG_ERRNO(LS_ERROR) << debug_name_
<< "->ConfigureSctpSocket(): "
"Failed to set SCTP_EXPLICIT_EOR.";
return false;
}
@ -838,10 +854,10 @@ bool SctpTransport::ConfigureSctpSocket() {
event.se_type = event_types[i];
if (usrsctp_setsockopt(sock_, IPPROTO_SCTP, SCTP_EVENT, &event,
sizeof(event)) < 0) {
RTC_LOG_ERRNO(LS_ERROR)
<< debug_name_ << "->ConfigureSctpSocket(): "
<< "Failed to set SCTP_EVENT type: " << event.se_type;
RTC_LOG_ERRNO(LS_ERROR) << debug_name_
<< "->ConfigureSctpSocket(): "
"Failed to set SCTP_EVENT type: "
<< event.se_type;
return false;
}
}
@ -974,8 +990,10 @@ void SctpTransport::OnPacketRead(rtc::PacketTransportInternal* transport,
return;
}
RTC_LOG(LS_VERBOSE) << debug_name_ << "->OnPacketRead(...): "
<< " length=" << len << ", started: " << started_;
RTC_LOG(LS_VERBOSE) << debug_name_
<< "->OnPacketRead(...): "
" length="
<< len << ", started: " << started_;
// Only give receiving packets to usrsctp after if connected. This enables two
// peers to each make a connect call, but for them not to receive an INIT
// packet before they have called connect; least the last receiver of the INIT
@ -1023,10 +1041,11 @@ void SctpTransport::OnPacketFromSctpToNetwork(
const rtc::CopyOnWriteBuffer& buffer) {
RTC_DCHECK_RUN_ON(network_thread_);
if (buffer.size() > (kSctpMtu)) {
RTC_LOG(LS_ERROR) << debug_name_ << "->OnPacketFromSctpToNetwork(...): "
<< "SCTP seems to have made a packet that is bigger "
<< "than its official MTU: " << buffer.size()
<< " vs max of " << kSctpMtu;
RTC_LOG(LS_ERROR) << debug_name_
<< "->OnPacketFromSctpToNetwork(...): "
"SCTP seems to have made a packet that is bigger "
"than its official MTU: "
<< buffer.size() << " vs max of " << kSctpMtu;
}
TRACE_EVENT0("webrtc", "SctpTransport::OnPacketFromSctpToNetwork");
@ -1048,8 +1067,9 @@ void SctpTransport::OnInboundPacketFromSctpToTransport(
RTC_DCHECK_RUN_ON(network_thread_);
RTC_LOG(LS_VERBOSE) << debug_name_
<< "->OnInboundPacketFromSctpToTransport(...): "
<< "Received SCTP data:"
<< " sid=" << params.sid
"Received SCTP data:"
" sid="
<< params.sid
<< " notification: " << (flags & MSG_NOTIFICATION)
<< " length=" << buffer.size();
// Sending a packet with data == NULL (no data) is SCTPs "close the
@ -1071,9 +1091,10 @@ void SctpTransport::OnDataFromSctpToTransport(
const ReceiveDataParams& params,
const rtc::CopyOnWriteBuffer& buffer) {
RTC_DCHECK_RUN_ON(network_thread_);
RTC_LOG(LS_VERBOSE) << debug_name_ << "->OnDataFromSctpToTransport(...): "
<< "Posting with length: " << buffer.size()
<< " on stream " << params.sid;
RTC_LOG(LS_VERBOSE) << debug_name_
<< "->OnDataFromSctpToTransport(...): "
"Posting with length: "
<< buffer.size() << " on stream " << params.sid;
// Reports all received messages to upper layers, no matter whether the sid
// is known.
SignalDataReceived(params, buffer);
@ -1119,7 +1140,7 @@ void SctpTransport::OnNotificationFromSctp(
const struct sctp_send_failed_event& ssfe =
notification.sn_send_failed_event;
RTC_LOG(LS_WARNING) << "SCTP_SEND_FAILED_EVENT: message with"
<< " PPID = "
" PPID = "
<< rtc::NetworkToHost32(ssfe.ssfe_info.snd_ppid)
<< " SID = " << ssfe.ssfe_info.snd_sid
<< " flags = " << rtc::ToHex(ssfe.ssfe_info.snd_flags)

View file

@ -53,7 +53,8 @@ DecisionLogic::DecisionLogic(NetEqController::Config config)
{&estimate_dtx_delay_, &time_stretch_cn_, &target_level_window_ms_},
field_trial_name);
RTC_LOG(LS_INFO) << "NetEq decision logic settings:"
<< " estimate_dtx_delay=" << estimate_dtx_delay_
" estimate_dtx_delay="
<< estimate_dtx_delay_
<< " time_stretch_cn=" << time_stretch_cn_
<< " target_level_window_ms=" << target_level_window_ms_;
}

View file

@ -71,7 +71,8 @@ DelayHistogramConfig GetDelayHistogramConfig() {
}
}
RTC_LOG(LS_INFO) << "Delay histogram config:"
<< " quantile=" << config.quantile
" quantile="
<< config.quantile
<< " forget_factor=" << config.forget_factor
<< " start_forget_weight="
<< config.start_forget_weight.value_or(0);

View file

@ -141,8 +141,9 @@ NetEqImpl::NetEqImpl(const NetEq::Config& config,
RTC_LOG(LS_INFO) << "NetEq config: " << config.ToString();
int fs = config.sample_rate_hz;
if (fs != 8000 && fs != 16000 && fs != 32000 && fs != 48000) {
RTC_LOG(LS_ERROR) << "Sample rate " << fs << " Hz not supported. "
<< "Changing to 8000 Hz.";
RTC_LOG(LS_ERROR) << "Sample rate " << fs
<< " Hz not supported. "
"Changing to 8000 Hz.";
fs = 8000;
}
controller_->SetMaximumDelay(config.max_delay_ms);

View file

@ -21,13 +21,24 @@ NetEqInput::PacketData::~PacketData() = default;
std::string NetEqInput::PacketData::ToString() const {
rtc::StringBuilder ss;
ss << "{"
<< "time_ms: " << static_cast<int64_t>(time_ms) << ", "
<< "header: {"
<< "pt: " << static_cast<int>(header.payloadType) << ", "
<< "sn: " << header.sequenceNumber << ", "
<< "ts: " << header.timestamp << ", "
<< "ssrc: " << header.ssrc << "}, "
<< "payload bytes: " << payload.size() << "}";
"time_ms: "
<< static_cast<int64_t>(time_ms)
<< ", "
"header: {"
"pt: "
<< static_cast<int>(header.payloadType)
<< ", "
"sn: "
<< header.sequenceNumber
<< ", "
"ts: "
<< header.timestamp
<< ", "
"ssrc: "
<< header.ssrc
<< "}, "
"payload bytes: "
<< payload.size() << "}";
return ss.Release();
}

View file

@ -242,8 +242,9 @@ bool ValidateOutputFilesOptions(bool textlog,
bool output_files_base_name_specified = !output_files_base_name.empty();
if (!textlog && !plotting && output_files_base_name_specified) {
std::cout << "Error: --output_files_base_name cannot be used without at "
<< "least one of the following flags: --textlog, --matlabplot, "
<< "--pythonplot." << std::endl;
"least one of the following flags: --textlog, --matlabplot, "
"--pythonplot."
<< std::endl;
return false;
}
// Without |output_audio_filename|, |output_files_base_name| is required when
@ -252,8 +253,9 @@ bool ValidateOutputFilesOptions(bool textlog,
if (output_audio_filename.empty() && plotting &&
!output_files_base_name_specified) {
std::cout << "Error: when no output audio file is specified and "
<< "--matlabplot and/or --pythonplot are used, "
<< "--output_files_base_name must be also used." << std::endl;
"--matlabplot and/or --pythonplot are used, "
"--output_files_base_name must be also used."
<< std::endl;
return false;
}
return true;

View file

@ -158,7 +158,8 @@ aaudio_data_callback_result_t AAudioPlayer::OnDataCallback(void* audio_data,
// utilized.
if (first_data_callback_) {
RTC_LOG(INFO) << "--- First output data callback: "
<< "device id=" << aaudio_.device_id();
"device id="
<< aaudio_.device_id();
first_data_callback_ = false;
}

View file

@ -161,7 +161,8 @@ aaudio_data_callback_result_t AAudioRecorder::OnDataCallback(
// is obtained.
if (first_data_callback_) {
RTC_LOG(INFO) << "--- First input data callback: "
<< "device id=" << aaudio_.device_id();
"device id="
<< aaudio_.device_id();
aaudio_.ClearInputStream(audio_data, num_frames);
first_data_callback_ = false;
}

View file

@ -275,7 +275,8 @@ void AudioManager::OnCacheAudioParameters(JNIEnv* env,
jint input_buffer_size) {
RTC_LOG(INFO)
<< "OnCacheAudioParameters: "
<< "hardware_aec: " << static_cast<bool>(hardware_aec)
"hardware_aec: "
<< static_cast<bool>(hardware_aec)
<< ", hardware_agc: " << static_cast<bool>(hardware_agc)
<< ", hardware_ns: " << static_cast<bool>(hardware_ns)
<< ", low_latency_output: " << static_cast<bool>(low_latency_output)

View file

@ -413,11 +413,19 @@ void AudioDeviceBuffer::LogStats(LogState state) {
abs_diff_rate_in_percent);
RTC_LOG(INFO) << "[REC : " << time_since_last << "msec, "
<< rec_sample_rate / 1000 << "kHz] callbacks: "
<< stats.rec_callbacks - last_stats_.rec_callbacks << ", "
<< "samples: " << diff_samples << ", "
<< "rate: " << static_cast<int>(rate + 0.5) << ", "
<< "rate diff: " << abs_diff_rate_in_percent << "%, "
<< "level: " << stats.max_rec_level;
<< stats.rec_callbacks - last_stats_.rec_callbacks
<< ", "
"samples: "
<< diff_samples
<< ", "
"rate: "
<< static_cast<int>(rate + 0.5)
<< ", "
"rate diff: "
<< abs_diff_rate_in_percent
<< "%, "
"level: "
<< stats.max_rec_level;
}
diff_samples = stats.play_samples - last_stats_.play_samples;
@ -431,11 +439,19 @@ void AudioDeviceBuffer::LogStats(LogState state) {
abs_diff_rate_in_percent);
RTC_LOG(INFO) << "[PLAY: " << time_since_last << "msec, "
<< play_sample_rate / 1000 << "kHz] callbacks: "
<< stats.play_callbacks - last_stats_.play_callbacks << ", "
<< "samples: " << diff_samples << ", "
<< "rate: " << static_cast<int>(rate + 0.5) << ", "
<< "rate diff: " << abs_diff_rate_in_percent << "%, "
<< "level: " << stats.max_play_level;
<< stats.play_callbacks - last_stats_.play_callbacks
<< ", "
"samples: "
<< diff_samples
<< ", "
"rate: "
<< static_cast<int>(rate + 0.5)
<< ", "
"rate diff: "
<< abs_diff_rate_in_percent
<< "%, "
"level: "
<< stats.max_play_level;
}
}
last_stats_ = stats;

View file

@ -29,8 +29,8 @@ FileAudioDevice* FileAudioDeviceFactory::CreateFileAudioDevice() {
if (!_isConfigured) {
RTC_LOG(LS_WARNING)
<< "WebRTC configured with WEBRTC_DUMMY_FILE_DEVICES but "
<< "no device files supplied. Will fall back to dummy "
<< "audio.";
"no device files supplied. Will fall back to dummy "
"audio.";
return nullptr;
}

View file

@ -181,7 +181,8 @@ int32_t AudioMixerManagerLinuxALSA::OpenSpeaker(char* deviceName) {
if (errVal < 0) {
RTC_LOG(LS_ERROR)
<< "snd_mixer_selem_register(_outputMixerHandle, NULL, NULL), "
<< "error: " << LATE(snd_strerror)(errVal);
"error: "
<< LATE(snd_strerror)(errVal);
_outputMixerHandle = NULL;
return -1;
}
@ -262,7 +263,8 @@ int32_t AudioMixerManagerLinuxALSA::OpenMicrophone(char* deviceName) {
if (errVal < 0) {
RTC_LOG(LS_ERROR)
<< "snd_mixer_selem_register(_inputMixerHandle, NULL, NULL), "
<< "error: " << LATE(snd_strerror)(errVal);
"error: "
<< LATE(snd_strerror)(errVal);
_inputMixerHandle = NULL;
return -1;

View file

@ -455,7 +455,8 @@ int32_t AudioMixerManagerLinuxPulse::StereoRecordingIsAvailable(
RTC_LOG(LS_VERBOSE)
<< "AudioMixerManagerLinuxPulse::StereoRecordingIsAvailable()"
<< " => available=" << available;
" => available="
<< available;
return 0;
}

View file

@ -1034,7 +1034,7 @@ int32_t AudioDeviceMac::InitPlayout() {
if (_outStreamFormat.mFormatFlags & kAudioFormatFlagIsNonInterleaved) {
RTC_LOG(LS_ERROR) << "Non-interleaved audio data is not supported."
<< "AudioHardware streams should not have this format.";
"AudioHardware streams should not have this format.";
return -1;
}
@ -1333,7 +1333,7 @@ int32_t AudioDeviceMac::StopRecording() {
if (!_stopEventRec.Wait(2000)) {
rtc::CritScope critScoped(&_critSect);
RTC_LOG(LS_WARNING) << "Timed out stopping the capture IOProc."
<< "We may have failed to detect a device removal.";
"We may have failed to detect a device removal.";
WEBRTC_CA_LOG_WARN(AudioDeviceStop(_inputDeviceID, _inDeviceIOProcID));
WEBRTC_CA_LOG_WARN(
AudioDeviceDestroyIOProcID(_inputDeviceID, _inDeviceIOProcID));
@ -1361,7 +1361,7 @@ int32_t AudioDeviceMac::StopRecording() {
if (!_stopEvent.Wait(2000)) {
rtc::CritScope critScoped(&_critSect);
RTC_LOG(LS_WARNING) << "Timed out stopping the shared IOProc."
<< "We may have failed to detect a device removal.";
"We may have failed to detect a device removal.";
// We assume rendering on a shared device has stopped as well if
// the IOProc times out.
WEBRTC_CA_LOG_WARN(AudioDeviceStop(_outputDeviceID, _deviceIOProcID));
@ -1468,7 +1468,7 @@ int32_t AudioDeviceMac::StopPlayout() {
if (!_stopEvent.Wait(2000)) {
rtc::CritScope critScoped(&_critSect);
RTC_LOG(LS_WARNING) << "Timed out stopping the render IOProc."
<< "We may have failed to detect a device removal.";
"We may have failed to detect a device removal.";
// We assume capturing on a shared device has stopped as well if the
// IOProc times out.

View file

@ -265,10 +265,10 @@ bool AudioDeviceWindowsCore::CoreAudioIsSupported() {
if (FAILED(hr)) {
RTC_LOG(LS_ERROR) << "AudioDeviceWindowsCore::CoreAudioIsSupported()"
<< " Failed to create the required COM object (hr=" << hr
<< ")";
" Failed to create the required COM object (hr="
<< hr << ")";
RTC_LOG(LS_VERBOSE) << "AudioDeviceWindowsCore::CoreAudioIsSupported()"
<< " CoCreateInstance(MMDeviceEnumerator) failed (hr="
" CoCreateInstance(MMDeviceEnumerator) failed (hr="
<< hr << ")";
const DWORD dwFlags =
@ -295,7 +295,8 @@ bool AudioDeviceWindowsCore::CoreAudioIsSupported() {
MMDeviceIsAvailable = true;
RTC_LOG(LS_VERBOSE)
<< "AudioDeviceWindowsCore::CoreAudioIsSupported()"
<< " CoCreateInstance(MMDeviceEnumerator) succeeded (hr=" << hr << ")";
" CoCreateInstance(MMDeviceEnumerator) succeeded (hr="
<< hr << ")";
SAFE_RELEASE(pIMMD);
}
@ -404,7 +405,7 @@ AudioDeviceWindowsCore::AudioDeviceWindowsCore()
// Handle is valid (should only happen if OS larger than vista & win7).
// Try to get the function addresses.
RTC_LOG(LS_VERBOSE) << "AudioDeviceWindowsCore::AudioDeviceWindowsCore()"
<< " The Avrt DLL module is now loaded";
" The Avrt DLL module is now loaded";
_PAvRevertMmThreadCharacteristics =
(PAvRevertMmThreadCharacteristics)GetProcAddress(
@ -419,13 +420,13 @@ AudioDeviceWindowsCore::AudioDeviceWindowsCore()
_PAvSetMmThreadCharacteristicsA && _PAvSetMmThreadPriority) {
RTC_LOG(LS_VERBOSE)
<< "AudioDeviceWindowsCore::AudioDeviceWindowsCore()"
<< " AvRevertMmThreadCharacteristics() is OK";
" AvRevertMmThreadCharacteristics() is OK";
RTC_LOG(LS_VERBOSE)
<< "AudioDeviceWindowsCore::AudioDeviceWindowsCore()"
<< " AvSetMmThreadCharacteristicsA() is OK";
" AvSetMmThreadCharacteristicsA() is OK";
RTC_LOG(LS_VERBOSE)
<< "AudioDeviceWindowsCore::AudioDeviceWindowsCore()"
<< " AvSetMmThreadPriority() is OK";
" AvSetMmThreadPriority() is OK";
_winSupportAvrt = true;
}
}
@ -535,10 +536,10 @@ AudioDeviceWindowsCore::~AudioDeviceWindowsCore() {
if (!freeOK) {
RTC_LOG(LS_WARNING)
<< "AudioDeviceWindowsCore::~AudioDeviceWindowsCore()"
<< " failed to free the loaded Avrt DLL module correctly";
" failed to free the loaded Avrt DLL module correctly";
} else {
RTC_LOG(LS_WARNING) << "AudioDeviceWindowsCore::~AudioDeviceWindowsCore()"
<< " the Avrt DLL module is now unloaded";
" the Avrt DLL module is now unloaded";
}
}
}
@ -653,7 +654,7 @@ int32_t AudioDeviceWindowsCore::InitSpeaker() {
int16_t nDevices = PlayoutDevices();
if (_outputDeviceIndex > (nDevices - 1)) {
RTC_LOG(LS_ERROR) << "current device selection is invalid => unable to"
<< " initialize";
" initialize";
return -1;
}
}
@ -722,7 +723,7 @@ int32_t AudioDeviceWindowsCore::InitMicrophone() {
int16_t nDevices = RecordingDevices();
if (_inputDeviceIndex > (nDevices - 1)) {
RTC_LOG(LS_ERROR) << "current device selection is invalid => unable to"
<< " initialize";
" initialize";
return -1;
}
}
@ -1878,8 +1879,8 @@ int32_t AudioDeviceWindowsCore::InitPlayout() {
RTC_LOG(INFO) << "nChannels=" << Wfx.nChannels
<< ", nSamplesPerSec=" << Wfx.nSamplesPerSec
<< " is not supported. Closest match: "
<< "nChannels=" << pWfxClosestMatch->nChannels
<< ", nSamplesPerSec="
"nChannels="
<< pWfxClosestMatch->nChannels << ", nSamplesPerSec="
<< pWfxClosestMatch->nSamplesPerSec;
CoTaskMemFree(pWfxClosestMatch);
pWfxClosestMatch = NULL;
@ -2199,8 +2200,8 @@ int32_t AudioDeviceWindowsCore::InitRecording() {
RTC_LOG(INFO) << "nChannels=" << Wfx.Format.nChannels
<< ", nSamplesPerSec=" << Wfx.Format.nSamplesPerSec
<< " is not supported. Closest match: "
<< "nChannels=" << pWfxClosestMatch->nChannels
<< ", nSamplesPerSec="
"nChannels="
<< pWfxClosestMatch->nChannels << ", nSamplesPerSec="
<< pWfxClosestMatch->nSamplesPerSec;
CoTaskMemFree(pWfxClosestMatch);
pWfxClosestMatch = NULL;
@ -2338,7 +2339,7 @@ int32_t AudioDeviceWindowsCore::StartRecording() {
// give it render data to process.
RTC_LOG(LS_ERROR)
<< "Playout must be started before recording when using"
<< " the built-in AEC";
" the built-in AEC";
return -1;
}
}
@ -2571,7 +2572,7 @@ int32_t AudioDeviceWindowsCore::StopPlayout() {
// playout to stop properly.
RTC_LOG(LS_WARNING)
<< "Recording should be stopped before playout when using the"
<< " built-in AEC";
" built-in AEC";
}
// Reset the playout delay value.
@ -2822,7 +2823,7 @@ DWORD AudioDeviceWindowsCore::DoRenderThread() {
_UnLock();
RTC_LOG(LS_ERROR)
<< "output state has been modified during unlocked"
<< " period";
" period";
goto Exit;
}
if (nSamples != static_cast<int32_t>(_playBlockSize)) {
@ -3261,7 +3262,7 @@ DWORD AudioDeviceWindowsCore::DoCaptureThread() {
if (_ptrCaptureClient == NULL || _ptrClientIn == NULL) {
_UnLock();
RTC_LOG(LS_ERROR) << "input state has been modified during"
<< " unlocked period";
" unlocked period";
goto Exit;
}
}
@ -3282,7 +3283,7 @@ DWORD AudioDeviceWindowsCore::DoCaptureThread() {
// IAudioClient::Stop, IAudioClient::Reset, and releasing the audio
// client.
RTC_LOG(LS_ERROR) << "IAudioCaptureClient::GetBuffer returned"
<< " AUDCLNT_E_BUFFER_ERROR, hr = 0x"
" AUDCLNT_E_BUFFER_ERROR, hr = 0x"
<< rtc::ToHex(hr);
goto Exit;
}
@ -3815,14 +3816,16 @@ int32_t AudioDeviceWindowsCore::_GetDeviceName(IMMDevice* pDevice,
if ((SUCCEEDED(hr)) && (VT_EMPTY == varName.vt)) {
hr = E_FAIL;
RTC_LOG(LS_ERROR) << "IPropertyStore::GetValue returned no value,"
<< " hr = 0x" << rtc::ToHex(hr);
" hr = 0x"
<< rtc::ToHex(hr);
}
if ((SUCCEEDED(hr)) && (VT_LPWSTR != varName.vt)) {
// The returned value is not a wide null terminated string.
hr = E_UNEXPECTED;
RTC_LOG(LS_ERROR) << "IPropertyStore::GetValue returned unexpected"
<< " type, hr = 0x" << rtc::ToHex(hr);
" type, hr = 0x"
<< rtc::ToHex(hr);
}
if (SUCCEEDED(hr) && (varName.pwszVal != NULL)) {

View file

@ -284,7 +284,8 @@ ComPtr<IMMDevice> CreateDeviceInternal(const std::string& device_id,
EDataFlow data_flow,
ERole role) {
RTC_DLOG(INFO) << "CreateDeviceInternal: "
<< "id=" << device_id << ", flow=" << FlowToString(data_flow)
"id="
<< device_id << ", flow=" << FlowToString(data_flow)
<< ", role=" << RoleToString(role);
ComPtr<IMMDevice> audio_endpoint_device;
@ -967,7 +968,7 @@ HRESULT GetBufferSizeLimits(IAudioClient2* client,
// This API seems to be supported in off-load mode only but it is not
// documented as a valid error code. Making a special note about it here.
RTC_LOG(LS_ERROR) << "IAudioClient2::GetBufferSizeLimits failed: "
<< "AUDCLNT_E_OFFLOAD_MODE_ONLY";
"AUDCLNT_E_OFFLOAD_MODE_ONLY";
} else if (FAILED(error.Error())) {
RTC_LOG(LS_ERROR) << "IAudioClient2::GetBufferSizeLimits failed: "
<< ErrorToString(error);

View file

@ -144,9 +144,14 @@ int main(int argc, char* argv[]) {
// Print stats.
std::cout << "Limiting is: " << (absl::GetFlag(FLAGS_limiter) ? "on" : "off")
<< "\n"
<< "Channels: " << num_channels << "\n"
<< "Rate: " << sample_rate << "\n"
<< "Number of input streams: " << input_files.size() << "\n";
"Channels: "
<< num_channels
<< "\n"
"Rate: "
<< sample_rate
<< "\n"
"Number of input streams: "
<< input_files.size() << "\n";
for (const auto& source : sources) {
std::cout << "\t" << source.ToString() << "\n";
}

View file

@ -244,9 +244,13 @@ void MonoAgc::SetLevel(int new_level) {
}
stream_analog_level_ = new_level;
RTC_DLOG(LS_INFO) << "[agc] voe_level=" << voe_level << ", "
<< "level_=" << level_ << ", "
<< "new_level=" << new_level;
RTC_DLOG(LS_INFO) << "[agc] voe_level=" << voe_level
<< ", "
"level_="
<< level_
<< ", "
"new_level="
<< new_level;
level_ = new_level;
}

View file

@ -362,7 +362,7 @@ PitchInfo CheckLowerPitchPeriodsAndComputePitchGain(
}
RTC_DCHECK_NE(candidate_pitch_period, candidate_pitch_secondary_period)
<< "The lower pitch period and the additional sub-harmonic must not "
<< "coincide.";
"coincide.";
// Compute an auto-correlation score for the primary pitch candidate
// |candidate_pitch_period| by also looking at its possible sub-harmonic
// |candidate_pitch_secondary_period|.

View file

@ -331,7 +331,7 @@ GatedRecurrentLayer::GatedRecurrentLayer(
optimization_(optimization) {
RTC_DCHECK_LE(output_size_, kRecurrentLayersMaxUnits)
<< "Static over-allocation of recurrent layers state vectors is not "
<< "sufficient.";
"sufficient.";
RTC_DCHECK_EQ(kNumGruGates * output_size_, bias_.size())
<< "Mismatching output size and bias terms array size.";
RTC_DCHECK_EQ(kNumGruGates * input_size_ * output_size_, weights_.size())
@ -339,7 +339,7 @@ GatedRecurrentLayer::GatedRecurrentLayer(
RTC_DCHECK_EQ(kNumGruGates * output_size_ * output_size_,
recurrent_weights_.size())
<< "Mismatching input-output size and recurrent weight coefficients array"
<< " size.";
" size.";
Reset();
}

View file

@ -352,7 +352,8 @@ AudioProcessingImpl::AudioProcessingImpl(
EnforceSplitBandHpf()),
capture_nonlocked_() {
RTC_LOG(LS_INFO) << "Injected APM submodules:"
<< "\nEcho control factory: " << !!echo_control_factory_
"\nEcho control factory: "
<< !!echo_control_factory_
<< "\nEcho detector: " << !!submodules_.echo_detector
<< "\nCapture analyzer: " << !!submodules_.capture_analyzer
<< "\nCapture post processor: "

View file

@ -2174,21 +2174,36 @@ std::string ProduceDebugText(int render_input_sample_rate_hz,
size_t capture_output_num_channels) {
rtc::StringBuilder ss;
ss << "Sample rates:"
"\n"
" Render input: "
<< render_input_sample_rate_hz
<< " Hz"
"\n"
" Render output: "
<< render_output_sample_rate_hz
<< " Hz"
"\n"
" Capture input: "
<< capture_input_sample_rate_hz
<< " Hz"
"\n"
" Capture output: "
<< capture_output_sample_rate_hz
<< " Hz"
"\n"
"Number of channels:"
"\n"
" Render input: "
<< render_input_num_channels
<< "\n"
<< " Render input: " << render_input_sample_rate_hz << " Hz"
" Render output: "
<< render_output_num_channels
<< "\n"
<< " Render output: " << render_output_sample_rate_hz << " Hz"
" Capture input: "
<< capture_input_num_channels
<< "\n"
<< " Capture input: " << capture_input_sample_rate_hz << " Hz"
<< "\n"
<< " Capture output: " << capture_output_sample_rate_hz << " Hz"
<< "\n"
<< "Number of channels:"
<< "\n"
<< " Render input: " << render_input_num_channels << "\n"
<< " Render output: " << render_output_num_channels << "\n"
<< " Capture input: " << capture_input_num_channels << "\n"
<< " Capture output: " << capture_output_num_channels;
" Capture output: "
<< capture_output_num_channels;
return ss.Release();
}

View file

@ -107,15 +107,15 @@ std::string GainController2::ToString(
// clang-format off
// clang formatting doesn't respect custom nested style.
ss << "{"
<< "enabled: " << (config.enabled ? "true" : "false") << ", "
<< "fixed_digital: {gain_db: " << config.fixed_digital.gain_db << "}, "
<< "adaptive_digital: {"
<< "enabled: "
<< (config.adaptive_digital.enabled ? "true" : "false") << ", "
<< "level_estimator: " << adaptive_digital_level_estimator << ", "
<< "extra_saturation_margin_db:"
<< config.adaptive_digital.extra_saturation_margin_db << "}"
<< "}";
"enabled: " << (config.enabled ? "true" : "false") << ", "
"fixed_digital: {gain_db: " << config.fixed_digital.gain_db << "}, "
"adaptive_digital: {"
"enabled: "
<< (config.adaptive_digital.enabled ? "true" : "false") << ", "
"level_estimator: " << adaptive_digital_level_estimator << ", "
"extra_saturation_margin_db:"
<< config.adaptive_digital.extra_saturation_margin_db << "}"
"}";
// clang-format on
return ss.Release();
}

View file

@ -72,13 +72,16 @@ std::string AudioProcessing::Config::ToString() const {
char buf[1024];
rtc::SimpleStringBuilder builder(buf);
builder << "AudioProcessing::Config{ "
<< "pipeline: {"
<< "maximum_internal_processing_rate: "
"pipeline: {"
"maximum_internal_processing_rate: "
<< pipeline.maximum_internal_processing_rate
<< ", multi_channel_render: " << pipeline.multi_channel_render << ", "
<< ", multi_channel_capture: " << pipeline.multi_channel_capture
<< ", multi_channel_render: " << pipeline.multi_channel_render
<< ", "
", multi_channel_capture: "
<< pipeline.multi_channel_capture
<< "}, "
<< "pre_amplifier: { enabled: " << pre_amplifier.enabled
"pre_amplifier: { enabled: "
<< pre_amplifier.enabled
<< ", fixed_gain_factor: " << pre_amplifier.fixed_gain_factor
<< " }, high_pass_filter: { enabled: " << high_pass_filter.enabled
<< " }, echo_canceller: { enabled: " << echo_canceller.enabled

View file

@ -76,9 +76,14 @@ int main(int argc, char* argv[]) {
// Write config to file.
std::ofstream out_config(config_output_file);
out_config << "{"
<< "'frame_len_ms': " << absl::GetFlag(FLAGS_f) << ", "
<< "'attack_ms': " << absl::GetFlag(FLAGS_a) << ", "
<< "'decay_ms': " << absl::GetFlag(FLAGS_d) << "}\n";
"'frame_len_ms': "
<< absl::GetFlag(FLAGS_f)
<< ", "
"'attack_ms': "
<< absl::GetFlag(FLAGS_a)
<< ", "
"'decay_ms': "
<< absl::GetFlag(FLAGS_d) << "}\n";
out_config.close();
// Measure level frame-by-frame.

View file

@ -145,7 +145,8 @@ TEST(WPDTreeTest, CorrectnessBasedOnMatlabFiles) {
ASSERT_EQ(kLeavesSamples, matlab_samples_read)
<< "Matlab test files are malformed.\n"
<< "File: 3_" << i;
"File: 3_"
<< i;
// Get output data from the corresponding node
const float* node_data = tree.NodeAt(kLevels, i)->data();
// Compare with matlab files.

View file

@ -49,7 +49,7 @@ void RttStats::UpdateRtt(TimeDelta send_delta,
Timestamp now) {
if (send_delta.IsInfinite() || send_delta <= TimeDelta::Zero()) {
RTC_LOG(LS_WARNING) << "Ignoring measured send_delta, because it's is "
<< "either infinite, zero, or negative. send_delta = "
"either infinite, zero, or negative. send_delta = "
<< ToString(send_delta);
return;
}

View file

@ -41,7 +41,8 @@ class WindowedFilterTest : public ::testing::Test {
windowed_min_rtt_.Update(rtt_sample, now_ms);
RTC_LOG(LS_VERBOSE) << "i: " << i << " sample: " << ToString(rtt_sample)
<< " mins: "
<< " " << ToString(windowed_min_rtt_.GetBest()) << " "
" "
<< ToString(windowed_min_rtt_.GetBest()) << " "
<< ToString(windowed_min_rtt_.GetSecondBest()) << " "
<< ToString(windowed_min_rtt_.GetThirdBest());
now_ms += 25;
@ -63,7 +64,8 @@ class WindowedFilterTest : public ::testing::Test {
windowed_max_bw_.Update(bw_sample, now_ms);
RTC_LOG(LS_VERBOSE) << "i: " << i << " sample: " << ToString(bw_sample)
<< " maxs: "
<< " " << ToString(windowed_max_bw_.GetBest()) << " "
" "
<< ToString(windowed_max_bw_.GetBest()) << " "
<< ToString(windowed_max_bw_.GetSecondBest()) << " "
<< ToString(windowed_max_bw_.GetThirdBest());
now_ms += 25;
@ -117,7 +119,8 @@ TEST_F(WindowedFilterTest, MonotonicallyIncreasingMin) {
windowed_min_rtt_.Update(rtt_sample, now_ms);
RTC_LOG(LS_VERBOSE) << "i: " << i << " sample: " << rtt_sample.ms()
<< " mins: "
<< " " << windowed_min_rtt_.GetBest().ms() << " "
" "
<< windowed_min_rtt_.GetBest().ms() << " "
<< windowed_min_rtt_.GetSecondBest().ms() << " "
<< windowed_min_rtt_.GetThirdBest().ms();
if (i < 3) {
@ -144,7 +147,8 @@ TEST_F(WindowedFilterTest, MonotonicallyDecreasingMax) {
windowed_max_bw_.Update(bw_sample, now_ms);
RTC_LOG(LS_VERBOSE) << "i: " << i << " sample: " << bw_sample.bps()
<< " maxs: "
<< " " << windowed_max_bw_.GetBest().bps() << " "
" "
<< windowed_max_bw_.GetBest().bps() << " "
<< windowed_max_bw_.GetSecondBest().bps() << " "
<< windowed_max_bw_.GetThirdBest().bps();
if (i < 3) {

View file

@ -107,10 +107,12 @@ absl::optional<DataRate> ProbeBitrateEstimator::HandleProbeAndEstimateBitrate(
receive_interval <= TimeDelta::Zero() ||
receive_interval > kMaxProbeInterval) {
RTC_LOG(LS_INFO) << "Probing unsuccessful, invalid send/receive interval"
<< " [cluster id: " << cluster_id
<< "] [send interval: " << ToString(send_interval) << "]"
<< " [receive interval: " << ToString(receive_interval)
<< "]";
" [cluster id: "
<< cluster_id
<< "] [send interval: " << ToString(send_interval)
<< "]"
" [receive interval: "
<< ToString(receive_interval) << "]";
if (event_log_) {
event_log_->Log(std::make_unique<RtcEventProbeResultFailure>(
cluster_id, ProbeFailureReason::kInvalidSendReceiveInterval));
@ -134,16 +136,20 @@ absl::optional<DataRate> ProbeBitrateEstimator::HandleProbeAndEstimateBitrate(
double ratio = receive_rate / send_rate;
if (ratio > kMaxValidRatio) {
RTC_LOG(LS_INFO) << "Probing unsuccessful, receive/send ratio too high"
<< " [cluster id: " << cluster_id
<< "] [send: " << ToString(send_size) << " / "
<< ToString(send_interval) << " = " << ToString(send_rate)
" [cluster id: "
<< cluster_id << "] [send: " << ToString(send_size)
<< " / " << ToString(send_interval) << " = "
<< ToString(send_rate)
<< "]"
<< " [receive: " << ToString(receive_size) << " / "
" [receive: "
<< ToString(receive_size) << " / "
<< ToString(receive_interval) << " = "
<< ToString(receive_rate) << " ]"
<< " [ratio: " << ToString(receive_rate) << " / "
<< ToString(send_rate) << " = " << ratio
<< " > kMaxValidRatio (" << kMaxValidRatio << ")]";
<< ToString(receive_rate)
<< " ]"
" [ratio: "
<< ToString(receive_rate) << " / " << ToString(send_rate)
<< " = " << ratio << " > kMaxValidRatio ("
<< kMaxValidRatio << ")]";
if (event_log_) {
event_log_->Log(std::make_unique<RtcEventProbeResultFailure>(
cluster_id, ProbeFailureReason::kInvalidSendReceiveRatio));
@ -151,11 +157,12 @@ absl::optional<DataRate> ProbeBitrateEstimator::HandleProbeAndEstimateBitrate(
return absl::nullopt;
}
RTC_LOG(LS_INFO) << "Probing successful"
<< " [cluster id: " << cluster_id
<< "] [send: " << ToString(send_size) << " / "
" [cluster id: "
<< cluster_id << "] [send: " << ToString(send_size) << " / "
<< ToString(send_interval) << " = " << ToString(send_rate)
<< " ]"
<< " [receive: " << ToString(receive_size) << " / "
" [receive: "
<< ToString(receive_size) << " / "
<< ToString(receive_interval) << " = "
<< ToString(receive_rate) << "]";

View file

@ -99,7 +99,7 @@ void ReceiveSideCongestionController::WrappingBitrateEstimator::
if (packets_since_absolute_send_time_ >= kTimeOffsetSwitchThreshold) {
RTC_LOG(LS_INFO)
<< "WrappingBitrateEstimator: Switching to transmission "
<< "time offset RBE.";
"time offset RBE.";
using_absolute_send_time_ = false;
PickEstimator();
}

View file

@ -46,7 +46,7 @@ Window GetTopLevelWindow(Display* display, Window window) {
if (!XQueryTree(display, window, &root, &parent, &children,
&num_children)) {
RTC_LOG(LS_ERROR) << "Failed to query for child windows although window"
<< "does not have a valid WM_STATE.";
"does not have a valid WM_STATE.";
return None;
}
if (children)

View file

@ -61,7 +61,7 @@ DeferXFree::~DeferXFree() {
if (!XQueryTree(cache->display(), window, &root, &parent, &children,
&num_children)) {
RTC_LOG(LS_ERROR) << "Failed to query for child windows although window"
<< "does not have a valid WM_STATE.";
"does not have a valid WM_STATE.";
return 0;
}
::Window app_window = 0;

View file

@ -54,8 +54,8 @@ void DesktopConfigurationMonitor::DisplaysReconfigured(
CGDisplayChangeSummaryFlags flags) {
TRACE_EVENT0("webrtc", "DesktopConfigurationMonitor::DisplaysReconfigured");
RTC_LOG(LS_INFO) << "DisplaysReconfigured: "
<< "DisplayID " << display << "; ChangeSummaryFlags "
<< flags;
"DisplayID "
<< display << "; ChangeSummaryFlags " << flags;
if (flags & kCGDisplayBeginConfigurationFlag) {
reconfiguring_displays_.insert(display);

View file

@ -230,14 +230,15 @@ bool ScreenCapturerWinMagnifier::InitializeMagnifier() {
!set_window_source_func_ || !set_window_filter_list_func_ ||
!set_image_scaling_callback_func_) {
RTC_LOG_F(LS_WARNING) << "Failed to initialize ScreenCapturerWinMagnifier: "
<< "library functions missing.";
"library functions missing.";
return false;
}
BOOL result = mag_initialize_func_();
if (!result) {
RTC_LOG_F(LS_WARNING) << "Failed to initialize ScreenCapturerWinMagnifier: "
<< "error from MagInitialize " << GetLastError();
"error from MagInitialize "
<< GetLastError();
return false;
}
@ -249,7 +250,8 @@ bool ScreenCapturerWinMagnifier::InitializeMagnifier() {
if (!result) {
mag_uninitialize_func_();
RTC_LOG_F(LS_WARNING) << "Failed to initialize ScreenCapturerWinMagnifier: "
<< "error from GetModulehandleExA " << GetLastError();
"error from GetModulehandleExA "
<< GetLastError();
return false;
}
@ -272,7 +274,7 @@ bool ScreenCapturerWinMagnifier::InitializeMagnifier() {
if (!host_window_) {
mag_uninitialize_func_();
RTC_LOG_F(LS_WARNING) << "Failed to initialize ScreenCapturerWinMagnifier: "
<< "error from creating host window "
"error from creating host window "
<< GetLastError();
return false;
}
@ -284,7 +286,7 @@ bool ScreenCapturerWinMagnifier::InitializeMagnifier() {
if (!magnifier_window_) {
mag_uninitialize_func_();
RTC_LOG_F(LS_WARNING) << "Failed to initialize ScreenCapturerWinMagnifier: "
<< "error from creating magnifier window "
"error from creating magnifier window "
<< GetLastError();
return false;
}
@ -299,7 +301,7 @@ bool ScreenCapturerWinMagnifier::InitializeMagnifier() {
if (!result) {
mag_uninitialize_func_();
RTC_LOG_F(LS_WARNING) << "Failed to initialize ScreenCapturerWinMagnifier: "
<< "error from MagSetImageScalingCallback "
"error from MagSetImageScalingCallback "
<< GetLastError();
return false;
}
@ -311,7 +313,8 @@ bool ScreenCapturerWinMagnifier::InitializeMagnifier() {
mag_uninitialize_func_();
RTC_LOG_F(LS_WARNING)
<< "Failed to initialize ScreenCapturerWinMagnifier: "
<< "error from MagSetWindowFilterList " << GetLastError();
"error from MagSetWindowFilterList "
<< GetLastError();
return false;
}
}
@ -334,11 +337,19 @@ void ScreenCapturerWinMagnifier::OnCaptured(void* data,
captured_bytes_per_pixel != DesktopFrame::kBytesPerPixel) {
RTC_LOG_F(LS_WARNING)
<< "Output format does not match the captured format: "
<< "width = " << header.width << ", "
<< "height = " << header.height << ", "
<< "stride = " << header.stride << ", "
<< "bpp = " << captured_bytes_per_pixel << ", "
<< "pixel format RGBA ? "
"width = "
<< header.width
<< ", "
"height = "
<< header.height
<< ", "
"stride = "
<< header.stride
<< ", "
"bpp = "
<< captured_bytes_per_pixel
<< ", "
"pixel format RGBA ? "
<< (header.format == GUID_WICPixelFormat32bppRGBA) << ".";
return;
}

View file

@ -132,8 +132,9 @@ Timestamp BitrateProber::NextProbeTime(Timestamp now) const {
if (next_probe_time_.IsFinite() &&
now - next_probe_time_ > config_.max_probe_delay.Get()) {
RTC_DLOG(LS_WARNING) << "Probe delay too high"
<< " (next_ms:" << next_probe_time_.ms()
<< ", now_ms: " << now.ms() << ")";
" (next_ms:"
<< next_probe_time_.ms() << ", now_ms: " << now.ms()
<< ")";
return Timestamp::PlusInfinity();
}

View file

@ -111,7 +111,8 @@ AimdRateControl::AimdRateControl(const WebRtcKeyValueConfig* key_value_config,
key_value_config->Lookup("WebRTC-BweAimdRateControlConfig"));
if (initial_backoff_interval_) {
RTC_LOG(LS_INFO) << "Using aimd rate control with initial back-off interval"
<< " " << ToString(*initial_backoff_interval_) << ".";
" "
<< ToString(*initial_backoff_interval_) << ".";
}
RTC_LOG(LS_INFO) << "Using aimd rate control with back off factor " << beta_;
}

View file

@ -58,8 +58,8 @@ RtpHeaderExtensionMap RegisterSupportedExtensions(
} else {
RTC_LOG(LS_INFO)
<< "FlexfecSender only supports RTP header extensions for "
<< "BWE and MID, so the extension " << extension.ToString()
<< " will not be used.";
"BWE and MID, so the extension "
<< extension.ToString() << " will not be used.";
}
}
return map;

View file

@ -131,7 +131,7 @@ int ForwardErrorCorrection::EncodeFec(const PacketList& media_packets,
if (media_packet->data.size() < kRtpHeaderSize) {
RTC_LOG(LS_WARNING) << "Media packet " << media_packet->data.size()
<< " bytes "
<< "is smaller than RTP header.";
"is smaller than RTP header.";
return -1;
}
// Ensure the FEC packets will fit in a typical MTU.
@ -139,8 +139,8 @@ int ForwardErrorCorrection::EncodeFec(const PacketList& media_packets,
IP_PACKET_SIZE) {
RTC_LOG(LS_WARNING) << "Media packet " << media_packet->data.size()
<< " bytes "
<< "with overhead is larger than " << IP_PACKET_SIZE
<< " bytes.";
"with overhead is larger than "
<< IP_PACKET_SIZE << " bytes.";
}
}
@ -549,7 +549,7 @@ bool ForwardErrorCorrection::StartPacketRecovery(
fec_packet.fec_header_size + fec_packet.protection_length) {
RTC_LOG(LS_WARNING)
<< "The FEC packet is truncated: it does not contain enough room "
<< "for its own header.";
"for its own header.";
return false;
}
if (fec_packet.protection_length >
@ -590,7 +590,7 @@ bool ForwardErrorCorrection::FinishPacketRecovery(
ByteReader<uint16_t>::ReadBigEndian(&data[2]) + kRtpHeaderSize;
if (new_size > size_t{IP_PACKET_SIZE - kRtpHeaderSize}) {
RTC_LOG(LS_WARNING) << "The recovered packet had a length larger than a "
<< "typical IP packet, and is thus dropped.";
"typical IP packet, and is thus dropped.";
return false;
}
recovered_packet->pkt->data.SetSize(new_size);


@ -593,7 +593,7 @@ bool RTPSenderVideo::SendVideo(
} else if (require_frame_encryption_) {
RTC_LOG(LS_WARNING)
<< "No FrameEncryptor is attached to this video sending stream but "
<< "one is required since require_frame_encryptor is set";
"one is required since require_frame_encryptor is set";
}
std::unique_ptr<RtpPacketizer> packetizer = RtpPacketizer::Create(


@ -99,7 +99,7 @@ void RunTest(bool use_flexfec) {
sizeof(kPacketMaskBurstyTbl) / sizeof(*kPacketMaskBurstyTbl)};
ASSERT_EQ(12, kMaxMediaPackets[1]) << "Max media packets for bursty mode not "
<< "equal to 12.";
"equal to 12.";
ForwardErrorCorrection::PacketList media_packet_list;
std::list<ForwardErrorCorrection::Packet*> fec_packet_list;
@ -293,8 +293,10 @@ void RunTest(bool use_flexfec) {
<< "EncodeFec() failed";
ASSERT_EQ(num_fec_packets, fec_packet_list.size())
<< "We requested " << num_fec_packets << " FEC packets, but "
<< "EncodeFec() produced " << fec_packet_list.size();
<< "We requested " << num_fec_packets
<< " FEC packets, but "
"EncodeFec() produced "
<< fec_packet_list.size();
memset(media_loss_mask, 0, sizeof(media_loss_mask));
uint32_t media_packet_idx = 0;
@ -419,12 +421,12 @@ void RunTest(bool use_flexfec) {
ASSERT_EQ(recovered_packet->pkt->data.size(),
media_packet->data.size())
<< "Recovered packet length not identical to original "
<< "media packet";
"media packet";
ASSERT_EQ(0, memcmp(recovered_packet->pkt->data.cdata(),
media_packet->data.cdata(),
media_packet->data.size()))
<< "Recovered packet payload not identical to original "
<< "media packet";
"media packet";
recovered_packet_list.pop_front();
}
++media_packet_idx;


@ -124,8 +124,10 @@ void ProcessThreadImpl::RegisterModule(Module* module,
rtc::CritScope lock(&lock_);
for (const ModuleCallback& mc : modules_) {
RTC_DCHECK(mc.module != module)
<< "Already registered here: " << mc.location.ToString() << "\n"
<< "Now attempting from here: " << from.ToString();
<< "Already registered here: " << mc.location.ToString()
<< "\n"
"Now attempting from here: "
<< from.ToString();
}
}
#endif


@ -74,7 +74,8 @@ DeviceInfoDS::DeviceInfoDS()
//
RTC_LOG(LS_INFO) << __FUNCTION__
<< ": CoInitializeEx(NULL, COINIT_APARTMENTTHREADED)"
<< " => RPC_E_CHANGED_MODE, error 0x" << rtc::ToHex(hr);
" => RPC_E_CHANGED_MODE, error 0x"
<< rtc::ToHex(hr);
}
}
}
@ -163,7 +164,8 @@ int32_t DeviceInfoDS::GetDeviceInfo(uint32_t deviceNumber,
deviceNameLength, NULL, NULL);
if (convResult == 0) {
RTC_LOG(LS_INFO) << "Failed to convert device name to UTF8, "
<< "error = " << GetLastError();
"error = "
<< GetLastError();
return -1;
}
}
@ -173,16 +175,16 @@ int32_t DeviceInfoDS::GetDeviceInfo(uint32_t deviceNumber,
strncpy_s((char*)deviceUniqueIdUTF8, deviceUniqueIdUTF8Length,
(char*)deviceNameUTF8, convResult);
RTC_LOG(LS_INFO) << "Failed to get "
<< "deviceUniqueIdUTF8 using "
<< "deviceNameUTF8";
"deviceUniqueIdUTF8 using "
"deviceNameUTF8";
} else {
convResult = WideCharToMultiByte(
CP_UTF8, 0, varName.bstrVal, -1, (char*)deviceUniqueIdUTF8,
deviceUniqueIdUTF8Length, NULL, NULL);
if (convResult == 0) {
RTC_LOG(LS_INFO)
<< "Failed to convert device "
<< "name to UTF8, error = " << GetLastError();
RTC_LOG(LS_INFO) << "Failed to convert device "
"name to UTF8, error = "
<< GetLastError();
return -1;
}
if (productUniqueIdUTF8 && productUniqueIdUTF8Length > 0) {
@ -261,7 +263,8 @@ IBaseFilter* DeviceInfoDS::GetDeviceFilter(const char* deviceUniqueIdUTF8,
if
FAILED(hr) {
RTC_LOG(LS_ERROR) << "Failed to bind to the selected "
<< "capture device " << hr;
"capture device "
<< hr;
}
if (productUniqueIdUTF8 &&
@ -334,7 +337,7 @@ int32_t DeviceInfoDS::CreateCapabilityMap(const char* deviceUniqueIdUTF8)
(void**)&streamConfig);
if (FAILED(hr)) {
RTC_LOG(LS_INFO) << "Failed to get IID_IAMStreamConfig interface "
<< "from capture device";
"from capture device";
return -1;
}


@ -378,7 +378,7 @@ int32_t H264EncoderImpl::Encode(
if (!encoded_image_callback_) {
RTC_LOG(LS_WARNING)
<< "InitEncode() has been called, but a callback function "
<< "has not been set with RegisterEncodeCompleteCallback()";
"has not been set with RegisterEncodeCompleteCallback()";
ReportError();
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}


@ -72,9 +72,11 @@ Vp9FrameBufferPool::GetFrameBuffer(size_t min_size) {
allocated_buffers_.push_back(available_buffer);
if (allocated_buffers_.size() > max_num_buffers_) {
RTC_LOG(LS_WARNING)
<< allocated_buffers_.size() << " Vp9FrameBuffers have been "
<< "allocated by a Vp9FrameBufferPool (exceeding what is "
<< "considered reasonable, " << max_num_buffers_ << ").";
<< allocated_buffers_.size()
<< " Vp9FrameBuffers have been "
"allocated by a Vp9FrameBufferPool (exceeding what is "
"considered reasonable, "
<< max_num_buffers_ << ").";
// TODO(phoglund): this limit is being hit in tests since Oct 5 2016.
// See https://bugs.chromium.org/p/webrtc/issues/detail?id=6484.


@ -1032,7 +1032,8 @@ int VP9EncoderImpl::Encode(const VideoFrame& input_image,
if (rv != VPX_CODEC_OK) {
RTC_LOG(LS_ERROR) << "Encoding error: " << vpx_codec_err_to_string(rv)
<< "\n"
<< "Details: " << vpx_codec_error(encoder_) << "\n"
"Details: "
<< vpx_codec_error(encoder_) << "\n"
<< vpx_codec_error_detail(encoder_);
return WEBRTC_VIDEO_CODEC_ERROR;
}
@ -1608,8 +1609,9 @@ VP9DecoderImpl::~VP9DecoderImpl() {
// The frame buffers are reference counted and frames are exposed after
// decoding. There may be valid usage cases where previous frames are still
// referenced after ~VP9DecoderImpl that is not a leak.
RTC_LOG(LS_INFO) << num_buffers_in_use << " Vp9FrameBuffers are still "
<< "referenced during ~VP9DecoderImpl.";
RTC_LOG(LS_INFO) << num_buffers_in_use
<< " Vp9FrameBuffers are still "
"referenced during ~VP9DecoderImpl.";
}
}


@ -297,7 +297,7 @@ bool VCMDecodingState::UsingFlexibleMode(const VCMFrameBuffer* frame) const {
frame->CodecSpecific()->codecSpecific.VP9.flexible_mode;
if (is_flexible_mode && frame->PictureId() == kNoPictureId) {
RTC_LOG(LS_WARNING) << "Frame is marked as using flexible mode but no"
<< "picture id is set.";
"picture id is set.";
return false;
}
return is_flexible_mode;


@ -350,7 +350,8 @@ bool FrameBuffer::HasBadRenderTiming(const EncodedFrame& frame,
int frame_delay = static_cast<int>(std::abs(render_time_ms - now_ms));
RTC_LOG(LS_WARNING)
<< "A frame about to be decoded is out of the configured "
<< "delay bounds (" << frame_delay << " > " << kMaxVideoDelayMs
"delay bounds ("
<< frame_delay << " > " << kMaxVideoDelayMs
<< "). Resetting the video jitter buffer.";
return true;
}
@ -482,14 +483,14 @@ int64_t FrameBuffer::InsertFrame(std::unique_ptr<EncodedFrame> frame) {
<< id.picture_id << ":"
<< static_cast<int>(id.spatial_layer)
<< ") but buffer is full, clearing"
<< " buffer and inserting the frame.";
" buffer and inserting the frame.";
ClearFramesAndHistory();
} else {
RTC_LOG(LS_WARNING) << "Frame with (picture_id:spatial_id) ("
<< id.picture_id << ":"
<< static_cast<int>(id.spatial_layer)
<< ") could not be inserted due to the frame "
<< "buffer being full, dropping frame.";
"buffer being full, dropping frame.";
return last_continuous_picture_id;
}
}
@ -662,7 +663,7 @@ bool FrameBuffer::UpdateFrameInfoWithIncomingFrame(const EncodedFrame& frame,
<< "Frame with (picture_id:spatial_id) (" << id.picture_id << ":"
<< static_cast<int>(id.spatial_layer)
<< ") depends on a non-decoded frame more previous than"
<< " the last decoded frame, dropping frame.";
" the last decoded frame, dropping frame.";
last_log_non_decoded_ms_ = now_ms;
}
return false;


@ -357,8 +357,8 @@ std::vector<std::unique_ptr<RtpFrameObject>> PacketBuffer::FindFrames(
if (has_h264_idr && (!has_h264_sps || !has_h264_pps)) {
RTC_LOG(LS_WARNING)
<< "Received H.264-IDR frame "
<< "(SPS: " << has_h264_sps << ", PPS: " << has_h264_pps
<< "). Treating as "
"(SPS: "
<< has_h264_sps << ", PPS: " << has_h264_pps << "). Treating as "
<< (sps_pps_idr_is_h264_keyframe_ ? "delta" : "key")
<< " frame since WebRTC-SpsPpsIdrIsH264Keyframe is "
<< (sps_pps_idr_is_h264_keyframe_ ? "enabled." : "disabled");


@ -114,7 +114,8 @@ VCMEncodedFrame* VCMReceiver::FrameForDecoding(uint16_t max_wait_time_ms,
int frame_delay = static_cast<int>(std::abs(render_time_ms - now_ms));
RTC_LOG(LS_WARNING)
<< "A frame about to be decoded is out of the configured "
<< "delay bounds (" << frame_delay << " > " << max_video_delay_ms_
"delay bounds ("
<< frame_delay << " > " << max_video_delay_ms_
<< "). Resetting the video jitter buffer.";
timing_error = true;
} else if (static_cast<int>(timing_->TargetVideoDelay()) >


@ -384,7 +384,7 @@ RtpFrameReferenceFinder::FrameDecision RtpFrameReferenceFinder::ManageFrameVp8(
<< " and packet range [" << frame->first_seq_num()
<< ", " << frame->last_seq_num()
<< "] already received, "
<< " dropping frame.";
" dropping frame.";
return kDrop;
}
@ -585,8 +585,9 @@ bool RtpFrameReferenceFinder::MissingRequiredFrameVp9(uint16_t picture_id,
size_t temporal_idx = info.gof->temporal_idx[gof_idx];
if (temporal_idx >= kMaxTemporalLayers) {
RTC_LOG(LS_WARNING) << "At most " << kMaxTemporalLayers << " temporal "
<< "layers are supported.";
RTC_LOG(LS_WARNING) << "At most " << kMaxTemporalLayers
<< " temporal "
"layers are supported.";
return true;
}
@ -628,8 +629,9 @@ void RtpFrameReferenceFinder::FrameReceivedVp9(uint16_t picture_id,
size_t temporal_idx = info->gof->temporal_idx[gof_idx];
if (temporal_idx >= kMaxTemporalLayers) {
RTC_LOG(LS_WARNING) << "At most " << kMaxTemporalLayers << " temporal "
<< "layers are supported.";
RTC_LOG(LS_WARNING) << "At most " << kMaxTemporalLayers
<< " temporal "
"layers are supported.";
return;
}
@ -646,8 +648,9 @@ void RtpFrameReferenceFinder::FrameReceivedVp9(uint16_t picture_id,
size_t temporal_idx = info->gof->temporal_idx[gof_idx];
if (temporal_idx >= kMaxTemporalLayers) {
RTC_LOG(LS_WARNING) << "At most " << kMaxTemporalLayers << " temporal "
<< "layers are supported.";
RTC_LOG(LS_WARNING) << "At most " << kMaxTemporalLayers
<< " temporal "
"layers are supported.";
return;
}
@ -783,7 +786,7 @@ RtpFrameReferenceFinder::FrameDecision RtpFrameReferenceFinder::ManageFrameH264(
<< " and packet range [" << frame->first_seq_num()
<< ", " << frame->last_seq_num()
<< "] already received, "
<< " dropping frame.";
" dropping frame.";
return kDrop;
}


@ -525,15 +525,15 @@ void Connection::HandleStunBindingOrGoogPingRequest(IceMessage* msg) {
if (last_ping_sent_ + kMinExtraPingDelayMs <= now) {
RTC_LOG(LS_INFO) << ToString()
<< "WebRTC-ExtraICEPing/Sending extra ping"
<< " last_ping_sent_: " << last_ping_sent_
<< " now: " << now
" last_ping_sent_: "
<< last_ping_sent_ << " now: " << now
<< " (diff: " << (now - last_ping_sent_) << ")";
Ping(now);
} else {
RTC_LOG(LS_INFO) << ToString()
<< "WebRTC-ExtraICEPing/Not sending extra ping"
<< " last_ping_sent_: " << last_ping_sent_
<< " now: " << now
" last_ping_sent_: "
<< last_ping_sent_ << " now: " << now
<< " (diff: " << (now - last_ping_sent_) << ")";
}
}


@ -1105,7 +1105,7 @@ void P2PTransportChannel::ResolveHostnameCandidate(const Candidate& candidate) {
RTC_DCHECK_RUN_ON(network_thread_);
if (!async_resolver_factory_) {
RTC_LOG(LS_WARNING) << "Dropping ICE candidate with hostname address "
<< "(no AsyncResolverFactory)";
"(no AsyncResolverFactory)";
return;
}


@ -2677,7 +2677,8 @@ TEST_P(GoogPingTest, TestGoogPingAnnounceEnable) {
trials.announce_goog_ping = GetParam().first;
trials.enable_goog_ping = GetParam().second;
RTC_LOG(LS_INFO) << "Testing combination: "
<< " announce: " << trials.announce_goog_ping
" announce: "
<< trials.announce_goog_ping
<< " enable:" << trials.enable_goog_ping;
auto port1_unique =


@ -1224,8 +1224,9 @@ bool TurnPort::CreateOrRefreshEntry(const rtc::SocketAddress& addr,
if (webrtc::field_trial::IsEnabled("WebRTC-TurnAddMultiMapping")) {
if (entry->get_remote_ufrag() != remote_ufrag) {
RTC_LOG(LS_INFO) << ToString() << ": remote ufrag updated."
<< " Sending new permission request";
RTC_LOG(LS_INFO) << ToString()
<< ": remote ufrag updated."
" Sending new permission request";
entry->set_remote_ufrag(remote_ufrag);
entry->SendCreatePermissionRequest(0);
}


@ -423,7 +423,7 @@ bool BaseChannel::SendPacket(bool rtcp,
// (and SetSend(true) is called).
RTC_LOG(LS_ERROR)
<< "Can't send outgoing RTP packet when SRTP is inactive"
<< " and crypto is required";
" and crypto is required";
RTC_NOTREACHED();
return false;
}


@ -1262,7 +1262,7 @@ bool PeerConnection::Initialize(
RTC_DCHECK(false)
<< "PeerConnecton is initialized with use_datagram_transport = true "
"or use_datagram_transport_for_data_channels = true "
<< "but media transport factory is not set in PeerConnectionFactory";
"but media transport factory is not set in PeerConnectionFactory";
return false;
}


@ -318,7 +318,8 @@ class RTCStatsVerifier {
EXPECT_TRUE(valid_reference)
<< stats_->type() << "." << member.name()
<< " is not a reference to an "
<< "existing dictionary of type " << expected_type << " (value: "
"existing dictionary of type "
<< expected_type << " (value: "
<< (member.is_defined() ? member.ValueToString() : "null") << ").";
MarkMemberTested(member, valid_reference);
}


@ -266,7 +266,7 @@ ContentInfo::~ContentInfo() {
// If description_ is null, we assume that a move operator
// has been applied.
RTC_LOG(LS_ERROR) << "ContentInfo::description has been updated by "
<< "assignment. This usage is deprecated.";
"assignment. This usage is deprecated.";
description_.reset(description); // ensure that it is destroyed.
}
}
@ -295,7 +295,7 @@ const MediaContentDescription* ContentInfo::media_description() const {
// Someone's updated |description|, or used a move operator
// on the record.
RTC_LOG(LS_ERROR) << "ContentInfo::description has been updated by "
<< "assignment. This usage is deprecated.";
"assignment. This usage is deprecated.";
const_cast<ContentInfo*>(this)->description_.reset(description);
}
return description_.get();
@ -306,7 +306,7 @@ MediaContentDescription* ContentInfo::media_description() {
// Someone's updated |description|, or used a move operator
// on the record.
RTC_LOG(LS_ERROR) << "ContentInfo::description has been updated by "
<< "assignment. This usage is deprecated.";
"assignment. This usage is deprecated.";
description_.reset(description);
}
return description_.get();


@ -1061,8 +1061,9 @@ bool ParseCandidate(const std::string& message,
attribute_candidate != kAttributeCandidate) {
if (is_raw) {
rtc::StringBuilder description;
description << "Expect line: " << kAttributeCandidate << ":"
<< "<candidate-str>";
description << "Expect line: " << kAttributeCandidate
<< ":"
"<candidate-str>";
return ParseFailed(first_line, 0, description.str(), error);
} else {
return ParseFailedExpectLine(first_line, 0, kLineTypeAttributes,


@ -1963,18 +1963,22 @@ class WebRtcSdpTest : public ::testing::Test {
os << "minptime=" << params.min_ptime << "; stereo=" << params.stereo
<< "; sprop-stereo=" << params.sprop_stereo
<< "; useinbandfec=" << params.useinband
<< "; maxaveragebitrate=" << params.maxaveragebitrate << "\r\n"
<< "a=ptime:" << params.ptime << "\r\n"
<< "a=maxptime:" << params.max_ptime << "\r\n";
<< "; maxaveragebitrate=" << params.maxaveragebitrate
<< "\r\n"
"a=ptime:"
<< params.ptime
<< "\r\n"
"a=maxptime:"
<< params.max_ptime << "\r\n";
sdp += os.str();
os.clear();
os.str("");
// Pl type 100 preferred.
os << "m=video 9 RTP/SAVPF 99 95\r\n"
<< "a=rtpmap:99 VP8/90000\r\n"
<< "a=rtpmap:95 RTX/90000\r\n"
<< "a=fmtp:95 apt=99;\r\n";
"a=rtpmap:99 VP8/90000\r\n"
"a=rtpmap:95 RTX/90000\r\n"
"a=fmtp:95 apt=99;\r\n";
sdp += os.str();
// Deserialize
@ -2118,8 +2122,11 @@ void TestMismatch(const std::string& string1, const std::string& string2) {
}
EXPECT_EQ(0, position) << "Strings mismatch at the " << position
<< " character\n"
<< " 1: " << string1.substr(position, 20) << "\n"
<< " 2: " << string2.substr(position, 20) << "\n";
" 1: "
<< string1.substr(position, 20)
<< "\n"
" 2: "
<< string2.substr(position, 20) << "\n";
}
TEST_F(WebRtcSdpTest, SerializeSessionDescription) {


@ -339,8 +339,10 @@ TEST(LogTest, Perf) {
stream.Close();
EXPECT_EQ(str.size(), (message.size() + logging_overhead) * kRepetitions);
RTC_LOG(LS_INFO) << "Total log time: " << TimeDiff(finish, start) << " ms "
<< " total bytes logged: " << str.size();
RTC_LOG(LS_INFO) << "Total log time: " << TimeDiff(finish, start)
<< " ms "
" total bytes logged: "
<< str.size();
}
TEST(LogTest, EnumsAreSupported) {


@ -773,7 +773,7 @@ bool IsDefaultRoute(const std::string& network_name) {
if (!f) {
RTC_LOG(LS_WARNING)
<< "Couldn't read /proc/net/route, skipping default "
<< "route check (assuming everything is a default route).";
"route check (assuming everything is a default route).";
return true;
}
bool is_default_route = false;


@ -930,7 +930,7 @@ TEST_F(NetworkTest, TestIgnoreNonDefaultRoutes) {
return;
}
RTC_LOG(LS_INFO) << "Found dummy, running again while ignoring non-default "
<< "routes.";
"routes.";
manager.set_ignore_non_default_routes(true);
list = GetNetworks(manager, false);
for (NetworkManager::NetworkList::iterator it = list.begin();


@ -857,8 +857,10 @@ SSL_CTX* OpenSSLAdapter::CreateContext(SSLMode mode, bool enable_cache) {
if (ctx == nullptr) {
unsigned long error = ERR_get_error(); // NOLINT: type used by OpenSSL.
RTC_LOG(LS_WARNING) << "SSL_CTX creation failed: " << '"'
<< ERR_reason_error_string(error) << "\" "
<< "(error=" << error << ')';
<< ERR_reason_error_string(error)
<< "\" "
"(error="
<< error << ')';
return nullptr;
}
@ -906,7 +908,7 @@ std::string TransformAlpnProtocols(
for (const std::string& proto : alpn_protocols) {
if (proto.size() == 0 || proto.size() > 0xFF) {
RTC_LOG(LS_ERROR) << "OpenSSLAdapter::Error("
<< "TransformAlpnProtocols received proto with size "
"TransformAlpnProtocols received proto with size "
<< proto.size() << ")";
return "";
}


@ -1265,8 +1265,8 @@ void PhysicalSocketServer::Remove(Dispatcher* pdispatcher) {
if (!pending_add_dispatchers_.erase(pdispatcher) &&
dispatchers_.find(pdispatcher) == dispatchers_.end()) {
RTC_LOG(LS_WARNING) << "PhysicalSocketServer asked to remove a unknown "
<< "dispatcher, potentially from a duplicate call to "
<< "Add.";
"dispatcher, potentially from a duplicate call to "
"Add.";
return;
}
@ -1274,7 +1274,7 @@ void PhysicalSocketServer::Remove(Dispatcher* pdispatcher) {
} else if (!dispatchers_.erase(pdispatcher)) {
RTC_LOG(LS_WARNING)
<< "PhysicalSocketServer asked to remove a unknown "
<< "dispatcher, potentially from a duplicate call to Add.";
"dispatcher, potentially from a duplicate call to Add.";
return;
}
#if defined(WEBRTC_USE_EPOLL)


@ -391,7 +391,7 @@ void SocketTest::ConnectWithDnsLookupFailInternal(const IPAddress& loopback) {
dns_lookup_finished);
if (!dns_lookup_finished) {
RTC_LOG(LS_WARNING) << "Skipping test; DNS resolution took longer than 5 "
<< "seconds.";
"seconds.";
return;
}


@ -766,7 +766,7 @@ void Thread::Join() {
RTC_DCHECK(!IsCurrent());
if (Current() && !Current()->blocking_calls_allowed_) {
RTC_LOG(LS_WARNING) << "Waiting for the thread to join, "
<< "but blocking calls have been disallowed";
"but blocking calls have been disallowed";
}
#if defined(WEBRTC_WIN)


@ -122,8 +122,8 @@ int64_t TimestampAligner::ClipTimestamp(int64_t filtered_time_us,
// duplicate timestamps in case this function is called several times with
// exactly the same |system_time_us|.
RTC_LOG(LS_WARNING) << "too short translated timestamp interval: "
<< "system time (us) = " << system_time_us
<< ", interval (us) = "
"system time (us) = "
<< system_time_us << ", interval (us) = "
<< system_time_us - prev_translated_time_us_;
time_us = system_time_us;
}


@ -67,7 +67,7 @@ rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> CreateTestPCF(
auto factory = CreateModularPeerConnectionFactory(std::move(pcf_deps));
RTC_LOG(LS_INFO) << "PeerConnectionFactory created: " << factory;
RTC_CHECK(factory) << "Failed to create the peer connection factory; "
<< "WebRTC/libjingle init likely failed on this device";
"WebRTC/libjingle init likely failed on this device";
return factory;
}


@ -712,8 +712,10 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(JNIEnv* jni,
(current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms;
ALOGD << "Frames decoded: " << frames_decoded_
<< ". Received: " << frames_received_
<< ". Bitrate: " << current_bitrate << " kbps"
<< ". Fps: " << current_fps
<< ". Bitrate: " << current_bitrate
<< " kbps"
". Fps: "
<< current_fps
<< ". DecTime: " << (current_decoding_time_ms_ / current_frames_)
<< ". DelayTime: " << (current_delay_time_ms_ / current_frames_)
<< " for last " << statistic_time_ms << " ms.";


@ -649,7 +649,8 @@ int32_t MediaCodecVideoEncoder::Encode(
if (input_frame_infos_.size() > MAX_ENCODER_Q_SIZE) {
ALOGD << "Already " << input_frame_infos_.size()
<< " frames in the queue, dropping"
<< ". TS: " << static_cast<int>(current_timestamp_us_ / 1000)
". TS: "
<< static_cast<int>(current_timestamp_us_ / 1000)
<< ". Fps: " << last_set_fps_
<< ". Consecutive drops: " << consecutive_full_queue_frame_drops_;
current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
@ -1134,8 +1135,10 @@ void MediaCodecVideoEncoder::LogStatistics(bool force_log) {
(current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms;
ALOGD << "Encoded frames: " << frames_encoded_
<< ". Bitrate: " << current_bitrate
<< ", target: " << last_set_bitrate_kbps_ << " kbps"
<< ", fps: " << current_fps << ", encTime: "
<< ", target: " << last_set_bitrate_kbps_
<< " kbps"
", fps: "
<< current_fps << ", encTime: "
<< (current_encoding_time_ms_ / current_frames_divider)
<< ". QP: " << (current_acc_qp_ / current_frames_divider)
<< " for last " << statistic_time_ms << " ms.";


@ -248,7 +248,8 @@ rtc::NetworkBindingResult AndroidNetworkMonitor::BindSocketToNetwork(
if (!network_binding_supported) {
RTC_LOG(LS_WARNING)
<< "BindSocketToNetwork is not supported on this platform "
<< "(Android SDK: " << android_sdk_int_ << ")";
"(Android SDK: "
<< android_sdk_int_ << ")";
return rtc::NetworkBindingResult::NOT_IMPLEMENTED;
}


@ -174,7 +174,8 @@ aaudio_data_callback_result_t AAudioPlayer::OnDataCallback(void* audio_data,
// utilized.
if (first_data_callback_) {
RTC_LOG(INFO) << "--- First output data callback: "
<< "device id=" << aaudio_.device_id();
"device id="
<< aaudio_.device_id();
first_data_callback_ = false;
}


@ -173,7 +173,8 @@ aaudio_data_callback_result_t AAudioRecorder::OnDataCallback(
// is obtained.
if (first_data_callback_) {
RTC_LOG(INFO) << "--- First input data callback: "
<< "device id=" << aaudio_.device_id();
"device id="
<< aaudio_.device_id();
aaudio_.ClearInputStream(audio_data, num_frames);
first_data_callback_ = false;
}


@ -330,7 +330,7 @@ ScopedJavaLocalRef<jobject> CreatePeerConnectionFactoryForJava(
CreateModularPeerConnectionFactory(std::move(dependencies));
RTC_CHECK(factory) << "Failed to create the peer connection factory; "
<< "WebRTC/libjingle init likely failed on this device";
"WebRTC/libjingle init likely failed on this device";
// TODO(honghaiz): Maybe put the options as the argument of
// CreatePeerConnectionFactory.
if (options)


@ -99,9 +99,13 @@ bool RTCStats::operator!=(const RTCStats& other) const {
std::string RTCStats::ToJson() const {
rtc::StringBuilder sb;
sb << "{\"type\":\"" << type() << "\","
<< "\"id\":\"" << id_ << "\","
<< "\"timestamp\":" << timestamp_us_;
sb << "{\"type\":\"" << type()
<< "\","
"\"id\":\""
<< id_
<< "\","
"\"timestamp\":"
<< timestamp_us_;
for (const RTCStatsMemberInterface* member : Members()) {
if (member->is_defined()) {
sb << ",\"" << member->name() << "\":";


@ -79,8 +79,8 @@ void CallTest::RegisterRtpExtension(const RtpExtension& extension) {
<< "URI " << extension.uri
<< (extension.encrypt ? " with " : " without ")
<< "encryption already registered with a different "
<< "ID (" << extension.id << " vs. " << registered_extension.id
<< ").";
"ID ("
<< extension.id << " vs. " << registered_extension.id << ").";
}
}
rtp_extensions_.push_back(extension);


@ -202,8 +202,8 @@ int FakeNetworkSocket::RecvFrom(void* pv,
// but we won't to skip such error, so we will assert here.
RTC_CHECK(data_read == pending_->size())
<< "Too small buffer is provided for socket read. "
<< "Received data size: " << pending_->size()
<< "; Provided buffer size: " << cb;
"Received data size: "
<< pending_->size() << "; Provided buffer size: " << cb;
pending_.reset();


@ -173,8 +173,9 @@ TEST_F(FileUtilsTest, ResourcePathReturnsCorrectPath) {
#endif
ASSERT_THAT(result, EndsWith(expected_end));
ASSERT_TRUE(FileExists(result)) << "Expected " << result << " to exist; did "
<< "ResourcePath return an incorrect path?";
ASSERT_TRUE(FileExists(result)) << "Expected " << result
<< " to exist; did "
"ResourcePath return an incorrect path?";
}
TEST_F(FileUtilsTest, ResourcePathFromRootWorkingDir) {


@ -677,9 +677,10 @@ void OveruseFrameDetector::CheckForOveruse(
in_quick_rampup_ ? kQuickRampUpDelayMs : current_rampup_delay_ms_;
RTC_LOG(LS_VERBOSE) << " Frame stats: "
<< " encode usage " << *encode_usage_percent_
<< " overuse detections " << num_overuse_detections_
<< " rampup delay " << rampup_delay;
" encode usage "
<< *encode_usage_percent_ << " overuse detections "
<< num_overuse_detections_ << " rampup delay "
<< rampup_delay;
}
void OveruseFrameDetector::SetOptions(const CpuOveruseOptions& options) {


@ -927,7 +927,8 @@ void RtpVideoStreamReceiver::InsertSpsPpsIntoTracker(uint8_t payload_type) {
return;
RTC_LOG(LS_INFO) << "Found out of band supplied codec parameters for"
<< " payload type: " << static_cast<int>(payload_type);
" payload type: "
<< static_cast<int>(payload_type);
H264SpropParameterSets sprop_decoder;
auto sprop_base64_it =


@ -1172,7 +1172,7 @@ void VideoStreamEncoder::MaybeEncodeVideoFrame(const VideoFrame& video_frame,
if (frame_dropping_enabled && frame_dropper_.DropFrame()) {
RTC_LOG(LS_VERBOSE)
<< "Drop Frame: "
<< "target bitrate "
"target bitrate "
<< (last_encoder_rate_settings_
? last_encoder_rate_settings_->encoder_target.bps()
: 0)
@ -1901,8 +1901,8 @@ VideoStreamEncoder::ParseEncoderSwitchFieldTrial() const {
rtc::StringBuilder ss;
ss << "Successfully parsed WebRTC-NetworkCondition-EncoderSwitch field "
"trial."
<< " to_codec:" << result.to_codec
<< " to_param:" << result.to_param.value_or("<none>")
" to_codec:"
<< result.to_codec << " to_param:" << result.to_param.value_or("<none>")
<< " to_value:" << result.to_value.value_or("<none>")
<< " codec_thresholds:";
@ -1935,7 +1935,8 @@ VideoStreamEncoder::ParseAutomatincAnimationDetectionFieldTrial() const {
}
RTC_LOG(LS_INFO) << "Automatic animation detection experiment settings:"
<< " min_duration_ms=" << result.min_duration_ms
" min_duration_ms="
<< result.min_duration_ms
<< " min_area_ration=" << result.min_area_ratio
<< " min_fps=" << result.min_fps;