Use backticks not vertical bars to denote variables in comments for /examples
Bug: webrtc:12338
Change-Id: I753a476d1574d8dd50f1b6d4bfc2beb7f6f1f913
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/226947
Reviewed-by: Harald Alvestrand <hta@webrtc.org>
Commit-Queue: Artem Titov <titovartem@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#34562}

Parent: d7ac581045
Commit: 36de9dfb6e
14 changed files with 28 additions and 28 deletions

@@ -77,7 +77,7 @@ public class AppRTCAudioManager {
   // This device is changed automatically using a certain scheme where e.g.
   // a wired headset "wins" over speaker phone. It is also possible for a
   // user to explicitly select a device (and overrid any predefined scheme).
-  // See |userSelectedAudioDevice| for details.
+  // See `userSelectedAudioDevice` for details.
   private AudioDevice selectedAudioDevice;

   // Contains the user-selected audio device which overrides the predefined

@@ -216,7 +216,7 @@ public class AppRTCAudioManager {
     // Create an AudioManager.OnAudioFocusChangeListener instance.
     audioFocusChangeListener = new AudioManager.OnAudioFocusChangeListener() {
       // Called on the listener to notify if the audio focus for this listener has been changed.
-      // The |focusChange| value indicates whether the focus was gained, whether the focus was lost,
+      // The `focusChange` value indicates whether the focus was gained, whether the focus was lost,
       // and whether that loss is transient, or whether the new focus holder will hold it for an
       // unknown amount of time.
       // TODO(henrika): possibly extend support of handling audio-focus changes. Only contains
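
For context, a minimal Java sketch (not part of this commit) of how an AudioManager.OnAudioFocusChangeListener like the one above is typically created and registered; the audioManager field, the TAG constant and the chosen stream/duration hint are assumptions for illustration:

// Hypothetical sketch: react to the focusChange value the comment above describes.
AudioManager.OnAudioFocusChangeListener listener = new AudioManager.OnAudioFocusChangeListener() {
  @Override
  public void onAudioFocusChange(int focusChange) {
    switch (focusChange) {
      case AudioManager.AUDIOFOCUS_GAIN:
        Log.d(TAG, "Audio focus gained");
        break;
      case AudioManager.AUDIOFOCUS_LOSS:
        Log.d(TAG, "Audio focus lost for an unknown amount of time");
        break;
      case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT:
        Log.d(TAG, "Audio focus lost temporarily");
        break;
      default:
        Log.d(TAG, "Audio focus change: " + focusChange);
        break;
    }
  }
};
// Request focus for the voice-call stream; AUDIOFOCUS_REQUEST_GRANTED signals success.
int result = audioManager.requestAudioFocus(
    listener, AudioManager.STREAM_VOICE_CALL, AudioManager.AUDIOFOCUS_GAIN_TRANSIENT);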

@@ -573,7 +573,7 @@ public class AppRTCAudioManager {
     } else {
       // No wired headset and no Bluetooth, hence the audio-device list can contain speaker
       // phone (on a tablet), or speaker phone and earpiece (on mobile phone).
-      // |defaultAudioDevice| contains either AudioDevice.SPEAKER_PHONE or AudioDevice.EARPIECE
+      // `defaultAudioDevice` contains either AudioDevice.SPEAKER_PHONE or AudioDevice.EARPIECE
       // depending on the user's selection.
       newAudioDevice = defaultAudioDevice;
     }

@@ -364,7 +364,7 @@ public class AppRTCBluetoothManager {
    * Use the BluetoothHeadset proxy object (controls the Bluetooth Headset
    * Service via IPC) to update the list of connected devices for the HEADSET
    * profile. The internal state will change to HEADSET_UNAVAILABLE or to
-   * HEADSET_AVAILABLE and |bluetoothDevice| will be mapped to the connected
+   * HEADSET_AVAILABLE and `bluetoothDevice` will be mapped to the connected
    * device if available.
    */
   public void updateDevice() {
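
As background for updateDevice(), a hedged sketch of how the HEADSET profile proxy is typically queried on Android; the bluetoothHeadset proxy and the bluetoothDevice/bluetoothState fields are assumptions here, only the state names come from the comment above:

// Hypothetical sketch: ask the BluetoothHeadset proxy for connected devices and
// map the result to the availability states mentioned above.
List<BluetoothDevice> devices = bluetoothHeadset.getConnectedDevices();
if (devices.isEmpty()) {
  bluetoothDevice = null;
  bluetoothState = State.HEADSET_UNAVAILABLE;
} else {
  // Only one headset is handled at a time, so the first connected device is used.
  bluetoothDevice = devices.get(0);
  bluetoothState = State.HEADSET_AVAILABLE;
}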

@@ -100,7 +100,7 @@ public interface AppRTCClient {
   /**
    * Callback interface for messages delivered on signaling channel.
    *
-   * <p>Methods are guaranteed to be invoked on the UI thread of |activity|.
+   * <p>Methods are guaranteed to be invoked on the UI thread of `activity`.
    */
   interface SignalingEvents {
     /**

@@ -34,8 +34,8 @@ public class AppRTCProximitySensor implements SensorEventListener {
   private static final String TAG = "AppRTCProximitySensor";

   // This class should be created, started and stopped on one thread
-  // (e.g. the main thread). We use |nonThreadSafe| to ensure that this is
-  // the case. Only active when |DEBUG| is set to true.
+  // (e.g. the main thread). We use `nonThreadSafe` to ensure that this is
+  // the case. Only active when `DEBUG` is set to true.
   private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();

   private final Runnable onSensorStateListener;
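
The ThreadUtils.ThreadChecker referenced above binds an object to a single thread and asserts that later calls happen on that thread; a minimal stand-alone sketch of the same idea (binding at construction, names purely illustrative):

// Hypothetical minimal thread checker: remember the construction thread and
// fail fast if a later call arrives on a different thread.
final class SimpleThreadChecker {
  private final Thread expected = Thread.currentThread();

  public void checkIsOnValidThread() {
    if (Thread.currentThread() != expected) {
      throw new IllegalStateException("Wrong thread: " + Thread.currentThread().getName());
    }
  }
}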

@@ -676,7 +676,7 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEven
     }
   }

-  // Log |msg| and Toast about it.
+  // Log `msg` and Toast about it.
   private void logAndToast(String msg) {
     Log.d(TAG, msg);
     if (logToast != null) {
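
A hedged sketch of how the logAndToast() helper above can be completed; only the first two statements appear in the diff context, the logToast field and the use of LENGTH_SHORT are assumptions:

// Hypothetical completion: log msg, cancel any Toast still on screen, then show a new one.
private void logAndToast(String msg) {
  Log.d(TAG, msg);
  if (logToast != null) {
    logToast.cancel();
  }
  logToast = Toast.makeText(this, msg, Toast.LENGTH_SHORT);
  logToast.show();
}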

@@ -320,7 +320,7 @@ public class DirectRTCClient implements AppRTCClient, TCPChannelClient.TCPChanne
     });
   }

-  // Put a |key|->|value| mapping in |json|.
+  // Put a `key`->`value` mapping in `json`.
   private static void jsonPut(JSONObject json, String key, Object value) {
     try {
       json.put(key, value);
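
For reference, a plausible completion of the jsonPut() helper shown above; JSONObject.put() declares the checked JSONException, and the catch clause below is an assumed way of handling it:

// Hypothetical completion: rethrow the checked JSONException as an unchecked
// exception so callers do not have to declare it.
private static void jsonPut(JSONObject json, String key, Object value) {
  try {
    json.put(key, value);
  } catch (JSONException e) {
    throw new RuntimeException(e);
  }
}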

@@ -322,7 +322,7 @@ public class PeerConnectionClient {

   /**
    * Create a PeerConnectionClient with the specified parameters. PeerConnectionClient takes
-   * ownership of |eglBase|.
+   * ownership of `eglBase`.
    */
   public PeerConnectionClient(Context appContext, EglBase eglBase,
       PeerConnectionParameters peerConnectionParameters, PeerConnectionEvents events) {

@@ -620,7 +620,7 @@ public class PeerConnectionClient {
     isInitiator = false;

     // Set INFO libjingle logging.
-    // NOTE: this _must_ happen while |factory| is alive!
+    // NOTE: this _must_ happen while `factory` is alive!
     Logging.enableLogToDebugOutput(Logging.Severity.LS_INFO);

     List<String> mediaStreamLabels = Collections.singletonList("ARDAMS");

@@ -1115,7 +1115,7 @@ public class PeerConnectionClient {
     final List<String> unpreferredPayloadTypes =
         new ArrayList<>(origLineParts.subList(3, origLineParts.size()));
     unpreferredPayloadTypes.removeAll(preferredPayloadTypes);
-    // Reconstruct the line with |preferredPayloadTypes| moved to the beginning of the payload
+    // Reconstruct the line with `preferredPayloadTypes` moved to the beginning of the payload
     // types.
     final List<String> newLineParts = new ArrayList<>();
     newLineParts.addAll(header);

@@ -1131,7 +1131,7 @@ public class PeerConnectionClient {
       Log.w(TAG, "No mediaDescription line, so can't prefer " + codec);
       return sdp;
     }
-    // A list with all the payload types with name |codec|. The payload types are integers in the
+    // A list with all the payload types with name `codec`. The payload types are integers in the
     // range 96-127, but they are stored as strings here.
     final List<String> codecPayloadTypes = new ArrayList<>();
     // a=rtpmap:<payload type> <encoding name>/<clock rate> [/<encoding parameters>]
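
The two hunks above belong to the SDP munging that moves a preferred codec's payload types to the front of the m= line; a rough, self-contained Java sketch of that reconstruction step, reusing the names visible in the diff context and otherwise assumed:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

final class SdpLineUtil {
  // Hypothetical sketch: rebuild "m=video 9 UDP/TLS/RTP/SAVPF 96 97 98 ..." so that
  // preferredPayloadTypes come directly after the three header fields.
  static String movePayloadTypesToFront(List<String> preferredPayloadTypes, String mLine) {
    final List<String> origLineParts = Arrays.asList(mLine.split(" "));
    // Parts 0..2 are "m=<media>", "<port>" and "<proto>"; the rest are payload types.
    final List<String> header = origLineParts.subList(0, 3);
    final List<String> unpreferredPayloadTypes =
        new ArrayList<>(origLineParts.subList(3, origLineParts.size()));
    unpreferredPayloadTypes.removeAll(preferredPayloadTypes);
    // Reconstruct the line with the preferred payload types first, then the remaining ones.
    final List<String> newLineParts = new ArrayList<>();
    newLineParts.addAll(header);
    newLineParts.addAll(preferredPayloadTypes);
    newLineParts.addAll(unpreferredPayloadTypes);
    return String.join(" ", newLineParts);
  }
}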

@@ -66,7 +66,7 @@ public class UnhandledExceptionHandler implements Thread.UncaughtExceptionHandle
     });
   }

-  // Returns the Message attached to the original Cause of |t|.
+  // Returns the Message attached to the original Cause of `t`.
   private static String getTopLevelCauseMessage(Throwable t) {
     Throwable topLevelCause = t;
     while (topLevelCause.getCause() != null) {

@@ -75,8 +75,8 @@ public class UnhandledExceptionHandler implements Thread.UncaughtExceptionHandle
     return topLevelCause.getMessage();
   }

-  // Returns a human-readable String of the stacktrace in |t|, recursively
-  // through all Causes that led to |t|.
+  // Returns a human-readable String of the stacktrace in `t`, recursively
+  // through all Causes that led to `t`.
   private static String getRecursiveStackTrace(Throwable t) {
     StringWriter writer = new StringWriter();
     t.printStackTrace(new PrintWriter(writer));
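
Since the diff context already shows most of these two helpers, here is a hedged sketch of both methods in full; only the lines visible above come from the diff, the remainder is a plausible completion:

// Hypothetical completion: walk the cause chain to the root cause and return its message.
private static String getTopLevelCauseMessage(Throwable t) {
  Throwable topLevelCause = t;
  while (topLevelCause.getCause() != null) {
    topLevelCause = topLevelCause.getCause();
  }
  return topLevelCause.getMessage();
}

// Hypothetical completion: printStackTrace(PrintWriter) already renders every
// "Caused by:" section, so writing into a StringWriter yields the recursive trace.
private static String getRecursiveStackTrace(Throwable t) {
  StringWriter writer = new StringWriter();
  t.printStackTrace(new PrintWriter(writer));
  return writer.toString();
}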

@@ -368,7 +368,7 @@ public class WebSocketRTCClient implements AppRTCClient, WebSocketChannelEvents
     });
   }

-  // Put a |key|->|value| mapping in |json|.
+  // Put a `key`->`value` mapping in `json`.
   private static void jsonPut(JSONObject json, String key, Object value) {
     try {
       json.put(key, value);

@@ -63,7 +63,7 @@ typedef NS_ENUM(NSInteger, ARDAppClientState) {
 // class should only be called from the main queue.
 @interface ARDAppClient : NSObject

-// If |shouldGetStats| is true, stats will be reported in 1s intervals through
+// If `shouldGetStats` is true, stats will be reported in 1s intervals through
 // the delegate.
 @property(nonatomic, assign) BOOL shouldGetStats;
 @property(nonatomic, readonly) ARDAppClientState state;

@@ -75,8 +75,8 @@ typedef NS_ENUM(NSInteger, ARDAppClientState) {
 - (instancetype)initWithDelegate:(id<ARDAppClientDelegate>)delegate;

 // Establishes a connection with the AppRTC servers for the given room id.
-// |settings| is an object containing settings such as video codec for the call.
-// If |isLoopback| is true, the call will connect to itself.
+// `settings` is an object containing settings such as video codec for the call.
+// If `isLoopback` is true, the call will connect to itself.
 - (void)connectToRoomWithId:(NSString *)roomId
                     settings:(ARDSettingsModel *)settings
                   isLoopback:(BOOL)isLoopback;

@@ -64,7 +64,7 @@ static int64_t const kARDAppClientRtcEventLogMaxSizeInBytes = 5e6; // 5 MB.
 static int const kKbpsMultiplier = 1000;

 // We need a proxy to NSTimer because it causes a strong retain cycle. When
-// using the proxy, |invalidate| must be called before it properly deallocs.
+// using the proxy, `invalidate` must be called before it properly deallocs.
 @interface ARDTimerProxy : NSObject

 - (instancetype)initWithInterval:(NSTimeInterval)interval

@@ -64,7 +64,7 @@ static CGFloat const kCallControlMargin = 8;

 - (BOOL)textFieldShouldReturn:(UITextField *)textField {
   // There is no other control that can take focus, so manually resign focus
-  // when return (Join) is pressed to trigger |textFieldDidEndEditing|.
+  // when return (Join) is pressed to trigger `textFieldDidEndEditing`.
   [textField resignFirstResponder];
   return YES;
 }

@@ -99,14 +99,14 @@ class DataSocket : public SocketBase {
   // Send a raw buffer of bytes.
   bool Send(const std::string& data) const;

-  // Send an HTTP response. The |status| should start with a valid HTTP
+  // Send an HTTP response. The `status` should start with a valid HTTP
   // response code, followed by a string. E.g. "200 OK".
-  // If |connection_close| is set to true, an extra "Connection: close" HTTP
-  // header will be included. |content_type| is the mime content type, not
+  // If `connection_close` is set to true, an extra "Connection: close" HTTP
+  // header will be included. `content_type` is the mime content type, not
   // including the "Content-Type: " string.
-  // |extra_headers| should be either empty or a list of headers where each
+  // `extra_headers` should be either empty or a list of headers where each
   // header terminates with "\r\n".
-  // |data| is the body of the message. It's length will be specified via
+  // `data` is the body of the message. It's length will be specified via
   // a "Content-Length" header.
   bool Send(const std::string& status,
             bool connection_close,
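
The comment block above spells out the layout of the HTTP response that the overloaded Send() writes; a purely illustrative Java analogue of that layout (not the C++ implementation, all names below are assumptions):

// Hypothetical illustration: status line, optional "Connection: close", Content-Type,
// Content-Length, caller-supplied extra headers (each ending in "\r\n"), blank line, body.
static String buildHttpResponse(
    String status, boolean connectionClose, String contentType, String extraHeaders, String data) {
  StringBuilder response = new StringBuilder("HTTP/1.1 ");
  response.append(status).append("\r\n"); // e.g. "200 OK"
  if (connectionClose) {
    response.append("Connection: close\r\n");
  }
  if (!contentType.isEmpty()) {
    response.append("Content-Type: ").append(contentType).append("\r\n");
  }
  // Assumes an ASCII body so that character count equals byte count.
  response.append("Content-Length: ").append(data.length()).append("\r\n");
  response.append(extraHeaders);
  response.append("\r\n"); // a blank line terminates the header section
  response.append(data);
  return response.toString();
}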

@@ -80,7 +80,7 @@ class PeerChannel {
   // request. Otherwise the request is not peerconnection related.
   static bool IsPeerConnection(const DataSocket* ds);

-  // Finds a connected peer that's associated with the |ds| socket.
+  // Finds a connected peer that's associated with the `ds` socket.
   ChannelMember* Lookup(DataSocket* ds) const;

   // Checks if the request has a "peer_id" parameter and if so, looks up the