webrtc/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm
Niels Möller 87e2d785a0 Prepare for splitting FrameType into AudioFrameType and VideoFrameType
This CL deprecates the FrameType enum and adds the aliases AudioFrameType
and VideoFrameType.

After downstream usage is updated, the enums will be separated and moved
out of common_types.h (see the alias sketch below the commit metadata).

Bug: webrtc:6883
Change-Id: I2aaf660169da45f22574b4cbb16aea8522cc07a6
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/123184
Commit-Queue: Niels Moller <nisse@webrtc.org>
Reviewed-by: Karl Wiberg <kwiberg@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#27011}
2019-03-07 10:12:57 +00:00
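
For context, a minimal sketch of the transitional shape the commit message describes, assuming the legacy enum stays in common_types.h during the migration; the enumerator names and values are shown for illustration and are not confirmed by this page:

// Sketch only: the deprecated enum is kept for downstream users, and the new
// names are introduced as aliases until the audio and video enums are split.
enum FrameType {
  kEmptyFrame = 0,
  kAudioFrameSpeech = 1,
  kAudioFrameCN = 2,
  kVideoFrameKey = 3,
  kVideoFrameDelta = 4,
};
using AudioFrameType = FrameType;  // to become an audio-only enum later
using VideoFrameType = FrameType;  // to become a video-only enum later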


/*
 *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#import "RTCEncodedImage+Private.h"

#include "rtc_base/numerics/safe_conversions.h"

@implementation RTCEncodedImage (Private)

- (instancetype)initWithNativeEncodedImage:(const webrtc::EncodedImage &)encodedImage {
  if (self = [super init]) {
    // Wrap the buffer in NSData without copying, do not take ownership.
    self.buffer = [NSData dataWithBytesNoCopy:encodedImage.mutable_data()
                                       length:encodedImage.size()
                                 freeWhenDone:NO];
    self.encodedWidth = rtc::dchecked_cast<int32_t>(encodedImage._encodedWidth);
    self.encodedHeight = rtc::dchecked_cast<int32_t>(encodedImage._encodedHeight);
    self.timeStamp = encodedImage.Timestamp();
    self.captureTimeMs = encodedImage.capture_time_ms_;
    self.ntpTimeMs = encodedImage.ntp_time_ms_;
    self.flags = encodedImage.timing_.flags;
    self.encodeStartMs = encodedImage.timing_.encode_start_ms;
    self.encodeFinishMs = encodedImage.timing_.encode_finish_ms;
    self.frameType = static_cast<RTCFrameType>(encodedImage._frameType);
    self.rotation = static_cast<RTCVideoRotation>(encodedImage.rotation_);
    self.completeFrame = encodedImage._completeFrame;
    self.qp = @(encodedImage.qp_);
    self.contentType = (encodedImage.content_type_ == webrtc::VideoContentType::SCREENSHARE) ?
        RTCVideoContentTypeScreenshare :
        RTCVideoContentTypeUnspecified;
  }

  return self;
}

- (webrtc::EncodedImage)nativeEncodedImage {
  // Return the pointer without copying.
  webrtc::EncodedImage encodedImage(
      (uint8_t *)self.buffer.bytes, (size_t)self.buffer.length, (size_t)self.buffer.length);
  encodedImage._encodedWidth = rtc::dchecked_cast<uint32_t>(self.encodedWidth);
  encodedImage._encodedHeight = rtc::dchecked_cast<uint32_t>(self.encodedHeight);
  encodedImage.SetTimestamp(self.timeStamp);
  encodedImage.capture_time_ms_ = self.captureTimeMs;
  encodedImage.ntp_time_ms_ = self.ntpTimeMs;
  encodedImage.timing_.flags = self.flags;
  encodedImage.timing_.encode_start_ms = self.encodeStartMs;
  encodedImage.timing_.encode_finish_ms = self.encodeFinishMs;
  encodedImage._frameType = webrtc::VideoFrameType(self.frameType);
  encodedImage.rotation_ = webrtc::VideoRotation(self.rotation);
  encodedImage._completeFrame = self.completeFrame;
  encodedImage.qp_ = self.qp ? self.qp.intValue : -1;
  encodedImage.content_type_ = (self.contentType == RTCVideoContentTypeScreenshare) ?
      webrtc::VideoContentType::SCREENSHARE :
      webrtc::VideoContentType::UNSPECIFIED;

  return encodedImage;
}

@end
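
A usage sketch, not part of the source file, showing how the two conversions above round-trip; the wrapper function and the qp assignment are illustrative assumptions, while the no-copy, no-ownership behavior follows from the comments in the file:

// Illustrative usage only (assumes ARC and a writable qp property).
#import "RTCEncodedImage+Private.h"

void RoundTripExample(const webrtc::EncodedImage &nativeImage) {
  // Wraps the native buffer without copying or taking ownership, so
  // |nativeImage| must outlive |objcImage| and anything reading its buffer.
  RTCEncodedImage *objcImage =
      [[RTCEncodedImage alloc] initWithNativeEncodedImage:nativeImage];

  // Metadata is mirrored onto the Objective-C object and can be adjusted here.
  objcImage.qp = @(30);

  // Converts back; the returned EncodedImage points at the same bytes.
  webrtc::EncodedImage roundTripped = [objcImage nativeEncodedImage];
  (void)roundTripped;
}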