Index: webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrame.h |
diff --git a/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrame.h b/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrame.h |
index cbd8cc74e81775fbe340a34a7dcd5f7d1d9bc385..ddf8a6b6281666934bbf416338fee0c7fdcb4a1c 100644 |
--- a/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrame.h |
+++ b/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrame.h |
@@ -50,6 +50,27 @@ RTC_EXPORT |
@property(nonatomic, readonly) CVPixelBufferRef nativeHandle; |
- (instancetype)init NS_UNAVAILABLE; |
++ (instancetype)new NS_UNAVAILABLE; |
tkchin_webrtc
2017/02/22 00:01:18
Did you mean + ? (`new` is a class method on NSObject, so marking it unavailable requires a `+` declaration, not `-`.)
Personally I don't think we need [comment truncated in export]
 |
+ |
+/** Initialize an RTCVideoFrame from a pixel buffer, rotation, and timestamp. |
+ */ |
+- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer |
+ rotation:(RTCVideoRotation)rotation |
+ timeStampNs:(int64_t)timeStampNs; |
+ |
+/** Initialize an RTCVideoFrame from a pixel buffer combined with cropping and |
+ * scaling. Cropping will be applied first on the pixel buffer, followed by |
+ * scaling to the final resolution of scaledWidth x scaledHeight. |
+ */ |
+- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer |
+ scaledWidth:(int)scaledWidth |
+ scaledHeight:(int)scaledHeight |
+ cropWidth:(int)cropWidth |
+ cropHeight:(int)cropHeight |
+ cropX:(int)cropX |
+ cropY:(int)cropY |
+ rotation:(RTCVideoRotation)rotation |
+ timeStampNs:(int64_t)timeStampNs; |
/** Return a frame that is guaranteed to be I420, i.e. it is possible to access |
* the YUV data on it. |