Chromium Code Reviews

Side by Side Diff: webrtc/sdk/objc/Framework/Classes/VideoToolbox/RTCVideoEncoderH264.mm

Issue 2987413002: ObjC: Implement HW codecs in ObjC instead of C++ (Closed)
Patch Set: Move RTCH264PacketizationMode to RTCVideoCodec.h (created 3 years, 4 months ago)
1 /* 1 /*
2 * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. 2 * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
3 * 3 *
4 * Use of this source code is governed by a BSD-style license 4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source 5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found 6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may 7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree. 8 * be found in the AUTHORS file in the root of the source tree.
9 * 9 *
10 */ 10 */
11 11
12 #include "webrtc/sdk/objc/Framework/Classes/VideoToolbox/encoder.h" 12 #import "WebRTC/RTCVideoCodecH264.h"
13 13
14 #include <memory> 14 #import <VideoToolbox/VideoToolbox.h>
15 #include <string>
16 #include <vector> 15 #include <vector>
17 16
18 #if defined(WEBRTC_IOS) 17 #if defined(WEBRTC_IOS)
19 #import "Common/RTCUIApplicationStatusObserver.h" 18 #import "Common/RTCUIApplicationStatusObserver.h"
20 #import "WebRTC/UIDevice+RTCDevice.h" 19 #import "WebRTC/UIDevice+RTCDevice.h"
21 #endif 20 #endif
21 #import "PeerConnection/RTCVideoCodec+Private.h"
22 #import "WebRTC/RTCVideoCodec.h"
23 #import "WebRTC/RTCVideoFrame.h"
22 #import "WebRTC/RTCVideoFrameBuffer.h" 24 #import "WebRTC/RTCVideoFrameBuffer.h"
25 #import "helpers.h"
23 #include "libyuv/convert_from.h" 26 #include "libyuv/convert_from.h"
27 #include "webrtc/common_video/h264/h264_bitstream_parser.h"
24 #include "webrtc/common_video/h264/profile_level_id.h" 28 #include "webrtc/common_video/h264/profile_level_id.h"
25 #include "webrtc/rtc_base/checks.h" 29 #include "webrtc/common_video/include/bitrate_adjuster.h"
30 #include "webrtc/modules/include/module_common_types.h"
31 #include "webrtc/modules/video_coding/include/video_error_codes.h"
32 #include "webrtc/rtc_base/buffer.h"
26 #include "webrtc/rtc_base/logging.h" 33 #include "webrtc/rtc_base/logging.h"
27 #include "webrtc/sdk/objc/Framework/Classes/Video/objc_frame_buffer.h" 34 #include "webrtc/rtc_base/timeutils.h"
28 #include "webrtc/sdk/objc/Framework/Classes/VideoToolbox/nalu_rewriter.h" 35 #include "webrtc/sdk/objc/Framework/Classes/VideoToolbox/nalu_rewriter.h"
29 #include "webrtc/system_wrappers/include/clock.h" 36 #include "webrtc/system_wrappers/include/clock.h"
30 37
31 namespace internal { 38 @interface RTCVideoEncoderH264 ()
39
40 - (void)frameWasEncoded:(OSStatus)status
41 flags:(VTEncodeInfoFlags)infoFlags
42 sampleBuffer:(CMSampleBufferRef)sampleBuffer
43 codecSpecificInfo:(id<RTCCodecSpecificInfo>)codecSpecificInfo
44 width:(int32_t)width
45 height:(int32_t)height
46 renderTimeMs:(int64_t)renderTimeMs
47 timestamp:(uint32_t)timestamp
48 rotation:(RTCVideoRotation)rotation;
49
50 @end
32 51
33 // The ratio between kVTCompressionPropertyKey_DataRateLimits and 52 // The ratio between kVTCompressionPropertyKey_DataRateLimits and
34 // kVTCompressionPropertyKey_AverageBitRate. The data rate limit is set higher 53 // kVTCompressionPropertyKey_AverageBitRate. The data rate limit is set higher
35 // than the average bit rate to avoid undershooting the target. 54 // than the average bit rate to avoid undershooting the target.
36 const float kLimitToAverageBitRateFactor = 1.5f; 55 const float kLimitToAverageBitRateFactor = 1.5f;
37 // These thresholds deviate from the default h264 QP thresholds, as they 56 // These thresholds deviate from the default h264 QP thresholds, as they
38 // have been found to work better on devices that support VideoToolbox 57 // have been found to work better on devices that support VideoToolbox
39 const int kLowH264QpThreshold = 28; 58 const int kLowH264QpThreshold = 28;
40 const int kHighH264QpThreshold = 39; 59 const int kHighH264QpThreshold = 39;
41 60
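Note, not part of the patch: kLimitToAverageBitRateFactor is only consumed further down, in -setEncoderBitrateBps:, where it turns the average bitrate into the byte cap passed to kVTCompressionPropertyKey_DataRateLimits. A rough sketch of that arithmetic, using an assumed 500 kbps target purely as an example:

    // Illustrative only; mirrors the computation in setEncoderBitrateBps below.
    uint32_t targetBitrateBps = 500000;  // assumed example target (500 kbps)
    int64_t dataLimitBytesPerSecond =
        static_cast<int64_t>(targetBitrateBps * kLimitToAverageBitRateFactor / 8);
    // 500000 * 1.5 / 8 = 93750 bytes, enforced over a one-second window.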
42 // Convenience function for creating a dictionary.
43 inline CFDictionaryRef CreateCFDictionary(CFTypeRef* keys,
44 CFTypeRef* values,
45 size_t size) {
46 return CFDictionaryCreate(kCFAllocatorDefault, keys, values, size,
47 &kCFTypeDictionaryKeyCallBacks,
48 &kCFTypeDictionaryValueCallBacks);
49 }
50
51 // Copies characters from a CFStringRef into a std::string.
52 std::string CFStringToString(const CFStringRef cf_string) {
53 RTC_DCHECK(cf_string);
54 std::string std_string;
55 // Get the size needed for UTF8 plus terminating character.
56 size_t buffer_size =
57 CFStringGetMaximumSizeForEncoding(CFStringGetLength(cf_string),
58 kCFStringEncodingUTF8) +
59 1;
60 std::unique_ptr<char[]> buffer(new char[buffer_size]);
61 if (CFStringGetCString(cf_string, buffer.get(), buffer_size,
62 kCFStringEncodingUTF8)) {
63 // Copy over the characters.
64 std_string.assign(buffer.get());
65 }
66 return std_string;
67 }
68
69 // Convenience function for setting a VT property.
70 void SetVTSessionProperty(VTSessionRef session,
71 CFStringRef key,
72 int32_t value) {
73 CFNumberRef cfNum =
74 CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &value);
75 OSStatus status = VTSessionSetProperty(session, key, cfNum);
76 CFRelease(cfNum);
77 if (status != noErr) {
78 std::string key_string = CFStringToString(key);
79 LOG(LS_ERROR) << "VTSessionSetProperty failed to set: " << key_string
80 << " to " << value << ": " << status;
81 }
82 }
83
84 // Convenience function for setting a VT property.
85 void SetVTSessionProperty(VTSessionRef session,
86 CFStringRef key,
87 uint32_t value) {
88 int64_t value_64 = value;
89 CFNumberRef cfNum =
90 CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt64Type, &value_64);
91 OSStatus status = VTSessionSetProperty(session, key, cfNum);
92 CFRelease(cfNum);
93 if (status != noErr) {
94 std::string key_string = CFStringToString(key);
95 LOG(LS_ERROR) << "VTSessionSetProperty failed to set: " << key_string
96 << " to " << value << ": " << status;
97 }
98 }
99
100 // Convenience function for setting a VT property.
101 void SetVTSessionProperty(VTSessionRef session, CFStringRef key, bool value) {
102 CFBooleanRef cf_bool = (value) ? kCFBooleanTrue : kCFBooleanFalse;
103 OSStatus status = VTSessionSetProperty(session, key, cf_bool);
104 if (status != noErr) {
105 std::string key_string = CFStringToString(key);
106 LOG(LS_ERROR) << "VTSessionSetProperty failed to set: " << key_string
107 << " to " << value << ": " << status;
108 }
109 }
110
111 // Convenience function for setting a VT property.
112 void SetVTSessionProperty(VTSessionRef session,
113 CFStringRef key,
114 CFStringRef value) {
115 OSStatus status = VTSessionSetProperty(session, key, value);
116 if (status != noErr) {
117 std::string key_string = CFStringToString(key);
118 std::string val_string = CFStringToString(value);
119 LOG(LS_ERROR) << "VTSessionSetProperty failed to set: " << key_string
120 << " to " << val_string << ": " << status;
121 }
122 }
123
124 // Struct that we pass to the encoder per frame to encode. We receive it again 61 // Struct that we pass to the encoder per frame to encode. We receive it again
125 // in the encoder callback. 62 // in the encoder callback.
126 struct FrameEncodeParams { 63 struct RTCFrameEncodeParams {
127 FrameEncodeParams(webrtc::H264VideoToolboxEncoder* e, 64 RTCFrameEncodeParams(RTCVideoEncoderH264 *e,
128 const webrtc::CodecSpecificInfo* csi, 65 RTCCodecSpecificInfoH264 *csi,
129 int32_t w, 66 int32_t w,
130 int32_t h, 67 int32_t h,
131 int64_t rtms, 68 int64_t rtms,
132 uint32_t ts, 69 uint32_t ts,
133 webrtc::VideoRotation r) 70 RTCVideoRotation r)
134 : encoder(e), 71 : encoder(e), width(w), height(h), render_time_ms(rtms), timestamp(ts), rotation(r) {
135 width(w),
136 height(h),
137 render_time_ms(rtms),
138 timestamp(ts),
139 rotation(r) {
140 if (csi) { 72 if (csi) {
141 codec_specific_info = *csi; 73 codecSpecificInfo = csi;
142 } else { 74 } else {
143 codec_specific_info.codecType = webrtc::kVideoCodecH264; 75 codecSpecificInfo = [[RTCCodecSpecificInfoH264 alloc] init];
144 } 76 }
145 } 77 }
146 78
147 webrtc::H264VideoToolboxEncoder* encoder; 79 RTCVideoEncoderH264 *encoder;
148 webrtc::CodecSpecificInfo codec_specific_info; 80 RTCCodecSpecificInfoH264 *codecSpecificInfo;
149 int32_t width; 81 int32_t width;
150 int32_t height; 82 int32_t height;
151 int64_t render_time_ms; 83 int64_t render_time_ms;
152 uint32_t timestamp; 84 uint32_t timestamp;
153 webrtc::VideoRotation rotation; 85 RTCVideoRotation rotation;
154 }; 86 };
155 87
156 // We receive I420Frames as input, but we need to feed CVPixelBuffers into the 88 // We receive I420Frames as input, but we need to feed CVPixelBuffers into the
157 // encoder. This performs the copy and format conversion. 89 // encoder. This performs the copy and format conversion.
158 // TODO(tkchin): See if encoder will accept i420 frames and compare performance. 90 // TODO(tkchin): See if encoder will accept i420 frames and compare performance.
159 bool CopyVideoFrameToPixelBuffer(const rtc::scoped_refptr<webrtc::I420BufferInterface>& frame, 91 bool CopyVideoFrameToPixelBuffer(id<RTCI420Buffer> frameBuffer, CVPixelBufferRef pixelBuffer) {
160 CVPixelBufferRef pixel_buffer) { 92 RTC_DCHECK(pixelBuffer);
161 RTC_DCHECK(pixel_buffer); 93 RTC_DCHECK_EQ(CVPixelBufferGetPixelFormatType(pixelBuffer),
162 RTC_DCHECK_EQ(CVPixelBufferGetPixelFormatType(pixel_buffer),
163 kCVPixelFormatType_420YpCbCr8BiPlanarFullRange); 94 kCVPixelFormatType_420YpCbCr8BiPlanarFullRange);
164 RTC_DCHECK_EQ(CVPixelBufferGetHeightOfPlane(pixel_buffer, 0), 95 RTC_DCHECK_EQ(CVPixelBufferGetHeightOfPlane(pixelBuffer, 0), frameBuffer.height);
165 static_cast<size_t>(frame->height())); 96 RTC_DCHECK_EQ(CVPixelBufferGetWidthOfPlane(pixelBuffer, 0), frameBuffer.width);
166 RTC_DCHECK_EQ(CVPixelBufferGetWidthOfPlane(pixel_buffer, 0),
167 static_cast<size_t>(frame->width()));
168 97
169 CVReturn cvRet = CVPixelBufferLockBaseAddress(pixel_buffer, 0); 98 CVReturn cvRet = CVPixelBufferLockBaseAddress(pixelBuffer, 0);
170 if (cvRet != kCVReturnSuccess) { 99 if (cvRet != kCVReturnSuccess) {
171 LOG(LS_ERROR) << "Failed to lock base address: " << cvRet; 100 LOG(LS_ERROR) << "Failed to lock base address: " << cvRet;
172 return false; 101 return false;
173 } 102 }
174 uint8_t* dst_y = reinterpret_cast<uint8_t*>( 103 uint8_t *dstY = reinterpret_cast<uint8_t *>(CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0));
175 CVPixelBufferGetBaseAddressOfPlane(pixel_buffer, 0)); 104 int dstStrideY = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
176 int dst_stride_y = CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer, 0); 105 uint8_t *dstUV = reinterpret_cast<uint8_t *>(CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1));
177 uint8_t* dst_uv = reinterpret_cast<uint8_t*>( 106 int dstStrideUV = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
178 CVPixelBufferGetBaseAddressOfPlane(pixel_buffer, 1));
179 int dst_stride_uv = CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer, 1);
180 // Convert I420 to NV12. 107 // Convert I420 to NV12.
181 int ret = libyuv::I420ToNV12( 108 int ret = libyuv::I420ToNV12(frameBuffer.dataY,
182 frame->DataY(), frame->StrideY(), 109 frameBuffer.strideY,
183 frame->DataU(), frame->StrideU(), 110 frameBuffer.dataU,
184 frame->DataV(), frame->StrideV(), 111 frameBuffer.strideU,
185 dst_y, dst_stride_y, dst_uv, dst_stride_uv, 112 frameBuffer.dataV,
186 frame->width(), frame->height()); 113 frameBuffer.strideV,
187 CVPixelBufferUnlockBaseAddress(pixel_buffer, 0); 114 dstY,
115 dstStrideY,
116 dstUV,
117 dstStrideUV,
118 frameBuffer.width,
119 frameBuffer.height);
120 CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
188 if (ret) { 121 if (ret) {
189 LOG(LS_ERROR) << "Error converting I420 VideoFrame to NV12 :" << ret; 122 LOG(LS_ERROR) << "Error converting I420 VideoFrame to NV12 :" << ret;
190 return false; 123 return false;
191 } 124 }
192 return true; 125 return true;
193 } 126 }
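For orientation, and not taken from this change: kCVPixelFormatType_420YpCbCr8BiPlanarFullRange is NV12, which is why the copy needs exactly one destination pointer and stride per plane while the I420 source contributes three planes:

    // NV12 (biplanar 4:2:0) layout per frame:
    //   plane 0: Y Y Y Y ...      full resolution, one luma byte per pixel
    //   plane 1: Cb Cr Cb Cr ...  2x2-subsampled chroma, interleaved, half height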
194 127
195 CVPixelBufferRef CreatePixelBuffer(CVPixelBufferPoolRef pixel_buffer_pool) { 128 CVPixelBufferRef CreatePixelBuffer(CVPixelBufferPoolRef pixel_buffer_pool) {
196 if (!pixel_buffer_pool) { 129 if (!pixel_buffer_pool) {
197 LOG(LS_ERROR) << "Failed to get pixel buffer pool."; 130 LOG(LS_ERROR) << "Failed to get pixel buffer pool.";
198 return nullptr; 131 return nullptr;
199 } 132 }
200 CVPixelBufferRef pixel_buffer; 133 CVPixelBufferRef pixel_buffer;
201 CVReturn ret = CVPixelBufferPoolCreatePixelBuffer(nullptr, pixel_buffer_pool, 134 CVReturn ret = CVPixelBufferPoolCreatePixelBuffer(nullptr, pixel_buffer_pool, &pixel_buffer);
202 &pixel_buffer);
203 if (ret != kCVReturnSuccess) { 135 if (ret != kCVReturnSuccess) {
204 LOG(LS_ERROR) << "Failed to create pixel buffer: " << ret; 136 LOG(LS_ERROR) << "Failed to create pixel buffer: " << ret;
205 // We probably want to drop frames here, since failure probably means 137 // We probably want to drop frames here, since failure probably means
206 // that the pool is empty. 138 // that the pool is empty.
207 return nullptr; 139 return nullptr;
208 } 140 }
209 return pixel_buffer; 141 return pixel_buffer;
210 } 142 }
211 143
212 // This is the callback function that VideoToolbox calls when encode is 144 // This is the callback function that VideoToolbox calls when encode is
213 // complete. From inspection this happens on its own queue. 145 // complete. From inspection this happens on its own queue.
214 void VTCompressionOutputCallback(void* encoder, 146 void compressionOutputCallback(void *encoder,
215 void* params, 147 void *params,
216 OSStatus status, 148 OSStatus status,
217 VTEncodeInfoFlags info_flags, 149 VTEncodeInfoFlags infoFlags,
218 CMSampleBufferRef sample_buffer) { 150 CMSampleBufferRef sampleBuffer) {
219 std::unique_ptr<FrameEncodeParams> encode_params( 151 std::unique_ptr<RTCFrameEncodeParams> encodeParams(
220 reinterpret_cast<FrameEncodeParams*>(params)); 152 reinterpret_cast<RTCFrameEncodeParams *>(params));
221 encode_params->encoder->OnEncodedFrame( 153 [encodeParams->encoder frameWasEncoded:status
222 status, info_flags, sample_buffer, encode_params->codec_specific_info, 154 flags:infoFlags
223 encode_params->width, encode_params->height, 155 sampleBuffer:sampleBuffer
224 encode_params->render_time_ms, encode_params->timestamp, 156 codecSpecificInfo:encodeParams->codecSpecificInfo
225 encode_params->rotation); 157 width:encodeParams->width
158 height:encodeParams->height
159 renderTimeMs:encodeParams->render_time_ms
160 timestamp:encodeParams->timestamp
161 rotation:encodeParams->rotation];
226 } 162 }
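Because VideoToolbox calls back into a plain C function, the per-frame RTCFrameEncodeParams object is the only bridge back to the Objective-C encoder. A minimal sketch of that round trip, with `session`, `encoder`, `pixelBuffer` and the timing values as assumed placeholders:

    // Ownership passes to VideoToolbox here and is reclaimed exactly once when
    // compressionOutputCallback wraps the raw pointer back into a unique_ptr.
    std::unique_ptr<RTCFrameEncodeParams> params(new RTCFrameEncodeParams(
        encoder, nil, width, height, renderTimeMs, timestamp, rotation));
    VTCompressionSessionEncodeFrame(session, pixelBuffer, presentationTimeStamp,
                                    kCMTimeInvalid, nullptr /* frameProperties */,
                                    params.release(), nullptr /* infoFlagsOut */);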
227 163
228 // Extract VideoToolbox profile out of the cricket::VideoCodec. If there is no 164 // Extract VideoToolbox profile out of the cricket::VideoCodec. If there is no
229 // specific VideoToolbox profile for the specified level, AutoLevel will be 165 // specific VideoToolbox profile for the specified level, AutoLevel will be
230 // returned. The user must initialize the encoder with a resolution and 166 // returned. The user must initialize the encoder with a resolution and
231 // framerate conforming to the selected H264 level regardless. 167 // framerate conforming to the selected H264 level regardless.
232 CFStringRef ExtractProfile(const cricket::VideoCodec& codec) { 168 CFStringRef ExtractProfile(const cricket::VideoCodec &codec) {
233 const rtc::Optional<webrtc::H264::ProfileLevelId> profile_level_id = 169 const rtc::Optional<webrtc::H264::ProfileLevelId> profile_level_id =
234 webrtc::H264::ParseSdpProfileLevelId(codec.params); 170 webrtc::H264::ParseSdpProfileLevelId(codec.params);
235 RTC_DCHECK(profile_level_id); 171 RTC_DCHECK(profile_level_id);
236 switch (profile_level_id->profile) { 172 switch (profile_level_id->profile) {
237 case webrtc::H264::kProfileConstrainedBaseline: 173 case webrtc::H264::kProfileConstrainedBaseline:
238 case webrtc::H264::kProfileBaseline: 174 case webrtc::H264::kProfileBaseline:
239 switch (profile_level_id->level) { 175 switch (profile_level_id->level) {
240 case webrtc::H264::kLevel3: 176 case webrtc::H264::kLevel3:
241 return kVTProfileLevel_H264_Baseline_3_0; 177 return kVTProfileLevel_H264_Baseline_3_0;
242 case webrtc::H264::kLevel3_1: 178 case webrtc::H264::kLevel3_1:
(...skipping 81 matching lines...)
324 case webrtc::H264::kLevel1_2: 260 case webrtc::H264::kLevel1_2:
325 case webrtc::H264::kLevel1_3: 261 case webrtc::H264::kLevel1_3:
326 case webrtc::H264::kLevel2: 262 case webrtc::H264::kLevel2:
327 case webrtc::H264::kLevel2_1: 263 case webrtc::H264::kLevel2_1:
328 case webrtc::H264::kLevel2_2: 264 case webrtc::H264::kLevel2_2:
329 return kVTProfileLevel_H264_High_AutoLevel; 265 return kVTProfileLevel_H264_High_AutoLevel;
330 } 266 }
331 } 267 }
332 } 268 }
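As a concrete illustration of the mapping, with an assumed SDP value that does not come from this patch: a profile-level-id of "42e01f" parses as Constrained Baseline level 3.1, so ExtractProfile should select the corresponding Baseline 3.1 VideoToolbox constant, while levels without a dedicated constant fall back to an AutoLevel profile as described above.

    // Assumed example; kH264FmtpProfileLevelId is cricket's "profile-level-id" key.
    cricket::VideoCodec codec(cricket::kH264CodecName);
    codec.SetParam(cricket::kH264FmtpProfileLevelId, "42e01f");
    CFStringRef profile = ExtractProfile(codec);  // expected: a Baseline 3.1 profile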
333 269
334 } // namespace internal 270 @implementation RTCVideoEncoderH264 {
271 RTCVideoCodecInfo *_codecInfo;
272 webrtc::BitrateAdjuster *_bitrateAdjuster;
273 uint32_t _targetBitrateBps;
274 uint32_t _encoderBitrateBps;
275 RTCH264PacketizationMode _packetizationMode;
276 CFStringRef _profile;
277 RTCVideoEncoderCallback _callback;
278 int32_t _width;
279 int32_t _height;
280 VTCompressionSessionRef _compressionSession;
281 RTCVideoCodecMode _mode;
335 282
336 namespace webrtc { 283 webrtc::H264BitstreamParser _h264BitstreamParser;
284 std::vector<uint8_t> _nv12ScaleBuffer;
285 }
337 286
338 // .5 is set as a mininum to prevent overcompensating for large temporary 287 // .5 is set as a mininum to prevent overcompensating for large temporary
339 // overshoots. We don't want to degrade video quality too badly. 288 // overshoots. We don't want to degrade video quality too badly.
340 // .95 is set to prevent oscillations. When a lower bitrate is set on the 289 // .95 is set to prevent oscillations. When a lower bitrate is set on the
341 // encoder than previously set, its output seems to have a brief period of 290 // encoder than previously set, its output seems to have a brief period of
342 // drastically reduced bitrate, so we want to avoid that. In steady state 291 // drastically reduced bitrate, so we want to avoid that. In steady state
343 // conditions, 0.95 seems to give us better overall bitrate over long periods 292 // conditions, 0.95 seems to give us better overall bitrate over long periods
344 // of time. 293 // of time.
345 H264VideoToolboxEncoder::H264VideoToolboxEncoder(const cricket::VideoCodec& codec) 294 - (instancetype)initWithCodecInfo:(RTCVideoCodecInfo *)codecInfo {
346 : callback_(nullptr), 295 if (self = [super init]) {
347 compression_session_(nullptr), 296 _codecInfo = codecInfo;
348 bitrate_adjuster_(Clock::GetRealTimeClock(), .5, .95), 297 _bitrateAdjuster = new webrtc::BitrateAdjuster(webrtc::Clock::GetRealTimeClock(), .5, .95);
349 packetization_mode_(H264PacketizationMode::NonInterleaved), 298 _packetizationMode = RTCH264PacketizationModeNonInterleaved;
350 profile_(internal::ExtractProfile(codec)) { 299 _profile = ExtractProfile([codecInfo nativeVideoCodec]);
351 LOG(LS_INFO) << "Using profile " << internal::CFStringToString(profile_); 300 LOG(LS_INFO) << "Using profile " << CFStringToString(_profile);
352 RTC_CHECK(cricket::CodecNamesEq(codec.name, cricket::kH264CodecName)); 301 RTC_CHECK([codecInfo.name isEqualToString:@"H264"]);
302 }
303 return self;
353 } 304 }
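The .5 and .95 passed above are the BitrateAdjuster's lower and upper adjustment fractions. A rough sketch of the intended effect, under the assumption (not verified in this change) that the adjusted value is kept within those fractions of the target:

    // Illustrative numbers only: with a 1 Mbps target,
    //   lower bound ~ 0.5  * 1,000,000 = 500,000 bps
    //   upper bound ~ 0.95 * 1,000,000 = 950,000 bps
    // so GetAdjustedBitrateBps() should hand VideoToolbox a value in that range
    // once Update() has observed the encoder's actual output size.
    webrtc::BitrateAdjuster adjuster(webrtc::Clock::GetRealTimeClock(), .5f, .95f);
    adjuster.SetTargetBitrateBps(1000000);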
354 305
355 H264VideoToolboxEncoder::~H264VideoToolboxEncoder() { 306 - (void)dealloc {
356 DestroyCompressionSession(); 307 [self destroyCompressionSession];
357 } 308 }
358 309
359 int H264VideoToolboxEncoder::InitEncode(const VideoCodec* codec_settings, 310 - (NSInteger)startEncodeWithSettings:(RTCVideoEncoderSettings *)settings
360 int number_of_cores, 311 numberOfCores:(int)numberOfCores {
361 size_t max_payload_size) { 312 RTC_DCHECK(settings);
362 RTC_DCHECK(codec_settings); 313 RTC_DCHECK([settings.name isEqualToString:@"H264"]);
363 RTC_DCHECK_EQ(codec_settings->codecType, kVideoCodecH264);
364 314
365 width_ = codec_settings->width; 315 _width = settings.width;
366 height_ = codec_settings->height; 316 _height = settings.height;
367 mode_ = codec_settings->mode; 317 _mode = settings.mode;
318
368 // We can only set average bitrate on the HW encoder. 319 // We can only set average bitrate on the HW encoder.
369 target_bitrate_bps_ = codec_settings->startBitrate; 320 _targetBitrateBps = settings.startBitrate;
370 bitrate_adjuster_.SetTargetBitrateBps(target_bitrate_bps_); 321 _bitrateAdjuster->SetTargetBitrateBps(_targetBitrateBps);
371 322
372 // TODO(tkchin): Try setting payload size via 323 // TODO(tkchin): Try setting payload size via
373 // kVTCompressionPropertyKey_MaxH264SliceBytes. 324 // kVTCompressionPropertyKey_MaxH264SliceBytes.
374 325
375 return ResetCompressionSession(); 326 return [self resetCompressionSession];
376 } 327 }
377 328
378 int H264VideoToolboxEncoder::Encode( 329 - (NSInteger)encode:(RTCVideoFrame *)frame
379 const VideoFrame& frame, 330 codecSpecificInfo:(id<RTCCodecSpecificInfo>)codecSpecificInfo
380 const CodecSpecificInfo* codec_specific_info, 331 frameTypes:(NSArray<NSNumber *> *)frameTypes {
381 const std::vector<FrameType>* frame_types) { 332 RTC_DCHECK_EQ(frame.width, _width);
382 // |input_frame| size should always match codec settings. 333 RTC_DCHECK_EQ(frame.height, _height);
383 RTC_DCHECK_EQ(frame.width(), width_); 334 if (!_callback || !_compressionSession) {
384 RTC_DCHECK_EQ(frame.height(), height_);
385 if (!callback_ || !compression_session_) {
386 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; 335 return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
387 } 336 }
388 #if defined(WEBRTC_IOS) 337 #if defined(WEBRTC_IOS)
389 if (![[RTCUIApplicationStatusObserver sharedInstance] isApplicationActive]) { 338 if (![[RTCUIApplicationStatusObserver sharedInstance] isApplicationActive]) {
390 // Ignore all encode requests when app isn't active. In this state, the 339 // Ignore all encode requests when app isn't active. In this state, the
391 // hardware encoder has been invalidated by the OS. 340 // hardware encoder has been invalidated by the OS.
392 return WEBRTC_VIDEO_CODEC_OK; 341 return WEBRTC_VIDEO_CODEC_OK;
393 } 342 }
394 #endif 343 #endif
395 bool is_keyframe_required = false; 344 BOOL isKeyframeRequired = NO;
396 345
397 // Get a pixel buffer from the pool and copy frame data over. 346 // Get a pixel buffer from the pool and copy frame data over.
398 CVPixelBufferPoolRef pixel_buffer_pool = 347 CVPixelBufferPoolRef pixelBufferPool =
399 VTCompressionSessionGetPixelBufferPool(compression_session_); 348 VTCompressionSessionGetPixelBufferPool(_compressionSession);
349
400 #if defined(WEBRTC_IOS) 350 #if defined(WEBRTC_IOS)
401 if (!pixel_buffer_pool) { 351 if (!pixelBufferPool) {
402 // Kind of a hack. On backgrounding, the compression session seems to get 352 // Kind of a hack. On backgrounding, the compression session seems to get
403 // invalidated, which causes this pool call to fail when the application 353 // invalidated, which causes this pool call to fail when the application
404 // is foregrounded and frames are being sent for encoding again. 354 // is foregrounded and frames are being sent for encoding again.
405 // Resetting the session when this happens fixes the issue. 355 // Resetting the session when this happens fixes the issue.
406 // In addition we request a keyframe so video can recover quickly. 356 // In addition we request a keyframe so video can recover quickly.
407 ResetCompressionSession(); 357 [self resetCompressionSession];
408 pixel_buffer_pool = 358 pixelBufferPool = VTCompressionSessionGetPixelBufferPool(_compressionSession);
409 VTCompressionSessionGetPixelBufferPool(compression_session_); 359 isKeyframeRequired = YES;
410 is_keyframe_required = true;
411 LOG(LS_INFO) << "Resetting compression session due to invalid pool."; 360 LOG(LS_INFO) << "Resetting compression session due to invalid pool.";
412 } 361 }
413 #endif 362 #endif
414 363
415 CVPixelBufferRef pixel_buffer = nullptr; 364 CVPixelBufferRef pixelBuffer = nullptr;
416 if (frame.video_frame_buffer()->type() == VideoFrameBuffer::Type::kNative) { 365 if ([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
417 // Native frame. 366 // Native frame buffer
418 rtc::scoped_refptr<ObjCFrameBuffer> objc_frame_buffer( 367 RTCCVPixelBuffer *rtcPixelBuffer = (RTCCVPixelBuffer *)frame.buffer;
419 static_cast<ObjCFrameBuffer*>(frame.video_frame_buffer().get())); 368 if (![rtcPixelBuffer requiresCropping]) {
420 id<RTCVideoFrameBuffer> wrapped_frame_buffer = 369 // This pixel buffer might have a higher resolution than what the
421 (id<RTCVideoFrameBuffer>)objc_frame_buffer->wrapped_frame_buffer(); 370 // compression session is configured to. The compression session can
422 371 // handle that and will output encoded frames in the configured
423 if ([wrapped_frame_buffer isKindOfClass:[RTCCVPixelBuffer class]]) { 372 // resolution regardless of the input pixel buffer resolution.
424 RTCCVPixelBuffer* rtc_pixel_buffer = (RTCCVPixelBuffer*)wrapped_frame_buffer; 373 pixelBuffer = rtcPixelBuffer.pixelBuffer;
425 if (![rtc_pixel_buffer requiresCropping]) { 374 CVBufferRetain(pixelBuffer);
426 // This pixel buffer might have a higher resolution than what the 375 } else {
427 // compression session is configured to. The compression session can 376 // Cropping required, we need to crop and scale to a new pixel buffer.
428 // handle that and will output encoded frames in the configured 377 pixelBuffer = CreatePixelBuffer(pixelBufferPool);
429 // resolution regardless of the input pixel buffer resolution. 378 if (!pixelBuffer) {
430 pixel_buffer = rtc_pixel_buffer.pixelBuffer; 379 return WEBRTC_VIDEO_CODEC_ERROR;
431 CVBufferRetain(pixel_buffer); 380 }
381 int dstWidth = CVPixelBufferGetWidth(pixelBuffer);
382 int dstHeight = CVPixelBufferGetHeight(pixelBuffer);
383 if ([rtcPixelBuffer requiresScalingToWidth:dstWidth height:dstHeight]) {
384 int size =
385 [rtcPixelBuffer bufferSizeForCroppingAndScalingToWidth:dstWidth height:dstHeight];
386 _nv12ScaleBuffer.resize(size);
432 } else { 387 } else {
433 // Cropping required, we need to crop and scale to a new pixel buffer. 388 _nv12ScaleBuffer.clear();
434 pixel_buffer = internal::CreatePixelBuffer(pixel_buffer_pool); 389 }
435 if (!pixel_buffer) { 390 _nv12ScaleBuffer.shrink_to_fit();
436 return WEBRTC_VIDEO_CODEC_ERROR; 391 if (![rtcPixelBuffer cropAndScaleTo:pixelBuffer withTempBuffer:_nv12ScaleB uffer.data()]) {
437 } 392 return WEBRTC_VIDEO_CODEC_ERROR;
438 int dst_width = CVPixelBufferGetWidth(pixel_buffer);
439 int dst_height = CVPixelBufferGetHeight(pixel_buffer);
440 if ([rtc_pixel_buffer requiresScalingToWidth:dst_width height:dst_height ]) {
441 int size =
442 [rtc_pixel_buffer bufferSizeForCroppingAndScalingToWidth:dst_width height:dst_height];
443 nv12_scale_buffer_.resize(size);
444 } else {
445 nv12_scale_buffer_.clear();
446 }
447 nv12_scale_buffer_.shrink_to_fit();
448 if (![rtc_pixel_buffer cropAndScaleTo:pixel_buffer
449 withTempBuffer:nv12_scale_buffer_.data()]) {
450 return WEBRTC_VIDEO_CODEC_ERROR;
451 }
452 } 393 }
453 } 394 }
454 } 395 }
455 396
456 if (!pixel_buffer) { 397 if (!pixelBuffer) {
457 // We did not have a native frame, or the ObjCVideoFrame wrapped a non-native frame 398 // We did not have a native frame buffer
458 pixel_buffer = internal::CreatePixelBuffer(pixel_buffer_pool); 399 pixelBuffer = CreatePixelBuffer(pixelBufferPool);
459 if (!pixel_buffer) { 400 if (!pixelBuffer) {
460 return WEBRTC_VIDEO_CODEC_ERROR; 401 return WEBRTC_VIDEO_CODEC_ERROR;
461 } 402 }
462 RTC_DCHECK(pixel_buffer); 403 RTC_DCHECK(pixelBuffer);
463 if (!internal::CopyVideoFrameToPixelBuffer(frame.video_frame_buffer()->ToI420(), 404 if (!CopyVideoFrameToPixelBuffer([frame.buffer toI420], pixelBuffer)) {
464 pixel_buffer)) {
465 LOG(LS_ERROR) << "Failed to copy frame data."; 405 LOG(LS_ERROR) << "Failed to copy frame data.";
466 CVBufferRelease(pixel_buffer); 406 CVBufferRelease(pixelBuffer);
467 return WEBRTC_VIDEO_CODEC_ERROR; 407 return WEBRTC_VIDEO_CODEC_ERROR;
468 } 408 }
469 } 409 }
470 410
471 // Check if we need a keyframe. 411 // Check if we need a keyframe.
472 if (!is_keyframe_required && frame_types) { 412 if (!isKeyframeRequired && frameTypes) {
473 for (auto frame_type : *frame_types) { 413 for (NSNumber *frameType in frameTypes) {
474 if (frame_type == kVideoFrameKey) { 414 if ((RTCFrameType)frameType.intValue == RTCFrameTypeVideoFrameKey) {
475 is_keyframe_required = true; 415 isKeyframeRequired = YES;
476 break; 416 break;
477 } 417 }
478 } 418 }
479 } 419 }
480 420
481 CMTime presentation_time_stamp = 421 CMTime presentationTimeStamp = CMTimeMake(frame.timeStampNs / rtc::kNumNanosecsPerMillisec, 1000);
482 CMTimeMake(frame.render_time_ms(), 1000); 422 CFDictionaryRef frameProperties = nullptr;
483 CFDictionaryRef frame_properties = nullptr; 423 if (isKeyframeRequired) {
484 if (is_keyframe_required) {
485 CFTypeRef keys[] = {kVTEncodeFrameOptionKey_ForceKeyFrame}; 424 CFTypeRef keys[] = {kVTEncodeFrameOptionKey_ForceKeyFrame};
486 CFTypeRef values[] = {kCFBooleanTrue}; 425 CFTypeRef values[] = {kCFBooleanTrue};
487 frame_properties = internal::CreateCFDictionary(keys, values, 1); 426 frameProperties = CreateCFTypeDictionary(keys, values, 1);
488 } 427 }
489 std::unique_ptr<internal::FrameEncodeParams> encode_params;
490 encode_params.reset(new internal::FrameEncodeParams(
491 this, codec_specific_info, width_, height_, frame.render_time_ms(),
492 frame.timestamp(), frame.rotation()));
493 428
494 encode_params->codec_specific_info.codecSpecific.H264.packetization_mode = 429 std::unique_ptr<RTCFrameEncodeParams> encodeParams;
495 packetization_mode_; 430 encodeParams.reset(new RTCFrameEncodeParams(self,
431 codecSpecificInfo,
432 _width,
433 _height,
434 frame.timeStampNs / rtc::kNumNanosecsPerMillisec,
435 frame.timeStamp,
436 frame.rotation));
437 encodeParams->codecSpecificInfo.packetizationMode = _packetizationMode;
496 438
497 // Update the bitrate if needed. 439 // Update the bitrate if needed.
498 SetBitrateBps(bitrate_adjuster_.GetAdjustedBitrateBps()); 440 [self setBitrateBps:_bitrateAdjuster->GetAdjustedBitrateBps()];
499 441
500 OSStatus status = VTCompressionSessionEncodeFrame( 442 OSStatus status = VTCompressionSessionEncodeFrame(_compressionSession,
501 compression_session_, pixel_buffer, presentation_time_stamp, 443 pixelBuffer,
502 kCMTimeInvalid, frame_properties, encode_params.release(), nullptr); 444 presentationTimeStamp,
503 if (frame_properties) { 445 kCMTimeInvalid,
504 CFRelease(frame_properties); 446 frameProperties,
447 encodeParams.release(),
448 nullptr);
449 if (frameProperties) {
450 CFRelease(frameProperties);
505 } 451 }
506 if (pixel_buffer) { 452 if (pixelBuffer) {
507 CVBufferRelease(pixel_buffer); 453 CVBufferRelease(pixelBuffer);
508 } 454 }
509 if (status != noErr) { 455 if (status != noErr) {
510 LOG(LS_ERROR) << "Failed to encode frame with code: " << status; 456 LOG(LS_ERROR) << "Failed to encode frame with code: " << status;
511 return WEBRTC_VIDEO_CODEC_ERROR; 457 return WEBRTC_VIDEO_CODEC_ERROR;
512 } 458 }
513 return WEBRTC_VIDEO_CODEC_OK; 459 return WEBRTC_VIDEO_CODEC_OK;
514 } 460 }
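For reference, a hypothetical call sequence for this class, pieced together only from the methods defined in this file; how `codecInfo`, `settings` and `videoFrame` are obtained is assumed, and the block signature is inferred from the _callback invocation in frameWasEncoded below:

    RTCVideoEncoderH264 *encoder = [[RTCVideoEncoderH264 alloc] initWithCodecInfo:codecInfo];
    [encoder setCallback:^BOOL(RTCEncodedImage *image,
                               id<RTCCodecSpecificInfo> info,
                               RTCRtpFragmentationHeader *header) {
      // Forward the Annex B buffer to packetization; returning NO reports failure.
      return YES;
    }];
    if ([encoder startEncodeWithSettings:settings numberOfCores:1] == WEBRTC_VIDEO_CODEC_OK) {
      [encoder encode:videoFrame
          codecSpecificInfo:nil
                 frameTypes:@[ @(RTCFrameTypeVideoFrameKey) ]];
    }
    [encoder releaseEncoder];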
515 461
516 int H264VideoToolboxEncoder::RegisterEncodeCompleteCallback( 462 - (void)setCallback:(RTCVideoEncoderCallback)callback {
517 EncodedImageCallback* callback) { 463 _callback = callback;
518 callback_ = callback; 464 }
465
466 - (int)setBitrate:(uint32_t)bitrateKbit framerate:(uint32_t)framerate {
467 _targetBitrateBps = 1000 * bitrateKbit;
468 _bitrateAdjuster->SetTargetBitrateBps(_targetBitrateBps);
469 [self setBitrateBps:_bitrateAdjuster->GetAdjustedBitrateBps()];
519 return WEBRTC_VIDEO_CODEC_OK; 470 return WEBRTC_VIDEO_CODEC_OK;
520 } 471 }
521 472
522 int H264VideoToolboxEncoder::SetChannelParameters(uint32_t packet_loss, 473 #pragma mark - Private
523 int64_t rtt) { 474
524 // Encoder doesn't know anything about packet loss or rtt so just return. 475 - (NSInteger)releaseEncoder {
476 // Need to destroy so that the session is invalidated and won't use the
477 // callback anymore. Do not remove callback until the session is invalidated
478 // since async encoder callbacks can occur until invalidation.
479 [self destroyCompressionSession];
480 _callback = nullptr;
525 return WEBRTC_VIDEO_CODEC_OK; 481 return WEBRTC_VIDEO_CODEC_OK;
526 } 482 }
527 483
528 int H264VideoToolboxEncoder::SetRates(uint32_t new_bitrate_kbit, 484 - (int)resetCompressionSession {
529 uint32_t frame_rate) { 485 [self destroyCompressionSession];
530 target_bitrate_bps_ = 1000 * new_bitrate_kbit;
531 bitrate_adjuster_.SetTargetBitrateBps(target_bitrate_bps_);
532 SetBitrateBps(bitrate_adjuster_.GetAdjustedBitrateBps());
533 return WEBRTC_VIDEO_CODEC_OK;
534 }
535
536 int H264VideoToolboxEncoder::Release() {
537 // Need to destroy so that the session is invalidated and won't use the
538 // callback anymore. Do not remove callback until the session is invalidated
539 // since async encoder callbacks can occur until invalidation.
540 DestroyCompressionSession();
541 callback_ = nullptr;
542 return WEBRTC_VIDEO_CODEC_OK;
543 }
544
545 int H264VideoToolboxEncoder::ResetCompressionSession() {
546 DestroyCompressionSession();
547 486
548 // Set source image buffer attributes. These attributes will be present on 487 // Set source image buffer attributes. These attributes will be present on
549 // buffers retrieved from the encoder's pixel buffer pool. 488 // buffers retrieved from the encoder's pixel buffer pool.
550 const size_t attributes_size = 3; 489 const size_t attributesSize = 3;
551 CFTypeRef keys[attributes_size] = { 490 CFTypeRef keys[attributesSize] = {
552 #if defined(WEBRTC_IOS) 491 #if defined(WEBRTC_IOS)
553 kCVPixelBufferOpenGLESCompatibilityKey, 492 kCVPixelBufferOpenGLESCompatibilityKey,
554 #elif defined(WEBRTC_MAC) 493 #elif defined(WEBRTC_MAC)
555 kCVPixelBufferOpenGLCompatibilityKey, 494 kCVPixelBufferOpenGLCompatibilityKey,
556 #endif 495 #endif
557 kCVPixelBufferIOSurfacePropertiesKey, 496 kCVPixelBufferIOSurfacePropertiesKey,
558 kCVPixelBufferPixelFormatTypeKey 497 kCVPixelBufferPixelFormatTypeKey
559 }; 498 };
560 CFDictionaryRef io_surface_value = 499 CFDictionaryRef ioSurfaceValue = CreateCFTypeDictionary(nullptr, nullptr, 0);
561 internal::CreateCFDictionary(nullptr, nullptr, 0);
562 int64_t nv12type = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange; 500 int64_t nv12type = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
563 CFNumberRef pixel_format = 501 CFNumberRef pixelFormat = CFNumberCreate(nullptr, kCFNumberLongType, &nv12type);
564 CFNumberCreate(nullptr, kCFNumberLongType, &nv12type); 502 CFTypeRef values[attributesSize] = {kCFBooleanTrue, ioSurfaceValue, pixelFormat};
565 CFTypeRef values[attributes_size] = {kCFBooleanTrue, io_surface_value, 503 CFDictionaryRef sourceAttributes = CreateCFTypeDictionary(keys, values, attributesSize);
566 pixel_format}; 504 if (ioSurfaceValue) {
567 CFDictionaryRef source_attributes = 505 CFRelease(ioSurfaceValue);
568 internal::CreateCFDictionary(keys, values, attributes_size); 506 ioSurfaceValue = nullptr;
569 if (io_surface_value) {
570 CFRelease(io_surface_value);
571 io_surface_value = nullptr;
572 } 507 }
573 if (pixel_format) { 508 if (pixelFormat) {
574 CFRelease(pixel_format); 509 CFRelease(pixelFormat);
575 pixel_format = nullptr; 510 pixelFormat = nullptr;
576 } 511 }
577 OSStatus status = VTCompressionSessionCreate( 512 OSStatus status = VTCompressionSessionCreate(nullptr, // use default allocator
578 nullptr, // use default allocator 513 _width,
579 width_, height_, kCMVideoCodecType_H264, 514 _height,
580 nullptr, // use default encoder 515 kCMVideoCodecType_H264,
581 source_attributes, 516 nullptr, // use default encoder
582 nullptr, // use default compressed data allocator 517 sourceAttributes,
583 internal::VTCompressionOutputCallback, this, &compression_session_); 518 nullptr, // use default compressed data allocator
584 if (source_attributes) { 519 compressionOutputCallback,
585 CFRelease(source_attributes); 520 nullptr,
586 source_attributes = nullptr; 521 &_compressionSession);
522 if (sourceAttributes) {
523 CFRelease(sourceAttributes);
524 sourceAttributes = nullptr;
587 } 525 }
588 if (status != noErr) { 526 if (status != noErr) {
589 LOG(LS_ERROR) << "Failed to create compression session: " << status; 527 LOG(LS_ERROR) << "Failed to create compression session: " << status;
590 return WEBRTC_VIDEO_CODEC_ERROR; 528 return WEBRTC_VIDEO_CODEC_ERROR;
591 } 529 }
592 ConfigureCompressionSession(); 530 [self configureCompressionSession];
593 return WEBRTC_VIDEO_CODEC_OK; 531 return WEBRTC_VIDEO_CODEC_OK;
594 } 532 }
595 533
596 void H264VideoToolboxEncoder::ConfigureCompressionSession() { 534 - (void)configureCompressionSession {
597 RTC_DCHECK(compression_session_); 535 RTC_DCHECK(_compressionSession);
598 internal::SetVTSessionProperty(compression_session_, 536 SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_RealTime, true);
599 kVTCompressionPropertyKey_RealTime, true); 537 SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_ProfileLevel, _profile);
600 internal::SetVTSessionProperty(compression_session_, 538 SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_AllowFrameReordering, false);
601 kVTCompressionPropertyKey_ProfileLevel, 539 [self setEncoderBitrateBps:_targetBitrateBps];
602 profile_);
603 internal::SetVTSessionProperty(compression_session_,
604 kVTCompressionPropertyKey_AllowFrameReordering,
605 false);
606 SetEncoderBitrateBps(target_bitrate_bps_);
607 // TODO(tkchin): Look at entropy mode and colorspace matrices. 540 // TODO(tkchin): Look at entropy mode and colorspace matrices.
608 // TODO(tkchin): Investigate to see if there's any way to make this work. 541 // TODO(tkchin): Investigate to see if there's any way to make this work.
609 // May need it to interop with Android. Currently this call just fails. 542 // May need it to interop with Android. Currently this call just fails.
610 // On inspecting encoder output on iOS8, this value is set to 6. 543 // On inspecting encoder output on iOS8, this value is set to 6.
611 // internal::SetVTSessionProperty(compression_session_, 544 // internal::SetVTSessionProperty(compression_session_,
612 // kVTCompressionPropertyKey_MaxFrameDelayCount, 545 // kVTCompressionPropertyKey_MaxFrameDelayCount,
613 // 1); 546 // 1);
614 547
615 // Set a relatively large value for keyframe emission (7200 frames or 548 // Set a relatively large value for keyframe emission (7200 frames or 4 minutes).
616 // 4 minutes). 549 SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_MaxKeyFrameInterval, 7200);
617 internal::SetVTSessionProperty( 550 SetVTSessionProperty(
618 compression_session_, 551 _compressionSession, kVTCompressionPropertyKey_MaxKeyFrameIntervalDuration, 240);
619 kVTCompressionPropertyKey_MaxKeyFrameInterval, 7200);
620 internal::SetVTSessionProperty(
621 compression_session_,
622 kVTCompressionPropertyKey_MaxKeyFrameIntervalDuration, 240);
623 } 552 }
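Worked out once, not from the patch itself: at a nominal 30 fps, 7200 frames / 30 fps = 240 s, so the frame-count limit and the 240-second MaxKeyFrameIntervalDuration above express the same bound of roughly one forced keyframe every four minutes, whichever is reached first.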
624 553
625 void H264VideoToolboxEncoder::DestroyCompressionSession() { 554 - (void)destroyCompressionSession {
626 if (compression_session_) { 555 if (_compressionSession) {
627 VTCompressionSessionInvalidate(compression_session_); 556 VTCompressionSessionInvalidate(_compressionSession);
628 CFRelease(compression_session_); 557 CFRelease(_compressionSession);
629 compression_session_ = nullptr; 558 _compressionSession = nullptr;
630 } 559 }
631 } 560 }
632 561
633 const char* H264VideoToolboxEncoder::ImplementationName() const { 562 - (NSString *)implementationName {
634 return "VideoToolbox"; 563 return @"VideoToolbox";
635 } 564 }
636 565
637 bool H264VideoToolboxEncoder::SupportsNativeHandle() const { 566 - (void)setBitrateBps:(uint32_t)bitrateBps {
638 return true; 567 if (_encoderBitrateBps != bitrateBps) {
639 } 568 [self setEncoderBitrateBps:bitrateBps];
640
641 void H264VideoToolboxEncoder::SetBitrateBps(uint32_t bitrate_bps) {
642 if (encoder_bitrate_bps_ != bitrate_bps) {
643 SetEncoderBitrateBps(bitrate_bps);
644 } 569 }
645 } 570 }
646 571
647 void H264VideoToolboxEncoder::SetEncoderBitrateBps(uint32_t bitrate_bps) { 572 - (void)setEncoderBitrateBps:(uint32_t)bitrateBps {
648 if (compression_session_) { 573 if (_compressionSession) {
649 internal::SetVTSessionProperty(compression_session_, 574 SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_AverageBitRate, bitrateBps);
650 kVTCompressionPropertyKey_AverageBitRate,
651 bitrate_bps);
652 575
653 // TODO(tkchin): Add a helper method to set array value. 576 // TODO(tkchin): Add a helper method to set array value.
654 int64_t data_limit_bytes_per_second_value = static_cast<int64_t>( 577 int64_t dataLimitBytesPerSecondValue =
655 bitrate_bps * internal::kLimitToAverageBitRateFactor / 8); 578 static_cast<int64_t>(bitrateBps * kLimitToAverageBitRateFactor / 8);
656 CFNumberRef bytes_per_second = 579 CFNumberRef bytesPerSecond =
657 CFNumberCreate(kCFAllocatorDefault, 580 CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt64Type, &dataLimitBytesPerSecondValue);
658 kCFNumberSInt64Type, 581 int64_t oneSecondValue = 1;
659 &data_limit_bytes_per_second_value); 582 CFNumberRef oneSecond =
660 int64_t one_second_value = 1; 583 CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt64Type, &oneSecondValue);
661 CFNumberRef one_second = 584 const void *nums[2] = {bytesPerSecond, oneSecond};
662 CFNumberCreate(kCFAllocatorDefault, 585 CFArrayRef dataRateLimits = CFArrayCreate(nullptr, nums, 2, &kCFTypeArrayCallBacks);
663 kCFNumberSInt64Type, 586 OSStatus status = VTSessionSetProperty(
664 &one_second_value); 587 _compressionSession, kVTCompressionPropertyKey_DataRateLimits, dataRateLimits);
665 const void* nums[2] = { bytes_per_second, one_second }; 588 if (bytesPerSecond) {
666 CFArrayRef data_rate_limits = 589 CFRelease(bytesPerSecond);
667 CFArrayCreate(nullptr, nums, 2, &kCFTypeArrayCallBacks);
668 OSStatus status =
669 VTSessionSetProperty(compression_session_,
670 kVTCompressionPropertyKey_DataRateLimits,
671 data_rate_limits);
672 if (bytes_per_second) {
673 CFRelease(bytes_per_second);
674 } 590 }
675 if (one_second) { 591 if (oneSecond) {
676 CFRelease(one_second); 592 CFRelease(oneSecond);
677 } 593 }
678 if (data_rate_limits) { 594 if (dataRateLimits) {
679 CFRelease(data_rate_limits); 595 CFRelease(dataRateLimits);
680 } 596 }
681 if (status != noErr) { 597 if (status != noErr) {
682 LOG(LS_ERROR) << "Failed to set data rate limit"; 598 LOG(LS_ERROR) << "Failed to set data rate limit";
683 } 599 }
684 600
685 encoder_bitrate_bps_ = bitrate_bps; 601 _encoderBitrateBps = bitrateBps;
686 } 602 }
687 } 603 }
688 604
689 void H264VideoToolboxEncoder::OnEncodedFrame( 605 - (void)frameWasEncoded:(OSStatus)status
690 OSStatus status, 606 flags:(VTEncodeInfoFlags)infoFlags
691 VTEncodeInfoFlags info_flags, 607 sampleBuffer:(CMSampleBufferRef)sampleBuffer
692 CMSampleBufferRef sample_buffer, 608 codecSpecificInfo:(id<RTCCodecSpecificInfo>)codecSpecificInfo
693 CodecSpecificInfo codec_specific_info, 609 width:(int32_t)width
694 int32_t width, 610 height:(int32_t)height
695 int32_t height, 611 renderTimeMs:(int64_t)renderTimeMs
696 int64_t render_time_ms, 612 timestamp:(uint32_t)timestamp
697 uint32_t timestamp, 613 rotation:(RTCVideoRotation)rotation {
698 VideoRotation rotation) {
699 if (status != noErr) { 614 if (status != noErr) {
700 LOG(LS_ERROR) << "H264 encode failed."; 615 LOG(LS_ERROR) << "H264 encode failed.";
701 return; 616 return;
702 } 617 }
703 if (info_flags & kVTEncodeInfo_FrameDropped) { 618 if (infoFlags & kVTEncodeInfo_FrameDropped) {
704 LOG(LS_INFO) << "H264 encode dropped frame."; 619 LOG(LS_INFO) << "H264 encode dropped frame.";
705 return; 620 return;
706 } 621 }
707 622
708 bool is_keyframe = false; 623 BOOL isKeyframe = NO;
709 CFArrayRef attachments = 624 CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, 0);
710 CMSampleBufferGetSampleAttachmentsArray(sample_buffer, 0);
711 if (attachments != nullptr && CFArrayGetCount(attachments)) { 625 if (attachments != nullptr && CFArrayGetCount(attachments)) {
712 CFDictionaryRef attachment = 626 CFDictionaryRef attachment =
713 static_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(attachments, 0)); 627 static_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(attachments, 0));
714 is_keyframe = 628 isKeyframe = !CFDictionaryContainsKey(attachment, kCMSampleAttachmentKey_NotSync);
715 !CFDictionaryContainsKey(attachment, kCMSampleAttachmentKey_NotSync);
716 } 629 }
717 630
718 if (is_keyframe) { 631 if (isKeyframe) {
719 LOG(LS_INFO) << "Generated keyframe"; 632 LOG(LS_INFO) << "Generated keyframe";
720 } 633 }
721 634
722 // Convert the sample buffer into a buffer suitable for RTP packetization. 635 // Convert the sample buffer into a buffer suitable for RTP packetization.
723 // TODO(tkchin): Allocate buffers through a pool. 636 // TODO(tkchin): Allocate buffers through a pool.
724 std::unique_ptr<rtc::Buffer> buffer(new rtc::Buffer()); 637 std::unique_ptr<rtc::Buffer> buffer(new rtc::Buffer());
725 std::unique_ptr<webrtc::RTPFragmentationHeader> header; 638 RTCRtpFragmentationHeader *header;
726 { 639 {
727 webrtc::RTPFragmentationHeader* header_raw; 640 webrtc::RTPFragmentationHeader *header_cpp;
728 bool result = H264CMSampleBufferToAnnexBBuffer(sample_buffer, is_keyframe, 641 bool result =
729 buffer.get(), &header_raw); 642 H264CMSampleBufferToAnnexBBuffer(sampleBuffer, isKeyframe, buffer.get(), &header_cpp);
730 header.reset(header_raw); 643 header = [[RTCRtpFragmentationHeader alloc] initWithNativeFragmentationHeader:header_cpp];
731 if (!result) { 644 if (!result) {
732 return; 645 return;
733 } 646 }
734 } 647 }
735 webrtc::EncodedImage frame(buffer->data(), buffer->size(), buffer->size());
736 frame._encodedWidth = width;
737 frame._encodedHeight = height;
738 frame._completeFrame = true;
739 frame._frameType =
740 is_keyframe ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta;
741 frame.capture_time_ms_ = render_time_ms;
742 frame._timeStamp = timestamp;
743 frame.rotation_ = rotation;
744 frame.content_type_ =
745 (mode_ == kScreensharing) ? VideoContentType::SCREENSHARE : VideoContentType::UNSPECIFIED;
746 frame.timing_.is_timing_frame = false;
747 648
748 h264_bitstream_parser_.ParseBitstream(buffer->data(), buffer->size()); 649 RTCEncodedImage *frame = [[RTCEncodedImage alloc] init];
749 h264_bitstream_parser_.GetLastSliceQp(&frame.qp_); 650 frame.buffer = [NSData dataWithBytesNoCopy:buffer->data() length:buffer->size() freeWhenDone:NO];
651 frame.encodedWidth = width;
652 frame.encodedHeight = height;
653 frame.completeFrame = YES;
654 frame.frameType = isKeyframe ? RTCFrameTypeVideoFrameKey : RTCFrameTypeVideoFr ameDelta;
655 frame.captureTimeMs = renderTimeMs;
656 frame.timeStamp = timestamp;
657 frame.rotation = rotation;
658 frame.contentType = (_mode == RTCVideoCodecModeScreensharing) ? RTCVideoContentTypeScreenshare :
659 RTCVideoContentTypeUnspecified;
660 frame.isTimingFrame = NO;
750 661
751 EncodedImageCallback::Result res = 662 int qp;
752 callback_->OnEncodedImage(frame, &codec_specific_info, header.get()); 663 _h264BitstreamParser.ParseBitstream(buffer->data(), buffer->size());
753 if (res.error != EncodedImageCallback::Result::OK) { 664 _h264BitstreamParser.GetLastSliceQp(&qp);
754 LOG(LS_ERROR) << "Encode callback failed: " << res.error; 665 frame.qp = @(qp);
666
667 BOOL res = _callback(frame, codecSpecificInfo, header);
668 if (!res) {
669 LOG(LS_ERROR) << "Encode callback failed";
755 return; 670 return;
756 } 671 }
757 bitrate_adjuster_.Update(frame._length); 672 _bitrateAdjuster->Update(frame.buffer.length);
758 } 673 }
759 674
760 // TODO(magjed): This function is not used by RTCVideoEncoderH264, but this whole file will be 675 - (RTCVideoEncoderQpThresholds *)scalingSettings {
761 // removed soon and inlined as ObjC. 676 return [[RTCVideoEncoderQpThresholds alloc] initWithThresholdsLow:kLowH264QpThreshold
762 VideoEncoder::ScalingSettings H264VideoToolboxEncoder::GetScalingSettings() 677 high:kHighH264QpThreshold];
763 const {
764 return VideoEncoder::ScalingSettings(true, internal::kLowH264QpThreshold,
765 internal::kHighH264QpThreshold);
766 } 678 }
767 } // namespace webrtc 679
680 @end
