OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 * | 9 * |
10 */ | 10 */ |
(...skipping 127 matching lines...)
138 int32_t width; | 138 int32_t width; |
139 int32_t height; | 139 int32_t height; |
140 int64_t render_time_ms; | 140 int64_t render_time_ms; |
141 uint32_t timestamp; | 141 uint32_t timestamp; |
142 webrtc::VideoRotation rotation; | 142 webrtc::VideoRotation rotation; |
143 }; | 143 }; |
144 | 144 |
145 // We receive I420Frames as input, but we need to feed CVPixelBuffers into the | 145 // We receive I420Frames as input, but we need to feed CVPixelBuffers into the |
146 // encoder. This performs the copy and format conversion. | 146 // encoder. This performs the copy and format conversion. |
147 // TODO(tkchin): See if encoder will accept i420 frames and compare performance. | 147 // TODO(tkchin): See if encoder will accept i420 frames and compare performance. |
148 bool CopyVideoFrameToPixelBuffer(const webrtc::VideoFrame& frame, | 148 bool CopyVideoFrameToPixelBuffer( |
149 CVPixelBufferRef pixel_buffer) { | 149 const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& frame, |
| 150 CVPixelBufferRef pixel_buffer) { |
150 RTC_DCHECK(pixel_buffer); | 151 RTC_DCHECK(pixel_buffer); |
151 RTC_DCHECK(CVPixelBufferGetPixelFormatType(pixel_buffer) == | 152 RTC_DCHECK(CVPixelBufferGetPixelFormatType(pixel_buffer) == |
152 kCVPixelFormatType_420YpCbCr8BiPlanarFullRange); | 153 kCVPixelFormatType_420YpCbCr8BiPlanarFullRange); |
153 RTC_DCHECK(CVPixelBufferGetHeightOfPlane(pixel_buffer, 0) == | 154 RTC_DCHECK(CVPixelBufferGetHeightOfPlane(pixel_buffer, 0) == |
154 static_cast<size_t>(frame.height())); | 155 static_cast<size_t>(frame->height())); |
155 RTC_DCHECK(CVPixelBufferGetWidthOfPlane(pixel_buffer, 0) == | 156 RTC_DCHECK(CVPixelBufferGetWidthOfPlane(pixel_buffer, 0) == |
156 static_cast<size_t>(frame.width())); | 157 static_cast<size_t>(frame->width())); |
157 | 158 |
158 CVReturn cvRet = CVPixelBufferLockBaseAddress(pixel_buffer, 0); | 159 CVReturn cvRet = CVPixelBufferLockBaseAddress(pixel_buffer, 0); |
159 if (cvRet != kCVReturnSuccess) { | 160 if (cvRet != kCVReturnSuccess) { |
160 LOG(LS_ERROR) << "Failed to lock base address: " << cvRet; | 161 LOG(LS_ERROR) << "Failed to lock base address: " << cvRet; |
161 return false; | 162 return false; |
162 } | 163 } |
163 uint8_t* dst_y = reinterpret_cast<uint8_t*>( | 164 uint8_t* dst_y = reinterpret_cast<uint8_t*>( |
164 CVPixelBufferGetBaseAddressOfPlane(pixel_buffer, 0)); | 165 CVPixelBufferGetBaseAddressOfPlane(pixel_buffer, 0)); |
165 int dst_stride_y = CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer, 0); | 166 int dst_stride_y = CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer, 0); |
166 uint8_t* dst_uv = reinterpret_cast<uint8_t*>( | 167 uint8_t* dst_uv = reinterpret_cast<uint8_t*>( |
167 CVPixelBufferGetBaseAddressOfPlane(pixel_buffer, 1)); | 168 CVPixelBufferGetBaseAddressOfPlane(pixel_buffer, 1)); |
168 int dst_stride_uv = CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer, 1); | 169 int dst_stride_uv = CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer, 1); |
169 // Convert I420 to NV12. | 170 // Convert I420 to NV12. |
170 int ret = libyuv::I420ToNV12( | 171 int ret = libyuv::I420ToNV12( |
171 frame.video_frame_buffer()->DataY(), | 172 frame->DataY(), frame->StrideY(), |
172 frame.video_frame_buffer()->StrideY(), | 173 frame->DataU(), frame->StrideU(), |
173 frame.video_frame_buffer()->DataU(), | 174 frame->DataV(), frame->StrideV(), |
174 frame.video_frame_buffer()->StrideU(), | |
175 frame.video_frame_buffer()->DataV(), | |
176 frame.video_frame_buffer()->StrideV(), | |
177 dst_y, dst_stride_y, dst_uv, dst_stride_uv, | 175 dst_y, dst_stride_y, dst_uv, dst_stride_uv, |
178 frame.width(), frame.height()); | 176 frame->width(), frame->height()); |
179 CVPixelBufferUnlockBaseAddress(pixel_buffer, 0); | 177 CVPixelBufferUnlockBaseAddress(pixel_buffer, 0); |
180 if (ret) { | 178 if (ret) { |
181 LOG(LS_ERROR) << "Error converting I420 VideoFrame to NV12: " << ret; | 179 LOG(LS_ERROR) << "Error converting I420 VideoFrame to NV12: " << ret; |
182 return false; | 180 return false; |
183 } | 181 } |
184 return true; | 182 return true; |
185 } | 183 } |
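
[Editor's note] The helper now takes the frame's VideoFrameBuffer instead of the whole VideoFrame. A minimal usage sketch under assumed names (the wrapper function and its locals are illustrative), not the actual call site from this CL, which appears further down in Encode() and is partly elided in this diff:

#include <VideoToolbox/VideoToolbox.h>

// Sketch only: pull a buffer from the session's pool, convert, release.
// CopyVideoFrameToPixelBuffer and webrtc::VideoFrame come from this file.
static bool ConvertForEncodeSketch(VTCompressionSessionRef session,
                                   const webrtc::VideoFrame& frame) {
  CVPixelBufferPoolRef pool = VTCompressionSessionGetPixelBufferPool(session);
  if (!pool) {
    return false;
  }
  CVPixelBufferRef pixel_buffer = nullptr;
  if (CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pool,
                                         &pixel_buffer) != kCVReturnSuccess ||
      !pixel_buffer) {
    return false;
  }
  // New signature: pass the frame's buffer, not the frame itself.
  bool ok = CopyVideoFrameToPixelBuffer(frame.video_frame_buffer(),
                                        pixel_buffer);
  // A real encoder would hand |pixel_buffer| to
  // VTCompressionSessionEncodeFrame before dropping this reference.
  CVBufferRelease(pixel_buffer);
  return ok;
}
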
186 | 184 |
187 // This is the callback function that VideoToolbox calls when encode is | 185 // This is the callback function that VideoToolbox calls when encode is |
188 // complete. From inspection this happens on its own queue. | 186 // complete. From inspection this happens on its own queue. |
(...skipping 51 matching lines...)
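
[Editor's note] The output callback referenced in the comment above lives in the skipped lines. For orientation only, here is the general shape VideoToolbox expects per the VTCompressionOutputCallback typedef, with made-up parameter names and an illustrative body; this is not the file's actual handler:

#include <VideoToolbox/VideoToolbox.h>

// Matches the VTCompressionOutputCallback typedef; the body is a sketch.
static void EncodeCompleteSketch(void* encoder_opaque,  // refcon given at session creation
                                 void* frame_params,    // sourceFrameRefCon from EncodeFrame
                                 OSStatus status,
                                 VTEncodeInfoFlags info_flags,
                                 CMSampleBufferRef sample_buffer) {
  if (status != noErr || sample_buffer == nullptr) {
    return;  // Encode failed or the frame was dropped.
  }
  // The real handler presumably reclaims the FrameEncodeParams released into
  // VTCompressionSessionEncodeFrame and forwards the encoded sample buffer.
}
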
240 // We can only set average bitrate on the HW encoder. | 238 // We can only set average bitrate on the HW encoder. |
241 target_bitrate_bps_ = codec_settings->startBitrate; | 239 target_bitrate_bps_ = codec_settings->startBitrate; |
242 bitrate_adjuster_.SetTargetBitrateBps(target_bitrate_bps_); | 240 bitrate_adjuster_.SetTargetBitrateBps(target_bitrate_bps_); |
243 | 241 |
244 // TODO(tkchin): Try setting payload size via | 242 // TODO(tkchin): Try setting payload size via |
245 // kVTCompressionPropertyKey_MaxH264SliceBytes. | 243 // kVTCompressionPropertyKey_MaxH264SliceBytes. |
246 | 244 |
247 return ResetCompressionSession(); | 245 return ResetCompressionSession(); |
248 } | 246 } |
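
[Editor's note] As the comment notes, average bitrate is the one rate-control knob available on the hardware encoder. A hedged sketch of how that property is typically applied to a session; the key is the documented kVTCompressionPropertyKey_AverageBitRate, while the wrapper name is made up:

#include <CoreFoundation/CoreFoundation.h>
#include <VideoToolbox/VideoToolbox.h>

// Sketch: push an average bitrate, in bits per second, onto the session.
static bool ApplyAverageBitrateSketch(VTCompressionSessionRef session,
                                      int32_t bitrate_bps) {
  CFNumberRef value =
      CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &bitrate_bps);
  OSStatus status = VTSessionSetProperty(
      session, kVTCompressionPropertyKey_AverageBitRate, value);
  CFRelease(value);
  return status == noErr;
}
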
249 | 247 |
250 const VideoFrame& H264VideoToolboxEncoder::GetScaledFrameOnEncode( | 248 rtc::scoped_refptr<VideoFrameBuffer> |
251 const VideoFrame& frame) { | 249 H264VideoToolboxEncoder::GetScaledFrameOnEncode( |
| 250 const rtc::scoped_refptr<VideoFrameBuffer>& frame) { |
252 rtc::CritScope lock(&quality_scaler_crit_); | 251 rtc::CritScope lock(&quality_scaler_crit_); |
253 quality_scaler_.OnEncodeFrame(frame); | 252 quality_scaler_.OnEncodeFrame(frame->width(), frame->height()); |
254 return quality_scaler_.GetScaledFrame(frame); | 253 return quality_scaler_.GetScaledFrame(frame); |
255 } | 254 } |
256 | 255 |
257 int H264VideoToolboxEncoder::Encode( | 256 int H264VideoToolboxEncoder::Encode( |
258 const VideoFrame& frame, | 257 const VideoFrame& frame, |
259 const CodecSpecificInfo* codec_specific_info, | 258 const CodecSpecificInfo* codec_specific_info, |
260 const std::vector<FrameType>* frame_types) { | 259 const std::vector<FrameType>* frame_types) { |
261 RTC_DCHECK(!frame.IsZeroSize()); | 260 RTC_DCHECK(!frame.IsZeroSize()); |
262 if (!callback_ || !compression_session_) { | 261 if (!callback_ || !compression_session_) { |
263 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 262 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
264 } | 263 } |
265 #if defined(WEBRTC_IOS) | 264 #if defined(WEBRTC_IOS) |
266 if (!RTCIsUIApplicationActive()) { | 265 if (!RTCIsUIApplicationActive()) { |
267 // Ignore all encode requests when app isn't active. In this state, the | 266 // Ignore all encode requests when app isn't active. In this state, the |
268 // hardware encoder has been invalidated by the OS. | 267 // hardware encoder has been invalidated by the OS. |
269 return WEBRTC_VIDEO_CODEC_OK; | 268 return WEBRTC_VIDEO_CODEC_OK; |
270 } | 269 } |
271 #endif | 270 #endif |
272 bool is_keyframe_required = false; | 271 bool is_keyframe_required = false; |
273 const VideoFrame& input_image = GetScaledFrameOnEncode(frame); | 272 rtc::scoped_refptr<VideoFrameBuffer> input_image( |
| 273 GetScaledFrameOnEncode(frame.video_frame_buffer())); |
274 | 274 |
275 if (input_image.width() != width_ || input_image.height() != height_) { | 275 if (input_image->width() != width_ || input_image->height() != height_) { |
276 width_ = input_image.width(); | 276 width_ = input_image->width(); |
277 height_ = input_image.height(); | 277 height_ = input_image->height(); |
278 int ret = ResetCompressionSession(); | 278 int ret = ResetCompressionSession(); |
279 if (ret < 0) | 279 if (ret < 0) |
280 return ret; | 280 return ret; |
281 } | 281 } |
282 | 282 |
283 // Get a pixel buffer from the pool and copy frame data over. | 283 // Get a pixel buffer from the pool and copy frame data over. |
284 CVPixelBufferPoolRef pixel_buffer_pool = | 284 CVPixelBufferPoolRef pixel_buffer_pool = |
285 VTCompressionSessionGetPixelBufferPool(compression_session_); | 285 VTCompressionSessionGetPixelBufferPool(compression_session_); |
286 #if defined(WEBRTC_IOS) | 286 #if defined(WEBRTC_IOS) |
287 if (!pixel_buffer_pool) { | 287 if (!pixel_buffer_pool) { |
(...skipping 32 matching lines...)
320 if (!is_keyframe_required && frame_types) { | 320 if (!is_keyframe_required && frame_types) { |
321 for (auto frame_type : *frame_types) { | 321 for (auto frame_type : *frame_types) { |
322 if (frame_type == kVideoFrameKey) { | 322 if (frame_type == kVideoFrameKey) { |
323 is_keyframe_required = true; | 323 is_keyframe_required = true; |
324 break; | 324 break; |
325 } | 325 } |
326 } | 326 } |
327 } | 327 } |
328 | 328 |
329 CMTime presentation_time_stamp = | 329 CMTime presentation_time_stamp = |
330 CMTimeMake(input_image.render_time_ms(), 1000); | 330 CMTimeMake(frame.render_time_ms(), 1000); |
331 CFDictionaryRef frame_properties = nullptr; | 331 CFDictionaryRef frame_properties = nullptr; |
332 if (is_keyframe_required) { | 332 if (is_keyframe_required) { |
333 CFTypeRef keys[] = {kVTEncodeFrameOptionKey_ForceKeyFrame}; | 333 CFTypeRef keys[] = {kVTEncodeFrameOptionKey_ForceKeyFrame}; |
334 CFTypeRef values[] = {kCFBooleanTrue}; | 334 CFTypeRef values[] = {kCFBooleanTrue}; |
335 frame_properties = internal::CreateCFDictionary(keys, values, 1); | 335 frame_properties = internal::CreateCFDictionary(keys, values, 1); |
336 } | 336 } |
337 std::unique_ptr<internal::FrameEncodeParams> encode_params; | 337 std::unique_ptr<internal::FrameEncodeParams> encode_params; |
338 encode_params.reset(new internal::FrameEncodeParams( | 338 encode_params.reset(new internal::FrameEncodeParams( |
339 this, codec_specific_info, width_, height_, input_image.render_time_ms(), | 339 this, codec_specific_info, width_, height_, frame.render_time_ms(), |
340 input_image.timestamp(), input_image.rotation())); | 340 frame.timestamp(), frame.rotation())); |
341 | 341 |
342 // Update the bitrate if needed. | 342 // Update the bitrate if needed. |
343 SetBitrateBps(bitrate_adjuster_.GetAdjustedBitrateBps()); | 343 SetBitrateBps(bitrate_adjuster_.GetAdjustedBitrateBps()); |
344 | 344 |
345 OSStatus status = VTCompressionSessionEncodeFrame( | 345 OSStatus status = VTCompressionSessionEncodeFrame( |
346 compression_session_, pixel_buffer, presentation_time_stamp, | 346 compression_session_, pixel_buffer, presentation_time_stamp, |
347 kCMTimeInvalid, frame_properties, encode_params.release(), nullptr); | 347 kCMTimeInvalid, frame_properties, encode_params.release(), nullptr); |
348 if (frame_properties) { | 348 if (frame_properties) { |
349 CFRelease(frame_properties); | 349 CFRelease(frame_properties); |
350 } | 350 } |
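
[Editor's note] The keyframe path above builds a one-entry dictionary with internal::CreateCFDictionary, which is defined elsewhere in the WebRTC tree. A plausible shape for such a helper, shown purely as a sketch over CFDictionaryCreate and not necessarily the real implementation:

#include <CoreFoundation/CoreFoundation.h>

// Sketch of a CFDictionary helper with CFType key/value callbacks.
static CFDictionaryRef CreateCFDictionarySketch(CFTypeRef* keys,
                                                CFTypeRef* values,
                                                CFIndex size) {
  return CFDictionaryCreate(kCFAllocatorDefault, keys, values, size,
                            &kCFTypeDictionaryKeyCallBacks,
                            &kCFTypeDictionaryValueCallBacks);
}
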
(...skipping 216 matching lines...)
567 if (result != 0) { | 567 if (result != 0) { |
568 LOG(LS_ERROR) << "Encode callback failed: " << result; | 568 LOG(LS_ERROR) << "Encode callback failed: " << result; |
569 return; | 569 return; |
570 } | 570 } |
571 bitrate_adjuster_.Update(frame._size); | 571 bitrate_adjuster_.Update(frame._size); |
572 } | 572 } |
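
[Editor's note] The Update() call above feeds the encoded frame size back so the adjuster can compare realized output with the configured target. A conceptual sketch of that bookkeeping, explicitly not webrtc::BitrateAdjuster's real implementation:

#include <cstddef>
#include <cstdint>

// Conceptual sketch only: accumulate encoded frame sizes so the realized
// bitrate over a window can be compared with the requested target.
class RealizedBitrateSketch {
 public:
  void Update(size_t frame_size_bytes) { bytes_ += frame_size_bytes; }
  uint32_t BitrateBps(double window_secs) const {
    return window_secs > 0.0
               ? static_cast<uint32_t>(8.0 * bytes_ / window_secs)
               : 0u;
  }

 private:
  size_t bytes_ = 0;
};
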
573 | 573 |
574 } // namespace webrtc | 574 } // namespace webrtc |
575 | 575 |
576 #endif // defined(WEBRTC_VIDEO_TOOLBOX_SUPPORTED) | 576 #endif // defined(WEBRTC_VIDEO_TOOLBOX_SUPPORTED) |