Chromium Code Reviews

Unified Diff: webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_encoder.cc

Issue 1528503003: Lint enabled for webrtc/modules/video_coding folder. (Closed)
Base URL: https://chromium.googlesource.com/external/webrtc.git@master
Patch Set: Rebase (created 5 years ago)
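Note for readers of this CL: the edits below are mechanical reformatting of the kind clang-format produces under Chromium style. Wrapped constructor initializer lists and argument lists are repacked onto as few 80-column lines as possible, and braced init lists lose the padding spaces just inside the braces. A hypothetical, self-contained sketch of the two rules (names invented, not code from this file):

// Hypothetical sketch only; illustrates the formatting this CL applies.
#include <cstdint>

struct Params {
  // Before: the initializer list was wrapped one member per line,
  //   : width(w),
  //     height(h),
  //     timestamp(ts) {
  // After: members are packed onto as few lines as fit in 80 columns.
  Params(int32_t w, int32_t h, uint32_t ts)
      : width(w), height(h), timestamp(ts) {}
  int32_t width;
  int32_t height;
  uint32_t timestamp;
};

int main() {
  // Braced init lists drop the inner padding: {640, 480}, not { 640, 480 }.
  const int32_t dims[] = {640, 480};
  Params p(dims[0], dims[1], 90000);
  return p.width == 640 ? 0 : 1;
}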
 /*
  * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
  *
  * Use of this source code is governed by a BSD-style license
  * that can be found in the LICENSE file in the root of the source
  * tree. An additional intellectual property rights grant can be found
  * in the file PATENTS. All contributing project authors may
  * be found in the AUTHORS file in the root of the source tree.
  *
  */
(...skipping 81 matching lines...)

 // Struct that we pass to the encoder per frame to encode. We receive it again
 // in the encoder callback.
 struct FrameEncodeParams {
   FrameEncodeParams(webrtc::EncodedImageCallback* cb,
                     const webrtc::CodecSpecificInfo* csi,
                     int32_t w,
                     int32_t h,
                     int64_t rtms,
                     uint32_t ts)
-      : callback(cb),
-        width(w),
-        height(h),
-        render_time_ms(rtms),
-        timestamp(ts) {
+      : callback(cb), width(w), height(h), render_time_ms(rtms), timestamp(ts) {
     if (csi) {
       codec_specific_info = *csi;
     } else {
       codec_specific_info.codecType = webrtc::kVideoCodecH264;
     }
   }
   webrtc::EncodedImageCallback* callback;
   webrtc::CodecSpecificInfo codec_specific_info;
   int32_t width;
   int32_t height;
(...skipping 22 matching lines...)
   uint8_t* dst_y = reinterpret_cast<uint8_t*>(
       CVPixelBufferGetBaseAddressOfPlane(pixel_buffer, 0));
   int dst_stride_y = CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer, 0);
   uint8_t* dst_uv = reinterpret_cast<uint8_t*>(
       CVPixelBufferGetBaseAddressOfPlane(pixel_buffer, 1));
   int dst_stride_uv = CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer, 1);
   // Convert I420 to NV12.
   int ret = libyuv::I420ToNV12(
       frame.buffer(webrtc::kYPlane), frame.stride(webrtc::kYPlane),
       frame.buffer(webrtc::kUPlane), frame.stride(webrtc::kUPlane),
-      frame.buffer(webrtc::kVPlane), frame.stride(webrtc::kVPlane),
-      dst_y, dst_stride_y, dst_uv, dst_stride_uv,
-      frame.width(), frame.height());
+      frame.buffer(webrtc::kVPlane), frame.stride(webrtc::kVPlane), dst_y,
+      dst_stride_y, dst_uv, dst_stride_uv, frame.width(), frame.height());
   CVPixelBufferUnlockBaseAddress(pixel_buffer, 0);
   if (ret) {
     LOG(LS_ERROR) << "Error converting I420 VideoFrame to NV12 :" << ret;
     return false;
   }
   return true;
 }

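Note on the conversion above: I420 stores Y, U and V in three separate planes with chroma at quarter resolution, while the NV12 buffer VideoToolbox consumes keeps a full-resolution Y plane plus one interleaved UV plane. A minimal sketch of what a routine like libyuv::I420ToNV12 has to do (an illustration only, not the libyuv implementation):

#include <cstdint>
#include <cstring>

// Sketch: copy the luma plane, then interleave the half-resolution U and V
// planes into the single UV plane that NV12 expects.
void I420ToNV12Sketch(const uint8_t* src_y, int src_stride_y,
                      const uint8_t* src_u, int src_stride_u,
                      const uint8_t* src_v, int src_stride_v,
                      uint8_t* dst_y, int dst_stride_y,
                      uint8_t* dst_uv, int dst_stride_uv,
                      int width, int height) {
  // Luma: straight row-by-row copy, honoring possibly different strides.
  for (int y = 0; y < height; ++y) {
    std::memcpy(dst_y + y * dst_stride_y, src_y + y * src_stride_y, width);
  }
  // Chroma: half resolution in both dimensions, written as U,V pairs.
  const int chroma_width = (width + 1) / 2;
  const int chroma_height = (height + 1) / 2;
  for (int y = 0; y < chroma_height; ++y) {
    const uint8_t* u_row = src_u + y * src_stride_u;
    const uint8_t* v_row = src_v + y * src_stride_v;
    uint8_t* uv_row = dst_uv + y * dst_stride_uv;
    for (int x = 0; x < chroma_width; ++x) {
      uv_row[2 * x] = u_row[x];
      uv_row[2 * x + 1] = v_row[x];
    }
  }
}

The stride arguments matter because CVPixelBuffer rows are frequently padded beyond the visible width, which is why the code above queries CVPixelBufferGetBytesPerRowOfPlane rather than assuming width.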
 // This is the callback function that VideoToolbox calls when encode is
 // complete.
(...skipping 19 matching lines...)
     CFDictionaryRef attachment =
         static_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(attachments, 0));
     is_keyframe =
         !CFDictionaryContainsKey(attachment, kCMSampleAttachmentKey_NotSync);
   }

   // Convert the sample buffer into a buffer suitable for RTP packetization.
   // TODO(tkchin): Allocate buffers through a pool.
   rtc::scoped_ptr<rtc::Buffer> buffer(new rtc::Buffer());
   rtc::scoped_ptr<webrtc::RTPFragmentationHeader> header;
-  if (!H264CMSampleBufferToAnnexBBuffer(sample_buffer,
-                                        is_keyframe,
-                                        buffer.get(),
-                                        header.accept())) {
+  if (!H264CMSampleBufferToAnnexBBuffer(sample_buffer, is_keyframe,
+                                        buffer.get(), header.accept())) {
     return;
   }
   webrtc::EncodedImage frame(buffer->data(), buffer->size(), buffer->size());
   frame._encodedWidth = encode_params->width;
   frame._encodedHeight = encode_params->height;
   frame._completeFrame = true;
   frame._frameType =
       is_keyframe ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta;
   frame.capture_time_ms_ = encode_params->render_time_ms;
   frame._timeStamp = encode_params->timestamp;

   int result = encode_params->callback->Encoded(
       frame, &(encode_params->codec_specific_info), header.get());
   if (result != 0) {
     LOG(LS_ERROR) << "Encoded callback failed: " << result;
   }
 }

 }  // namespace internal

 namespace webrtc {

 H264VideoToolboxEncoder::H264VideoToolboxEncoder()
-    : callback_(nullptr), compression_session_(nullptr) {
-}
+    : callback_(nullptr), compression_session_(nullptr) {}

 H264VideoToolboxEncoder::~H264VideoToolboxEncoder() {
   DestroyCompressionSession();
 }

 int H264VideoToolboxEncoder::InitEncode(const VideoCodec* codec_settings,
                                         int number_of_cores,
                                         size_t max_payload_size) {
   RTC_DCHECK(codec_settings);
   RTC_DCHECK_EQ(codec_settings->codecType, kVideoCodecH264);
(...skipping 52 matching lines...)
         is_keyframe_required = true;
         break;
       }
     }
   }

   CMTime presentation_time_stamp =
       CMTimeMake(input_image.render_time_ms(), 1000);
   CFDictionaryRef frame_properties = nullptr;
   if (is_keyframe_required) {
-    CFTypeRef keys[] = { kVTEncodeFrameOptionKey_ForceKeyFrame };
-    CFTypeRef values[] = { kCFBooleanTrue };
+    CFTypeRef keys[] = {kVTEncodeFrameOptionKey_ForceKeyFrame};
+    CFTypeRef values[] = {kCFBooleanTrue};
     frame_properties = internal::CreateCFDictionary(keys, values, 1);
   }
   rtc::scoped_ptr<internal::FrameEncodeParams> encode_params;
   encode_params.reset(new internal::FrameEncodeParams(
       callback_, codec_specific_info, width_, height_,
       input_image.render_time_ms(), input_image.timestamp()));
   VTCompressionSessionEncodeFrame(
       compression_session_, pixel_buffer, presentation_time_stamp,
       kCMTimeInvalid, frame_properties, encode_params.release(), nullptr);
   if (frame_properties) {
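Note on the hunk above: encode_params.release() hands the heap-allocated FrameEncodeParams to VTCompressionSessionEncodeFrame as its opaque per-frame refcon, and the output callback (its reclaiming lines are elided from this diff) is responsible for taking ownership back so the object is freed exactly once. A minimal, self-contained sketch of that handoff with invented names, using std::unique_ptr where the real code uses rtc::scoped_ptr:

#include <cstdint>
#include <cstdio>
#include <memory>

struct EncodeParams {
  int64_t render_time_ms;
  uint32_t timestamp;
};

// Stands in for the C-style completion callback, which only receives a void*.
void OnEncodeComplete(void* opaque) {
  // Reclaim ownership immediately so every exit path frees the object once.
  std::unique_ptr<EncodeParams> params(static_cast<EncodeParams*>(opaque));
  std::printf("frame done: ts=%u render_time_ms=%lld\n", params->timestamp,
              static_cast<long long>(params->render_time_ms));
}

// Stands in for the asynchronous encode call that accepts an opaque pointer.
void EncodeFrameAsync(void* refcon, void (*on_done)(void*)) {
  on_done(refcon);  // The sketch "encoder" completes synchronously.
}

int main() {
  std::unique_ptr<EncodeParams> params(new EncodeParams{33, 9000});
  // release(): ownership crosses the C API boundary as a raw pointer.
  EncodeFrameAsync(params.release(), &OnEncodeComplete);
  return 0;
}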
(...skipping 48 matching lines...)
     kCVPixelBufferOpenGLCompatibilityKey,
 #endif
     kCVPixelBufferIOSurfacePropertiesKey,
     kCVPixelBufferPixelFormatTypeKey
   };
   CFDictionaryRef io_surface_value =
       internal::CreateCFDictionary(nullptr, nullptr, 0);
   int64_t nv12type = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
   CFNumberRef pixel_format =
       CFNumberCreate(nullptr, kCFNumberLongType, &nv12type);
-  CFTypeRef values[attributes_size] = {
-    kCFBooleanTrue,
-    io_surface_value,
-    pixel_format
-  };
+  CFTypeRef values[attributes_size] = {kCFBooleanTrue, io_surface_value,
+                                       pixel_format};
   CFDictionaryRef source_attributes =
       internal::CreateCFDictionary(keys, values, attributes_size);
   if (io_surface_value) {
     CFRelease(io_surface_value);
     io_surface_value = nullptr;
   }
   if (pixel_format) {
     CFRelease(pixel_format);
     pixel_format = nullptr;
   }
   OSStatus status = VTCompressionSessionCreate(
       nullptr,  // use default allocator
-      width_,
-      height_,
-      kCMVideoCodecType_H264,
+      width_, height_, kCMVideoCodecType_H264,
       nullptr,  // use default encoder
       source_attributes,
       nullptr,  // use default compressed data allocator
-      internal::VTCompressionOutputCallback,
-      this,
-      &compression_session_);
+      internal::VTCompressionOutputCallback, this, &compression_session_);
   if (source_attributes) {
     CFRelease(source_attributes);
     source_attributes = nullptr;
   }
   if (status != noErr) {
     LOG(LS_ERROR) << "Failed to create compression session: " << status;
     return WEBRTC_VIDEO_CODEC_ERROR;
   }
   ConfigureCompressionSession();
   return WEBRTC_VIDEO_CODEC_OK;
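Note: internal::CreateCFDictionary is a project helper whose definition is outside this diff. Assuming it is the usual thin wrapper over CFDictionaryCreate with the CFType callbacks (an assumption, not the actual helper), the resulting dictionary retains its keys and values, which is why the code above can CFRelease io_surface_value and pixel_format right after source_attributes is created:

#include <CoreFoundation/CoreFoundation.h>

// Assumed shape of such a helper; the one in this CL may differ.
CFDictionaryRef CreateCFDictionarySketch(CFTypeRef* keys,
                                         CFTypeRef* values,
                                         CFIndex size) {
  // The kCFType callbacks make the dictionary CFRetain every key/value on
  // insertion and CFRelease them when the dictionary itself is released.
  return CFDictionaryCreate(kCFAllocatorDefault,
                            reinterpret_cast<const void**>(keys),
                            reinterpret_cast<const void**>(values), size,
                            &kCFTypeDictionaryKeyCallBacks,
                            &kCFTypeDictionaryValueCallBacks);
}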
(...skipping 36 matching lines...)
   }
 }

 const char* H264VideoToolboxEncoder::ImplementationName() const {
   return "VideoToolbox";
 }

 }  // namespace webrtc

 #endif  // defined(WEBRTC_VIDEO_TOOLBOX_SUPPORTED)