OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 * | 9 * |
10 */ | 10 */ |
11 | 11 |
12 #include "webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.h" | 12 #include "webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.h" |
13 | 13 |
14 #include <limits> | 14 #include <limits> |
15 | 15 |
16 #include "third_party/openh264/src/codec/api/svc/codec_api.h" | 16 #include "third_party/openh264/src/codec/api/svc/codec_api.h" |
17 #include "third_party/openh264/src/codec/api/svc/codec_app_def.h" | 17 #include "third_party/openh264/src/codec/api/svc/codec_app_def.h" |
18 #include "third_party/openh264/src/codec/api/svc/codec_def.h" | 18 #include "third_party/openh264/src/codec/api/svc/codec_def.h" |
19 | 19 |
20 #include "webrtc/base/checks.h" | 20 #include "webrtc/base/checks.h" |
21 #include "webrtc/base/logging.h" | 21 #include "webrtc/base/logging.h" |
22 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h" | 22 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h" |
| 23 #include "webrtc/system_wrappers/include/metrics.h" |
23 | 24 |
24 namespace webrtc { | 25 namespace webrtc { |
25 | 26 |
26 namespace { | 27 namespace { |
27 | 28 |
28 const bool kOpenH264EncoderDetailedLogging = false; | 29 const bool kOpenH264EncoderDetailedLogging = false; |
29 | 30 |
| 31 // Used by histograms. Values of entries should not be changed. |
| 32 enum H264EncoderImplEvent { |
| 33 kH264EncoderEventInit = 0, |
| 34 kH264EncoderEventError = 1, |
| 35 kH264EncoderEventMax = 16, |
| 36 }; |
| 37 |
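To make the "values of entries should not be changed" rule above concrete: this style of enumerated histogram is append-only, so a future event would take the next unused value below the max sentinel instead of renumbering existing entries. A minimal sketch, with kH264EncoderEventRelease as a purely hypothetical addition (not part of this change):

// Hypothetical future version of the enum above (illustrative only).
// Existing values stay fixed so already-recorded samples keep their meaning;
// kH264EncoderEventMax leaves headroom for additions like this.
enum H264EncoderImplEvent {
  kH264EncoderEventInit = 0,
  kH264EncoderEventError = 1,
  kH264EncoderEventRelease = 2,  // Appended with the next free value.
  kH264EncoderEventMax = 16,     // Unchanged; stays above all real entries.
};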
30 int NumberOfThreads(int width, int height, int number_of_cores) { | 38 int NumberOfThreads(int width, int height, int number_of_cores) { |
31 // TODO(hbos): In Chromium, multiple threads do not work with sandbox on Mac, | 39 // TODO(hbos): In Chromium, multiple threads do not work with sandbox on Mac, |
32 // see crbug.com/583348. Until further investigated, only use one thread. | 40 // see crbug.com/583348. Until further investigated, only use one thread. |
33 // if (width * height >= 1920 * 1080 && number_of_cores > 8) { | 41 // if (width * height >= 1920 * 1080 && number_of_cores > 8) { |
34 // return 8; // 8 threads for 1080p on high perf machines. | 42 // return 8; // 8 threads for 1080p on high perf machines. |
35 // } else if (width * height > 1280 * 960 && number_of_cores >= 6) { | 43 // } else if (width * height > 1280 * 960 && number_of_cores >= 6) { |
36 // return 3; // 3 threads for 1080p. | 44 // return 3; // 3 threads for 1080p. |
37 // } else if (width * height > 640 * 480 && number_of_cores >= 3) { | 45 // } else if (width * height > 640 * 480 && number_of_cores >= 3) { |
38 // return 2; // 2 threads for qHD/HD. | 46 // return 2; // 2 threads for qHD/HD. |
39 // } else { | 47 // } else { |
(...skipping 94 matching lines...)
134 // Copy the entire layer's data (including start codes). | 142 // Copy the entire layer's data (including start codes). |
135 memcpy(encoded_image->_buffer + encoded_image->_length, | 143 memcpy(encoded_image->_buffer + encoded_image->_length, |
136 layerInfo.pBsBuf, | 144 layerInfo.pBsBuf, |
137 layer_len); | 145 layer_len); |
138 encoded_image->_length += layer_len; | 146 encoded_image->_length += layer_len; |
139 } | 147 } |
140 } | 148 } |
141 | 149 |
142 H264EncoderImpl::H264EncoderImpl() | 150 H264EncoderImpl::H264EncoderImpl() |
143 : openh264_encoder_(nullptr), | 151 : openh264_encoder_(nullptr), |
144 encoded_image_callback_(nullptr) { | 152 encoded_image_callback_(nullptr), |
| 153 has_reported_init_(false), |
| 154 has_reported_error_(false) { |
145 } | 155 } |
146 | 156 |
147 H264EncoderImpl::~H264EncoderImpl() { | 157 H264EncoderImpl::~H264EncoderImpl() { |
148 Release(); | 158 Release(); |
149 } | 159 } |
150 | 160 |
151 int32_t H264EncoderImpl::InitEncode(const VideoCodec* codec_settings, | 161 int32_t H264EncoderImpl::InitEncode(const VideoCodec* codec_settings, |
152 int32_t number_of_cores, | 162 int32_t number_of_cores, |
153 size_t /*max_payload_size*/) { | 163 size_t /*max_payload_size*/) { |
| 164 ReportInit(); |
154 if (!codec_settings || | 165 if (!codec_settings || |
155 codec_settings->codecType != kVideoCodecH264) { | 166 codec_settings->codecType != kVideoCodecH264) { |
| 167 ReportError(); |
156 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 168 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
157 } | 169 } |
158 if (codec_settings->maxFramerate == 0) | 170 if (codec_settings->maxFramerate == 0) { |
| 171 ReportError(); |
159 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 172 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
160 if (codec_settings->width < 1 || codec_settings->height < 1) | 173 } |
| 174 if (codec_settings->width < 1 || codec_settings->height < 1) { |
| 175 ReportError(); |
161 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 176 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 177 } |
162 | 178 |
163 int32_t release_ret = Release(); | 179 int32_t release_ret = Release(); |
164 if (release_ret != WEBRTC_VIDEO_CODEC_OK) | 180 if (release_ret != WEBRTC_VIDEO_CODEC_OK) { |
| 181 ReportError(); |
165 return release_ret; | 182 return release_ret; |
| 183 } |
166 RTC_DCHECK(!openh264_encoder_); | 184 RTC_DCHECK(!openh264_encoder_); |
167 | 185 |
168 // Create encoder. | 186 // Create encoder. |
169 if (WelsCreateSVCEncoder(&openh264_encoder_) != 0) { | 187 if (WelsCreateSVCEncoder(&openh264_encoder_) != 0) { |
170 // Failed to create encoder. | 188 // Failed to create encoder. |
171 LOG(LS_ERROR) << "Failed to create OpenH264 encoder"; | 189 LOG(LS_ERROR) << "Failed to create OpenH264 encoder"; |
172 RTC_DCHECK(!openh264_encoder_); | 190 RTC_DCHECK(!openh264_encoder_); |
| 191 ReportError(); |
173 return WEBRTC_VIDEO_CODEC_ERROR; | 192 return WEBRTC_VIDEO_CODEC_ERROR; |
174 } | 193 } |
175 RTC_DCHECK(openh264_encoder_); | 194 RTC_DCHECK(openh264_encoder_); |
176 if (kOpenH264EncoderDetailedLogging) { | 195 if (kOpenH264EncoderDetailedLogging) { |
177 int trace_level = WELS_LOG_DETAIL; | 196 int trace_level = WELS_LOG_DETAIL; |
178 openh264_encoder_->SetOption(ENCODER_OPTION_TRACE_LEVEL, | 197 openh264_encoder_->SetOption(ENCODER_OPTION_TRACE_LEVEL, |
179 &trace_level); | 198 &trace_level); |
180 } | 199 } |
181 // else WELS_LOG_DEFAULT is used by default. | 200 // else WELS_LOG_DEFAULT is used by default. |
182 | 201 |
183 codec_settings_ = *codec_settings; | 202 codec_settings_ = *codec_settings; |
184 if (codec_settings_.targetBitrate == 0) | 203 if (codec_settings_.targetBitrate == 0) |
185 codec_settings_.targetBitrate = codec_settings_.startBitrate; | 204 codec_settings_.targetBitrate = codec_settings_.startBitrate; |
186 | 205 |
187 // Initialization parameters. | 206 // Initialization parameters. |
188 // There are two ways to initialize. There is SEncParamBase (cleared with | 207 // There are two ways to initialize. There is SEncParamBase (cleared with |
189 // memset(&p, 0, sizeof(SEncParamBase))) used in Initialize, and SEncParamExt | 208 // memset(&p, 0, sizeof(SEncParamBase))) used in Initialize, and SEncParamExt |
190 // which is a superset of SEncParamBase (cleared with GetDefaultParams) used | 209 // which is a superset of SEncParamBase (cleared with GetDefaultParams) used |
191 // in InitializeExt. | 210 // in InitializeExt. |
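For comparison with the comment above, the simpler SEncParamBase route would look roughly like the sketch below. It is not what this encoder does: the per-spatial-layer and slice settings configured further down only exist in SEncParamExt, which is why InitializeExt is used instead. Field names follow the OpenH264 headers included at the top; treat this as an illustrative sketch, not code from the change.

// Minimal sketch of the base-parameter path (Initialize + SEncParamBase).
SEncParamBase base_params;
memset(&base_params, 0, sizeof(SEncParamBase));
base_params.iUsageType = CAMERA_VIDEO_REAL_TIME;
base_params.iPicWidth = codec_settings_.width;
base_params.iPicHeight = codec_settings_.height;
base_params.iTargetBitrate = codec_settings_.targetBitrate * 1000;  // bit/s.
base_params.iRCMode = RC_BITRATE_MODE;
base_params.fMaxFrameRate = static_cast<float>(codec_settings_.maxFramerate);
openh264_encoder_->Initialize(&base_params);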
192 SEncParamExt init_params; | 211 SEncParamExt init_params; |
193 openh264_encoder_->GetDefaultParams(&init_params); | 212 openh264_encoder_->GetDefaultParams(&init_params); |
194 if (codec_settings_.mode == kRealtimeVideo) { | 213 if (codec_settings_.mode == kRealtimeVideo) { |
195 init_params.iUsageType = CAMERA_VIDEO_REAL_TIME; | 214 init_params.iUsageType = CAMERA_VIDEO_REAL_TIME; |
196 } else if (codec_settings_.mode == kScreensharing) { | 215 } else if (codec_settings_.mode == kScreensharing) { |
197 init_params.iUsageType = SCREEN_CONTENT_REAL_TIME; | 216 init_params.iUsageType = SCREEN_CONTENT_REAL_TIME; |
198 } else { | 217 } else { |
| 218 ReportError(); |
199 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 219 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
200 } | 220 } |
201 init_params.iPicWidth = codec_settings_.width; | 221 init_params.iPicWidth = codec_settings_.width; |
202 init_params.iPicHeight = codec_settings_.height; | 222 init_params.iPicHeight = codec_settings_.height; |
203 // |init_params| uses bit/s, |codec_settings_| uses kbit/s. | 223 // |init_params| uses bit/s, |codec_settings_| uses kbit/s. |
204 init_params.iTargetBitrate = codec_settings_.targetBitrate * 1000; | 224 init_params.iTargetBitrate = codec_settings_.targetBitrate * 1000; |
205 init_params.iMaxBitrate = codec_settings_.maxBitrate * 1000; | 225 init_params.iMaxBitrate = codec_settings_.maxBitrate * 1000; |
206 // Rate Control mode | 226 // Rate Control mode |
207 init_params.iRCMode = RC_BITRATE_MODE; | 227 init_params.iRCMode = RC_BITRATE_MODE; |
208 init_params.fMaxFrameRate = static_cast<float>(codec_settings_.maxFramerate); | 228 init_params.fMaxFrameRate = static_cast<float>(codec_settings_.maxFramerate); |
(...skipping 20 matching lines...)
229 init_params.sSpatialLayers[0].fFrameRate = init_params.fMaxFrameRate; | 249 init_params.sSpatialLayers[0].fFrameRate = init_params.fMaxFrameRate; |
230 init_params.sSpatialLayers[0].iSpatialBitrate = init_params.iTargetBitrate; | 250 init_params.sSpatialLayers[0].iSpatialBitrate = init_params.iTargetBitrate; |
231 init_params.sSpatialLayers[0].iMaxSpatialBitrate = init_params.iMaxBitrate; | 251 init_params.sSpatialLayers[0].iMaxSpatialBitrate = init_params.iMaxBitrate; |
232 // Slice num according to number of threads. | 252 // Slice num according to number of threads. |
233 init_params.sSpatialLayers[0].sSliceCfg.uiSliceMode = SM_AUTO_SLICE; | 253 init_params.sSpatialLayers[0].sSliceCfg.uiSliceMode = SM_AUTO_SLICE; |
234 | 254 |
235 // Initialize. | 255 // Initialize. |
236 if (openh264_encoder_->InitializeExt(&init_params) != 0) { | 256 if (openh264_encoder_->InitializeExt(&init_params) != 0) { |
237 LOG(LS_ERROR) << "Failed to initialize OpenH264 encoder"; | 257 LOG(LS_ERROR) << "Failed to initialize OpenH264 encoder"; |
238 Release(); | 258 Release(); |
| 259 ReportError(); |
239 return WEBRTC_VIDEO_CODEC_ERROR; | 260 return WEBRTC_VIDEO_CODEC_ERROR; |
240 } | 261 } |
241 int video_format = EVideoFormatType::videoFormatI420; | 262 int video_format = EVideoFormatType::videoFormatI420; |
242 openh264_encoder_->SetOption(ENCODER_OPTION_DATAFORMAT, | 263 openh264_encoder_->SetOption(ENCODER_OPTION_DATAFORMAT, |
243 &video_format); | 264 &video_format); |
244 | 265 |
245 // Initialize encoded image. Default buffer size: size of unencoded data. | 266 // Initialize encoded image. Default buffer size: size of unencoded data. |
246 encoded_image_._size = CalcBufferSize( | 267 encoded_image_._size = CalcBufferSize( |
247 kI420, codec_settings_.width, codec_settings_.height); | 268 kI420, codec_settings_.width, codec_settings_.height); |
248 encoded_image_._buffer = new uint8_t[encoded_image_._size]; | 269 encoded_image_._buffer = new uint8_t[encoded_image_._size]; |
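The "size of unencoded data" default above works out, for I420, to width * height * 3 / 2 bytes (a full-resolution Y plane plus two quarter-resolution chroma planes), which comfortably upper-bounds the encoded output for the same frame. A small sketch of the arithmetic, assuming CalcBufferSize keeps the usual I420 layout; the helper name here is illustrative:

// Approximate I420 buffer size used as the encoded-image capacity.
size_t ApproxI420BufferSize(int width, int height) {
  size_t y_size = static_cast<size_t>(width) * height;
  size_t chroma_size =
      static_cast<size_t>((width + 1) / 2) * ((height + 1) / 2);
  return y_size + 2 * chroma_size;  // e.g. 1280x720 -> 1,382,400 bytes.
}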
(...skipping 43 matching lines...)
292 &target_bitrate); | 313 &target_bitrate); |
293 float max_framerate = static_cast<float>(codec_settings_.maxFramerate); | 314 float max_framerate = static_cast<float>(codec_settings_.maxFramerate); |
294 openh264_encoder_->SetOption(ENCODER_OPTION_FRAME_RATE, | 315 openh264_encoder_->SetOption(ENCODER_OPTION_FRAME_RATE, |
295 &max_framerate); | 316 &max_framerate); |
296 return WEBRTC_VIDEO_CODEC_OK; | 317 return WEBRTC_VIDEO_CODEC_OK; |
297 } | 318 } |
298 | 319 |
299 int32_t H264EncoderImpl::Encode( | 320 int32_t H264EncoderImpl::Encode( |
300 const VideoFrame& frame, const CodecSpecificInfo* codec_specific_info, | 321 const VideoFrame& frame, const CodecSpecificInfo* codec_specific_info, |
301 const std::vector<FrameType>* frame_types) { | 322 const std::vector<FrameType>* frame_types) { |
302 if (!IsInitialized()) | 323 if (!IsInitialized()) { |
| 324 ReportError(); |
303 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 325 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
304 if (frame.IsZeroSize()) | 326 } |
| 327 if (frame.IsZeroSize()) { |
| 328 ReportError(); |
305 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 329 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 330 } |
306 if (!encoded_image_callback_) { | 331 if (!encoded_image_callback_) { |
307 LOG(LS_WARNING) << "InitEncode() has been called, but a callback function " | 332 LOG(LS_WARNING) << "InitEncode() has been called, but a callback function " |
308 << "has not been set with RegisterEncodeCompleteCallback()"; | 333 << "has not been set with RegisterEncodeCompleteCallback()"; |
| 334 ReportError(); |
309 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 335 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
310 } | 336 } |
311 if (frame.width() != codec_settings_.width || | 337 if (frame.width() != codec_settings_.width || |
312 frame.height() != codec_settings_.height) { | 338 frame.height() != codec_settings_.height) { |
313 LOG(LS_WARNING) << "Encoder initialized for " << codec_settings_.width | 339 LOG(LS_WARNING) << "Encoder initialized for " << codec_settings_.width |
314 << "x" << codec_settings_.height << " but trying to encode " | 340 << "x" << codec_settings_.height << " but trying to encode " |
315 << frame.width() << "x" << frame.height() << " frame."; | 341 << frame.width() << "x" << frame.height() << " frame."; |
| 342 ReportError(); |
316 return WEBRTC_VIDEO_CODEC_ERR_SIZE; | 343 return WEBRTC_VIDEO_CODEC_ERR_SIZE; |
317 } | 344 } |
318 | 345 |
319 bool force_key_frame = false; | 346 bool force_key_frame = false; |
320 if (frame_types != nullptr) { | 347 if (frame_types != nullptr) { |
321 // We only support a single stream. | 348 // We only support a single stream. |
322 RTC_DCHECK_EQ(frame_types->size(), static_cast<size_t>(1)); | 349 RTC_DCHECK_EQ(frame_types->size(), static_cast<size_t>(1)); |
323 // Skip frame? | 350 // Skip frame? |
324 if ((*frame_types)[0] == kEmptyFrame) { | 351 if ((*frame_types)[0] == kEmptyFrame) { |
325 return WEBRTC_VIDEO_CODEC_OK; | 352 return WEBRTC_VIDEO_CODEC_OK; |
(...skipping 24 matching lines...)
350 | 377 |
351 // EncodeFrame output. | 378 // EncodeFrame output. |
352 SFrameBSInfo info; | 379 SFrameBSInfo info; |
353 memset(&info, 0, sizeof(SFrameBSInfo)); | 380 memset(&info, 0, sizeof(SFrameBSInfo)); |
354 | 381 |
355 // Encode! | 382 // Encode! |
356 int enc_ret = openh264_encoder_->EncodeFrame(&picture, &info); | 383 int enc_ret = openh264_encoder_->EncodeFrame(&picture, &info); |
357 if (enc_ret != 0) { | 384 if (enc_ret != 0) { |
358 LOG(LS_ERROR) << "OpenH264 frame encoding failed, EncodeFrame returned " | 385 LOG(LS_ERROR) << "OpenH264 frame encoding failed, EncodeFrame returned " |
359 << enc_ret << "."; | 386 << enc_ret << "."; |
| 387 ReportError(); |
360 return WEBRTC_VIDEO_CODEC_ERROR; | 388 return WEBRTC_VIDEO_CODEC_ERROR; |
361 } | 389 } |
362 | 390 |
363 encoded_image_._encodedWidth = frame.width(); | 391 encoded_image_._encodedWidth = frame.width(); |
364 encoded_image_._encodedHeight = frame.height(); | 392 encoded_image_._encodedHeight = frame.height(); |
365 encoded_image_._timeStamp = frame.timestamp(); | 393 encoded_image_._timeStamp = frame.timestamp(); |
366 encoded_image_.ntp_time_ms_ = frame.ntp_time_ms(); | 394 encoded_image_.ntp_time_ms_ = frame.ntp_time_ms(); |
367 encoded_image_.capture_time_ms_ = frame.render_time_ms(); | 395 encoded_image_.capture_time_ms_ = frame.render_time_ms(); |
368 encoded_image_._frameType = EVideoFrameType_to_FrameType(info.eFrameType); | 396 encoded_image_._frameType = EVideoFrameType_to_FrameType(info.eFrameType); |
369 | 397 |
(...skipping 12 matching lines...)
382 &codec_specific, | 410 &codec_specific, |
383 &frag_header); | 411 &frag_header); |
384 } | 412 } |
385 return WEBRTC_VIDEO_CODEC_OK; | 413 return WEBRTC_VIDEO_CODEC_OK; |
386 } | 414 } |
387 | 415 |
388 bool H264EncoderImpl::IsInitialized() const { | 416 bool H264EncoderImpl::IsInitialized() const { |
389 return openh264_encoder_ != nullptr; | 417 return openh264_encoder_ != nullptr; |
390 } | 418 } |
391 | 419 |
| 420 void H264EncoderImpl::ReportInit() { |
| 421 if (has_reported_init_) |
| 422 return; |
| 423 RTC_HISTOGRAM_ENUMERATION("WebRTC.Video.H264EncoderImpl.Event", |
| 424 kH264EncoderEventInit, |
| 425 kH264EncoderEventMax); |
| 426 has_reported_init_ = true; |
| 427 } |
| 428 |
| 429 void H264EncoderImpl::ReportError() { |
| 430 if (has_reported_error_) |
| 431 return; |
| 432 RTC_HISTOGRAM_ENUMERATION("WebRTC.Video.H264EncoderImpl.Event", |
| 433 kH264EncoderEventError, |
| 434 kH264EncoderEventMax); |
| 435 has_reported_error_ = true; |
| 436 } |
| 437 |
392 int32_t H264EncoderImpl::SetChannelParameters( | 438 int32_t H264EncoderImpl::SetChannelParameters( |
393 uint32_t packet_loss, int64_t rtt) { | 439 uint32_t packet_loss, int64_t rtt) { |
394 return WEBRTC_VIDEO_CODEC_OK; | 440 return WEBRTC_VIDEO_CODEC_OK; |
395 } | 441 } |
396 | 442 |
397 int32_t H264EncoderImpl::SetPeriodicKeyFrames(bool enable) { | 443 int32_t H264EncoderImpl::SetPeriodicKeyFrames(bool enable) { |
398 return WEBRTC_VIDEO_CODEC_OK; | 444 return WEBRTC_VIDEO_CODEC_OK; |
399 } | 445 } |
400 | 446 |
401 void H264EncoderImpl::OnDroppedFrame() { | 447 void H264EncoderImpl::OnDroppedFrame() { |
402 } | 448 } |
403 | 449 |
404 } // namespace webrtc | 450 } // namespace webrtc |
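For reviewers who want the calling sequence that the new ReportInit()/ReportError() hooks sit inside, a rough driver sketch follows. The callback and frame objects are placeholders, the settings values are only examples, and frame construction is omitted; this is not test code from the change.

#include "webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.h"

// Rough lifecycle of H264EncoderImpl as exercised by the code above.
webrtc::H264EncoderImpl encoder;

webrtc::VideoCodec settings;
memset(&settings, 0, sizeof(settings));
settings.codecType = webrtc::kVideoCodecH264;
settings.width = 1280;
settings.height = 720;
settings.maxFramerate = 30;
settings.startBitrate = 1000;  // kbit/s; copied to targetBitrate if that is 0.
settings.maxBitrate = 2500;    // kbit/s.
settings.mode = webrtc::kRealtimeVideo;

encoder.InitEncode(&settings, /*number_of_cores=*/1, /*max_payload_size=*/0);
encoder.RegisterEncodeCompleteCallback(&callback);  // EncodedImageCallback*.

// Per captured frame; encoded NAL units are delivered through |callback|.
encoder.Encode(frame, /*codec_specific_info=*/nullptr, /*frame_types=*/nullptr);

encoder.Release();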