| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 * | 9 * |
| 10 */ | 10 */ |
| 11 | 11 |
| 12 #include "webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.h" | 12 #include "webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.h" |
| 13 | 13 |
| 14 #include <limits> | 14 #include <limits> |
| 15 #include <string> | |
| 16 | 15 |
| 17 #include "third_party/openh264/src/codec/api/svc/codec_api.h" | 16 #include "third_party/openh264/src/codec/api/svc/codec_api.h" |
| 18 #include "third_party/openh264/src/codec/api/svc/codec_app_def.h" | 17 #include "third_party/openh264/src/codec/api/svc/codec_app_def.h" |
| 19 #include "third_party/openh264/src/codec/api/svc/codec_def.h" | 18 #include "third_party/openh264/src/codec/api/svc/codec_def.h" |
| 20 #include "third_party/openh264/src/codec/api/svc/codec_ver.h" | 19 #include "third_party/openh264/src/codec/api/svc/codec_ver.h" |
| 21 | 20 |
| 22 #include "webrtc/base/checks.h" | 21 #include "webrtc/base/checks.h" |
| 23 #include "webrtc/base/logging.h" | 22 #include "webrtc/base/logging.h" |
| 24 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h" | 23 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h" |
| 25 #include "webrtc/media/base/mediaconstants.h" | |
| 26 #include "webrtc/system_wrappers/include/metrics.h" | 24 #include "webrtc/system_wrappers/include/metrics.h" |
| 27 | 25 |
| 28 namespace webrtc { | 26 namespace webrtc { |
| 29 | 27 |
| 30 namespace { | 28 namespace { |
| 31 | 29 |
| 32 const bool kOpenH264EncoderDetailedLogging = false; | 30 const bool kOpenH264EncoderDetailedLogging = false; |
| 33 | 31 |
| 34 // Used by histograms. Values of entries should not be changed. | 32 // Used by histograms. Values of entries should not be changed. |
| 35 enum H264EncoderImplEvent { | 33 enum H264EncoderImplEvent { |
| (...skipping 109 matching lines...) |
| 145 layer_len += layerInfo.pNalLengthInByte[nal]; | 143 layer_len += layerInfo.pNalLengthInByte[nal]; |
| 146 } | 144 } |
| 147 // Copy the entire layer's data (including start codes). | 145 // Copy the entire layer's data (including start codes). |
| 148 memcpy(encoded_image->_buffer + encoded_image->_length, | 146 memcpy(encoded_image->_buffer + encoded_image->_length, |
| 149 layerInfo.pBsBuf, | 147 layerInfo.pBsBuf, |
| 150 layer_len); | 148 layer_len); |
| 151 encoded_image->_length += layer_len; | 149 encoded_image->_length += layer_len; |
| 152 } | 150 } |
| 153 } | 151 } |
| 154 | 152 |
| 155 H264EncoderImpl::H264EncoderImpl(const cricket::VideoCodec& codec) | 153 H264EncoderImpl::H264EncoderImpl() |
| 156 : openh264_encoder_(nullptr), | 154 : openh264_encoder_(nullptr), |
| 157 width_(0), | 155 width_(0), |
| 158 height_(0), | 156 height_(0), |
| 159 max_frame_rate_(0.0f), | 157 max_frame_rate_(0.0f), |
| 160 target_bps_(0), | 158 target_bps_(0), |
| 161 max_bps_(0), | 159 max_bps_(0), |
| 162 mode_(kRealtimeVideo), | 160 mode_(kRealtimeVideo), |
| 163 frame_dropping_on_(false), | 161 frame_dropping_on_(false), |
| 164 key_frame_interval_(0), | 162 key_frame_interval_(0), |
| 165 packetization_mode_(H264PacketizationMode::SingleNalUnit), | |
| 166 max_payload_size_(0), | |
| 167 number_of_cores_(0), | 163 number_of_cores_(0), |
| 168 encoded_image_callback_(nullptr), | 164 encoded_image_callback_(nullptr), |
| 169 has_reported_init_(false), | 165 has_reported_init_(false), |
| 170 has_reported_error_(false) { | 166 has_reported_error_(false) {} |
| 171 RTC_CHECK(cricket::CodecNamesEq(codec.name, cricket::kH264CodecName)); | |
| 172 std::string packetization_mode_string; | |
| 173 if (codec.GetParam(cricket::kH264FmtpPacketizationMode, | |
| 174 &packetization_mode_string) && | |
| 175 packetization_mode_string == "1") { | |
| 176 packetization_mode_ = H264PacketizationMode::NonInterleaved; | |
| 177 } | |
| 178 } | |
| 179 | 167 |
| 180 H264EncoderImpl::~H264EncoderImpl() { | 168 H264EncoderImpl::~H264EncoderImpl() { |
| 181 Release(); | 169 Release(); |
| 182 } | 170 } |
| 183 | 171 |
| 184 int32_t H264EncoderImpl::InitEncode(const VideoCodec* codec_settings, | 172 int32_t H264EncoderImpl::InitEncode(const VideoCodec* codec_settings, |
| 185 int32_t number_of_cores, | 173 int32_t number_of_cores, |
| 186 size_t max_payload_size) { | 174 size_t /*max_payload_size*/) { |
| 187 ReportInit(); | 175 ReportInit(); |
| 188 if (!codec_settings || | 176 if (!codec_settings || |
| 189 codec_settings->codecType != kVideoCodecH264) { | 177 codec_settings->codecType != kVideoCodecH264) { |
| 190 ReportError(); | 178 ReportError(); |
| 191 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 179 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 192 } | 180 } |
| 193 if (codec_settings->maxFramerate == 0) { | 181 if (codec_settings->maxFramerate == 0) { |
| 194 ReportError(); | 182 ReportError(); |
| 195 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 183 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 196 } | 184 } |
| (...skipping 26 matching lines...) |
| 223 // else WELS_LOG_DEFAULT is used by default. | 211 // else WELS_LOG_DEFAULT is used by default. |
| 224 | 212 |
| 225 number_of_cores_ = number_of_cores; | 213 number_of_cores_ = number_of_cores; |
| 226 // Set internal settings from codec_settings | 214 // Set internal settings from codec_settings |
| 227 width_ = codec_settings->width; | 215 width_ = codec_settings->width; |
| 228 height_ = codec_settings->height; | 216 height_ = codec_settings->height; |
| 229 max_frame_rate_ = static_cast<float>(codec_settings->maxFramerate); | 217 max_frame_rate_ = static_cast<float>(codec_settings->maxFramerate); |
| 230 mode_ = codec_settings->mode; | 218 mode_ = codec_settings->mode; |
| 231 frame_dropping_on_ = codec_settings->H264().frameDroppingOn; | 219 frame_dropping_on_ = codec_settings->H264().frameDroppingOn; |
| 232 key_frame_interval_ = codec_settings->H264().keyFrameInterval; | 220 key_frame_interval_ = codec_settings->H264().keyFrameInterval; |
| 233 max_payload_size_ = max_payload_size; | |
| 234 | 221 |
| 235 // Codec_settings uses kbits/second; encoder uses bits/second. | 222 // Codec_settings uses kbits/second; encoder uses bits/second. |
| 236 max_bps_ = codec_settings->maxBitrate * 1000; | 223 max_bps_ = codec_settings->maxBitrate * 1000; |
| 237 if (codec_settings->targetBitrate == 0) | 224 if (codec_settings->targetBitrate == 0) |
| 238 target_bps_ = codec_settings->startBitrate * 1000; | 225 target_bps_ = codec_settings->startBitrate * 1000; |
| 239 else | 226 else |
| 240 target_bps_ = codec_settings->targetBitrate * 1000; | 227 target_bps_ = codec_settings->targetBitrate * 1000; |
| 241 | 228 |
| 242 SEncParamExt encoder_params = CreateEncoderParams(); | 229 SEncParamExt encoder_params = CreateEncoderParams(); |
| 243 | |
| 244 // Initialize. | 230 // Initialize. |
| 245 if (openh264_encoder_->InitializeExt(&encoder_params) != 0) { | 231 if (openh264_encoder_->InitializeExt(&encoder_params) != 0) { |
| 246 LOG(LS_ERROR) << "Failed to initialize OpenH264 encoder"; | 232 LOG(LS_ERROR) << "Failed to initialize OpenH264 encoder"; |
| 247 Release(); | 233 Release(); |
| 248 ReportError(); | 234 ReportError(); |
| 249 return WEBRTC_VIDEO_CODEC_ERROR; | 235 return WEBRTC_VIDEO_CODEC_ERROR; |
| 250 } | 236 } |
| 251 // TODO(pbos): Base init params on these values before submitting. | 237 // TODO(pbos): Base init params on these values before submitting. |
| 252 int video_format = EVideoFormatType::videoFormatI420; | 238 int video_format = EVideoFormatType::videoFormatI420; |
| 253 openh264_encoder_->SetOption(ENCODER_OPTION_DATAFORMAT, | 239 openh264_encoder_->SetOption(ENCODER_OPTION_DATAFORMAT, |
| (...skipping 123 matching lines...) |
| 377 RTPFragmentationHeader frag_header; | 363 RTPFragmentationHeader frag_header; |
| 378 RtpFragmentize(&encoded_image_, &encoded_image_buffer_, *frame_buffer, &info, | 364 RtpFragmentize(&encoded_image_, &encoded_image_buffer_, *frame_buffer, &info, |
| 379 &frag_header); | 365 &frag_header); |
| 380 | 366 |
| 381 // Encoder can skip frames to save bandwidth in which case | 367 // Encoder can skip frames to save bandwidth in which case |
| 382 // |encoded_image_._length| == 0. | 368 // |encoded_image_._length| == 0. |
| 383 if (encoded_image_._length > 0) { | 369 if (encoded_image_._length > 0) { |
| 384 // Deliver encoded image. | 370 // Deliver encoded image. |
| 385 CodecSpecificInfo codec_specific; | 371 CodecSpecificInfo codec_specific; |
| 386 codec_specific.codecType = kVideoCodecH264; | 372 codec_specific.codecType = kVideoCodecH264; |
| 387 codec_specific.codecSpecific.H264.packetization_mode = packetization_mode_; | |
| 388 encoded_image_callback_->OnEncodedImage(encoded_image_, &codec_specific, | 373 encoded_image_callback_->OnEncodedImage(encoded_image_, &codec_specific, |
| 389 &frag_header); | 374 &frag_header); |
| 390 | 375 |
| 391 // Parse and report QP. | 376 // Parse and report QP. |
| 392 h264_bitstream_parser_.ParseBitstream(encoded_image_._buffer, | 377 h264_bitstream_parser_.ParseBitstream(encoded_image_._buffer, |
| 393 encoded_image_._length); | 378 encoded_image_._length); |
| 394 h264_bitstream_parser_.GetLastSliceQp(&encoded_image_.qp_); | 379 h264_bitstream_parser_.GetLastSliceQp(&encoded_image_.qp_); |
| 395 } | 380 } |
| 396 return WEBRTC_VIDEO_CODEC_OK; | 381 return WEBRTC_VIDEO_CODEC_OK; |
| 397 } | 382 } |
| (...skipping 44 matching lines...) |
| 442 encoder_params.iMultipleThreadIdc = NumberOfThreads( | 427 encoder_params.iMultipleThreadIdc = NumberOfThreads( |
| 443 encoder_params.iPicWidth, encoder_params.iPicHeight, number_of_cores_); | 428 encoder_params.iPicWidth, encoder_params.iPicHeight, number_of_cores_); |
| 444 // The base spatial layer 0 is the only one we use. | 429 // The base spatial layer 0 is the only one we use. |
| 445 encoder_params.sSpatialLayers[0].iVideoWidth = encoder_params.iPicWidth; | 430 encoder_params.sSpatialLayers[0].iVideoWidth = encoder_params.iPicWidth; |
| 446 encoder_params.sSpatialLayers[0].iVideoHeight = encoder_params.iPicHeight; | 431 encoder_params.sSpatialLayers[0].iVideoHeight = encoder_params.iPicHeight; |
| 447 encoder_params.sSpatialLayers[0].fFrameRate = encoder_params.fMaxFrameRate; | 432 encoder_params.sSpatialLayers[0].fFrameRate = encoder_params.fMaxFrameRate; |
| 448 encoder_params.sSpatialLayers[0].iSpatialBitrate = | 433 encoder_params.sSpatialLayers[0].iSpatialBitrate = |
| 449 encoder_params.iTargetBitrate; | 434 encoder_params.iTargetBitrate; |
| 450 encoder_params.sSpatialLayers[0].iMaxSpatialBitrate = | 435 encoder_params.sSpatialLayers[0].iMaxSpatialBitrate = |
| 451 encoder_params.iMaxBitrate; | 436 encoder_params.iMaxBitrate; |
| 452 LOG(INFO) << "OpenH264 version is " << OPENH264_MAJOR << "." | |
| 453 << OPENH264_MINOR; | |
| 454 switch (packetization_mode_) { | |
| 455 case H264PacketizationMode::SingleNalUnit: | |
| 456 // Limit the size of the packets produced. | |
| 457 #if (OPENH264_MAJOR == 1) && (OPENH264_MINOR <= 5) | 437 #if (OPENH264_MAJOR == 1) && (OPENH264_MINOR <= 5) |
| 458 encoder_params.sSpatialLayers[0].sSliceCfg.uiSliceMode = SM_DYN_SLICE; | 438 // Slice num according to number of threads. |
| 459 // The slice size is max payload size - room for a NAL header. | 439 encoder_params.sSpatialLayers[0].sSliceCfg.uiSliceMode = SM_AUTO_SLICE; |
| 460 // The constant 50 is NAL_HEADER_ADD_0X30BYTES in openh264 source, | |
| 461 // but is not exported. | |
| 462 const size_t kNalHeaderSizeAllocation = 50; | |
| 463 encoder_params.sSpatialLayers[0] | |
| 464 .sSliceCfg.sSliceArgument.uiSliceSizeConstraint = | |
| 465 static_cast<unsigned int>(max_payload_size_ - | |
| 466 kNalHeaderSizeAllocation); | |
| 467 encoder_params.uiMaxNalSize = | |
| 468 static_cast<unsigned int>(max_payload_size_); | |
| 469 #else | 440 #else |
| 470 encoder_params.sSpatialLayers[0].sSliceArgument.uiSliceNum = 1; | 441 // When uiSliceMode = SM_FIXEDSLCNUM_SLICE, uiSliceNum = 0 means auto design |
| 471 encoder_params.sSpatialLayers[0].sSliceArgument.uiSliceMode = | 442 // it with cpu core number. |
| 472 SM_SIZELIMITED_SLICE; | 443 // TODO(sprang): Set to 0 when we understand why the rate controller borks |
| 473 encoder_params.sSpatialLayers[0].sSliceArgument.uiSliceSizeConstraint = | 444 // when uiSliceNum > 1. |
| 474 static_cast<unsigned int>(max_payload_size_); | 445 encoder_params.sSpatialLayers[0].sSliceArgument.uiSliceNum = 1; |
| | 446 encoder_params.sSpatialLayers[0].sSliceArgument.uiSliceMode = |
| | 447 SM_FIXEDSLCNUM_SLICE; |
| 475 #endif | 448 #endif |
| 476 break; | 449 |
| 477 case H264PacketizationMode::NonInterleaved: | |
| 478 #if (OPENH264_MAJOR == 1) && (OPENH264_MINOR <= 5) | |
| 479 // Slice num according to number of threads. | |
| 480 encoder_params.sSpatialLayers[0].sSliceCfg.uiSliceMode = SM_AUTO_SLICE; | |
| 481 #else | |
| 482 // When uiSliceMode = SM_FIXEDSLCNUM_SLICE, uiSliceNum = 0 means auto | |
| 483 // design it with cpu core number. | |
| 484 // TODO(sprang): Set to 0 when we understand why the rate controller borks | |
| 485 // when uiSliceNum > 1. | |
| 486 encoder_params.sSpatialLayers[0].sSliceArgument.uiSliceNum = 1; | |
| 487 encoder_params.sSpatialLayers[0].sSliceArgument.uiSliceMode = | |
| 488 SM_FIXEDSLCNUM_SLICE; | |
| 489 #endif | |
| 490 break; | |
| 491 } | |
| 492 return encoder_params; | 450 return encoder_params; |
| 493 } | 451 } |
| 494 | 452 |
| 495 void H264EncoderImpl::ReportInit() { | 453 void H264EncoderImpl::ReportInit() { |
| 496 if (has_reported_init_) | 454 if (has_reported_init_) |
| 497 return; | 455 return; |
| 498 RTC_HISTOGRAM_ENUMERATION("WebRTC.Video.H264EncoderImpl.Event", | 456 RTC_HISTOGRAM_ENUMERATION("WebRTC.Video.H264EncoderImpl.Event", |
| 499 kH264EncoderEventInit, | 457 kH264EncoderEventInit, |
| 500 kH264EncoderEventMax); | 458 kH264EncoderEventMax); |
| 501 has_reported_init_ = true; | 459 has_reported_init_ = true; |
| (...skipping 15 matching lines...) |
| 517 | 475 |
| 518 int32_t H264EncoderImpl::SetPeriodicKeyFrames(bool enable) { | 476 int32_t H264EncoderImpl::SetPeriodicKeyFrames(bool enable) { |
| 519 return WEBRTC_VIDEO_CODEC_OK; | 477 return WEBRTC_VIDEO_CODEC_OK; |
| 520 } | 478 } |
| 521 | 479 |
| 522 VideoEncoder::ScalingSettings H264EncoderImpl::GetScalingSettings() const { | 480 VideoEncoder::ScalingSettings H264EncoderImpl::GetScalingSettings() const { |
| 523 return VideoEncoder::ScalingSettings(true); | 481 return VideoEncoder::ScalingSettings(true); |
| 524 } | 482 } |
| 525 | 483 |
| 526 } // namespace webrtc | 484 } // namespace webrtc |
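For reference, a minimal usage sketch of H264EncoderImpl after this change: the constructor no longer takes a cricket::VideoCodec, and InitEncode() ignores its max_payload_size argument. Field and method names are taken from the diff above; the helper function name, resolution, bitrates, and core count are illustrative assumptions only.

```cpp
// Minimal sketch, assuming WebRTC's VideoCodec fields as referenced in this
// diff. The function name and the concrete values are placeholders, not
// recommended settings.
#include "webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.h"

int32_t InitH264EncoderSketch() {
  webrtc::VideoCodec codec_settings;
  codec_settings.codecType = webrtc::kVideoCodecH264;
  codec_settings.width = 1280;
  codec_settings.height = 720;
  codec_settings.maxFramerate = 30;    // Must be non-zero; InitEncode() rejects 0.
  codec_settings.maxBitrate = 2500;    // kbps; converted to bits/second internally.
  codec_settings.startBitrate = 1000;  // kbps; used while targetBitrate == 0.
  codec_settings.targetBitrate = 0;
  // H.264-specific settings (frame dropping, key frame interval) are read
  // from codec_settings.H264() by InitEncode().

  // The constructor is now parameterless; packetization mode and
  // max_payload_size are no longer stored by the encoder.
  webrtc::H264EncoderImpl encoder;
  return encoder.InitEncode(&codec_settings, /*number_of_cores=*/4,
                            /*max_payload_size=*/1200);
}
```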