OLD | NEW |
---|---|
1 /* | 1 /* |
2 * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 * | 9 * |
10 */ | 10 */ |
(...skipping 223 matching lines...) | |
234 SEncParamExt encoder_params = CreateEncoderParams(); | 234 SEncParamExt encoder_params = CreateEncoderParams(); |
235 | 235 |
236 // Initialize. | 236 // Initialize. |
237 if (openh264_encoder_->InitializeExt(&encoder_params) != 0) { | 237 if (openh264_encoder_->InitializeExt(&encoder_params) != 0) { |
238 LOG(LS_ERROR) << "Failed to initialize OpenH264 encoder"; | 238 LOG(LS_ERROR) << "Failed to initialize OpenH264 encoder"; |
239 Release(); | 239 Release(); |
240 ReportError(); | 240 ReportError(); |
241 return WEBRTC_VIDEO_CODEC_ERROR; | 241 return WEBRTC_VIDEO_CODEC_ERROR; |
242 } | 242 } |
243 // TODO(pbos): Base init params on these values before submitting. | 243 // TODO(pbos): Base init params on these values before submitting. |
244 quality_scaler_.Init(codec_settings->codecType, codec_settings->startBitrate, | |
245 codec_settings->width, codec_settings->height, | |
246 codec_settings->maxFramerate); | |
247 int video_format = EVideoFormatType::videoFormatI420; | 244 int video_format = EVideoFormatType::videoFormatI420; |
248 openh264_encoder_->SetOption(ENCODER_OPTION_DATAFORMAT, | 245 openh264_encoder_->SetOption(ENCODER_OPTION_DATAFORMAT, |
249 &video_format); | 246 &video_format); |
250 | 247 |
251 // Initialize encoded image. Default buffer size: size of unencoded data. | 248 // Initialize encoded image. Default buffer size: size of unencoded data. |
252 encoded_image_._size = | 249 encoded_image_._size = |
253 CalcBufferSize(kI420, codec_settings->width, codec_settings->height); | 250 CalcBufferSize(kI420, codec_settings->width, codec_settings->height); |
254 encoded_image_._buffer = new uint8_t[encoded_image_._size]; | 251 encoded_image_._buffer = new uint8_t[encoded_image_._size]; |
255 encoded_image_buffer_.reset(encoded_image_._buffer); | 252 encoded_image_buffer_.reset(encoded_image_._buffer); |
256 encoded_image_._completeFrame = true; | 253 encoded_image_._completeFrame = true; |
(...skipping 19 matching lines...) | |
276 encoded_image_callback_ = callback; | 273 encoded_image_callback_ = callback; |
277 return WEBRTC_VIDEO_CODEC_OK; | 274 return WEBRTC_VIDEO_CODEC_OK; |
278 } | 275 } |
279 | 276 |
280 int32_t H264EncoderImpl::SetRates(uint32_t bitrate, uint32_t framerate) { | 277 int32_t H264EncoderImpl::SetRates(uint32_t bitrate, uint32_t framerate) { |
281 if (bitrate <= 0 || framerate <= 0) { | 278 if (bitrate <= 0 || framerate <= 0) { |
282 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 279 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
283 } | 280 } |
284 target_bps_ = bitrate * 1000; | 281 target_bps_ = bitrate * 1000; |
285 max_frame_rate_ = static_cast<float>(framerate); | 282 max_frame_rate_ = static_cast<float>(framerate); |
286 quality_scaler_.ReportFramerate(framerate); | |
287 | 283 |
288 SBitrateInfo target_bitrate; | 284 SBitrateInfo target_bitrate; |
289 memset(&target_bitrate, 0, sizeof(SBitrateInfo)); | 285 memset(&target_bitrate, 0, sizeof(SBitrateInfo)); |
290 target_bitrate.iLayer = SPATIAL_LAYER_ALL, | 286 target_bitrate.iLayer = SPATIAL_LAYER_ALL, |
291 target_bitrate.iBitrate = target_bps_; | 287 target_bitrate.iBitrate = target_bps_; |
292 openh264_encoder_->SetOption(ENCODER_OPTION_BITRATE, | 288 openh264_encoder_->SetOption(ENCODER_OPTION_BITRATE, |
293 &target_bitrate); | 289 &target_bitrate); |
294 openh264_encoder_->SetOption(ENCODER_OPTION_FRAME_RATE, &max_frame_rate_); | 290 openh264_encoder_->SetOption(ENCODER_OPTION_FRAME_RATE, &max_frame_rate_); |
295 return WEBRTC_VIDEO_CODEC_OK; | 291 return WEBRTC_VIDEO_CODEC_OK; |
296 } | 292 } |
297 | 293 |
298 int32_t H264EncoderImpl::Encode(const VideoFrame& input_frame, | 294 int32_t H264EncoderImpl::Encode(const VideoFrame& input_frame, |
299 const CodecSpecificInfo* codec_specific_info, | 295 const CodecSpecificInfo* codec_specific_info, |
300 const std::vector<FrameType>* frame_types) { | 296 const std::vector<FrameType>* frame_types) { |
301 if (!IsInitialized()) { | 297 if (!IsInitialized()) { |
302 ReportError(); | 298 ReportError(); |
303 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 299 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
304 } | 300 } |
305 if (input_frame.IsZeroSize()) { | 301 if (input_frame.IsZeroSize()) { |
306 ReportError(); | 302 ReportError(); |
307 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 303 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
308 } | 304 } |
309 if (!encoded_image_callback_) { | 305 if (!encoded_image_callback_) { |
310 LOG(LS_WARNING) << "InitEncode() has been called, but a callback function " | 306 LOG(LS_WARNING) << "InitEncode() has been called, but a callback function " |
311 << "has not been set with RegisterEncodeCompleteCallback()"; | 307 << "has not been set with RegisterEncodeCompleteCallback()"; |
312 ReportError(); | 308 ReportError(); |
313 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 309 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
314 } | 310 } |
315 | 311 |
316 quality_scaler_.OnEncodeFrame(input_frame.width(), input_frame.height()); | |
317 rtc::scoped_refptr<const VideoFrameBuffer> frame_buffer = | |
318 quality_scaler_.GetScaledBuffer(input_frame.video_frame_buffer()); | |
319 if (frame_buffer->width() != width_ || frame_buffer->height() != height_) { | |
320 LOG(LS_INFO) << "Encoder reinitialized from " << width_ << "x" << height_ | |
321 << " to " << frame_buffer->width() << "x" | |
322 << frame_buffer->height(); | |
323 width_ = frame_buffer->width(); | |
324 height_ = frame_buffer->height(); | |
325 SEncParamExt encoder_params = CreateEncoderParams(); | |
326 openh264_encoder_->SetOption(ENCODER_OPTION_SVC_ENCODE_PARAM_EXT, | |
327 &encoder_params); | |
328 } | |
329 | |
330 bool force_key_frame = false; | 312 bool force_key_frame = false; |
331 if (frame_types != nullptr) { | 313 if (frame_types != nullptr) { |
332 // We only support a single stream. | 314 // We only support a single stream. |
333 RTC_DCHECK_EQ(frame_types->size(), static_cast<size_t>(1)); | 315 RTC_DCHECK_EQ(frame_types->size(), static_cast<size_t>(1)); |
334 // Skip frame? | 316 // Skip frame? |
335 if ((*frame_types)[0] == kEmptyFrame) { | 317 if ((*frame_types)[0] == kEmptyFrame) { |
336 return WEBRTC_VIDEO_CODEC_OK; | 318 return WEBRTC_VIDEO_CODEC_OK; |
337 } | 319 } |
338 // Force key frame? | 320 // Force key frame? |
339 force_key_frame = (*frame_types)[0] == kVideoFrameKey; | 321 force_key_frame = (*frame_types)[0] == kVideoFrameKey; |
340 } | 322 } |
341 if (force_key_frame) { | 323 if (force_key_frame) { |
342 // API doc says ForceIntraFrame(false) does nothing, but calling this | 324 // API doc says ForceIntraFrame(false) does nothing, but calling this |
343 // function forces a key frame regardless of the |bIDR| argument's value. | 325 // function forces a key frame regardless of the |bIDR| argument's value. |
344 // (If every frame is a key frame we get lag/delays.) | 326 // (If every frame is a key frame we get lag/delays.) |
345 openh264_encoder_->ForceIntraFrame(true); | 327 openh264_encoder_->ForceIntraFrame(true); |
346 } | 328 } |
347 | 329 rtc::scoped_refptr<const VideoFrameBuffer> frame_buffer = |
 | 330 input_frame.video_frame_buffer(); |
348 // EncodeFrame input. | 331 // EncodeFrame input. |
349 SSourcePicture picture; | 332 SSourcePicture picture; |
350 memset(&picture, 0, sizeof(SSourcePicture)); | 333 memset(&picture, 0, sizeof(SSourcePicture)); |
351 picture.iPicWidth = frame_buffer->width(); | 334 picture.iPicWidth = frame_buffer->width(); |
352 picture.iPicHeight = frame_buffer->height(); | 335 picture.iPicHeight = frame_buffer->height(); |
353 picture.iColorFormat = EVideoFormatType::videoFormatI420; | 336 picture.iColorFormat = EVideoFormatType::videoFormatI420; |
354 picture.uiTimeStamp = input_frame.ntp_time_ms(); | 337 picture.uiTimeStamp = input_frame.ntp_time_ms(); |
355 picture.iStride[0] = frame_buffer->StrideY(); | 338 picture.iStride[0] = frame_buffer->StrideY(); |
356 picture.iStride[1] = frame_buffer->StrideU(); | 339 picture.iStride[1] = frame_buffer->StrideU(); |
357 picture.iStride[2] = frame_buffer->StrideV(); | 340 picture.iStride[2] = frame_buffer->StrideV(); |
(...skipping 33 matching lines...) | |
391 // Deliver encoded image. | 374 // Deliver encoded image. |
392 CodecSpecificInfo codec_specific; | 375 CodecSpecificInfo codec_specific; |
393 codec_specific.codecType = kVideoCodecH264; | 376 codec_specific.codecType = kVideoCodecH264; |
394 codec_specific.codecSpecific.H264.packetization_mode = packetization_mode_; | 377 codec_specific.codecSpecific.H264.packetization_mode = packetization_mode_; |
395 encoded_image_callback_->OnEncodedImage(encoded_image_, &codec_specific, | 378 encoded_image_callback_->OnEncodedImage(encoded_image_, &codec_specific, |
396 &frag_header); | 379 &frag_header); |
397 | 380 |
398 // Parse and report QP. | 381 // Parse and report QP. |
399 h264_bitstream_parser_.ParseBitstream(encoded_image_._buffer, | 382 h264_bitstream_parser_.ParseBitstream(encoded_image_._buffer, |
400 encoded_image_._length); | 383 encoded_image_._length); |
401 int qp = -1; | 384 h264_bitstream_parser_.GetLastSliceQp(&encoded_image_.qp_); |
402 if (h264_bitstream_parser_.GetLastSliceQp(&qp)) | |
403 quality_scaler_.ReportQP(qp); | |
404 } else { | |
405 quality_scaler_.ReportDroppedFrame(); | |
406 } | 385 } |
407 return WEBRTC_VIDEO_CODEC_OK; | 386 return WEBRTC_VIDEO_CODEC_OK; |
408 } | 387 } |
409 | 388 |
410 const char* H264EncoderImpl::ImplementationName() const { | 389 const char* H264EncoderImpl::ImplementationName() const { |
411 return "OpenH264"; | 390 return "OpenH264"; |
412 } | 391 } |
413 | 392 |
414 bool H264EncoderImpl::IsInitialized() const { | 393 bool H264EncoderImpl::IsInitialized() const { |
415 return openh264_encoder_ != nullptr; | 394 return openh264_encoder_ != nullptr; |
(...skipping 80 matching lines...) | |
496 // design it with cpu core number. | 475 // design it with cpu core number. |
497 // TODO(sprang): Set to 0 when we understand why the rate controller borks | 476 // TODO(sprang): Set to 0 when we understand why the rate controller borks |
498 // when uiSliceNum > 1. | 477 // when uiSliceNum > 1. |
499 encoder_params.sSpatialLayers[0].sSliceArgument.uiSliceNum = 1; | 478 encoder_params.sSpatialLayers[0].sSliceArgument.uiSliceNum = 1; |
500 encoder_params.sSpatialLayers[0].sSliceArgument.uiSliceMode = | 479 encoder_params.sSpatialLayers[0].sSliceArgument.uiSliceMode = |
501 SM_FIXEDSLCNUM_SLICE; | 480 SM_FIXEDSLCNUM_SLICE; |
502 #endif | 481 #endif |
503 break; | 482 break; |
504 default: | 483 default: |
505 RTC_NOTREACHED() << "Illegal packetization mode specified"; | 484 RTC_NOTREACHED() << "Illegal packetization mode specified"; |
506 } | 485 } |
sprang_webrtc 2016/11/16 16:25:50
This is an artifact of a rebase, right?
507 return encoder_params; | 486 return encoder_params; |
508 } | 487 } |
509 | 488 |
510 void H264EncoderImpl::ReportInit() { | 489 void H264EncoderImpl::ReportInit() { |
511 if (has_reported_init_) | 490 if (has_reported_init_) |
512 return; | 491 return; |
513 RTC_HISTOGRAM_ENUMERATION("WebRTC.Video.H264EncoderImpl.Event", | 492 RTC_HISTOGRAM_ENUMERATION("WebRTC.Video.H264EncoderImpl.Event", |
514 kH264EncoderEventInit, | 493 kH264EncoderEventInit, |
515 kH264EncoderEventMax); | 494 kH264EncoderEventMax); |
516 has_reported_init_ = true; | 495 has_reported_init_ = true; |
(...skipping 10 matching lines...) | |
527 | 506 |
528 int32_t H264EncoderImpl::SetChannelParameters( | 507 int32_t H264EncoderImpl::SetChannelParameters( |
529 uint32_t packet_loss, int64_t rtt) { | 508 uint32_t packet_loss, int64_t rtt) { |
530 return WEBRTC_VIDEO_CODEC_OK; | 509 return WEBRTC_VIDEO_CODEC_OK; |
531 } | 510 } |
532 | 511 |
533 int32_t H264EncoderImpl::SetPeriodicKeyFrames(bool enable) { | 512 int32_t H264EncoderImpl::SetPeriodicKeyFrames(bool enable) { |
534 return WEBRTC_VIDEO_CODEC_OK; | 513 return WEBRTC_VIDEO_CODEC_OK; |
535 } | 514 } |
536 | 515 |
537 void H264EncoderImpl::OnDroppedFrame() { | 516 QualityScaler::Settings H264EncoderImpl::GetQPThresholds() const { |
538 quality_scaler_.ReportDroppedFrame(); | 517 return QualityScaler::Settings(true); |
539 } | 518 } |
540 | 519 |
541 } // namespace webrtc | 520 } // namespace webrtc |