OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 * | 9 * |
10 */ | 10 */ |
(...skipping 356 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
367 encoded_image_._frameType = ConvertToVideoFrameType(info.eFrameType); | 367 encoded_image_._frameType = ConvertToVideoFrameType(info.eFrameType); |
368 | 368 |
369 // Split encoded image up into fragments. This also updates |encoded_image_|. | 369 // Split encoded image up into fragments. This also updates |encoded_image_|. |
370 RTPFragmentationHeader frag_header; | 370 RTPFragmentationHeader frag_header; |
371 RtpFragmentize(&encoded_image_, &encoded_image_buffer_, *frame_buffer, &info, | 371 RtpFragmentize(&encoded_image_, &encoded_image_buffer_, *frame_buffer, &info, |
372 &frag_header); | 372 &frag_header); |
373 | 373 |
374 // Encoder can skip frames to save bandwidth in which case | 374 // Encoder can skip frames to save bandwidth in which case |
375 // |encoded_image_._length| == 0. | 375 // |encoded_image_._length| == 0. |
376 if (encoded_image_._length > 0) { | 376 if (encoded_image_._length > 0) { |
| 377 // Parse and report QP. |
| 378 h264_bitstream_parser_.ParseBitstream(encoded_image_._buffer, |
| 379 encoded_image_._length); |
| 380 int qp = -1; |
| 381 if (h264_bitstream_parser_.GetLastSliceQp(&qp)) { |
| 382 quality_scaler_.ReportQP(qp); |
| 383 encoded_image_.qp_ = qp; |
| 384 } |
| 385 |
377 // Deliver encoded image. | 386 // Deliver encoded image. |
378 CodecSpecificInfo codec_specific; | 387 CodecSpecificInfo codec_specific; |
379 codec_specific.codecType = kVideoCodecH264; | 388 codec_specific.codecType = kVideoCodecH264; |
380 encoded_image_callback_->OnEncodedImage(encoded_image_, &codec_specific, | 389 encoded_image_callback_->OnEncodedImage(encoded_image_, &codec_specific, |
381 &frag_header); | 390 &frag_header); |
382 | |
383 // Parse and report QP. | |
384 h264_bitstream_parser_.ParseBitstream(encoded_image_._buffer, | |
385 encoded_image_._length); | |
386 int qp = -1; | |
387 if (h264_bitstream_parser_.GetLastSliceQp(&qp)) | |
388 quality_scaler_.ReportQP(qp); | |
389 } else { | 391 } else { |
390 quality_scaler_.ReportDroppedFrame(); | 392 quality_scaler_.ReportDroppedFrame(); |
391 } | 393 } |
392 return WEBRTC_VIDEO_CODEC_OK; | 394 return WEBRTC_VIDEO_CODEC_OK; |
393 } | 395 } |
394 | 396 |
395 const char* H264EncoderImpl::ImplementationName() const { | 397 const char* H264EncoderImpl::ImplementationName() const { |
396 return "OpenH264"; | 398 return "OpenH264"; |
397 } | 399 } |
398 | 400 |
(...skipping 87 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
486 | 488 |
487 int32_t H264EncoderImpl::SetPeriodicKeyFrames(bool enable) { | 489 int32_t H264EncoderImpl::SetPeriodicKeyFrames(bool enable) { |
488 return WEBRTC_VIDEO_CODEC_OK; | 490 return WEBRTC_VIDEO_CODEC_OK; |
489 } | 491 } |
490 | 492 |
// Forwards an externally signaled frame drop to the quality scaler so its
// drop statistics stay in sync with frames this encoder never saw.
void H264EncoderImpl::OnDroppedFrame() {
  quality_scaler_.ReportDroppedFrame();
}
494 | 496 |
495 } // namespace webrtc | 497 } // namespace webrtc |
OLD | NEW |