OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
(...skipping 130 matching lines...)
141 timestamp_(0), | 141 timestamp_(0), |
142 feedback_mode_(false), | 142 feedback_mode_(false), |
143 qp_max_(56), // Setting for max quantizer. | 143 qp_max_(56), // Setting for max quantizer. |
144 cpu_speed_default_(-6), | 144 cpu_speed_default_(-6), |
145 rc_max_intra_target_(0), | 145 rc_max_intra_target_(0), |
146 token_partitions_(VP8_ONE_TOKENPARTITION), | 146 token_partitions_(VP8_ONE_TOKENPARTITION), |
147 down_scale_requested_(false), | 147 down_scale_requested_(false), |
148 down_scale_bitrate_(0), | 148 down_scale_bitrate_(0), |
149 tl0_frame_dropper_(), | 149 tl0_frame_dropper_(), |
150 tl1_frame_dropper_(kTl1MaxTimeToDropFrames), | 150 tl1_frame_dropper_(kTl1MaxTimeToDropFrames), |
151 key_frame_request_(kMaxSimulcastStreams, false) { | 151 key_frame_request_(kMaxSimulcastStreams, false), |
| 152 quality_scaler_enabled_(false) { |
152 uint32_t seed = static_cast<uint32_t>(TickTime::MillisecondTimestamp()); | 153 uint32_t seed = static_cast<uint32_t>(TickTime::MillisecondTimestamp()); |
153 srand(seed); | 154 srand(seed); |
154 | 155 |
155 picture_id_.reserve(kMaxSimulcastStreams); | 156 picture_id_.reserve(kMaxSimulcastStreams); |
156 last_key_frame_picture_id_.reserve(kMaxSimulcastStreams); | 157 last_key_frame_picture_id_.reserve(kMaxSimulcastStreams); |
157 temporal_layers_.reserve(kMaxSimulcastStreams); | 158 temporal_layers_.reserve(kMaxSimulcastStreams); |
158 raw_images_.reserve(kMaxSimulcastStreams); | 159 raw_images_.reserve(kMaxSimulcastStreams); |
159 encoded_images_.reserve(kMaxSimulcastStreams); | 160 encoded_images_.reserve(kMaxSimulcastStreams); |
160 send_stream_.reserve(kMaxSimulcastStreams); | 161 send_stream_.reserve(kMaxSimulcastStreams); |
161 cpu_speed_.assign(kMaxSimulcastStreams, -6); // Set default to -6. | 162 cpu_speed_.assign(kMaxSimulcastStreams, -6); // Set default to -6. |
(...skipping 417 matching lines...)
579 } | 580 } |
580 | 581 |
581 rps_.Init(); | 582 rps_.Init(); |
582 // Disable both high-QP limits and framedropping. Both are handled by libvpx | 583 // Disable both high-QP limits and framedropping. Both are handled by libvpx |
583 // internally. | 584 // internally. |
584 const int kDisabledBadQpThreshold = 64; | 585 const int kDisabledBadQpThreshold = 64; |
585 quality_scaler_.Init(codec_.qpMax / QualityScaler::kDefaultLowQpDenominator, | 586 quality_scaler_.Init(codec_.qpMax / QualityScaler::kDefaultLowQpDenominator, |
586 kDisabledBadQpThreshold, false); | 587 kDisabledBadQpThreshold, false); |
587 quality_scaler_.ReportFramerate(codec_.maxFramerate); | 588 quality_scaler_.ReportFramerate(codec_.maxFramerate); |
588 | 589 |
| 590 // Only apply scaling to improve quality for single-layer streams. The scaling |
| 591 // metrics use frame drops as a signal and are only applicable when we drop frames. |
| 592 quality_scaler_enabled_ = encoders_.size() == 1 && |
| 593 configurations_[0].rc_dropframe_thresh > 0 && |
| 594 codec_.codecSpecific.VP8.automaticResizeOn; |
| 595 |
589 return InitAndSetControlSettings(); | 596 return InitAndSetControlSettings(); |
590 } | 597 } |
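The condition that used to be evaluated on every Encode() call (see the removed block further down) is now computed once here and cached in the new quality_scaler_enabled_ member. A minimal sketch of that decision factored into a standalone helper, assuming the same libvpx config and VideoCodec fields used above; the helper itself is hypothetical and not part of this patch:

    // Hypothetical helper mirroring the quality_scaler_enabled_ assignment:
    // quality scaling only makes sense for a single encoder (no simulcast),
    // when libvpx is allowed to drop frames, and when VP8 automatic resize
    // is requested in the codec settings.
    static bool ShouldEnableQualityScaler(size_t num_encoders,
                                          const vpx_codec_enc_cfg_t& config,
                                          const VideoCodec& codec) {
      return num_encoders == 1 &&
             config.rc_dropframe_thresh > 0 &&
             codec.codecSpecific.VP8.automaticResizeOn;
    }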
591 | 598 |
592 int VP8EncoderImpl::SetCpuSpeed(int width, int height) { | 599 int VP8EncoderImpl::SetCpuSpeed(int width, int height) { |
593 #if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) | 600 #if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) |
594 // On mobile platforms, always set to -12 to balance cpu usage | 601 // On mobile platforms, always set to -12 to balance cpu usage |
595 // and video quality. | 602 // and video quality. |
596 return -12; | 603 return -12; |
597 #else | 604 #else |
598 // For non-ARM, increase encoding complexity (i.e., use lower speed setting) | 605 // For non-ARM, increase encoding complexity (i.e., use lower speed setting) |
(...skipping 103 matching lines...)
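For context on how a value like the -12 above takes effect: the speed chosen by SetCpuSpeed() is handed to libvpx through the standard VP8E_SET_CPUUSED control (the call site is not shown in this diff). A minimal sketch, with |encoder| and |cpu_speed| as placeholder names:

    #include "vpx/vpx_encoder.h"
    #include "vpx/vp8cx.h"

    // Sketch only: apply the chosen speed setting to a libvpx encoder
    // context. Larger negative magnitudes (e.g. -12 on ARM) favor lower CPU
    // usage; values closer to zero increase encoding complexity and quality.
    void ApplyCpuSpeed(vpx_codec_ctx_t* encoder, int cpu_speed) {
      vpx_codec_control(encoder, VP8E_SET_CPUUSED, cpu_speed);
    }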
702 const std::vector<VideoFrameType>* frame_types) { | 709 const std::vector<VideoFrameType>* frame_types) { |
703 TRACE_EVENT1("webrtc", "VP8::Encode", "timestamp", frame.timestamp()); | 710 TRACE_EVENT1("webrtc", "VP8::Encode", "timestamp", frame.timestamp()); |
704 | 711 |
705 if (!inited_) | 712 if (!inited_) |
706 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 713 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
707 if (frame.IsZeroSize()) | 714 if (frame.IsZeroSize()) |
708 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 715 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
709 if (encoded_complete_callback_ == NULL) | 716 if (encoded_complete_callback_ == NULL) |
710 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 717 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
711 | 718 |
712 // Only apply scaling to improve for single-layer streams. The scaling metrics | 719 if (quality_scaler_enabled_) |
713 // use frame drops as a signal and is only applicable when we drop frames. | |
714 const bool use_quality_scaler = encoders_.size() == 1 && | |
715 configurations_[0].rc_dropframe_thresh > 0 && | |
716 codec_.codecSpecific.VP8.automaticResizeOn; | |
717 if (use_quality_scaler) | |
718 quality_scaler_.OnEncodeFrame(frame); | 720 quality_scaler_.OnEncodeFrame(frame); |
719 const VideoFrame& input_image = | 721 const VideoFrame& input_image = |
720 use_quality_scaler ? quality_scaler_.GetScaledFrame(frame) : frame; | 722 quality_scaler_enabled_ ? quality_scaler_.GetScaledFrame(frame) : frame; |
721 | 723 |
722 if (use_quality_scaler && (input_image.width() != codec_.width || | 724 if (quality_scaler_enabled_ && (input_image.width() != codec_.width || |
723 input_image.height() != codec_.height)) { | 725 input_image.height() != codec_.height)) { |
724 int ret = UpdateCodecFrameSize(input_image); | 726 int ret = UpdateCodecFrameSize(input_image); |
725 if (ret < 0) | 727 if (ret < 0) |
726 return ret; | 728 return ret; |
727 } | 729 } |
728 | 730 |
729 // Since we are extracting raw pointers from |input_image| to | 731 // Since we are extracting raw pointers from |input_image| to |
730 // |raw_images_[0]|, the resolution of these frames must match. Note that | 732 // |raw_images_[0]|, the resolution of these frames must match. Note that |
731 // |input_image| might be scaled from |frame|. In that case, the resolution of | 733 // |input_image| might be scaled from |frame|. In that case, the resolution of |
732 // |raw_images_[0]| should have been updated in UpdateCodecFrameSize. | 734 // |raw_images_[0]| should have been updated in UpdateCodecFrameSize. |
(...skipping 273 matching lines...)
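To make the role of the cached flag clearer: when quality_scaler_enabled_ is set, each frame passes through the scaler before encoding, and the encode result is fed back so the scaler can adapt. A hedged sketch of that loop, assuming the QualityScaler methods used in this patch (the ReportQP/ReportDroppedFrame calls happen after encoding, in code partly visible further down); EncodeWithLibvpx and GetLastQuantizer are hypothetical stand-ins:

    // Sketch (assumed context): feedback around one encode call when the
    // quality scaler is enabled.
    void EncodeWithQualityScaler(webrtc::QualityScaler* scaler,
                                 const webrtc::VideoFrame& frame) {
      scaler->OnEncodeFrame(frame);                     // track input size
      const webrtc::VideoFrame& input = scaler->GetScaledFrame(frame);
      size_t encoded_length = EncodeWithLibvpx(input);  // hypothetical encode
      if (encoded_length > 0) {
        scaler->ReportQP(GetLastQuantizer());  // sustained low QP allows upscaling
      } else {
        scaler->ReportDroppedFrame();          // frame drops push toward downscaling
      }
    }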
1006 encoded_images_[encoder_idx]._length, | 1008 encoded_images_[encoder_idx]._length, |
1007 encoded_images_[encoder_idx]._timeStamp, qp); | 1009 encoded_images_[encoder_idx]._timeStamp, qp); |
1008 if (send_stream_[stream_idx]) { | 1010 if (send_stream_[stream_idx]) { |
1009 if (encoded_images_[encoder_idx]._length > 0) { | 1011 if (encoded_images_[encoder_idx]._length > 0) { |
1010 TRACE_COUNTER_ID1("webrtc", "EncodedFrameSize", encoder_idx, | 1012 TRACE_COUNTER_ID1("webrtc", "EncodedFrameSize", encoder_idx, |
1011 encoded_images_[encoder_idx]._length); | 1013 encoded_images_[encoder_idx]._length); |
1012 encoded_images_[encoder_idx]._encodedHeight = | 1014 encoded_images_[encoder_idx]._encodedHeight = |
1013 codec_.simulcastStream[stream_idx].height; | 1015 codec_.simulcastStream[stream_idx].height; |
1014 encoded_images_[encoder_idx]._encodedWidth = | 1016 encoded_images_[encoder_idx]._encodedWidth = |
1015 codec_.simulcastStream[stream_idx].width; | 1017 codec_.simulcastStream[stream_idx].width; |
| 1018 encoded_images_[encoder_idx] |
| 1019 .adapt_reason_.quality_resolution_downscales = |
| 1020 quality_scaler_enabled_ ? quality_scaler_.downscale_shift() : -1; |
1016 encoded_complete_callback_->Encoded(encoded_images_[encoder_idx], | 1021 encoded_complete_callback_->Encoded(encoded_images_[encoder_idx], |
1017 &codec_specific, &frag_info); | 1022 &codec_specific, &frag_info); |
1018 } else if (codec_.mode == kScreensharing) { | 1023 } else if (codec_.mode == kScreensharing) { |
1019 result = WEBRTC_VIDEO_CODEC_TARGET_BITRATE_OVERSHOOT; | 1024 result = WEBRTC_VIDEO_CODEC_TARGET_BITRATE_OVERSHOOT; |
1020 } | 1025 } |
1021 } | 1026 } |
1022 } | 1027 } |
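The new adapt_reason_.quality_resolution_downscales field reports how many downscale steps the quality scaler is currently applying, or -1 when the scaler is disabled. Assuming downscale_shift() counts power-of-two steps that halve each dimension, a consumer of this stat could interpret the value roughly as follows; the surrounding variable names are illustrative, not part of the patch:

    int downscales =
        encoded_image.adapt_reason_.quality_resolution_downscales;
    if (downscales >= 0) {
      // Quality scaling active; 0 means the frame is encoded at full size.
      // E.g. a 1280x720 source with downscales == 2 is encoded at 320x180.
      int encode_width = source_width >> downscales;
      int encode_height = source_height >> downscales;
    } else {
      // -1: quality-based resolution adaptation is not in use for this stream.
    }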
1023 if (encoders_.size() == 1 && send_stream_[0]) { | 1028 if (encoders_.size() == 1 && send_stream_[0]) { |
1024 if (encoded_images_[0]._length > 0) { | 1029 if (encoded_images_[0]._length > 0) { |
1025 int qp; | 1030 int qp; |
(...skipping 361 matching lines...)
1387 return -1; | 1392 return -1; |
1388 } | 1393 } |
1389 if (vpx_codec_control(copy->decoder_, VP8_SET_REFERENCE, ref_frame_) | 1394 if (vpx_codec_control(copy->decoder_, VP8_SET_REFERENCE, ref_frame_) |
1390 != VPX_CODEC_OK) { | 1395 != VPX_CODEC_OK) { |
1391 return -1; | 1396 return -1; |
1392 } | 1397 } |
1393 return 0; | 1398 return 0; |
1394 } | 1399 } |
1395 | 1400 |
1396 } // namespace webrtc | 1401 } // namespace webrtc |