OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
(...skipping 116 matching lines...)
127 } | 127 } |
128 | 128 |
129 VP8Decoder* VP8Decoder::Create() { | 129 VP8Decoder* VP8Decoder::Create() { |
130 return new VP8DecoderImpl(); | 130 return new VP8DecoderImpl(); |
131 } | 131 } |
132 | 132 |
133 VP8EncoderImpl::VP8EncoderImpl() | 133 VP8EncoderImpl::VP8EncoderImpl() |
134 : encoded_complete_callback_(nullptr), | 134 : encoded_complete_callback_(nullptr), |
135 inited_(false), | 135 inited_(false), |
136 timestamp_(0), | 136 timestamp_(0), |
137 feedback_mode_(false), | |
138 qp_max_(56), // Setting for max quantizer. | 137 qp_max_(56), // Setting for max quantizer. |
139 cpu_speed_default_(-6), | 138 cpu_speed_default_(-6), |
140 number_of_cores_(0), | 139 number_of_cores_(0), |
141 rc_max_intra_target_(0), | 140 rc_max_intra_target_(0), |
142 token_partitions_(VP8_ONE_TOKENPARTITION), | 141 token_partitions_(VP8_ONE_TOKENPARTITION), |
143 down_scale_requested_(false), | 142 down_scale_requested_(false), |
144 down_scale_bitrate_(0), | 143 down_scale_bitrate_(0), |
145 use_gf_boost_(webrtc::field_trial::IsEnabled(kVp8GfBoostFieldTrial)), | 144 use_gf_boost_(webrtc::field_trial::IsEnabled(kVp8GfBoostFieldTrial)), |
146 key_frame_request_(kMaxSimulcastStreams, false) { | 145 key_frame_request_(kMaxSimulcastStreams, false) { |
147 uint32_t seed = rtc::Time32(); | 146 uint32_t seed = rtc::Time32(); |
(...skipping 195 matching lines...)
343 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 342 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
344 } | 343 } |
345 | 344 |
346 int num_temporal_layers = | 345 int num_temporal_layers = |
347 doing_simulcast ? inst->simulcastStream[0].numberOfTemporalLayers | 346 doing_simulcast ? inst->simulcastStream[0].numberOfTemporalLayers |
348 : inst->VP8().numberOfTemporalLayers; | 347 : inst->VP8().numberOfTemporalLayers; |
349 RTC_DCHECK_GT(num_temporal_layers, 0); | 348 RTC_DCHECK_GT(num_temporal_layers, 0); |
350 | 349 |
351 SetupTemporalLayers(number_of_streams, num_temporal_layers, *inst); | 350 SetupTemporalLayers(number_of_streams, num_temporal_layers, *inst); |
352 | 351 |
353 feedback_mode_ = inst->VP8().feedbackModeOn; | |
354 | |
355 number_of_cores_ = number_of_cores; | 352 number_of_cores_ = number_of_cores; |
356 timestamp_ = 0; | 353 timestamp_ = 0; |
357 codec_ = *inst; | 354 codec_ = *inst; |
358 | 355 |
359 // Code expects simulcastStream resolutions to be correct, make sure they are | 356 // Code expects simulcastStream resolutions to be correct, make sure they are |
360 // filled even when there are no simulcast layers. | 357 // filled even when there are no simulcast layers. |
361 if (codec_.numberOfSimulcastStreams == 0) { | 358 if (codec_.numberOfSimulcastStreams == 0) { |
362 codec_.simulcastStream[0].width = codec_.width; | 359 codec_.simulcastStream[0].width = codec_.width; |
363 codec_.simulcastStream[0].height = codec_.height; | 360 codec_.simulcastStream[0].height = codec_.height; |
364 } | 361 } |
(...skipping 82 matching lines...)
447 configurations_[0].rc_max_quantizer = qp_max_; | 444 configurations_[0].rc_max_quantizer = qp_max_; |
448 configurations_[0].rc_undershoot_pct = 100; | 445 configurations_[0].rc_undershoot_pct = 100; |
449 configurations_[0].rc_overshoot_pct = 15; | 446 configurations_[0].rc_overshoot_pct = 15; |
450 configurations_[0].rc_buf_initial_sz = 500; | 447 configurations_[0].rc_buf_initial_sz = 500; |
451 configurations_[0].rc_buf_optimal_sz = 600; | 448 configurations_[0].rc_buf_optimal_sz = 600; |
452 configurations_[0].rc_buf_sz = 1000; | 449 configurations_[0].rc_buf_sz = 1000; |
453 | 450 |
454 // Set the maximum target size of any key-frame. | 451 // Set the maximum target size of any key-frame. |
455 rc_max_intra_target_ = MaxIntraTarget(configurations_[0].rc_buf_optimal_sz); | 452 rc_max_intra_target_ = MaxIntraTarget(configurations_[0].rc_buf_optimal_sz); |
456 | 453 |
457 if (feedback_mode_) { | 454 if (inst->VP8().keyFrameInterval > 0) { |
458 // Disable periodic key frames if we get feedback from the decoder | |
459 // through SLI and RPSI. | |
460 configurations_[0].kf_mode = VPX_KF_DISABLED; | |
461 } else if (inst->VP8().keyFrameInterval > 0) { | |
462 configurations_[0].kf_mode = VPX_KF_AUTO; | 455 configurations_[0].kf_mode = VPX_KF_AUTO; |
463 configurations_[0].kf_max_dist = inst->VP8().keyFrameInterval; | 456 configurations_[0].kf_max_dist = inst->VP8().keyFrameInterval; |
464 } else { | 457 } else { |
465 configurations_[0].kf_mode = VPX_KF_DISABLED; | 458 configurations_[0].kf_mode = VPX_KF_DISABLED; |
466 } | 459 } |
467 | 460 |
468 // Allow the user to set the complexity for the base stream. | 461 // Allow the user to set the complexity for the base stream. |
469 switch (inst->VP8().complexity) { | 462 switch (inst->VP8().complexity) { |
470 case kComplexityHigh: | 463 case kComplexityHigh: |
471 cpu_speed_[0] = -5; | 464 cpu_speed_[0] = -5; |
(...skipping 63 matching lines...)
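Note: MaxIntraTarget(), called at new line 452 above, sits inside the collapsed region, so the diff does not show its body. As a rough, hedged sketch of the kind of scaling the comment describes (assumed 0.5 scale factor and 300% floor; names are illustrative, not the diffed code):

#include <algorithm>
#include <cstdint>

// Illustrative only: express the maximum key-frame size as a percentage of the
// per-frame bandwidth, derived from the optimal buffer size (in ms).
uint32_t MaxIntraTargetSketch(uint32_t optimal_buffer_size_ms,
                              uint32_t max_framerate) {
  const float kScalePar = 0.5f;       // assumed scale factor
  const uint32_t kMinIntraPct = 300;  // assumed floor: 3x per-frame bandwidth
  const uint32_t target_pct = static_cast<uint32_t>(
      optimal_buffer_size_ms * kScalePar * max_framerate / 10);
  return std::max(target_pct, kMinIntraPct);
}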
535 vpx_img_alloc(&raw_images_[i], VPX_IMG_FMT_I420, | 528 vpx_img_alloc(&raw_images_[i], VPX_IMG_FMT_I420, |
536 inst->simulcastStream[stream_idx].width, | 529 inst->simulcastStream[stream_idx].width, |
537 inst->simulcastStream[stream_idx].height, kVp832ByteAlign); | 530 inst->simulcastStream[stream_idx].height, kVp832ByteAlign); |
538 SetStreamState(stream_bitrates[stream_idx] > 0, stream_idx); | 531 SetStreamState(stream_bitrates[stream_idx] > 0, stream_idx); |
539 configurations_[i].rc_target_bitrate = stream_bitrates[stream_idx]; | 532 configurations_[i].rc_target_bitrate = stream_bitrates[stream_idx]; |
540 temporal_layers_[stream_idx]->OnRatesUpdated( | 533 temporal_layers_[stream_idx]->OnRatesUpdated( |
541 stream_bitrates[stream_idx], inst->maxBitrate, inst->maxFramerate); | 534 stream_bitrates[stream_idx], inst->maxBitrate, inst->maxFramerate); |
542 temporal_layers_[stream_idx]->UpdateConfiguration(&configurations_[i]); | 535 temporal_layers_[stream_idx]->UpdateConfiguration(&configurations_[i]); |
543 } | 536 } |
544 | 537 |
545 rps_.Init(); | |
546 return InitAndSetControlSettings(); | 538 return InitAndSetControlSettings(); |
547 } | 539 } |
548 | 540 |
549 int VP8EncoderImpl::SetCpuSpeed(int width, int height) { | 541 int VP8EncoderImpl::SetCpuSpeed(int width, int height) { |
550 #if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) || defined(ANDROID) | 542 #if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) || defined(ANDROID) |
551 // On mobile platform, use a lower speed setting for lower resolutions for | 543 // On mobile platform, use a lower speed setting for lower resolutions for |
552 // CPUs with 4 or more cores. | 544 // CPUs with 4 or more cores. |
553 RTC_DCHECK_GT(number_of_cores_, 0); | 545 RTC_DCHECK_GT(number_of_cores_, 0); |
554 if (number_of_cores_ <= 3) | 546 if (number_of_cores_ <= 3) |
555 return -12; | 547 return -12; |
(...skipping 209 matching lines...)
765 const uint32_t forceKeyFrameIntraTh = 100; | 757 const uint32_t forceKeyFrameIntraTh = 100; |
766 vpx_codec_control(&(encoders_[0]), VP8E_SET_MAX_INTRA_BITRATE_PCT, | 758 vpx_codec_control(&(encoders_[0]), VP8E_SET_MAX_INTRA_BITRATE_PCT, |
767 forceKeyFrameIntraTh); | 759 forceKeyFrameIntraTh); |
768 } | 760 } |
769 // Key frame request from caller. | 761 // Key frame request from caller. |
770 // Will update both golden and alt-ref. | 762 // Will update both golden and alt-ref. |
771 for (size_t i = 0; i < encoders_.size(); ++i) { | 763 for (size_t i = 0; i < encoders_.size(); ++i) { |
772 flags[i] = VPX_EFLAG_FORCE_KF; | 764 flags[i] = VPX_EFLAG_FORCE_KF; |
773 } | 765 } |
774 std::fill(key_frame_request_.begin(), key_frame_request_.end(), false); | 766 std::fill(key_frame_request_.begin(), key_frame_request_.end(), false); |
775 } else if (codec_specific_info && | |
776 codec_specific_info->codecType == kVideoCodecVP8) { | |
777 if (feedback_mode_) { | |
778 // Handle RPSI and SLI messages and set up the appropriate encode flags. | |
779 bool sendRefresh = false; | |
780 if (codec_specific_info->codecSpecific.VP8.hasReceivedRPSI) { | |
781 rps_.ReceivedRPSI(codec_specific_info->codecSpecific.VP8.pictureIdRPSI); | |
782 } | |
783 if (codec_specific_info->codecSpecific.VP8.hasReceivedSLI) { | |
784 sendRefresh = rps_.ReceivedSLI(frame.timestamp()); | |
785 } | |
786 for (size_t i = 0; i < encoders_.size(); ++i) { | |
787 flags[i] = rps_.EncodeFlags(picture_id_[i], sendRefresh, | |
788 frame.timestamp()); | |
789 } | |
790 } else { | |
791 if (codec_specific_info->codecSpecific.VP8.hasReceivedRPSI) { | |
792 // Is this our last key frame? If not ignore. | |
793 // |picture_id_| is defined per spatial stream/layer, so check that | |
794 // |RPSI| matches the last key frame from any of the spatial streams. | |
795 // If so, then all spatial streams for this encoding will predict from | |
796 // its long-term reference (last key frame). | |
797 int RPSI = codec_specific_info->codecSpecific.VP8.pictureIdRPSI; | |
798 for (size_t i = 0; i < encoders_.size(); ++i) { | |
799 if (last_key_frame_picture_id_[i] == RPSI) { | |
800 // Request for a long term reference frame. | |
801 // Note 1: overwrites any temporal settings. | |
802 // Note 2: VP8_EFLAG_NO_UPD_ENTROPY is not needed as that flag is | |
803 // set by error_resilient mode. | |
804 for (size_t j = 0; j < encoders_.size(); ++j) { | |
805 flags[j] = VP8_EFLAG_NO_UPD_ARF; | |
806 flags[j] |= VP8_EFLAG_NO_REF_GF; | |
807 flags[j] |= VP8_EFLAG_NO_REF_LAST; | |
808 } | |
809 only_predict_from_key_frame = true; | |
810 break; | |
811 } | |
812 } | |
813 } | |
814 } | |
815 } | 767 } |
| 768 |
816 // Set the encoder frame flags and temporal layer_id for each spatial stream. | 769 // Set the encoder frame flags and temporal layer_id for each spatial stream. |
817 // Note that |temporal_layers_| are defined starting from lowest resolution at | 770 // Note that |temporal_layers_| are defined starting from lowest resolution at |
818 // position 0 to highest resolution at position |encoders_.size() - 1|, | 771 // position 0 to highest resolution at position |encoders_.size() - 1|, |
819 // whereas |encoder_| is from highest to lowest resolution. | 772 // whereas |encoder_| is from highest to lowest resolution. |
820 size_t stream_idx = encoders_.size() - 1; | 773 size_t stream_idx = encoders_.size() - 1; |
821 for (size_t i = 0; i < encoders_.size(); ++i, --stream_idx) { | 774 for (size_t i = 0; i < encoders_.size(); ++i, --stream_idx) { |
822 // Allow the layers adapter to temporarily modify the configuration. This | 775 // Allow the layers adapter to temporarily modify the configuration. This |
823 // change isn't stored in configurations_ so change will be discarded at | 776 // change isn't stored in configurations_ so change will be discarded at |
824 // the next update. | 777 // the next update. |
825 vpx_codec_enc_cfg_t temp_config; | 778 vpx_codec_enc_cfg_t temp_config; |
(...skipping 129 matching lines...)
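Note: the body of this loop is collapsed; the snippet below only illustrates the transient-configuration pattern the comment above describes, under assumed call names (it is not the elided code):

#include <cstring>

#include <vpx/vpx_encoder.h>

// Illustration only: copy the stored encoder config so a per-frame tweak by the
// temporal-layers adapter does not persist into configurations_.
void ApplyTransientConfig(vpx_codec_ctx_t* encoder,
                          const vpx_codec_enc_cfg_t* stored_config,
                          bool adapter_changed_config) {
  vpx_codec_enc_cfg_t temp_config;
  memcpy(&temp_config, stored_config, sizeof(temp_config));
  // ... the adapter would modify temp_config here ...
  if (adapter_changed_config)
    vpx_codec_enc_config_set(encoder, &temp_config);
}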
955 break; | 908 break; |
956 } | 909 } |
957 default: | 910 default: |
958 break; | 911 break; |
959 } | 912 } |
960 // End of frame | 913 // End of frame |
961 if ((pkt->data.frame.flags & VPX_FRAME_IS_FRAGMENT) == 0) { | 914 if ((pkt->data.frame.flags & VPX_FRAME_IS_FRAGMENT) == 0) { |
962 // check if encoded frame is a key frame | 915 // check if encoded frame is a key frame |
963 if (pkt->data.frame.flags & VPX_FRAME_IS_KEY) { | 916 if (pkt->data.frame.flags & VPX_FRAME_IS_KEY) { |
964 encoded_images_[encoder_idx]._frameType = kVideoFrameKey; | 917 encoded_images_[encoder_idx]._frameType = kVideoFrameKey; |
965 rps_.EncodedKeyFrame(picture_id_[stream_idx]); | |
966 } | 918 } |
967 PopulateCodecSpecific(&codec_specific, *pkt, stream_idx, | 919 PopulateCodecSpecific(&codec_specific, *pkt, stream_idx, |
968 input_image.timestamp(), | 920 input_image.timestamp(), |
969 only_predicting_from_key_frame); | 921 only_predicting_from_key_frame); |
970 break; | 922 break; |
971 } | 923 } |
972 } | 924 } |
973 encoded_images_[encoder_idx]._timeStamp = input_image.timestamp(); | 925 encoded_images_[encoder_idx]._timeStamp = input_image.timestamp(); |
974 encoded_images_[encoder_idx].capture_time_ms_ = | 926 encoded_images_[encoder_idx].capture_time_ms_ = |
975 input_image.render_time_ms(); | 927 input_image.render_time_ms(); |
(...skipping 30 matching lines...)
1006 } | 958 } |
1007 | 959 |
1008 VideoEncoder::ScalingSettings VP8EncoderImpl::GetScalingSettings() const { | 960 VideoEncoder::ScalingSettings VP8EncoderImpl::GetScalingSettings() const { |
1009 const bool enable_scaling = encoders_.size() == 1 && | 961 const bool enable_scaling = encoders_.size() == 1 && |
1010 configurations_[0].rc_dropframe_thresh > 0 && | 962 configurations_[0].rc_dropframe_thresh > 0 && |
1011 codec_.VP8().automaticResizeOn; | 963 codec_.VP8().automaticResizeOn; |
1012 return VideoEncoder::ScalingSettings(enable_scaling); | 964 return VideoEncoder::ScalingSettings(enable_scaling); |
1013 } | 965 } |
1014 | 966 |
1015 int VP8EncoderImpl::SetChannelParameters(uint32_t packetLoss, int64_t rtt) { | 967 int VP8EncoderImpl::SetChannelParameters(uint32_t packetLoss, int64_t rtt) { |
1016 rps_.SetRtt(rtt); | |
1017 return WEBRTC_VIDEO_CODEC_OK; | 968 return WEBRTC_VIDEO_CODEC_OK; |
1018 } | 969 } |
1019 | 970 |
1020 int VP8EncoderImpl::RegisterEncodeCompleteCallback( | 971 int VP8EncoderImpl::RegisterEncodeCompleteCallback( |
1021 EncodedImageCallback* callback) { | 972 EncodedImageCallback* callback) { |
1022 encoded_complete_callback_ = callback; | 973 encoded_complete_callback_ = callback; |
1023 return WEBRTC_VIDEO_CODEC_OK; | 974 return WEBRTC_VIDEO_CODEC_OK; |
1024 } | 975 } |
1025 | 976 |
1026 VP8DecoderImpl::VP8DecoderImpl() | 977 VP8DecoderImpl::VP8DecoderImpl() |
1027 : buffer_pool_(false, 300 /* max_number_of_buffers*/), | 978 : buffer_pool_(false, 300 /* max_number_of_buffers*/), |
1028 decode_complete_callback_(NULL), | 979 decode_complete_callback_(NULL), |
1029 inited_(false), | 980 inited_(false), |
1030 feedback_mode_(false), | |
1031 decoder_(NULL), | 981 decoder_(NULL), |
1032 image_format_(VPX_IMG_FMT_NONE), | 982 image_format_(VPX_IMG_FMT_NONE), |
1033 ref_frame_(NULL), | 983 ref_frame_(NULL), |
1034 propagation_cnt_(-1), | 984 propagation_cnt_(-1), |
1035 last_frame_width_(0), | 985 last_frame_width_(0), |
1036 last_frame_height_(0), | 986 last_frame_height_(0), |
1037 key_frame_required_(true), | 987 key_frame_required_(true), |
1038 use_postproc_arm_(webrtc::field_trial::FindFullName( | 988 use_postproc_arm_(webrtc::field_trial::FindFullName( |
1039 kVp8PostProcArmFieldTrial) == "Enabled") {} | 989 kVp8PostProcArmFieldTrial) == "Enabled") {} |
1040 | 990 |
1041 VP8DecoderImpl::~VP8DecoderImpl() { | 991 VP8DecoderImpl::~VP8DecoderImpl() { |
1042 inited_ = true; // in order to do the actual release | 992 inited_ = true; // in order to do the actual release |
1043 Release(); | 993 Release(); |
1044 } | 994 } |
1045 | 995 |
1046 int VP8DecoderImpl::InitDecode(const VideoCodec* inst, int number_of_cores) { | 996 int VP8DecoderImpl::InitDecode(const VideoCodec* inst, int number_of_cores) { |
1047 int ret_val = Release(); | 997 int ret_val = Release(); |
1048 if (ret_val < 0) { | 998 if (ret_val < 0) { |
1049 return ret_val; | 999 return ret_val; |
1050 } | 1000 } |
1051 if (decoder_ == NULL) { | 1001 if (decoder_ == NULL) { |
1052 decoder_ = new vpx_codec_ctx_t; | 1002 decoder_ = new vpx_codec_ctx_t; |
1053 memset(decoder_, 0, sizeof(*decoder_)); | 1003 memset(decoder_, 0, sizeof(*decoder_)); |
1054 } | 1004 } |
1055 if (inst && inst->codecType == kVideoCodecVP8) { | |
1056 feedback_mode_ = inst->VP8().feedbackModeOn; | |
1057 } | |
1058 vpx_codec_dec_cfg_t cfg; | 1005 vpx_codec_dec_cfg_t cfg; |
1059 // Setting number of threads to a constant value (1) | 1006 // Setting number of threads to a constant value (1) |
1060 cfg.threads = 1; | 1007 cfg.threads = 1; |
1061 cfg.h = cfg.w = 0; // set after decode | 1008 cfg.h = cfg.w = 0; // set after decode |
1062 | 1009 |
1063 vpx_codec_flags_t flags = 0; | 1010 vpx_codec_flags_t flags = 0; |
1064 #if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) || defined(ANDROID) | 1011 #if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) || defined(ANDROID) |
1065 if (use_postproc_arm_) { | 1012 if (use_postproc_arm_) { |
1066 flags = VPX_CODEC_USE_POSTPROC; | 1013 flags = VPX_CODEC_USE_POSTPROC; |
1067 } | 1014 } |
(...skipping 68 matching lines...)
1136 if (key_frame_required_) { | 1083 if (key_frame_required_) { |
1137 if (input_image._frameType != kVideoFrameKey) | 1084 if (input_image._frameType != kVideoFrameKey) |
1138 return WEBRTC_VIDEO_CODEC_ERROR; | 1085 return WEBRTC_VIDEO_CODEC_ERROR; |
1139 // We have a key frame - is it complete? | 1086 // We have a key frame - is it complete? |
1140 if (input_image._completeFrame) { | 1087 if (input_image._completeFrame) { |
1141 key_frame_required_ = false; | 1088 key_frame_required_ = false; |
1142 } else { | 1089 } else { |
1143 return WEBRTC_VIDEO_CODEC_ERROR; | 1090 return WEBRTC_VIDEO_CODEC_ERROR; |
1144 } | 1091 } |
1145 } | 1092 } |
1146 // Restrict error propagation using key frame requests. Disabled when | 1093 // Restrict error propagation using key frame requests. |
1147 // the feedback mode is enabled (RPS). | |
1148 // Reset on a key frame refresh. | 1094 // Reset on a key frame refresh. |
1149 if (!feedback_mode_) { | 1095 if (input_image._frameType == kVideoFrameKey && |
1150 if (input_image._frameType == kVideoFrameKey && | 1096 input_image._completeFrame) { |
1151 input_image._completeFrame) { | |
1152 propagation_cnt_ = -1; | 1097 propagation_cnt_ = -1; |
1153 // Start count on first loss. | 1098 // Start count on first loss. |
1154 } else if ((!input_image._completeFrame || missing_frames) && | 1099 } else if ((!input_image._completeFrame || missing_frames) && |
1155 propagation_cnt_ == -1) { | 1100 propagation_cnt_ == -1) { |
1156 propagation_cnt_ = 0; | 1101 propagation_cnt_ = 0; |
1157 } | 1102 } |
1158 if (propagation_cnt_ >= 0) { | 1103 if (propagation_cnt_ >= 0) { |
1159 propagation_cnt_++; | 1104 propagation_cnt_++; |
1160 } | |
1161 } | 1105 } |
1162 | 1106 |
1163 vpx_codec_iter_t iter = NULL; | 1107 vpx_codec_iter_t iter = NULL; |
1164 vpx_image_t* img; | 1108 vpx_image_t* img; |
1165 int ret; | 1109 int ret; |
1166 | 1110 |
1167 // Check for missing frames. | 1111 // Check for missing frames. |
1168 if (missing_frames) { | 1112 if (missing_frames) { |
1169 // Call decoder with zero data length to signal missing frames. | 1113 // Call decoder with zero data length to signal missing frames. |
1170 if (vpx_codec_decode(decoder_, NULL, 0, 0, VPX_DL_REALTIME)) { | 1114 if (vpx_codec_decode(decoder_, NULL, 0, 0, VPX_DL_REALTIME)) { |
(...skipping 24 matching lines...)
1195 vpx_codec_err_t vpx_ret = | 1139 vpx_codec_err_t vpx_ret = |
1196 vpx_codec_control(decoder_, VPXD_GET_LAST_QUANTIZER, &qp); | 1140 vpx_codec_control(decoder_, VPXD_GET_LAST_QUANTIZER, &qp); |
1197 RTC_DCHECK_EQ(vpx_ret, VPX_CODEC_OK); | 1141 RTC_DCHECK_EQ(vpx_ret, VPX_CODEC_OK); |
1198 ret = ReturnFrame(img, input_image._timeStamp, input_image.ntp_time_ms_, qp); | 1142 ret = ReturnFrame(img, input_image._timeStamp, input_image.ntp_time_ms_, qp); |
1199 if (ret != 0) { | 1143 if (ret != 0) { |
1200 // Reset to avoid requesting key frames too often. | 1144 // Reset to avoid requesting key frames too often. |
1201 if (ret < 0 && propagation_cnt_ > 0) | 1145 if (ret < 0 && propagation_cnt_ > 0) |
1202 propagation_cnt_ = 0; | 1146 propagation_cnt_ = 0; |
1203 return ret; | 1147 return ret; |
1204 } | 1148 } |
1205 if (feedback_mode_) { | |
1206 // Whenever we receive an incomplete key frame all reference buffers will | |
1207 // be corrupt. If that happens we must request new key frames until we | |
1208 // decode a complete key frame. | |
1209 if (input_image._frameType == kVideoFrameKey && !input_image._completeFrame) | |
1210 return WEBRTC_VIDEO_CODEC_ERROR; | |
1211 // Check for reference updates and last reference buffer corruption and | |
1212 // signal successful reference propagation or frame corruption to the | |
1213 // encoder. | |
1214 int reference_updates = 0; | |
1215 if (vpx_codec_control(decoder_, VP8D_GET_LAST_REF_UPDATES, | |
1216 &reference_updates)) { | |
1217 // Reset to avoid requesting key frames too often. | |
1218 if (propagation_cnt_ > 0) { | |
1219 propagation_cnt_ = 0; | |
1220 } | |
1221 return WEBRTC_VIDEO_CODEC_ERROR; | |
1222 } | |
1223 int corrupted = 0; | |
1224 if (vpx_codec_control(decoder_, VP8D_GET_FRAME_CORRUPTED, &corrupted)) { | |
1225 // Reset to avoid requesting key frames too often. | |
1226 if (propagation_cnt_ > 0) | |
1227 propagation_cnt_ = 0; | |
1228 return WEBRTC_VIDEO_CODEC_ERROR; | |
1229 } | |
1230 int16_t picture_id = -1; | |
1231 if (codec_specific_info) { | |
1232 picture_id = codec_specific_info->codecSpecific.VP8.pictureId; | |
1233 } | |
1234 if (picture_id > -1) { | |
1235 if (((reference_updates & VP8_GOLD_FRAME) || | |
1236 (reference_updates & VP8_ALTR_FRAME)) && | |
1237 !corrupted) { | |
1238 decode_complete_callback_->ReceivedDecodedReferenceFrame(picture_id); | |
1239 } | |
1240 decode_complete_callback_->ReceivedDecodedFrame(picture_id); | |
1241 } | |
1242 if (corrupted) { | |
1243 // we can decode but with artifacts | |
1244 return WEBRTC_VIDEO_CODEC_REQUEST_SLI; | |
1245 } | |
1246 } | |
1247 // Check Vs. threshold | 1149 // Check Vs. threshold |
1248 if (propagation_cnt_ > kVp8ErrorPropagationTh) { | 1150 if (propagation_cnt_ > kVp8ErrorPropagationTh) { |
1249 // Reset to avoid requesting key frames too often. | 1151 // Reset to avoid requesting key frames too often. |
1250 propagation_cnt_ = 0; | 1152 propagation_cnt_ = 0; |
1251 return WEBRTC_VIDEO_CODEC_ERROR; | 1153 return WEBRTC_VIDEO_CODEC_ERROR; |
1252 } | 1154 } |
1253 return WEBRTC_VIDEO_CODEC_OK; | 1155 return WEBRTC_VIDEO_CODEC_OK; |
1254 } | 1156 } |
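Note: with the RPS feedback path removed, Decode() now guards against error propagation only through the counter checked against kVp8ErrorPropagationTh above. A condensed sketch of that policy, with a hypothetical helper name and the threshold passed in:

// Sketch only: count decoded frames after the first loss and ask for a key
// frame once the count exceeds the threshold.
bool ShouldRequestKeyFrame(bool key_frame, bool complete_frame,
                           bool missing_frames, int* propagation_cnt,
                           int threshold) {
  if (key_frame && complete_frame) {
    *propagation_cnt = -1;             // healthy again; stop counting
  } else if ((!complete_frame || missing_frames) && *propagation_cnt == -1) {
    *propagation_cnt = 0;              // first loss; start counting
  }
  if (*propagation_cnt >= 0)
    ++(*propagation_cnt);
  if (*propagation_cnt > threshold) {  // e.g. kVp8ErrorPropagationTh
    *propagation_cnt = 0;              // reset to avoid requesting too often
    return true;                       // caller returns WEBRTC_VIDEO_CODEC_ERROR
  }
  return false;
}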
1255 | 1157 |
1256 int VP8DecoderImpl::ReturnFrame(const vpx_image_t* img, | 1158 int VP8DecoderImpl::ReturnFrame(const vpx_image_t* img, |
(...skipping 70 matching lines...)
1327 return -1; | 1229 return -1; |
1328 } | 1230 } |
1329 if (vpx_codec_control(copy->decoder_, VP8_SET_REFERENCE, ref_frame_) != | 1231 if (vpx_codec_control(copy->decoder_, VP8_SET_REFERENCE, ref_frame_) != |
1330 VPX_CODEC_OK) { | 1232 VPX_CODEC_OK) { |
1331 return -1; | 1233 return -1; |
1332 } | 1234 } |
1333 return 0; | 1235 return 0; |
1334 } | 1236 } |
1335 | 1237 |
1336 } // namespace webrtc | 1238 } // namespace webrtc |