Chromium Code Reviews

Unified Diff: webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc

Issue 2742383004: Delete support for receiving RTCP RPSI and SLI messages. (Closed)
Patch Set: Add back OnReceivedSLI and OnReceivedRPSI, so as not to break downstream subclasses. Created 3 years, 9 months ago
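
For context on that compatibility note: below is a minimal, hypothetical sketch of the pattern described, assuming the callbacks live on an observer interface along the lines of RtcpIntraFrameObserver (the exact class and signatures are not shown in this diff). Keeping the methods as empty virtuals, rather than deleting them outright, lets downstream subclasses that still override them compile unchanged even though nothing calls them anymore.

  #include <cstdint>

  // Hypothetical interface sketch, not the actual WebRTC header.
  class RtcpIntraFrameObserver {
   public:
    virtual ~RtcpIntraFrameObserver() {}

    virtual void OnReceivedIntraFrameRequest(uint32_t ssrc) = 0;

    // Formerly part of the RPSI/SLI receive path; kept as no-op virtuals so
    // that external subclasses overriding them do not break when the
    // handling is removed from VP8EncoderImpl.
    virtual void OnReceivedSLI(uint32_t ssrc, uint8_t picture_id) {}
    virtual void OnReceivedRPSI(uint32_t ssrc, uint64_t picture_id) {}
  };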
 /*
  * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  * Use of this source code is governed by a BSD-style license
  * that can be found in the LICENSE file in the root of the source
  * tree. An additional intellectual property rights grant can be found
  * in the file PATENTS. All contributing project authors may
  * be found in the AUTHORS file in the root of the source tree.
  */
 
(...skipping 306 matching lines...)
   // allow zero to represent an unspecified maxBitRate
   if (inst->maxBitrate > 0 && inst->startBitrate > inst->maxBitrate) {
     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
   }
   if (inst->width <= 1 || inst->height <= 1) {
     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
   }
   if (number_of_cores < 1) {
     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
   }
-  if (inst->VP8().feedbackModeOn && inst->numberOfSimulcastStreams > 1) {
-    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
-  }
   if (inst->VP8().automaticResizeOn && inst->numberOfSimulcastStreams > 1) {
     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
   }
   int retVal = Release();
   if (retVal < 0) {
     return retVal;
   }
 
   int number_of_streams = NumberOfStreams(*inst);
   bool doing_simulcast = (number_of_streams > 1);
(...skipping 396 matching lines...)
   }
   if (!send_key_frame && frame_types) {
     for (size_t i = 0; i < frame_types->size() && i < send_stream_.size();
          ++i) {
       if ((*frame_types)[i] == kVideoFrameKey && send_stream_[i]) {
         send_key_frame = true;
         break;
       }
     }
   }
-  // The flag modification below (due to forced key frame, RPS, etc.,) for now
-  // will be the same for all encoders/spatial layers.
-  // TODO(marpan/holmer): Allow for key frame request to be set per encoder.
-  bool only_predict_from_key_frame = false;
   if (send_key_frame) {
     // Adapt the size of the key frame when in screenshare with 1 temporal
     // layer.
     if (encoders_.size() == 1 && codec_.mode == kScreensharing &&
         codec_.VP8()->numberOfTemporalLayers <= 1) {
       const uint32_t forceKeyFrameIntraTh = 100;
       vpx_codec_control(&(encoders_[0]), VP8E_SET_MAX_INTRA_BITRATE_PCT,
                         forceKeyFrameIntraTh);
     }
     // Key frame request from caller.
(...skipping 38 matching lines...)
                            duration, 0, VPX_DL_REALTIME);
   // Reset specific intra frame thresholds, following the key frame.
   if (send_key_frame) {
     vpx_codec_control(&(encoders_[0]), VP8E_SET_MAX_INTRA_BITRATE_PCT,
                       rc_max_intra_target_);
   }
   if (error)
     return WEBRTC_VIDEO_CODEC_ERROR;
   timestamp_ += duration;
   // Examines frame timestamps only.
-  return GetEncodedPartitions(frame, only_predict_from_key_frame);
+  return GetEncodedPartitions(frame);
 }
 
 // TODO(pbos): Make sure this works for properly for >1 encoders.
 int VP8EncoderImpl::UpdateCodecFrameSize(int width, int height) {
   codec_.width = width;
   codec_.height = height;
   if (codec_.numberOfSimulcastStreams <= 1) {
     // For now scaling is only used for single-layer streams.
     codec_.simulcastStream[0].width = width;
     codec_.simulcastStream[0].height = height;
(...skipping 14 matching lines...)
   if (vpx_codec_enc_config_set(&encoders_[0], &configurations_[0])) {
     return WEBRTC_VIDEO_CODEC_ERROR;
   }
   return WEBRTC_VIDEO_CODEC_OK;
 }
 
 void VP8EncoderImpl::PopulateCodecSpecific(
     CodecSpecificInfo* codec_specific,
     const vpx_codec_cx_pkt_t& pkt,
     int stream_idx,
-    uint32_t timestamp,
-    bool only_predicting_from_key_frame) {
+    uint32_t timestamp) {
   assert(codec_specific != NULL);
   codec_specific->codecType = kVideoCodecVP8;
   codec_specific->codec_name = ImplementationName();
   CodecSpecificInfoVP8* vp8Info = &(codec_specific->codecSpecific.VP8);
   vp8Info->pictureId = picture_id_[stream_idx];
   if (pkt.data.frame.flags & VPX_FRAME_IS_KEY) {
     last_key_frame_picture_id_[stream_idx] = picture_id_[stream_idx];
   }
   vp8Info->simulcastIdx = stream_idx;
   vp8Info->keyIdx = kNoKeyIdx;  // TODO(hlundin) populate this
   vp8Info->nonReference =
       (pkt.data.frame.flags & VPX_FRAME_IS_DROPPABLE) ? true : false;
-  bool base_layer_sync_point = (pkt.data.frame.flags & VPX_FRAME_IS_KEY) ||
-                               only_predicting_from_key_frame;
+  bool base_layer_sync_point = pkt.data.frame.flags & VPX_FRAME_IS_KEY;
   temporal_layers_[stream_idx]->PopulateCodecSpecific(base_layer_sync_point,
                                                       vp8Info, timestamp);
   // Prepare next.
   picture_id_[stream_idx] = (picture_id_[stream_idx] + 1) & 0x7FFF;
 }
 
-int VP8EncoderImpl::GetEncodedPartitions(const VideoFrame& input_image,
-                                         bool only_predicting_from_key_frame) {
+int VP8EncoderImpl::GetEncodedPartitions(const VideoFrame& input_image) {
   int bw_resolutions_disabled =
       (encoders_.size() > 1) ? NumStreamsDisabled(send_stream_) : -1;
 
   int stream_idx = static_cast<int>(encoders_.size()) - 1;
   int result = WEBRTC_VIDEO_CODEC_OK;
   for (size_t encoder_idx = 0; encoder_idx < encoders_.size();
        ++encoder_idx, --stream_idx) {
     vpx_codec_iter_t iter = NULL;
     int part_idx = 0;
     encoded_images_[encoder_idx]._length = 0;
(...skipping 31 matching lines...)
         default:
           break;
       }
       // End of frame
       if ((pkt->data.frame.flags & VPX_FRAME_IS_FRAGMENT) == 0) {
         // check if encoded frame is a key frame
         if (pkt->data.frame.flags & VPX_FRAME_IS_KEY) {
           encoded_images_[encoder_idx]._frameType = kVideoFrameKey;
         }
         PopulateCodecSpecific(&codec_specific, *pkt, stream_idx,
-                              input_image.timestamp(),
-                              only_predicting_from_key_frame);
+                              input_image.timestamp());
         break;
       }
     }
     encoded_images_[encoder_idx]._timeStamp = input_image.timestamp();
     encoded_images_[encoder_idx].capture_time_ms_ =
         input_image.render_time_ms();
     encoded_images_[encoder_idx].rotation_ = input_image.rotation();
 
     int qp = -1;
     vpx_codec_control(&encoders_[encoder_idx], VP8E_GET_LAST_QUANTIZER_64, &qp);
(...skipping 155 matching lines...)
     if (input_image._completeFrame) {
       key_frame_required_ = false;
     } else {
       return WEBRTC_VIDEO_CODEC_ERROR;
     }
   }
   // Restrict error propagation using key frame requests.
   // Reset on a key frame refresh.
   if (input_image._frameType == kVideoFrameKey &&
       input_image._completeFrame) {
     propagation_cnt_ = -1;
     // Start count on first loss.
   } else if ((!input_image._completeFrame || missing_frames) &&
              propagation_cnt_ == -1) {
     propagation_cnt_ = 0;
   }
   if (propagation_cnt_ >= 0) {
     propagation_cnt_++;
   }
 
   vpx_codec_iter_t iter = NULL;
   vpx_image_t* img;
(...skipping 120 matching lines...)
     return -1;
   }
   if (vpx_codec_control(copy->decoder_, VP8_SET_REFERENCE, ref_frame_) !=
       VPX_CODEC_OK) {
     return -1;
   }
   return 0;
 }
 
 }  // namespace webrtc
