Chromium Code Reviews

Unified diff: webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc

Issue 2853073004: Derive current layer from TL frame config. (Closed)
Patch Set: Created 3 years, 7 months ago
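
The patch threads the per-frame TemporalLayers::FrameConfig through the encoder instead of asking the temporal-layers object for its "current" layer after the fact. The following is a rough, non-authoritative sketch of that per-stream flow, not code from the patch: EncodeOneStream and its parameters are invented for illustration and member access/error handling are simplified, while UpdateLayerConfig(), EncodeFlags(), GetTemporalLayerId() and the VP8E_* controls are the calls visible in the diff below.

// Illustrative sketch only; EncodeFlags here stands for the
// VP8EncoderImpl::EncodeFlags() shown in the diff.
int EncodeOneStream(webrtc::TemporalLayers* layers,
                    vpx_codec_ctx_t* encoder,
                    uint32_t rtp_timestamp) {
  // One immutable config per frame; every later decision derives from it.
  webrtc::TemporalLayers::FrameConfig tl_config =
      layers->UpdateLayerConfig(rtp_timestamp);
  if (tl_config.drop_frame)
    return WEBRTC_VIDEO_CODEC_OK;  // Nothing to encode for this frame.

  // Buffer reference/update flags and the temporal layer id both come from
  // the same config, not from separately tracked "current layer" state.
  vpx_codec_control(encoder, VP8E_SET_FRAME_FLAGS, EncodeFlags(tl_config));
  vpx_codec_control(encoder, VP8E_SET_TEMPORAL_LAYER_ID,
                    layers->GetTemporalLayerId(tl_config));
  // ... vpx_codec_encode() and GetEncodedPartitions(tl_config, ...) follow,
  // so PopulateCodecSpecific() sees exactly the config used for this frame.
  return WEBRTC_VIDEO_CODEC_OK;
}

Caching the config per stream (the tl_configs array in the patch) keeps the encode flags, the layer id reported to libvpx, and the codec-specific info written in GetEncodedPartitions() all derived from the same snapshot.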
 /*
  * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  * Use of this source code is governed by a BSD-style license
  * that can be found in the LICENSE file in the root of the source
  * tree. An additional intellectual property rights grant can be found
  * in the file PATENTS. All contributing project authors may
  * be found in the AUTHORS file in the root of the source tree.
  */

(...skipping 114 matching lines...)

 VP8Encoder* VP8Encoder::Create() {
   return new VP8EncoderImpl();
 }

 VP8Decoder* VP8Decoder::Create() {
   return new VP8DecoderImpl();
 }

 vpx_enc_frame_flags_t VP8EncoderImpl::EncodeFlags(
-    TemporalReferences references) {
+    TemporalLayers::FrameConfig references) {
   RTC_DCHECK(!references.drop_frame);

   vpx_enc_frame_flags_t flags = 0;

-  if ((references.last_buffer_flags & kReference) == 0)
+  if ((references.last_buffer_flags & TemporalLayers::kReference) == 0)
     flags |= VP8_EFLAG_NO_REF_LAST;
-  if ((references.last_buffer_flags & kUpdate) == 0)
+  if ((references.last_buffer_flags & TemporalLayers::kUpdate) == 0)
     flags |= VP8_EFLAG_NO_UPD_LAST;
-  if ((references.golden_buffer_flags & kReference) == 0)
+  if ((references.golden_buffer_flags & TemporalLayers::kReference) == 0)
     flags |= VP8_EFLAG_NO_REF_GF;
-  if ((references.golden_buffer_flags & kUpdate) == 0)
+  if ((references.golden_buffer_flags & TemporalLayers::kUpdate) == 0)
     flags |= VP8_EFLAG_NO_UPD_GF;
-  if ((references.arf_buffer_flags & kReference) == 0)
+  if ((references.arf_buffer_flags & TemporalLayers::kReference) == 0)
     flags |= VP8_EFLAG_NO_REF_ARF;
-  if ((references.arf_buffer_flags & kUpdate) == 0)
+  if ((references.arf_buffer_flags & TemporalLayers::kUpdate) == 0)
     flags |= VP8_EFLAG_NO_UPD_ARF;
   if (references.freeze_entropy)
     flags |= VP8_EFLAG_NO_UPD_ENTROPY;

   return flags;
 }

 VP8EncoderImpl::VP8EncoderImpl()
     : use_gf_boost_(webrtc::field_trial::IsEnabled(kVp8GfBoostFieldTrial)),
       encoded_complete_callback_(nullptr),
(...skipping 537 matching lines...)
         raw_images_[i - 1].stride[VPX_PLANE_U],
         raw_images_[i - 1].planes[VPX_PLANE_V],
         raw_images_[i - 1].stride[VPX_PLANE_V], raw_images_[i - 1].d_w,
         raw_images_[i - 1].d_h, raw_images_[i].planes[VPX_PLANE_Y],
         raw_images_[i].stride[VPX_PLANE_Y], raw_images_[i].planes[VPX_PLANE_U],
         raw_images_[i].stride[VPX_PLANE_U], raw_images_[i].planes[VPX_PLANE_V],
         raw_images_[i].stride[VPX_PLANE_V], raw_images_[i].d_w,
         raw_images_[i].d_h, libyuv::kFilterBilinear);
   }
   vpx_enc_frame_flags_t flags[kMaxSimulcastStreams];
+  TemporalLayers::FrameConfig tl_configs[kMaxSimulcastStreams];
   for (size_t i = 0; i < encoders_.size(); ++i) {
-    TemporalReferences tl_config =
-        temporal_layers_[i]->UpdateLayerConfig(frame.timestamp());
+    tl_configs[i] = temporal_layers_[i]->UpdateLayerConfig(frame.timestamp());

-    if (tl_config.drop_frame) {
+    if (tl_configs[i].drop_frame) {
       // Drop this frame.
       return WEBRTC_VIDEO_CODEC_OK;
     }
-    flags[i] = EncodeFlags(tl_config);
+    flags[i] = EncodeFlags(tl_configs[i]);
   }
   bool send_key_frame = false;
   for (size_t i = 0; i < key_frame_request_.size() && i < send_stream_.size();
        ++i) {
     if (key_frame_request_[i] && send_stream_[i]) {
       send_key_frame = true;
       break;
     }
   }
   if (!send_key_frame && frame_types) {
(...skipping 32 matching lines...)
     // change isn't stored in configurations_ so change will be discarded at
     // the next update.
     vpx_codec_enc_cfg_t temp_config;
     memcpy(&temp_config, &configurations_[i], sizeof(vpx_codec_enc_cfg_t));
     if (temporal_layers_[stream_idx]->UpdateConfiguration(&temp_config)) {
       if (vpx_codec_enc_config_set(&encoders_[i], &temp_config))
         return WEBRTC_VIDEO_CODEC_ERROR;
     }

     vpx_codec_control(&encoders_[i], VP8E_SET_FRAME_FLAGS, flags[stream_idx]);
-    vpx_codec_control(&encoders_[i], VP8E_SET_TEMPORAL_LAYER_ID,
-                      temporal_layers_[stream_idx]->CurrentLayerId());
+    vpx_codec_control(
+        &encoders_[i], VP8E_SET_TEMPORAL_LAYER_ID,
+        temporal_layers_[stream_idx]->GetTemporalLayerId(tl_configs[i]));
   }
   // TODO(holmer): Ideally the duration should be the timestamp diff of this
   // frame and the next frame to be encoded, which we don't have. Instead we
   // would like to use the duration of the previous frame. Unfortunately the
   // rate control seems to be off with that setup. Using the average input
   // frame rate to calculate an average duration for now.
   assert(codec_.maxFramerate > 0);
   uint32_t duration = 90000 / codec_.maxFramerate;

   // Note we must pass 0 for |flags| field in encode call below since they are
   // set above in |vpx_codec_control| function for each encoder/spatial layer.
   int error = vpx_codec_encode(&encoders_[0], &raw_images_[0], timestamp_,
                                duration, 0, VPX_DL_REALTIME);
   // Reset specific intra frame thresholds, following the key frame.
   if (send_key_frame) {
     vpx_codec_control(&(encoders_[0]), VP8E_SET_MAX_INTRA_BITRATE_PCT,
                       rc_max_intra_target_);
   }
   if (error)
     return WEBRTC_VIDEO_CODEC_ERROR;
   timestamp_ += duration;
   // Examines frame timestamps only.
-  return GetEncodedPartitions(frame);
+  return GetEncodedPartitions(tl_configs, frame);
 }

 void VP8EncoderImpl::PopulateCodecSpecific(
     CodecSpecificInfo* codec_specific,
+    TemporalLayers::FrameConfig tl_config,
     const vpx_codec_cx_pkt_t& pkt,
     int stream_idx,
     uint32_t timestamp) {
   assert(codec_specific != NULL);
   codec_specific->codecType = kVideoCodecVP8;
   codec_specific->codec_name = ImplementationName();
   CodecSpecificInfoVP8* vp8Info = &(codec_specific->codecSpecific.VP8);
   vp8Info->pictureId = picture_id_[stream_idx];
   if (pkt.data.frame.flags & VPX_FRAME_IS_KEY) {
     last_key_frame_picture_id_[stream_idx] = picture_id_[stream_idx];
   }
   vp8Info->simulcastIdx = stream_idx;
   vp8Info->keyIdx = kNoKeyIdx;  // TODO(hlundin) populate this
   vp8Info->nonReference = (pkt.data.frame.flags & VPX_FRAME_IS_DROPPABLE) != 0;
   temporal_layers_[stream_idx]->PopulateCodecSpecific(
-      (pkt.data.frame.flags & VPX_FRAME_IS_KEY) != 0, vp8Info, timestamp);
+      (pkt.data.frame.flags & VPX_FRAME_IS_KEY) != 0, tl_config, vp8Info,
+      timestamp);
   // Prepare next.
   picture_id_[stream_idx] = (picture_id_[stream_idx] + 1) & 0x7FFF;
 }

-int VP8EncoderImpl::GetEncodedPartitions(const VideoFrame& input_image) {
+int VP8EncoderImpl::GetEncodedPartitions(
+    const TemporalLayers::FrameConfig tl_configs[],
+    const VideoFrame& input_image) {
   int bw_resolutions_disabled =
       (encoders_.size() > 1) ? NumStreamsDisabled(send_stream_) : -1;

   int stream_idx = static_cast<int>(encoders_.size()) - 1;
   int result = WEBRTC_VIDEO_CODEC_OK;
   for (size_t encoder_idx = 0; encoder_idx < encoders_.size();
        ++encoder_idx, --stream_idx) {
     vpx_codec_iter_t iter = NULL;
     int part_idx = 0;
     encoded_images_[encoder_idx]._length = 0;
(...skipping 29 matching lines...)
         }
         default:
           break;
       }
       // End of frame
       if ((pkt->data.frame.flags & VPX_FRAME_IS_FRAGMENT) == 0) {
         // check if encoded frame is a key frame
         if (pkt->data.frame.flags & VPX_FRAME_IS_KEY) {
           encoded_images_[encoder_idx]._frameType = kVideoFrameKey;
         }
-        PopulateCodecSpecific(&codec_specific, *pkt, stream_idx,
-                              input_image.timestamp());
+        PopulateCodecSpecific(&codec_specific, tl_configs[stream_idx], *pkt,
+                              stream_idx, input_image.timestamp());
         break;
       }
     }
     encoded_images_[encoder_idx]._timeStamp = input_image.timestamp();
     encoded_images_[encoder_idx].capture_time_ms_ =
         input_image.render_time_ms();
     encoded_images_[encoder_idx].rotation_ = input_image.rotation();
     encoded_images_[encoder_idx].content_type_ =
         (codec_.mode == kScreensharing) ? VideoContentType::SCREENSHARE
                                         : VideoContentType::UNSPECIFIED;
(...skipping 268 matching lines...)
   buffer_pool_.Release();
   inited_ = false;
   return WEBRTC_VIDEO_CODEC_OK;
 }

 const char* VP8DecoderImpl::ImplementationName() const {
   return "libvpx";
 }

 }  // namespace webrtc
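
For intuition only, here is a minimal stand-alone stand-in for the idea in the issue title ("Derive current layer from TL frame config"): the layer id is fixed at the moment the frame config is produced and simply read back later, instead of being tracked as separate mutable "current layer" state. This is not WebRTC code; every name below is hypothetical, and the real mapping lives in the TemporalLayers implementations, not in vp8_impl.cc.

#include <cstdint>

struct FrameConfigSketch {
  bool drop_frame = false;
  int temporal_layer_id = 0;  // Fixed when the config is produced.
};

class TwoLayerSketch {
 public:
  // A trivial 0-1-0-1 pattern: even frames go to the base layer (TL0),
  // odd frames to the enhancement layer (TL1).
  FrameConfigSketch UpdateLayerConfig(uint32_t /*rtp_timestamp*/) {
    FrameConfigSketch config;
    config.temporal_layer_id = (frame_count_++ % 2 == 0) ? 0 : 1;
    return config;
  }

  static int GetTemporalLayerId(const FrameConfigSketch& config) {
    // Derived from the per-frame config, not from mutable encoder state.
    return config.temporal_layer_id;
  }

 private:
  uint64_t frame_count_ = 0;
};

With this shape, dropping a frame or reordering calls cannot desynchronize the layer id from the buffer flags, since both come from the same config object.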