Chromium Code Reviews

Diff: webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc

Issue 2853073004: Derive current layer from TL frame config. (Closed)
Patch Set: rebase (created 3 years, 7 months ago)
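For readers skimming the patch: the gist of the change is that the encoder no longer queries each TemporalLayers instance for a separate "current" layer id. Instead, the per-frame TemporalLayers::FrameConfig returned by UpdateLayerConfig() is stored per simulcast stream and reused to derive the encode flags, the temporal layer id, and the codec-specific info. The following is a minimal, self-contained sketch of that pattern; the types below are hypothetical stand-ins, not the real WebRTC interfaces.

#include <cstdint>
#include <vector>

// Stand-in for TemporalLayers::FrameConfig; only the members needed for the
// illustration are included, and the field name below is hypothetical.
struct FrameConfig {
  bool drop_frame = false;
  int temporal_layer_id = 0;
};

// Stand-in for a TemporalLayers strategy object (one per simulcast stream).
struct TemporalLayersStub {
  FrameConfig UpdateLayerConfig(uint32_t /*rtp_timestamp*/) {
    // A real implementation decides, per frame, which buffers to reference or
    // update, whether to drop, and which temporal layer the frame belongs to.
    return FrameConfig{};
  }
  int GetTemporalLayerId(const FrameConfig& config) const {
    return config.temporal_layer_id;
  }
};

int main() {
  std::vector<TemporalLayersStub> temporal_layers(3);  // one per stream
  std::vector<FrameConfig> tl_configs(temporal_layers.size());
  const uint32_t rtp_timestamp = 90000;

  for (size_t i = 0; i < temporal_layers.size(); ++i) {
    // Pattern from this CL: compute the frame config once per stream and keep
    // it around...
    tl_configs[i] = temporal_layers[i].UpdateLayerConfig(rtp_timestamp);
    if (tl_configs[i].drop_frame)
      return 0;  // the real encoder drops the frame and reports success
    // ...then derive the layer id from that same config instead of querying a
    // separate CurrentLayerId() afterwards, so flags, layer id, and
    // codec-specific info all describe the same frame.
    const int layer_id = temporal_layers[i].GetTemporalLayerId(tl_configs[i]);
    (void)layer_id;  // in vp8_impl.cc this feeds VP8E_SET_TEMPORAL_LAYER_ID
  }
  return 0;
}

In the patch below, the same idea appears as the tl_configs[kMaxSimulcastStreams] array in VP8EncoderImpl::Encode() and the new tl_config arguments threaded through PopulateCodecSpecific() and GetEncodedPartitions().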
/*
 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

(...skipping 115 matching lines...)

VP8Encoder* VP8Encoder::Create() {
  return new VP8EncoderImpl();
}

VP8Decoder* VP8Decoder::Create() {
  return new VP8DecoderImpl();
}

vpx_enc_frame_flags_t VP8EncoderImpl::EncodeFlags(
-    TemporalReferences references) {
+    const TemporalLayers::FrameConfig& references) {
  RTC_DCHECK(!references.drop_frame);

  vpx_enc_frame_flags_t flags = 0;

-  if ((references.last_buffer_flags & kReference) == 0)
+  if ((references.last_buffer_flags & TemporalLayers::kReference) == 0)
    flags |= VP8_EFLAG_NO_REF_LAST;
-  if ((references.last_buffer_flags & kUpdate) == 0)
+  if ((references.last_buffer_flags & TemporalLayers::kUpdate) == 0)
    flags |= VP8_EFLAG_NO_UPD_LAST;
-  if ((references.golden_buffer_flags & kReference) == 0)
+  if ((references.golden_buffer_flags & TemporalLayers::kReference) == 0)
    flags |= VP8_EFLAG_NO_REF_GF;
-  if ((references.golden_buffer_flags & kUpdate) == 0)
+  if ((references.golden_buffer_flags & TemporalLayers::kUpdate) == 0)
    flags |= VP8_EFLAG_NO_UPD_GF;
-  if ((references.arf_buffer_flags & kReference) == 0)
+  if ((references.arf_buffer_flags & TemporalLayers::kReference) == 0)
    flags |= VP8_EFLAG_NO_REF_ARF;
-  if ((references.arf_buffer_flags & kUpdate) == 0)
+  if ((references.arf_buffer_flags & TemporalLayers::kUpdate) == 0)
    flags |= VP8_EFLAG_NO_UPD_ARF;
  if (references.freeze_entropy)
    flags |= VP8_EFLAG_NO_UPD_ENTROPY;

  return flags;
}

VP8EncoderImpl::VP8EncoderImpl()
    : use_gf_boost_(webrtc::field_trial::IsEnabled(kVp8GfBoostFieldTrial)),
      encoded_complete_callback_(nullptr),
(...skipping 534 matching lines...)
        raw_images_[i - 1].stride[VPX_PLANE_U],
        raw_images_[i - 1].planes[VPX_PLANE_V],
        raw_images_[i - 1].stride[VPX_PLANE_V], raw_images_[i - 1].d_w,
        raw_images_[i - 1].d_h, raw_images_[i].planes[VPX_PLANE_Y],
        raw_images_[i].stride[VPX_PLANE_Y], raw_images_[i].planes[VPX_PLANE_U],
        raw_images_[i].stride[VPX_PLANE_U], raw_images_[i].planes[VPX_PLANE_V],
        raw_images_[i].stride[VPX_PLANE_V], raw_images_[i].d_w,
        raw_images_[i].d_h, libyuv::kFilterBilinear);
  }
  vpx_enc_frame_flags_t flags[kMaxSimulcastStreams];
+  TemporalLayers::FrameConfig tl_configs[kMaxSimulcastStreams];
  for (size_t i = 0; i < encoders_.size(); ++i) {
-    TemporalReferences tl_config =
-        temporal_layers_[i]->UpdateLayerConfig(frame.timestamp());
+    tl_configs[i] = temporal_layers_[i]->UpdateLayerConfig(frame.timestamp());

-    if (tl_config.drop_frame) {
+    if (tl_configs[i].drop_frame) {
      // Drop this frame.
      return WEBRTC_VIDEO_CODEC_OK;
    }
-    flags[i] = EncodeFlags(tl_config);
+    flags[i] = EncodeFlags(tl_configs[i]);
  }
  bool send_key_frame = false;
  for (size_t i = 0; i < key_frame_request_.size() && i < send_stream_.size();
       ++i) {
    if (key_frame_request_[i] && send_stream_[i]) {
      send_key_frame = true;
      break;
    }
  }
  if (!send_key_frame && frame_types) {
(...skipping 32 matching lines...)
    // change isn't stored in configurations_ so change will be discarded at
    // the next update.
    vpx_codec_enc_cfg_t temp_config;
    memcpy(&temp_config, &configurations_[i], sizeof(vpx_codec_enc_cfg_t));
    if (temporal_layers_[stream_idx]->UpdateConfiguration(&temp_config)) {
      if (vpx_codec_enc_config_set(&encoders_[i], &temp_config))
        return WEBRTC_VIDEO_CODEC_ERROR;
    }

    vpx_codec_control(&encoders_[i], VP8E_SET_FRAME_FLAGS, flags[stream_idx]);
-    vpx_codec_control(&encoders_[i], VP8E_SET_TEMPORAL_LAYER_ID,
-                      temporal_layers_[stream_idx]->CurrentLayerId());
+    vpx_codec_control(
+        &encoders_[i], VP8E_SET_TEMPORAL_LAYER_ID,
+        temporal_layers_[stream_idx]->GetTemporalLayerId(tl_configs[i]));
  }
  // TODO(holmer): Ideally the duration should be the timestamp diff of this
  // frame and the next frame to be encoded, which we don't have. Instead we
  // would like to use the duration of the previous frame. Unfortunately the
  // rate control seems to be off with that setup. Using the average input
  // frame rate to calculate an average duration for now.
  assert(codec_.maxFramerate > 0);
  uint32_t duration = 90000 / codec_.maxFramerate;

  // Note we must pass 0 for |flags| field in encode call below since they are
  // set above in |vpx_codec_control| function for each encoder/spatial layer.
  int error = vpx_codec_encode(&encoders_[0], &raw_images_[0], timestamp_,
                               duration, 0, VPX_DL_REALTIME);
  // Reset specific intra frame thresholds, following the key frame.
  if (send_key_frame) {
    vpx_codec_control(&(encoders_[0]), VP8E_SET_MAX_INTRA_BITRATE_PCT,
                      rc_max_intra_target_);
  }
  if (error)
    return WEBRTC_VIDEO_CODEC_ERROR;
  timestamp_ += duration;
  // Examines frame timestamps only.
-  return GetEncodedPartitions(frame);
+  return GetEncodedPartitions(tl_configs, frame);
}

void VP8EncoderImpl::PopulateCodecSpecific(
    CodecSpecificInfo* codec_specific,
+    const TemporalLayers::FrameConfig& tl_config,
    const vpx_codec_cx_pkt_t& pkt,
    int stream_idx,
    uint32_t timestamp) {
  assert(codec_specific != NULL);
  codec_specific->codecType = kVideoCodecVP8;
  codec_specific->codec_name = ImplementationName();
  CodecSpecificInfoVP8* vp8Info = &(codec_specific->codecSpecific.VP8);
  vp8Info->pictureId = picture_id_[stream_idx];
  vp8Info->simulcastIdx = stream_idx;
  vp8Info->keyIdx = kNoKeyIdx;  // TODO(hlundin) populate this
  vp8Info->nonReference = (pkt.data.frame.flags & VPX_FRAME_IS_DROPPABLE) != 0;
  temporal_layers_[stream_idx]->PopulateCodecSpecific(
-      (pkt.data.frame.flags & VPX_FRAME_IS_KEY) != 0, vp8Info, timestamp);
+      (pkt.data.frame.flags & VPX_FRAME_IS_KEY) != 0, tl_config, vp8Info,
+      timestamp);
  // Prepare next.
  picture_id_[stream_idx] = (picture_id_[stream_idx] + 1) & 0x7FFF;
}

-int VP8EncoderImpl::GetEncodedPartitions(const VideoFrame& input_image) {
+int VP8EncoderImpl::GetEncodedPartitions(
+    const TemporalLayers::FrameConfig tl_configs[],
+    const VideoFrame& input_image) {
  int bw_resolutions_disabled =
      (encoders_.size() > 1) ? NumStreamsDisabled(send_stream_) : -1;

  int stream_idx = static_cast<int>(encoders_.size()) - 1;
  int result = WEBRTC_VIDEO_CODEC_OK;
  for (size_t encoder_idx = 0; encoder_idx < encoders_.size();
       ++encoder_idx, --stream_idx) {
    vpx_codec_iter_t iter = NULL;
    int part_idx = 0;
    encoded_images_[encoder_idx]._length = 0;
(...skipping 29 matching lines...)
        }
        default:
          break;
      }
      // End of frame
      if ((pkt->data.frame.flags & VPX_FRAME_IS_FRAGMENT) == 0) {
        // check if encoded frame is a key frame
        if (pkt->data.frame.flags & VPX_FRAME_IS_KEY) {
          encoded_images_[encoder_idx]._frameType = kVideoFrameKey;
        }
-        PopulateCodecSpecific(&codec_specific, *pkt, stream_idx,
-                              input_image.timestamp());
+        PopulateCodecSpecific(&codec_specific, tl_configs[stream_idx], *pkt,
+                              stream_idx, input_image.timestamp());
        break;
      }
    }
    encoded_images_[encoder_idx]._timeStamp = input_image.timestamp();
    encoded_images_[encoder_idx].capture_time_ms_ =
        input_image.render_time_ms();
    encoded_images_[encoder_idx].rotation_ = input_image.rotation();
    encoded_images_[encoder_idx].content_type_ =
        (codec_.mode == kScreensharing) ? VideoContentType::SCREENSHARE
                                        : VideoContentType::UNSPECIFIED;
(...skipping 268 matching lines...)
  buffer_pool_.Release();
  inited_ = false;
  return WEBRTC_VIDEO_CODEC_OK;
}

const char* VP8DecoderImpl::ImplementationName() const {
  return "libvpx";
}

}  // namespace webrtc
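As a side note on the EncodeFlags() helper touched by this patch: it translates the per-buffer reference/update intentions in a TemporalLayers::FrameConfig into libvpx's "do not reference" / "do not update" encode flags. Below is a self-contained sketch of that mapping; the enum values and flag constants are stand-ins (the real ones live in the TemporalLayers interface and in libvpx's vpx/vp8cx.h), so treat it as an illustration rather than the actual implementation.

#include <cstdint>
#include <cstdio>

// Assumed stand-in values; the real TemporalLayers buffer flags may differ.
enum BufferFlags : int { kNone = 0, kReference = 1, kUpdate = 2 };

// Stand-ins for libvpx's VP8_EFLAG_NO_REF_* / VP8_EFLAG_NO_UPD_* bits.
constexpr int64_t kNoRefLast = 1 << 0;
constexpr int64_t kNoUpdLast = 1 << 1;
constexpr int64_t kNoRefGolden = 1 << 2;
constexpr int64_t kNoUpdGolden = 1 << 3;
constexpr int64_t kNoRefArf = 1 << 4;
constexpr int64_t kNoUpdArf = 1 << 5;

struct FrameConfig {
  int last_buffer_flags = kNone;
  int golden_buffer_flags = kNone;
  int arf_buffer_flags = kNone;
};

// Mirrors the shape of VP8EncoderImpl::EncodeFlags(): any buffer that this
// frame neither references nor updates gets the corresponding NO_REF/NO_UPD
// bit set, so the encoder leaves that buffer alone for this frame.
int64_t EncodeFlags(const FrameConfig& c) {
  int64_t flags = 0;
  if ((c.last_buffer_flags & kReference) == 0) flags |= kNoRefLast;
  if ((c.last_buffer_flags & kUpdate) == 0) flags |= kNoUpdLast;
  if ((c.golden_buffer_flags & kReference) == 0) flags |= kNoRefGolden;
  if ((c.golden_buffer_flags & kUpdate) == 0) flags |= kNoUpdGolden;
  if ((c.arf_buffer_flags & kReference) == 0) flags |= kNoRefArf;
  if ((c.arf_buffer_flags & kUpdate) == 0) flags |= kNoUpdArf;
  return flags;
}

int main() {
  // Example: a base-layer frame that references and updates only the "last"
  // buffer; golden and ARF are neither referenced nor updated.
  FrameConfig config;
  config.last_buffer_flags = kReference | kUpdate;
  std::printf("encode flags = 0x%llx\n",
              static_cast<unsigned long long>(EncodeFlags(config)));
  return 0;
}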