| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| (...skipping 690 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 701 } | 701 } |
| 702 if (!send_key_frame && frame_types) { | 702 if (!send_key_frame && frame_types) { |
| 703 for (size_t i = 0; i < frame_types->size() && i < send_stream_.size(); | 703 for (size_t i = 0; i < frame_types->size() && i < send_stream_.size(); |
| 704 ++i) { | 704 ++i) { |
| 705 if ((*frame_types)[i] == kVideoFrameKey && send_stream_[i]) { | 705 if ((*frame_types)[i] == kVideoFrameKey && send_stream_[i]) { |
| 706 send_key_frame = true; | 706 send_key_frame = true; |
| 707 break; | 707 break; |
| 708 } | 708 } |
| 709 } | 709 } |
| 710 } | 710 } |
| 711 // The flag modification below (due to forced key frame, RPS, etc.,) for now | |
| 712 // will be the same for all encoders/spatial layers. | |
| 713 // TODO(marpan/holmer): Allow for key frame request to be set per encoder. | |
| 714 bool only_predict_from_key_frame = false; | |
| 715 if (send_key_frame) { | 711 if (send_key_frame) { |
| 716 // Adapt the size of the key frame when in screenshare with 1 temporal | 712 // Adapt the size of the key frame when in screenshare with 1 temporal |
| 717 // layer. | 713 // layer. |
| 718 if (encoders_.size() == 1 && codec_.mode == kScreensharing && | 714 if (encoders_.size() == 1 && codec_.mode == kScreensharing && |
| 719 codec_.VP8()->numberOfTemporalLayers <= 1) { | 715 codec_.VP8()->numberOfTemporalLayers <= 1) { |
| 720 const uint32_t forceKeyFrameIntraTh = 100; | 716 const uint32_t forceKeyFrameIntraTh = 100; |
| 721 vpx_codec_control(&(encoders_[0]), VP8E_SET_MAX_INTRA_BITRATE_PCT, | 717 vpx_codec_control(&(encoders_[0]), VP8E_SET_MAX_INTRA_BITRATE_PCT, |
| 722 forceKeyFrameIntraTh); | 718 forceKeyFrameIntraTh); |
| 723 } | 719 } |
| 724 // Key frame request from caller. | 720 // Key frame request from caller. |
| (...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 763 duration, 0, VPX_DL_REALTIME); | 759 duration, 0, VPX_DL_REALTIME); |
| 764 // Reset specific intra frame thresholds, following the key frame. | 760 // Reset specific intra frame thresholds, following the key frame. |
| 765 if (send_key_frame) { | 761 if (send_key_frame) { |
| 766 vpx_codec_control(&(encoders_[0]), VP8E_SET_MAX_INTRA_BITRATE_PCT, | 762 vpx_codec_control(&(encoders_[0]), VP8E_SET_MAX_INTRA_BITRATE_PCT, |
| 767 rc_max_intra_target_); | 763 rc_max_intra_target_); |
| 768 } | 764 } |
| 769 if (error) | 765 if (error) |
| 770 return WEBRTC_VIDEO_CODEC_ERROR; | 766 return WEBRTC_VIDEO_CODEC_ERROR; |
| 771 timestamp_ += duration; | 767 timestamp_ += duration; |
| 772 // Examines frame timestamps only. | 768 // Examines frame timestamps only. |
| 773 return GetEncodedPartitions(frame, only_predict_from_key_frame); | 769 return GetEncodedPartitions(frame); |
| 774 } | 770 } |
| 775 | 771 |
| 776 // TODO(pbos): Make sure this works properly for >1 encoders. | 772 // TODO(pbos): Make sure this works properly for >1 encoders. |
| 777 int VP8EncoderImpl::UpdateCodecFrameSize(int width, int height) { | 773 int VP8EncoderImpl::UpdateCodecFrameSize(int width, int height) { |
| 778 codec_.width = width; | 774 codec_.width = width; |
| 779 codec_.height = height; | 775 codec_.height = height; |
| 780 if (codec_.numberOfSimulcastStreams <= 1) { | 776 if (codec_.numberOfSimulcastStreams <= 1) { |
| 781 // For now scaling is only used for single-layer streams. | 777 // For now scaling is only used for single-layer streams. |
| 782 codec_.simulcastStream[0].width = width; | 778 codec_.simulcastStream[0].width = width; |
| 783 codec_.simulcastStream[0].height = height; | 779 codec_.simulcastStream[0].height = height; |
| (...skipping 14 matching lines...) Expand all Loading... |
| 798 if (vpx_codec_enc_config_set(&encoders_[0], &configurations_[0])) { | 794 if (vpx_codec_enc_config_set(&encoders_[0], &configurations_[0])) { |
| 799 return WEBRTC_VIDEO_CODEC_ERROR; | 795 return WEBRTC_VIDEO_CODEC_ERROR; |
| 800 } | 796 } |
| 801 return WEBRTC_VIDEO_CODEC_OK; | 797 return WEBRTC_VIDEO_CODEC_OK; |
| 802 } | 798 } |
| 803 | 799 |
| 804 void VP8EncoderImpl::PopulateCodecSpecific( | 800 void VP8EncoderImpl::PopulateCodecSpecific( |
| 805 CodecSpecificInfo* codec_specific, | 801 CodecSpecificInfo* codec_specific, |
| 806 const vpx_codec_cx_pkt_t& pkt, | 802 const vpx_codec_cx_pkt_t& pkt, |
| 807 int stream_idx, | 803 int stream_idx, |
| 808 uint32_t timestamp, | 804 uint32_t timestamp) { |
| 809 bool only_predicting_from_key_frame) { | |
| 810 assert(codec_specific != NULL); | 805 assert(codec_specific != NULL); |
| 811 codec_specific->codecType = kVideoCodecVP8; | 806 codec_specific->codecType = kVideoCodecVP8; |
| 812 codec_specific->codec_name = ImplementationName(); | 807 codec_specific->codec_name = ImplementationName(); |
| 813 CodecSpecificInfoVP8* vp8Info = &(codec_specific->codecSpecific.VP8); | 808 CodecSpecificInfoVP8* vp8Info = &(codec_specific->codecSpecific.VP8); |
| 814 vp8Info->pictureId = picture_id_[stream_idx]; | 809 vp8Info->pictureId = picture_id_[stream_idx]; |
| 815 if (pkt.data.frame.flags & VPX_FRAME_IS_KEY) { | 810 if (pkt.data.frame.flags & VPX_FRAME_IS_KEY) { |
| 816 last_key_frame_picture_id_[stream_idx] = picture_id_[stream_idx]; | 811 last_key_frame_picture_id_[stream_idx] = picture_id_[stream_idx]; |
| 817 } | 812 } |
| 818 vp8Info->simulcastIdx = stream_idx; | 813 vp8Info->simulcastIdx = stream_idx; |
| 819 vp8Info->keyIdx = kNoKeyIdx; // TODO(hlundin) populate this | 814 vp8Info->keyIdx = kNoKeyIdx; // TODO(hlundin) populate this |
| 820 vp8Info->nonReference = | 815 vp8Info->nonReference = |
| 821 (pkt.data.frame.flags & VPX_FRAME_IS_DROPPABLE) ? true : false; | 816 (pkt.data.frame.flags & VPX_FRAME_IS_DROPPABLE) ? true : false; |
| 822 bool base_layer_sync_point = (pkt.data.frame.flags & VPX_FRAME_IS_KEY) || | 817 bool base_layer_sync_point = pkt.data.frame.flags & VPX_FRAME_IS_KEY; |
| 823 only_predicting_from_key_frame; | |
| 824 temporal_layers_[stream_idx]->PopulateCodecSpecific(base_layer_sync_point, | 818 temporal_layers_[stream_idx]->PopulateCodecSpecific(base_layer_sync_point, |
| 825 vp8Info, timestamp); | 819 vp8Info, timestamp); |
| 826 // Prepare next. | 820 // Prepare next. |
| 827 picture_id_[stream_idx] = (picture_id_[stream_idx] + 1) & 0x7FFF; | 821 picture_id_[stream_idx] = (picture_id_[stream_idx] + 1) & 0x7FFF; |
| 828 } | 822 } |
| 829 | 823 |
| 830 int VP8EncoderImpl::GetEncodedPartitions(const VideoFrame& input_image, | 824 int VP8EncoderImpl::GetEncodedPartitions(const VideoFrame& input_image) { |
| 831 bool only_predicting_from_key_frame) { | |
| 832 int bw_resolutions_disabled = | 825 int bw_resolutions_disabled = |
| 833 (encoders_.size() > 1) ? NumStreamsDisabled(send_stream_) : -1; | 826 (encoders_.size() > 1) ? NumStreamsDisabled(send_stream_) : -1; |
| 834 | 827 |
| 835 int stream_idx = static_cast<int>(encoders_.size()) - 1; | 828 int stream_idx = static_cast<int>(encoders_.size()) - 1; |
| 836 int result = WEBRTC_VIDEO_CODEC_OK; | 829 int result = WEBRTC_VIDEO_CODEC_OK; |
| 837 for (size_t encoder_idx = 0; encoder_idx < encoders_.size(); | 830 for (size_t encoder_idx = 0; encoder_idx < encoders_.size(); |
| 838 ++encoder_idx, --stream_idx) { | 831 ++encoder_idx, --stream_idx) { |
| 839 vpx_codec_iter_t iter = NULL; | 832 vpx_codec_iter_t iter = NULL; |
| 840 int part_idx = 0; | 833 int part_idx = 0; |
| 841 encoded_images_[encoder_idx]._length = 0; | 834 encoded_images_[encoder_idx]._length = 0; |
| (...skipping 30 matching lines...) Expand all Loading... |
| 872 default: | 865 default: |
| 873 break; | 866 break; |
| 874 } | 867 } |
| 875 // End of frame | 868 // End of frame |
| 876 if ((pkt->data.frame.flags & VPX_FRAME_IS_FRAGMENT) == 0) { | 869 if ((pkt->data.frame.flags & VPX_FRAME_IS_FRAGMENT) == 0) { |
| 877 // check if encoded frame is a key frame | 870 // check if encoded frame is a key frame |
| 878 if (pkt->data.frame.flags & VPX_FRAME_IS_KEY) { | 871 if (pkt->data.frame.flags & VPX_FRAME_IS_KEY) { |
| 879 encoded_images_[encoder_idx]._frameType = kVideoFrameKey; | 872 encoded_images_[encoder_idx]._frameType = kVideoFrameKey; |
| 880 } | 873 } |
| 881 PopulateCodecSpecific(&codec_specific, *pkt, stream_idx, | 874 PopulateCodecSpecific(&codec_specific, *pkt, stream_idx, |
| 882 input_image.timestamp(), | 875 input_image.timestamp()); |
| 883 only_predicting_from_key_frame); | |
| 884 break; | 876 break; |
| 885 } | 877 } |
| 886 } | 878 } |
| 887 encoded_images_[encoder_idx]._timeStamp = input_image.timestamp(); | 879 encoded_images_[encoder_idx]._timeStamp = input_image.timestamp(); |
| 888 encoded_images_[encoder_idx].capture_time_ms_ = | 880 encoded_images_[encoder_idx].capture_time_ms_ = |
| 889 input_image.render_time_ms(); | 881 input_image.render_time_ms(); |
| 890 encoded_images_[encoder_idx].rotation_ = input_image.rotation(); | 882 encoded_images_[encoder_idx].rotation_ = input_image.rotation(); |
| 891 | 883 |
| 892 int qp = -1; | 884 int qp = -1; |
| 893 vpx_codec_control(&encoders_[encoder_idx], VP8E_GET_LAST_QUANTIZER_64, &qp); | 885 vpx_codec_control(&encoders_[encoder_idx], VP8E_GET_LAST_QUANTIZER_64, &qp); |
| (...skipping 266 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1160 buffer_pool_.Release(); | 1152 buffer_pool_.Release(); |
| 1161 inited_ = false; | 1153 inited_ = false; |
| 1162 return WEBRTC_VIDEO_CODEC_OK; | 1154 return WEBRTC_VIDEO_CODEC_OK; |
| 1163 } | 1155 } |
| 1164 | 1156 |
| 1165 const char* VP8DecoderImpl::ImplementationName() const { | 1157 const char* VP8DecoderImpl::ImplementationName() const { |
| 1166 return "libvpx"; | 1158 return "libvpx"; |
| 1167 } | 1159 } |
| 1168 | 1160 |
| 1169 } // namespace webrtc | 1161 } // namespace webrtc |
| OLD | NEW |