OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 * | 9 * |
10 */ | 10 */ |
(...skipping 46 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
57 } | 57 } |
58 | 58 |
59 void VP9EncoderImpl::EncoderOutputCodedPacketCallback(vpx_codec_cx_pkt* pkt, | 59 void VP9EncoderImpl::EncoderOutputCodedPacketCallback(vpx_codec_cx_pkt* pkt, |
60 void* user_data) { | 60 void* user_data) { |
61 VP9EncoderImpl* enc = static_cast<VP9EncoderImpl*>(user_data); | 61 VP9EncoderImpl* enc = static_cast<VP9EncoderImpl*>(user_data); |
62 enc->GetEncodedLayerFrame(pkt); | 62 enc->GetEncodedLayerFrame(pkt); |
63 } | 63 } |
64 | 64 |
65 VP9EncoderImpl::VP9EncoderImpl() | 65 VP9EncoderImpl::VP9EncoderImpl() |
66 : encoded_image_(), | 66 : encoded_image_(), |
67 encoded_complete_callback_(NULL), | 67 encoded_complete_callback_(nullptr), |
68 inited_(false), | 68 inited_(false), |
69 timestamp_(0), | 69 timestamp_(0), |
70 picture_id_(0), | 70 picture_id_(0), |
71 cpu_speed_(3), | 71 cpu_speed_(3), |
72 rc_max_intra_target_(0), | 72 rc_max_intra_target_(0), |
73 encoder_(NULL), | 73 encoder_(nullptr), |
74 config_(NULL), | 74 config_(nullptr), |
75 raw_(NULL), | 75 raw_(nullptr), |
76 input_image_(NULL), | 76 input_image_(nullptr), |
77 tl0_pic_idx_(0), | 77 tl0_pic_idx_(0), |
78 frames_since_kf_(0), | 78 frames_since_kf_(0), |
79 num_temporal_layers_(0), | 79 num_temporal_layers_(0), |
80 num_spatial_layers_(0), | 80 num_spatial_layers_(0), |
81 is_flexible_mode_(false), | 81 is_flexible_mode_(false), |
82 frames_encoded_(0), | 82 frames_encoded_(0), |
83 // Use two spatial layers when screensharing with flexible mode. | 83 // Use two spatial layers when screensharing with flexible mode. |
84 spatial_layer_(new ScreenshareLayersVP9(2)) { | 84 spatial_layer_(new ScreenshareLayersVP9(2)) { |
85 memset(&codec_, 0, sizeof(codec_)); | 85 memset(&codec_, 0, sizeof(codec_)); |
86 memset(&svc_params_, 0, sizeof(vpx_svc_extra_cfg_t)); | 86 memset(&svc_params_, 0, sizeof(vpx_svc_extra_cfg_t)); |
87 uint32_t seed = rtc::Time32(); | 87 uint32_t seed = rtc::Time32(); |
88 srand(seed); | 88 srand(seed); |
89 } | 89 } |
90 | 90 |
91 VP9EncoderImpl::~VP9EncoderImpl() { | 91 VP9EncoderImpl::~VP9EncoderImpl() { |
92 Release(); | 92 Release(); |
93 } | 93 } |
94 | 94 |
95 int VP9EncoderImpl::Release() { | 95 int VP9EncoderImpl::Release() { |
96 if (encoded_image_._buffer != NULL) { | 96 if (encoded_image_._buffer != nullptr) { |
97 delete[] encoded_image_._buffer; | 97 delete[] encoded_image_._buffer; |
98 encoded_image_._buffer = NULL; | 98 encoded_image_._buffer = nullptr; |
99 } | 99 } |
100 if (encoder_ != NULL) { | 100 if (encoder_ != nullptr) { |
101 if (vpx_codec_destroy(encoder_)) { | 101 if (vpx_codec_destroy(encoder_)) { |
102 return WEBRTC_VIDEO_CODEC_MEMORY; | 102 return WEBRTC_VIDEO_CODEC_MEMORY; |
103 } | 103 } |
104 delete encoder_; | 104 delete encoder_; |
105 encoder_ = NULL; | 105 encoder_ = nullptr; |
106 } | 106 } |
107 if (config_ != NULL) { | 107 if (config_ != nullptr) { |
108 delete config_; | 108 delete config_; |
109 config_ = NULL; | 109 config_ = nullptr; |
110 } | 110 } |
111 if (raw_ != NULL) { | 111 if (raw_ != nullptr) { |
112 vpx_img_free(raw_); | 112 vpx_img_free(raw_); |
113 raw_ = NULL; | 113 raw_ = nullptr; |
114 } | 114 } |
115 inited_ = false; | 115 inited_ = false; |
116 return WEBRTC_VIDEO_CODEC_OK; | 116 return WEBRTC_VIDEO_CODEC_OK; |
117 } | 117 } |
118 | 118 |
119 bool VP9EncoderImpl::ExplicitlyConfiguredSpatialLayers() const { | 119 bool VP9EncoderImpl::ExplicitlyConfiguredSpatialLayers() const { |
120 // We check target_bitrate_bps of the 0th layer to see if the spatial layers | 120 // We check target_bitrate_bps of the 0th layer to see if the spatial layers |
121 // (i.e. bitrates) were explicitly configured. | 121 // (i.e. bitrates) were explicitly configured. |
122 return num_spatial_layers_ > 1 && | 122 return num_spatial_layers_ > 1 && |
123 codec_.spatialLayers[0].target_bitrate_bps > 0; | 123 codec_.spatialLayers[0].target_bitrate_bps > 0; |
(...skipping 101 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
225 // Update encoder context | 225 // Update encoder context |
226 if (vpx_codec_enc_config_set(encoder_, config_)) { | 226 if (vpx_codec_enc_config_set(encoder_, config_)) { |
227 return WEBRTC_VIDEO_CODEC_ERROR; | 227 return WEBRTC_VIDEO_CODEC_ERROR; |
228 } | 228 } |
229 return WEBRTC_VIDEO_CODEC_OK; | 229 return WEBRTC_VIDEO_CODEC_OK; |
230 } | 230 } |
231 | 231 |
232 int VP9EncoderImpl::InitEncode(const VideoCodec* inst, | 232 int VP9EncoderImpl::InitEncode(const VideoCodec* inst, |
233 int number_of_cores, | 233 int number_of_cores, |
234 size_t /*max_payload_size*/) { | 234 size_t /*max_payload_size*/) { |
235 if (inst == NULL) { | 235 if (inst == nullptr) { |
236 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 236 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
237 } | 237 } |
238 if (inst->maxFramerate < 1) { | 238 if (inst->maxFramerate < 1) { |
239 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 239 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
240 } | 240 } |
241 // Allow zero to represent an unspecified maxBitRate | 241 // Allow zero to represent an unspecified maxBitRate |
242 if (inst->maxBitrate > 0 && inst->startBitrate > inst->maxBitrate) { | 242 if (inst->maxBitrate > 0 && inst->startBitrate > inst->maxBitrate) { |
243 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 243 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
244 } | 244 } |
245 if (inst->width < 1 || inst->height < 1) { | 245 if (inst->width < 1 || inst->height < 1) { |
246 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 246 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
247 } | 247 } |
248 if (number_of_cores < 1) { | 248 if (number_of_cores < 1) { |
249 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 249 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
250 } | 250 } |
251 if (inst->VP9().numberOfTemporalLayers > 3) { | 251 if (inst->VP9().numberOfTemporalLayers > 3) { |
252 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 252 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
253 } | 253 } |
254 // libvpx currently supports only one or two spatial layers. | 254 // libvpx currently supports only one or two spatial layers. |
255 if (inst->VP9().numberOfSpatialLayers > 2) { | 255 if (inst->VP9().numberOfSpatialLayers > 2) { |
256 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 256 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
257 } | 257 } |
258 | 258 |
259 int ret_val = Release(); | 259 int ret_val = Release(); |
260 if (ret_val < 0) { | 260 if (ret_val < 0) { |
261 return ret_val; | 261 return ret_val; |
262 } | 262 } |
263 if (encoder_ == NULL) { | 263 if (encoder_ == nullptr) { |
264 encoder_ = new vpx_codec_ctx_t; | 264 encoder_ = new vpx_codec_ctx_t; |
265 // Only randomize pid/tl0 the first time the encoder is initialized | 265 // Only randomize pid/tl0 the first time the encoder is initialized |
266 // in order to not make random jumps mid-stream. | 266 // in order to not make random jumps mid-stream. |
267 picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF; // NOLINT | 267 picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF; // NOLINT |
268 tl0_pic_idx_ = static_cast<uint8_t>(rand()); // NOLINT | 268 tl0_pic_idx_ = static_cast<uint8_t>(rand()); // NOLINT |
269 } | 269 } |
270 if (config_ == NULL) { | 270 if (config_ == nullptr) { |
271 config_ = new vpx_codec_enc_cfg_t; | 271 config_ = new vpx_codec_enc_cfg_t; |
272 } | 272 } |
273 timestamp_ = 0; | 273 timestamp_ = 0; |
274 if (&codec_ != inst) { | 274 if (&codec_ != inst) { |
275 codec_ = *inst; | 275 codec_ = *inst; |
276 } | 276 } |
277 | 277 |
278 num_spatial_layers_ = inst->VP9().numberOfSpatialLayers; | 278 num_spatial_layers_ = inst->VP9().numberOfSpatialLayers; |
279 num_temporal_layers_ = inst->VP9().numberOfTemporalLayers; | 279 num_temporal_layers_ = inst->VP9().numberOfTemporalLayers; |
280 if (num_temporal_layers_ == 0) | 280 if (num_temporal_layers_ == 0) |
281 num_temporal_layers_ = 1; | 281 num_temporal_layers_ = 1; |
282 | 282 |
283 // Allocate memory for encoded image | 283 // Allocate memory for encoded image |
284 if (encoded_image_._buffer != NULL) { | 284 if (encoded_image_._buffer != nullptr) { |
285 delete[] encoded_image_._buffer; | 285 delete[] encoded_image_._buffer; |
286 } | 286 } |
287 encoded_image_._size = CalcBufferSize(kI420, codec_.width, codec_.height); | 287 encoded_image_._size = CalcBufferSize(kI420, codec_.width, codec_.height); |
288 encoded_image_._buffer = new uint8_t[encoded_image_._size]; | 288 encoded_image_._buffer = new uint8_t[encoded_image_._size]; |
289 encoded_image_._completeFrame = true; | 289 encoded_image_._completeFrame = true; |
290 // Creating a wrapper to the image - setting image data to NULL. Actual | 290 // Creating a wrapper to the image - setting image data to null. Actual |
291 // pointer will be set in encode. Setting align to 1, as it is meaningless | 291 // pointer will be set in encode. Setting align to 1, as it is meaningless |
292 // (actual memory is not allocated). | 292 // (actual memory is not allocated). |
293 raw_ = vpx_img_wrap(NULL, VPX_IMG_FMT_I420, codec_.width, codec_.height, 1, | 293 raw_ = vpx_img_wrap(nullptr, VPX_IMG_FMT_I420, codec_.width, codec_.height, 1, |
294 NULL); | 294 nullptr); |
295 // Populate encoder configuration with default values. | 295 // Populate encoder configuration with default values. |
296 if (vpx_codec_enc_config_default(vpx_codec_vp9_cx(), config_, 0)) { | 296 if (vpx_codec_enc_config_default(vpx_codec_vp9_cx(), config_, 0)) { |
297 return WEBRTC_VIDEO_CODEC_ERROR; | 297 return WEBRTC_VIDEO_CODEC_ERROR; |
298 } | 298 } |
299 config_->g_w = codec_.width; | 299 config_->g_w = codec_.width; |
300 config_->g_h = codec_.height; | 300 config_->g_h = codec_.height; |
301 config_->rc_target_bitrate = inst->startBitrate; // in kbit/s | 301 config_->rc_target_bitrate = inst->startBitrate; // in kbit/s |
302 config_->g_error_resilient = 1; | 302 config_->g_error_resilient = 1; |
303 // Setting the time base of the codec. | 303 // Setting the time base of the codec. |
304 config_->g_timebase.num = 1; | 304 config_->g_timebase.num = 1; |
(...skipping 175 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
480 const uint32_t min_intra_size = 300; | 480 const uint32_t min_intra_size = 300; |
481 return (target_pct < min_intra_size) ? min_intra_size : target_pct; | 481 return (target_pct < min_intra_size) ? min_intra_size : target_pct; |
482 } | 482 } |
483 | 483 |
484 int VP9EncoderImpl::Encode(const VideoFrame& input_image, | 484 int VP9EncoderImpl::Encode(const VideoFrame& input_image, |
485 const CodecSpecificInfo* codec_specific_info, | 485 const CodecSpecificInfo* codec_specific_info, |
486 const std::vector<FrameType>* frame_types) { | 486 const std::vector<FrameType>* frame_types) { |
487 if (!inited_) { | 487 if (!inited_) { |
488 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 488 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
489 } | 489 } |
490 if (encoded_complete_callback_ == NULL) { | 490 if (encoded_complete_callback_ == nullptr) { |
491 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 491 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
492 } | 492 } |
493 FrameType frame_type = kVideoFrameDelta; | 493 FrameType frame_type = kVideoFrameDelta; |
494 // We only support one stream at the moment. | 494 // We only support one stream at the moment. |
495 if (frame_types && frame_types->size() > 0) { | 495 if (frame_types && frame_types->size() > 0) { |
496 frame_type = (*frame_types)[0]; | 496 frame_type = (*frame_types)[0]; |
497 } | 497 } |
498 RTC_DCHECK_EQ(input_image.width(), raw_->d_w); | 498 RTC_DCHECK_EQ(input_image.width(), raw_->d_w); |
499 RTC_DCHECK_EQ(input_image.height(), raw_->d_h); | 499 RTC_DCHECK_EQ(input_image.height(), raw_->d_h); |
500 | 500 |
(...skipping 51 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
552 return WEBRTC_VIDEO_CODEC_ERROR; | 552 return WEBRTC_VIDEO_CODEC_ERROR; |
553 } | 553 } |
554 timestamp_ += duration; | 554 timestamp_ += duration; |
555 | 555 |
556 return WEBRTC_VIDEO_CODEC_OK; | 556 return WEBRTC_VIDEO_CODEC_OK; |
557 } | 557 } |
558 | 558 |
559 void VP9EncoderImpl::PopulateCodecSpecific(CodecSpecificInfo* codec_specific, | 559 void VP9EncoderImpl::PopulateCodecSpecific(CodecSpecificInfo* codec_specific, |
560 const vpx_codec_cx_pkt& pkt, | 560 const vpx_codec_cx_pkt& pkt, |
561 uint32_t timestamp) { | 561 uint32_t timestamp) { |
562 assert(codec_specific != NULL); | 562 assert(codec_specific != nullptr); |
563 codec_specific->codecType = kVideoCodecVP9; | 563 codec_specific->codecType = kVideoCodecVP9; |
564 codec_specific->codec_name = ImplementationName(); | 564 codec_specific->codec_name = ImplementationName(); |
565 CodecSpecificInfoVP9* vp9_info = &(codec_specific->codecSpecific.VP9); | 565 CodecSpecificInfoVP9* vp9_info = &(codec_specific->codecSpecific.VP9); |
566 // TODO(asapersson): Set correct value. | 566 // TODO(asapersson): Set correct value. |
567 vp9_info->inter_pic_predicted = | 567 vp9_info->inter_pic_predicted = |
568 (pkt.data.frame.flags & VPX_FRAME_IS_KEY) ? false : true; | 568 (pkt.data.frame.flags & VPX_FRAME_IS_KEY) ? false : true; |
569 vp9_info->flexible_mode = codec_.VP9()->flexibleMode; | 569 vp9_info->flexible_mode = codec_.VP9()->flexibleMode; |
570 vp9_info->ss_data_available = | 570 vp9_info->ss_data_available = |
571 ((pkt.data.frame.flags & VPX_FRAME_IS_KEY) && !codec_.VP9()->flexibleMode) | 571 ((pkt.data.frame.flags & VPX_FRAME_IS_KEY) && !codec_.VP9()->flexibleMode) |
572 ? true | 572 ? true |
(...skipping 259 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
832 | 832 |
833 bool VP9Decoder::IsSupported() { | 833 bool VP9Decoder::IsSupported() { |
834 return true; | 834 return true; |
835 } | 835 } |
836 | 836 |
837 VP9Decoder* VP9Decoder::Create() { | 837 VP9Decoder* VP9Decoder::Create() { |
838 return new VP9DecoderImpl(); | 838 return new VP9DecoderImpl(); |
839 } | 839 } |
840 | 840 |
841 VP9DecoderImpl::VP9DecoderImpl() | 841 VP9DecoderImpl::VP9DecoderImpl() |
842 : decode_complete_callback_(NULL), | 842 : decode_complete_callback_(nullptr), |
843 inited_(false), | 843 inited_(false), |
844 decoder_(NULL), | 844 decoder_(nullptr), |
845 key_frame_required_(true) { | 845 key_frame_required_(true) { |
846 memset(&codec_, 0, sizeof(codec_)); | 846 memset(&codec_, 0, sizeof(codec_)); |
847 } | 847 } |
848 | 848 |
849 VP9DecoderImpl::~VP9DecoderImpl() { | 849 VP9DecoderImpl::~VP9DecoderImpl() { |
850 inited_ = true; // in order to do the actual release | 850 inited_ = true; // in order to do the actual release |
851 Release(); | 851 Release(); |
852 int num_buffers_in_use = frame_buffer_pool_.GetNumBuffersInUse(); | 852 int num_buffers_in_use = frame_buffer_pool_.GetNumBuffersInUse(); |
853 if (num_buffers_in_use > 0) { | 853 if (num_buffers_in_use > 0) { |
854 // The frame buffers are reference counted and frames are exposed after | 854 // The frame buffers are reference counted and frames are exposed after |
855 // decoding. There may be valid usage cases where previous frames are still | 855 // decoding. There may be valid usage cases where previous frames are still |
856 // referenced after ~VP9DecoderImpl that is not a leak. | 856 // referenced after ~VP9DecoderImpl that is not a leak. |
857 LOG(LS_INFO) << num_buffers_in_use << " Vp9FrameBuffers are still " | 857 LOG(LS_INFO) << num_buffers_in_use << " Vp9FrameBuffers are still " |
858 << "referenced during ~VP9DecoderImpl."; | 858 << "referenced during ~VP9DecoderImpl."; |
859 } | 859 } |
860 } | 860 } |
861 | 861 |
862 int VP9DecoderImpl::InitDecode(const VideoCodec* inst, int number_of_cores) { | 862 int VP9DecoderImpl::InitDecode(const VideoCodec* inst, int number_of_cores) { |
863 if (inst == NULL) { | 863 if (inst == nullptr) { |
864 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 864 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
865 } | 865 } |
866 int ret_val = Release(); | 866 int ret_val = Release(); |
867 if (ret_val < 0) { | 867 if (ret_val < 0) { |
868 return ret_val; | 868 return ret_val; |
869 } | 869 } |
870 if (decoder_ == NULL) { | 870 if (decoder_ == nullptr) { |
871 decoder_ = new vpx_codec_ctx_t; | 871 decoder_ = new vpx_codec_ctx_t; |
872 } | 872 } |
873 vpx_codec_dec_cfg_t cfg; | 873 vpx_codec_dec_cfg_t cfg; |
874 // Setting number of threads to a constant value (1) | 874 // Setting number of threads to a constant value (1) |
875 cfg.threads = 1; | 875 cfg.threads = 1; |
876 cfg.h = cfg.w = 0; // set after decode | 876 cfg.h = cfg.w = 0; // set after decode |
877 vpx_codec_flags_t flags = 0; | 877 vpx_codec_flags_t flags = 0; |
878 if (vpx_codec_dec_init(decoder_, vpx_codec_vp9_dx(), &cfg, flags)) { | 878 if (vpx_codec_dec_init(decoder_, vpx_codec_vp9_dx(), &cfg, flags)) { |
879 return WEBRTC_VIDEO_CODEC_MEMORY; | 879 return WEBRTC_VIDEO_CODEC_MEMORY; |
880 } | 880 } |
(...skipping 13 matching lines...) Expand all Loading... |
894 } | 894 } |
895 | 895 |
896 int VP9DecoderImpl::Decode(const EncodedImage& input_image, | 896 int VP9DecoderImpl::Decode(const EncodedImage& input_image, |
897 bool missing_frames, | 897 bool missing_frames, |
898 const RTPFragmentationHeader* fragmentation, | 898 const RTPFragmentationHeader* fragmentation, |
899 const CodecSpecificInfo* codec_specific_info, | 899 const CodecSpecificInfo* codec_specific_info, |
900 int64_t /*render_time_ms*/) { | 900 int64_t /*render_time_ms*/) { |
901 if (!inited_) { | 901 if (!inited_) { |
902 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 902 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
903 } | 903 } |
904 if (decode_complete_callback_ == NULL) { | 904 if (decode_complete_callback_ == nullptr) { |
905 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 905 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
906 } | 906 } |
907 // Always start with a complete key frame. | 907 // Always start with a complete key frame. |
908 if (key_frame_required_) { | 908 if (key_frame_required_) { |
909 if (input_image._frameType != kVideoFrameKey) | 909 if (input_image._frameType != kVideoFrameKey) |
910 return WEBRTC_VIDEO_CODEC_ERROR; | 910 return WEBRTC_VIDEO_CODEC_ERROR; |
911 // We have a key frame - is it complete? | 911 // We have a key frame - is it complete? |
912 if (input_image._completeFrame) { | 912 if (input_image._completeFrame) { |
913 key_frame_required_ = false; | 913 key_frame_required_ = false; |
914 } else { | 914 } else { |
915 return WEBRTC_VIDEO_CODEC_ERROR; | 915 return WEBRTC_VIDEO_CODEC_ERROR; |
916 } | 916 } |
917 } | 917 } |
918 vpx_codec_iter_t iter = NULL; | 918 vpx_codec_iter_t iter = nullptr; |
919 vpx_image_t* img; | 919 vpx_image_t* img; |
920 uint8_t* buffer = input_image._buffer; | 920 uint8_t* buffer = input_image._buffer; |
921 if (input_image._length == 0) { | 921 if (input_image._length == 0) { |
922 buffer = NULL; // Triggers full frame concealment. | 922 buffer = nullptr; // Triggers full frame concealment. |
923 } | 923 } |
924 // During decode libvpx may get and release buffers from |frame_buffer_pool_|. | 924 // During decode libvpx may get and release buffers from |frame_buffer_pool_|. |
925 // In practice libvpx keeps a few (~3-4) buffers alive at a time. | 925 // In practice libvpx keeps a few (~3-4) buffers alive at a time. |
926 if (vpx_codec_decode(decoder_, buffer, | 926 if (vpx_codec_decode(decoder_, buffer, |
927 static_cast<unsigned int>(input_image._length), 0, | 927 static_cast<unsigned int>(input_image._length), 0, |
928 VPX_DL_REALTIME)) { | 928 VPX_DL_REALTIME)) { |
929 return WEBRTC_VIDEO_CODEC_ERROR; | 929 return WEBRTC_VIDEO_CODEC_ERROR; |
930 } | 930 } |
931 // |img->fb_priv| contains the image data, a reference counted Vp9FrameBuffer. | 931 // |img->fb_priv| contains the image data, a reference counted Vp9FrameBuffer. |
932 // It may be released by libvpx during future vpx_codec_decode or | 932 // It may be released by libvpx during future vpx_codec_decode or |
933 // vpx_codec_destroy calls. | 933 // vpx_codec_destroy calls. |
934 img = vpx_codec_get_frame(decoder_, &iter); | 934 img = vpx_codec_get_frame(decoder_, &iter); |
935 int ret = ReturnFrame(img, input_image._timeStamp, input_image.ntp_time_ms_); | 935 int ret = ReturnFrame(img, input_image._timeStamp, input_image.ntp_time_ms_); |
936 if (ret != 0) { | 936 if (ret != 0) { |
937 return ret; | 937 return ret; |
938 } | 938 } |
939 return WEBRTC_VIDEO_CODEC_OK; | 939 return WEBRTC_VIDEO_CODEC_OK; |
940 } | 940 } |
941 | 941 |
942 int VP9DecoderImpl::ReturnFrame(const vpx_image_t* img, | 942 int VP9DecoderImpl::ReturnFrame(const vpx_image_t* img, |
943 uint32_t timestamp, | 943 uint32_t timestamp, |
944 int64_t ntp_time_ms) { | 944 int64_t ntp_time_ms) { |
945 if (img == NULL) { | 945 if (img == nullptr) { |
946 // Decoder OK and NULL image => No show frame. | 946 // Decoder OK and null image => No show frame. |
947 return WEBRTC_VIDEO_CODEC_NO_OUTPUT; | 947 return WEBRTC_VIDEO_CODEC_NO_OUTPUT; |
948 } | 948 } |
949 | 949 |
950 // This buffer contains all of |img|'s image data, a reference counted | 950 // This buffer contains all of |img|'s image data, a reference counted |
951 // Vp9FrameBuffer. (libvpx is done with the buffers after a few | 951 // Vp9FrameBuffer. (libvpx is done with the buffers after a few |
952 // vpx_codec_decode calls or vpx_codec_destroy). | 952 // vpx_codec_decode calls or vpx_codec_destroy). |
953 Vp9FrameBufferPool::Vp9FrameBuffer* img_buffer = | 953 Vp9FrameBufferPool::Vp9FrameBuffer* img_buffer = |
954 static_cast<Vp9FrameBufferPool::Vp9FrameBuffer*>(img->fb_priv); | 954 static_cast<Vp9FrameBufferPool::Vp9FrameBuffer*>(img->fb_priv); |
955 // The buffer can be used directly by the VideoFrame (without copy) by | 955 // The buffer can be used directly by the VideoFrame (without copy) by |
956 // using a WrappedI420Buffer. | 956 // using a WrappedI420Buffer. |
(...skipping 18 matching lines...) Expand all Loading... |
975 return WEBRTC_VIDEO_CODEC_OK; | 975 return WEBRTC_VIDEO_CODEC_OK; |
976 } | 976 } |
977 | 977 |
978 int VP9DecoderImpl::RegisterDecodeCompleteCallback( | 978 int VP9DecoderImpl::RegisterDecodeCompleteCallback( |
979 DecodedImageCallback* callback) { | 979 DecodedImageCallback* callback) { |
980 decode_complete_callback_ = callback; | 980 decode_complete_callback_ = callback; |
981 return WEBRTC_VIDEO_CODEC_OK; | 981 return WEBRTC_VIDEO_CODEC_OK; |
982 } | 982 } |
983 | 983 |
984 int VP9DecoderImpl::Release() { | 984 int VP9DecoderImpl::Release() { |
985 if (decoder_ != NULL) { | 985 if (decoder_ != nullptr) { |
986 // When a codec is destroyed libvpx will release any buffers of | 986 // When a codec is destroyed libvpx will release any buffers of |
987 // |frame_buffer_pool_| it is currently using. | 987 // |frame_buffer_pool_| it is currently using. |
988 if (vpx_codec_destroy(decoder_)) { | 988 if (vpx_codec_destroy(decoder_)) { |
989 return WEBRTC_VIDEO_CODEC_MEMORY; | 989 return WEBRTC_VIDEO_CODEC_MEMORY; |
990 } | 990 } |
991 delete decoder_; | 991 delete decoder_; |
992 decoder_ = NULL; | 992 decoder_ = nullptr; |
993 } | 993 } |
994 // Releases buffers from the pool. Any buffers not in use are deleted. Buffers | 994 // Releases buffers from the pool. Any buffers not in use are deleted. Buffers |
995 // still referenced externally are deleted once fully released, not returning | 995 // still referenced externally are deleted once fully released, not returning |
996 // to the pool. | 996 // to the pool. |
997 frame_buffer_pool_.ClearPool(); | 997 frame_buffer_pool_.ClearPool(); |
998 inited_ = false; | 998 inited_ = false; |
999 return WEBRTC_VIDEO_CODEC_OK; | 999 return WEBRTC_VIDEO_CODEC_OK; |
1000 } | 1000 } |
1001 | 1001 |
1002 const char* VP9DecoderImpl::ImplementationName() const { | 1002 const char* VP9DecoderImpl::ImplementationName() const { |
1003 return "libvpx"; | 1003 return "libvpx"; |
1004 } | 1004 } |
1005 | 1005 |
1006 } // namespace webrtc | 1006 } // namespace webrtc |
OLD | NEW |