OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 * | 9 * |
10 */ | 10 */ |
(...skipping 36 matching lines...)
47 return 7; | 47 return 7; |
48 #endif | 48 #endif |
49 } | 49 } |
50 | 50 |
51 VP9Encoder* VP9Encoder::Create() { | 51 VP9Encoder* VP9Encoder::Create() { |
52 return new VP9EncoderImpl(); | 52 return new VP9EncoderImpl(); |
53 } | 53 } |
54 | 54 |
55 void VP9EncoderImpl::EncoderOutputCodedPacketCallback(vpx_codec_cx_pkt* pkt, | 55 void VP9EncoderImpl::EncoderOutputCodedPacketCallback(vpx_codec_cx_pkt* pkt, |
56 void* user_data) { | 56 void* user_data) { |
57 VP9EncoderImpl* enc = (VP9EncoderImpl*)(user_data); | 57 VP9EncoderImpl* enc = static_cast<VP9EncoderImpl*>(user_data); |
58 enc->GetEncodedLayerFrame(pkt); | 58 enc->GetEncodedLayerFrame(pkt); |
59 } | 59 } |
60 | 60 |
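Note: EncoderOutputCodedPacketCallback above is the usual C-callback trampoline: libvpx can only invoke a plain function pointer, so |user_data| carries the C++ instance and is cast back before forwarding. A minimal sketch of the pattern, with purely illustrative names (not libvpx or WebRTC API):

    // All types and names below are placeholders for illustration.
    struct Packet {};

    class Encoder {
     public:
      static void OnPacketTrampoline(Packet* pkt, void* user_data) {
        // Recover the instance that registered itself as |user_data|.
        static_cast<Encoder*>(user_data)->OnPacket(pkt);
      }

     private:
      void OnPacket(Packet* /*pkt*/) {
        // Per-instance handling of one coded packet (e.g. one spatial layer).
      }
    };

    // Registration passes the instance as the opaque pointer, e.g.:
    //   RegisterCallback(&Encoder::OnPacketTrampoline, static_cast<void*>(this));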
61 VP9EncoderImpl::VP9EncoderImpl() | 61 VP9EncoderImpl::VP9EncoderImpl() |
62 : encoded_image_(), | 62 : encoded_image_(), |
63 encoded_complete_callback_(NULL), | 63 encoded_complete_callback_(NULL), |
64 inited_(false), | 64 inited_(false), |
65 timestamp_(0), | 65 timestamp_(0), |
66 picture_id_(0), | 66 picture_id_(0), |
67 cpu_speed_(3), | 67 cpu_speed_(3), |
(...skipping 13 matching lines...)
81 uint32_t seed = static_cast<uint32_t>(TickTime::MillisecondTimestamp()); | 81 uint32_t seed = static_cast<uint32_t>(TickTime::MillisecondTimestamp()); |
82 srand(seed); | 82 srand(seed); |
83 } | 83 } |
84 | 84 |
85 VP9EncoderImpl::~VP9EncoderImpl() { | 85 VP9EncoderImpl::~VP9EncoderImpl() { |
86 Release(); | 86 Release(); |
87 } | 87 } |
88 | 88 |
89 int VP9EncoderImpl::Release() { | 89 int VP9EncoderImpl::Release() { |
90 if (encoded_image_._buffer != NULL) { | 90 if (encoded_image_._buffer != NULL) { |
91 delete [] encoded_image_._buffer; | 91 delete[] encoded_image_._buffer; |
92 encoded_image_._buffer = NULL; | 92 encoded_image_._buffer = NULL; |
93 } | 93 } |
94 if (encoder_ != NULL) { | 94 if (encoder_ != NULL) { |
95 if (vpx_codec_destroy(encoder_)) { | 95 if (vpx_codec_destroy(encoder_)) { |
96 return WEBRTC_VIDEO_CODEC_MEMORY; | 96 return WEBRTC_VIDEO_CODEC_MEMORY; |
97 } | 97 } |
98 delete encoder_; | 98 delete encoder_; |
99 encoder_ = NULL; | 99 encoder_ = NULL; |
100 } | 100 } |
101 if (config_ != NULL) { | 101 if (config_ != NULL) { |
(...skipping 158 matching lines...)
260 if (&codec_ != inst) { | 260 if (&codec_ != inst) { |
261 codec_ = *inst; | 261 codec_ = *inst; |
262 } | 262 } |
263 | 263 |
264 num_spatial_layers_ = inst->codecSpecific.VP9.numberOfSpatialLayers; | 264 num_spatial_layers_ = inst->codecSpecific.VP9.numberOfSpatialLayers; |
265 num_temporal_layers_ = inst->codecSpecific.VP9.numberOfTemporalLayers; | 265 num_temporal_layers_ = inst->codecSpecific.VP9.numberOfTemporalLayers; |
266 if (num_temporal_layers_ == 0) | 266 if (num_temporal_layers_ == 0) |
267 num_temporal_layers_ = 1; | 267 num_temporal_layers_ = 1; |
268 | 268 |
269 // Random start 16 bits is enough. | 269 // Random start 16 bits is enough. |
270 picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF; | 270 picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF; // NOLINT |
271 // Allocate memory for encoded image | 271 // Allocate memory for encoded image |
272 if (encoded_image_._buffer != NULL) { | 272 if (encoded_image_._buffer != NULL) { |
273 delete [] encoded_image_._buffer; | 273 delete[] encoded_image_._buffer; |
274 } | 274 } |
275 encoded_image_._size = CalcBufferSize(kI420, codec_.width, codec_.height); | 275 encoded_image_._size = CalcBufferSize(kI420, codec_.width, codec_.height); |
276 encoded_image_._buffer = new uint8_t[encoded_image_._size]; | 276 encoded_image_._buffer = new uint8_t[encoded_image_._size]; |
277 encoded_image_._completeFrame = true; | 277 encoded_image_._completeFrame = true; |
278 // Creating a wrapper to the image - setting image data to NULL. Actual | 278 // Creating a wrapper to the image - setting image data to NULL. Actual |
279 // pointer will be set in encode. Setting align to 1, as it is meaningless | 279 // pointer will be set in encode. Setting align to 1, as it is meaningless |
280 // (actual memory is not allocated). | 280 // (actual memory is not allocated). |
281 raw_ = vpx_img_wrap(NULL, VPX_IMG_FMT_I420, codec_.width, codec_.height, | 281 raw_ = vpx_img_wrap(NULL, VPX_IMG_FMT_I420, codec_.width, codec_.height, 1, |
282 1, NULL); | 282 NULL); |
283 // Populate encoder configuration with default values. | 283 // Populate encoder configuration with default values. |
284 if (vpx_codec_enc_config_default(vpx_codec_vp9_cx(), config_, 0)) { | 284 if (vpx_codec_enc_config_default(vpx_codec_vp9_cx(), config_, 0)) { |
285 return WEBRTC_VIDEO_CODEC_ERROR; | 285 return WEBRTC_VIDEO_CODEC_ERROR; |
286 } | 286 } |
287 config_->g_w = codec_.width; | 287 config_->g_w = codec_.width; |
288 config_->g_h = codec_.height; | 288 config_->g_h = codec_.height; |
289 config_->rc_target_bitrate = inst->startBitrate; // in kbit/s | 289 config_->rc_target_bitrate = inst->startBitrate; // in kbit/s |
290 config_->g_error_resilient = 1; | 290 config_->g_error_resilient = 1; |
291 // Setting the time base of the codec. | 291 // Setting the time base of the codec. |
292 config_->g_timebase.num = 1; | 292 config_->g_timebase.num = 1; |
293 config_->g_timebase.den = 90000; | 293 config_->g_timebase.den = 90000; |
294 config_->g_lag_in_frames = 0; // 0- no frame lagging | 294 config_->g_lag_in_frames = 0; // 0- no frame lagging |
295 config_->g_threads = 1; | 295 config_->g_threads = 1; |
296 // Rate control settings. | 296 // Rate control settings. |
297 config_->rc_dropframe_thresh = inst->codecSpecific.VP9.frameDroppingOn ? | 297 config_->rc_dropframe_thresh = |
298 30 : 0; | 298 inst->codecSpecific.VP9.frameDroppingOn ? 30 : 0; |
299 config_->rc_end_usage = VPX_CBR; | 299 config_->rc_end_usage = VPX_CBR; |
300 config_->g_pass = VPX_RC_ONE_PASS; | 300 config_->g_pass = VPX_RC_ONE_PASS; |
301 config_->rc_min_quantizer = 2; | 301 config_->rc_min_quantizer = 2; |
302 config_->rc_max_quantizer = 52; | 302 config_->rc_max_quantizer = 52; |
303 config_->rc_undershoot_pct = 50; | 303 config_->rc_undershoot_pct = 50; |
304 config_->rc_overshoot_pct = 50; | 304 config_->rc_overshoot_pct = 50; |
305 config_->rc_buf_initial_sz = 500; | 305 config_->rc_buf_initial_sz = 500; |
306 config_->rc_buf_optimal_sz = 600; | 306 config_->rc_buf_optimal_sz = 600; |
307 config_->rc_buf_sz = 1000; | 307 config_->rc_buf_sz = 1000; |
308 // Set the maximum target size of any key-frame. | 308 // Set the maximum target size of any key-frame. |
309 rc_max_intra_target_ = MaxIntraTarget(config_->rc_buf_optimal_sz); | 309 rc_max_intra_target_ = MaxIntraTarget(config_->rc_buf_optimal_sz); |
310 if (inst->codecSpecific.VP9.keyFrameInterval > 0) { | 310 if (inst->codecSpecific.VP9.keyFrameInterval > 0) { |
311 config_->kf_mode = VPX_KF_AUTO; | 311 config_->kf_mode = VPX_KF_AUTO; |
312 config_->kf_max_dist = inst->codecSpecific.VP9.keyFrameInterval; | 312 config_->kf_max_dist = inst->codecSpecific.VP9.keyFrameInterval; |
313 // Needs to be set (in svc mode) to get correct periodic key frame interval | 313 // Needs to be set (in svc mode) to get correct periodic key frame interval |
314 // (will have no effect in non-svc). | 314 // (will have no effect in non-svc). |
315 config_->kf_min_dist = config_->kf_max_dist; | 315 config_->kf_min_dist = config_->kf_max_dist; |
316 } else { | 316 } else { |
317 config_->kf_mode = VPX_KF_DISABLED; | 317 config_->kf_mode = VPX_KF_DISABLED; |
318 } | 318 } |
319 config_->rc_resize_allowed = inst->codecSpecific.VP9.automaticResizeOn ? | 319 config_->rc_resize_allowed = |
320 1 : 0; | 320 inst->codecSpecific.VP9.automaticResizeOn ? 1 : 0; |
321 // Determine number of threads based on the image size and #cores. | 321 // Determine number of threads based on the image size and #cores. |
322 config_->g_threads = NumberOfThreads(config_->g_w, | 322 config_->g_threads = |
323 config_->g_h, | 323 NumberOfThreads(config_->g_w, config_->g_h, number_of_cores); |
324 number_of_cores); | |
325 | 324 |
326 cpu_speed_ = GetCpuSpeed(config_->g_w, config_->g_h); | 325 cpu_speed_ = GetCpuSpeed(config_->g_w, config_->g_h); |
327 | 326 |
328 // TODO(asapersson): Check configuration of temporal switch up and increase | 327 // TODO(asapersson): Check configuration of temporal switch up and increase |
329 // pattern length. | 328 // pattern length. |
330 is_flexible_mode_ = inst->codecSpecific.VP9.flexibleMode; | 329 is_flexible_mode_ = inst->codecSpecific.VP9.flexibleMode; |
331 if (is_flexible_mode_) { | 330 if (is_flexible_mode_) { |
332 config_->temporal_layering_mode = VP9E_TEMPORAL_LAYERING_MODE_BYPASS; | 331 config_->temporal_layering_mode = VP9E_TEMPORAL_LAYERING_MODE_BYPASS; |
333 config_->ts_number_layers = num_temporal_layers_; | 332 config_->ts_number_layers = num_temporal_layers_; |
334 if (codec_.mode == kScreensharing) | 333 if (codec_.mode == kScreensharing) |
(...skipping 23 matching lines...)
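Note: the vpx_img_wrap(NULL, ...) call earlier in InitEncode() only builds a frame descriptor; per the CL's own comment, no pixel memory is allocated and the plane pointers are set per input frame before vpx_codec_encode() is called. A minimal sketch of that pattern, with placeholder plane and stride parameters:

    #include <cstdint>

    #include "vpx/vpx_image.h"

    // Sketch only: wrap a libvpx image descriptor around externally owned
    // I420 planes. |y|, |u|, |v| and the strides stand in for the caller's
    // frame buffer accessors.
    vpx_image_t* WrapI420(int width, int height,
                          uint8_t* y, int y_stride,
                          uint8_t* u, int u_stride,
                          uint8_t* v, int v_stride) {
      vpx_image_t* img =
          vpx_img_wrap(nullptr, VPX_IMG_FMT_I420, width, height, 1, nullptr);
      if (img == nullptr)
        return nullptr;
      img->planes[VPX_PLANE_Y] = y;
      img->planes[VPX_PLANE_U] = u;
      img->planes[VPX_PLANE_V] = v;
      img->stride[VPX_PLANE_Y] = y_stride;
      img->stride[VPX_PLANE_U] = u_stride;
      img->stride[VPX_PLANE_V] = v_stride;
      return img;
    }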
358 config_->ts_rate_decimator[2] = 1; | 357 config_->ts_rate_decimator[2] = 1; |
359 config_->ts_periodicity = 4; | 358 config_->ts_periodicity = 4; |
360 config_->ts_layer_id[0] = 0; | 359 config_->ts_layer_id[0] = 0; |
361 config_->ts_layer_id[1] = 2; | 360 config_->ts_layer_id[1] = 2; |
362 config_->ts_layer_id[2] = 1; | 361 config_->ts_layer_id[2] = 1; |
363 config_->ts_layer_id[3] = 2; | 362 config_->ts_layer_id[3] = 2; |
364 } else { | 363 } else { |
365 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 364 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
366 } | 365 } |
367 | 366 |
368 tl0_pic_idx_ = static_cast<uint8_t>(rand()); | 367 tl0_pic_idx_ = static_cast<uint8_t>(rand()); // NOLINT |
369 | 368 |
370 return InitAndSetControlSettings(inst); | 369 return InitAndSetControlSettings(inst); |
371 } | 370 } |
372 | 371 |
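Note: the three-temporal-layer branch shown above registers the layer-id cadence 0, 2, 1, 2 with ts_periodicity = 4. A worked check of what that cadence means for an assumed 30 fps input (the frame rate is an assumption, not a value from the CL):

    #include <cstdio>

    int main() {
      // A frame with layer id L is decodable at target layer T when L <= T.
      const int kLayerIds[4] = {0, 2, 1, 2};  // cadence configured in the CL
      const double kInputFps = 30.0;          // example input rate (assumption)
      for (int target = 0; target < 3; ++target) {
        int frames = 0;
        for (int i = 0; i < 4; ++i) {
          if (kLayerIds[i] <= target) ++frames;
        }
        std::printf("TL%d: %d/4 frames -> %.1f fps\n", target, frames,
                    kInputFps * frames / 4);
      }
      return 0;  // prints 7.5, 15 and 30 fps for TL0, TL1 and TL2
    }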
373 int VP9EncoderImpl::NumberOfThreads(int width, | 372 int VP9EncoderImpl::NumberOfThreads(int width, |
374 int height, | 373 int height, |
375 int number_of_cores) { | 374 int number_of_cores) { |
376 // Keep the number of encoder threads equal to the possible number of column | 375 // Keep the number of encoder threads equal to the possible number of column |
377 // tiles, which is (1, 2, 4, 8). See comments below for VP9E_SET_TILE_COLUMNS. | 376 // tiles, which is (1, 2, 4, 8). See comments below for VP9E_SET_TILE_COLUMNS. |
378 if (width * height >= 1280 * 720 && number_of_cores > 4) { | 377 if (width * height >= 1280 * 720 && number_of_cores > 4) { |
(...skipping 46 matching lines...)
425 | 424 |
426 vpx_codec_control( | 425 vpx_codec_control( |
427 encoder_, VP9E_SET_SVC, | 426 encoder_, VP9E_SET_SVC, |
428 (num_temporal_layers_ > 1 || num_spatial_layers_ > 1) ? 1 : 0); | 427 (num_temporal_layers_ > 1 || num_spatial_layers_ > 1) ? 1 : 0); |
429 if (num_temporal_layers_ > 1 || num_spatial_layers_ > 1) { | 428 if (num_temporal_layers_ > 1 || num_spatial_layers_ > 1) { |
430 vpx_codec_control(encoder_, VP9E_SET_SVC_PARAMETERS, | 429 vpx_codec_control(encoder_, VP9E_SET_SVC_PARAMETERS, |
431 &svc_internal_.svc_params); | 430 &svc_internal_.svc_params); |
432 } | 431 } |
433 // Register callback for getting each spatial layer. | 432 // Register callback for getting each spatial layer. |
434 vpx_codec_priv_output_cx_pkt_cb_pair_t cbp = { | 433 vpx_codec_priv_output_cx_pkt_cb_pair_t cbp = { |
435 VP9EncoderImpl::EncoderOutputCodedPacketCallback, (void*)(this)}; | 434 VP9EncoderImpl::EncoderOutputCodedPacketCallback, |
436 vpx_codec_control(encoder_, VP9E_REGISTER_CX_CALLBACK, (void*)(&cbp)); | 435 reinterpret_cast<void*>(this)}; |
| 436 vpx_codec_control(encoder_, VP9E_REGISTER_CX_CALLBACK, |
| 437 reinterpret_cast<void*>(&cbp)); |
437 | 438 |
438 // Control function to set the number of column tiles in encoding a frame, in | 439 // Control function to set the number of column tiles in encoding a frame, in |
439 // log2 unit: e.g., 0 = 1 tile column, 1 = 2 tile columns, 2 = 4 tile columns. | 440 // log2 unit: e.g., 0 = 1 tile column, 1 = 2 tile columns, 2 = 4 tile columns. |
440 // The number tile columns will be capped by the encoder based on image size | 441 // The number tile columns will be capped by the encoder based on image size |
441 // (minimum width of tile column is 256 pixels, maximum is 4096). | 442 // (minimum width of tile column is 256 pixels, maximum is 4096). |
442 vpx_codec_control(encoder_, VP9E_SET_TILE_COLUMNS, (config_->g_threads >> 1)); | 443 vpx_codec_control(encoder_, VP9E_SET_TILE_COLUMNS, (config_->g_threads >> 1)); |
443 #if !defined(WEBRTC_ARCH_ARM) && !defined(WEBRTC_ARCH_ARM64) | 444 #if !defined(WEBRTC_ARCH_ARM) && !defined(WEBRTC_ARCH_ARM64) |
444 // Note denoiser is still off by default until further testing/optimization, | 445 // Note denoiser is still off by default until further testing/optimization, |
445 // i.e., codecSpecific.VP9.denoisingOn == 0. | 446 // i.e., codecSpecific.VP9.denoisingOn == 0. |
446 vpx_codec_control(encoder_, VP9E_SET_NOISE_SENSITIVITY, | 447 vpx_codec_control(encoder_, VP9E_SET_NOISE_SENSITIVITY, |
(...skipping 14 matching lines...)
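Note: VP9E_SET_TILE_COLUMNS above takes the tile-column count in log2 units, and the CL passes config_->g_threads >> 1. A worked mapping for the small thread counts used here (libvpx still caps the result by frame width, 256 px minimum per column):

    #include <cstdio>

    int main() {
      const int kThreadCounts[] = {1, 2, 4};  // example encoder thread counts
      for (int threads : kThreadCounts) {
        const int log2_cols = threads >> 1;   // value passed to the control
        std::printf("threads=%d -> log2=%d -> %d tile column(s)\n", threads,
                    log2_cols, 1 << log2_cols);
      }
      return 0;  // 1 -> 1 column, 2 -> 2 columns, 4 -> 4 columns
    }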
461 // and scaled by a scale_par. | 462 // and scaled by a scale_par. |
462 // Max target size = scale_par * optimal_buffer_size * targetBR[Kbps]. | 463 // Max target size = scale_par * optimal_buffer_size * targetBR[Kbps]. |
463 // This value is presented in percentage of perFrameBw: | 464 // This value is presented in percentage of perFrameBw: |
464 // perFrameBw = targetBR[Kbps] * 1000 / framerate. | 465 // perFrameBw = targetBR[Kbps] * 1000 / framerate. |
465 // The target in % is as follows: | 466 // The target in % is as follows: |
466 float scale_par = 0.5; | 467 float scale_par = 0.5; |
467 uint32_t target_pct = | 468 uint32_t target_pct = |
468 optimal_buffer_size * scale_par * codec_.maxFramerate / 10; | 469 optimal_buffer_size * scale_par * codec_.maxFramerate / 10; |
469 // Don't go below 3 times the per frame bandwidth. | 470 // Don't go below 3 times the per frame bandwidth. |
470 const uint32_t min_intra_size = 300; | 471 const uint32_t min_intra_size = 300; |
471 return (target_pct < min_intra_size) ? min_intra_size: target_pct; | 472 return (target_pct < min_intra_size) ? min_intra_size : target_pct; |
472 } | 473 } |
473 | 474 |
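Note: plugging the buffer size configured in InitEncode() (rc_buf_optimal_sz = 600) into MaxIntraTarget() makes the comment above concrete; the 30 fps max frame rate below is an assumption, not a value from the CL:

    #include <algorithm>
    #include <cstdio>

    int main() {
      const float scale_par = 0.5f;                  // as in MaxIntraTarget()
      const unsigned int optimal_buffer_size = 600;  // rc_buf_optimal_sz
      const unsigned int max_framerate = 30;         // assumed frame rate
      unsigned int target_pct =
          optimal_buffer_size * scale_par * max_framerate / 10;  // = 900
      target_pct = std::max(target_pct, 300u);  // never below 3x per-frame bw
      std::printf("key frames capped at %u%% of per-frame bandwidth\n",
                  target_pct);
      return 0;
    }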
474 int VP9EncoderImpl::Encode(const VideoFrame& input_image, | 475 int VP9EncoderImpl::Encode(const VideoFrame& input_image, |
475 const CodecSpecificInfo* codec_specific_info, | 476 const CodecSpecificInfo* codec_specific_info, |
476 const std::vector<FrameType>* frame_types) { | 477 const std::vector<FrameType>* frame_types) { |
477 if (!inited_) { | 478 if (!inited_) { |
478 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 479 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
479 } | 480 } |
480 if (input_image.IsZeroSize()) { | 481 if (input_image.IsZeroSize()) { |
481 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 482 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
(...skipping 58 matching lines...)
540 if (vpx_codec_encode(encoder_, raw_, timestamp_, duration, flags, | 541 if (vpx_codec_encode(encoder_, raw_, timestamp_, duration, flags, |
541 VPX_DL_REALTIME)) { | 542 VPX_DL_REALTIME)) { |
542 return WEBRTC_VIDEO_CODEC_ERROR; | 543 return WEBRTC_VIDEO_CODEC_ERROR; |
543 } | 544 } |
544 timestamp_ += duration; | 545 timestamp_ += duration; |
545 | 546 |
546 return WEBRTC_VIDEO_CODEC_OK; | 547 return WEBRTC_VIDEO_CODEC_OK; |
547 } | 548 } |
548 | 549 |
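Note: Encode() advances timestamp_ by the frame duration expressed in the 1/90000 timebase set in InitEncode(). The duration computation itself is in the elided lines, so the figures below are only a sanity check of the tick arithmetic:

    #include <cstdio>

    int main() {
      const int kTicksPerSecond = 90000;       // matches config_->g_timebase
      const int kFrameRates[] = {25, 30, 60};  // example rates (assumption)
      for (int fps : kFrameRates) {
        std::printf("%d fps -> %d ticks per frame\n", fps,
                    kTicksPerSecond / fps);
      }
      return 0;  // 3600, 3000 and 1500 ticks respectively
    }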
549 void VP9EncoderImpl::PopulateCodecSpecific(CodecSpecificInfo* codec_specific, | 550 void VP9EncoderImpl::PopulateCodecSpecific(CodecSpecificInfo* codec_specific, |
550 const vpx_codec_cx_pkt& pkt, | 551 const vpx_codec_cx_pkt& pkt, |
551 uint32_t timestamp) { | 552 uint32_t timestamp) { |
552 assert(codec_specific != NULL); | 553 assert(codec_specific != NULL); |
553 codec_specific->codecType = kVideoCodecVP9; | 554 codec_specific->codecType = kVideoCodecVP9; |
554 CodecSpecificInfoVP9 *vp9_info = &(codec_specific->codecSpecific.VP9); | 555 CodecSpecificInfoVP9* vp9_info = &(codec_specific->codecSpecific.VP9); |
555 // TODO(asapersson): Set correct value. | 556 // TODO(asapersson): Set correct value. |
556 vp9_info->inter_pic_predicted = | 557 vp9_info->inter_pic_predicted = |
557 (pkt.data.frame.flags & VPX_FRAME_IS_KEY) ? false : true; | 558 (pkt.data.frame.flags & VPX_FRAME_IS_KEY) ? false : true; |
558 vp9_info->flexible_mode = codec_.codecSpecific.VP9.flexibleMode; | 559 vp9_info->flexible_mode = codec_.codecSpecific.VP9.flexibleMode; |
559 vp9_info->ss_data_available = ((pkt.data.frame.flags & VPX_FRAME_IS_KEY) && | 560 vp9_info->ss_data_available = ((pkt.data.frame.flags & VPX_FRAME_IS_KEY) && |
560 !codec_.codecSpecific.VP9.flexibleMode) | 561 !codec_.codecSpecific.VP9.flexibleMode) |
561 ? true | 562 ? true |
562 : false; | 563 : false; |
563 | 564 |
564 vpx_svc_layer_id_t layer_id = {0}; | 565 vpx_svc_layer_id_t layer_id = {0}; |
(...skipping 285 matching lines...)
850 if (inst == NULL) { | 851 if (inst == NULL) { |
851 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 852 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
852 } | 853 } |
853 int ret_val = Release(); | 854 int ret_val = Release(); |
854 if (ret_val < 0) { | 855 if (ret_val < 0) { |
855 return ret_val; | 856 return ret_val; |
856 } | 857 } |
857 if (decoder_ == NULL) { | 858 if (decoder_ == NULL) { |
858 decoder_ = new vpx_codec_ctx_t; | 859 decoder_ = new vpx_codec_ctx_t; |
859 } | 860 } |
860 vpx_codec_dec_cfg_t cfg; | 861 vpx_codec_dec_cfg_t cfg; |
861 // Setting number of threads to a constant value (1) | 862 // Setting number of threads to a constant value (1) |
862 cfg.threads = 1; | 863 cfg.threads = 1; |
863 cfg.h = cfg.w = 0; // set after decode | 864 cfg.h = cfg.w = 0; // set after decode |
864 vpx_codec_flags_t flags = 0; | 865 vpx_codec_flags_t flags = 0; |
865 if (vpx_codec_dec_init(decoder_, vpx_codec_vp9_dx(), &cfg, flags)) { | 866 if (vpx_codec_dec_init(decoder_, vpx_codec_vp9_dx(), &cfg, flags)) { |
866 return WEBRTC_VIDEO_CODEC_MEMORY; | 867 return WEBRTC_VIDEO_CODEC_MEMORY; |
867 } | 868 } |
868 if (&codec_ != inst) { | 869 if (&codec_ != inst) { |
869 // Save VideoCodec instance for later; mainly for duplicating the decoder. | 870 // Save VideoCodec instance for later; mainly for duplicating the decoder. |
870 codec_ = *inst; | 871 codec_ = *inst; |
(...skipping 32 matching lines...)
903 } | 904 } |
904 } | 905 } |
905 vpx_codec_iter_t iter = NULL; | 906 vpx_codec_iter_t iter = NULL; |
906 vpx_image_t* img; | 907 vpx_image_t* img; |
907 uint8_t* buffer = input_image._buffer; | 908 uint8_t* buffer = input_image._buffer; |
908 if (input_image._length == 0) { | 909 if (input_image._length == 0) { |
909 buffer = NULL; // Triggers full frame concealment. | 910 buffer = NULL; // Triggers full frame concealment. |
910 } | 911 } |
911 // During decode libvpx may get and release buffers from |frame_buffer_pool_|. | 912 // During decode libvpx may get and release buffers from |frame_buffer_pool_|. |
912 // In practice libvpx keeps a few (~3-4) buffers alive at a time. | 913 // In practice libvpx keeps a few (~3-4) buffers alive at a time. |
913 if (vpx_codec_decode(decoder_, | 914 if (vpx_codec_decode(decoder_, buffer, |
914 buffer, | 915 static_cast<unsigned int>(input_image._length), 0, |
915 static_cast<unsigned int>(input_image._length), | |
916 0, | |
917 VPX_DL_REALTIME)) { | 916 VPX_DL_REALTIME)) { |
918 return WEBRTC_VIDEO_CODEC_ERROR; | 917 return WEBRTC_VIDEO_CODEC_ERROR; |
919 } | 918 } |
920 // |img->fb_priv| contains the image data, a reference counted Vp9FrameBuffer. | 919 // |img->fb_priv| contains the image data, a reference counted Vp9FrameBuffer. |
921 // It may be released by libvpx during future vpx_codec_decode or | 920 // It may be released by libvpx during future vpx_codec_decode or |
922 // vpx_codec_destroy calls. | 921 // vpx_codec_destroy calls. |
923 img = vpx_codec_get_frame(decoder_, &iter); | 922 img = vpx_codec_get_frame(decoder_, &iter); |
924 int ret = ReturnFrame(img, input_image._timeStamp); | 923 int ret = ReturnFrame(img, input_image._timeStamp); |
925 if (ret != 0) { | 924 if (ret != 0) { |
926 return ret; | 925 return ret; |
927 } | 926 } |
928 return WEBRTC_VIDEO_CODEC_OK; | 927 return WEBRTC_VIDEO_CODEC_OK; |
929 } | 928 } |
930 | 929 |
931 int VP9DecoderImpl::ReturnFrame(const vpx_image_t* img, uint32_t timestamp) { | 930 int VP9DecoderImpl::ReturnFrame(const vpx_image_t* img, uint32_t timestamp) { |
932 if (img == NULL) { | 931 if (img == NULL) { |
933 // Decoder OK and NULL image => No show frame. | 932 // Decoder OK and NULL image => No show frame. |
934 return WEBRTC_VIDEO_CODEC_NO_OUTPUT; | 933 return WEBRTC_VIDEO_CODEC_NO_OUTPUT; |
935 } | 934 } |
936 | 935 |
937 // This buffer contains all of |img|'s image data, a reference counted | 936 // This buffer contains all of |img|'s image data, a reference counted |
938 // Vp9FrameBuffer. (libvpx is done with the buffers after a few | 937 // Vp9FrameBuffer. (libvpx is done with the buffers after a few |
939 // vpx_codec_decode calls or vpx_codec_destroy). | 938 // vpx_codec_decode calls or vpx_codec_destroy). |
940 Vp9FrameBufferPool::Vp9FrameBuffer* img_buffer = | 939 Vp9FrameBufferPool::Vp9FrameBuffer* img_buffer = |
941 static_cast<Vp9FrameBufferPool::Vp9FrameBuffer*>(img->fb_priv); | 940 static_cast<Vp9FrameBufferPool::Vp9FrameBuffer*>(img->fb_priv); |
942 // The buffer can be used directly by the VideoFrame (without copy) by | 941 // The buffer can be used directly by the VideoFrame (without copy) by |
943 // using a WrappedI420Buffer. | 942 // using a WrappedI420Buffer. |
944 rtc::scoped_refptr<WrappedI420Buffer> img_wrapped_buffer( | 943 rtc::scoped_refptr<WrappedI420Buffer> img_wrapped_buffer( |
945 new rtc::RefCountedObject<webrtc::WrappedI420Buffer>( | 944 new rtc::RefCountedObject<webrtc::WrappedI420Buffer>( |
946 img->d_w, img->d_h, | 945 img->d_w, img->d_h, img->planes[VPX_PLANE_Y], |
947 img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y], | 946 img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U], |
948 img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U], | 947 img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V], |
949 img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V], | 948 img->stride[VPX_PLANE_V], |
950 // WrappedI420Buffer's mechanism for allowing the release of its frame | 949 // WrappedI420Buffer's mechanism for allowing the release of its frame |
951 // buffer is through a callback function. This is where we should | 950 // buffer is through a callback function. This is where we should |
952 // release |img_buffer|. | 951 // release |img_buffer|. |
953 rtc::KeepRefUntilDone(img_buffer))); | 952 rtc::KeepRefUntilDone(img_buffer))); |
954 | 953 |
955 VideoFrame decoded_image; | 954 VideoFrame decoded_image; |
956 decoded_image.set_video_frame_buffer(img_wrapped_buffer); | 955 decoded_image.set_video_frame_buffer(img_wrapped_buffer); |
957 decoded_image.set_timestamp(timestamp); | 956 decoded_image.set_timestamp(timestamp); |
958 int ret = decode_complete_callback_->Decoded(decoded_image); | 957 int ret = decode_complete_callback_->Decoded(decoded_image); |
959 if (ret != 0) | 958 if (ret != 0) |
(...skipping 23 matching lines...)
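Note: the decode path above avoids a copy by wrapping |img|'s planes in a WrappedI420Buffer and using rtc::KeepRefUntilDone so the pooled Vp9FrameBuffer stays alive until the wrapper is released. A self-contained sketch of that keep-alive idea with generic types (none of the names below are WebRTC or libvpx APIs):

    #include <cstdint>
    #include <functional>
    #include <memory>
    #include <utility>
    #include <vector>

    // Stand-in for the pooled, reference-counted frame buffer.
    struct PooledBuffer {
      std::vector<uint8_t> pixels;
    };

    // Stand-in for WrappedI420Buffer: exposes externally owned pixels and
    // runs |no_longer_used| when the wrapper itself goes away.
    class WrappedBuffer {
     public:
      WrappedBuffer(const uint8_t* data, std::function<void()> no_longer_used)
          : data_(data), no_longer_used_(std::move(no_longer_used)) {}
      ~WrappedBuffer() { no_longer_used_(); }
      const uint8_t* data() const { return data_; }

     private:
      const uint8_t* data_;
      std::function<void()> no_longer_used_;
    };

    // The KeepRefUntilDone idea: capture a reference in the callback so the
    // pooled buffer cannot be recycled while the wrapper is still in use.
    std::unique_ptr<WrappedBuffer> WrapWithoutCopy(
        std::shared_ptr<PooledBuffer> pooled) {
      const uint8_t* data = pooled->pixels.data();
      // Capturing |pooled| by value keeps the buffer alive; the reference is
      // dropped when the callback is destroyed along with the WrappedBuffer.
      return std::make_unique<WrappedBuffer>(data, [pooled] { (void)pooled; });
    }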
983 frame_buffer_pool_.ClearPool(); | 982 frame_buffer_pool_.ClearPool(); |
984 inited_ = false; | 983 inited_ = false; |
985 return WEBRTC_VIDEO_CODEC_OK; | 984 return WEBRTC_VIDEO_CODEC_OK; |
986 } | 985 } |
987 | 986 |
988 const char* VP9DecoderImpl::ImplementationName() const { | 987 const char* VP9DecoderImpl::ImplementationName() const { |
989 return "libvpx"; | 988 return "libvpx"; |
990 } | 989 } |
991 | 990 |
992 } // namespace webrtc | 991 } // namespace webrtc |