Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(20)

Side by Side Diff: webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc

Issue 1328113004: Work on flexible mode and screen sharing. (Closed) Base URL: https://chromium.googlesource.com/external/webrtc.git@master
Patch Set: Undo renaming Created 5 years, 3 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 /* 1 /*
2 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. 2 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
3 * 3 *
4 * Use of this source code is governed by a BSD-style license 4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source 5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found 6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may 7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree. 8 * be found in the AUTHORS file in the root of the source tree.
9 * 9 *
10 */ 10 */
(...skipping 58 matching lines...) Expand 10 before | Expand all | Expand 10 after
69 inited_(false), 69 inited_(false),
70 timestamp_(0), 70 timestamp_(0),
71 picture_id_(0), 71 picture_id_(0),
72 cpu_speed_(3), 72 cpu_speed_(3),
73 rc_max_intra_target_(0), 73 rc_max_intra_target_(0),
74 encoder_(NULL), 74 encoder_(NULL),
75 config_(NULL), 75 config_(NULL),
76 raw_(NULL), 76 raw_(NULL),
77 input_image_(NULL), 77 input_image_(NULL),
78 tl0_pic_idx_(0), 78 tl0_pic_idx_(0),
79 gof_idx_(0), 79 frames_since_kf_(0),
80 num_temporal_layers_(0), 80 num_temporal_layers_(0),
81 num_spatial_layers_(0) { 81 num_spatial_layers_(0),
82 frames_encoded_(0) {
82 memset(&codec_, 0, sizeof(codec_)); 83 memset(&codec_, 0, sizeof(codec_));
83 uint32_t seed = static_cast<uint32_t>(TickTime::MillisecondTimestamp()); 84 uint32_t seed = static_cast<uint32_t>(TickTime::MillisecondTimestamp());
84 srand(seed); 85 srand(seed);
85 } 86 }
86 87
87 VP9EncoderImpl::~VP9EncoderImpl() { 88 VP9EncoderImpl::~VP9EncoderImpl() {
88 Release(); 89 Release();
89 } 90 }
90 91
91 int VP9EncoderImpl::Release() { 92 int VP9EncoderImpl::Release() {
(...skipping 193 matching lines...) Expand 10 before | Expand all | Expand 10 after
285 } 286 }
286 // Determine number of threads based on the image size and #cores. 287 // Determine number of threads based on the image size and #cores.
287 config_->g_threads = NumberOfThreads(config_->g_w, 288 config_->g_threads = NumberOfThreads(config_->g_w,
288 config_->g_h, 289 config_->g_h,
289 number_of_cores); 290 number_of_cores);
290 291
291 cpu_speed_ = GetCpuSpeed(config_->g_w, config_->g_h); 292 cpu_speed_ = GetCpuSpeed(config_->g_w, config_->g_h);
292 293
293 // TODO(asapersson): Check configuration of temporal switch up and increase 294 // TODO(asapersson): Check configuration of temporal switch up and increase
294 // pattern length. 295 // pattern length.
295 if (num_temporal_layers_ == 1) { 296 is_flexible_mode_ = inst->codecSpecific.VP9.flexibleMode;
297 if (is_flexible_mode_) {
298 config_->temporal_layering_mode = VP9E_TEMPORAL_LAYERING_MODE_BYPASS;
299 config_->ts_number_layers = num_temporal_layers_;
300
301 if (codec_.mode == kScreensharing) {
302 // TODO(philipel): Using VP8 style screen sharing. Change conditions
303 // when flexible mode + spatial layers become available.
304 assert(inst->codecSpecific.VP9.numberOfTemporalLayers == 2);
sprang_webrtc 2015/09/07 14:39:05 CHECK_EQ(2, ...)
philipel 2015/09/10 14:47:02 Code removed, now use spatial layers instead. Hurr
305 config_->ts_number_layers = 2;
306 config_->ts_rate_decimator[0] = 2;
307 config_->ts_rate_decimator[1] = 1;
308 config_->ts_periodicity = 2;
309 config_->ts_layer_id[0] = 0;
310 config_->ts_layer_id[1] = 1;
311
312 // TODO(philipel): The following should be configurable, not fixed:
313 temporal_layer_.ConfigureBitrate(1000);
sprang_webrtc 2015/09/07 14:39:05 Why not set to start bitrate from VideoCodec param
philipel 2015/09/10 14:47:02 Done.
314 }
315 } else if (num_temporal_layers_ == 1) {
296 gof_.SetGofInfoVP9(kTemporalStructureMode1); 316 gof_.SetGofInfoVP9(kTemporalStructureMode1);
297 config_->temporal_layering_mode = VP9E_TEMPORAL_LAYERING_MODE_NOLAYERING; 317 config_->temporal_layering_mode = VP9E_TEMPORAL_LAYERING_MODE_NOLAYERING;
298 config_->ts_number_layers = 1; 318 config_->ts_number_layers = 1;
299 config_->ts_rate_decimator[0] = 1; 319 config_->ts_rate_decimator[0] = 1;
300 config_->ts_periodicity = 1; 320 config_->ts_periodicity = 1;
301 config_->ts_layer_id[0] = 0; 321 config_->ts_layer_id[0] = 0;
302 } else if (num_temporal_layers_ == 2) { 322 } else if (num_temporal_layers_ == 2) {
303 gof_.SetGofInfoVP9(kTemporalStructureMode2); 323 gof_.SetGofInfoVP9(kTemporalStructureMode2);
304 config_->temporal_layering_mode = VP9E_TEMPORAL_LAYERING_MODE_0101; 324 config_->temporal_layering_mode = VP9E_TEMPORAL_LAYERING_MODE_0101;
305 config_->ts_number_layers = 2; 325 config_->ts_number_layers = 2;
(...skipping 145 matching lines...) Expand 10 before | Expand all | Expand 10 after
451 471
452 // Image in vpx_image_t format. 472 // Image in vpx_image_t format.
453 // Input image is const. VPX's raw image is not defined as const. 473 // Input image is const. VPX's raw image is not defined as const.
454 raw_->planes[VPX_PLANE_Y] = const_cast<uint8_t*>(input_image.buffer(kYPlane)); 474 raw_->planes[VPX_PLANE_Y] = const_cast<uint8_t*>(input_image.buffer(kYPlane));
455 raw_->planes[VPX_PLANE_U] = const_cast<uint8_t*>(input_image.buffer(kUPlane)); 475 raw_->planes[VPX_PLANE_U] = const_cast<uint8_t*>(input_image.buffer(kUPlane));
456 raw_->planes[VPX_PLANE_V] = const_cast<uint8_t*>(input_image.buffer(kVPlane)); 476 raw_->planes[VPX_PLANE_V] = const_cast<uint8_t*>(input_image.buffer(kVPlane));
457 raw_->stride[VPX_PLANE_Y] = input_image.stride(kYPlane); 477 raw_->stride[VPX_PLANE_Y] = input_image.stride(kYPlane);
458 raw_->stride[VPX_PLANE_U] = input_image.stride(kUPlane); 478 raw_->stride[VPX_PLANE_U] = input_image.stride(kUPlane);
459 raw_->stride[VPX_PLANE_V] = input_image.stride(kVPlane); 479 raw_->stride[VPX_PLANE_V] = input_image.stride(kVPlane);
460 480
461 int flags = 0; 481 vpx_enc_frame_flags_t flags = 0;
462 bool send_keyframe = (frame_type == kKeyFrame); 482 bool send_keyframe = (frame_type == kKeyFrame);
463 if (send_keyframe) { 483 if (send_keyframe) {
464 // Key frame request from caller. 484 // Key frame request from caller.
465 flags = VPX_EFLAG_FORCE_KF; 485 flags = VPX_EFLAG_FORCE_KF;
466 } 486 }
487
488 if (is_flexible_mode_) {
489 vpx_svc_layer_id_t svc_layer;
490 svc_layer.spatial_layer_id = 0;
491 if (send_keyframe) {
492 flags = GenerateRefsAndFlags();
493 } else if (codec_.mode == kRealtimeVideo) {
494 // TODO(philipel): For now produce the 0-2-1-2 mod 8 pattern.
495 switch ((frames_since_kf_ - 1) % 8) {
496 case 0: {
497 flags = GenerateRefsAndFlags(0, 0);
498 break;
499 }
500 case 1: {
501 flags = GenerateRefsAndFlags(2, 0);
502 break;
503 }
504 case 2: {
505 flags = GenerateRefsAndFlags(1, 0);
506 break;
507 }
508 case 3: {
509 flags = GenerateRefsAndFlags(2, 0, 1, 2);
510 break;
511 }
512 case 4: {
513 flags = GenerateRefsAndFlags(0, 0);
514 break;
515 }
516 case 5: {
517 flags = GenerateRefsAndFlags(2, 0, 1, 2);
518 break;
519 }
520 case 6: {
521 flags = GenerateRefsAndFlags(1, 0, 1);
522 break;
523 }
524 case 7: {
525 flags = GenerateRefsAndFlags(2, 0, 1, 2);
526 break;
527 }
528 }
529 static const int temporal_layers[4] = {0, 2, 1, 2};
530 svc_layer.temporal_layer_id = temporal_layers[(frames_since_kf_ - 1) % 4];
531 } else {
532 flags = GenerateRefsAndFlags(
533 temporal_layer_.BufferArguments(input_image.timestamp()));
534 svc_layer.temporal_layer_id = temporal_layer_.CurrentLayer();
535 }
536 vpx_codec_control(encoder_, VP9E_SET_SVC_LAYER_ID, &svc_layer);
537 }
538
467 assert(codec_.maxFramerate > 0); 539 assert(codec_.maxFramerate > 0);
468 uint32_t duration = 90000 / codec_.maxFramerate; 540 uint32_t duration = 90000 / codec_.maxFramerate;
469 if (vpx_codec_encode(encoder_, raw_, timestamp_, duration, flags, 541 if (vpx_codec_encode(encoder_, raw_, timestamp_, duration, flags,
470 VPX_DL_REALTIME)) { 542 VPX_DL_REALTIME)) {
471 return WEBRTC_VIDEO_CODEC_ERROR; 543 return WEBRTC_VIDEO_CODEC_ERROR;
472 } 544 }
473 timestamp_ += duration; 545 timestamp_ += duration;
474 546
475 return WEBRTC_VIDEO_CODEC_OK; 547 return WEBRTC_VIDEO_CODEC_OK;
476 } 548 }
477 549
478 void VP9EncoderImpl::PopulateCodecSpecific(CodecSpecificInfo* codec_specific, 550 void VP9EncoderImpl::PopulateCodecSpecific(CodecSpecificInfo* codec_specific,
479 const vpx_codec_cx_pkt& pkt, 551 const vpx_codec_cx_pkt& pkt,
480 uint32_t timestamp) { 552 uint32_t timestamp) {
481 assert(codec_specific != NULL); 553 assert(codec_specific != NULL);
482 codec_specific->codecType = kVideoCodecVP9; 554 codec_specific->codecType = kVideoCodecVP9;
483 CodecSpecificInfoVP9 *vp9_info = &(codec_specific->codecSpecific.VP9); 555 CodecSpecificInfoVP9 *vp9_info = &(codec_specific->codecSpecific.VP9);
484 // TODO(asapersson): Set correct values. 556 // TODO(asapersson): Set correct values.
485 vp9_info->inter_pic_predicted = 557 vp9_info->inter_pic_predicted =
486 (pkt.data.frame.flags & VPX_FRAME_IS_KEY) ? false : true; 558 (pkt.data.frame.flags & VPX_FRAME_IS_KEY) ? false : true;
487 vp9_info->flexible_mode = codec_.codecSpecific.VP9.flexibleMode; 559 vp9_info->flexible_mode = codec_.codecSpecific.VP9.flexibleMode;
488 vp9_info->ss_data_available = ((pkt.data.frame.flags & VPX_FRAME_IS_KEY) && 560 vp9_info->ss_data_available = ((pkt.data.frame.flags & VPX_FRAME_IS_KEY) &&
489 !codec_.codecSpecific.VP9.flexibleMode) 561 !codec_.codecSpecific.VP9.flexibleMode)
490 ? true 562 ? true
491 : false; 563 : false;
492 if (pkt.data.frame.flags & VPX_FRAME_IS_KEY) { 564 if (pkt.data.frame.flags & VPX_FRAME_IS_KEY)
493 gof_idx_ = 0; 565 frames_since_kf_ = 0;
494 }
495 566
496 vpx_svc_layer_id_t layer_id = {0}; 567 vpx_svc_layer_id_t layer_id = {0};
497 vpx_codec_control(encoder_, VP9E_GET_SVC_LAYER_ID, &layer_id); 568 vpx_codec_control(encoder_, VP9E_GET_SVC_LAYER_ID, &layer_id);
498 569
499 assert(num_temporal_layers_ > 0); 570 assert(num_temporal_layers_ > 0);
500 assert(num_spatial_layers_ > 0); 571 assert(num_spatial_layers_ > 0);
501 if (num_temporal_layers_ == 1) { 572 if (num_temporal_layers_ == 1) {
502 assert(layer_id.temporal_layer_id == 0); 573 assert(layer_id.temporal_layer_id == 0);
503 vp9_info->temporal_idx = kNoTemporalIdx; 574 vp9_info->temporal_idx = kNoTemporalIdx;
504 } else { 575 } else {
505 vp9_info->temporal_idx = layer_id.temporal_layer_id; 576 vp9_info->temporal_idx = layer_id.temporal_layer_id;
506 } 577 }
507 if (num_spatial_layers_ == 1) { 578 if (num_spatial_layers_ == 1) {
508 assert(layer_id.spatial_layer_id == 0); 579 assert(layer_id.spatial_layer_id == 0);
509 vp9_info->spatial_idx = kNoSpatialIdx; 580 vp9_info->spatial_idx = kNoSpatialIdx;
510 } else { 581 } else {
511 vp9_info->spatial_idx = layer_id.spatial_layer_id; 582 vp9_info->spatial_idx = layer_id.spatial_layer_id;
512 } 583 }
513 if (layer_id.spatial_layer_id != 0) { 584 if (layer_id.spatial_layer_id != 0) {
514 vp9_info->ss_data_available = false; 585 vp9_info->ss_data_available = false;
515 } 586 }
516 587
517 if (vp9_info->flexible_mode) {
518 vp9_info->gof_idx = kNoGofIdx;
519 } else {
520 vp9_info->gof_idx =
521 static_cast<uint8_t>(gof_idx_++ % gof_.num_frames_in_gof);
522 }
523
524 // TODO(asapersson): this info has to be obtained from the encoder. 588 // TODO(asapersson): this info has to be obtained from the encoder.
525 vp9_info->temporal_up_switch = true; 589 vp9_info->temporal_up_switch = true;
526 590
527 if (layer_id.spatial_layer_id == 0) { 591 if (layer_id.spatial_layer_id == 0) {
528 picture_id_ = (picture_id_ + 1) & 0x7FFF; 592 picture_id_ = (picture_id_ + 1) & 0x7FFF;
529 // TODO(asapersson): this info has to be obtained from the encoder. 593 // TODO(asapersson): this info has to be obtained from the encoder.
530 vp9_info->inter_layer_predicted = false; 594 vp9_info->inter_layer_predicted = false;
531 } else { 595 } else {
532 // TODO(asapersson): this info has to be obtained from the encoder. 596 // TODO(asapersson): this info has to be obtained from the encoder.
533 vp9_info->inter_layer_predicted = true; 597 vp9_info->inter_layer_predicted = true;
534 } 598 }
535 599
536 vp9_info->picture_id = picture_id_; 600 vp9_info->picture_id = picture_id_;
537 601
538 if (!vp9_info->flexible_mode) { 602 if (!vp9_info->flexible_mode) {
539 if (layer_id.temporal_layer_id == 0 && layer_id.spatial_layer_id == 0) { 603 if (layer_id.temporal_layer_id == 0 && layer_id.spatial_layer_id == 0) {
540 tl0_pic_idx_++; 604 tl0_pic_idx_++;
541 } 605 }
542 vp9_info->tl0_pic_idx = tl0_pic_idx_; 606 vp9_info->tl0_pic_idx = tl0_pic_idx_;
543 } 607 }
544 608
609 vp9_info->num_ref_pics = 0;
610 if (vp9_info->flexible_mode) {
611 vp9_info->gof_idx = kNoGofIdx;
612 if (!(pkt.data.frame.flags & VPX_FRAME_IS_KEY)) {
613 vp9_info->num_ref_pics = num_refs_pics_;
614 for (int i = 0; i < num_refs_pics_; ++i) {
615 vp9_info->p_diff[i] = p_diff_[i];
616 }
617 }
618 } else {
619 vp9_info->gof_idx =
620 static_cast<uint8_t>(frames_since_kf_ % gof_.num_frames_in_gof);
621 }
622 ++frames_since_kf_;
623
545 if (vp9_info->ss_data_available) { 624 if (vp9_info->ss_data_available) {
546 vp9_info->num_spatial_layers = num_spatial_layers_; 625 vp9_info->num_spatial_layers = num_spatial_layers_;
547 vp9_info->spatial_layer_resolution_present = true; 626 vp9_info->spatial_layer_resolution_present = true;
548 for (size_t i = 0; i < vp9_info->num_spatial_layers; ++i) { 627 for (size_t i = 0; i < vp9_info->num_spatial_layers; ++i) {
549 vp9_info->width[i] = codec_.width * 628 vp9_info->width[i] = codec_.width *
550 svc_internal_.svc_params.scaling_factor_num[i] / 629 svc_internal_.svc_params.scaling_factor_num[i] /
551 svc_internal_.svc_params.scaling_factor_den[i]; 630 svc_internal_.svc_params.scaling_factor_den[i];
552 vp9_info->height[i] = codec_.height * 631 vp9_info->height[i] = codec_.height *
553 svc_internal_.svc_params.scaling_factor_num[i] / 632 svc_internal_.svc_params.scaling_factor_num[i] /
554 svc_internal_.svc_params.scaling_factor_den[i]; 633 svc_internal_.svc_params.scaling_factor_den[i];
(...skipping 16 matching lines...) Expand all
571 650
572 assert(pkt->kind == VPX_CODEC_CX_FRAME_PKT); 651 assert(pkt->kind == VPX_CODEC_CX_FRAME_PKT);
573 memcpy(&encoded_image_._buffer[encoded_image_._length], pkt->data.frame.buf, 652 memcpy(&encoded_image_._buffer[encoded_image_._length], pkt->data.frame.buf,
574 pkt->data.frame.sz); 653 pkt->data.frame.sz);
575 frag_info.fragmentationOffset[part_idx] = encoded_image_._length; 654 frag_info.fragmentationOffset[part_idx] = encoded_image_._length;
576 frag_info.fragmentationLength[part_idx] = 655 frag_info.fragmentationLength[part_idx] =
577 static_cast<uint32_t>(pkt->data.frame.sz); 656 static_cast<uint32_t>(pkt->data.frame.sz);
578 frag_info.fragmentationPlType[part_idx] = 0; 657 frag_info.fragmentationPlType[part_idx] = 0;
579 frag_info.fragmentationTimeDiff[part_idx] = 0; 658 frag_info.fragmentationTimeDiff[part_idx] = 0;
580 encoded_image_._length += static_cast<uint32_t>(pkt->data.frame.sz); 659 encoded_image_._length += static_cast<uint32_t>(pkt->data.frame.sz);
660 if (is_flexible_mode_ && codec_.mode == kScreensharing) {
661 temporal_layer_.FrameEncoded(encoded_image_._length);
662 }
sprang_webrtc 2015/09/07 14:39:05 Remove {}
philipel 2015/09/10 14:47:02 Done.
581 assert(encoded_image_._length <= encoded_image_._size); 663 assert(encoded_image_._length <= encoded_image_._size);
582 664
583 // End of frame. 665 // End of frame.
584 // Check if encoded frame is a key frame. 666 // Check if encoded frame is a key frame.
585 if (pkt->data.frame.flags & VPX_FRAME_IS_KEY) { 667 if (pkt->data.frame.flags & VPX_FRAME_IS_KEY) {
586 encoded_image_._frameType = kKeyFrame; 668 encoded_image_._frameType = kKeyFrame;
587 } 669 }
588 PopulateCodecSpecific(&codec_specific, *pkt, input_image_->timestamp()); 670 PopulateCodecSpecific(&codec_specific, *pkt, input_image_->timestamp());
589 671
590 if (encoded_image_._length > 0) { 672 if (encoded_image_._length > 0) {
591 TRACE_COUNTER1("webrtc", "EncodedFrameSize", encoded_image_._length); 673 TRACE_COUNTER1("webrtc", "EncodedFrameSize", encoded_image_._length);
592 encoded_image_._timeStamp = input_image_->timestamp(); 674 encoded_image_._timeStamp = input_image_->timestamp();
593 encoded_image_.capture_time_ms_ = input_image_->render_time_ms(); 675 encoded_image_.capture_time_ms_ = input_image_->render_time_ms();
594 encoded_image_._encodedHeight = raw_->d_h; 676 encoded_image_._encodedHeight = raw_->d_h;
595 encoded_image_._encodedWidth = raw_->d_w; 677 encoded_image_._encodedWidth = raw_->d_w;
596 encoded_complete_callback_->Encoded(encoded_image_, &codec_specific, 678 encoded_complete_callback_->Encoded(encoded_image_, &codec_specific,
597 &frag_info); 679 &frag_info);
598 } 680 }
599 return WEBRTC_VIDEO_CODEC_OK; 681 return WEBRTC_VIDEO_CODEC_OK;
600 } 682 }
601 683
684 vpx_enc_frame_flags_t VP9EncoderImpl::GenerateRefsAndFlags(int8_t upd_buffer,
685 int8_t ref_buf1,
686 int8_t ref_buf2,
687 int8_t ref_buf3) {
688 // For now we only use 3 out of the 8 buffers available.
689 DCHECK(upd_buffer < 3);
sprang_webrtc 2015/09/07 14:39:05 DCHECK_LT
philipel 2015/09/10 14:47:02 Code removed.
690
691 // BUF0 = LAST
692 // BUF1 = GF
693 // BUF2 = ARF
694 num_refs_pics_ = 0;
695 vpx_enc_frame_flags_t flags = VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_REF_LAST |
696 VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_UPD_ARF |
697 VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_LAST;
698 if (upd_buffer == -2) {
sprang_webrtc 2015/09/07 14:39:05 Have a named constant for -2?
philipel 2015/09/10 14:47:02 Code removed.
699 #ifdef NDEBUG
sprang_webrtc 2015/09/07 14:39:05 Can't just always do this? memset it?
philipel 2015/09/10 14:47:02 Done.
700 // Used later on to make sure we don't make any invalid references.
701 for (int i = 0; i < 8; ++i)
702 buf_upd_at_frame_[i] = -1;
703 #endif
704
705 // Keyframe, always stored in BUF0. Why? Because of line 161 in
sprang_webrtc 2015/09/07 14:39:05 This comment may grow stale quickly :) "Keyframe a
philipel 2015/09/10 14:47:02 Changed so that the keyframes are stored in the bu
706 // vpx_temporal_svc_encoder.c. I'm not sure if that comment applies here.
707 buf_upd_at_frame_[0] = frames_encoded_;
708 flags = VPX_EFLAG_FORCE_KF | VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF;
709 } else {
710 int8_t refs[3] = {ref_buf1, ref_buf2, ref_buf3};
sprang_webrtc 2015/09/07 14:39:05 I'd probably add a named constant for 3 and use he
philipel 2015/09/10 14:47:01 Now use the kMaxVp9RefPics instead of '3'.
711 for (int i = 0; i < 3; ++i) {
712 switch (refs[i]) {
713 case -1:
714 goto done;
sprang_webrtc 2015/09/07 14:39:05 Ugh! How bout: i = kNumReferences; continue; Per
philipel 2015/09/10 14:47:02 Code removed.
715 case 0: {
716 flags ^= VP8_EFLAG_NO_REF_LAST;
717 break;
718 }
719 case 1: {
720 flags ^= VP8_EFLAG_NO_REF_GF;
721 break;
722 }
723 case 2: {
724 flags ^= VP8_EFLAG_NO_REF_ARF;
725 break;
726 }
727 default:
728 DCHECK(false);
sprang_webrtc 2015/09/07 14:39:05 RTC_NOTREACHED();
philipel 2015/09/10 14:47:02 Code removed.
729 }
730
731 // Make sure this frame doesn't reference to an unavailable
732 // buffer, either because it has not yet been used or
733 // because a KF has occurred since it was used.
734 DCHECK(buf_upd_at_frame_[refs[i]] != -1);
735
736 p_diff_[i] = frames_encoded_ - buf_upd_at_frame_[refs[i]];
737 num_refs_pics_++;
738 }
739 // Everybody loves gotos!
sprang_webrtc 2015/09/07 14:39:05 No. :)
philipel 2015/09/10 14:47:02 Yes!
740 done:
741
742 buf_upd_at_frame_[upd_buffer] = frames_encoded_;
743 switch (upd_buffer) {
744 case -1:
745 break;
746 case 0: {
747 flags ^= VP8_EFLAG_NO_UPD_LAST;
748 break;
749 }
750 case 1: {
751 flags ^= VP8_EFLAG_NO_UPD_GF;
752 break;
753 }
754 case 2: {
755 flags ^= VP8_EFLAG_NO_UPD_ARF;
756 break;
757 }
758 default:
759 DCHECK(false);
sprang_webrtc 2015/09/07 14:39:05 RTC_NOTREACHED();
philipel 2015/09/10 14:47:01 Code removed.
760 }
761 }
762
763 frames_encoded_++;
764 return flags;
765 }
766
767 vpx_enc_frame_flags_t VP9EncoderImpl::GenerateRefsAndFlags(
768 std::array<int8_t, 4> args) {
769 return GenerateRefsAndFlags(args[0], args[1], args[2], args[3]);
770 }
771
602 int VP9EncoderImpl::SetChannelParameters(uint32_t packet_loss, int64_t rtt) { 772 int VP9EncoderImpl::SetChannelParameters(uint32_t packet_loss, int64_t rtt) {
603 return WEBRTC_VIDEO_CODEC_OK; 773 return WEBRTC_VIDEO_CODEC_OK;
604 } 774 }
605 775
606 int VP9EncoderImpl::RegisterEncodeCompleteCallback( 776 int VP9EncoderImpl::RegisterEncodeCompleteCallback(
607 EncodedImageCallback* callback) { 777 EncodedImageCallback* callback) {
608 encoded_complete_callback_ = callback; 778 encoded_complete_callback_ = callback;
609 return WEBRTC_VIDEO_CODEC_OK; 779 return WEBRTC_VIDEO_CODEC_OK;
610 } 780 }
611 781
(...skipping 164 matching lines...) Expand 10 before | Expand all | Expand 10 after
776 decoder_ = NULL; 946 decoder_ = NULL;
777 } 947 }
778 // Releases buffers from the pool. Any buffers not in use are deleted. Buffers 948 // Releases buffers from the pool. Any buffers not in use are deleted. Buffers
779 // still referenced externally are deleted once fully released, not returning 949 // still referenced externally are deleted once fully released, not returning
780 // to the pool. 950 // to the pool.
781 frame_buffer_pool_.ClearPool(); 951 frame_buffer_pool_.ClearPool();
782 inited_ = false; 952 inited_ = false;
783 return WEBRTC_VIDEO_CODEC_OK; 953 return WEBRTC_VIDEO_CODEC_OK;
784 } 954 }
785 } // namespace webrtc 955 } // namespace webrtc
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698