OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
(...skipping 271 matching lines...)
282 | 282 |
283 unsigned int target_bitrate = stream_bitrates[stream_idx]; | 283 unsigned int target_bitrate = stream_bitrates[stream_idx]; |
284 unsigned int max_bitrate = codec_.maxBitrate; | 284 unsigned int max_bitrate = codec_.maxBitrate; |
285 int framerate = new_framerate; | 285 int framerate = new_framerate; |
286 // TODO(holmer): This is a temporary hack for screensharing, where we | 286 // TODO(holmer): This is a temporary hack for screensharing, where we |
287 // interpret the startBitrate as the encoder target bitrate. This is | 287 // interpret the startBitrate as the encoder target bitrate. This is |
288 // to allow for a different max bitrate, so if the codec can't meet | 288 // to allow for a different max bitrate, so if the codec can't meet |
289 // the target we still allow it to overshoot up to the max before dropping | 289 // the target we still allow it to overshoot up to the max before dropping |
290 // frames. This hack should be improved. | 290 // frames. This hack should be improved. |
291 if (codec_.targetBitrate > 0 && | 291 if (codec_.targetBitrate > 0 && |
292 (codec_.codecSpecific.VP8.numberOfTemporalLayers == 2 || | 292 (codec_.VP8()->numberOfTemporalLayers == 2 || |
293 codec_.simulcastStream[0].numberOfTemporalLayers == 2)) { | 293 codec_.simulcastStream[0].numberOfTemporalLayers == 2)) { |
294 int tl0_bitrate = std::min(codec_.targetBitrate, target_bitrate); | 294 int tl0_bitrate = std::min(codec_.targetBitrate, target_bitrate); |
295 max_bitrate = std::min(codec_.maxBitrate, target_bitrate); | 295 max_bitrate = std::min(codec_.maxBitrate, target_bitrate); |
296 target_bitrate = tl0_bitrate; | 296 target_bitrate = tl0_bitrate; |
297 } | 297 } |
298 configurations_[i].rc_target_bitrate = target_bitrate; | 298 configurations_[i].rc_target_bitrate = target_bitrate; |
299 temporal_layers_[stream_idx]->ConfigureBitrates( | 299 temporal_layers_[stream_idx]->ConfigureBitrates( |
300 target_bitrate, max_bitrate, framerate, &configurations_[i]); | 300 target_bitrate, max_bitrate, framerate, &configurations_[i]); |
301 if (vpx_codec_enc_config_set(&encoders_[i], &configurations_[i])) { | 301 if (vpx_codec_enc_config_set(&encoders_[i], &configurations_[i])) { |
302 return WEBRTC_VIDEO_CODEC_ERROR; | 302 return WEBRTC_VIDEO_CODEC_ERROR; |
(...skipping 13 matching lines...)
316 // Need a key frame if we have not sent this stream before. | 316 // Need a key frame if we have not sent this stream before. |
317 key_frame_request_[stream_idx] = true; | 317 key_frame_request_[stream_idx] = true; |
318 } | 318 } |
319 send_stream_[stream_idx] = send_stream; | 319 send_stream_[stream_idx] = send_stream; |
320 } | 320 } |
321 | 321 |
322 void VP8EncoderImpl::SetupTemporalLayers(int num_streams, | 322 void VP8EncoderImpl::SetupTemporalLayers(int num_streams, |
323 int num_temporal_layers, | 323 int num_temporal_layers, |
324 const VideoCodec& codec) { | 324 const VideoCodec& codec) { |
325 TemporalLayersFactory default_factory; | 325 TemporalLayersFactory default_factory; |
326 const TemporalLayersFactory* tl_factory = codec.codecSpecific.VP8.tl_factory; | 326 const TemporalLayersFactory* tl_factory = codec.VP8().tl_factory; |
327 if (!tl_factory) | 327 if (!tl_factory) |
328 tl_factory = &default_factory; | 328 tl_factory = &default_factory; |
329 if (num_streams == 1) { | 329 if (num_streams == 1) { |
330 if (codec.mode == kScreensharing) { | 330 if (codec.mode == kScreensharing) { |
331 // Special mode when screensharing on a single stream. | 331 // Special mode when screensharing on a single stream. |
332 temporal_layers_.push_back(new ScreenshareLayers( | 332 temporal_layers_.push_back(new ScreenshareLayers( |
333 num_temporal_layers, rand(), webrtc::Clock::GetRealTimeClock())); | 333 num_temporal_layers, rand(), webrtc::Clock::GetRealTimeClock())); |
334 } else { | 334 } else { |
335 temporal_layers_.push_back( | 335 temporal_layers_.push_back( |
336 tl_factory->Create(num_temporal_layers, rand())); | 336 tl_factory->Create(num_temporal_layers, rand())); |
(...skipping 21 matching lines...)
358 // allow zero to represent an unspecified maxBitRate | 358 // allow zero to represent an unspecified maxBitRate |
359 if (inst->maxBitrate > 0 && inst->startBitrate > inst->maxBitrate) { | 359 if (inst->maxBitrate > 0 && inst->startBitrate > inst->maxBitrate) { |
360 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 360 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
361 } | 361 } |
362 if (inst->width <= 1 || inst->height <= 1) { | 362 if (inst->width <= 1 || inst->height <= 1) { |
363 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 363 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
364 } | 364 } |
365 if (number_of_cores < 1) { | 365 if (number_of_cores < 1) { |
366 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 366 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
367 } | 367 } |
368 if (inst->codecSpecific.VP8.feedbackModeOn && | 368 if (inst->VP8().feedbackModeOn && inst->numberOfSimulcastStreams > 1) { |
369 inst->numberOfSimulcastStreams > 1) { | |
370 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 369 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
371 } | 370 } |
372 if (inst->codecSpecific.VP8.automaticResizeOn && | 371 if (inst->VP8().automaticResizeOn && inst->numberOfSimulcastStreams > 1) { |
373 inst->numberOfSimulcastStreams > 1) { | |
374 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 372 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
375 } | 373 } |
376 int retVal = Release(); | 374 int retVal = Release(); |
377 if (retVal < 0) { | 375 if (retVal < 0) { |
378 return retVal; | 376 return retVal; |
379 } | 377 } |
380 | 378 |
381 int number_of_streams = NumberOfStreams(*inst); | 379 int number_of_streams = NumberOfStreams(*inst); |
382 bool doing_simulcast = (number_of_streams > 1); | 380 bool doing_simulcast = (number_of_streams > 1); |
383 | 381 |
384 if (doing_simulcast && !ValidSimulcastResolutions(*inst, number_of_streams)) { | 382 if (doing_simulcast && !ValidSimulcastResolutions(*inst, number_of_streams)) { |
385 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 383 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
386 } | 384 } |
387 | 385 |
388 int num_temporal_layers = | 386 int num_temporal_layers = |
389 doing_simulcast ? inst->simulcastStream[0].numberOfTemporalLayers | 387 doing_simulcast ? inst->simulcastStream[0].numberOfTemporalLayers |
390 : inst->codecSpecific.VP8.numberOfTemporalLayers; | 388 : inst->VP8().numberOfTemporalLayers; |
391 | 389 |
392 // TODO(andresp): crash if num temporal layers is bananas. | 390 // TODO(andresp): crash if num temporal layers is bananas. |
393 if (num_temporal_layers < 1) | 391 if (num_temporal_layers < 1) |
394 num_temporal_layers = 1; | 392 num_temporal_layers = 1; |
395 SetupTemporalLayers(number_of_streams, num_temporal_layers, *inst); | 393 SetupTemporalLayers(number_of_streams, num_temporal_layers, *inst); |
396 | 394 |
397 feedback_mode_ = inst->codecSpecific.VP8.feedbackModeOn; | 395 feedback_mode_ = inst->VP8().feedbackModeOn; |
398 | 396 |
399 timestamp_ = 0; | 397 timestamp_ = 0; |
400 codec_ = *inst; | 398 codec_ = *inst; |
401 | 399 |
402 // Code expects simulcastStream resolutions to be correct, make sure they are | 400 // Code expects simulcastStream resolutions to be correct, make sure they are |
403 // filled even when there are no simulcast layers. | 401 // filled even when there are no simulcast layers. |
404 if (codec_.numberOfSimulcastStreams == 0) { | 402 if (codec_.numberOfSimulcastStreams == 0) { |
405 codec_.simulcastStream[0].width = codec_.width; | 403 codec_.simulcastStream[0].width = codec_.width; |
406 codec_.simulcastStream[0].height = codec_.height; | 404 codec_.simulcastStream[0].height = codec_.height; |
407 } | 405 } |
(...skipping 40 matching lines...)
448 if (vpx_codec_enc_config_default(vpx_codec_vp8_cx(), &configurations_[0], | 446 if (vpx_codec_enc_config_default(vpx_codec_vp8_cx(), &configurations_[0], |
449 0)) { | 447 0)) { |
450 return WEBRTC_VIDEO_CODEC_ERROR; | 448 return WEBRTC_VIDEO_CODEC_ERROR; |
451 } | 449 } |
452 // setting the time base of the codec | 450 // setting the time base of the codec |
453 configurations_[0].g_timebase.num = 1; | 451 configurations_[0].g_timebase.num = 1; |
454 configurations_[0].g_timebase.den = 90000; | 452 configurations_[0].g_timebase.den = 90000; |
455 configurations_[0].g_lag_in_frames = 0; // 0- no frame lagging | 453 configurations_[0].g_lag_in_frames = 0; // 0- no frame lagging |
456 | 454 |
457 // Set the error resilience mode according to user settings. | 455 // Set the error resilience mode according to user settings. |
458 switch (inst->codecSpecific.VP8.resilience) { | 456 switch (inst->VP8().resilience) { |
459 case kResilienceOff: | 457 case kResilienceOff: |
460 // TODO(marpan): We should keep error resilience off for this mode, | 458 // TODO(marpan): We should keep error resilience off for this mode, |
461 // independent of temporal layer settings, and make sure we set | 459 // independent of temporal layer settings, and make sure we set |
462 // |codecSpecific.VP8.resilience| = |kResilientStream| at higher level | 460 // |codecSpecific.VP8.resilience| = |kResilientStream| at higher level |
463 // code if we want to get error resilience on. | 461 // code if we want to get error resilience on. |
464 configurations_[0].g_error_resilient = 1; | 462 configurations_[0].g_error_resilient = 1; |
465 break; | 463 break; |
466 case kResilientStream: | 464 case kResilientStream: |
467 configurations_[0].g_error_resilient = 1; // TODO(holmer): Replace with | 465 configurations_[0].g_error_resilient = 1; // TODO(holmer): Replace with |
468 // VPX_ERROR_RESILIENT_DEFAULT when we | 466 // VPX_ERROR_RESILIENT_DEFAULT when we |
469 // drop support for libvpx 9.6.0. | 467 // drop support for libvpx 9.6.0. |
470 break; | 468 break; |
471 case kResilientFrames: | 469 case kResilientFrames: |
472 #ifdef INDEPENDENT_PARTITIONS | 470 #ifdef INDEPENDENT_PARTITIONS |
473 configurations_[0].g_error_resilient = | 471 configurations_[0].g_error_resilient = |
474 VPX_ERROR_RESILIENT_DEFAULT | VPX_ERROR_RESILIENT_PARTITIONS; | 472 VPX_ERROR_RESILIENT_DEFAULT | VPX_ERROR_RESILIENT_PARTITIONS; |
475 break; | 473 break; |
476 #else | 474 #else |
477 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; // Not supported | 475 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; // Not supported |
478 #endif | 476 #endif |
479 } | 477 } |
480 | 478 |
481 // rate control settings | 479 // rate control settings |
482 configurations_[0].rc_dropframe_thresh = | 480 configurations_[0].rc_dropframe_thresh = inst->VP8().frameDroppingOn ? 30 : 0; |
483 inst->codecSpecific.VP8.frameDroppingOn ? 30 : 0; | |
484 configurations_[0].rc_end_usage = VPX_CBR; | 481 configurations_[0].rc_end_usage = VPX_CBR; |
485 configurations_[0].g_pass = VPX_RC_ONE_PASS; | 482 configurations_[0].g_pass = VPX_RC_ONE_PASS; |
486 // TODO(hellner): investigate why the following two lines produce | 483 // TODO(hellner): investigate why the following two lines produce |
487 // automaticResizeOn value of 3 when running | 484 // automaticResizeOn value of 3 when running |
488 // WebRtcVideoMediaChannelTest.GetStatsMultipleSendStreams inside the talk | 485 // WebRtcVideoMediaChannelTest.GetStatsMultipleSendStreams inside the talk |
489 // framework. | 486 // framework. |
490 // configurations_[0].rc_resize_allowed = | 487 // configurations_[0].rc_resize_allowed = |
491 // inst->codecSpecific.VP8.automaticResizeOn ? 1 : 0; | 488 // inst->codecSpecific.VP8.automaticResizeOn ? 1 : 0; |
492 configurations_[0].rc_resize_allowed = 0; | 489 configurations_[0].rc_resize_allowed = 0; |
493 // Handle resizing outside of libvpx when doing single-stream. | 490 // Handle resizing outside of libvpx when doing single-stream. |
494 if (inst->codecSpecific.VP8.automaticResizeOn && number_of_streams > 1) { | 491 if (inst->VP8().automaticResizeOn && number_of_streams > 1) { |
495 configurations_[0].rc_resize_allowed = 1; | 492 configurations_[0].rc_resize_allowed = 1; |
496 } | 493 } |
497 configurations_[0].rc_min_quantizer = 2; | 494 configurations_[0].rc_min_quantizer = 2; |
498 if (inst->qpMax >= configurations_[0].rc_min_quantizer) { | 495 if (inst->qpMax >= configurations_[0].rc_min_quantizer) { |
499 qp_max_ = inst->qpMax; | 496 qp_max_ = inst->qpMax; |
500 } | 497 } |
501 configurations_[0].rc_max_quantizer = qp_max_; | 498 configurations_[0].rc_max_quantizer = qp_max_; |
502 configurations_[0].rc_undershoot_pct = 100; | 499 configurations_[0].rc_undershoot_pct = 100; |
503 configurations_[0].rc_overshoot_pct = 15; | 500 configurations_[0].rc_overshoot_pct = 15; |
504 configurations_[0].rc_buf_initial_sz = 500; | 501 configurations_[0].rc_buf_initial_sz = 500; |
505 configurations_[0].rc_buf_optimal_sz = 600; | 502 configurations_[0].rc_buf_optimal_sz = 600; |
506 configurations_[0].rc_buf_sz = 1000; | 503 configurations_[0].rc_buf_sz = 1000; |
507 | 504 |
508 // Set the maximum target size of any key-frame. | 505 // Set the maximum target size of any key-frame. |
509 rc_max_intra_target_ = MaxIntraTarget(configurations_[0].rc_buf_optimal_sz); | 506 rc_max_intra_target_ = MaxIntraTarget(configurations_[0].rc_buf_optimal_sz); |
510 | 507 |
511 if (feedback_mode_) { | 508 if (feedback_mode_) { |
512 // Disable periodic key frames if we get feedback from the decoder | 509 // Disable periodic key frames if we get feedback from the decoder |
513 // through SLI and RPSI. | 510 // through SLI and RPSI. |
514 configurations_[0].kf_mode = VPX_KF_DISABLED; | 511 configurations_[0].kf_mode = VPX_KF_DISABLED; |
515 } else if (inst->codecSpecific.VP8.keyFrameInterval > 0) { | 512 } else if (inst->VP8().keyFrameInterval > 0) { |
516 configurations_[0].kf_mode = VPX_KF_AUTO; | 513 configurations_[0].kf_mode = VPX_KF_AUTO; |
517 configurations_[0].kf_max_dist = inst->codecSpecific.VP8.keyFrameInterval; | 514 configurations_[0].kf_max_dist = inst->VP8().keyFrameInterval; |
518 } else { | 515 } else { |
519 configurations_[0].kf_mode = VPX_KF_DISABLED; | 516 configurations_[0].kf_mode = VPX_KF_DISABLED; |
520 } | 517 } |
521 | 518 |
522 // Allow the user to set the complexity for the base stream. | 519 // Allow the user to set the complexity for the base stream. |
523 switch (inst->codecSpecific.VP8.complexity) { | 520 switch (inst->VP8().complexity) { |
524 case kComplexityHigh: | 521 case kComplexityHigh: |
525 cpu_speed_[0] = -5; | 522 cpu_speed_[0] = -5; |
526 break; | 523 break; |
527 case kComplexityHigher: | 524 case kComplexityHigher: |
528 cpu_speed_[0] = -4; | 525 cpu_speed_[0] = -4; |
529 break; | 526 break; |
530 case kComplexityMax: | 527 case kComplexityMax: |
531 cpu_speed_[0] = -3; | 528 cpu_speed_[0] = -3; |
532 break; | 529 break; |
533 default: | 530 default: |
(...skipping 65 matching lines...)
599 | 596 |
600 rps_.Init(); | 597 rps_.Init(); |
601 quality_scaler_.Init(QualityScaler::kLowVp8QpThreshold, | 598 quality_scaler_.Init(QualityScaler::kLowVp8QpThreshold, |
602 QualityScaler::kBadVp8QpThreshold, codec_.startBitrate, | 599 QualityScaler::kBadVp8QpThreshold, codec_.startBitrate, |
603 codec_.width, codec_.height, codec_.maxFramerate); | 600 codec_.width, codec_.height, codec_.maxFramerate); |
604 | 601 |
605 // Only apply quality scaling for single-layer streams. The scaling metrics | 602 // Only apply quality scaling for single-layer streams. The scaling metrics |
606 // use frame drops as a signal and are only applicable when we drop frames. | 603 // use frame drops as a signal and are only applicable when we drop frames. |
607 quality_scaler_enabled_ = encoders_.size() == 1 && | 604 quality_scaler_enabled_ = encoders_.size() == 1 && |
608 configurations_[0].rc_dropframe_thresh > 0 && | 605 configurations_[0].rc_dropframe_thresh > 0 && |
609 codec_.codecSpecific.VP8.automaticResizeOn; | 606 codec_.VP8()->automaticResizeOn; |
610 | 607 |
611 return InitAndSetControlSettings(); | 608 return InitAndSetControlSettings(); |
612 } | 609 } |
613 | 610 |
614 int VP8EncoderImpl::SetCpuSpeed(int width, int height) { | 611 int VP8EncoderImpl::SetCpuSpeed(int width, int height) { |
615 #if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) || defined(ANDROID) | 612 #if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) || defined(ANDROID) |
616 // On mobile platforms, always set to -12 to balance cpu usage | 613 // On mobile platforms, always set to -12 to balance cpu usage |
617 // and video quality. | 614 // and video quality. |
618 return -12; | 615 return -12; |
619 #else | 616 #else |
620 // For non-ARM, increase encoding complexity (i.e., use lower speed setting) | 617 // For non-ARM, increase encoding complexity (i.e., use lower speed setting) |
621 // if resolution is below CIF. Otherwise, keep the default/user setting | 618 // if resolution is below CIF. Otherwise, keep the default/user setting |
622 // (|cpu_speed_default_|) set on InitEncode via codecSpecific.VP8.complexity. | 619 // (|cpu_speed_default_|) set on InitEncode via VP8().complexity. |
623 if (width * height < 352 * 288) | 620 if (width * height < 352 * 288) |
624 return (cpu_speed_default_ < -4) ? -4 : cpu_speed_default_; | 621 return (cpu_speed_default_ < -4) ? -4 : cpu_speed_default_; |
625 else | 622 else |
626 return cpu_speed_default_; | 623 return cpu_speed_default_; |
627 #endif | 624 #endif |
628 } | 625 } |
629 | 626 |
630 int VP8EncoderImpl::NumberOfThreads(int width, int height, int cpus) { | 627 int VP8EncoderImpl::NumberOfThreads(int width, int height, int cpus) { |
631 if (width * height >= 1920 * 1080 && cpus > 8) { | 628 if (width * height >= 1920 * 1080 && cpus > 8) { |
632 return 8; // 8 threads for 1080p on high perf machines. | 629 return 8; // 8 threads for 1080p on high perf machines. |
(...skipping 32 matching lines...)
665 // TODO(holmer): Investigate possibility of adding a libvpx API | 662 // TODO(holmer): Investigate possibility of adding a libvpx API |
666 // for getting the denoised frame from the encoder and using that | 663 // for getting the denoised frame from the encoder and using that |
667 // when encoding lower resolution streams. Would it work with the | 664 // when encoding lower resolution streams. Would it work with the |
668 // multi-res encoding feature? | 665 // multi-res encoding feature? |
669 denoiserState denoiser_state = kDenoiserOnYOnly; | 666 denoiserState denoiser_state = kDenoiserOnYOnly; |
670 #if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) || defined(ANDROID) | 667 #if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) || defined(ANDROID) |
671 denoiser_state = kDenoiserOnYOnly; | 668 denoiser_state = kDenoiserOnYOnly; |
672 #else | 669 #else |
673 denoiser_state = kDenoiserOnAdaptive; | 670 denoiser_state = kDenoiserOnAdaptive; |
674 #endif | 671 #endif |
675 vpx_codec_control( | 672 vpx_codec_control(&encoders_[0], VP8E_SET_NOISE_SENSITIVITY, |
676 &encoders_[0], VP8E_SET_NOISE_SENSITIVITY, | 673 codec_.VP8()->denoisingOn ? denoiser_state : kDenoiserOff); |
677 codec_.codecSpecific.VP8.denoisingOn ? denoiser_state : kDenoiserOff); | |
678 if (encoders_.size() > 2) { | 674 if (encoders_.size() > 2) { |
679 vpx_codec_control( | 675 vpx_codec_control( |
680 &encoders_[1], VP8E_SET_NOISE_SENSITIVITY, | 676 &encoders_[1], VP8E_SET_NOISE_SENSITIVITY, |
681 codec_.codecSpecific.VP8.denoisingOn ? denoiser_state : kDenoiserOff); | 677 codec_.VP8()->denoisingOn ? denoiser_state : kDenoiserOff); |
682 } | 678 } |
683 for (size_t i = 0; i < encoders_.size(); ++i) { | 679 for (size_t i = 0; i < encoders_.size(); ++i) { |
684 // Allow more screen content to be detected as static. | 680 // Allow more screen content to be detected as static. |
685 vpx_codec_control(&(encoders_[i]), VP8E_SET_STATIC_THRESHOLD, | 681 vpx_codec_control(&(encoders_[i]), VP8E_SET_STATIC_THRESHOLD, |
686 codec_.mode == kScreensharing ? 300 : 1); | 682 codec_.mode == kScreensharing ? 300 : 1); |
687 vpx_codec_control(&(encoders_[i]), VP8E_SET_CPUUSED, cpu_speed_[i]); | 683 vpx_codec_control(&(encoders_[i]), VP8E_SET_CPUUSED, cpu_speed_[i]); |
688 vpx_codec_control(&(encoders_[i]), VP8E_SET_TOKEN_PARTITIONS, | 684 vpx_codec_control(&(encoders_[i]), VP8E_SET_TOKEN_PARTITIONS, |
689 static_cast<vp8e_token_partitions>(token_partitions_)); | 685 static_cast<vp8e_token_partitions>(token_partitions_)); |
690 vpx_codec_control(&(encoders_[i]), VP8E_SET_MAX_INTRA_BITRATE_PCT, | 686 vpx_codec_control(&(encoders_[i]), VP8E_SET_MAX_INTRA_BITRATE_PCT, |
691 rc_max_intra_target_); | 687 rc_max_intra_target_); |
(...skipping 109 matching lines...)
801 } | 797 } |
802 } | 798 } |
803 // The flag modification below (due to forced key frame, RPS, etc.,) for now | 799 // The flag modification below (due to forced key frame, RPS, etc.,) for now |
804 // will be the same for all encoders/spatial layers. | 800 // will be the same for all encoders/spatial layers. |
805 // TODO(marpan/holmer): Allow for key frame request to be set per encoder. | 801 // TODO(marpan/holmer): Allow for key frame request to be set per encoder. |
806 bool only_predict_from_key_frame = false; | 802 bool only_predict_from_key_frame = false; |
807 if (send_key_frame) { | 803 if (send_key_frame) { |
808 // Adapt the size of the key frame when in screenshare with 1 temporal | 804 // Adapt the size of the key frame when in screenshare with 1 temporal |
809 // layer. | 805 // layer. |
810 if (encoders_.size() == 1 && codec_.mode == kScreensharing && | 806 if (encoders_.size() == 1 && codec_.mode == kScreensharing && |
811 codec_.codecSpecific.VP8.numberOfTemporalLayers <= 1) { | 807 codec_.VP8()->numberOfTemporalLayers <= 1) { |
812 const uint32_t forceKeyFrameIntraTh = 100; | 808 const uint32_t forceKeyFrameIntraTh = 100; |
813 vpx_codec_control(&(encoders_[0]), VP8E_SET_MAX_INTRA_BITRATE_PCT, | 809 vpx_codec_control(&(encoders_[0]), VP8E_SET_MAX_INTRA_BITRATE_PCT, |
814 forceKeyFrameIntraTh); | 810 forceKeyFrameIntraTh); |
815 } | 811 } |
816 // Key frame request from caller. | 812 // Key frame request from caller. |
817 // Will update both golden and alt-ref. | 813 // Will update both golden and alt-ref. |
818 for (size_t i = 0; i < encoders_.size(); ++i) { | 814 for (size_t i = 0; i < encoders_.size(); ++i) { |
819 flags[i] = VPX_EFLAG_FORCE_KF; | 815 flags[i] = VPX_EFLAG_FORCE_KF; |
820 } | 816 } |
821 std::fill(key_frame_request_.begin(), key_frame_request_.end(), false); | 817 std::fill(key_frame_request_.begin(), key_frame_request_.end(), false); |
(...skipping 271 matching lines...)
1093 | 1089 |
1094 int VP8DecoderImpl::InitDecode(const VideoCodec* inst, int number_of_cores) { | 1090 int VP8DecoderImpl::InitDecode(const VideoCodec* inst, int number_of_cores) { |
1095 int ret_val = Release(); | 1091 int ret_val = Release(); |
1096 if (ret_val < 0) { | 1092 if (ret_val < 0) { |
1097 return ret_val; | 1093 return ret_val; |
1098 } | 1094 } |
1099 if (decoder_ == NULL) { | 1095 if (decoder_ == NULL) { |
1100 decoder_ = new vpx_codec_ctx_t; | 1096 decoder_ = new vpx_codec_ctx_t; |
1101 } | 1097 } |
1102 if (inst && inst->codecType == kVideoCodecVP8) { | 1098 if (inst && inst->codecType == kVideoCodecVP8) { |
1103 feedback_mode_ = inst->codecSpecific.VP8.feedbackModeOn; | 1099 feedback_mode_ = inst->VP8().feedbackModeOn; |
1104 } | 1100 } |
1105 vpx_codec_dec_cfg_t cfg; | 1101 vpx_codec_dec_cfg_t cfg; |
1106 // Setting number of threads to a constant value (1) | 1102 // Setting number of threads to a constant value (1) |
1107 cfg.threads = 1; | 1103 cfg.threads = 1; |
1108 cfg.h = cfg.w = 0; // set after decode | 1104 cfg.h = cfg.w = 0; // set after decode |
1109 | 1105 |
1110 vpx_codec_flags_t flags = 0; | 1106 vpx_codec_flags_t flags = 0; |
1111 #if !defined(WEBRTC_ARCH_ARM) && !defined(WEBRTC_ARCH_ARM64) && \ | 1107 #if !defined(WEBRTC_ARCH_ARM) && !defined(WEBRTC_ARCH_ARM64) && \ |
1112 !defined(ANDROID) | 1108 !defined(ANDROID) |
1113 flags = VPX_CODEC_USE_POSTPROC; | 1109 flags = VPX_CODEC_USE_POSTPROC; |
(...skipping 297 matching lines...)
1411 return -1; | 1407 return -1; |
1412 } | 1408 } |
1413 if (vpx_codec_control(copy->decoder_, VP8_SET_REFERENCE, ref_frame_) != | 1409 if (vpx_codec_control(copy->decoder_, VP8_SET_REFERENCE, ref_frame_) != |
1414 VPX_CODEC_OK) { | 1410 VPX_CODEC_OK) { |
1415 return -1; | 1411 return -1; |
1416 } | 1412 } |
1417 return 0; | 1413 return 0; |
1418 } | 1414 } |
1419 | 1415 |
1420 } // namespace webrtc | 1416 } // namespace webrtc |
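The recurring edit in this CL replaces direct codecSpecific.VP8 field access with the VP8() accessors on VideoCodec. Judging from the call sites above (codec_.VP8()->... on a mutable codec_ and inst->VP8()... on a const pointer), the non-const overload returns a pointer and the const overload returns a reference. Below is a minimal sketch of such an accessor pair, assuming an RTC_DCHECK-style guard on the codec type and the existing codecSpecific union member; it is an illustration of the pattern, not the actual VideoCodec definition.

// Hypothetical sketch of the accessor pair implied by the call sites in this
// diff; the RTC_DCHECK_EQ guard and exact member layout are assumptions.
VideoCodecVP8* VideoCodec::VP8() {
  RTC_DCHECK_EQ(codecType, kVideoCodecVP8);  // Only valid for VP8 configs.
  return &codecSpecific.VP8;                 // Mutable: codec_.VP8()->denoisingOn.
}

const VideoCodecVP8& VideoCodec::VP8() const {
  RTC_DCHECK_EQ(codecType, kVideoCodecVP8);
  return codecSpecific.VP8;                  // Read-only: inst->VP8().feedbackModeOn.
}

Funneling access through these helpers lets the codec type be checked once at the access point, instead of trusting every caller to read the right member of the codecSpecific union.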