| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
(...skipping 229 matching lines...)
| 240 | 240 |
| 241 unsigned int target_bitrate = stream_bitrates[stream_idx]; | 241 unsigned int target_bitrate = stream_bitrates[stream_idx]; |
| 242 unsigned int max_bitrate = codec_.maxBitrate; | 242 unsigned int max_bitrate = codec_.maxBitrate; |
| 243 int framerate = new_framerate; | 243 int framerate = new_framerate; |
| 244 // TODO(holmer): This is a temporary hack for screensharing, where we | 244 // TODO(holmer): This is a temporary hack for screensharing, where we |
| 245 // interpret the startBitrate as the encoder target bitrate. This is | 245 // interpret the startBitrate as the encoder target bitrate. This is |
| 246 // to allow for a different max bitrate, so if the codec can't meet | 246 // to allow for a different max bitrate, so if the codec can't meet |
| 247 // the target we still allow it to overshoot up to the max before dropping | 247 // the target we still allow it to overshoot up to the max before dropping |
| 248 // frames. This hack should be improved. | 248 // frames. This hack should be improved. |
| 249 if (codec_.targetBitrate > 0 && | 249 if (codec_.targetBitrate > 0 && |
| 250 (codec_.codecSpecific.VP8.numberOfTemporalLayers == 2 || | 250 (codec_.VP8()->numberOfTemporalLayers == 2 || |
| 251 codec_.simulcastStream[0].numberOfTemporalLayers == 2)) { | 251 codec_.simulcastStream[0].numberOfTemporalLayers == 2)) { |
| 252 int tl0_bitrate = std::min(codec_.targetBitrate, target_bitrate); | 252 int tl0_bitrate = std::min(codec_.targetBitrate, target_bitrate); |
| 253 max_bitrate = std::min(codec_.maxBitrate, target_bitrate); | 253 max_bitrate = std::min(codec_.maxBitrate, target_bitrate); |
| 254 target_bitrate = tl0_bitrate; | 254 target_bitrate = tl0_bitrate; |
| 255 } | 255 } |
| 256 configurations_[i].rc_target_bitrate = target_bitrate; | 256 configurations_[i].rc_target_bitrate = target_bitrate; |
| 257 temporal_layers_[stream_idx]->ConfigureBitrates( | 257 temporal_layers_[stream_idx]->ConfigureBitrates( |
| 258 target_bitrate, max_bitrate, framerate, &configurations_[i]); | 258 target_bitrate, max_bitrate, framerate, &configurations_[i]); |
| 259 if (vpx_codec_enc_config_set(&encoders_[i], &configurations_[i])) { | 259 if (vpx_codec_enc_config_set(&encoders_[i], &configurations_[i])) { |
| 260 return WEBRTC_VIDEO_CODEC_ERROR; | 260 return WEBRTC_VIDEO_CODEC_ERROR; |
(...skipping 18 matching lines...)
| 279 // Need a key frame if we have not sent this stream before. | 279 // Need a key frame if we have not sent this stream before. |
| 280 key_frame_request_[stream_idx] = true; | 280 key_frame_request_[stream_idx] = true; |
| 281 } | 281 } |
| 282 send_stream_[stream_idx] = send_stream; | 282 send_stream_[stream_idx] = send_stream; |
| 283 } | 283 } |
| 284 | 284 |
| 285 void VP8EncoderImpl::SetupTemporalLayers(int num_streams, | 285 void VP8EncoderImpl::SetupTemporalLayers(int num_streams, |
| 286 int num_temporal_layers, | 286 int num_temporal_layers, |
| 287 const VideoCodec& codec) { | 287 const VideoCodec& codec) { |
| 288 TemporalLayersFactory default_factory; | 288 TemporalLayersFactory default_factory; |
| 289 const TemporalLayersFactory* tl_factory = codec.codecSpecific.VP8.tl_factory; | 289 const TemporalLayersFactory* tl_factory = codec.VP8().tl_factory; |
| 290 if (!tl_factory) | 290 if (!tl_factory) |
| 291 tl_factory = &default_factory; | 291 tl_factory = &default_factory; |
| 292 if (num_streams == 1) { | 292 if (num_streams == 1) { |
| 293 if (codec.mode == kScreensharing) { | 293 if (codec.mode == kScreensharing) { |
| 294 // Special mode when screensharing on a single stream. | 294 // Special mode when screensharing on a single stream. |
| 295 temporal_layers_.push_back(new ScreenshareLayers( | 295 temporal_layers_.push_back(new ScreenshareLayers( |
| 296 num_temporal_layers, rand(), webrtc::Clock::GetRealTimeClock())); | 296 num_temporal_layers, rand(), webrtc::Clock::GetRealTimeClock())); |
| 297 } else { | 297 } else { |
| 298 temporal_layers_.push_back( | 298 temporal_layers_.push_back( |
| 299 tl_factory->Create(num_temporal_layers, rand())); | 299 tl_factory->Create(num_temporal_layers, rand())); |
(...skipping 21 matching lines...)
| 321 // allow zero to represent an unspecified maxBitRate | 321 // allow zero to represent an unspecified maxBitRate |
| 322 if (inst->maxBitrate > 0 && inst->startBitrate > inst->maxBitrate) { | 322 if (inst->maxBitrate > 0 && inst->startBitrate > inst->maxBitrate) { |
| 323 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 323 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 324 } | 324 } |
| 325 if (inst->width <= 1 || inst->height <= 1) { | 325 if (inst->width <= 1 || inst->height <= 1) { |
| 326 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 326 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 327 } | 327 } |
| 328 if (number_of_cores < 1) { | 328 if (number_of_cores < 1) { |
| 329 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 329 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 330 } | 330 } |
| 331 if (inst->codecSpecific.VP8.feedbackModeOn && | 331 if (inst->VP8().feedbackModeOn && inst->numberOfSimulcastStreams > 1) { |
| 332 inst->numberOfSimulcastStreams > 1) { | |
| 333 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 332 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 334 } | 333 } |
| 335 if (inst->codecSpecific.VP8.automaticResizeOn && | 334 if (inst->VP8().automaticResizeOn && inst->numberOfSimulcastStreams > 1) { |
| 336 inst->numberOfSimulcastStreams > 1) { | |
| 337 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 335 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 338 } | 336 } |
| 339 int retVal = Release(); | 337 int retVal = Release(); |
| 340 if (retVal < 0) { | 338 if (retVal < 0) { |
| 341 return retVal; | 339 return retVal; |
| 342 } | 340 } |
| 343 | 341 |
| 344 int number_of_streams = NumberOfStreams(*inst); | 342 int number_of_streams = NumberOfStreams(*inst); |
| 345 bool doing_simulcast = (number_of_streams > 1); | 343 bool doing_simulcast = (number_of_streams > 1); |
| 346 | 344 |
| 347 if (doing_simulcast && !ValidSimulcastResolutions(*inst, number_of_streams)) { | 345 if (doing_simulcast && !ValidSimulcastResolutions(*inst, number_of_streams)) { |
| 348 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 346 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 349 } | 347 } |
| 350 | 348 |
| 351 int num_temporal_layers = | 349 int num_temporal_layers = |
| 352 doing_simulcast ? inst->simulcastStream[0].numberOfTemporalLayers | 350 doing_simulcast ? inst->simulcastStream[0].numberOfTemporalLayers |
| 353 : inst->codecSpecific.VP8.numberOfTemporalLayers; | 351 : inst->VP8().numberOfTemporalLayers; |
| 354 | 352 |
| 355 // TODO(andresp): crash if num temporal layers is bananas. | 353 // TODO(andresp): crash if num temporal layers is bananas. |
| 356 if (num_temporal_layers < 1) | 354 if (num_temporal_layers < 1) |
| 357 num_temporal_layers = 1; | 355 num_temporal_layers = 1; |
| 358 SetupTemporalLayers(number_of_streams, num_temporal_layers, *inst); | 356 SetupTemporalLayers(number_of_streams, num_temporal_layers, *inst); |
| 359 | 357 |
| 360 feedback_mode_ = inst->codecSpecific.VP8.feedbackModeOn; | 358 feedback_mode_ = inst->VP8().feedbackModeOn; |
| 361 | 359 |
| 362 timestamp_ = 0; | 360 timestamp_ = 0; |
| 363 codec_ = *inst; | 361 codec_ = *inst; |
| 364 rate_allocator_.reset(new SimulcastRateAllocator(codec_)); | 362 rate_allocator_.reset(new SimulcastRateAllocator(codec_)); |
| 365 | 363 |
| 366 // Code expects simulcastStream resolutions to be correct, make sure they are | 364 // Code expects simulcastStream resolutions to be correct, make sure they are |
| 367 // filled even when there are no simulcast layers. | 365 // filled even when there are no simulcast layers. |
| 368 if (codec_.numberOfSimulcastStreams == 0) { | 366 if (codec_.numberOfSimulcastStreams == 0) { |
| 369 codec_.simulcastStream[0].width = codec_.width; | 367 codec_.simulcastStream[0].width = codec_.width; |
| 370 codec_.simulcastStream[0].height = codec_.height; | 368 codec_.simulcastStream[0].height = codec_.height; |
(...skipping 41 matching lines...)
| 412 if (vpx_codec_enc_config_default(vpx_codec_vp8_cx(), &configurations_[0], | 410 if (vpx_codec_enc_config_default(vpx_codec_vp8_cx(), &configurations_[0], |
| 413 0)) { | 411 0)) { |
| 414 return WEBRTC_VIDEO_CODEC_ERROR; | 412 return WEBRTC_VIDEO_CODEC_ERROR; |
| 415 } | 413 } |
| 416 // setting the time base of the codec | 414 // setting the time base of the codec |
| 417 configurations_[0].g_timebase.num = 1; | 415 configurations_[0].g_timebase.num = 1; |
| 418 configurations_[0].g_timebase.den = 90000; | 416 configurations_[0].g_timebase.den = 90000; |
| 419 configurations_[0].g_lag_in_frames = 0; // 0- no frame lagging | 417 configurations_[0].g_lag_in_frames = 0; // 0- no frame lagging |
| 420 | 418 |
| 421 // Set the error resilience mode according to user settings. | 419 // Set the error resilience mode according to user settings. |
| 422 switch (inst->codecSpecific.VP8.resilience) { | 420 switch (inst->VP8().resilience) { |
| 423 case kResilienceOff: | 421 case kResilienceOff: |
| 424 // TODO(marpan): We should keep error resilience off for this mode, | 422 // TODO(marpan): We should keep error resilience off for this mode, |
| 425 // independent of temporal layer settings, and make sure we set | 423 // independent of temporal layer settings, and make sure we set |
| 426 // |codecSpecific.VP8.resilience| = |kResilientStream| at higher level | 424 // |codecSpecific.VP8.resilience| = |kResilientStream| at higher level |
| 427 // code if we want to get error resilience on. | 425 // code if we want to get error resilience on. |
| 428 configurations_[0].g_error_resilient = 1; | 426 configurations_[0].g_error_resilient = 1; |
| 429 break; | 427 break; |
| 430 case kResilientStream: | 428 case kResilientStream: |
| 431 configurations_[0].g_error_resilient = 1; // TODO(holmer): Replace with | 429 configurations_[0].g_error_resilient = 1; // TODO(holmer): Replace with |
| 432 // VPX_ERROR_RESILIENT_DEFAULT when we | 430 // VPX_ERROR_RESILIENT_DEFAULT when we |
| 433 // drop support for libvpx 9.6.0. | 431 // drop support for libvpx 9.6.0. |
| 434 break; | 432 break; |
| 435 case kResilientFrames: | 433 case kResilientFrames: |
| 436 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; // Not supported | 434 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; // Not supported |
| 437 } | 435 } |
| 438 | 436 |
| 439 // rate control settings | 437 // rate control settings |
| 440 configurations_[0].rc_dropframe_thresh = | 438 configurations_[0].rc_dropframe_thresh = inst->VP8().frameDroppingOn ? 30 : 0; |
| 441 inst->codecSpecific.VP8.frameDroppingOn ? 30 : 0; | |
| 442 configurations_[0].rc_end_usage = VPX_CBR; | 439 configurations_[0].rc_end_usage = VPX_CBR; |
| 443 configurations_[0].g_pass = VPX_RC_ONE_PASS; | 440 configurations_[0].g_pass = VPX_RC_ONE_PASS; |
| 444 // TODO(hellner): investigate why the following two lines produce | 441 // TODO(hellner): investigate why the following two lines produce |
| 445 // automaticResizeOn value of 3 when running | 442 // automaticResizeOn value of 3 when running |
| 446 // WebRtcVideoMediaChannelTest.GetStatsMultipleSendStreams inside the talk | 443 // WebRtcVideoMediaChannelTest.GetStatsMultipleSendStreams inside the talk |
| 447 // framework. | 444 // framework. |
| 448 // configurations_[0].rc_resize_allowed = | 445 // configurations_[0].rc_resize_allowed = |
| 449 // inst->codecSpecific.VP8.automaticResizeOn ? 1 : 0; | 446 // inst->codecSpecific.VP8.automaticResizeOn ? 1 : 0; |
| 450 configurations_[0].rc_resize_allowed = 0; | 447 configurations_[0].rc_resize_allowed = 0; |
| 451 // Handle resizing outside of libvpx when doing single-stream. | 448 // Handle resizing outside of libvpx when doing single-stream. |
| 452 if (inst->codecSpecific.VP8.automaticResizeOn && number_of_streams > 1) { | 449 if (inst->VP8().automaticResizeOn && number_of_streams > 1) { |
| 453 configurations_[0].rc_resize_allowed = 1; | 450 configurations_[0].rc_resize_allowed = 1; |
| 454 } | 451 } |
| 455 configurations_[0].rc_min_quantizer = 2; | 452 configurations_[0].rc_min_quantizer = 2; |
| 456 if (inst->qpMax >= configurations_[0].rc_min_quantizer) { | 453 if (inst->qpMax >= configurations_[0].rc_min_quantizer) { |
| 457 qp_max_ = inst->qpMax; | 454 qp_max_ = inst->qpMax; |
| 458 } | 455 } |
| 459 configurations_[0].rc_max_quantizer = qp_max_; | 456 configurations_[0].rc_max_quantizer = qp_max_; |
| 460 configurations_[0].rc_undershoot_pct = 100; | 457 configurations_[0].rc_undershoot_pct = 100; |
| 461 configurations_[0].rc_overshoot_pct = 15; | 458 configurations_[0].rc_overshoot_pct = 15; |
| 462 configurations_[0].rc_buf_initial_sz = 500; | 459 configurations_[0].rc_buf_initial_sz = 500; |
| 463 configurations_[0].rc_buf_optimal_sz = 600; | 460 configurations_[0].rc_buf_optimal_sz = 600; |
| 464 configurations_[0].rc_buf_sz = 1000; | 461 configurations_[0].rc_buf_sz = 1000; |
| 465 | 462 |
| 466 // Set the maximum target size of any key-frame. | 463 // Set the maximum target size of any key-frame. |
| 467 rc_max_intra_target_ = MaxIntraTarget(configurations_[0].rc_buf_optimal_sz); | 464 rc_max_intra_target_ = MaxIntraTarget(configurations_[0].rc_buf_optimal_sz); |
| 468 | 465 |
| 469 if (feedback_mode_) { | 466 if (feedback_mode_) { |
| 470 // Disable periodic key frames if we get feedback from the decoder | 467 // Disable periodic key frames if we get feedback from the decoder |
| 471 // through SLI and RPSI. | 468 // through SLI and RPSI. |
| 472 configurations_[0].kf_mode = VPX_KF_DISABLED; | 469 configurations_[0].kf_mode = VPX_KF_DISABLED; |
| 473 } else if (inst->codecSpecific.VP8.keyFrameInterval > 0) { | 470 } else if (inst->VP8().keyFrameInterval > 0) { |
| 474 configurations_[0].kf_mode = VPX_KF_AUTO; | 471 configurations_[0].kf_mode = VPX_KF_AUTO; |
| 475 configurations_[0].kf_max_dist = inst->codecSpecific.VP8.keyFrameInterval; | 472 configurations_[0].kf_max_dist = inst->VP8().keyFrameInterval; |
| 476 } else { | 473 } else { |
| 477 configurations_[0].kf_mode = VPX_KF_DISABLED; | 474 configurations_[0].kf_mode = VPX_KF_DISABLED; |
| 478 } | 475 } |
| 479 | 476 |
| 480 // Allow the user to set the complexity for the base stream. | 477 // Allow the user to set the complexity for the base stream. |
| 481 switch (inst->codecSpecific.VP8.complexity) { | 478 switch (inst->VP8().complexity) { |
| 482 case kComplexityHigh: | 479 case kComplexityHigh: |
| 483 cpu_speed_[0] = -5; | 480 cpu_speed_[0] = -5; |
| 484 break; | 481 break; |
| 485 case kComplexityHigher: | 482 case kComplexityHigher: |
| 486 cpu_speed_[0] = -4; | 483 cpu_speed_[0] = -4; |
| 487 break; | 484 break; |
| 488 case kComplexityMax: | 485 case kComplexityMax: |
| 489 cpu_speed_[0] = -3; | 486 cpu_speed_[0] = -3; |
| 490 break; | 487 break; |
| 491 default: | 488 default: |
(...skipping 64 matching lines...)
| 556 } | 553 } |
| 557 | 554 |
| 558 rps_.Init(); | 555 rps_.Init(); |
| 559 quality_scaler_.Init(codec_.codecType, codec_.startBitrate, codec_.width, | 556 quality_scaler_.Init(codec_.codecType, codec_.startBitrate, codec_.width, |
| 560 codec_.height, codec_.maxFramerate); | 557 codec_.height, codec_.maxFramerate); |
| 561 | 558 |
| 562 // Only apply quality scaling for single-layer streams. The scaling metrics | 559 // Only apply quality scaling for single-layer streams. The scaling metrics |
| 563 // use frame drops as a signal and are only applicable when we drop frames. | 560 // use frame drops as a signal and are only applicable when we drop frames. |
| 564 quality_scaler_enabled_ = encoders_.size() == 1 && | 561 quality_scaler_enabled_ = encoders_.size() == 1 && |
| 565 configurations_[0].rc_dropframe_thresh > 0 && | 562 configurations_[0].rc_dropframe_thresh > 0 && |
| 566 codec_.codecSpecific.VP8.automaticResizeOn; | 563 codec_.VP8()->automaticResizeOn; |
| 567 | 564 |
| 568 return InitAndSetControlSettings(); | 565 return InitAndSetControlSettings(); |
| 569 } | 566 } |
| 570 | 567 |
| 571 int VP8EncoderImpl::SetCpuSpeed(int width, int height) { | 568 int VP8EncoderImpl::SetCpuSpeed(int width, int height) { |
| 572 #if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) || defined(ANDROID) | 569 #if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) || defined(ANDROID) |
| 573 // On mobile platforms, always set to -12 to balance cpu usage | 570 // On mobile platforms, always set to -12 to balance cpu usage |
| 574 // against video quality. | 571 // against video quality. |
| 575 return -12; | 572 return -12; |
| 576 #else | 573 #else |
| 577 // For non-ARM, increase encoding complexity (i.e., use lower speed setting) | 574 // For non-ARM, increase encoding complexity (i.e., use lower speed setting) |
| 578 // if resolution is below CIF. Otherwise, keep the default/user setting | 575 // if resolution is below CIF. Otherwise, keep the default/user setting |
| 579 // (|cpu_speed_default_|) set on InitEncode via codecSpecific.VP8.complexity. | 576 // (|cpu_speed_default_|) set on InitEncode via VP8().complexity. |
| 580 if (width * height < 352 * 288) | 577 if (width * height < 352 * 288) |
| 581 return (cpu_speed_default_ < -4) ? -4 : cpu_speed_default_; | 578 return (cpu_speed_default_ < -4) ? -4 : cpu_speed_default_; |
| 582 else | 579 else |
| 583 return cpu_speed_default_; | 580 return cpu_speed_default_; |
| 584 #endif | 581 #endif |
| 585 } | 582 } |
| 586 | 583 |
| 587 int VP8EncoderImpl::NumberOfThreads(int width, int height, int cpus) { | 584 int VP8EncoderImpl::NumberOfThreads(int width, int height, int cpus) { |
| 588 #if defined(ANDROID) | 585 #if defined(ANDROID) |
| 589 if (width * height >= 320 * 180) { | 586 if (width * height >= 320 * 180) { |
(...skipping 47 matching lines...)
| 637 // TODO(holmer): Investigate possibility of adding a libvpx API | 634 // TODO(holmer): Investigate possibility of adding a libvpx API |
| 638 // for getting the denoised frame from the encoder and using that | 635 // for getting the denoised frame from the encoder and using that |
| 639 // when encoding lower resolution streams. Would it work with the | 636 // when encoding lower resolution streams. Would it work with the |
| 640 // multi-res encoding feature? | 637 // multi-res encoding feature? |
| 641 denoiserState denoiser_state = kDenoiserOnYOnly; | 638 denoiserState denoiser_state = kDenoiserOnYOnly; |
| 642 #if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) || defined(ANDROID) | 639 #if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) || defined(ANDROID) |
| 643 denoiser_state = kDenoiserOnYOnly; | 640 denoiser_state = kDenoiserOnYOnly; |
| 644 #else | 641 #else |
| 645 denoiser_state = kDenoiserOnAdaptive; | 642 denoiser_state = kDenoiserOnAdaptive; |
| 646 #endif | 643 #endif |
| 647 vpx_codec_control( | 644 vpx_codec_control(&encoders_[0], VP8E_SET_NOISE_SENSITIVITY, |
| 648 &encoders_[0], VP8E_SET_NOISE_SENSITIVITY, | 645 codec_.VP8()->denoisingOn ? denoiser_state : kDenoiserOff); |
| 649 codec_.codecSpecific.VP8.denoisingOn ? denoiser_state : kDenoiserOff); | |
| 650 if (encoders_.size() > 2) { | 646 if (encoders_.size() > 2) { |
| 651 vpx_codec_control( | 647 vpx_codec_control( |
| 652 &encoders_[1], VP8E_SET_NOISE_SENSITIVITY, | 648 &encoders_[1], VP8E_SET_NOISE_SENSITIVITY, |
| 653 codec_.codecSpecific.VP8.denoisingOn ? denoiser_state : kDenoiserOff); | 649 codec_.VP8()->denoisingOn ? denoiser_state : kDenoiserOff); |
| 654 } | 650 } |
| 655 for (size_t i = 0; i < encoders_.size(); ++i) { | 651 for (size_t i = 0; i < encoders_.size(); ++i) { |
| 656 // Allow more screen content to be detected as static. | 652 // Allow more screen content to be detected as static. |
| 657 vpx_codec_control(&(encoders_[i]), VP8E_SET_STATIC_THRESHOLD, | 653 vpx_codec_control(&(encoders_[i]), VP8E_SET_STATIC_THRESHOLD, |
| 658 codec_.mode == kScreensharing ? 300 : 1); | 654 codec_.mode == kScreensharing ? 300 : 1); |
| 659 vpx_codec_control(&(encoders_[i]), VP8E_SET_CPUUSED, cpu_speed_[i]); | 655 vpx_codec_control(&(encoders_[i]), VP8E_SET_CPUUSED, cpu_speed_[i]); |
| 660 vpx_codec_control(&(encoders_[i]), VP8E_SET_TOKEN_PARTITIONS, | 656 vpx_codec_control(&(encoders_[i]), VP8E_SET_TOKEN_PARTITIONS, |
| 661 static_cast<vp8e_token_partitions>(token_partitions_)); | 657 static_cast<vp8e_token_partitions>(token_partitions_)); |
| 662 vpx_codec_control(&(encoders_[i]), VP8E_SET_MAX_INTRA_BITRATE_PCT, | 658 vpx_codec_control(&(encoders_[i]), VP8E_SET_MAX_INTRA_BITRATE_PCT, |
| 663 rc_max_intra_target_); | 659 rc_max_intra_target_); |
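The VP8E_SET_MAX_INTRA_BITRATE_PCT control applied above caps key-frame size as a percentage of the per-frame average bandwidth. Its value comes from MaxIntraTarget(rc_buf_optimal_sz), whose body is not shown in this diff; the sketch below only illustrates how such a percentage can be derived from the optimal buffer size, and its scale factor and floor are assumptions rather than the CL's constants:

#include <stdint.h>
#include <algorithm>

// optimal_buffer_size_ms: rc_buf_optimal_sz, in milliseconds of buffer.
// max_framerate: the codec's maximum frame rate.
// Returns a cap in percent of the per-frame bandwidth
// (per_frame_bw = target_bitrate_kbps * 1000 / framerate).
uint32_t MaxIntraTargetPct(uint32_t optimal_buffer_size_ms,
                           uint32_t max_framerate) {
  const float scale = 0.5f;            // assumed scale factor
  const uint32_t min_intra_pct = 300;  // assumed floor: 3x per-frame bandwidth
  const uint32_t target_pct = static_cast<uint32_t>(
      optimal_buffer_size_ms * scale * max_framerate / 10);
  return std::max(target_pct, min_intra_pct);
}

For example, with rc_buf_optimal_sz = 600 and a 30 fps stream this yields 900, i.e. a key frame may be up to nine times the size of an average frame.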
(...skipping 109 matching lines...)
| 773 } | 769 } |
| 774 } | 770 } |
| 775 // The flag modification below (due to forced key frame, RPS, etc.,) for now | 771 // The flag modification below (due to forced key frame, RPS, etc.,) for now |
| 776 // will be the same for all encoders/spatial layers. | 772 // will be the same for all encoders/spatial layers. |
| 777 // TODO(marpan/holmer): Allow for key frame request to be set per encoder. | 773 // TODO(marpan/holmer): Allow for key frame request to be set per encoder. |
| 778 bool only_predict_from_key_frame = false; | 774 bool only_predict_from_key_frame = false; |
| 779 if (send_key_frame) { | 775 if (send_key_frame) { |
| 780 // Adapt the size of the key frame when in screenshare with 1 temporal | 776 // Adapt the size of the key frame when in screenshare with 1 temporal |
| 781 // layer. | 777 // layer. |
| 782 if (encoders_.size() == 1 && codec_.mode == kScreensharing && | 778 if (encoders_.size() == 1 && codec_.mode == kScreensharing && |
| 783 codec_.codecSpecific.VP8.numberOfTemporalLayers <= 1) { | 779 codec_.VP8()->numberOfTemporalLayers <= 1) { |
| 784 const uint32_t forceKeyFrameIntraTh = 100; | 780 const uint32_t forceKeyFrameIntraTh = 100; |
| 785 vpx_codec_control(&(encoders_[0]), VP8E_SET_MAX_INTRA_BITRATE_PCT, | 781 vpx_codec_control(&(encoders_[0]), VP8E_SET_MAX_INTRA_BITRATE_PCT, |
| 786 forceKeyFrameIntraTh); | 782 forceKeyFrameIntraTh); |
| 787 } | 783 } |
| 788 // Key frame request from caller. | 784 // Key frame request from caller. |
| 789 // Will update both golden and alt-ref. | 785 // Will update both golden and alt-ref. |
| 790 for (size_t i = 0; i < encoders_.size(); ++i) { | 786 for (size_t i = 0; i < encoders_.size(); ++i) { |
| 791 flags[i] = VPX_EFLAG_FORCE_KF; | 787 flags[i] = VPX_EFLAG_FORCE_KF; |
| 792 } | 788 } |
| 793 std::fill(key_frame_request_.begin(), key_frame_request_.end(), false); | 789 std::fill(key_frame_request_.begin(), key_frame_request_.end(), false); |
(...skipping 272 matching lines...)
| 1066 | 1062 |
| 1067 int VP8DecoderImpl::InitDecode(const VideoCodec* inst, int number_of_cores) { | 1063 int VP8DecoderImpl::InitDecode(const VideoCodec* inst, int number_of_cores) { |
| 1068 int ret_val = Release(); | 1064 int ret_val = Release(); |
| 1069 if (ret_val < 0) { | 1065 if (ret_val < 0) { |
| 1070 return ret_val; | 1066 return ret_val; |
| 1071 } | 1067 } |
| 1072 if (decoder_ == NULL) { | 1068 if (decoder_ == NULL) { |
| 1073 decoder_ = new vpx_codec_ctx_t; | 1069 decoder_ = new vpx_codec_ctx_t; |
| 1074 } | 1070 } |
| 1075 if (inst && inst->codecType == kVideoCodecVP8) { | 1071 if (inst && inst->codecType == kVideoCodecVP8) { |
| 1076 feedback_mode_ = inst->codecSpecific.VP8.feedbackModeOn; | 1072 feedback_mode_ = inst->VP8().feedbackModeOn; |
| 1077 } | 1073 } |
| 1078 vpx_codec_dec_cfg_t cfg; | 1074 vpx_codec_dec_cfg_t cfg; |
| 1079 // Setting number of threads to a constant value (1) | 1075 // Setting number of threads to a constant value (1) |
| 1080 cfg.threads = 1; | 1076 cfg.threads = 1; |
| 1081 cfg.h = cfg.w = 0; // set after decode | 1077 cfg.h = cfg.w = 0; // set after decode |
| 1082 | 1078 |
| 1083 vpx_codec_flags_t flags = 0; | 1079 vpx_codec_flags_t flags = 0; |
| 1084 #if !defined(WEBRTC_ARCH_ARM) && !defined(WEBRTC_ARCH_ARM64) && \ | 1080 #if !defined(WEBRTC_ARCH_ARM) && !defined(WEBRTC_ARCH_ARM64) && \ |
| 1085 !defined(ANDROID) | 1081 !defined(ANDROID) |
| 1086 flags = VPX_CODEC_USE_POSTPROC; | 1082 flags = VPX_CODEC_USE_POSTPROC; |
(...skipping 232 matching lines...)
| 1319 return -1; | 1315 return -1; |
| 1320 } | 1316 } |
| 1321 if (vpx_codec_control(copy->decoder_, VP8_SET_REFERENCE, ref_frame_) != | 1317 if (vpx_codec_control(copy->decoder_, VP8_SET_REFERENCE, ref_frame_) != |
| 1322 VPX_CODEC_OK) { | 1318 VPX_CODEC_OK) { |
| 1323 return -1; | 1319 return -1; |
| 1324 } | 1320 } |
| 1325 return 0; | 1321 return 0; |
| 1326 } | 1322 } |
| 1327 | 1323 |
| 1328 } // namespace webrtc | 1324 } // namespace webrtc |