Index: webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
diff --git a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
index e5a776e066aca93a136a42dd255f0596aef33270..c46c63bb2f7637d4ffbd10da3cbea94936763de7 100644
--- a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
@@ -278,7 +278,6 @@ int VP8EncoderImpl::SetRates(uint32_t new_bitrate_kbit,
int tl0_bitrate = std::min(codec_.targetBitrate, target_bitrate);
max_bitrate = std::min(codec_.maxBitrate, target_bitrate);
target_bitrate = tl0_bitrate;
- framerate = -1;
}
configurations_[i].rc_target_bitrate = target_bitrate;
temporal_layers_[stream_idx]->ConfigureBitrates(target_bitrate,
@@ -312,10 +311,8 @@ void VP8EncoderImpl::SetupTemporalLayers(int num_streams,
if (num_streams == 1) {
if (codec.mode == kScreensharing) {
// Special mode when screensharing on a single stream.
- temporal_layers_.push_back(new ScreenshareLayers(num_temporal_layers,
- rand(),
- &tl0_frame_dropper_,
- &tl1_frame_dropper_));
+ temporal_layers_.push_back(
+ new ScreenshareLayers(num_temporal_layers, rand()));
} else {
temporal_layers_.push_back(
tl_factory.Create(num_temporal_layers, rand()));
@@ -671,7 +668,7 @@ int VP8EncoderImpl::InitAndSetControlSettings() {
vpx_codec_control(&(encoders_[i]), VP8E_SET_MAX_INTRA_BITRATE_PCT,
rc_max_intra_target_);
vpx_codec_control(&(encoders_[i]), VP8E_SET_SCREEN_CONTENT_MODE,
- codec_.mode == kScreensharing);
+ codec_.mode == kScreensharing ? 2 : 0);
stefan-webrtc
2015/06/18 09:39:02
What does 2 mean?
sprang_webrtc
2015/06/18 13:05:02
Commented
stefan-webrtc
2015/06/23 14:28:32
Still doesn't say what 2 means
sprang_webrtc
2015/06/23 15:07:30
You mean you miss the actual 2? :)
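A note on the value 2 being discussed: libvpx's VP8E_SET_SCREEN_CONTENT_MODE is, as far as I know, a tri-state control rather than a boolean (0 = off, 1 = on, 2 = on with more aggressive rate control). A minimal sketch of how the call site could spell that out; the SetScreenContentMode helper is illustrative only and not part of this patch:

  #include "vpx/vpx_encoder.h"
  #include "vpx/vp8cx.h"

  // Hypothetical helper around the control call above.
  // 0 = screen content mode off, 1 = on, 2 = on with more aggressive
  // rate control (assumed libvpx convention, not stated in the patch).
  static void SetScreenContentMode(vpx_codec_ctx_t* encoder,
                                   bool is_screensharing) {
    const unsigned int mode = is_screensharing ? 2u : 0u;
    vpx_codec_control(encoder, VP8E_SET_SCREEN_CONTENT_MODE, mode);
  }

With that convention, passing 2 only for kScreensharing leaves camera content on the default behaviour.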
|
}
inited_ = true;
return WEBRTC_VIDEO_CODEC_OK;
@@ -698,15 +695,12 @@ int VP8EncoderImpl::Encode(const VideoFrame& frame,
const std::vector<VideoFrameType>* frame_types) {
TRACE_EVENT1("webrtc", "VP8::Encode", "timestamp", frame.timestamp());
- if (!inited_) {
+ if (!inited_)
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
- }
- if (frame.IsZeroSize()) {
+ if (frame.IsZeroSize())
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
- }
- if (encoded_complete_callback_ == NULL) {
+ if (encoded_complete_callback_ == NULL)
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
- }
// Only apply scaling to improve for single-layer streams. The scaling metrics
// use frame drops as a signal and is only applicable when we drop frames.
@@ -851,6 +845,13 @@ int VP8EncoderImpl::Encode(const VideoFrame& frame,
// whereas |encoder_| is from highest to lowest resolution.
size_t stream_idx = encoders_.size() - 1;
for (size_t i = 0; i < encoders_.size(); ++i, --stream_idx) {
+ vpx_codec_enc_cfg_t temp_config;
stefan-webrtc
2015/06/18 09:39:02
What is temporary with this config? Isn't it a scr
sprang_webrtc
2015/06/18 13:05:02
It's not stored in configurations_ and will be res
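To expand on the answer above about what is temporary here: the pattern added below works on a copy of the stored per-stream configuration, lets the temporal layers adjust that copy for the current frame, and only pushes the adjusted copy to libvpx, so configurations_[i] keeps the unmodified baseline for the next frame. A minimal sketch of the same pattern in isolation; the Layers interface and ApplyPerFrameConfig helper are illustrative, while UpdateConfiguration and vpx_codec_enc_config_set are the calls used in the patch:

  #include <string.h>
  #include "vpx/vpx_encoder.h"

  // Illustrative stand-in for the temporal layers object used in the patch.
  struct Layers {
    // Returns true if |config| was modified and must be handed to libvpx.
    virtual bool UpdateConfiguration(vpx_codec_enc_cfg_t* config) = 0;
    virtual ~Layers() {}
  };

  static int ApplyPerFrameConfig(vpx_codec_ctx_t* encoder,
                                 const vpx_codec_enc_cfg_t& baseline,
                                 Layers* layers) {
    // Work on a copy so per-frame tweaks never leak into the stored baseline.
    vpx_codec_enc_cfg_t temp_config;
    memcpy(&temp_config, &baseline, sizeof(temp_config));
    if (layers->UpdateConfiguration(&temp_config)) {
      // A non-zero return from libvpx means the reconfiguration failed.
      if (vpx_codec_enc_config_set(encoder, &temp_config))
        return -1;
    }
    return 0;
  }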
|
+ memcpy(&temp_config, &configurations_[i], sizeof(vpx_codec_enc_cfg_t));
+ if (temporal_layers_[stream_idx]->UpdateConfiguration(&temp_config)) {
+ if (vpx_codec_enc_config_set(&encoders_[i], &temp_config))
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+
vpx_codec_control(&encoders_[i], VP8E_SET_FRAME_FLAGS, flags[stream_idx]);
vpx_codec_control(&encoders_[i],
VP8E_SET_TEMPORAL_LAYER_ID,
@@ -873,9 +874,8 @@ int VP8EncoderImpl::Encode(const VideoFrame& frame,
vpx_codec_control(&(encoders_[0]), VP8E_SET_MAX_INTRA_BITRATE_PCT,
rc_max_intra_target_);
}
- if (error) {
+ if (error)
return WEBRTC_VIDEO_CODEC_ERROR;
- }
timestamp_ += duration;
return GetEncodedPartitions(input_image, only_predict_from_key_frame);
}
@@ -981,9 +981,12 @@ int VP8EncoderImpl::GetEncodedPartitions(const VideoFrame& input_image,
encoded_images_[encoder_idx]._timeStamp = input_image.timestamp();
encoded_images_[encoder_idx].capture_time_ms_ =
input_image.render_time_ms();
+
+ int qp = -1;
+ vpx_codec_control(&encoders_[encoder_idx], VP8E_GET_LAST_QUANTIZER_64, &qp);
temporal_layers_[stream_idx]->FrameEncoded(
encoded_images_[encoder_idx]._length,
- encoded_images_[encoder_idx]._timeStamp);
+ encoded_images_[encoder_idx]._timeStamp, qp);
if (send_stream_[stream_idx]) {
if (encoded_images_[encoder_idx]._length > 0) {
TRACE_COUNTER_ID1("webrtc", "EncodedFrameSize", encoder_idx,
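The last hunk starts feeding the encoded frame's quantizer into the temporal layers. VP8E_GET_LAST_QUANTIZER_64 is, to my understanding, the libvpx control that reports the last-used quantizer on the 0..63 scale matching rc_min_quantizer/rc_max_quantizer, whereas VP8E_GET_LAST_QUANTIZER reports libvpx's internal 0..127 scale. A minimal sketch of the lookup as a standalone helper; the LastQuantizer function is illustrative and not part of the patch:

  #include "vpx/vpx_encoder.h"
  #include "vpx/vp8cx.h"

  // Hypothetical helper: fetch the quantizer used for the most recently
  // encoded frame, on the 0..63 config scale. Returns -1 if the query fails.
  static int LastQuantizer(vpx_codec_ctx_t* encoder) {
    int qp = -1;
    if (vpx_codec_control(encoder, VP8E_GET_LAST_QUANTIZER_64, &qp) !=
        VPX_CODEC_OK) {
      return -1;
    }
    return qp;
  }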