Index: webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
diff --git a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
index d45abcd1b6e64e94d40429c6304a196394c7a140..bbfe732e973edd4e16529b4bb4cbd039c75d3346 100644
--- a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
@@ -165,7 +165,8 @@ VP8EncoderImpl::VP8EncoderImpl()
       cpu_speed_default_(-6),
       number_of_cores_(0),
       rc_max_intra_target_(0),
-      key_frame_request_(kMaxSimulcastStreams, false) {
+      key_frame_request_(kMaxSimulcastStreams, false),
+      last_timing_frame_time_ms_(0) {
   Random random(rtc::TimeMicros());
   picture_id_.reserve(kMaxSimulcastStreams);
   for (int i = 0; i < kMaxSimulcastStreams; ++i) {
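
The new initializer implies a matching member declaration in vp8_impl.h, which this diff does not touch. A minimal sketch of what that declaration would look like, assuming the member is an int64_t millisecond render-time value (it is compared against and assigned from input_image.render_time_ms() in GetEncodedPartitions() below):

// Sketch only: the real declaration lives in vp8_impl.h and is not part of
// the change as shown. The type is assumed from its use with
// render_time_ms(), which returns int64_t.
class VP8EncoderImpl : public VP8Encoder {
  // ... existing members ...
 private:
  // Render time (ms) of the last frame tagged as a timing frame; 0 until
  // the first timing frame is produced.
  int64_t last_timing_frame_time_ms_;
};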
@@ -668,6 +669,8 @@ int VP8EncoderImpl::Encode(const VideoFrame& frame,
   if (encoded_complete_callback_ == NULL)
     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
 
+  int64_t encode_start_ms = rtc::TimeMillis();
+
   rtc::scoped_refptr<VideoFrameBuffer> input_image = frame.video_frame_buffer();
   // Since we are extracting raw pointers from |input_image| to
   // |raw_images_[0]|, the resolution of these frames must match.
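
rtc::TimeMillis() is WebRTC's millisecond clock helper from rtc_base; capturing it before the frame is handed to libvpx lets GetEncodedPartitions() stamp both the encode start and finish times on the outgoing EncodedImage. A minimal stand-alone illustration of that bracketing pattern (EncodeOneFrame() is a hypothetical stand-in for the actual libvpx work, and the include path may differ by WebRTC revision):

#include <stdint.h>

#include "webrtc/rtc_base/timeutils.h"  // rtc::TimeMillis(); path varies by revision

void EncodeOneFrame();  // hypothetical, declared elsewhere

void EncodeAndMeasure() {
  int64_t encode_start_ms = rtc::TimeMillis();   // before encoding starts
  EncodeOneFrame();
  int64_t encode_finish_ms = rtc::TimeMillis();  // after encoded data is ready
  // In the real code this pair ends up in EncodedImage::timing_ rather than
  // being used locally.
  int64_t encode_duration_ms = encode_finish_ms - encode_start_ms;
  (void)encode_duration_ms;
}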
@@ -789,7 +792,7 @@ int VP8EncoderImpl::Encode(const VideoFrame& frame,
     return WEBRTC_VIDEO_CODEC_ERROR;
   timestamp_ += duration;
   // Examines frame timestamps only.
-  return GetEncodedPartitions(tl_configs, frame);
+  return GetEncodedPartitions(tl_configs, frame, encode_start_ms);
 }
 
 void VP8EncoderImpl::PopulateCodecSpecific(
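
Since the call site now passes the capture time through, the declaration of GetEncodedPartitions() in vp8_impl.h needs the same extra parameter; that header edit is not shown in this diff. Roughly, as a sketch:

// Assumed shape of the updated declaration in vp8_impl.h (not part of this
// diff). The added parameter carries the wall-clock time captured at the top
// of Encode() into the per-stream packaging loop.
int GetEncodedPartitions(const TemporalLayers::FrameConfig tl_configs[],
                         const VideoFrame& input_image,
                         int64_t encode_start_ms);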
@@ -815,10 +818,17 @@ void VP8EncoderImpl::PopulateCodecSpecific(
 
 int VP8EncoderImpl::GetEncodedPartitions(
     const TemporalLayers::FrameConfig tl_configs[],
-    const VideoFrame& input_image) {
+    const VideoFrame& input_image,
+    int64_t encode_start_ms) {
   int bw_resolutions_disabled =
       (encoders_.size() > 1) ? NumStreamsDisabled(send_stream_) : -1;
 
+  bool is_time_for_timing_frame =
+      input_image.render_time_ms() - last_timing_frame_time_ms_ >=
+      codec_.timingFramesDelayMs;
+  if (is_time_for_timing_frame)
+    last_timing_frame_time_ms_ = input_image.render_time_ms();
+
   int stream_idx = static_cast<int>(encoders_.size()) - 1;
   int result = WEBRTC_VIDEO_CODEC_OK;
   for (size_t encoder_idx = 0; encoder_idx < encoders_.size();
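
The periodic trigger above fires at most once per codec_.timingFramesDelayMs of render time: when a frame's render timestamp is at least that far past the last tagged frame, the frame becomes a timing-frame candidate and the marker is advanced. A stand-alone restatement of the same rule, using hypothetical names:

#include <stdint.h>

// Returns true, and advances *last_timing_frame_time_ms, at most once per
// delay_ms of render time. Names are hypothetical; the diff inlines this
// logic directly in GetEncodedPartitions().
bool IsTimeForTimingFrame(int64_t render_time_ms,
                          int64_t delay_ms,
                          int64_t* last_timing_frame_time_ms) {
  if (render_time_ms - *last_timing_frame_time_ms < delay_ms)
    return false;
  *last_timing_frame_time_ms = render_time_ms;
  return true;
}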
@@ -878,6 +888,16 @@ int VP8EncoderImpl::GetEncodedPartitions(
     encoded_images_[encoder_idx].content_type_ =
         (codec_.mode == kScreensharing) ? VideoContentType::SCREENSHARE
                                         : VideoContentType::UNSPECIFIED;
+    if (is_time_for_timing_frame ||
+        encoded_images_[encoder_idx]._length >=
+            codec_.minFrameSizeToForceTimingFrameBytes) {
+      encoded_images_[encoder_idx].timing_.is_timing_frame = true;
+      encoded_images_[encoder_idx].timing_.encode_start_ms = encode_start_ms;
+      encoded_images_[encoder_idx].timing_.encode_finish_ms = rtc::TimeMillis();
+    } else {
+      encoded_images_[encoder_idx].timing_.is_timing_frame = false;
+    }
+
     int qp = -1;
     vpx_codec_control(&encoders_[encoder_idx], VP8E_GET_LAST_QUANTIZER_64, &qp);
     temporal_layers_[stream_idx]->FrameEncoded(
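
Taken together, a frame is tagged as a timing frame either because the periodic trigger fired or because its encoded size reaches codec_.minFrameSizeToForceTimingFrameBytes (unusually large frames are worth measuring on their own); only tagged frames get encode_start_ms and encode_finish_ms populated, and all other frames explicitly clear the flag. A condensed restatement of that rule, factored into a hypothetical helper:

// Condensed restatement of the tagging logic in the loop above. The helper
// name is hypothetical and does not exist in vp8_impl.cc; EncodedImage and
// its timing_ fields are the ones the diff itself writes to (include for
// EncodedImage omitted, as its path varies by WebRTC revision).
void TagTimingFrame(EncodedImage* image,
                    bool is_time_for_timing_frame,
                    size_t min_frame_size_bytes,
                    int64_t encode_start_ms,
                    int64_t encode_finish_ms) {
  if (is_time_for_timing_frame || image->_length >= min_frame_size_bytes) {
    // Periodically selected or outlier-sized frames carry encode timestamps.
    image->timing_.is_timing_frame = true;
    image->timing_.encode_start_ms = encode_start_ms;
    image->timing_.encode_finish_ms = encode_finish_ms;
  } else {
    image->timing_.is_timing_frame = false;
  }
}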