Chromium Code Reviews
Index: webrtc/modules/video_coding/generic_encoder.cc
diff --git a/webrtc/modules/video_coding/generic_encoder.cc b/webrtc/modules/video_coding/generic_encoder.cc
index 50731c050aeec4548af668ea974191c516d47674..536818733099afed077001bedd97112124653829 100644
--- a/webrtc/modules/video_coding/generic_encoder.cc
+++ b/webrtc/modules/video_coding/generic_encoder.cc
@@ -15,6 +15,7 @@
 #include "webrtc/api/video/i420_buffer.h"
 #include "webrtc/base/checks.h"
 #include "webrtc/base/logging.h"
+#include "webrtc/base/timeutils.h"
 #include "webrtc/base/trace_event.h"
 #include "webrtc/modules/video_coding/encoded_frame.h"
 #include "webrtc/modules/video_coding/media_optimization.h"
@@ -29,7 +30,8 @@ VCMGenericEncoder::VCMGenericEncoder(
       vcm_encoded_frame_callback_(encoded_frame_callback),
       internal_source_(internal_source),
       encoder_params_({BitrateAllocation(), 0, 0, 0}),
-      is_screenshare_(false) {}
+      is_screenshare_(false),
+      streams_or_svc_num_(0) {}
 
 VCMGenericEncoder::~VCMGenericEncoder() {}
 
@@ -45,6 +47,17 @@ int32_t VCMGenericEncoder::InitEncode(const VideoCodec* settings,
   RTC_DCHECK_RUNS_SERIALIZED(&race_checker_);
   TRACE_EVENT0("webrtc", "VCMGenericEncoder::InitEncode");
   is_screenshare_ = settings->mode == VideoCodecMode::kScreensharing;
+  streams_or_svc_num_ = settings->numberOfSimulcastStreams;
+  if (settings->codecType == kVideoCodecVP9) {
+    streams_or_svc_num_ = settings->VP9().numberOfSpatialLayers;
+  }
+  if (streams_or_svc_num_ == 0)
+    streams_or_svc_num_ = 1;
+
+  vcm_encoded_frame_callback_->SetTimingFramesThresholds(
+      settings->timingFrameTriggerThresholds);
+  vcm_encoded_frame_callback_->OnFrameRateChanged(settings->maxFramerate);
+
   if (encoder_->InitEncode(settings, number_of_cores, max_payload_size) != 0) {
     LOG(LS_ERROR) << "Failed to initialize the encoder associated with "
                      "payload name: "
@@ -65,6 +78,8 @@ int32_t VCMGenericEncoder::Encode(const VideoFrame& frame,
   for (FrameType frame_type : frame_types)
     RTC_DCHECK(frame_type == kVideoFrameKey || frame_type == kVideoFrameDelta);
 
+  for (size_t i = 0; i < streams_or_svc_num_; ++i)
+    vcm_encoded_frame_callback_->OnEncodeStarted(frame.render_time_ms(), i);
   int32_t result = encoder_->Encode(frame, codec_specific, &frame_types);
 
   if (is_screenshare_ &&
@@ -107,6 +122,17 @@ void VCMGenericEncoder::SetEncoderParameters(const EncoderParameters& params) {
                 << ", framerate = " << params.input_frame_rate
                 << "): " << res;
     }
+    vcm_encoded_frame_callback_->OnFrameRateChanged(params.input_frame_rate);
+    for (size_t i = 0; i < streams_or_svc_num_; ++i) {
+      size_t layer_bitrate_bytes_per_sec =
+          params.target_bitrate.GetSpatialLayerSum(i) / 8;
+      // VP9 rate control is not yet moved out of VP9Impl. Because of that,
+      // rates are not split among spatial layers.
+      if (layer_bitrate_bytes_per_sec == 0)
+        layer_bitrate_bytes_per_sec = params.target_bitrate.get_sum_bps() / 8;
+      vcm_encoded_frame_callback_->OnTargetBitrateChanged(
+          layer_bitrate_bytes_per_sec, i);
+    }
   }
 }
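(Editor's aside, not part of the CL: the per-layer bitrate conversion above, sketched with an invented Allocation type in place of BitrateAllocation. It shows the fallback to the total rate when the allocation is not split per layer, as the VP9 comment notes.)

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Invented stand-in for the per-layer bitrate allocation.
    struct Allocation {
      std::vector<uint32_t> layer_bps;  // Per-layer target, bits per second.

      uint32_t LayerSumBps(size_t i) const {
        return i < layer_bps.size() ? layer_bps[i] : 0;
      }
      uint32_t TotalBps() const {
        uint32_t sum = 0;
        for (uint32_t b : layer_bps) sum += b;
        return sum;
      }
    };

    // Converts the allocation to per-layer bytes per second, falling back to
    // the total rate when the allocator did not split rates per layer.
    std::vector<size_t> PerLayerBytesPerSec(const Allocation& alloc,
                                            size_t num_layers) {
      std::vector<size_t> out(num_layers);
      for (size_t i = 0; i < num_layers; ++i) {
        size_t bytes_per_sec = alloc.LayerSumBps(i) / 8;
        if (bytes_per_sec == 0)
          bytes_per_sec = alloc.TotalBps() / 8;
        out[i] = bytes_per_sec;
      }
      return out;
    }
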
@@ -124,6 +150,8 @@ int32_t VCMGenericEncoder::RequestFrame(
     const std::vector<FrameType>& frame_types) {
   RTC_DCHECK_RUNS_SERIALIZED(&race_checker_);
 
+  for (size_t i = 0; i < streams_or_svc_num_; ++i)
+    vcm_encoded_frame_callback_->OnEncodeStarted(0, i);
   // TODO(nisse): Used only with internal source. Delete as soon as
   // that feature is removed. The only implementation I've been able
   // to find ignores what's in the frame. With one exception: It seems
@@ -151,16 +179,90 @@ VCMEncodedFrameCallback::VCMEncodedFrameCallback(
     media_optimization::MediaOptimization* media_opt)
     : internal_source_(false),
       post_encode_callback_(post_encode_callback),
-      media_opt_(media_opt) {}
+      media_opt_(media_opt),
+      framerate_(1),
+      last_timing_frame_time_ms_(-1),
+      timing_frames_thresholds_({-1, 0}) {}
 
 VCMEncodedFrameCallback::~VCMEncodedFrameCallback() {}
 
+void VCMEncodedFrameCallback::OnTargetBitrateChanged(
+    size_t bitrate_bytes_per_second,
+    size_t simulcast_svc_idx) {
+  if (timing_frames_info_.size() < simulcast_svc_idx + 1)
+    timing_frames_info_.resize(simulcast_svc_idx + 1);
+  timing_frames_info_[simulcast_svc_idx].target_bitrate_bytes_per_sec =
+      bitrate_bytes_per_second;
+}
+
+void VCMEncodedFrameCallback::OnFrameRateChanged(size_t framerate) {
+  framerate_ = framerate;
+}
+
+void VCMEncodedFrameCallback::OnEncodeStarted(int64_t capture_time_ms,
+                                              size_t simulcast_svc_idx) {
+  if (timing_frames_info_.size() < simulcast_svc_idx + 1)
+    timing_frames_info_.resize(simulcast_svc_idx + 1);
+  timing_frames_info_[simulcast_svc_idx].encode_start_time_ms[capture_time_ms] =
+      rtc::TimeMillis();
+}
+
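(Editor's aside, not part of the CL: a simplified model of the bookkeeping behind these three callbacks. TimingTracker and TimingFramesLayerInfo are invented names; the fields mirror what the diff accesses. One entry is kept per simulcast stream or spatial layer, holding the layer's target bitrate and a map from capture time to encode start time.)

    #include <cstddef>
    #include <cstdint>
    #include <map>
    #include <vector>

    // One entry per simulcast stream or spatial layer.
    struct TimingFramesLayerInfo {
      size_t target_bitrate_bytes_per_sec = 0;
      // Capture timestamp (ms) -> wall-clock time (ms) when encoding started.
      std::map<int64_t, int64_t> encode_start_time_ms;
    };

    class TimingTracker {
     public:
      void OnEncodeStarted(int64_t capture_time_ms, size_t idx, int64_t now_ms) {
        if (layers_.size() < idx + 1)
          layers_.resize(idx + 1);  // Grow lazily, as the callbacks above do.
        layers_[idx].encode_start_time_ms[capture_time_ms] = now_ms;
      }
      void OnTargetBitrateChanged(size_t bytes_per_sec, size_t idx) {
        if (layers_.size() < idx + 1)
          layers_.resize(idx + 1);
        layers_[idx].target_bitrate_bytes_per_sec = bytes_per_sec;
      }

     private:
      std::vector<TimingFramesLayerInfo> layers_;
    };
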
 EncodedImageCallback::Result VCMEncodedFrameCallback::OnEncodedImage(
     const EncodedImage& encoded_image,
     const CodecSpecificInfo* codec_specific,
     const RTPFragmentationHeader* fragmentation_header) {
   TRACE_EVENT_INSTANT1("webrtc", "VCMEncodedFrameCallback::Encoded",
                        "timestamp", encoded_image._timeStamp);
+
+  bool is_timing_frame = false;
+  size_t simulcast_svc_idx = 0;
+  if (codec_specific->codecType == kVideoCodecVP9) {
+    if (codec_specific->codecSpecific.VP9.num_spatial_layers > 1)
+      simulcast_svc_idx = codec_specific->codecSpecific.VP9.spatial_idx;
+  } else if (codec_specific->codecType == kVideoCodecVP8) {
+    simulcast_svc_idx = codec_specific->codecSpecific.VP8.simulcastIdx;
+  } else if (codec_specific->codecType == kVideoCodecGeneric) {
+    simulcast_svc_idx = codec_specific->codecSpecific.generic.simulcast_idx;
+  } else if (codec_specific->codecType == kVideoCodecH264) {
+    // TODO(ilnik): When h264 simulcast is landed, extract simulcast idx here.
+  }
+  RTC_CHECK_LT(simulcast_svc_idx, timing_frames_info_.size());
+
+  int64_t encode_start_ms = -1;
+  auto it = timing_frames_info_[simulcast_svc_idx].encode_start_time_ms.find(
+      encoded_image.capture_time_ms_);
sprang_webrtc  2017/06/09 11:02:01
nit: maybe you could store timing_frames_info_[simulcast_svc_idx] …
ilnik  2017/06/09 12:06:05
Done.
+  if (it != timing_frames_info_[simulcast_svc_idx].encode_start_time_ms.end()) {
+    encode_start_ms = it->second;
+    // Assuming encoders do not reorder frames within a single stream, any
+    // entries with smaller timestamps belong to dropped frames and should be
+    // purged.
+    timing_frames_info_[simulcast_svc_idx].encode_start_time_ms.erase(
+        timing_frames_info_[simulcast_svc_idx].encode_start_time_ms.begin(),
+        it);
+    timing_frames_info_[simulcast_svc_idx].encode_start_time_ms.erase(it);
+  }
+  RTC_DCHECK_NE(encode_start_ms, -1);
+
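(Editor's aside, not part of the CL: the lookup-and-purge step above, isolated as a small helper. The function name is invented; the behavior mirrors the hunk: the entry for the encoded frame is consumed, and any older entries, which belong to frames the encoder silently dropped, are erased along with it.)

    #include <cstdint>
    #include <map>

    // Returns the recorded encode start time for |capture_time_ms|, or -1 if
    // the frame is unknown, and erases that entry plus all older entries.
    int64_t TakeEncodeStartTime(std::map<int64_t, int64_t>* start_times,
                                int64_t capture_time_ms) {
      auto it = start_times->find(capture_time_ms);
      if (it == start_times->end())
        return -1;
      int64_t encode_start_ms = it->second;
      start_times->erase(start_times->begin(), it);  // Purge dropped frames.
      start_times->erase(it);                        // Consume this frame's entry.
      return encode_start_ms;
    }
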
+  int64_t timing_frame_delay_ms =
+      encoded_image.capture_time_ms_ - last_timing_frame_time_ms_;
+  if (last_timing_frame_time_ms_ == -1 ||
+      timing_frame_delay_ms >= timing_frames_thresholds_.delay_ms ||
+      timing_frame_delay_ms == 0) {
+    is_timing_frame = true;
+    last_timing_frame_time_ms_ = encoded_image.capture_time_ms_;
+  }
+  RTC_CHECK_GT(framerate_, 0);
+  size_t outlier_frame_size =
+      timing_frames_thresholds_.outlier_ratio_percent / 100.0 *
+      timing_frames_info_[simulcast_svc_idx].target_bitrate_bytes_per_sec /
+      framerate_;
sprang_webrtc  2017/06/09 11:02:01
Could you clarify this by adding explicit parentheses?
ilnik  2017/06/09 12:06:05
Split in two steps now. No float is needed now.
+  if (encoded_image._length >= outlier_frame_size) {
+    is_timing_frame = true;
+  }
+  if (is_timing_frame) {
+    encoded_image.SetEncodeTime(encode_start_ms, rtc::TimeMillis());
+  }
+
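(Editor's aside, not part of the CL: the complete timing-frame decision written out as one invented helper, with the outlier-size computation done in integer arithmetic and explicit grouping, roughly what the follow-up patch set describes as "split in two steps". Parameter names are illustrative; the two thresholds correspond to the timingFrameTriggerThresholds passed in InitEncode.)

    #include <cstddef>
    #include <cstdint>

    bool IsTimingFrame(int64_t capture_time_ms,
                       int64_t last_timing_frame_time_ms,
                       int64_t delay_threshold_ms,
                       size_t encoded_bytes,
                       size_t target_bitrate_bytes_per_sec,
                       size_t framerate,
                       size_t outlier_ratio_percent) {
      // Periodic trigger: enough time has passed since the last timing frame.
      int64_t delay_ms = capture_time_ms - last_timing_frame_time_ms;
      if (last_timing_frame_time_ms == -1 || delay_ms >= delay_threshold_ms ||
          delay_ms == 0)
        return true;
      // Outlier trigger, integer-only with explicit grouping: at 500000 B/s
      // and 25 fps the average frame is 20000 bytes, so a 500% threshold
      // tags any frame of 100000 bytes or more.
      size_t average_frame_size_bytes = target_bitrate_bytes_per_sec / framerate;
      size_t outlier_frame_size =
          (average_frame_size_bytes * outlier_ratio_percent) / 100;
      return encoded_bytes >= outlier_frame_size;
    }
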
   Result result = post_encode_callback_->OnEncodedImage(
       encoded_image, codec_specific, fragmentation_header);
   if (result.error != Result::OK)