| Index: webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
|
| diff --git a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
|
| index 3eb47eedc01f0d967b7ab05d4e17e481d8fee504..ae861d7d166a32238edc6a518d61839c6f5e98f1 100644
|
| --- a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
|
| +++ b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
|
| @@ -59,64 +59,6 @@ int GCD(int a, int b) {
|
| return b;
|
| }
|
|
|
| -std::vector<int> GetStreamBitratesKbps(const VideoCodec& codec,
|
| - int bitrate_to_allocate_kbps) {
|
| - if (codec.numberOfSimulcastStreams <= 1) {
|
| - return std::vector<int>(1, bitrate_to_allocate_kbps);
|
| - }
|
| -
|
| - std::vector<int> bitrates_kbps(codec.numberOfSimulcastStreams);
|
| - // Allocate min -> target bitrates as long as we have bitrate to spend.
|
| - size_t last_active_stream = 0;
|
| - for (size_t i = 0; i < static_cast<size_t>(codec.numberOfSimulcastStreams) &&
|
| - bitrate_to_allocate_kbps >=
|
| - static_cast<int>(codec.simulcastStream[i].minBitrate);
|
| - ++i) {
|
| - last_active_stream = i;
|
| - int allocated_bitrate_kbps =
|
| - std::min(static_cast<int>(codec.simulcastStream[i].targetBitrate),
|
| - bitrate_to_allocate_kbps);
|
| - bitrates_kbps[i] = allocated_bitrate_kbps;
|
| - bitrate_to_allocate_kbps -= allocated_bitrate_kbps;
|
| - }
|
| -
|
| - // Spend additional bits on the highest-quality active layer, up to max
|
| - // bitrate.
|
| - // TODO(pbos): Consider spending additional bits on last_active_stream-1 down
|
| - // to 0 and not just the top layer when we have additional bitrate to spend.
|
| - int allocated_bitrate_kbps = std::min(
|
| - static_cast<int>(codec.simulcastStream[last_active_stream].maxBitrate -
|
| - bitrates_kbps[last_active_stream]),
|
| - bitrate_to_allocate_kbps);
|
| - bitrates_kbps[last_active_stream] += allocated_bitrate_kbps;
|
| - bitrate_to_allocate_kbps -= allocated_bitrate_kbps;
|
| -
|
| - // Make sure we can always send something. Suspending below min bitrate is
|
| - // controlled outside the codec implementation and is not overriden by this.
|
| - if (bitrates_kbps[0] < static_cast<int>(codec.simulcastStream[0].minBitrate))
|
| - bitrates_kbps[0] = static_cast<int>(codec.simulcastStream[0].minBitrate);
|
| -
|
| - return bitrates_kbps;
|
| -}
|
| -
|
| -uint32_t SumStreamMaxBitrate(int streams, const VideoCodec& codec) {
|
| - uint32_t bitrate_sum = 0;
|
| - for (int i = 0; i < streams; ++i) {
|
| - bitrate_sum += codec.simulcastStream[i].maxBitrate;
|
| - }
|
| - return bitrate_sum;
|
| -}
|
| -
|
| -int NumberOfStreams(const VideoCodec& codec) {
|
| - int streams =
|
| - codec.numberOfSimulcastStreams < 1 ? 1 : codec.numberOfSimulcastStreams;
|
| - uint32_t simulcast_max_bitrate = SumStreamMaxBitrate(streams, codec);
|
| - if (simulcast_max_bitrate == 0) {
|
| - streams = 1;
|
| - }
|
| - return streams;
|
| -}
|
| -
|
| bool ValidSimulcastResolutions(const VideoCodec& codec, int num_streams) {
|
| if (codec.width != codec.simulcastStream[num_streams - 1].width ||
|
| codec.height != codec.simulcastStream[num_streams - 1].height) {
|
| @@ -131,14 +73,13 @@ bool ValidSimulcastResolutions(const VideoCodec& codec, int num_streams) {
|
| return true;
|
| }
|
|
|
| -int NumStreamsDisabled(const std::vector<bool>& streams) {
|
| - int num_disabled = 0;
|
| - for (bool stream : streams) {
|
| - if (!stream)
|
| - ++num_disabled;
|
| - }
|
| - return num_disabled;
|
| +VideoCodec DefaultCodec() {
|
| + VideoCodec codec;
|
| + memset(&codec, 0, sizeof(VideoCodec));
|
| + codec.codecType = kVideoCodecVP8;
|
| + return codec;
|
| }
|
| +
|
| } // namespace
|
|
|
| VP8Encoder* VP8Encoder::Create() {
|
| @@ -149,8 +90,6 @@ VP8Decoder* VP8Decoder::Create() {
|
| return new VP8DecoderImpl();
|
| }
|
|
|
| -const float kTl1MaxTimeToDropFrames = 20.0f;
|
| -
|
| VP8EncoderImpl::VP8EncoderImpl()
|
| : encoded_complete_callback_(NULL),
|
| inited_(false),
|
| @@ -160,11 +99,9 @@ VP8EncoderImpl::VP8EncoderImpl()
|
| cpu_speed_default_(-6),
|
| rc_max_intra_target_(0),
|
| token_partitions_(VP8_ONE_TOKENPARTITION),
|
| + simulcast_state_(DefaultCodec()),
|
| down_scale_requested_(false),
|
| down_scale_bitrate_(0),
|
| - tl0_frame_dropper_(),
|
| - tl1_frame_dropper_(kTl1MaxTimeToDropFrames),
|
| - key_frame_request_(kMaxSimulcastStreams, false),
|
| quality_scaler_enabled_(false) {
|
| uint32_t seed = static_cast<uint32_t>(TickTime::MillisecondTimestamp());
|
| srand(seed);
|
| @@ -174,7 +111,6 @@ VP8EncoderImpl::VP8EncoderImpl()
|
| temporal_layers_.reserve(kMaxSimulcastStreams);
|
| raw_images_.reserve(kMaxSimulcastStreams);
|
| encoded_images_.reserve(kMaxSimulcastStreams);
|
| - send_stream_.reserve(kMaxSimulcastStreams);
|
| cpu_speed_.assign(kMaxSimulcastStreams, -6); // Set default to -6.
|
| encoders_.reserve(kMaxSimulcastStreams);
|
| configurations_.reserve(kMaxSimulcastStreams);
|
| @@ -201,7 +137,7 @@ int VP8EncoderImpl::Release() {
|
| encoders_.pop_back();
|
| }
|
| configurations_.clear();
|
| - send_stream_.clear();
|
| + simulcast_state_ = SimulcastState(DefaultCodec());
|
| cpu_speed_.clear();
|
| while (!raw_images_.empty()) {
|
| vpx_img_free(&raw_images_.back());
|
| @@ -250,7 +186,7 @@ int VP8EncoderImpl::SetRates(uint32_t new_bitrate_kbit,
|
| if (k_pixels_per_frame > new_bitrate_kbit) {
|
| down_scale_requested_ = true;
|
| down_scale_bitrate_ = new_bitrate_kbit;
|
| - key_frame_request_[0] = true;
|
| + simulcast_state_.RequestKeyFrame(0);
|
| }
|
| } else {
|
| if (new_bitrate_kbit > (2 * down_scale_bitrate_) ||
|
| @@ -273,31 +209,26 @@ int VP8EncoderImpl::SetRates(uint32_t new_bitrate_kbit,
|
| }
|
| }
|
|
|
| - std::vector<int> stream_bitrates =
|
| - GetStreamBitratesKbps(codec_, new_bitrate_kbit);
|
| - size_t stream_idx = encoders_.size() - 1;
|
| - for (size_t i = 0; i < encoders_.size(); ++i, --stream_idx) {
|
| - if (encoders_.size() > 1)
|
| - SetStreamState(stream_bitrates[stream_idx] > 0, stream_idx);
|
| -
|
| - unsigned int target_bitrate = stream_bitrates[stream_idx];
|
| + simulcast_state_.AllocateBitrate(new_bitrate_kbit * 1000);
|
| + for (const SimulcastState::Stream& stream : simulcast_state_.Streams()) {
|
| + unsigned int target_bitrate = stream.allocated_rate_bps / 1000;
|
| unsigned int max_bitrate = codec_.maxBitrate;
|
| - int framerate = new_framerate;
|
| // TODO(holmer): This is a temporary hack for screensharing, where we
|
| // interpret the startBitrate as the encoder target bitrate. This is
|
| // to allow for a different max bitrate, so if the codec can't meet
|
| // the target we still allow it to overshoot up to the max before dropping
|
| // frames. This hack should be improved.
|
| - if (codec_.targetBitrate > 0 &&
|
| + if (codec_.targetBitrate > 0 && simulcast_state_.NumStreams() == 1 &&
|
| (codec_.codecSpecific.VP8.numberOfTemporalLayers == 2 ||
|
| codec_.simulcastStream[0].numberOfTemporalLayers == 2)) {
|
| int tl0_bitrate = std::min(codec_.targetBitrate, target_bitrate);
|
| max_bitrate = std::min(codec_.maxBitrate, target_bitrate);
|
| target_bitrate = tl0_bitrate;
|
| }
|
| + int i = simulcast_state_.NumStreams() - stream.idx - 1;
|
| configurations_[i].rc_target_bitrate = target_bitrate;
|
| - temporal_layers_[stream_idx]->ConfigureBitrates(
|
| - target_bitrate, max_bitrate, framerate, &configurations_[i]);
|
| + temporal_layers_[stream.idx]->ConfigureBitrates(
|
| + target_bitrate, max_bitrate, new_framerate, &configurations_[i]);
|
| if (vpx_codec_enc_config_set(&encoders_[i], &configurations_[i])) {
|
| return WEBRTC_VIDEO_CODEC_ERROR;
|
| }
|
| @@ -310,15 +241,6 @@ const char* VP8EncoderImpl::ImplementationName() const {
|
| return "libvpx";
|
| }
|
|
|
| -void VP8EncoderImpl::SetStreamState(bool send_stream,
|
| - int stream_idx) {
|
| - if (send_stream && !send_stream_[stream_idx]) {
|
| - // Need a key frame if we have not sent this stream before.
|
| - key_frame_request_[stream_idx] = true;
|
| - }
|
| - send_stream_[stream_idx] = send_stream;
|
| -}
|
| -
|
| void VP8EncoderImpl::SetupTemporalLayers(int num_streams,
|
| int num_temporal_layers,
|
| const VideoCodec& codec) {
|
| @@ -378,7 +300,10 @@ int VP8EncoderImpl::InitEncode(const VideoCodec* inst,
|
| return retVal;
|
| }
|
|
|
| - int number_of_streams = NumberOfStreams(*inst);
|
| + std::unique_ptr<SimulcastState> new_simulcast_state(
|
| + new SimulcastState(*inst));
|
| +
|
| + int number_of_streams = new_simulcast_state->NumStreams();
|
| bool doing_simulcast = (number_of_streams > 1);
|
|
|
| if (doing_simulcast && !ValidSimulcastResolutions(*inst, number_of_streams)) {
|
| @@ -398,6 +323,7 @@ int VP8EncoderImpl::InitEncode(const VideoCodec* inst,
|
|
|
| timestamp_ = 0;
|
| codec_ = *inst;
|
| + simulcast_state_ = *new_simulcast_state;
|
|
|
| // Code expects simulcastStream resolutions to be correct, make sure they are
|
| // filled even when there are no simulcast layers.
|
| @@ -413,10 +339,7 @@ int VP8EncoderImpl::InitEncode(const VideoCodec* inst,
|
| configurations_.resize(number_of_streams);
|
| downsampling_factors_.resize(number_of_streams);
|
| raw_images_.resize(number_of_streams);
|
| - send_stream_.resize(number_of_streams);
|
| - send_stream_[0] = true; // For non-simulcast case.
|
| cpu_speed_.resize(number_of_streams);
|
| - std::fill(key_frame_request_.begin(), key_frame_request_.end(), false);
|
|
|
| int idx = number_of_streams - 1;
|
| for (int i = 0; i < (number_of_streams - 1); ++i, --idx) {
|
| @@ -424,10 +347,10 @@ int VP8EncoderImpl::InitEncode(const VideoCodec* inst,
|
| inst->simulcastStream[idx - 1].width);
|
| downsampling_factors_[i].num = inst->simulcastStream[idx].width / gcd;
|
| downsampling_factors_[i].den = inst->simulcastStream[idx - 1].width / gcd;
|
| - send_stream_[i] = false;
|
| + simulcast_state_.SetSending(i, false);
|
| }
|
| if (number_of_streams > 1) {
|
| - send_stream_[number_of_streams - 1] = false;
|
| + simulcast_state_.SetSending(number_of_streams - 1, false);
|
| downsampling_factors_[number_of_streams - 1].num = 1;
|
| downsampling_factors_[number_of_streams - 1].den = 1;
|
| }
|
| @@ -562,15 +485,14 @@ int VP8EncoderImpl::InitEncode(const VideoCodec* inst,
|
| inst->maxFramerate,
|
| &configurations_[0]);
|
| } else {
|
| + simulcast_state_.AllocateBitrate(inst->startBitrate * 1000);
|
| // Note the order we use is different from webm, we have lowest resolution
|
| // at position 0 and they have highest resolution at position 0.
|
| int stream_idx = encoders_.size() - 1;
|
| - std::vector<int> stream_bitrates =
|
| - GetStreamBitratesKbps(codec_, inst->startBitrate);
|
| - SetStreamState(stream_bitrates[stream_idx] > 0, stream_idx);
|
| - configurations_[0].rc_target_bitrate = stream_bitrates[stream_idx];
|
| + int stream_bitrate_kbps = simulcast_state_.AllocatedRate(stream_idx) / 1000;
|
| + configurations_[0].rc_target_bitrate = stream_bitrate_kbps;
|
| temporal_layers_[stream_idx]->ConfigureBitrates(
|
| - stream_bitrates[stream_idx], inst->maxBitrate, inst->maxFramerate,
|
| + stream_bitrate_kbps, inst->maxBitrate, inst->maxFramerate,
|
| &configurations_[0]);
|
| --stream_idx;
|
| for (size_t i = 1; i < encoders_.size(); ++i, --stream_idx) {
|
| @@ -589,10 +511,10 @@ int VP8EncoderImpl::InitEncode(const VideoCodec* inst,
|
| vpx_img_alloc(&raw_images_[i], VPX_IMG_FMT_I420,
|
| inst->simulcastStream[stream_idx].width,
|
| inst->simulcastStream[stream_idx].height, kVp832ByteAlign);
|
| - SetStreamState(stream_bitrates[stream_idx] > 0, stream_idx);
|
| - configurations_[i].rc_target_bitrate = stream_bitrates[stream_idx];
|
| +      stream_bitrate_kbps = simulcast_state_.AllocatedRate(stream_idx) / 1000;
|
| + configurations_[i].rc_target_bitrate = stream_bitrate_kbps;
|
| temporal_layers_[stream_idx]->ConfigureBitrates(
|
| - stream_bitrates[stream_idx], inst->maxBitrate, inst->maxFramerate,
|
| + stream_bitrate_kbps, inst->maxBitrate, inst->maxFramerate,
|
| &configurations_[i]);
|
| }
|
| }
|
| @@ -787,22 +709,13 @@ int VP8EncoderImpl::Encode(const VideoFrame& frame,
|
| flags[i] = ret;
|
| }
|
| bool send_key_frame = false;
|
| - for (size_t i = 0; i < key_frame_request_.size() && i < send_stream_.size();
|
| - ++i) {
|
| - if (key_frame_request_[i] && send_stream_[i]) {
|
| - send_key_frame = true;
|
| - break;
|
| - }
|
| - }
|
| - if (!send_key_frame && frame_types) {
|
| - for (size_t i = 0; i < frame_types->size() && i < send_stream_.size();
|
| - ++i) {
|
| - if ((*frame_types)[i] == kVideoFrameKey && send_stream_[i]) {
|
| - send_key_frame = true;
|
| - break;
|
| - }
|
| - }
|
| +  size_t num_entries = std::min<size_t>(frame_types ? frame_types->size() : 0,
|
| +                                        simulcast_state_.NumStreams());
|
| + for (size_t i = 0; i < num_entries; ++i) {
|
| + send_key_frame |= simulcast_state_.GetAndResetKeyFrameRequest(i) ||
|
| + (frame_types && (*frame_types)[i] == kVideoFrameKey);
|
| }
|
| +
|
| // The flag modification below (due to forced key frame, RPS, etc.,) for now
|
| // will be the same for all encoders/spatial layers.
|
| // TODO(marpan/holmer): Allow for key frame request to be set per encoder.
|
| @@ -821,7 +734,6 @@ int VP8EncoderImpl::Encode(const VideoFrame& frame,
|
| for (size_t i = 0; i < encoders_.size(); ++i) {
|
| flags[i] = VPX_EFLAG_FORCE_KF;
|
| }
|
| - std::fill(key_frame_request_.begin(), key_frame_request_.end(), false);
|
| } else if (codec_specific_info &&
|
| codec_specific_info->codecType == kVideoCodecVP8) {
|
| if (feedback_mode_) {
|
| @@ -961,8 +873,10 @@ void VP8EncoderImpl::PopulateCodecSpecific(
|
|
|
| int VP8EncoderImpl::GetEncodedPartitions(const VideoFrame& input_image,
|
| bool only_predicting_from_key_frame) {
|
| - int bw_resolutions_disabled =
|
| - (encoders_.size() > 1) ? NumStreamsDisabled(send_stream_) : -1;
|
| + int bw_resolutions_disabled = (!encoders_.empty())
|
| + ? (simulcast_state_.NumStreams() -
|
| + simulcast_state_.NumSendingStreams())
|
| + : -1;
|
|
|
| int stream_idx = static_cast<int>(encoders_.size()) - 1;
|
| int result = WEBRTC_VIDEO_CODEC_OK;
|
| @@ -1028,7 +942,7 @@ int VP8EncoderImpl::GetEncodedPartitions(const VideoFrame& input_image,
|
| temporal_layers_[stream_idx]->FrameEncoded(
|
| encoded_images_[encoder_idx]._length,
|
| encoded_images_[encoder_idx]._timeStamp, qp);
|
| - if (send_stream_[stream_idx]) {
|
| + if (simulcast_state_.IsSending(stream_idx)) {
|
| if (encoded_images_[encoder_idx]._length > 0) {
|
| TRACE_COUNTER_ID1("webrtc", "EncodedFrameSize", encoder_idx,
|
| encoded_images_[encoder_idx]._length);
|
| @@ -1053,7 +967,7 @@ int VP8EncoderImpl::GetEncodedPartitions(const VideoFrame& input_image,
|
| }
|
| }
|
| }
|
| - if (encoders_.size() == 1 && send_stream_[0]) {
|
| + if (encoders_.size() == 1 && simulcast_state_.IsSending(0)) {
|
| if (encoded_images_[0]._length > 0) {
|
| int qp_128;
|
| vpx_codec_control(&encoders_[0], VP8E_GET_LAST_QUANTIZER, &qp_128);
|
|
|
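Note: SimulcastState itself is not defined in this file; the class is introduced elsewhere in the patch set. As a rough sketch of the interface this diff relies on, the following reconstructs it from the call sites above (NumStreams, NumSendingStreams, AllocateBitrate, AllocatedRate, Streams, IsSending, SetSending, RequestKeyFrame, GetAndResetKeyFrameRequest) and from the removed helpers it replaces (GetStreamBitratesKbps, NumberOfStreams, SetStreamState, NumStreamsDisabled). The member names allocated_rate_bps and idx come from the diff, but the constructor, the internal Stream bookkeeping, the bps units, and the include path are assumptions for illustration, not the actual implementation.

// Hypothetical sketch only: the real SimulcastState is introduced elsewhere
// in this patch set; interface and units are inferred from the diff above.
#include <algorithm>
#include <cstdint>
#include <vector>
#include "webrtc/common_types.h"  // VideoCodec (assumed location).

class SimulcastState {
 public:
  struct Stream {
    size_t idx = 0;
    uint32_t allocated_rate_bps = 0;
    bool sending = false;
    bool key_frame_requested = false;
  };

  explicit SimulcastState(const VideoCodec& codec) : codec_(codec) {
    // Mirror the removed NumberOfStreams(): at least one stream, falling back
    // to a single stream when the simulcast max bitrates sum to zero.
    int num = std::max(static_cast<int>(codec.numberOfSimulcastStreams), 1);
    uint32_t max_bitrate_sum = 0;
    for (int i = 0; i < num; ++i)
      max_bitrate_sum += codec.simulcastStream[i].maxBitrate;
    if (max_bitrate_sum == 0)
      num = 1;
    streams_.resize(num);
    for (int i = 0; i < num; ++i)
      streams_[i].idx = i;
    streams_[0].sending = true;  // The non-simulcast case always sends.
  }

  size_t NumStreams() const { return streams_.size(); }
  size_t NumSendingStreams() const {
    size_t sending = 0;
    for (const Stream& stream : streams_)
      sending += stream.sending ? 1 : 0;
    return sending;
  }

  // Split |bitrate_bps| as the removed GetStreamBitratesKbps() did: min up to
  // target per layer in order, any remainder to the highest active layer
  // (capped at its max), and never below the lowest layer's min bitrate.
  void AllocateBitrate(uint32_t bitrate_bps) {
    if (streams_.size() == 1) {
      streams_[0].allocated_rate_bps = bitrate_bps;
      return;
    }
    uint32_t left_bps = bitrate_bps;
    size_t last_active = 0;
    size_t i = 0;
    for (; i < streams_.size(); ++i) {
      if (left_bps < codec_.simulcastStream[i].minBitrate * 1000)
        break;
      last_active = i;
      uint32_t allocated = std::min<uint32_t>(
          codec_.simulcastStream[i].targetBitrate * 1000, left_bps);
      SetAllocation(i, allocated);
      left_bps -= allocated;
    }
    for (; i < streams_.size(); ++i)
      SetAllocation(i, 0);  // Layers we cannot afford are switched off.
    // Spend leftover bits on the highest active layer, up to its max bitrate.
    uint32_t cur = streams_[last_active].allocated_rate_bps;
    uint32_t max_bps = codec_.simulcastStream[last_active].maxBitrate * 1000;
    SetAllocation(last_active,
                  cur + std::min<uint32_t>(max_bps > cur ? max_bps - cur : 0u,
                                           left_bps));
    // Keep the lowest layer at or above its min so something can always be
    // sent; suspending below min bitrate is handled outside the encoder.
    uint32_t min0_bps = codec_.simulcastStream[0].minBitrate * 1000;
    if (streams_[0].allocated_rate_bps < min0_bps)
      SetAllocation(0, min0_bps);
  }

  uint32_t AllocatedRate(size_t idx) const {
    return streams_[idx].allocated_rate_bps;
  }
  const std::vector<Stream>& Streams() const { return streams_; }
  bool IsSending(size_t idx) const { return streams_[idx].sending; }
  void SetSending(size_t idx, bool sending) {
    // As in removed SetStreamState(): a newly enabled stream needs a key frame.
    if (sending && !streams_[idx].sending)
      streams_[idx].key_frame_requested = true;
    streams_[idx].sending = sending;
  }
  void RequestKeyFrame(size_t idx) { streams_[idx].key_frame_requested = true; }
  bool GetAndResetKeyFrameRequest(size_t idx) {
    bool requested = streams_[idx].key_frame_requested;
    streams_[idx].key_frame_requested = false;
    return requested;
  }

 private:
  void SetAllocation(size_t idx, uint32_t bitrate_bps) {
    streams_[idx].allocated_rate_bps = bitrate_bps;
    SetSending(idx, bitrate_bps > 0);
  }

  VideoCodec codec_;
  std::vector<Stream> streams_;
};

With an interface along these lines, SetRates() reduces to a single AllocateBitrate(new_bitrate_kbit * 1000) call followed by reading stream.allocated_rate_bps / 1000 per stream, and the per-stream bookkeeping that used to live in send_stream_ and key_frame_request_ moves behind SetSending()/IsSending() and RequestKeyFrame()/GetAndResetKeyFrameRequest().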