| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| (...skipping 65 matching lines...) |
| 76 } | 76 } |
| 77 if (inst->VP8().feedbackModeOn && inst->numberOfSimulcastStreams > 1) { | 77 if (inst->VP8().feedbackModeOn && inst->numberOfSimulcastStreams > 1) { |
| 78 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 78 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 79 } | 79 } |
| 80 if (inst->VP8().automaticResizeOn && inst->numberOfSimulcastStreams > 1) { | 80 if (inst->VP8().automaticResizeOn && inst->numberOfSimulcastStreams > 1) { |
| 81 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 81 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 82 } | 82 } |
| 83 return WEBRTC_VIDEO_CODEC_OK; | 83 return WEBRTC_VIDEO_CODEC_OK; |
| 84 } | 84 } |
| 85 | 85 |
| 86 struct ScreenshareTemporalLayersFactory : webrtc::TemporalLayersFactory { |
| 87 ScreenshareTemporalLayersFactory() {} |
| 88 virtual ~ScreenshareTemporalLayersFactory() {} |
| 89 |
| 90 virtual webrtc::TemporalLayers* Create(int num_temporal_layers, |
| 91 uint8_t initial_tl0_pic_idx) const { |
| 92 return new webrtc::ScreenshareLayers(num_temporal_layers, rand(), |
| 93 webrtc::Clock::GetRealTimeClock()); |
| 94 } |
| 95 }; |
| 96 |
| 86 // An EncodedImageCallback implementation that forwards on calls to a | 97 // An EncodedImageCallback implementation that forwards on calls to a |
| 87 // SimulcastEncoderAdapter, but with the stream index it's registered with as | 98 // SimulcastEncoderAdapter, but with the stream index it's registered with as |
| 88 // the first parameter to OnEncodedImage. | 99 // the first parameter to OnEncodedImage. |
| 89 class AdapterEncodedImageCallback : public webrtc::EncodedImageCallback { | 100 class AdapterEncodedImageCallback : public webrtc::EncodedImageCallback { |
| 90 public: | 101 public: |
| 91 AdapterEncodedImageCallback(webrtc::SimulcastEncoderAdapter* adapter, | 102 AdapterEncodedImageCallback(webrtc::SimulcastEncoderAdapter* adapter, |
| 92 size_t stream_idx) | 103 size_t stream_idx) |
| 93 : adapter_(adapter), stream_idx_(stream_idx) {} | 104 : adapter_(adapter), stream_idx_(stream_idx) {} |
| 94 | 105 |
| 95 EncodedImageCallback::Result OnEncodedImage( | 106 EncodedImageCallback::Result OnEncodedImage( |
| 96 const webrtc::EncodedImage& encoded_image, | 107 const webrtc::EncodedImage& encoded_image, |
| 97 const webrtc::CodecSpecificInfo* codec_specific_info, | 108 const webrtc::CodecSpecificInfo* codec_specific_info, |
| 98 const webrtc::RTPFragmentationHeader* fragmentation) override { | 109 const webrtc::RTPFragmentationHeader* fragmentation) override { |
| 99 return adapter_->OnEncodedImage(stream_idx_, encoded_image, | 110 return adapter_->OnEncodedImage(stream_idx_, encoded_image, |
| 100 codec_specific_info, fragmentation); | 111 codec_specific_info, fragmentation); |
| 101 } | 112 } |
| 102 | 113 |
| 103 private: | 114 private: |
| 104 webrtc::SimulcastEncoderAdapter* const adapter_; | 115 webrtc::SimulcastEncoderAdapter* const adapter_; |
| 105 const size_t stream_idx_; | 116 const size_t stream_idx_; |
| 106 }; | 117 }; |
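For illustration, a minimal sketch of how one such callback per sub-encoder could be registered so that OnEncodedImage receives the originating stream index. The streaminfos_ bookkeeping and callback ownership shown here are assumptions for the sketch, not code from this CL:

    // Hypothetical registration loop inside the adapter: give each
    // sub-encoder its own AdapterEncodedImageCallback carrying its index.
    for (size_t i = 0; i < streaminfos_.size(); ++i) {
      streaminfos_[i].callback.reset(new AdapterEncodedImageCallback(this, i));
      streaminfos_[i].encoder->RegisterEncodeCompleteCallback(
          streaminfos_[i].callback.get());
    }

With this wiring, every sub-encoder reports completed frames through the adapter, which can then tag them with the correct simulcast stream index before forwarding to the application callback.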
| 107 | 118 |
| 108 // Utility class used to adapt the simulcast id as reported by the temporal | |
| 109 // layers factory, since each sub-encoder will report stream 0. | |
| 110 class TemporalLayersFactoryAdapter : public webrtc::TemporalLayersFactory { | |
| 111 public: | |
| 112 TemporalLayersFactoryAdapter(int adapted_simulcast_id, | |
| 113 const TemporalLayersFactory& tl_factory) | |
| 114 : adapted_simulcast_id_(adapted_simulcast_id), tl_factory_(tl_factory) {} | |
| 115 ~TemporalLayersFactoryAdapter() override {} | |
| 116 webrtc::TemporalLayers* Create(int simulcast_id, | |
| 117 int temporal_layers, | |
| 118 uint8_t initial_tl0_pic_idx) const override { | |
| 119 return tl_factory_.Create(adapted_simulcast_id_, temporal_layers, | |
| 120 initial_tl0_pic_idx); | |
| 121 } | |
| 122 | |
| 123 const int adapted_simulcast_id_; | |
| 124 const TemporalLayersFactory& tl_factory_; | |
| 125 }; | |
| 126 | |
| 127 } // namespace | 119 } // namespace |
| 128 | 120 |
| 129 namespace webrtc { | 121 namespace webrtc { |
| 130 | 122 |
| 131 SimulcastEncoderAdapter::SimulcastEncoderAdapter(VideoEncoderFactory* factory) | 123 SimulcastEncoderAdapter::SimulcastEncoderAdapter(VideoEncoderFactory* factory) |
| 132 : factory_(factory), | 124 : factory_(factory), |
| 133 encoded_complete_callback_(nullptr), | 125 encoded_complete_callback_(nullptr), |
| 134 implementation_name_("SimulcastEncoderAdapter") { | 126 implementation_name_("SimulcastEncoderAdapter") { |
| 135 memset(&codec_, 0, sizeof(webrtc::VideoCodec)); | 127 memset(&codec_, 0, sizeof(webrtc::VideoCodec)); |
| 128 rate_allocator_.reset(new SimulcastRateAllocator(codec_)); |
| 136 } | 129 } |
| 137 | 130 |
| 138 SimulcastEncoderAdapter::~SimulcastEncoderAdapter() { | 131 SimulcastEncoderAdapter::~SimulcastEncoderAdapter() { |
| 139 Release(); | 132 Release(); |
| 140 } | 133 } |
| 141 | 134 |
| 142 int SimulcastEncoderAdapter::Release() { | 135 int SimulcastEncoderAdapter::Release() { |
| 143 // TODO(pbos): Keep the last encoder instance but call ::Release() on it, then | 136 // TODO(pbos): Keep the last encoder instance but call ::Release() on it, then |
| 144 // re-use this instance in ::InitEncode(). This means that changing | 137 // re-use this instance in ::InitEncode(). This means that changing |
| 145 // resolutions doesn't require reallocation of the first encoder, but only | 138 // resolutions doesn't require reallocation of the first encoder, but only |
| (...skipping 27 matching lines...) |
| 173 } | 166 } |
| 174 | 167 |
| 175 int number_of_streams = NumberOfStreams(*inst); | 168 int number_of_streams = NumberOfStreams(*inst); |
| 176 const bool doing_simulcast = (number_of_streams > 1); | 169 const bool doing_simulcast = (number_of_streams > 1); |
| 177 | 170 |
| 178 if (doing_simulcast && !ValidSimulcastResolutions(*inst, number_of_streams)) { | 171 if (doing_simulcast && !ValidSimulcastResolutions(*inst, number_of_streams)) { |
| 179 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 172 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 180 } | 173 } |
| 181 | 174 |
| 182 codec_ = *inst; | 175 codec_ = *inst; |
| 183 SimulcastRateAllocator rate_allocator(codec_, nullptr); | 176 rate_allocator_.reset(new SimulcastRateAllocator(codec_)); |
| 184 BitrateAllocation allocation = rate_allocator.GetAllocation( | 177 std::vector<uint32_t> start_bitrates = |
| 185 codec_.startBitrate * 1000, codec_.maxFramerate); | 178 rate_allocator_->GetAllocation(codec_.startBitrate); |
| 186 std::vector<uint32_t> start_bitrates; | 179 |
| 187 for (int i = 0; i < kMaxSimulcastStreams; ++i) { | 180 // Special mode when screensharing on a single stream. |
| 188 uint32_t stream_bitrate = allocation.GetSpatialLayerSum(i) / 1000; | 181 if (number_of_streams == 1 && inst->mode == kScreensharing) { |
| 189 start_bitrates.push_back(stream_bitrate); | 182 screensharing_tl_factory_.reset(new ScreenshareTemporalLayersFactory()); |
| 183 codec_.VP8()->tl_factory = screensharing_tl_factory_.get(); |
| 190 } | 184 } |
| 191 | 185 |
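To make the two allocation styles above concrete: the OLD column's BitrateAllocation stores bits per second per (spatial, temporal) layer and is collapsed to per-stream kbps, which is the shape the NEW column's GetAllocation() returns directly. A small worked example with made-up values (illustration only, not code from this CL):

    // Made-up bitrates, for illustration of the conversion only.
    webrtc::BitrateAllocation allocation;
    allocation.SetBitrate(0, 0, 150000);  // Stream 0, TL0: 150 kbps.
    allocation.SetBitrate(0, 1, 50000);   // Stream 0, TL1:  50 kbps.
    allocation.SetBitrate(1, 0, 500000);  // Stream 1, TL0: 500 kbps.
    uint32_t stream0_kbps = allocation.GetSpatialLayerSum(0) / 1000;  // == 200
    uint32_t stream1_kbps = allocation.GetSpatialLayerSum(1) / 1000;  // == 500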
| 192 std::string implementation_name; | 186 std::string implementation_name; |
| 193 // Create |number_of_streams| encoder instances and init them. | 187 // Create |number_of_streams| encoder instances and init them. |
| 194 for (int i = 0; i < number_of_streams; ++i) { | 188 for (int i = 0; i < number_of_streams; ++i) { |
| 195 VideoCodec stream_codec; | 189 VideoCodec stream_codec; |
| 196 uint32_t start_bitrate_kbps = start_bitrates[i]; | 190 uint32_t start_bitrate_kbps = start_bitrates[i]; |
| 197 if (!doing_simulcast) { | 191 if (!doing_simulcast) { |
| 198 stream_codec = codec_; | 192 stream_codec = codec_; |
| 199 stream_codec.numberOfSimulcastStreams = 1; | 193 stream_codec.numberOfSimulcastStreams = 1; |
| 200 } else { | 194 } else { |
| 201 // Cap start bitrate to the min bitrate in order to avoid strange codec | 195 // Cap start bitrate to the min bitrate in order to avoid strange codec |
| 202 // behavior. Since sending will be false, this should not matter. | 196 // behavior. Since sending will be false, this should not matter. |
| 203 start_bitrate_kbps = | 197 start_bitrate_kbps = |
| 204 std::max(codec_.simulcastStream[i].minBitrate, start_bitrate_kbps); | 198 std::max(codec_.simulcastStream[i].minBitrate, start_bitrate_kbps); |
| 205 bool highest_resolution_stream = (i == (number_of_streams - 1)); | 199 bool highest_resolution_stream = (i == (number_of_streams - 1)); |
| 206 PopulateStreamCodec(&codec_, i, start_bitrate_kbps, | 200 PopulateStreamCodec(&codec_, i, start_bitrate_kbps, |
| 207 highest_resolution_stream, &stream_codec); | 201 highest_resolution_stream, &stream_codec); |
| 208 } | 202 } |
| 209 TemporalLayersFactoryAdapter tl_factory_adapter( | |
| 210 i, *codec_.codecSpecific.VP8.tl_factory); | |
| 211 stream_codec.codecSpecific.VP8.tl_factory = &tl_factory_adapter; | |
| 212 | 203 |
| 213 // TODO(ronghuawu): Remove once this is handled in VP8EncoderImpl. | 204 // TODO(ronghuawu): Remove once this is handled in VP8EncoderImpl. |
| 214 if (stream_codec.qpMax < kDefaultMinQp) { | 205 if (stream_codec.qpMax < kDefaultMinQp) { |
| 215 stream_codec.qpMax = kDefaultMaxQp; | 206 stream_codec.qpMax = kDefaultMaxQp; |
| 216 } | 207 } |
| 217 | 208 |
| 218 VideoEncoder* encoder = factory_->Create(); | 209 VideoEncoder* encoder = factory_->Create(); |
| 219 ret = encoder->InitEncode(&stream_codec, number_of_cores, max_payload_size); | 210 ret = encoder->InitEncode(&stream_codec, number_of_cores, max_payload_size); |
| 220 if (ret < 0) { | 211 if (ret < 0) { |
| 221 Release(); | 212 Release(); |
| (...skipping 120 matching lines...) |
| 342 } | 333 } |
| 343 | 334 |
| 344 int SimulcastEncoderAdapter::SetChannelParameters(uint32_t packet_loss, | 335 int SimulcastEncoderAdapter::SetChannelParameters(uint32_t packet_loss, |
| 345 int64_t rtt) { | 336 int64_t rtt) { |
| 346 for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) { | 337 for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) { |
| 347 streaminfos_[stream_idx].encoder->SetChannelParameters(packet_loss, rtt); | 338 streaminfos_[stream_idx].encoder->SetChannelParameters(packet_loss, rtt); |
| 348 } | 339 } |
| 349 return WEBRTC_VIDEO_CODEC_OK; | 340 return WEBRTC_VIDEO_CODEC_OK; |
| 350 } | 341 } |
| 351 | 342 |
| 352 int SimulcastEncoderAdapter::SetRateAllocation(const BitrateAllocation& bitrate, | 343 int SimulcastEncoderAdapter::SetRates(uint32_t new_bitrate_kbit, |
| 353 uint32_t new_framerate) { | 344 uint32_t new_framerate) { |
| 354 if (!Initialized()) | 345 if (!Initialized()) { |
| 355 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 346 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
| 347 } |
| 348 if (new_framerate < 1) { |
| 349 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 350 } |
| 351 if (codec_.maxBitrate > 0 && new_bitrate_kbit > codec_.maxBitrate) { |
| 352 new_bitrate_kbit = codec_.maxBitrate; |
| 353 } |
| 356 | 354 |
| 357 if (new_framerate < 1) | 355 std::vector<uint32_t> stream_bitrates; |
| 358 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 356 if (new_bitrate_kbit > 0) { |
| 359 | |
| 360 if (codec_.maxBitrate > 0 && bitrate.get_sum_kbps() > codec_.maxBitrate) | |
| 361 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | |
| 362 | |
| 363 if (bitrate.get_sum_bps() > 0) { | |
| 364 // Make sure the bitrate fits the configured min bitrates. 0 is a special | 357 // Make sure the bitrate fits the configured min bitrates. 0 is a special |
| 365 // value that means paused, though, so leave it alone. | 358 // value that means paused, though, so leave it alone. |
| 366 if (bitrate.get_sum_kbps() < codec_.minBitrate) | 359 if (new_bitrate_kbit < codec_.minBitrate) { |
| 367 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 360 new_bitrate_kbit = codec_.minBitrate; |
| 368 | 361 } |
| 369 if (codec_.numberOfSimulcastStreams > 0 && | 362 if (codec_.numberOfSimulcastStreams > 0 && |
| 370 bitrate.get_sum_kbps() < codec_.simulcastStream[0].minBitrate) { | 363 new_bitrate_kbit < codec_.simulcastStream[0].minBitrate) { |
| 371 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 364 new_bitrate_kbit = codec_.simulcastStream[0].minBitrate; |
| 372 } | 365 } |
| 366 stream_bitrates = rate_allocator_->GetAllocation(new_bitrate_kbit); |
| 373 } | 367 } |
| 374 | |
| 375 codec_.maxFramerate = new_framerate; | 368 codec_.maxFramerate = new_framerate; |
| 376 | 369 |
| 370 // Disable any stream not in the current allocation. |
| 371 stream_bitrates.resize(streaminfos_.size(), 0U); |
| 372 |
| 377 for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) { | 373 for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) { |
| 378 uint32_t stream_bitrate_kbps = | 374 uint32_t stream_bitrate_kbps = stream_bitrates[stream_idx]; |
| 379 bitrate.GetSpatialLayerSum(stream_idx) / 1000; | |
| 380 | |
| 381 // Need a key frame if we have not sent this stream before. | 375 // Need a key frame if we have not sent this stream before. |
| 382 if (stream_bitrate_kbps > 0 && !streaminfos_[stream_idx].send_stream) { | 376 if (stream_bitrate_kbps > 0 && !streaminfos_[stream_idx].send_stream) { |
| 383 streaminfos_[stream_idx].key_frame_request = true; | 377 streaminfos_[stream_idx].key_frame_request = true; |
| 384 } | 378 } |
| 385 streaminfos_[stream_idx].send_stream = stream_bitrate_kbps > 0; | 379 streaminfos_[stream_idx].send_stream = stream_bitrate_kbps > 0; |
| 386 | 380 |
| 387 // Slice the temporal layers out of the full allocation and pass it on to | 381 // TODO(holmer): This is a temporary hack for screensharing, where we |
| 388 // the encoder handling the current simulcast stream. | 382 // interpret the startBitrate as the encoder target bitrate. This is |
| 389 BitrateAllocation stream_allocation; | 383 // to allow for a different max bitrate, so if the codec can't meet |
| 390 for (int i = 0; i < kMaxTemporalStreams; ++i) | 384 // the target we still allow it to overshoot up to the max before dropping |
| 391 stream_allocation.SetBitrate(0, i, bitrate.GetBitrate(stream_idx, i)); | 385 // frames. This hack should be improved. |
| 392 streaminfos_[stream_idx].encoder->SetRateAllocation(stream_allocation, | 386 if (codec_.targetBitrate > 0 && |
| 393 new_framerate); | 387 (codec_.VP8()->numberOfTemporalLayers == 2 || |
| 388 codec_.simulcastStream[0].numberOfTemporalLayers == 2)) { |
| 389 stream_bitrate_kbps = std::min(codec_.maxBitrate, stream_bitrate_kbps); |
| 390 // TODO(ronghuawu): Can't change max bitrate via the VideoEncoder |
| 391 // interface. And VP8EncoderImpl doesn't take negative framerate. |
| 392 // max_bitrate = std::min(codec_.maxBitrate, stream_bitrate_kbps); |
| 393 // new_framerate = -1; |
| 394 } |
| 395 |
| 396 streaminfos_[stream_idx].encoder->SetRates(stream_bitrate_kbps, |
| 397 new_framerate); |
| 394 } | 398 } |
| 395 | 399 |
| 396 return WEBRTC_VIDEO_CODEC_OK; | 400 return WEBRTC_VIDEO_CODEC_OK; |
| 397 } | 401 } |
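For readers unfamiliar with the allocator used above, here is one plausible policy for splitting a total bitrate across simulcast streams: fill the lower streams up to their target first, then hand any remainder to the top stream. This is a sketch only; the actual SimulcastRateAllocator may allocate differently, and the helper name is hypothetical. The simulcastStream fields are the same ones used elsewhere in this file:

    // Sketch of a simulcast bitrate split (assumes <algorithm>, <vector>,
    // and the webrtc::VideoCodec definition are available).
    std::vector<uint32_t> SplitBitrate(const webrtc::VideoCodec& codec,
                                       uint32_t total_kbps) {
      std::vector<uint32_t> kbps(codec.numberOfSimulcastStreams, 0u);
      uint32_t left = total_kbps;
      for (size_t i = 0; i < kbps.size(); ++i) {
        // Give each stream at most its configured target, in order.
        uint32_t target = codec.simulcastStream[i].targetBitrate;
        kbps[i] = std::min(target, left);
        left -= kbps[i];
      }
      if (!kbps.empty()) {
        kbps.back() += left;  // Any remainder goes to the highest stream.
      }
      return kbps;
    }

Note how this interacts with the loop above: a stream allocated 0 kbps is treated as paused (send_stream is false), and a stream that transitions from 0 to a positive rate triggers a key frame request.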
| 398 | 402 |
| 399 EncodedImageCallback::Result SimulcastEncoderAdapter::OnEncodedImage( | 403 EncodedImageCallback::Result SimulcastEncoderAdapter::OnEncodedImage( |
| 400 size_t stream_idx, | 404 size_t stream_idx, |
| 401 const EncodedImage& encodedImage, | 405 const EncodedImage& encodedImage, |
| 402 const CodecSpecificInfo* codecSpecificInfo, | 406 const CodecSpecificInfo* codecSpecificInfo, |
| 403 const RTPFragmentationHeader* fragmentation) { | 407 const RTPFragmentationHeader* fragmentation) { |
| (...skipping 59 matching lines...) |
| 463 return false; | 467 return false; |
| 464 } | 468 } |
| 465 return true; | 469 return true; |
| 466 } | 470 } |
| 467 | 471 |
| 468 const char* SimulcastEncoderAdapter::ImplementationName() const { | 472 const char* SimulcastEncoderAdapter::ImplementationName() const { |
| 469 return implementation_name_.c_str(); | 473 return implementation_name_.c_str(); |
| 470 } | 474 } |
| 471 | 475 |
| 472 } // namespace webrtc | 476 } // namespace webrtc |