/*
 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.h"

#include <algorithm>

// NOTE(ajm): Path provided by gyp.
#include "libyuv/scale.h"  // NOLINT

#include "webrtc/api/video/i420_buffer.h"
#include "webrtc/base/checks.h"
#include "webrtc/modules/video_coding/codecs/vp8/screenshare_layers.h"
#include "webrtc/modules/video_coding/codecs/vp8/simulcast_rate_allocator.h"
#include "webrtc/system_wrappers/include/clock.h"

namespace {

const unsigned int kDefaultMinQp = 2;
const unsigned int kDefaultMaxQp = 56;
// Max qp for lowest spatial resolution when doing simulcast.
const unsigned int kLowestResMaxQp = 45;

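// Returns the sum of the configured max bitrates over the first |streams|
// simulcast streams of |codec|.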
uint32_t SumStreamMaxBitrate(int streams, const webrtc::VideoCodec& codec) {
  uint32_t bitrate_sum = 0;
  for (int i = 0; i < streams; ++i) {
    bitrate_sum += codec.simulcastStream[i].maxBitrate;
  }
  return bitrate_sum;
}

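// Returns the number of simulcast streams to encode: the configured stream
// count, clamped to at least one, and collapsed to a single stream when the
// configured streams carry no max bitrate at all.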
int NumberOfStreams(const webrtc::VideoCodec& codec) {
  int streams =
      codec.numberOfSimulcastStreams < 1 ? 1 : codec.numberOfSimulcastStreams;
  uint32_t simulcast_max_bitrate = SumStreamMaxBitrate(streams, codec);
  if (simulcast_max_bitrate == 0) {
    streams = 1;
  }
  return streams;
}

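// Checks that the topmost simulcast stream matches the codec's full
// resolution and that every stream has the same aspect ratio as the codec.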
bool ValidSimulcastResolutions(const webrtc::VideoCodec& codec,
                               int num_streams) {
  if (codec.width != codec.simulcastStream[num_streams - 1].width ||
      codec.height != codec.simulcastStream[num_streams - 1].height) {
    return false;
  }
  for (int i = 0; i < num_streams; ++i) {
    if (codec.width * codec.simulcastStream[i].height !=
        codec.height * codec.simulcastStream[i].width) {
      return false;
    }
  }
  return true;
}

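// Basic sanity checks on the codec settings shared by all streams. Returns
// WEBRTC_VIDEO_CODEC_OK or WEBRTC_VIDEO_CODEC_ERR_PARAMETER.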
int VerifyCodec(const webrtc::VideoCodec* inst) {
  if (inst == nullptr) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  if (inst->maxFramerate < 1) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  // Allow zero to represent an unspecified maxBitRate.
  if (inst->maxBitrate > 0 && inst->startBitrate > inst->maxBitrate) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  if (inst->width <= 1 || inst->height <= 1) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  if (inst->VP8().automaticResizeOn && inst->numberOfSimulcastStreams > 1) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

// An EncodedImageCallback implementation that forwards calls to a
// SimulcastEncoderAdapter, adding the stream index it was registered with as
// the first parameter to OnEncodedImage.
class AdapterEncodedImageCallback : public webrtc::EncodedImageCallback {
 public:
  AdapterEncodedImageCallback(webrtc::SimulcastEncoderAdapter* adapter,
                              size_t stream_idx)
      : adapter_(adapter), stream_idx_(stream_idx) {}

  EncodedImageCallback::Result OnEncodedImage(
      const webrtc::EncodedImage& encoded_image,
      const webrtc::CodecSpecificInfo* codec_specific_info,
      const webrtc::RTPFragmentationHeader* fragmentation) override {
    return adapter_->OnEncodedImage(stream_idx_, encoded_image,
                                    codec_specific_info, fragmentation);
  }

 private:
  webrtc::SimulcastEncoderAdapter* const adapter_;
  const size_t stream_idx_;
};

// Utility class used to adapt the simulcast id as reported by the temporal
// layers factory, since each sub-encoder will report stream 0.
class TemporalLayersFactoryAdapter : public webrtc::TemporalLayersFactory {
 public:
  TemporalLayersFactoryAdapter(int adapted_simulcast_id,
                               const TemporalLayersFactory& tl_factory)
      : adapted_simulcast_id_(adapted_simulcast_id), tl_factory_(tl_factory) {}
  ~TemporalLayersFactoryAdapter() override {}
  webrtc::TemporalLayers* Create(int simulcast_id,
                                 int temporal_layers,
                                 uint8_t initial_tl0_pic_idx) const override {
    return tl_factory_.Create(adapted_simulcast_id_, temporal_layers,
                              initial_tl0_pic_idx);
  }

  const int adapted_simulcast_id_;
  const TemporalLayersFactory& tl_factory_;
};

}  // namespace

namespace webrtc {

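// |factory| is used to create and destroy the per-stream encoders and must
// outlive the adapter. Rough usage sketch (surrounding objects are
// illustrative): construct the adapter with a factory, call InitEncode(),
// then Encode() and SetRateAllocation() on the encoder task queue, and
// Release() before destruction.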
SimulcastEncoderAdapter::SimulcastEncoderAdapter(VideoEncoderFactory* factory)
    : inited_(0),
      factory_(factory),
      encoded_complete_callback_(nullptr),
      implementation_name_("SimulcastEncoderAdapter") {
  // The adapter is typically created on the worker thread, but operated on
  // the encoder task queue.
  encoder_queue_.Detach();

  memset(&codec_, 0, sizeof(webrtc::VideoCodec));
}

SimulcastEncoderAdapter::~SimulcastEncoderAdapter() {
  RTC_DCHECK(!Initialized());
  DestroyStoredEncoders();
}

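// Releases every per-stream encoder and stores it for possible reuse by a
// later InitEncode() call. Safe to call when not initialized.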
int SimulcastEncoderAdapter::Release() {
  RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_);

  while (!streaminfos_.empty()) {
    VideoEncoder* encoder = streaminfos_.back().encoder;
    encoder->Release();
    // Even though it seems very unlikely, there are no guarantees that the
    // encoder will not call back after being Release()'d. Therefore, we
    // disable the callbacks here.
    encoder->RegisterEncodeCompleteCallback(nullptr);
    streaminfos_.pop_back();  // Deletes callback adapter.
    stored_encoders_.push(encoder);
  }

  // It's legal to move the encoder to another queue now.
  encoder_queue_.Detach();

  rtc::AtomicOps::ReleaseStore(&inited_, 0);

  return WEBRTC_VIDEO_CODEC_OK;
}

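// Creates (or reuses) one encoder per simulcast stream, configures it with a
// stream-specific VideoCodec and start bitrate, and registers a callback
// that tags each encoded image with its stream index.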
int SimulcastEncoderAdapter::InitEncode(const VideoCodec* inst,
                                        int number_of_cores,
                                        size_t max_payload_size) {
  RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_);

  if (number_of_cores < 1) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }

  int ret = VerifyCodec(inst);
  if (ret < 0) {
    return ret;
  }

  ret = Release();
  if (ret < 0) {
    return ret;
  }

  int number_of_streams = NumberOfStreams(*inst);
  RTC_DCHECK_LE(number_of_streams, kMaxSimulcastStreams);
  const bool doing_simulcast = (number_of_streams > 1);

  if (doing_simulcast && !ValidSimulcastResolutions(*inst, number_of_streams)) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }

  codec_ = *inst;
  SimulcastRateAllocator rate_allocator(codec_, nullptr);
  BitrateAllocation allocation = rate_allocator.GetAllocation(
      codec_.startBitrate * 1000, codec_.maxFramerate);
  std::vector<uint32_t> start_bitrates;
  for (int i = 0; i < kMaxSimulcastStreams; ++i) {
    uint32_t stream_bitrate = allocation.GetSpatialLayerSum(i) / 1000;
    start_bitrates.push_back(stream_bitrate);
  }

  std::string implementation_name;
  // Create |number_of_streams| encoder instances and initialize them.
  for (int i = 0; i < number_of_streams; ++i) {
    VideoCodec stream_codec;
    uint32_t start_bitrate_kbps = start_bitrates[i];
    if (!doing_simulcast) {
      stream_codec = codec_;
      stream_codec.numberOfSimulcastStreams = 1;
    } else {
      // Cap start bitrate to the min bitrate in order to avoid strange codec
      // behavior. Since sending will be false, this should not matter.
      start_bitrate_kbps =
          std::max(codec_.simulcastStream[i].minBitrate, start_bitrate_kbps);
      bool highest_resolution_stream = (i == (number_of_streams - 1));
      PopulateStreamCodec(codec_, i, start_bitrate_kbps,
                          highest_resolution_stream, &stream_codec);
    }
    TemporalLayersFactoryAdapter tl_factory_adapter(i,
                                                    *codec_.VP8()->tl_factory);
    stream_codec.VP8()->tl_factory = &tl_factory_adapter;

    // TODO(ronghuawu): Remove once this is handled in VP8EncoderImpl.
    if (stream_codec.qpMax < kDefaultMinQp) {
      stream_codec.qpMax = kDefaultMaxQp;
    }

    // If an encoder instance was stored by a previous Release(), reuse it.
    // TODO(brandtr): Set initial RTP state (e.g., picture_id/tl0_pic_idx)
    // here, when we start storing that state outside the encoder wrappers.
    VideoEncoder* encoder;
    if (!stored_encoders_.empty()) {
      encoder = stored_encoders_.top();
      stored_encoders_.pop();
    } else {
      encoder = factory_->Create();
    }

    ret = encoder->InitEncode(&stream_codec, number_of_cores, max_payload_size);
    if (ret < 0) {
      // Explicitly destroy the current encoder; because we haven't registered
      // a StreamInfo for it yet, Release won't do anything about it.
      factory_->Destroy(encoder);
      Release();
      return ret;
    }
    std::unique_ptr<EncodedImageCallback> callback(
        new AdapterEncodedImageCallback(this, i));
    encoder->RegisterEncodeCompleteCallback(callback.get());
    streaminfos_.emplace_back(encoder, std::move(callback), stream_codec.width,
                              stream_codec.height, start_bitrate_kbps > 0);

    if (i != 0) {
      implementation_name += ", ";
    }
    implementation_name += streaminfos_[i].encoder->ImplementationName();
  }

  if (doing_simulcast) {
    implementation_name_ =
        "SimulcastEncoderAdapter (" + implementation_name + ")";
  } else {
    implementation_name_ = implementation_name;
  }

  // To save memory, don't store encoders that we don't use.
  DestroyStoredEncoders();

  rtc::AtomicOps::ReleaseStore(&inited_, 1);

  return WEBRTC_VIDEO_CODEC_OK;
}

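// Encodes |input_image| once per active stream, downscaling with libyuv when
// the stream resolution differs from the input and the buffer is not a
// native handle. A key frame request on any stream forces one on all active
// streams.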
int SimulcastEncoderAdapter::Encode(
    const VideoFrame& input_image,
    const CodecSpecificInfo* codec_specific_info,
    const std::vector<FrameType>* frame_types) {
  RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_);

  if (!Initialized()) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  if (encoded_complete_callback_ == nullptr) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }

  // All active streams should generate a key frame if
  // a key frame is requested by any stream.
  bool send_key_frame = false;
  if (frame_types) {
    for (size_t i = 0; i < frame_types->size(); ++i) {
      if (frame_types->at(i) == kVideoFrameKey) {
        send_key_frame = true;
        break;
      }
    }
  }
  for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) {
    if (streaminfos_[stream_idx].key_frame_request &&
        streaminfos_[stream_idx].send_stream) {
      send_key_frame = true;
      break;
    }
  }

  int src_width = input_image.width();
  int src_height = input_image.height();
  for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) {
    // Don't encode frames in resolutions that we don't intend to send.
    if (!streaminfos_[stream_idx].send_stream) {
      continue;
    }

    std::vector<FrameType> stream_frame_types;
    if (send_key_frame) {
      stream_frame_types.push_back(kVideoFrameKey);
      streaminfos_[stream_idx].key_frame_request = false;
    } else {
      stream_frame_types.push_back(kVideoFrameDelta);
    }

    int dst_width = streaminfos_[stream_idx].width;
    int dst_height = streaminfos_[stream_idx].height;
    // If scaling isn't required, because the input resolution
    // matches the destination or the input image is empty (e.g.
    // a keyframe request for encoders with internal camera
    // sources) or the source image has a native handle, pass the image on
    // directly. Otherwise, we'll scale it to match what the encoder expects
    // (below).
    // For texture frames, the underlying encoder is expected to be able to
    // correctly sample/scale the source texture.
    // TODO(perkj): ensure that works going forward, and figure out how this
    // affects webrtc:5683.
    if ((dst_width == src_width && dst_height == src_height) ||
        input_image.video_frame_buffer()->type() ==
            VideoFrameBuffer::Type::kNative) {
      int ret = streaminfos_[stream_idx].encoder->Encode(
          input_image, codec_specific_info, &stream_frame_types);
      if (ret != WEBRTC_VIDEO_CODEC_OK) {
        return ret;
      }
    } else {
      rtc::scoped_refptr<I420Buffer> dst_buffer =
          I420Buffer::Create(dst_width, dst_height);
      rtc::scoped_refptr<I420BufferInterface> src_buffer =
          input_image.video_frame_buffer()->ToI420();
      libyuv::I420Scale(src_buffer->DataY(), src_buffer->StrideY(),
                        src_buffer->DataU(), src_buffer->StrideU(),
                        src_buffer->DataV(), src_buffer->StrideV(), src_width,
                        src_height, dst_buffer->MutableDataY(),
                        dst_buffer->StrideY(), dst_buffer->MutableDataU(),
                        dst_buffer->StrideU(), dst_buffer->MutableDataV(),
                        dst_buffer->StrideV(), dst_width, dst_height,
                        libyuv::kFilterBilinear);

      int ret = streaminfos_[stream_idx].encoder->Encode(
          VideoFrame(dst_buffer, input_image.timestamp(),
                     input_image.render_time_ms(), webrtc::kVideoRotation_0),
          codec_specific_info, &stream_frame_types);
      if (ret != WEBRTC_VIDEO_CODEC_OK) {
        return ret;
      }
    }
  }

  return WEBRTC_VIDEO_CODEC_OK;
}

int SimulcastEncoderAdapter::RegisterEncodeCompleteCallback(
    EncodedImageCallback* callback) {
  RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_);
  encoded_complete_callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}

int SimulcastEncoderAdapter::SetChannelParameters(uint32_t packet_loss,
                                                  int64_t rtt) {
  RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_);
  for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) {
    streaminfos_[stream_idx].encoder->SetChannelParameters(packet_loss, rtt);
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

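// Validates the new allocation against the configured bitrate limits and
// splits it per simulcast stream, requesting a key frame for any stream that
// is (re)activated by a nonzero allocation.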
int SimulcastEncoderAdapter::SetRateAllocation(const BitrateAllocation& bitrate,
                                               uint32_t new_framerate) {
  RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_);

  if (!Initialized()) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }

  if (new_framerate < 1) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }

  if (codec_.maxBitrate > 0 && bitrate.get_sum_kbps() > codec_.maxBitrate) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }

  if (bitrate.get_sum_bps() > 0) {
    // Make sure the bitrate fits the configured min bitrates. 0 is a special
    // value that means paused, though, so leave it alone.
    if (bitrate.get_sum_kbps() < codec_.minBitrate) {
      return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
    }

    if (codec_.numberOfSimulcastStreams > 0 &&
        bitrate.get_sum_kbps() < codec_.simulcastStream[0].minBitrate) {
      return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
    }
  }

  codec_.maxFramerate = new_framerate;

  for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) {
    uint32_t stream_bitrate_kbps =
        bitrate.GetSpatialLayerSum(stream_idx) / 1000;

    // Need a key frame if we have not sent this stream before.
    if (stream_bitrate_kbps > 0 && !streaminfos_[stream_idx].send_stream) {
      streaminfos_[stream_idx].key_frame_request = true;
    }
    streaminfos_[stream_idx].send_stream = stream_bitrate_kbps > 0;

    // Slice the temporal layers out of the full allocation and pass it on to
    // the encoder handling the current simulcast stream.
    BitrateAllocation stream_allocation;
    for (int i = 0; i < kMaxTemporalStreams; ++i) {
      stream_allocation.SetBitrate(0, i, bitrate.GetBitrate(stream_idx, i));
    }
    streaminfos_[stream_idx].encoder->SetRateAllocation(stream_allocation,
                                                        new_framerate);
  }

  return WEBRTC_VIDEO_CODEC_OK;
}

// TODO(brandtr): Add task checker to this member function, when all encoder
// callbacks are coming in on the encoder queue.
EncodedImageCallback::Result SimulcastEncoderAdapter::OnEncodedImage(
    size_t stream_idx,
    const EncodedImage& encodedImage,
    const CodecSpecificInfo* codecSpecificInfo,
    const RTPFragmentationHeader* fragmentation) {
  CodecSpecificInfo stream_codec_specific = *codecSpecificInfo;
  stream_codec_specific.codec_name = implementation_name_.c_str();
  CodecSpecificInfoVP8* vp8Info = &(stream_codec_specific.codecSpecific.VP8);
  vp8Info->simulcastIdx = stream_idx;

  return encoded_complete_callback_->OnEncodedImage(
      encodedImage, &stream_codec_specific, fragmentation);
}

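// Derives the per-stream VideoCodec settings for |stream_index| from the
// overall codec configuration: resolution, bitrates, temporal layers, qpMax,
// and VP8 complexity/denoising tuned per resolution.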
void SimulcastEncoderAdapter::PopulateStreamCodec(
    const webrtc::VideoCodec& inst,
    int stream_index,
    uint32_t start_bitrate_kbps,
    bool highest_resolution_stream,
    webrtc::VideoCodec* stream_codec) {
  *stream_codec = inst;

  // Stream specific settings.
  stream_codec->VP8()->numberOfTemporalLayers =
      inst.simulcastStream[stream_index].numberOfTemporalLayers;
  stream_codec->numberOfSimulcastStreams = 0;
  stream_codec->width = inst.simulcastStream[stream_index].width;
  stream_codec->height = inst.simulcastStream[stream_index].height;
  stream_codec->maxBitrate = inst.simulcastStream[stream_index].maxBitrate;
  stream_codec->minBitrate = inst.simulcastStream[stream_index].minBitrate;
  stream_codec->qpMax = inst.simulcastStream[stream_index].qpMax;
  // Settings that are based on stream/resolution.
  const bool lowest_resolution_stream = (stream_index == 0);
  if (lowest_resolution_stream) {
    // Settings for lowest spatial resolutions.
    stream_codec->qpMax = kLowestResMaxQp;
  }
  if (!highest_resolution_stream) {
    // For resolutions below CIF, set the codec |complexity| parameter to
    // kComplexityHigher, which maps to cpu_used = -4.
    int pixels_per_frame = stream_codec->width * stream_codec->height;
    if (pixels_per_frame < 352 * 288) {
      stream_codec->VP8()->complexity = webrtc::kComplexityHigher;
    }
    // Turn off denoising for all streams but the highest resolution.
    stream_codec->VP8()->denoisingOn = false;
  }
  // TODO(ronghuawu): what to do with targetBitrate.

  stream_codec->startBitrate = start_bitrate_kbps;
}

bool SimulcastEncoderAdapter::Initialized() const {
  return rtc::AtomicOps::AcquireLoad(&inited_) == 1;
}

void SimulcastEncoderAdapter::DestroyStoredEncoders() {
  while (!stored_encoders_.empty()) {
    VideoEncoder* encoder = stored_encoders_.top();
    factory_->Destroy(encoder);
    stored_encoders_.pop();
  }
}

bool SimulcastEncoderAdapter::SupportsNativeHandle() const {
  RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_);
  // We should not be calling this method before streaminfos_ are configured.
  RTC_DCHECK(!streaminfos_.empty());
  for (const auto& streaminfo : streaminfos_) {
    if (!streaminfo.encoder->SupportsNativeHandle()) {
      return false;
    }
  }
  return true;
}

VideoEncoder::ScalingSettings SimulcastEncoderAdapter::GetScalingSettings()
    const {
  // TODO(brandtr): Investigate why the sequence checker below fails on mac.
  // RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_);
  // Turn off quality scaling for simulcast.
  if (!Initialized() || NumberOfStreams(codec_) != 1) {
    return VideoEncoder::ScalingSettings(false);
  }
  return streaminfos_[0].encoder->GetScalingSettings();
}

const char* SimulcastEncoderAdapter::ImplementationName() const {
  RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_);
  return implementation_name_.c_str();
}

}  // namespace webrtc