| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| (...skipping 44 matching lines...) | |
| 55 for (int i = 0; i < num_streams; ++i) { | 55 for (int i = 0; i < num_streams; ++i) { |
| 56 if (codec.width * codec.simulcastStream[i].height != | 56 if (codec.width * codec.simulcastStream[i].height != |
| 57 codec.height * codec.simulcastStream[i].width) { | 57 codec.height * codec.simulcastStream[i].width) { |
| 58 return false; | 58 return false; |
| 59 } | 59 } |
| 60 } | 60 } |
| 61 return true; | 61 return true; |
| 62 } | 62 } |
| 63 | 63 |
| 64 int VerifyCodec(const webrtc::VideoCodec* inst) { | 64 int VerifyCodec(const webrtc::VideoCodec* inst) { |
| 65 if (inst == NULL) { | 65 if (inst == nullptr) { |
| 66 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 66 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 67 } | 67 } |
| 68 if (inst->maxFramerate < 1) { | 68 if (inst->maxFramerate < 1) { |
| 69 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 69 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 70 } | 70 } |
| 71 // allow zero to represent an unspecified maxBitRate | 71 // allow zero to represent an unspecified maxBitRate |
| 72 if (inst->maxBitrate > 0 && inst->startBitrate > inst->maxBitrate) { | 72 if (inst->maxBitrate > 0 && inst->startBitrate > inst->maxBitrate) { |
| 73 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 73 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 74 } | 74 } |
| 75 if (inst->width <= 1 || inst->height <= 1) { | 75 if (inst->width <= 1 || inst->height <= 1) { |
| (...skipping 44 matching lines...) | |
| 120 | 120 |
| 121 const int adapted_simulcast_id_; | 121 const int adapted_simulcast_id_; |
| 122 const TemporalLayersFactory& tl_factory_; | 122 const TemporalLayersFactory& tl_factory_; |
| 123 }; | 123 }; |
| 124 | 124 |
| 125 } // namespace | 125 } // namespace |
| 126 | 126 |
| 127 namespace webrtc { | 127 namespace webrtc { |
| 128 | 128 |
| 129 SimulcastEncoderAdapter::SimulcastEncoderAdapter(VideoEncoderFactory* factory) | 129 SimulcastEncoderAdapter::SimulcastEncoderAdapter(VideoEncoderFactory* factory) |
| 130 : factory_(factory), | 130 : inited_(0), |
| 131 factory_(factory), |
| 131 encoded_complete_callback_(nullptr), | 132 encoded_complete_callback_(nullptr), |
| 132 implementation_name_("SimulcastEncoderAdapter") { | 133 implementation_name_("SimulcastEncoderAdapter") { |
| 134 // The adapter is typically created on the worker thread, but operated on |
| 135 // the encoder task queue. |
| 136 encoder_queue_.Detach(); |
| 137 |
| 133 memset(&codec_, 0, sizeof(webrtc::VideoCodec)); | 138 memset(&codec_, 0, sizeof(webrtc::VideoCodec)); |
| 134 } | 139 } |
| 135 | 140 |
| 136 SimulcastEncoderAdapter::~SimulcastEncoderAdapter() { | 141 SimulcastEncoderAdapter::~SimulcastEncoderAdapter() { |
| 137 Release(); | 142 RTC_DCHECK(!Initialized()); |
| 143 DestroyStoredEncoders(); |
| 138 } | 144 } |
| 139 | 145 |
| 140 int SimulcastEncoderAdapter::Release() { | 146 int SimulcastEncoderAdapter::Release() { |
| 141 // TODO(pbos): Keep the last encoder instance but call ::Release() on it, then | 147 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_); |
| 142 // re-use this instance in ::InitEncode(). This means that changing | 148 |
| 143 // resolutions doesn't require reallocation of the first encoder, but only | |
| 144 // reinitialization, which makes sense. Then Destroy this instance instead in | |
| 145 // ~SimulcastEncoderAdapter(). | |
| 146 while (!streaminfos_.empty()) { | 149 while (!streaminfos_.empty()) { |
| 147 VideoEncoder* encoder = streaminfos_.back().encoder; | 150 VideoEncoder* encoder = streaminfos_.back().encoder; |
| 148 EncodedImageCallback* callback = streaminfos_.back().callback; | |
| 149 encoder->Release(); | 151 encoder->Release(); |
| 150 factory_->Destroy(encoder); | 152 // Even though it seems very unlikely, there are no guarantees that the |
| 151 delete callback; | 153 // encoder will not call back after being Release()'d. Therefore, we disable |
| 152 streaminfos_.pop_back(); | 154 // the callbacks here. |
| 155 encoder->RegisterEncodeCompleteCallback(nullptr); |
| 156 streaminfos_.pop_back(); // Deletes callback adapter. |
| 157 stored_encoders_.push(encoder); |
| 153 } | 158 } |
| 159 |
| 160 // It's legal to move the encoder to another queue now. |
| 161 encoder_queue_.Detach(); |
| 162 |
| 163 rtc::AtomicOps::ReleaseStore(&inited_, 0); |
| 164 |
| 154 return WEBRTC_VIDEO_CODEC_OK; | 165 return WEBRTC_VIDEO_CODEC_OK; |
| 155 } | 166 } |
| 156 | 167 |
| 157 int SimulcastEncoderAdapter::InitEncode(const VideoCodec* inst, | 168 int SimulcastEncoderAdapter::InitEncode(const VideoCodec* inst, |
| 158 int number_of_cores, | 169 int number_of_cores, |
| 159 size_t max_payload_size) { | 170 size_t max_payload_size) { |
| 171 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_); |
| 172 |
| 160 if (number_of_cores < 1) { | 173 if (number_of_cores < 1) { |
| 161 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 174 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 162 } | 175 } |
| 163 | 176 |
| 164 int ret = VerifyCodec(inst); | 177 int ret = VerifyCodec(inst); |
| 165 if (ret < 0) { | 178 if (ret < 0) { |
| 166 return ret; | 179 return ret; |
| 167 } | 180 } |
| 168 | 181 |
| 169 ret = Release(); | 182 ret = Release(); |
| 170 if (ret < 0) { | 183 if (ret < 0) { |
| 171 return ret; | 184 return ret; |
| 172 } | 185 } |
| 173 | 186 |
| 174 int number_of_streams = NumberOfStreams(*inst); | 187 int number_of_streams = NumberOfStreams(*inst); |
| 188 RTC_DCHECK_LE(number_of_streams, kMaxSimulcastStreams); |
| 175 const bool doing_simulcast = (number_of_streams > 1); | 189 const bool doing_simulcast = (number_of_streams > 1); |
| 176 | 190 |
| 177 if (doing_simulcast && !ValidSimulcastResolutions(*inst, number_of_streams)) { | 191 if (doing_simulcast && !ValidSimulcastResolutions(*inst, number_of_streams)) { |
| 178 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 192 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 179 } | 193 } |
| 180 | 194 |
| 181 codec_ = *inst; | 195 codec_ = *inst; |
| 182 SimulcastRateAllocator rate_allocator(codec_, nullptr); | 196 SimulcastRateAllocator rate_allocator(codec_, nullptr); |
| 183 BitrateAllocation allocation = rate_allocator.GetAllocation( | 197 BitrateAllocation allocation = rate_allocator.GetAllocation( |
| 184 codec_.startBitrate * 1000, codec_.maxFramerate); | 198 codec_.startBitrate * 1000, codec_.maxFramerate); |
| (...skipping 10 matching lines...) | |
| 195 uint32_t start_bitrate_kbps = start_bitrates[i]; | 209 uint32_t start_bitrate_kbps = start_bitrates[i]; |
| 196 if (!doing_simulcast) { | 210 if (!doing_simulcast) { |
| 197 stream_codec = codec_; | 211 stream_codec = codec_; |
| 198 stream_codec.numberOfSimulcastStreams = 1; | 212 stream_codec.numberOfSimulcastStreams = 1; |
| 199 } else { | 213 } else { |
| 200 // Cap start bitrate to the min bitrate in order to avoid strange codec | 214 // Cap start bitrate to the min bitrate in order to avoid strange codec |
| 201 // behavior. Since sending will be false, this should not matter. | 215 // behavior. Since sending will be false, this should not matter. |
| 202 start_bitrate_kbps = | 216 start_bitrate_kbps = |
| 203 std::max(codec_.simulcastStream[i].minBitrate, start_bitrate_kbps); | 217 std::max(codec_.simulcastStream[i].minBitrate, start_bitrate_kbps); |
| 204 bool highest_resolution_stream = (i == (number_of_streams - 1)); | 218 bool highest_resolution_stream = (i == (number_of_streams - 1)); |
| 205 PopulateStreamCodec(&codec_, i, start_bitrate_kbps, | 219 PopulateStreamCodec(codec_, i, start_bitrate_kbps, |
| 206 highest_resolution_stream, &stream_codec); | 220 highest_resolution_stream, &stream_codec); |
| 207 } | 221 } |
| 208 TemporalLayersFactoryAdapter tl_factory_adapter(i, | 222 TemporalLayersFactoryAdapter tl_factory_adapter(i, |
| 209 *codec_.VP8()->tl_factory); | 223 *codec_.VP8()->tl_factory); |
| 210 stream_codec.VP8()->tl_factory = &tl_factory_adapter; | 224 stream_codec.VP8()->tl_factory = &tl_factory_adapter; |
| 211 | 225 |
| 212 // TODO(ronghuawu): Remove once this is handled in VP8EncoderImpl. | 226 // TODO(ronghuawu): Remove once this is handled in VP8EncoderImpl. |
| 213 if (stream_codec.qpMax < kDefaultMinQp) { | 227 if (stream_codec.qpMax < kDefaultMinQp) { |
| 214 stream_codec.qpMax = kDefaultMaxQp; | 228 stream_codec.qpMax = kDefaultMaxQp; |
| 215 } | 229 } |
| 216 | 230 |
| 217 VideoEncoder* encoder = factory_->Create(); | 231 // If an existing encoder instance exists, reuse it. |
| 232 // TODO(brandtr): Set initial RTP state (e.g., picture_id/tl0_pic_idx) here, |
| 233 // when we start storing that state outside the encoder wrappers. |
| 234 VideoEncoder* encoder; |
| 235 if (!stored_encoders_.empty()) { |
| 236 encoder = stored_encoders_.top(); |
| 237 stored_encoders_.pop(); |
| 238 } else { |
| 239 encoder = factory_->Create(); |
| 240 } |
| 241 |
| 218 ret = encoder->InitEncode(&stream_codec, number_of_cores, max_payload_size); | 242 ret = encoder->InitEncode(&stream_codec, number_of_cores, max_payload_size); |
| 219 if (ret < 0) { | 243 if (ret < 0) { |
| 220 // Explicitly destroy the current encoder; because we haven't registered a | 244 // Explicitly destroy the current encoder; because we haven't registered a |
| 221 // StreamInfo for it yet, Release won't do anything about it. | 245 // StreamInfo for it yet, Release won't do anything about it. |
| 222 factory_->Destroy(encoder); | 246 factory_->Destroy(encoder); |
| 223 Release(); | 247 Release(); |
| 224 return ret; | 248 return ret; |
| 225 } | 249 } |
| 226 EncodedImageCallback* callback = new AdapterEncodedImageCallback(this, i); | 250 std::unique_ptr<EncodedImageCallback> callback( |
| 227 encoder->RegisterEncodeCompleteCallback(callback); | 251 new AdapterEncodedImageCallback(this, i)); |
| 228 streaminfos_.push_back(StreamInfo(encoder, callback, stream_codec.width, | 252 encoder->RegisterEncodeCompleteCallback(callback.get()); |
| 229 stream_codec.height, | 253 streaminfos_.emplace_back(encoder, std::move(callback), stream_codec.width, |
| 230 start_bitrate_kbps > 0)); | 254 stream_codec.height, start_bitrate_kbps > 0); |
| 231 if (i != 0) | 255 |
| 256 if (i != 0) { |
| 232 implementation_name += ", "; | 257 implementation_name += ", "; |
| 258 } |
| 233 implementation_name += streaminfos_[i].encoder->ImplementationName(); | 259 implementation_name += streaminfos_[i].encoder->ImplementationName(); |
| 234 } | 260 } |
| 261 |
| 235 if (doing_simulcast) { | 262 if (doing_simulcast) { |
| 236 implementation_name_ = | 263 implementation_name_ = |
| 237 "SimulcastEncoderAdapter (" + implementation_name + ")"; | 264 "SimulcastEncoderAdapter (" + implementation_name + ")"; |
| 238 } else { | 265 } else { |
| 239 implementation_name_ = implementation_name; | 266 implementation_name_ = implementation_name; |
| 240 } | 267 } |
| 268 |
| 269 // To save memory, don't store encoders that we don't use. |
| 270 DestroyStoredEncoders(); |
| 271 |
| 272 rtc::AtomicOps::ReleaseStore(&inited_, 1); |
| 273 |
| 241 return WEBRTC_VIDEO_CODEC_OK; | 274 return WEBRTC_VIDEO_CODEC_OK; |
| 242 } | 275 } |
| 243 | 276 |
| 244 int SimulcastEncoderAdapter::Encode( | 277 int SimulcastEncoderAdapter::Encode( |
| 245 const VideoFrame& input_image, | 278 const VideoFrame& input_image, |
| 246 const CodecSpecificInfo* codec_specific_info, | 279 const CodecSpecificInfo* codec_specific_info, |
| 247 const std::vector<FrameType>* frame_types) { | 280 const std::vector<FrameType>* frame_types) { |
| 281 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_); |
| 282 |
| 248 if (!Initialized()) { | 283 if (!Initialized()) { |
| 249 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 284 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
| 250 } | 285 } |
| 251 if (encoded_complete_callback_ == NULL) { | 286 if (encoded_complete_callback_ == nullptr) { |
| 252 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 287 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
| 253 } | 288 } |
| 254 | 289 |
| 255 // All active streams should generate a key frame if | 290 // All active streams should generate a key frame if |
| 256 // a key frame is requested by any stream. | 291 // a key frame is requested by any stream. |
| 257 bool send_key_frame = false; | 292 bool send_key_frame = false; |
| 258 if (frame_types) { | 293 if (frame_types) { |
| 259 for (size_t i = 0; i < frame_types->size(); ++i) { | 294 for (size_t i = 0; i < frame_types->size(); ++i) { |
| 260 if (frame_types->at(i) == kVideoFrameKey) { | 295 if (frame_types->at(i) == kVideoFrameKey) { |
| 261 send_key_frame = true; | 296 send_key_frame = true; |
| 262 break; | 297 break; |
| 263 } | 298 } |
| 264 } | 299 } |
| 265 } | 300 } |
| 266 for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) { | 301 for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) { |
| 267 if (streaminfos_[stream_idx].key_frame_request && | 302 if (streaminfos_[stream_idx].key_frame_request && |
| 268 streaminfos_[stream_idx].send_stream) { | 303 streaminfos_[stream_idx].send_stream) { |
| 269 send_key_frame = true; | 304 send_key_frame = true; |
| 270 break; | 305 break; |
| 271 } | 306 } |
| 272 } | 307 } |
| 273 | 308 |
| 274 int src_width = input_image.width(); | 309 int src_width = input_image.width(); |
| 275 int src_height = input_image.height(); | 310 int src_height = input_image.height(); |
| 276 for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) { | 311 for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) { |
| 277 // Don't encode frames in resolutions that we don't intend to send. | 312 // Don't encode frames in resolutions that we don't intend to send. |
| 278 if (!streaminfos_[stream_idx].send_stream) | 313 if (!streaminfos_[stream_idx].send_stream) { |
| 279 continue; | 314 continue; |
| 315 } |
| 280 | 316 |
| 281 std::vector<FrameType> stream_frame_types; | 317 std::vector<FrameType> stream_frame_types; |
| 282 if (send_key_frame) { | 318 if (send_key_frame) { |
| 283 stream_frame_types.push_back(kVideoFrameKey); | 319 stream_frame_types.push_back(kVideoFrameKey); |
| 284 streaminfos_[stream_idx].key_frame_request = false; | 320 streaminfos_[stream_idx].key_frame_request = false; |
| 285 } else { | 321 } else { |
| 286 stream_frame_types.push_back(kVideoFrameDelta); | 322 stream_frame_types.push_back(kVideoFrameDelta); |
| 287 } | 323 } |
| 288 | 324 |
| 289 int dst_width = streaminfos_[stream_idx].width; | 325 int dst_width = streaminfos_[stream_idx].width; |
| (...skipping 41 matching lines...) | |
| 331 return ret; | 367 return ret; |
| 332 } | 368 } |
| 333 } | 369 } |
| 334 } | 370 } |
| 335 | 371 |
| 336 return WEBRTC_VIDEO_CODEC_OK; | 372 return WEBRTC_VIDEO_CODEC_OK; |
| 337 } | 373 } |
| 338 | 374 |
| 339 int SimulcastEncoderAdapter::RegisterEncodeCompleteCallback( | 375 int SimulcastEncoderAdapter::RegisterEncodeCompleteCallback( |
| 340 EncodedImageCallback* callback) { | 376 EncodedImageCallback* callback) { |
| 377 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_); |
| 341 encoded_complete_callback_ = callback; | 378 encoded_complete_callback_ = callback; |
| 342 return WEBRTC_VIDEO_CODEC_OK; | 379 return WEBRTC_VIDEO_CODEC_OK; |
| 343 } | 380 } |
| 344 | 381 |
| 345 int SimulcastEncoderAdapter::SetChannelParameters(uint32_t packet_loss, | 382 int SimulcastEncoderAdapter::SetChannelParameters(uint32_t packet_loss, |
| 346 int64_t rtt) { | 383 int64_t rtt) { |
| 384 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_); |
| 347 for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) { | 385 for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) { |
| 348 streaminfos_[stream_idx].encoder->SetChannelParameters(packet_loss, rtt); | 386 streaminfos_[stream_idx].encoder->SetChannelParameters(packet_loss, rtt); |
| 349 } | 387 } |
| 350 return WEBRTC_VIDEO_CODEC_OK; | 388 return WEBRTC_VIDEO_CODEC_OK; |
| 351 } | 389 } |
| 352 | 390 |
| 353 int SimulcastEncoderAdapter::SetRateAllocation(const BitrateAllocation& bitrate, | 391 int SimulcastEncoderAdapter::SetRateAllocation(const BitrateAllocation& bitrate, |
| 354 uint32_t new_framerate) { | 392 uint32_t new_framerate) { |
| 355 if (!Initialized()) | 393 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_); |
| 394 |
| 395 if (!Initialized()) { |
| 356 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 396 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
| 397 } |
| 357 | 398 |
| 358 if (new_framerate < 1) | 399 if (new_framerate < 1) { |
| 359 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 400 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 401 } |
| 360 | 402 |
| 361 if (codec_.maxBitrate > 0 && bitrate.get_sum_kbps() > codec_.maxBitrate) | 403 if (codec_.maxBitrate > 0 && bitrate.get_sum_kbps() > codec_.maxBitrate) { |
| 362 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 404 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 405 } |
| 363 | 406 |
| 364 if (bitrate.get_sum_bps() > 0) { | 407 if (bitrate.get_sum_bps() > 0) { |
| 365 // Make sure the bitrate fits the configured min bitrates. 0 is a special | 408 // Make sure the bitrate fits the configured min bitrates. 0 is a special |
| 366 // value that means paused, though, so leave it alone. | 409 // value that means paused, though, so leave it alone. |
| 367 if (bitrate.get_sum_kbps() < codec_.minBitrate) | 410 if (bitrate.get_sum_kbps() < codec_.minBitrate) { |
| 368 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 411 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 412 } |
| 369 | 413 |
| 370 if (codec_.numberOfSimulcastStreams > 0 && | 414 if (codec_.numberOfSimulcastStreams > 0 && |
| 371 bitrate.get_sum_kbps() < codec_.simulcastStream[0].minBitrate) { | 415 bitrate.get_sum_kbps() < codec_.simulcastStream[0].minBitrate) { |
| 372 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 416 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 373 } | 417 } |
| 374 } | 418 } |
| 375 | 419 |
| 376 codec_.maxFramerate = new_framerate; | 420 codec_.maxFramerate = new_framerate; |
| 377 | 421 |
| 378 for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) { | 422 for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) { |
| 379 uint32_t stream_bitrate_kbps = | 423 uint32_t stream_bitrate_kbps = |
| 380 bitrate.GetSpatialLayerSum(stream_idx) / 1000; | 424 bitrate.GetSpatialLayerSum(stream_idx) / 1000; |
| 381 | 425 |
| 382 // Need a key frame if we have not sent this stream before. | 426 // Need a key frame if we have not sent this stream before. |
| 383 if (stream_bitrate_kbps > 0 && !streaminfos_[stream_idx].send_stream) { | 427 if (stream_bitrate_kbps > 0 && !streaminfos_[stream_idx].send_stream) { |
| 384 streaminfos_[stream_idx].key_frame_request = true; | 428 streaminfos_[stream_idx].key_frame_request = true; |
| 385 } | 429 } |
| 386 streaminfos_[stream_idx].send_stream = stream_bitrate_kbps > 0; | 430 streaminfos_[stream_idx].send_stream = stream_bitrate_kbps > 0; |
| 387 | 431 |
| 388 // Slice the temporal layers out of the full allocation and pass it on to | 432 // Slice the temporal layers out of the full allocation and pass it on to |
| 389 // the encoder handling the current simulcast stream. | 433 // the encoder handling the current simulcast stream. |
| 390 BitrateAllocation stream_allocation; | 434 BitrateAllocation stream_allocation; |
| 391 for (int i = 0; i < kMaxTemporalStreams; ++i) | 435 for (int i = 0; i < kMaxTemporalStreams; ++i) { |
| 392 stream_allocation.SetBitrate(0, i, bitrate.GetBitrate(stream_idx, i)); | 436 stream_allocation.SetBitrate(0, i, bitrate.GetBitrate(stream_idx, i)); |
| 437 } |
| 393 streaminfos_[stream_idx].encoder->SetRateAllocation(stream_allocation, | 438 streaminfos_[stream_idx].encoder->SetRateAllocation(stream_allocation, |
| 394 new_framerate); | 439 new_framerate); |
| 395 } | 440 } |
| 396 | 441 |
| 397 return WEBRTC_VIDEO_CODEC_OK; | 442 return WEBRTC_VIDEO_CODEC_OK; |
| 398 } | 443 } |
| 399 | 444 |
| 445 // TODO(brandtr): Add task checker to this member function, when all encoder |
| 446 // callbacks are coming in on the encoder queue. |
| 400 EncodedImageCallback::Result SimulcastEncoderAdapter::OnEncodedImage( | 447 EncodedImageCallback::Result SimulcastEncoderAdapter::OnEncodedImage( |
| 401 size_t stream_idx, | 448 size_t stream_idx, |
| 402 const EncodedImage& encodedImage, | 449 const EncodedImage& encodedImage, |
| 403 const CodecSpecificInfo* codecSpecificInfo, | 450 const CodecSpecificInfo* codecSpecificInfo, |
| 404 const RTPFragmentationHeader* fragmentation) { | 451 const RTPFragmentationHeader* fragmentation) { |
| 405 CodecSpecificInfo stream_codec_specific = *codecSpecificInfo; | 452 CodecSpecificInfo stream_codec_specific = *codecSpecificInfo; |
| 406 stream_codec_specific.codec_name = implementation_name_.c_str(); | 453 stream_codec_specific.codec_name = implementation_name_.c_str(); |
| 407 CodecSpecificInfoVP8* vp8Info = &(stream_codec_specific.codecSpecific.VP8); | 454 CodecSpecificInfoVP8* vp8Info = &(stream_codec_specific.codecSpecific.VP8); |
| 408 vp8Info->simulcastIdx = stream_idx; | 455 vp8Info->simulcastIdx = stream_idx; |
| 409 | 456 |
| 410 return encoded_complete_callback_->OnEncodedImage( | 457 return encoded_complete_callback_->OnEncodedImage( |
| 411 encodedImage, &stream_codec_specific, fragmentation); | 458 encodedImage, &stream_codec_specific, fragmentation); |
| 412 } | 459 } |
| 413 | 460 |
| 414 void SimulcastEncoderAdapter::PopulateStreamCodec( | 461 void SimulcastEncoderAdapter::PopulateStreamCodec( |
| 415 const webrtc::VideoCodec* inst, | 462 const webrtc::VideoCodec& inst, |
| 416 int stream_index, | 463 int stream_index, |
| 417 uint32_t start_bitrate_kbps, | 464 uint32_t start_bitrate_kbps, |
| 418 bool highest_resolution_stream, | 465 bool highest_resolution_stream, |
| 419 webrtc::VideoCodec* stream_codec) { | 466 webrtc::VideoCodec* stream_codec) { |
| 420 *stream_codec = *inst; | 467 *stream_codec = inst; |
| 421 | 468 |
| 422 // Stream specific settings. | 469 // Stream specific settings. |
| 423 stream_codec->VP8()->numberOfTemporalLayers = | 470 stream_codec->VP8()->numberOfTemporalLayers = |
| 424 inst->simulcastStream[stream_index].numberOfTemporalLayers; | 471 inst.simulcastStream[stream_index].numberOfTemporalLayers; |
| 425 stream_codec->numberOfSimulcastStreams = 0; | 472 stream_codec->numberOfSimulcastStreams = 0; |
| 426 stream_codec->width = inst->simulcastStream[stream_index].width; | 473 stream_codec->width = inst.simulcastStream[stream_index].width; |
| 427 stream_codec->height = inst->simulcastStream[stream_index].height; | 474 stream_codec->height = inst.simulcastStream[stream_index].height; |
| 428 stream_codec->maxBitrate = inst->simulcastStream[stream_index].maxBitrate; | 475 stream_codec->maxBitrate = inst.simulcastStream[stream_index].maxBitrate; |
| 429 stream_codec->minBitrate = inst->simulcastStream[stream_index].minBitrate; | 476 stream_codec->minBitrate = inst.simulcastStream[stream_index].minBitrate; |
| 430 stream_codec->qpMax = inst->simulcastStream[stream_index].qpMax; | 477 stream_codec->qpMax = inst.simulcastStream[stream_index].qpMax; |
| 431 // Settings that are based on stream/resolution. | 478 // Settings that are based on stream/resolution. |
| 432 if (stream_index == 0) { | 479 const bool lowest_resolution_stream = (stream_index == 0); |
| 480 if (lowest_resolution_stream) { |
| 433 // Settings for lowest spatial resolutions. | 481 // Settings for lowest spatial resolutions. |
| 434 stream_codec->qpMax = kLowestResMaxQp; | 482 stream_codec->qpMax = kLowestResMaxQp; |
| 435 } | 483 } |
| 436 if (!highest_resolution_stream) { | 484 if (!highest_resolution_stream) { |
| 437 // For resolutions below CIF, set the codec |complexity| parameter to | 485 // For resolutions below CIF, set the codec |complexity| parameter to |
| 438 // kComplexityHigher, which maps to cpu_used = -4. | 486 // kComplexityHigher, which maps to cpu_used = -4. |
| 439 int pixels_per_frame = stream_codec->width * stream_codec->height; | 487 int pixels_per_frame = stream_codec->width * stream_codec->height; |
| 440 if (pixels_per_frame < 352 * 288) { | 488 if (pixels_per_frame < 352 * 288) { |
| 441 stream_codec->VP8()->complexity = webrtc::kComplexityHigher; | 489 stream_codec->VP8()->complexity = webrtc::kComplexityHigher; |
| 442 } | 490 } |
| 443 // Turn off denoising for all streams but the highest resolution. | 491 // Turn off denoising for all streams but the highest resolution. |
| 444 stream_codec->VP8()->denoisingOn = false; | 492 stream_codec->VP8()->denoisingOn = false; |
| 445 } | 493 } |
| 446 // TODO(ronghuawu): what to do with targetBitrate. | 494 // TODO(ronghuawu): what to do with targetBitrate. |
| 447 | 495 |
| 448 stream_codec->startBitrate = start_bitrate_kbps; | 496 stream_codec->startBitrate = start_bitrate_kbps; |
| 449 } | 497 } |
| 450 | 498 |
| 451 bool SimulcastEncoderAdapter::Initialized() const { | 499 bool SimulcastEncoderAdapter::Initialized() const { |
| 452 return !streaminfos_.empty(); | 500 return rtc::AtomicOps::AcquireLoad(&inited_) == 1; |
| 501 } |
| 502 |
| 503 void SimulcastEncoderAdapter::DestroyStoredEncoders() { |
| 504 while (!stored_encoders_.empty()) { |
| 505 VideoEncoder* encoder = stored_encoders_.top(); |
| 506 factory_->Destroy(encoder); |
| 507 stored_encoders_.pop(); |
| 508 } |
| 453 } | 509 } |
| 454 | 510 |
| 455 bool SimulcastEncoderAdapter::SupportsNativeHandle() const { | 511 bool SimulcastEncoderAdapter::SupportsNativeHandle() const { |
| 512 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_); |
| 456 // We should not be calling this method before streaminfos_ are configured. | 513 // We should not be calling this method before streaminfos_ are configured. |
| 457 RTC_DCHECK(!streaminfos_.empty()); | 514 RTC_DCHECK(!streaminfos_.empty()); |
| 458 for (const auto& streaminfo : streaminfos_) { | 515 for (const auto& streaminfo : streaminfos_) { |
| 459 if (!streaminfo.encoder->SupportsNativeHandle()) | 516 if (!streaminfo.encoder->SupportsNativeHandle()) { |
| 460 return false; | 517 return false; |
| 518 } |
| 461 } | 519 } |
| 462 return true; | 520 return true; |
| 463 } | 521 } |
| 464 | 522 |
| 465 VideoEncoder::ScalingSettings SimulcastEncoderAdapter::GetScalingSettings() | 523 VideoEncoder::ScalingSettings SimulcastEncoderAdapter::GetScalingSettings() |
| 466 const { | 524 const { |
| 525 // TODO(brandtr): Investigate why the sequence checker below fails on mac. |
| 526 // RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_); |
| 467 // Turn off quality scaling for simulcast. | 527 // Turn off quality scaling for simulcast. |
| 468 if (!Initialized() || NumberOfStreams(codec_) != 1) | 528 if (!Initialized() || NumberOfStreams(codec_) != 1) { |
| 469 return VideoEncoder::ScalingSettings(false); | 529 return VideoEncoder::ScalingSettings(false); |
| 530 } |
| 470 return streaminfos_[0].encoder->GetScalingSettings(); | 531 return streaminfos_[0].encoder->GetScalingSettings(); |
| 471 } | 532 } |
| 472 | 533 |
| 473 const char* SimulcastEncoderAdapter::ImplementationName() const { | 534 const char* SimulcastEncoderAdapter::ImplementationName() const { |
| 535 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_); |
| 474 return implementation_name_.c_str(); | 536 return implementation_name_.c_str(); |
| 475 } | 537 } |
| 476 | 538 |
| 477 } // namespace webrtc | 539 } // namespace webrtc |
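
The core of this change is that `Release()` no longer destroys the per-stream encoders; it parks them on `stored_encoders_` so a subsequent `InitEncode()` can pop and reuse them, and anything the new configuration does not need is destroyed via `DestroyStoredEncoders()`. The following is a minimal sketch of that reuse pattern, using hypothetical stand-in types (`PooledEncoder`, `EncoderPool`) rather than the real `webrtc::VideoEncoder`/`VideoEncoderFactory` interfaces, so the mechanism is easier to follow in isolation:

```cpp
// Sketch only: hypothetical types, not the WebRTC API.
#include <memory>
#include <stack>
#include <vector>

struct PooledEncoder {
  void InitEncode() { /* (re)configure the underlying codec */ }
  void Release() { /* free codec resources, keep the wrapper alive */ }
};

class EncoderPool {
 public:
  // Mirrors SimulcastEncoderAdapter::InitEncode(): prefer a stored encoder
  // over allocating a new one, then drop any leftovers to save memory.
  void InitEncode(int number_of_streams) {
    Release();
    for (int i = 0; i < number_of_streams; ++i) {
      std::unique_ptr<PooledEncoder> encoder;
      if (!stored_encoders_.empty()) {
        encoder = std::move(stored_encoders_.top());
        stored_encoders_.pop();
      } else {
        encoder = std::make_unique<PooledEncoder>();
      }
      encoder->InitEncode();
      active_encoders_.push_back(std::move(encoder));
    }
    DestroyStoredEncoders();  // Unused instances are not kept around.
  }

  // Mirrors SimulcastEncoderAdapter::Release(): park active encoders on the
  // stack instead of destroying them, so a later InitEncode() can reuse them.
  void Release() {
    while (!active_encoders_.empty()) {
      active_encoders_.back()->Release();
      stored_encoders_.push(std::move(active_encoders_.back()));
      active_encoders_.pop_back();
    }
  }

  void DestroyStoredEncoders() {
    while (!stored_encoders_.empty()) {
      stored_encoders_.pop();  // unique_ptr deletes the encoder.
    }
  }

 private:
  std::vector<std::unique_ptr<PooledEncoder>> active_encoders_;
  std::stack<std::unique_ptr<PooledEncoder>> stored_encoders_;
};
```

The main payoff, as the removed TODO suggests, is that a resolution change only reinitializes existing encoder instances instead of reallocating them.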
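The change also replaces the old `!streaminfos_.empty()` check in `Initialized()` with an `inited_` flag written via `rtc::AtomicOps::ReleaseStore` and read via `AcquireLoad`, so the flag can be queried off the encoder queue while the sequence checker (`encoder_queue_`) still guards the rest of the state. A rough standard-library equivalent of that flag, using `std::atomic` rather than the `rtc::` helpers, might look like this illustrative sketch:

```cpp
// Sketch only: std::atomic stand-in for rtc::AtomicOps, not the rtc:: API.
#include <atomic>

class InitFlagExample {
 public:
  // Safe to call from any thread.
  bool Initialized() const {
    return inited_.load(std::memory_order_acquire) == 1;
  }

  // Called on the encoder queue after InitEncode() has set everything up;
  // the release store publishes those writes to readers of Initialized().
  void MarkInitialized() { inited_.store(1, std::memory_order_release); }

  // Called on the encoder queue at the end of Release().
  void MarkReleased() { inited_.store(0, std::memory_order_release); }

 private:
  std::atomic<int> inited_{0};
};
```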