OLD | NEW |
---|---|
1 /* | 1 /* |
2 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
(...skipping 44 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
55 for (int i = 0; i < num_streams; ++i) { | 55 for (int i = 0; i < num_streams; ++i) { |
56 if (codec.width * codec.simulcastStream[i].height != | 56 if (codec.width * codec.simulcastStream[i].height != |
57 codec.height * codec.simulcastStream[i].width) { | 57 codec.height * codec.simulcastStream[i].width) { |
58 return false; | 58 return false; |
59 } | 59 } |
60 } | 60 } |
61 return true; | 61 return true; |
62 } | 62 } |
63 | 63 |
64 int VerifyCodec(const webrtc::VideoCodec* inst) { | 64 int VerifyCodec(const webrtc::VideoCodec* inst) { |
65 if (inst == NULL) { | 65 if (inst == nullptr) { |
66 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 66 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
67 } | 67 } |
68 if (inst->maxFramerate < 1) { | 68 if (inst->maxFramerate < 1) { |
69 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 69 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
70 } | 70 } |
71 // allow zero to represent an unspecified maxBitRate | 71 // allow zero to represent an unspecified maxBitRate |
72 if (inst->maxBitrate > 0 && inst->startBitrate > inst->maxBitrate) { | 72 if (inst->maxBitrate > 0 && inst->startBitrate > inst->maxBitrate) { |
73 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 73 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
74 } | 74 } |
75 if (inst->width <= 1 || inst->height <= 1) { | 75 if (inst->width <= 1 || inst->height <= 1) { |
(...skipping 44 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
120 | 120 |
121 const int adapted_simulcast_id_; | 121 const int adapted_simulcast_id_; |
122 const TemporalLayersFactory& tl_factory_; | 122 const TemporalLayersFactory& tl_factory_; |
123 }; | 123 }; |
124 | 124 |
125 } // namespace | 125 } // namespace |
126 | 126 |
127 namespace webrtc { | 127 namespace webrtc { |
128 | 128 |
129 SimulcastEncoderAdapter::SimulcastEncoderAdapter(VideoEncoderFactory* factory) | 129 SimulcastEncoderAdapter::SimulcastEncoderAdapter(VideoEncoderFactory* factory) |
130 : factory_(factory), | 130 : inited_(false), |
131 factory_(factory), | |
131 encoded_complete_callback_(nullptr), | 132 encoded_complete_callback_(nullptr), |
132 implementation_name_("SimulcastEncoderAdapter") { | 133 implementation_name_("SimulcastEncoderAdapter") { |
134 // The adapter is typically created on the worker thread, but operated on | |
135 // the encoder task queue. | |
136 encoder_queue_.Detach(); | |
137 | |
133 memset(&codec_, 0, sizeof(webrtc::VideoCodec)); | 138 memset(&codec_, 0, sizeof(webrtc::VideoCodec)); |
134 } | 139 } |
135 | 140 |
136 SimulcastEncoderAdapter::~SimulcastEncoderAdapter() { | 141 SimulcastEncoderAdapter::~SimulcastEncoderAdapter() { |
137 Release(); | 142 RTC_DCHECK(!Initialized()); |
143 DestroyStoredEncoders(); | |
138 } | 144 } |
139 | 145 |
140 int SimulcastEncoderAdapter::Release() { | 146 int SimulcastEncoderAdapter::Release() { |
141 // TODO(pbos): Keep the last encoder instance but call ::Release() on it, then | 147 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_); |
142 // re-use this instance in ::InitEncode(). This means that changing | |
143 // resolutions doesn't require reallocation of the first encoder, but only | |
144 // reinitialization, which makes sense. Then Destroy this instance instead in | |
145 // ~SimulcastEncoderAdapter(). | |
146 while (!streaminfos_.empty()) { | 148 while (!streaminfos_.empty()) { |
147 VideoEncoder* encoder = streaminfos_.back().encoder; | 149 VideoEncoder* encoder = streaminfos_.back().encoder; |
148 EncodedImageCallback* callback = streaminfos_.back().callback; | |
149 encoder->Release(); | 150 encoder->Release(); |
150 factory_->Destroy(encoder); | 151 stored_encoders_.push_back(encoder); |
151 delete callback; | |
152 streaminfos_.pop_back(); | 152 streaminfos_.pop_back(); |
153 } | 153 } |
154 #if RTC_DCHECK_IS_ON | |
155 rtc::CritScope cs(&inited_crit_); | |
156 #endif | |
157 inited_ = false; | |
154 return WEBRTC_VIDEO_CODEC_OK; | 158 return WEBRTC_VIDEO_CODEC_OK; |
155 } | 159 } |
156 | 160 |
157 int SimulcastEncoderAdapter::InitEncode(const VideoCodec* inst, | 161 int SimulcastEncoderAdapter::InitEncode(const VideoCodec* inst, |
158 int number_of_cores, | 162 int number_of_cores, |
159 size_t max_payload_size) { | 163 size_t max_payload_size) { |
164 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_); | |
165 | |
160 if (number_of_cores < 1) { | 166 if (number_of_cores < 1) { |
161 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 167 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
162 } | 168 } |
163 | 169 |
164 int ret = VerifyCodec(inst); | 170 int ret = VerifyCodec(inst); |
165 if (ret < 0) { | 171 if (ret < 0) { |
166 return ret; | 172 return ret; |
167 } | 173 } |
168 | 174 |
169 ret = Release(); | 175 ret = Release(); |
170 if (ret < 0) { | 176 if (ret < 0) { |
171 return ret; | 177 return ret; |
172 } | 178 } |
173 | 179 |
174 int number_of_streams = NumberOfStreams(*inst); | 180 int number_of_streams = NumberOfStreams(*inst); |
181 RTC_DCHECK_LE(number_of_streams, kMaxSimulcastStreams); | |
175 const bool doing_simulcast = (number_of_streams > 1); | 182 const bool doing_simulcast = (number_of_streams > 1); |
176 | 183 |
177 if (doing_simulcast && !ValidSimulcastResolutions(*inst, number_of_streams)) { | 184 if (doing_simulcast && !ValidSimulcastResolutions(*inst, number_of_streams)) { |
178 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 185 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
179 } | 186 } |
180 | 187 |
181 codec_ = *inst; | 188 codec_ = *inst; |
182 SimulcastRateAllocator rate_allocator(codec_, nullptr); | 189 SimulcastRateAllocator rate_allocator(codec_, nullptr); |
183 BitrateAllocation allocation = rate_allocator.GetAllocation( | 190 BitrateAllocation allocation = rate_allocator.GetAllocation( |
184 codec_.startBitrate * 1000, codec_.maxFramerate); | 191 codec_.startBitrate * 1000, codec_.maxFramerate); |
(...skipping 10 matching lines...) Expand all Loading... | |
195 uint32_t start_bitrate_kbps = start_bitrates[i]; | 202 uint32_t start_bitrate_kbps = start_bitrates[i]; |
196 if (!doing_simulcast) { | 203 if (!doing_simulcast) { |
197 stream_codec = codec_; | 204 stream_codec = codec_; |
198 stream_codec.numberOfSimulcastStreams = 1; | 205 stream_codec.numberOfSimulcastStreams = 1; |
199 } else { | 206 } else { |
200 // Cap start bitrate to the min bitrate in order to avoid strange codec | 207 // Cap start bitrate to the min bitrate in order to avoid strange codec |
201 // behavior. Since sending will be false, this should not matter. | 208 // behavior. Since sending will be false, this should not matter. |
202 start_bitrate_kbps = | 209 start_bitrate_kbps = |
203 std::max(codec_.simulcastStream[i].minBitrate, start_bitrate_kbps); | 210 std::max(codec_.simulcastStream[i].minBitrate, start_bitrate_kbps); |
204 bool highest_resolution_stream = (i == (number_of_streams - 1)); | 211 bool highest_resolution_stream = (i == (number_of_streams - 1)); |
205 PopulateStreamCodec(&codec_, i, start_bitrate_kbps, | 212 PopulateStreamCodec(codec_, i, start_bitrate_kbps, |
206 highest_resolution_stream, &stream_codec); | 213 highest_resolution_stream, &stream_codec); |
207 } | 214 } |
208 TemporalLayersFactoryAdapter tl_factory_adapter(i, | 215 TemporalLayersFactoryAdapter tl_factory_adapter(i, |
209 *codec_.VP8()->tl_factory); | 216 *codec_.VP8()->tl_factory); |
210 stream_codec.VP8()->tl_factory = &tl_factory_adapter; | 217 stream_codec.VP8()->tl_factory = &tl_factory_adapter; |
211 | 218 |
212 // TODO(ronghuawu): Remove once this is handled in VP8EncoderImpl. | 219 // TODO(ronghuawu): Remove once this is handled in VP8EncoderImpl. |
213 if (stream_codec.qpMax < kDefaultMinQp) { | 220 if (stream_codec.qpMax < kDefaultMinQp) { |
214 stream_codec.qpMax = kDefaultMaxQp; | 221 stream_codec.qpMax = kDefaultMaxQp; |
215 } | 222 } |
216 | 223 |
217 VideoEncoder* encoder = factory_->Create(); | 224 // If an existing encoder instance exists, reuse it. |
225 // TODO(brandtr): Set initial RTP state (e.g., picture_id/tl0_pic_idx) here, | |
226 // when we start storing that state outside the encoder wrappers. | |
227 VideoEncoder* encoder; | |
228 if (!stored_encoders_.empty()) { | |
229 encoder = stored_encoders_.back(); | |
brandtr
2017/04/20 11:58:49
I may want to rethink the order here, to ensure th
noahric
2017/04/24 17:27:01
Two things:
1) You should try to use an equivalent
brandtr
2017/04/26 14:59:44
Yes, that would be a nice optimization. Not sure i
noahric
2017/04/26 21:38:51
Ah, ok. Then I guess the question is just do I exp
brandtr
2017/04/27 10:43:31
Thanks, that's good input to have!
| |
230 stored_encoders_.pop_back(); | |
231 } else { | |
232 encoder = factory_->Create(); | |
233 } | |
234 | |
218 ret = encoder->InitEncode(&stream_codec, number_of_cores, max_payload_size); | 235 ret = encoder->InitEncode(&stream_codec, number_of_cores, max_payload_size); |
219 if (ret < 0) { | 236 if (ret < 0) { |
220 // Explicitly destroy the current encoder; because we haven't registered a | 237 // Explicitly destroy the current encoder; because we haven't registered a |
221 // StreamInfo for it yet, Release won't do anything about it. | 238 // StreamInfo for it yet, Release won't do anything about it. |
222 factory_->Destroy(encoder); | 239 factory_->Destroy(encoder); |
223 Release(); | 240 Release(); |
224 return ret; | 241 return ret; |
225 } | 242 } |
226 EncodedImageCallback* callback = new AdapterEncodedImageCallback(this, i); | 243 std::unique_ptr<EncodedImageCallback> callback( |
227 encoder->RegisterEncodeCompleteCallback(callback); | 244 new AdapterEncodedImageCallback(this, i)); |
228 streaminfos_.push_back(StreamInfo(encoder, callback, stream_codec.width, | 245 encoder->RegisterEncodeCompleteCallback(callback.get()); |
229 stream_codec.height, | 246 streaminfos_.emplace_back(encoder, std::move(callback), stream_codec.width, |
230 start_bitrate_kbps > 0)); | 247 stream_codec.height, start_bitrate_kbps > 0); |
231 if (i != 0) | 248 |
249 if (i != 0) { | |
232 implementation_name += ", "; | 250 implementation_name += ", "; |
251 } | |
233 implementation_name += streaminfos_[i].encoder->ImplementationName(); | 252 implementation_name += streaminfos_[i].encoder->ImplementationName(); |
234 } | 253 } |
254 | |
235 if (doing_simulcast) { | 255 if (doing_simulcast) { |
236 implementation_name_ = | 256 implementation_name_ = |
237 "SimulcastEncoderAdapter (" + implementation_name + ")"; | 257 "SimulcastEncoderAdapter (" + implementation_name + ")"; |
238 } else { | 258 } else { |
239 implementation_name_ = implementation_name; | 259 implementation_name_ = implementation_name; |
240 } | 260 } |
261 | |
262 // To save memory, don't store encoders that we don't use. | |
263 DestroyStoredEncoders(); | |
264 | |
265 #if RTC_DCHECK_IS_ON | |
266 rtc::CritScope cs(&inited_crit_); | |
267 #endif | |
268 inited_ = true; | |
269 | |
241 return WEBRTC_VIDEO_CODEC_OK; | 270 return WEBRTC_VIDEO_CODEC_OK; |
242 } | 271 } |
243 | 272 |
244 int SimulcastEncoderAdapter::Encode( | 273 int SimulcastEncoderAdapter::Encode( |
245 const VideoFrame& input_image, | 274 const VideoFrame& input_image, |
246 const CodecSpecificInfo* codec_specific_info, | 275 const CodecSpecificInfo* codec_specific_info, |
247 const std::vector<FrameType>* frame_types) { | 276 const std::vector<FrameType>* frame_types) { |
277 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_); | |
278 | |
248 if (!Initialized()) { | 279 if (!Initialized()) { |
249 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 280 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
250 } | 281 } |
251 if (encoded_complete_callback_ == NULL) { | 282 if (encoded_complete_callback_ == nullptr) { |
252 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 283 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
253 } | 284 } |
254 | 285 |
255 // All active streams should generate a key frame if | 286 // All active streams should generate a key frame if |
256 // a key frame is requested by any stream. | 287 // a key frame is requested by any stream. |
257 bool send_key_frame = false; | 288 bool send_key_frame = false; |
258 if (frame_types) { | 289 if (frame_types) { |
259 for (size_t i = 0; i < frame_types->size(); ++i) { | 290 for (size_t i = 0; i < frame_types->size(); ++i) { |
260 if (frame_types->at(i) == kVideoFrameKey) { | 291 if (frame_types->at(i) == kVideoFrameKey) { |
261 send_key_frame = true; | 292 send_key_frame = true; |
262 break; | 293 break; |
263 } | 294 } |
264 } | 295 } |
265 } | 296 } |
266 for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) { | 297 for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) { |
267 if (streaminfos_[stream_idx].key_frame_request && | 298 if (streaminfos_[stream_idx].key_frame_request && |
268 streaminfos_[stream_idx].send_stream) { | 299 streaminfos_[stream_idx].send_stream) { |
269 send_key_frame = true; | 300 send_key_frame = true; |
270 break; | 301 break; |
271 } | 302 } |
272 } | 303 } |
273 | 304 |
274 int src_width = input_image.width(); | 305 int src_width = input_image.width(); |
275 int src_height = input_image.height(); | 306 int src_height = input_image.height(); |
276 for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) { | 307 for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) { |
277 // Don't encode frames in resolutions that we don't intend to send. | 308 // Don't encode frames in resolutions that we don't intend to send. |
278 if (!streaminfos_[stream_idx].send_stream) | 309 if (!streaminfos_[stream_idx].send_stream) { |
279 continue; | 310 continue; |
311 } | |
280 | 312 |
281 std::vector<FrameType> stream_frame_types; | 313 std::vector<FrameType> stream_frame_types; |
282 if (send_key_frame) { | 314 if (send_key_frame) { |
283 stream_frame_types.push_back(kVideoFrameKey); | 315 stream_frame_types.push_back(kVideoFrameKey); |
284 streaminfos_[stream_idx].key_frame_request = false; | 316 streaminfos_[stream_idx].key_frame_request = false; |
285 } else { | 317 } else { |
286 stream_frame_types.push_back(kVideoFrameDelta); | 318 stream_frame_types.push_back(kVideoFrameDelta); |
287 } | 319 } |
288 | 320 |
289 int dst_width = streaminfos_[stream_idx].width; | 321 int dst_width = streaminfos_[stream_idx].width; |
(...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
331 return ret; | 363 return ret; |
332 } | 364 } |
333 } | 365 } |
334 } | 366 } |
335 | 367 |
336 return WEBRTC_VIDEO_CODEC_OK; | 368 return WEBRTC_VIDEO_CODEC_OK; |
337 } | 369 } |
338 | 370 |
339 int SimulcastEncoderAdapter::RegisterEncodeCompleteCallback( | 371 int SimulcastEncoderAdapter::RegisterEncodeCompleteCallback( |
340 EncodedImageCallback* callback) { | 372 EncodedImageCallback* callback) { |
373 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_); | |
341 encoded_complete_callback_ = callback; | 374 encoded_complete_callback_ = callback; |
342 return WEBRTC_VIDEO_CODEC_OK; | 375 return WEBRTC_VIDEO_CODEC_OK; |
343 } | 376 } |
344 | 377 |
345 int SimulcastEncoderAdapter::SetChannelParameters(uint32_t packet_loss, | 378 int SimulcastEncoderAdapter::SetChannelParameters(uint32_t packet_loss, |
346 int64_t rtt) { | 379 int64_t rtt) { |
380 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_); | |
347 for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) { | 381 for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) { |
348 streaminfos_[stream_idx].encoder->SetChannelParameters(packet_loss, rtt); | 382 streaminfos_[stream_idx].encoder->SetChannelParameters(packet_loss, rtt); |
349 } | 383 } |
350 return WEBRTC_VIDEO_CODEC_OK; | 384 return WEBRTC_VIDEO_CODEC_OK; |
351 } | 385 } |
352 | 386 |
353 int SimulcastEncoderAdapter::SetRateAllocation(const BitrateAllocation& bitrate, | 387 int SimulcastEncoderAdapter::SetRateAllocation(const BitrateAllocation& bitrate, |
354 uint32_t new_framerate) { | 388 uint32_t new_framerate) { |
355 if (!Initialized()) | 389 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_); |
390 | |
391 if (!Initialized()) { | |
356 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 392 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
393 } | |
357 | 394 |
358 if (new_framerate < 1) | 395 if (new_framerate < 1) { |
359 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 396 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
397 } | |
360 | 398 |
361 if (codec_.maxBitrate > 0 && bitrate.get_sum_kbps() > codec_.maxBitrate) | 399 if (codec_.maxBitrate > 0 && bitrate.get_sum_kbps() > codec_.maxBitrate) { |
362 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 400 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
401 } | |
363 | 402 |
364 if (bitrate.get_sum_bps() > 0) { | 403 if (bitrate.get_sum_bps() > 0) { |
365 // Make sure the bitrate fits the configured min bitrates. 0 is a special | 404 // Make sure the bitrate fits the configured min bitrates. 0 is a special |
366 // value that means paused, though, so leave it alone. | 405 // value that means paused, though, so leave it alone. |
367 if (bitrate.get_sum_kbps() < codec_.minBitrate) | 406 if (bitrate.get_sum_kbps() < codec_.minBitrate) { |
368 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 407 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
408 } | |
369 | 409 |
370 if (codec_.numberOfSimulcastStreams > 0 && | 410 if (codec_.numberOfSimulcastStreams > 0 && |
371 bitrate.get_sum_kbps() < codec_.simulcastStream[0].minBitrate) { | 411 bitrate.get_sum_kbps() < codec_.simulcastStream[0].minBitrate) { |
372 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 412 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
373 } | 413 } |
374 } | 414 } |
375 | 415 |
376 codec_.maxFramerate = new_framerate; | 416 codec_.maxFramerate = new_framerate; |
377 | 417 |
378 for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) { | 418 for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) { |
379 uint32_t stream_bitrate_kbps = | 419 uint32_t stream_bitrate_kbps = |
380 bitrate.GetSpatialLayerSum(stream_idx) / 1000; | 420 bitrate.GetSpatialLayerSum(stream_idx) / 1000; |
381 | 421 |
382 // Need a key frame if we have not sent this stream before. | 422 // Need a key frame if we have not sent this stream before. |
383 if (stream_bitrate_kbps > 0 && !streaminfos_[stream_idx].send_stream) { | 423 if (stream_bitrate_kbps > 0 && !streaminfos_[stream_idx].send_stream) { |
384 streaminfos_[stream_idx].key_frame_request = true; | 424 streaminfos_[stream_idx].key_frame_request = true; |
385 } | 425 } |
386 streaminfos_[stream_idx].send_stream = stream_bitrate_kbps > 0; | 426 streaminfos_[stream_idx].send_stream = stream_bitrate_kbps > 0; |
387 | 427 |
388 // Slice the temporal layers out of the full allocation and pass it on to | 428 // Slice the temporal layers out of the full allocation and pass it on to |
389 // the encoder handling the current simulcast stream. | 429 // the encoder handling the current simulcast stream. |
390 BitrateAllocation stream_allocation; | 430 BitrateAllocation stream_allocation; |
391 for (int i = 0; i < kMaxTemporalStreams; ++i) | 431 for (int i = 0; i < kMaxTemporalStreams; ++i) { |
392 stream_allocation.SetBitrate(0, i, bitrate.GetBitrate(stream_idx, i)); | 432 stream_allocation.SetBitrate(0, i, bitrate.GetBitrate(stream_idx, i)); |
433 } | |
393 streaminfos_[stream_idx].encoder->SetRateAllocation(stream_allocation, | 434 streaminfos_[stream_idx].encoder->SetRateAllocation(stream_allocation, |
394 new_framerate); | 435 new_framerate); |
395 } | 436 } |
396 | 437 |
397 return WEBRTC_VIDEO_CODEC_OK; | 438 return WEBRTC_VIDEO_CODEC_OK; |
398 } | 439 } |
399 | 440 |
441 // TODO(brandtr): Add task checker to this member function, when all encoder | |
442 // callbacks are coming in on the encoder queue. | |
400 EncodedImageCallback::Result SimulcastEncoderAdapter::OnEncodedImage( | 443 EncodedImageCallback::Result SimulcastEncoderAdapter::OnEncodedImage( |
401 size_t stream_idx, | 444 size_t stream_idx, |
402 const EncodedImage& encodedImage, | 445 const EncodedImage& encodedImage, |
403 const CodecSpecificInfo* codecSpecificInfo, | 446 const CodecSpecificInfo* codecSpecificInfo, |
404 const RTPFragmentationHeader* fragmentation) { | 447 const RTPFragmentationHeader* fragmentation) { |
405 CodecSpecificInfo stream_codec_specific = *codecSpecificInfo; | 448 CodecSpecificInfo stream_codec_specific = *codecSpecificInfo; |
406 stream_codec_specific.codec_name = implementation_name_.c_str(); | 449 stream_codec_specific.codec_name = implementation_name_.c_str(); |
407 CodecSpecificInfoVP8* vp8Info = &(stream_codec_specific.codecSpecific.VP8); | 450 CodecSpecificInfoVP8* vp8Info = &(stream_codec_specific.codecSpecific.VP8); |
408 vp8Info->simulcastIdx = stream_idx; | 451 vp8Info->simulcastIdx = stream_idx; |
409 | 452 |
410 return encoded_complete_callback_->OnEncodedImage( | 453 return encoded_complete_callback_->OnEncodedImage( |
411 encodedImage, &stream_codec_specific, fragmentation); | 454 encodedImage, &stream_codec_specific, fragmentation); |
412 } | 455 } |
413 | 456 |
414 void SimulcastEncoderAdapter::PopulateStreamCodec( | 457 void SimulcastEncoderAdapter::PopulateStreamCodec( |
415 const webrtc::VideoCodec* inst, | 458 const webrtc::VideoCodec& inst, |
416 int stream_index, | 459 int stream_index, |
417 uint32_t start_bitrate_kbps, | 460 uint32_t start_bitrate_kbps, |
418 bool highest_resolution_stream, | 461 bool highest_resolution_stream, |
419 webrtc::VideoCodec* stream_codec) { | 462 webrtc::VideoCodec* stream_codec) { |
420 *stream_codec = *inst; | 463 *stream_codec = inst; |
421 | 464 |
422 // Stream specific settings. | 465 // Stream specific settings. |
423 stream_codec->VP8()->numberOfTemporalLayers = | 466 stream_codec->VP8()->numberOfTemporalLayers = |
424 inst->simulcastStream[stream_index].numberOfTemporalLayers; | 467 inst.simulcastStream[stream_index].numberOfTemporalLayers; |
425 stream_codec->numberOfSimulcastStreams = 0; | 468 stream_codec->numberOfSimulcastStreams = 0; |
426 stream_codec->width = inst->simulcastStream[stream_index].width; | 469 stream_codec->width = inst.simulcastStream[stream_index].width; |
427 stream_codec->height = inst->simulcastStream[stream_index].height; | 470 stream_codec->height = inst.simulcastStream[stream_index].height; |
428 stream_codec->maxBitrate = inst->simulcastStream[stream_index].maxBitrate; | 471 stream_codec->maxBitrate = inst.simulcastStream[stream_index].maxBitrate; |
429 stream_codec->minBitrate = inst->simulcastStream[stream_index].minBitrate; | 472 stream_codec->minBitrate = inst.simulcastStream[stream_index].minBitrate; |
430 stream_codec->qpMax = inst->simulcastStream[stream_index].qpMax; | 473 stream_codec->qpMax = inst.simulcastStream[stream_index].qpMax; |
431 // Settings that are based on stream/resolution. | 474 // Settings that are based on stream/resolution. |
432 if (stream_index == 0) { | 475 const bool lowest_resolution_stream = (stream_index == 0); |
476 if (lowest_resolution_stream) { | |
433 // Settings for lowest spatial resolutions. | 477 // Settings for lowest spatial resolutions. |
434 stream_codec->qpMax = kLowestResMaxQp; | 478 stream_codec->qpMax = kLowestResMaxQp; |
435 } | 479 } |
436 if (!highest_resolution_stream) { | 480 if (!highest_resolution_stream) { |
437 // For resolutions below CIF, set the codec |complexity| parameter to | 481 // For resolutions below CIF, set the codec |complexity| parameter to |
438 // kComplexityHigher, which maps to cpu_used = -4. | 482 // kComplexityHigher, which maps to cpu_used = -4. |
439 int pixels_per_frame = stream_codec->width * stream_codec->height; | 483 int pixels_per_frame = stream_codec->width * stream_codec->height; |
440 if (pixels_per_frame < 352 * 288) { | 484 if (pixels_per_frame < 352 * 288) { |
441 stream_codec->VP8()->complexity = webrtc::kComplexityHigher; | 485 stream_codec->VP8()->complexity = webrtc::kComplexityHigher; |
442 } | 486 } |
443 // Turn off denoising for all streams but the highest resolution. | 487 // Turn off denoising for all streams but the highest resolution. |
444 stream_codec->VP8()->denoisingOn = false; | 488 stream_codec->VP8()->denoisingOn = false; |
445 } | 489 } |
446 // TODO(ronghuawu): what to do with targetBitrate. | 490 // TODO(ronghuawu): what to do with targetBitrate. |
447 | 491 |
448 stream_codec->startBitrate = start_bitrate_kbps; | 492 stream_codec->startBitrate = start_bitrate_kbps; |
449 } | 493 } |
450 | 494 |
451 bool SimulcastEncoderAdapter::Initialized() const { | 495 bool SimulcastEncoderAdapter::Initialized() const { |
452 return !streaminfos_.empty(); | 496 #if RTC_DCHECK_IS_ON |
497 rtc::CritScope cs(&inited_crit_); | |
498 #endif | |
499 return inited_; | |
500 } | |
501 | |
502 void SimulcastEncoderAdapter::DestroyStoredEncoders() { | |
503 for (const auto& encoder : stored_encoders_) { | |
504 factory_->Destroy(encoder); | |
505 } | |
506 stored_encoders_.clear(); | |
453 } | 507 } |
454 | 508 |
455 bool SimulcastEncoderAdapter::SupportsNativeHandle() const { | 509 bool SimulcastEncoderAdapter::SupportsNativeHandle() const { |
510 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_); | |
456 // We should not be calling this method before streaminfos_ are configured. | 511 // We should not be calling this method before streaminfos_ are configured. |
457 RTC_DCHECK(!streaminfos_.empty()); | 512 RTC_DCHECK(!streaminfos_.empty()); |
458 for (const auto& streaminfo : streaminfos_) { | 513 for (const auto& streaminfo : streaminfos_) { |
459 if (!streaminfo.encoder->SupportsNativeHandle()) | 514 if (!streaminfo.encoder->SupportsNativeHandle()) { |
460 return false; | 515 return false; |
516 } | |
461 } | 517 } |
462 return true; | 518 return true; |
463 } | 519 } |
464 | 520 |
465 VideoEncoder::ScalingSettings SimulcastEncoderAdapter::GetScalingSettings() | 521 VideoEncoder::ScalingSettings SimulcastEncoderAdapter::GetScalingSettings() |
466 const { | 522 const { |
523 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_); | |
467 // Turn off quality scaling for simulcast. | 524 // Turn off quality scaling for simulcast. |
468 if (!Initialized() || NumberOfStreams(codec_) != 1) | 525 if (!Initialized() || NumberOfStreams(codec_) != 1) { |
469 return VideoEncoder::ScalingSettings(false); | 526 return VideoEncoder::ScalingSettings(false); |
527 } | |
470 return streaminfos_[0].encoder->GetScalingSettings(); | 528 return streaminfos_[0].encoder->GetScalingSettings(); |
471 } | 529 } |
472 | 530 |
473 const char* SimulcastEncoderAdapter::ImplementationName() const { | 531 const char* SimulcastEncoderAdapter::ImplementationName() const { |
532 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_); | |
474 return implementation_name_.c_str(); | 533 return implementation_name_.c_str(); |
475 } | 534 } |
476 | 535 |
477 } // namespace webrtc | 536 } // namespace webrtc |
OLD | NEW |