OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
(...skipping 44 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
55 for (int i = 0; i < num_streams; ++i) { | 55 for (int i = 0; i < num_streams; ++i) { |
56 if (codec.width * codec.simulcastStream[i].height != | 56 if (codec.width * codec.simulcastStream[i].height != |
57 codec.height * codec.simulcastStream[i].width) { | 57 codec.height * codec.simulcastStream[i].width) { |
58 return false; | 58 return false; |
59 } | 59 } |
60 } | 60 } |
61 return true; | 61 return true; |
62 } | 62 } |
63 | 63 |
64 int VerifyCodec(const webrtc::VideoCodec* inst) { | 64 int VerifyCodec(const webrtc::VideoCodec* inst) { |
65 if (inst == nullptr) { | 65 if (inst == NULL) { |
66 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 66 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
67 } | 67 } |
68 if (inst->maxFramerate < 1) { | 68 if (inst->maxFramerate < 1) { |
69 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 69 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
70 } | 70 } |
71 // Allow zero to represent an unspecified maxBitrate. | 71 // Allow zero to represent an unspecified maxBitrate. |
72 if (inst->maxBitrate > 0 && inst->startBitrate > inst->maxBitrate) { | 72 if (inst->maxBitrate > 0 && inst->startBitrate > inst->maxBitrate) { |
73 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 73 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
74 } | 74 } |
75 if (inst->width <= 1 || inst->height <= 1) { | 75 if (inst->width <= 1 || inst->height <= 1) { |
(...skipping 44 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
120 | 120 |
121 const int adapted_simulcast_id_; | 121 const int adapted_simulcast_id_; |
122 const TemporalLayersFactory& tl_factory_; | 122 const TemporalLayersFactory& tl_factory_; |
123 }; | 123 }; |
124 | 124 |
125 } // namespace | 125 } // namespace |
126 | 126 |
127 namespace webrtc { | 127 namespace webrtc { |
128 | 128 |
129 SimulcastEncoderAdapter::SimulcastEncoderAdapter(VideoEncoderFactory* factory) | 129 SimulcastEncoderAdapter::SimulcastEncoderAdapter(VideoEncoderFactory* factory) |
130 : inited_(0), | 130 : factory_(factory), |
131 factory_(factory), | |
132 encoded_complete_callback_(nullptr), | 131 encoded_complete_callback_(nullptr), |
133 implementation_name_("SimulcastEncoderAdapter") { | 132 implementation_name_("SimulcastEncoderAdapter") { |
134 // The adapter is typically created on the worker thread, but operated on | |
135 // the encoder task queue. | |
136 encoder_queue_.Detach(); | |
137 | |
138 memset(&codec_, 0, sizeof(webrtc::VideoCodec)); | 133 memset(&codec_, 0, sizeof(webrtc::VideoCodec)); |
139 } | 134 } |
140 | 135 |
141 SimulcastEncoderAdapter::~SimulcastEncoderAdapter() { | 136 SimulcastEncoderAdapter::~SimulcastEncoderAdapter() { |
142 RTC_DCHECK(!Initialized()); | 137 Release(); |
143 DestroyStoredEncoders(); | |
144 } | 138 } |
145 | 139 |
146 int SimulcastEncoderAdapter::Release() { | 140 int SimulcastEncoderAdapter::Release() { |
147 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_); | 141 // TODO(pbos): Keep the last encoder instance but call ::Release() on it, then |
148 | 142 // re-use this instance in ::InitEncode(). This means that changing |
| 143 // resolutions doesn't require reallocation of the first encoder, but only |
| 144 // reinitialization, which makes sense. Then Destroy this instance instead in |
| 145 // ~SimulcastEncoderAdapter(). |
149 while (!streaminfos_.empty()) { | 146 while (!streaminfos_.empty()) { |
150 VideoEncoder* encoder = streaminfos_.back().encoder; | 147 VideoEncoder* encoder = streaminfos_.back().encoder; |
| 148 EncodedImageCallback* callback = streaminfos_.back().callback; |
151 encoder->Release(); | 149 encoder->Release(); |
152 // Even though it seems very unlikely, there are no guarantees that the | 150 factory_->Destroy(encoder); |
153 // encoder will not call back after being Release()'d. Therefore, we disable | 151 delete callback; |
154 // the callbacks here. | 152 streaminfos_.pop_back(); |
155 encoder->RegisterEncodeCompleteCallback(nullptr); | |
156 streaminfos_.pop_back(); // Deletes callback adapter. | |
157 stored_encoders_.push(encoder); | |
158 } | 153 } |
159 | |
160 // It's legal to move the encoder to another queue now. | |
161 encoder_queue_.Detach(); | |
162 | |
163 rtc::AtomicOps::ReleaseStore(&inited_, 0); | |
164 | |
165 return WEBRTC_VIDEO_CODEC_OK; | 154 return WEBRTC_VIDEO_CODEC_OK; |
166 } | 155 } |
167 | 156 |
168 int SimulcastEncoderAdapter::InitEncode(const VideoCodec* inst, | 157 int SimulcastEncoderAdapter::InitEncode(const VideoCodec* inst, |
169 int number_of_cores, | 158 int number_of_cores, |
170 size_t max_payload_size) { | 159 size_t max_payload_size) { |
171 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_); | |
172 | |
173 if (number_of_cores < 1) { | 160 if (number_of_cores < 1) { |
174 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 161 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
175 } | 162 } |
176 | 163 |
177 int ret = VerifyCodec(inst); | 164 int ret = VerifyCodec(inst); |
178 if (ret < 0) { | 165 if (ret < 0) { |
179 return ret; | 166 return ret; |
180 } | 167 } |
181 | 168 |
182 ret = Release(); | 169 ret = Release(); |
183 if (ret < 0) { | 170 if (ret < 0) { |
184 return ret; | 171 return ret; |
185 } | 172 } |
186 | 173 |
187 int number_of_streams = NumberOfStreams(*inst); | 174 int number_of_streams = NumberOfStreams(*inst); |
188 RTC_DCHECK_LE(number_of_streams, kMaxSimulcastStreams); | |
189 const bool doing_simulcast = (number_of_streams > 1); | 175 const bool doing_simulcast = (number_of_streams > 1); |
190 | 176 |
191 if (doing_simulcast && !ValidSimulcastResolutions(*inst, number_of_streams)) { | 177 if (doing_simulcast && !ValidSimulcastResolutions(*inst, number_of_streams)) { |
192 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 178 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
193 } | 179 } |
194 | 180 |
195 codec_ = *inst; | 181 codec_ = *inst; |
196 SimulcastRateAllocator rate_allocator(codec_, nullptr); | 182 SimulcastRateAllocator rate_allocator(codec_, nullptr); |
197 BitrateAllocation allocation = rate_allocator.GetAllocation( | 183 BitrateAllocation allocation = rate_allocator.GetAllocation( |
198 codec_.startBitrate * 1000, codec_.maxFramerate); | 184 codec_.startBitrate * 1000, codec_.maxFramerate); |
(...skipping 10 matching lines...) Expand all Loading... |
209 uint32_t start_bitrate_kbps = start_bitrates[i]; | 195 uint32_t start_bitrate_kbps = start_bitrates[i]; |
210 if (!doing_simulcast) { | 196 if (!doing_simulcast) { |
211 stream_codec = codec_; | 197 stream_codec = codec_; |
212 stream_codec.numberOfSimulcastStreams = 1; | 198 stream_codec.numberOfSimulcastStreams = 1; |
213 } else { | 199 } else { |
214 // Cap start bitrate to the min bitrate in order to avoid strange codec | 200 // Cap start bitrate to the min bitrate in order to avoid strange codec |
215 // behavior. Since sending will be false, this should not matter. | 201 // behavior. Since sending will be false, this should not matter. |
216 start_bitrate_kbps = | 202 start_bitrate_kbps = |
217 std::max(codec_.simulcastStream[i].minBitrate, start_bitrate_kbps); | 203 std::max(codec_.simulcastStream[i].minBitrate, start_bitrate_kbps); |
218 bool highest_resolution_stream = (i == (number_of_streams - 1)); | 204 bool highest_resolution_stream = (i == (number_of_streams - 1)); |
219 PopulateStreamCodec(codec_, i, start_bitrate_kbps, | 205 PopulateStreamCodec(&codec_, i, start_bitrate_kbps, |
220 highest_resolution_stream, &stream_codec); | 206 highest_resolution_stream, &stream_codec); |
221 } | 207 } |
222 TemporalLayersFactoryAdapter tl_factory_adapter(i, | 208 TemporalLayersFactoryAdapter tl_factory_adapter(i, |
223 *codec_.VP8()->tl_factory); | 209 *codec_.VP8()->tl_factory); |
224 stream_codec.VP8()->tl_factory = &tl_factory_adapter; | 210 stream_codec.VP8()->tl_factory = &tl_factory_adapter; |
225 | 211 |
226 // TODO(ronghuawu): Remove once this is handled in VP8EncoderImpl. | 212 // TODO(ronghuawu): Remove once this is handled in VP8EncoderImpl. |
227 if (stream_codec.qpMax < kDefaultMinQp) { | 213 if (stream_codec.qpMax < kDefaultMinQp) { |
228 stream_codec.qpMax = kDefaultMaxQp; | 214 stream_codec.qpMax = kDefaultMaxQp; |
229 } | 215 } |
230 | 216 |
231 // If an existing encoder instance exists, reuse it. | 217 VideoEncoder* encoder = factory_->Create(); |
232 // TODO(brandtr): Set initial RTP state (e.g., picture_id/tl0_pic_idx) here, | |
233 // when we start storing that state outside the encoder wrappers. | |
234 VideoEncoder* encoder; | |
235 if (!stored_encoders_.empty()) { | |
236 encoder = stored_encoders_.top(); | |
237 stored_encoders_.pop(); | |
238 } else { | |
239 encoder = factory_->Create(); | |
240 } | |
241 | |
242 ret = encoder->InitEncode(&stream_codec, number_of_cores, max_payload_size); | 218 ret = encoder->InitEncode(&stream_codec, number_of_cores, max_payload_size); |
243 if (ret < 0) { | 219 if (ret < 0) { |
244 // Explicitly destroy the current encoder; because we haven't registered a | 220 // Explicitly destroy the current encoder; because we haven't registered a |
245 // StreamInfo for it yet, Release won't do anything about it. | 221 // StreamInfo for it yet, Release won't do anything about it. |
246 factory_->Destroy(encoder); | 222 factory_->Destroy(encoder); |
247 Release(); | 223 Release(); |
248 return ret; | 224 return ret; |
249 } | 225 } |
250 std::unique_ptr<EncodedImageCallback> callback( | 226 EncodedImageCallback* callback = new AdapterEncodedImageCallback(this, i); |
251 new AdapterEncodedImageCallback(this, i)); | 227 encoder->RegisterEncodeCompleteCallback(callback); |
252 encoder->RegisterEncodeCompleteCallback(callback.get()); | 228 streaminfos_.push_back(StreamInfo(encoder, callback, stream_codec.width, |
253 streaminfos_.emplace_back(encoder, std::move(callback), stream_codec.width, | 229 stream_codec.height, |
254 stream_codec.height, start_bitrate_kbps > 0); | 230 start_bitrate_kbps > 0)); |
255 | 231 if (i != 0) |
256 if (i != 0) { | |
257 implementation_name += ", "; | 232 implementation_name += ", "; |
258 } | |
259 implementation_name += streaminfos_[i].encoder->ImplementationName(); | 233 implementation_name += streaminfos_[i].encoder->ImplementationName(); |
260 } | 234 } |
261 | |
262 if (doing_simulcast) { | 235 if (doing_simulcast) { |
263 implementation_name_ = | 236 implementation_name_ = |
264 "SimulcastEncoderAdapter (" + implementation_name + ")"; | 237 "SimulcastEncoderAdapter (" + implementation_name + ")"; |
265 } else { | 238 } else { |
266 implementation_name_ = implementation_name; | 239 implementation_name_ = implementation_name; |
267 } | 240 } |
268 | |
269 // To save memory, don't store encoders that we don't use. | |
270 DestroyStoredEncoders(); | |
271 | |
272 rtc::AtomicOps::ReleaseStore(&inited_, 1); | |
273 | |
274 return WEBRTC_VIDEO_CODEC_OK; | 241 return WEBRTC_VIDEO_CODEC_OK; |
275 } | 242 } |
276 | 243 |
277 int SimulcastEncoderAdapter::Encode( | 244 int SimulcastEncoderAdapter::Encode( |
278 const VideoFrame& input_image, | 245 const VideoFrame& input_image, |
279 const CodecSpecificInfo* codec_specific_info, | 246 const CodecSpecificInfo* codec_specific_info, |
280 const std::vector<FrameType>* frame_types) { | 247 const std::vector<FrameType>* frame_types) { |
281 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_); | |
282 | |
283 if (!Initialized()) { | 248 if (!Initialized()) { |
284 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 249 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
285 } | 250 } |
286 if (encoded_complete_callback_ == nullptr) { | 251 if (encoded_complete_callback_ == NULL) { |
287 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 252 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
288 } | 253 } |
289 | 254 |
290 // All active streams should generate a key frame if | 255 // All active streams should generate a key frame if |
291 // a key frame is requested by any stream. | 256 // a key frame is requested by any stream. |
292 bool send_key_frame = false; | 257 bool send_key_frame = false; |
293 if (frame_types) { | 258 if (frame_types) { |
294 for (size_t i = 0; i < frame_types->size(); ++i) { | 259 for (size_t i = 0; i < frame_types->size(); ++i) { |
295 if (frame_types->at(i) == kVideoFrameKey) { | 260 if (frame_types->at(i) == kVideoFrameKey) { |
296 send_key_frame = true; | 261 send_key_frame = true; |
297 break; | 262 break; |
298 } | 263 } |
299 } | 264 } |
300 } | 265 } |
301 for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) { | 266 for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) { |
302 if (streaminfos_[stream_idx].key_frame_request && | 267 if (streaminfos_[stream_idx].key_frame_request && |
303 streaminfos_[stream_idx].send_stream) { | 268 streaminfos_[stream_idx].send_stream) { |
304 send_key_frame = true; | 269 send_key_frame = true; |
305 break; | 270 break; |
306 } | 271 } |
307 } | 272 } |
308 | 273 |
309 int src_width = input_image.width(); | 274 int src_width = input_image.width(); |
310 int src_height = input_image.height(); | 275 int src_height = input_image.height(); |
311 for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) { | 276 for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) { |
312 // Don't encode frames in resolutions that we don't intend to send. | 277 // Don't encode frames in resolutions that we don't intend to send. |
313 if (!streaminfos_[stream_idx].send_stream) { | 278 if (!streaminfos_[stream_idx].send_stream) |
314 continue; | 279 continue; |
315 } | |
316 | 280 |
317 std::vector<FrameType> stream_frame_types; | 281 std::vector<FrameType> stream_frame_types; |
318 if (send_key_frame) { | 282 if (send_key_frame) { |
319 stream_frame_types.push_back(kVideoFrameKey); | 283 stream_frame_types.push_back(kVideoFrameKey); |
320 streaminfos_[stream_idx].key_frame_request = false; | 284 streaminfos_[stream_idx].key_frame_request = false; |
321 } else { | 285 } else { |
322 stream_frame_types.push_back(kVideoFrameDelta); | 286 stream_frame_types.push_back(kVideoFrameDelta); |
323 } | 287 } |
324 | 288 |
325 int dst_width = streaminfos_[stream_idx].width; | 289 int dst_width = streaminfos_[stream_idx].width; |
(...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
367 return ret; | 331 return ret; |
368 } | 332 } |
369 } | 333 } |
370 } | 334 } |
371 | 335 |
372 return WEBRTC_VIDEO_CODEC_OK; | 336 return WEBRTC_VIDEO_CODEC_OK; |
373 } | 337 } |
374 | 338 |
375 int SimulcastEncoderAdapter::RegisterEncodeCompleteCallback( | 339 int SimulcastEncoderAdapter::RegisterEncodeCompleteCallback( |
376 EncodedImageCallback* callback) { | 340 EncodedImageCallback* callback) { |
377 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_); | |
378 encoded_complete_callback_ = callback; | 341 encoded_complete_callback_ = callback; |
379 return WEBRTC_VIDEO_CODEC_OK; | 342 return WEBRTC_VIDEO_CODEC_OK; |
380 } | 343 } |
381 | 344 |
382 int SimulcastEncoderAdapter::SetChannelParameters(uint32_t packet_loss, | 345 int SimulcastEncoderAdapter::SetChannelParameters(uint32_t packet_loss, |
383 int64_t rtt) { | 346 int64_t rtt) { |
384 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_); | |
385 for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) { | 347 for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) { |
386 streaminfos_[stream_idx].encoder->SetChannelParameters(packet_loss, rtt); | 348 streaminfos_[stream_idx].encoder->SetChannelParameters(packet_loss, rtt); |
387 } | 349 } |
388 return WEBRTC_VIDEO_CODEC_OK; | 350 return WEBRTC_VIDEO_CODEC_OK; |
389 } | 351 } |
390 | 352 |
391 int SimulcastEncoderAdapter::SetRateAllocation(const BitrateAllocation& bitrate, | 353 int SimulcastEncoderAdapter::SetRateAllocation(const BitrateAllocation& bitrate, |
392 uint32_t new_framerate) { | 354 uint32_t new_framerate) { |
393 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_); | 355 if (!Initialized()) |
| 356 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
394 | 357 |
395 if (!Initialized()) { | 358 if (new_framerate < 1) |
396 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 359 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
397 } | |
398 | 360 |
399 if (new_framerate < 1) { | 361 if (codec_.maxBitrate > 0 && bitrate.get_sum_kbps() > codec_.maxBitrate) |
400 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 362 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
401 } | |
402 | |
403 if (codec_.maxBitrate > 0 && bitrate.get_sum_kbps() > codec_.maxBitrate) { | |
404 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | |
405 } | |
406 | 363 |
407 if (bitrate.get_sum_bps() > 0) { | 364 if (bitrate.get_sum_bps() > 0) { |
408 // Make sure the bitrate fits the configured min bitrates. 0 is a special | 365 // Make sure the bitrate fits the configured min bitrates. 0 is a special |
409 // value that means paused, though, so leave it alone. | 366 // value that means paused, though, so leave it alone. |
410 if (bitrate.get_sum_kbps() < codec_.minBitrate) { | 367 if (bitrate.get_sum_kbps() < codec_.minBitrate) |
411 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 368 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
412 } | |
413 | 369 |
414 if (codec_.numberOfSimulcastStreams > 0 && | 370 if (codec_.numberOfSimulcastStreams > 0 && |
415 bitrate.get_sum_kbps() < codec_.simulcastStream[0].minBitrate) { | 371 bitrate.get_sum_kbps() < codec_.simulcastStream[0].minBitrate) { |
416 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 372 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
417 } | 373 } |
418 } | 374 } |
419 | 375 |
420 codec_.maxFramerate = new_framerate; | 376 codec_.maxFramerate = new_framerate; |
421 | 377 |
422 for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) { | 378 for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) { |
423 uint32_t stream_bitrate_kbps = | 379 uint32_t stream_bitrate_kbps = |
424 bitrate.GetSpatialLayerSum(stream_idx) / 1000; | 380 bitrate.GetSpatialLayerSum(stream_idx) / 1000; |
425 | 381 |
426 // Need a key frame if we have not sent this stream before. | 382 // Need a key frame if we have not sent this stream before. |
427 if (stream_bitrate_kbps > 0 && !streaminfos_[stream_idx].send_stream) { | 383 if (stream_bitrate_kbps > 0 && !streaminfos_[stream_idx].send_stream) { |
428 streaminfos_[stream_idx].key_frame_request = true; | 384 streaminfos_[stream_idx].key_frame_request = true; |
429 } | 385 } |
430 streaminfos_[stream_idx].send_stream = stream_bitrate_kbps > 0; | 386 streaminfos_[stream_idx].send_stream = stream_bitrate_kbps > 0; |
431 | 387 |
432 // Slice the temporal layers out of the full allocation and pass it on to | 388 // Slice the temporal layers out of the full allocation and pass it on to |
433 // the encoder handling the current simulcast stream. | 389 // the encoder handling the current simulcast stream. |
434 BitrateAllocation stream_allocation; | 390 BitrateAllocation stream_allocation; |
435 for (int i = 0; i < kMaxTemporalStreams; ++i) { | 391 for (int i = 0; i < kMaxTemporalStreams; ++i) |
436 stream_allocation.SetBitrate(0, i, bitrate.GetBitrate(stream_idx, i)); | 392 stream_allocation.SetBitrate(0, i, bitrate.GetBitrate(stream_idx, i)); |
437 } | |
438 streaminfos_[stream_idx].encoder->SetRateAllocation(stream_allocation, | 393 streaminfos_[stream_idx].encoder->SetRateAllocation(stream_allocation, |
439 new_framerate); | 394 new_framerate); |
440 } | 395 } |
441 | 396 |
442 return WEBRTC_VIDEO_CODEC_OK; | 397 return WEBRTC_VIDEO_CODEC_OK; |
443 } | 398 } |
444 | 399 |
445 // TODO(brandtr): Add task checker to this member function, when all encoder | |
446 // callbacks are coming in on the encoder queue. | |
447 EncodedImageCallback::Result SimulcastEncoderAdapter::OnEncodedImage( | 400 EncodedImageCallback::Result SimulcastEncoderAdapter::OnEncodedImage( |
448 size_t stream_idx, | 401 size_t stream_idx, |
449 const EncodedImage& encodedImage, | 402 const EncodedImage& encodedImage, |
450 const CodecSpecificInfo* codecSpecificInfo, | 403 const CodecSpecificInfo* codecSpecificInfo, |
451 const RTPFragmentationHeader* fragmentation) { | 404 const RTPFragmentationHeader* fragmentation) { |
452 CodecSpecificInfo stream_codec_specific = *codecSpecificInfo; | 405 CodecSpecificInfo stream_codec_specific = *codecSpecificInfo; |
453 stream_codec_specific.codec_name = implementation_name_.c_str(); | 406 stream_codec_specific.codec_name = implementation_name_.c_str(); |
454 CodecSpecificInfoVP8* vp8Info = &(stream_codec_specific.codecSpecific.VP8); | 407 CodecSpecificInfoVP8* vp8Info = &(stream_codec_specific.codecSpecific.VP8); |
455 vp8Info->simulcastIdx = stream_idx; | 408 vp8Info->simulcastIdx = stream_idx; |
456 | 409 |
457 return encoded_complete_callback_->OnEncodedImage( | 410 return encoded_complete_callback_->OnEncodedImage( |
458 encodedImage, &stream_codec_specific, fragmentation); | 411 encodedImage, &stream_codec_specific, fragmentation); |
459 } | 412 } |
460 | 413 |
461 void SimulcastEncoderAdapter::PopulateStreamCodec( | 414 void SimulcastEncoderAdapter::PopulateStreamCodec( |
462 const webrtc::VideoCodec& inst, | 415 const webrtc::VideoCodec* inst, |
463 int stream_index, | 416 int stream_index, |
464 uint32_t start_bitrate_kbps, | 417 uint32_t start_bitrate_kbps, |
465 bool highest_resolution_stream, | 418 bool highest_resolution_stream, |
466 webrtc::VideoCodec* stream_codec) { | 419 webrtc::VideoCodec* stream_codec) { |
467 *stream_codec = inst; | 420 *stream_codec = *inst; |
468 | 421 |
469 // Stream specific settings. | 422 // Stream specific settings. |
470 stream_codec->VP8()->numberOfTemporalLayers = | 423 stream_codec->VP8()->numberOfTemporalLayers = |
471 inst.simulcastStream[stream_index].numberOfTemporalLayers; | 424 inst->simulcastStream[stream_index].numberOfTemporalLayers; |
472 stream_codec->numberOfSimulcastStreams = 0; | 425 stream_codec->numberOfSimulcastStreams = 0; |
473 stream_codec->width = inst.simulcastStream[stream_index].width; | 426 stream_codec->width = inst->simulcastStream[stream_index].width; |
474 stream_codec->height = inst.simulcastStream[stream_index].height; | 427 stream_codec->height = inst->simulcastStream[stream_index].height; |
475 stream_codec->maxBitrate = inst.simulcastStream[stream_index].maxBitrate; | 428 stream_codec->maxBitrate = inst->simulcastStream[stream_index].maxBitrate; |
476 stream_codec->minBitrate = inst.simulcastStream[stream_index].minBitrate; | 429 stream_codec->minBitrate = inst->simulcastStream[stream_index].minBitrate; |
477 stream_codec->qpMax = inst.simulcastStream[stream_index].qpMax; | 430 stream_codec->qpMax = inst->simulcastStream[stream_index].qpMax; |
478 // Settings that are based on stream/resolution. | 431 // Settings that are based on stream/resolution. |
479 const bool lowest_resolution_stream = (stream_index == 0); | 432 if (stream_index == 0) { |
480 if (lowest_resolution_stream) { | |
481 // Settings for lowest spatial resolutions. | 433 // Settings for lowest spatial resolutions. |
482 stream_codec->qpMax = kLowestResMaxQp; | 434 stream_codec->qpMax = kLowestResMaxQp; |
483 } | 435 } |
484 if (!highest_resolution_stream) { | 436 if (!highest_resolution_stream) { |
485 // For resolutions below CIF, set the codec |complexity| parameter to | 437 // For resolutions below CIF, set the codec |complexity| parameter to |
486 // kComplexityHigher, which maps to cpu_used = -4. | 438 // kComplexityHigher, which maps to cpu_used = -4. |
487 int pixels_per_frame = stream_codec->width * stream_codec->height; | 439 int pixels_per_frame = stream_codec->width * stream_codec->height; |
488 if (pixels_per_frame < 352 * 288) { | 440 if (pixels_per_frame < 352 * 288) { |
489 stream_codec->VP8()->complexity = webrtc::kComplexityHigher; | 441 stream_codec->VP8()->complexity = webrtc::kComplexityHigher; |
490 } | 442 } |
491 // Turn off denoising for all streams but the highest resolution. | 443 // Turn off denoising for all streams but the highest resolution. |
492 stream_codec->VP8()->denoisingOn = false; | 444 stream_codec->VP8()->denoisingOn = false; |
493 } | 445 } |
494 // TODO(ronghuawu): what to do with targetBitrate. | 446 // TODO(ronghuawu): what to do with targetBitrate. |
495 | 447 |
496 stream_codec->startBitrate = start_bitrate_kbps; | 448 stream_codec->startBitrate = start_bitrate_kbps; |
497 } | 449 } |
498 | 450 |
499 bool SimulcastEncoderAdapter::Initialized() const { | 451 bool SimulcastEncoderAdapter::Initialized() const { |
500 return rtc::AtomicOps::AcquireLoad(&inited_) == 1; | 452 return !streaminfos_.empty(); |
501 } | |
502 | |
503 void SimulcastEncoderAdapter::DestroyStoredEncoders() { | |
504 while (!stored_encoders_.empty()) { | |
505 VideoEncoder* encoder = stored_encoders_.top(); | |
506 factory_->Destroy(encoder); | |
507 stored_encoders_.pop(); | |
508 } | |
509 } | 453 } |
510 | 454 |
511 bool SimulcastEncoderAdapter::SupportsNativeHandle() const { | 455 bool SimulcastEncoderAdapter::SupportsNativeHandle() const { |
512 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_); | |
513 // We should not be calling this method before streaminfos_ are configured. | 456 // We should not be calling this method before streaminfos_ are configured. |
514 RTC_DCHECK(!streaminfos_.empty()); | 457 RTC_DCHECK(!streaminfos_.empty()); |
515 for (const auto& streaminfo : streaminfos_) { | 458 for (const auto& streaminfo : streaminfos_) { |
516 if (!streaminfo.encoder->SupportsNativeHandle()) { | 459 if (!streaminfo.encoder->SupportsNativeHandle()) |
517 return false; | 460 return false; |
518 } | |
519 } | 461 } |
520 return true; | 462 return true; |
521 } | 463 } |
522 | 464 |
523 VideoEncoder::ScalingSettings SimulcastEncoderAdapter::GetScalingSettings() | 465 VideoEncoder::ScalingSettings SimulcastEncoderAdapter::GetScalingSettings() |
524 const { | 466 const { |
525 // TODO(brandtr): Investigate why the sequence checker below fails on mac. | |
526 // RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_); | |
527 // Turn off quality scaling for simulcast. | 467 // Turn off quality scaling for simulcast. |
528 if (!Initialized() || NumberOfStreams(codec_) != 1) { | 468 if (!Initialized() || NumberOfStreams(codec_) != 1) |
529 return VideoEncoder::ScalingSettings(false); | 469 return VideoEncoder::ScalingSettings(false); |
530 } | |
531 return streaminfos_[0].encoder->GetScalingSettings(); | 470 return streaminfos_[0].encoder->GetScalingSettings(); |
532 } | 471 } |
533 | 472 |
534 const char* SimulcastEncoderAdapter::ImplementationName() const { | 473 const char* SimulcastEncoderAdapter::ImplementationName() const { |
535 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_); | |
536 return implementation_name_.c_str(); | 474 return implementation_name_.c_str(); |
537 } | 475 } |
538 | 476 |
539 } // namespace webrtc | 477 } // namespace webrtc |
OLD | NEW |