OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
(...skipping 197 matching lines...)
208 highest_resolution_stream, &stream_codec, | 208 highest_resolution_stream, &stream_codec, |
209 &send_stream); | 209 &send_stream); |
210 } | 210 } |
211 | 211 |
212 // TODO(ronghuawu): Remove once this is handled in VP8EncoderImpl. | 212 // TODO(ronghuawu): Remove once this is handled in VP8EncoderImpl. |
213 if (stream_codec.qpMax < kDefaultMinQp) { | 213 if (stream_codec.qpMax < kDefaultMinQp) { |
214 stream_codec.qpMax = kDefaultMaxQp; | 214 stream_codec.qpMax = kDefaultMaxQp; |
215 } | 215 } |
216 | 216 |
217 VideoEncoder* encoder = factory_->Create(); | 217 VideoEncoder* encoder = factory_->Create(); |
218 ret = encoder->InitEncode(&stream_codec, | 218 ret = encoder->InitEncode(&stream_codec, number_of_cores, max_payload_size); |
219 number_of_cores, | |
220 max_payload_size); | |
221 if (ret < 0) { | 219 if (ret < 0) { |
222 Release(); | 220 Release(); |
223 return ret; | 221 return ret; |
224 } | 222 } |
225 EncodedImageCallback* callback = new AdapterEncodedImageCallback(this, i); | 223 EncodedImageCallback* callback = new AdapterEncodedImageCallback(this, i); |
226 encoder->RegisterEncodeCompleteCallback(callback); | 224 encoder->RegisterEncodeCompleteCallback(callback); |
227 streaminfos_.push_back(StreamInfo(encoder, callback, stream_codec.width, | 225 streaminfos_.push_back(StreamInfo(encoder, callback, stream_codec.width, |
228 stream_codec.height, send_stream)); | 226 stream_codec.height, send_stream)); |
229 } | 227 } |
230 return WEBRTC_VIDEO_CODEC_OK; | 228 return WEBRTC_VIDEO_CODEC_OK; |
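The hunk above is the tail of SimulcastEncoderAdapter::InitEncode(): for each simulcast stream the adapter asks the factory for a fresh encoder, initializes it with the stream-specific codec settings, and registers an AdapterEncodedImageCallback that remembers which stream it belongs to, so encoded output can be routed back to the right layer. Below is a minimal sketch of that callback-tagging pattern with simplified stand-in types (Encoder, EncodedCallback, Adapter, StreamCallback are placeholders, not the real webrtc::VideoEncoder / EncodedImageCallback interfaces).

#include <cstddef>
#include <cstdio>
#include <memory>
#include <vector>

// Stand-in for webrtc::EncodedImageCallback.
struct EncodedCallback {
  virtual ~EncodedCallback() = default;
  virtual void OnEncoded(size_t payload_size) = 0;
};

// Stand-in for a factory-created webrtc::VideoEncoder.
class Encoder {
 public:
  void RegisterCallback(EncodedCallback* cb) { callback_ = cb; }
  void Encode(size_t payload_size) {
    // Pretend a frame was encoded and report it to whoever registered.
    if (callback_) callback_->OnEncoded(payload_size);
  }
 private:
  EncodedCallback* callback_ = nullptr;
};

class Adapter;

// Per-stream callback that remembers its stream index, mirroring
// AdapterEncodedImageCallback in the hunk above.
class StreamCallback : public EncodedCallback {
 public:
  StreamCallback(Adapter* adapter, size_t stream_idx)
      : adapter_(adapter), stream_idx_(stream_idx) {}
  void OnEncoded(size_t payload_size) override;
 private:
  Adapter* adapter_;
  size_t stream_idx_;
};

class Adapter {
 public:
  void Init(size_t num_streams) {
    for (size_t i = 0; i < num_streams; ++i) {
      encoders_.push_back(std::make_unique<Encoder>());
      callbacks_.push_back(std::make_unique<StreamCallback>(this, i));
      encoders_.back()->RegisterCallback(callbacks_.back().get());
    }
  }
  void EncodeAll(size_t payload_size) {
    for (auto& encoder : encoders_) encoder->Encode(payload_size);
  }
  // Every per-stream output funnels back here, tagged with its layer index.
  void Encoded(size_t stream_idx, size_t payload_size) {
    std::printf("stream %zu produced %zu bytes\n", stream_idx, payload_size);
  }
 private:
  std::vector<std::unique_ptr<Encoder>> encoders_;
  std::vector<std::unique_ptr<StreamCallback>> callbacks_;
};

void StreamCallback::OnEncoded(size_t payload_size) {
  adapter_->Encoded(stream_idx_, payload_size);
}

int main() {
  Adapter adapter;
  adapter.Init(3);          // e.g. three simulcast layers
  adapter.EncodeAll(1200);  // each layer reports back under its own index
}

The key detail mirrored from the hunk is ownership: the adapter holds both the encoders and their per-stream callbacks, which is why the error path above calls Release() to tear everything down together.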
(...skipping 46 matching lines...)
277 | 275 |
278 int dst_width = streaminfos_[stream_idx].width; | 276 int dst_width = streaminfos_[stream_idx].width; |
279 int dst_height = streaminfos_[stream_idx].height; | 277 int dst_height = streaminfos_[stream_idx].height; |
280 // If scaling isn't required, because the input resolution | 278 // If scaling isn't required, because the input resolution |
281 // matches the destination or the input image is empty (e.g. | 279 // matches the destination or the input image is empty (e.g. |
282 // a keyframe request for encoders with internal camera | 280 // a keyframe request for encoders with internal camera |
283 // sources), pass the image on directly. Otherwise, we'll | 281 // sources), pass the image on directly. Otherwise, we'll |
284 // scale it to match what the encoder expects (below). | 282 // scale it to match what the encoder expects (below). |
285 if ((dst_width == src_width && dst_height == src_height) || | 283 if ((dst_width == src_width && dst_height == src_height) || |
286 input_image.IsZeroSize()) { | 284 input_image.IsZeroSize()) { |
287 streaminfos_[stream_idx].encoder->Encode(input_image, | 285 streaminfos_[stream_idx].encoder->Encode(input_image, codec_specific_info, |
288 codec_specific_info, | |
289 &stream_frame_types); | 286 &stream_frame_types); |
290 } else { | 287 } else { |
291 VideoFrame dst_frame; | 288 VideoFrame dst_frame; |
292 // Making sure that destination frame is of sufficient size. | 289 // Making sure that destination frame is of sufficient size. |
293 // Aligning stride values based on width. | 290 // Aligning stride values based on width. |
294 dst_frame.CreateEmptyFrame(dst_width, dst_height, | 291 dst_frame.CreateEmptyFrame(dst_width, dst_height, dst_width, |
295 dst_width, (dst_width + 1) / 2, | 292 (dst_width + 1) / 2, (dst_width + 1) / 2); |
296 (dst_width + 1) / 2); | 293 libyuv::I420Scale( |
297 libyuv::I420Scale(input_image.buffer(kYPlane), | 294 input_image.buffer(kYPlane), input_image.stride(kYPlane), |
298 input_image.stride(kYPlane), | 295 input_image.buffer(kUPlane), input_image.stride(kUPlane), |
299 input_image.buffer(kUPlane), | 296 input_image.buffer(kVPlane), input_image.stride(kVPlane), src_width, |
300 input_image.stride(kUPlane), | 297 src_height, dst_frame.buffer(kYPlane), dst_frame.stride(kYPlane), |
301 input_image.buffer(kVPlane), | 298 dst_frame.buffer(kUPlane), dst_frame.stride(kUPlane), |
302 input_image.stride(kVPlane), | 299 dst_frame.buffer(kVPlane), dst_frame.stride(kVPlane), dst_width, |
303 src_width, src_height, | 300 dst_height, libyuv::kFilterBilinear); |
304 dst_frame.buffer(kYPlane), | |
305 dst_frame.stride(kYPlane), | |
306 dst_frame.buffer(kUPlane), | |
307 dst_frame.stride(kUPlane), | |
308 dst_frame.buffer(kVPlane), | |
309 dst_frame.stride(kVPlane), | |
310 dst_width, dst_height, | |
311 libyuv::kFilterBilinear); | |
312 dst_frame.set_timestamp(input_image.timestamp()); | 301 dst_frame.set_timestamp(input_image.timestamp()); |
313 dst_frame.set_render_time_ms(input_image.render_time_ms()); | 302 dst_frame.set_render_time_ms(input_image.render_time_ms()); |
314 streaminfos_[stream_idx].encoder->Encode(dst_frame, | 303 streaminfos_[stream_idx].encoder->Encode(dst_frame, codec_specific_info, |
315 codec_specific_info, | |
316 &stream_frame_types); | 304 &stream_frame_types); |
317 } | 305 } |
318 } | 306 } |
319 | 307 |
320 return WEBRTC_VIDEO_CODEC_OK; | 308 return WEBRTC_VIDEO_CODEC_OK; |
321 } | 309 } |
322 | 310 |
323 int SimulcastEncoderAdapter::RegisterEncodeCompleteCallback( | 311 int SimulcastEncoderAdapter::RegisterEncodeCompleteCallback( |
324 EncodedImageCallback* callback) { | 312 EncodedImageCallback* callback) { |
325 encoded_complete_callback_ = callback; | 313 encoded_complete_callback_ = callback; |
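In the Encode() hunk above, a stream whose configured resolution already matches the captured frame (or an empty frame used as a keyframe request) is fed the input directly; every other stream gets a bilinear downscale via libyuv::I420Scale into a destination frame whose Y stride equals the width and whose chroma strides are (width + 1) / 2. A rough standalone sketch of that decision, using raw I420 buffers in place of webrtc::VideoFrame (I420Buffer and PrepareForStream are illustrative helpers, not WebRTC APIs), assuming libyuv is available:

#include <cstdint>
#include <vector>

#include "libyuv/scale.h"

struct I420Buffer {
  int width = 0;
  int height = 0;
  std::vector<uint8_t> y, u, v;
  int stride_y() const { return width; }
  int stride_uv() const { return (width + 1) / 2; }

  static I420Buffer Alloc(int w, int h) {
    I420Buffer b;
    b.width = w;
    b.height = h;
    // Same stride choice as CreateEmptyFrame above: Y stride = width,
    // chroma strides = (width + 1) / 2.
    b.y.resize(static_cast<size_t>(w) * h);
    b.u.resize(static_cast<size_t>((w + 1) / 2) * ((h + 1) / 2));
    b.v.resize(b.u.size());
    return b;
  }
};

// Returns the frame a per-stream encoder should consume: the source itself
// when no scaling is needed, otherwise a bilinear downscale to (dst_w, dst_h).
I420Buffer PrepareForStream(const I420Buffer& src, int dst_w, int dst_h) {
  if (src.width == dst_w && src.height == dst_h) {
    return src;  // Pass through, as in the matching-resolution branch above.
  }
  I420Buffer dst = I420Buffer::Alloc(dst_w, dst_h);
  libyuv::I420Scale(src.y.data(), src.stride_y(),
                    src.u.data(), src.stride_uv(),
                    src.v.data(), src.stride_uv(),
                    src.width, src.height,
                    dst.y.data(), dst.stride_y(),
                    dst.u.data(), dst.stride_uv(),
                    dst.v.data(), dst.stride_uv(),
                    dst_w, dst_h, libyuv::kFilterBilinear);
  return dst;
}

The timestamp and render time are not shown here; as in the hunk, they would simply be copied from the source frame onto the scaled one before encoding.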
(...skipping 93 matching lines...)
419 if (new_bitrate_kbit >= bitrate_to_send_this_layer) { | 407 if (new_bitrate_kbit >= bitrate_to_send_this_layer) { |
420 // We have enough bandwidth to send this stream. | 408 // We have enough bandwidth to send this stream. |
421 *send_stream = true; | 409 *send_stream = true; |
422 // Bitrate for this stream is the new bitrate (|new_bitrate_kbit|) minus the | 410 // Bitrate for this stream is the new bitrate (|new_bitrate_kbit|) minus the |
423 // sum target rates of the lower streams, and capped to a maximum bitrate. | 411 // sum target rates of the lower streams, and capped to a maximum bitrate. |
424 // The maximum cap depends on whether we send the next higher stream. | 412 // The maximum cap depends on whether we send the next higher stream. |
425 // If we will be sending the next higher stream, |max_rate| is given by | 413 // If we will be sending the next higher stream, |max_rate| is given by |
426 // current stream's |targetBitrate|, otherwise it's capped by |maxBitrate|. | 414 // current stream's |targetBitrate|, otherwise it's capped by |maxBitrate|. |
427 if (stream_idx < codec_.numberOfSimulcastStreams - 1) { | 415 if (stream_idx < codec_.numberOfSimulcastStreams - 1) { |
428 unsigned int max_rate = codec_.simulcastStream[stream_idx].maxBitrate; | 416 unsigned int max_rate = codec_.simulcastStream[stream_idx].maxBitrate; |
429 if (new_bitrate_kbit >= SumStreamTargetBitrate(stream_idx + 1, codec_) + | 417 if (new_bitrate_kbit >= |
430 codec_.simulcastStream[stream_idx + 1].minBitrate) { | 418 SumStreamTargetBitrate(stream_idx + 1, codec_) + |
| 419 codec_.simulcastStream[stream_idx + 1].minBitrate) { |
431 max_rate = codec_.simulcastStream[stream_idx].targetBitrate; | 420 max_rate = codec_.simulcastStream[stream_idx].targetBitrate; |
432 } | 421 } |
433 return std::min(new_bitrate_kbit - sum_target_lower_streams, max_rate); | 422 return std::min(new_bitrate_kbit - sum_target_lower_streams, max_rate); |
434 } else { | 423 } else { |
435 // For the highest stream (highest resolution), the |targetBitRate| and | 424 // For the highest stream (highest resolution), the |targetBitRate| and |
436 // |maxBitrate| are not used. Any excess bitrate (above the targets of | 425 // |maxBitrate| are not used. Any excess bitrate (above the targets of |
437 // all lower streams) is given to this (highest resolution) stream. | 426 // all lower streams) is given to this (highest resolution) stream. |
438 return new_bitrate_kbit - sum_target_lower_streams; | 427 return new_bitrate_kbit - sum_target_lower_streams; |
439 } | 428 } |
440 } else { | 429 } else { |
441 // Not enough bitrate for this stream. | 430 // Not enough bitrate for this stream. |
442 // Return our max bitrate of |stream_idx| - 1, but we don't send it. We need | 431 // Return our max bitrate of |stream_idx| - 1, but we don't send it. We need |
443 // to keep this resolution coding in order for the multi-encoder to work. | 432 // to keep this resolution coding in order for the multi-encoder to work. |
444 *send_stream = false; | 433 *send_stream = false; |
445 return codec_.simulcastStream[stream_idx - 1].maxBitrate; | 434 return codec_.simulcastStream[stream_idx - 1].maxBitrate; |
446 } | 435 } |
447 } | 436 } |
448 | 437 |
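GetStreamBitrate() above allocates the total bitrate bottom-up: a stream is sent only if the lower layers' target bitrates plus its own minimum still fit; a sent stream below the top is capped at its targetBitrate when the next-higher layer can also be afforded, otherwise at its maxBitrate; the highest stream absorbs whatever remains; and a stream that cannot be afforded stays configured (at the previous layer's maxBitrate) but is flagged as not sent so the multi-encoder keeps running. Below is a standalone sketch with made-up per-layer rates; the computation of sum_target_lower_streams and bitrate_to_send_this_layer is elided before the hunk, so the "needed" expression here is an assumption.

#include <algorithm>
#include <cstdint>
#include <cstdio>
#include <initializer_list>
#include <vector>

// Per-layer settings in kbps; stand-in for codec_.simulcastStream[] entries.
struct Layer {
  uint32_t min_kbps;
  uint32_t target_kbps;
  uint32_t max_kbps;
};

// Sum of targetBitrate over layers below |up_to| (cf. SumStreamTargetBitrate).
uint32_t SumTargets(const std::vector<Layer>& layers, size_t up_to) {
  uint32_t sum = 0;
  for (size_t i = 0; i < up_to; ++i) sum += layers[i].target_kbps;
  return sum;
}

uint32_t LayerBitrate(const std::vector<Layer>& layers, size_t idx,
                      uint32_t total_kbps, bool* send_stream) {
  const uint32_t lower_targets = SumTargets(layers, idx);
  // Assumed reconstruction of bitrate_to_send_this_layer: the lower layers'
  // targets plus this layer's own minimum.
  const uint32_t needed = lower_targets + layers[idx].min_kbps;
  if (total_kbps >= needed) {
    *send_stream = true;
    if (idx + 1 < layers.size()) {
      // Cap at targetBitrate if the next-higher layer will also be sent,
      // otherwise allow this layer to use up to its maxBitrate.
      uint32_t max_rate = layers[idx].max_kbps;
      if (total_kbps >=
          SumTargets(layers, idx + 1) + layers[idx + 1].min_kbps) {
        max_rate = layers[idx].target_kbps;
      }
      return std::min(total_kbps - lower_targets, max_rate);
    }
    // Highest layer: absorbs everything left above the lower layers' targets.
    return total_kbps - lower_targets;
  }
  // Not affordable: keep the encoder configured (previous layer's max) but
  // mark it as not sent. The real code handles layer 0 in lines elided above.
  *send_stream = false;
  return idx > 0 ? layers[idx - 1].max_kbps : layers[idx].min_kbps;
}

int main() {
  // Hypothetical 3-layer configuration: low / mid / high.
  std::vector<Layer> layers = {
      {30, 150, 200}, {150, 500, 700}, {600, 2500, 2500}};
  for (uint32_t total : {100u, 800u, 3000u}) {
    std::printf("total %4u kbps:", static_cast<unsigned>(total));
    for (size_t i = 0; i < layers.size(); ++i) {
      bool send = false;
      uint32_t rate = LayerBitrate(layers, i, total, &send);
      std::printf("  L%zu=%u%s", i, static_cast<unsigned>(rate),
                  send ? "" : " (off)");
    }
    std::printf("\n");
  }
}

With these hypothetical layers, 100 kbps sends only the low layer, 800 kbps sends the low and mid layers (150 and 650 kbps), and 3000 kbps sends all three, with the top layer taking the 2350 kbps left over after the lower targets.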
(...skipping 52 matching lines...)
501 bool SimulcastEncoderAdapter::SupportsNativeHandle() const { | 490 bool SimulcastEncoderAdapter::SupportsNativeHandle() const { |
502 // We should not be calling this method before streaminfos_ are configured. | 491 // We should not be calling this method before streaminfos_ are configured. |
503 RTC_DCHECK(!streaminfos_.empty()); | 492 RTC_DCHECK(!streaminfos_.empty()); |
504 // TODO(pbos): Support textures when using more than one encoder. | 493 // TODO(pbos): Support textures when using more than one encoder. |
505 if (streaminfos_.size() != 1) | 494 if (streaminfos_.size() != 1) |
506 return false; | 495 return false; |
507 return streaminfos_[0].encoder->SupportsNativeHandle(); | 496 return streaminfos_[0].encoder->SupportsNativeHandle(); |
508 } | 497 } |
509 | 498 |
510 } // namespace webrtc | 499 } // namespace webrtc |