| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 * | 9 * |
| 10 */ | 10 */ |
| (...skipping 32 matching lines...) | (...skipping 32 matching lines...) |
| 43 // } else if (width * height > 1280 * 960 && number_of_cores >= 6) { | 43 // } else if (width * height > 1280 * 960 && number_of_cores >= 6) { |
| 44 // return 3; // 3 threads for 1080p. | 44 // return 3; // 3 threads for 1080p. |
| 45 // } else if (width * height > 640 * 480 && number_of_cores >= 3) { | 45 // } else if (width * height > 640 * 480 && number_of_cores >= 3) { |
| 46 // return 2; // 2 threads for qHD/HD. | 46 // return 2; // 2 threads for qHD/HD. |
| 47 // } else { | 47 // } else { |
| 48 // return 1; // 1 thread for VGA or less. | 48 // return 1; // 1 thread for VGA or less. |
| 49 // } | 49 // } |
| 50 return 1; | 50 return 1; |
| 51 } | 51 } |
| 52 | 52 |
| 53 } // namespace | 53 FrameType ConvertToVideoFrameType(EVideoFrameType type) { |
| 54 | |
| 55 static FrameType EVideoFrameType_to_FrameType(EVideoFrameType type) { | |
| 56 switch (type) { | 54 switch (type) { |
| 57 case videoFrameTypeInvalid: | |
| 58 return kEmptyFrame; | |
| 59 case videoFrameTypeIDR: | 55 case videoFrameTypeIDR: |
| 60 return kVideoFrameKey; | 56 return kVideoFrameKey; |
| 61 case videoFrameTypeSkip: | 57 case videoFrameTypeSkip: |
| 62 case videoFrameTypeI: | 58 case videoFrameTypeI: |
| 63 case videoFrameTypeP: | 59 case videoFrameTypeP: |
| 64 case videoFrameTypeIPMixed: | 60 case videoFrameTypeIPMixed: |
| 65 return kVideoFrameDelta; | 61 return kVideoFrameDelta; |
| 66 default: | 62 case videoFrameTypeInvalid: |
| 67 LOG(LS_WARNING) << "Unknown EVideoFrameType: " << type; | 63 break; |
| 68 return kVideoFrameDelta; | |
| 69 } | 64 } |
| 65 RTC_NOTREACHED() << "Unexpected/invalid frame type: " << type; |
| 66 return kEmptyFrame; |
| 70 } | 67 } |
| 71 | 68 |
| 69 } // namespace |
| 70 |
| 72 // Helper method used by H264EncoderImpl::Encode. | 71 // Helper method used by H264EncoderImpl::Encode. |
| 73 // Copies the encoded bytes from |info| to |encoded_image| and updates the | 72 // Copies the encoded bytes from |info| to |encoded_image| and updates the |
| 74 // fragmentation information of |frag_header|. The |encoded_image->_buffer| may | 73 // fragmentation information of |frag_header|. The |encoded_image->_buffer| may |
| 75 // be deleted and reallocated if a bigger buffer is required. | 74 // be deleted and reallocated if a bigger buffer is required. |
| 76 // | 75 // |
| 77 // After OpenH264 encoding, the encoded bytes are stored in |info| spread out | 76 // After OpenH264 encoding, the encoded bytes are stored in |info| spread out |
| 78 // over a number of layers and "NAL units". Each NAL unit is a fragment starting | 77 // over a number of layers and "NAL units". Each NAL unit is a fragment starting |
| 79 // with the four-byte start code {0,0,0,1}. All of this data (including the | 78 // with the four-byte start code {0,0,0,1}. All of this data (including the |
| 80 // start codes) is copied to the |encoded_image->_buffer| and the |frag_header| | 79 // start codes) is copied to the |encoded_image->_buffer| and the |frag_header| |
| 81 // is updated to point to each fragment, with offsets and lengths set as to | 80 // is updated to point to each fragment, with offsets and lengths set as to |
| (...skipping 297 matching lines...) | (...skipping 297 matching lines...) |
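
(Aside: the comment above describes how RtpFragmentize lays out the Annex B data — NAL units separated by four-byte start codes {0,0,0,1} — and how |frag_header| ends up pointing at each fragment. The actual RtpFragmentize implementation is in the skipped lines; the standalone sketch below is only an illustration of the start-code scan it describes. The names Fragment and FindNalFragments are invented for this sketch and are not part of the code under review.)

#include <cstddef>
#include <cstdint>
#include <vector>

struct Fragment {
  size_t offset;  // Byte offset of the fragment within the buffer.
  size_t length;  // Fragment length, including the start code.
};

// Scans an Annex B byte stream for four-byte start codes {0,0,0,1} and
// records the offset and length of each NAL unit, start code included.
std::vector<Fragment> FindNalFragments(const uint8_t* buffer, size_t size) {
  std::vector<Fragment> fragments;
  for (size_t i = 0; i + 4 <= size; ++i) {
    if (buffer[i] == 0 && buffer[i + 1] == 0 && buffer[i + 2] == 0 &&
        buffer[i + 3] == 1) {
      if (!fragments.empty()) {
        // Close the previous fragment at the start of this one.
        fragments.back().length = i - fragments.back().offset;
      }
      fragments.push_back({i, 0});
      i += 3;  // Skip the start code; the loop increment consumes its last byte.
    }
  }
  if (!fragments.empty()) {
    // The last fragment runs to the end of the buffer.
    fragments.back().length = size - fragments.back().offset;
  }
  return fragments;
}
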
| 379 ReportError(); | 378 ReportError(); |
| 380 return WEBRTC_VIDEO_CODEC_ERROR; | 379 return WEBRTC_VIDEO_CODEC_ERROR; |
| 381 } | 380 } |
| 382 | 381 |
| 383 encoded_image_._encodedWidth = frame.width(); | 382 encoded_image_._encodedWidth = frame.width(); |
| 384 encoded_image_._encodedHeight = frame.height(); | 383 encoded_image_._encodedHeight = frame.height(); |
| 385 encoded_image_._timeStamp = frame.timestamp(); | 384 encoded_image_._timeStamp = frame.timestamp(); |
| 386 encoded_image_.ntp_time_ms_ = frame.ntp_time_ms(); | 385 encoded_image_.ntp_time_ms_ = frame.ntp_time_ms(); |
| 387 encoded_image_.capture_time_ms_ = frame.render_time_ms(); | 386 encoded_image_.capture_time_ms_ = frame.render_time_ms(); |
| 388 encoded_image_.rotation_ = frame.rotation(); | 387 encoded_image_.rotation_ = frame.rotation(); |
| 389 encoded_image_._frameType = EVideoFrameType_to_FrameType(info.eFrameType); | 388 encoded_image_._frameType = ConvertToVideoFrameType(info.eFrameType); |
| 390 | 389 |
| 391 // Split encoded image up into fragments. This also updates |encoded_image_|. | 390 // Split encoded image up into fragments. This also updates |encoded_image_|. |
| 392 RTPFragmentationHeader frag_header; | 391 RTPFragmentationHeader frag_header; |
| 393 RtpFragmentize(&encoded_image_, &encoded_image_buffer_, frame, &info, | 392 RtpFragmentize(&encoded_image_, &encoded_image_buffer_, frame, &info, |
| 394 &frag_header); | 393 &frag_header); |
| 395 | 394 |
| 396 // Encoder can skip frames to save bandwidth in which case | 395 // Encoder can skip frames to save bandwidth in which case |
| 397 // |encoded_image_._length| == 0. | 396 // |encoded_image_._length| == 0. |
| 398 if (encoded_image_._length > 0) { | 397 if (encoded_image_._length > 0) { |
| 399 // Deliver encoded image. | 398 // Deliver encoded image. |
| (...skipping 34 matching lines...) | (...skipping 34 matching lines...) |
| 434 } | 433 } |
| 435 | 434 |
| 436 int32_t H264EncoderImpl::SetPeriodicKeyFrames(bool enable) { | 435 int32_t H264EncoderImpl::SetPeriodicKeyFrames(bool enable) { |
| 437 return WEBRTC_VIDEO_CODEC_OK; | 436 return WEBRTC_VIDEO_CODEC_OK; |
| 438 } | 437 } |
| 439 | 438 |
| 440 void H264EncoderImpl::OnDroppedFrame() { | 439 void H264EncoderImpl::OnDroppedFrame() { |
| 441 } | 440 } |
| 442 | 441 |
| 443 } // namespace webrtc | 442 } // namespace webrtc |