| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 * | 9 * |
| 10 */ | 10 */ |
| (...skipping 685 matching lines...) |
| 696 RTC_DCHECK_LE(encoded_image_._length, encoded_image_._size); | 696 RTC_DCHECK_LE(encoded_image_._length, encoded_image_._size); |
| 697 | 697 |
| 698 CodecSpecificInfo codec_specific; | 698 CodecSpecificInfo codec_specific; |
| 699 PopulateCodecSpecific(&codec_specific, *pkt, input_image_->timestamp()); | 699 PopulateCodecSpecific(&codec_specific, *pkt, input_image_->timestamp()); |
| 700 | 700 |
| 701 if (encoded_image_._length > 0) { | 701 if (encoded_image_._length > 0) { |
| 702 TRACE_COUNTER1("webrtc", "EncodedFrameSize", encoded_image_._length); | 702 TRACE_COUNTER1("webrtc", "EncodedFrameSize", encoded_image_._length); |
| 703 encoded_image_._timeStamp = input_image_->timestamp(); | 703 encoded_image_._timeStamp = input_image_->timestamp(); |
| 704 encoded_image_.capture_time_ms_ = input_image_->render_time_ms(); | 704 encoded_image_.capture_time_ms_ = input_image_->render_time_ms(); |
| 705 encoded_image_.rotation_ = input_image_->rotation(); | 705 encoded_image_.rotation_ = input_image_->rotation(); |
| | 706 encoded_image_.content_type_ = input_image_->content_type(); |
| 706 encoded_image_._encodedHeight = raw_->d_h; | 707 encoded_image_._encodedHeight = raw_->d_h; |
| 707 encoded_image_._encodedWidth = raw_->d_w; | 708 encoded_image_._encodedWidth = raw_->d_w; |
| 708 int qp = -1; | 709 int qp = -1; |
| 709 vpx_codec_control(encoder_, VP8E_GET_LAST_QUANTIZER, &qp); | 710 vpx_codec_control(encoder_, VP8E_GET_LAST_QUANTIZER, &qp); |
| 710 encoded_image_.qp_ = qp; | 711 encoded_image_.qp_ = qp; |
| 711 encoded_complete_callback_->OnEncodedImage(encoded_image_, &codec_specific, | 712 encoded_complete_callback_->OnEncodedImage(encoded_image_, &codec_specific, |
| 712 &frag_info); | 713 &frag_info); |
| 713 } | 714 } |
| 714 return WEBRTC_VIDEO_CODEC_OK; | 715 return WEBRTC_VIDEO_CODEC_OK; |
| 715 } | 716 } |
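Reviewer note, sketch only: the hunk above copies per-frame metadata from the raw input frame onto `encoded_image_` before handing it to `OnEncodedImage()`, and new line 706 extends that set with the content type. The stand-in types below are simplified for illustration and are not the real WebRTC classes; only the field names shown in the diff itself are taken from the patch.

```cpp
// Sketch only: simplified stand-ins for VideoFrame / EncodedImage, showing
// the metadata propagation this hunk performs (including the newly added
// content type). Enum values mirror the name used in the patch.
#include <cstdint>

enum VideoContentType { kVideoContent_Default, kVideoContent_Screenshare };

struct InputFrame {
  uint32_t timestamp;
  int64_t render_time_ms;
  int rotation;
  VideoContentType content_type;
};

struct EncodedImage {
  uint32_t _timeStamp = 0;
  int64_t capture_time_ms_ = 0;
  int rotation_ = 0;
  VideoContentType content_type_ = kVideoContent_Default;
};

// Copies input-frame metadata onto the encoded image before it is handed to
// the encode-complete callback, mirroring new lines 703-706 above.
void CopyFrameMetadata(const InputFrame& in, EncodedImage* out) {
  out->_timeStamp = in.timestamp;
  out->capture_time_ms_ = in.render_time_ms;
  out->rotation_ = in.rotation;
  out->content_type_ = in.content_type;  // Newly propagated by this patch.
}
```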
| (...skipping 249 matching lines...) |
| 965 img->d_w, img->d_h, img->planes[VPX_PLANE_Y], | 966 img->d_w, img->d_h, img->planes[VPX_PLANE_Y], |
| 966 img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U], | 967 img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U], |
| 967 img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V], | 968 img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V], |
| 968 img->stride[VPX_PLANE_V], | 969 img->stride[VPX_PLANE_V], |
| 969 // WrappedI420Buffer's mechanism for allowing the release of its frame | 970 // WrappedI420Buffer's mechanism for allowing the release of its frame |
| 970 // buffer is through a callback function. This is where we should | 971 // buffer is through a callback function. This is where we should |
| 971 // release |img_buffer|. | 972 // release |img_buffer|. |
| 972 rtc::KeepRefUntilDone(img_buffer))); | 973 rtc::KeepRefUntilDone(img_buffer))); |
| 973 | 974 |
| 974 VideoFrame decoded_image(img_wrapped_buffer, timestamp, | 975 VideoFrame decoded_image(img_wrapped_buffer, timestamp, |
| 975 0 /* render_time_ms */, webrtc::kVideoRotation_0); | 976 0 /* render_time_ms */, webrtc::kVideoRotation_0, |
| | 977 webrtc::kVideoContent_Default); |
| 976 decoded_image.set_ntp_time_ms(ntp_time_ms); | 978 decoded_image.set_ntp_time_ms(ntp_time_ms); |
| 977 | 979 |
| 978 decode_complete_callback_->Decoded(decoded_image, rtc::Optional<int32_t>(), | 980 decode_complete_callback_->Decoded(decoded_image, rtc::Optional<int32_t>(), |
| 979 rtc::Optional<uint8_t>(qp)); | 981 rtc::Optional<uint8_t>(qp)); |
| 980 return WEBRTC_VIDEO_CODEC_OK; | 982 return WEBRTC_VIDEO_CODEC_OK; |
| 981 } | 983 } |
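Reviewer note, sketch only: the comment above line 972 describes the zero-copy hand-off used here. `rtc::KeepRefUntilDone(img_buffer)` binds a reference to |img_buffer| into the wrapped buffer's "no longer used" callback, so the libvpx-backed planes stay valid until every consumer of the wrapped frame has released it. Below is a simplified model of that pattern with stand-in types, not the real `WrappedI420Buffer` or pool classes.

```cpp
// Sketch only: models the wrap-with-release-callback lifetime pattern.
#include <cstdint>
#include <functional>
#include <memory>
#include <utility>

// Stands in for the pooled, libvpx-backed buffer that must stay alive while
// the wrapped planes are in use.
struct PooledBuffer {
  ~PooledBuffer() { /* return the memory to the pool here */ }
};

// Stands in for WrappedI420Buffer: holds raw plane pointers plus a callback
// that runs when the last user releases the wrapper.
class WrappedBuffer {
 public:
  WrappedBuffer(const uint8_t* y, std::function<void()> no_longer_used)
      : y_(y), no_longer_used_(std::move(no_longer_used)) {}
  ~WrappedBuffer() { no_longer_used_(); }  // The release point for |img_buffer|.

  const uint8_t* DataY() const { return y_; }

 private:
  const uint8_t* y_;
  std::function<void()> no_longer_used_;
};

std::shared_ptr<WrappedBuffer> Wrap(const uint8_t* y,
                                    std::shared_ptr<PooledBuffer> backing) {
  // Capturing |backing| by value plays the role of rtc::KeepRefUntilDone:
  // the pooled buffer cannot be reclaimed until the callback object is
  // destroyed, which only happens after the wrapper itself is destroyed.
  return std::make_shared<WrappedBuffer>(y, [backing] {});
}
```

The key point is that the backing buffer's lifetime is tied to the callback object, not to an explicit release call, so the decoder can return immediately while downstream consumers keep using the planes.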
| 982 | 984 |
| 983 int VP9DecoderImpl::RegisterDecodeCompleteCallback( | 985 int VP9DecoderImpl::RegisterDecodeCompleteCallback( |
| 984 DecodedImageCallback* callback) { | 986 DecodedImageCallback* callback) { |
| 985 decode_complete_callback_ = callback; | 987 decode_complete_callback_ = callback; |
| (...skipping 16 matching lines...) |
| 1002 frame_buffer_pool_.ClearPool(); | 1004 frame_buffer_pool_.ClearPool(); |
| 1003 inited_ = false; | 1005 inited_ = false; |
| 1004 return WEBRTC_VIDEO_CODEC_OK; | 1006 return WEBRTC_VIDEO_CODEC_OK; |
| 1005 } | 1007 } |
| 1006 | 1008 |
| 1007 const char* VP9DecoderImpl::ImplementationName() const { | 1009 const char* VP9DecoderImpl::ImplementationName() const { |
| 1008 return "libvpx"; | 1010 return "libvpx"; |
| 1009 } | 1011 } |
| 1010 | 1012 |
| 1011 } // namespace webrtc | 1013 } // namespace webrtc |