| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| (...skipping 711 matching lines...) |
| 722 } | 722 } |
| 723 | 723 |
| 724 // Get decoded video frame properties. | 724 // Get decoded video frame properties. |
| 725 int color_format = GetIntField(jni, *j_media_codec_video_decoder_, | 725 int color_format = GetIntField(jni, *j_media_codec_video_decoder_, |
| 726 j_color_format_field_); | 726 j_color_format_field_); |
| 727 int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_); | 727 int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_); |
| 728 int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_); | 728 int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_); |
| 729 int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_); | 729 int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_); |
| 730 int slice_height = GetIntField(jni, *j_media_codec_video_decoder_, | 730 int slice_height = GetIntField(jni, *j_media_codec_video_decoder_, |
| 731 j_slice_height_field_); | 731 j_slice_height_field_); |
| | 732 RTC_CHECK_GE(slice_height, height); |
| 732 | 733 |
| 733 rtc::scoped_refptr<webrtc::VideoFrameBuffer> frame_buffer; | 734 rtc::scoped_refptr<webrtc::VideoFrameBuffer> frame_buffer; |
| 734 int64_t presentation_timestamps_ms = 0; | 735 int64_t presentation_timestamps_ms = 0; |
| 735 int64_t output_timestamps_ms = 0; | 736 int64_t output_timestamps_ms = 0; |
| 736 int64_t output_ntp_timestamps_ms = 0; | 737 int64_t output_ntp_timestamps_ms = 0; |
| 737 int decode_time_ms = 0; | 738 int decode_time_ms = 0; |
| 738 int64_t frame_delayed_ms = 0; | 739 int64_t frame_delayed_ms = 0; |
| 739 if (use_surface_) { | 740 if (use_surface_) { |
| 740 // Extract data from Java DecodedTextureBuffer. | 741 // Extract data from Java DecodedTextureBuffer. |
| 741 presentation_timestamps_ms = GetLongField( | 742 presentation_timestamps_ms = GetLongField( |
| (...skipping 55 matching lines...) |
| 797 jobject output_buffer = | 798 jobject output_buffer = |
| 798 jni->GetObjectArrayElement(output_buffers, output_buffer_index); | 799 jni->GetObjectArrayElement(output_buffers, output_buffer_index); |
| 799 uint8_t* payload = reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress( | 800 uint8_t* payload = reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress( |
| 800 output_buffer)); | 801 output_buffer)); |
| 801 if (CheckException(jni)) { | 802 if (CheckException(jni)) { |
| 802 return false; | 803 return false; |
| 803 } | 804 } |
| 804 payload += output_buffer_offset; | 805 payload += output_buffer_offset; |
| 805 | 806 |
| 806 // Create yuv420 frame. | 807 // Create yuv420 frame. |
| 807 rtc::scoped_refptr<webrtc::I420Buffer> i420_buffer; | 808 rtc::scoped_refptr<webrtc::I420Buffer> i420_buffer = |
| 808 | 809 decoded_frame_pool_.CreateBuffer(width, height); |
| 809 i420_buffer = decoded_frame_pool_.CreateBuffer(width, height); | |
| 810 if (color_format == COLOR_FormatYUV420Planar) { | 810 if (color_format == COLOR_FormatYUV420Planar) { |
| 811 RTC_CHECK_EQ(0, stride % 2); | 811 RTC_CHECK_EQ(0, stride % 2); |
| 812 RTC_CHECK_EQ(0, slice_height % 2); | |
| 813 const int uv_stride = stride / 2; | 812 const int uv_stride = stride / 2; |
| 814 const int u_slice_height = slice_height / 2; | |
| 815 const uint8_t* y_ptr = payload; | 813 const uint8_t* y_ptr = payload; |
| 816 const uint8_t* u_ptr = y_ptr + stride * slice_height; | 814 const uint8_t* u_ptr = y_ptr + stride * slice_height; |
| 817 const uint8_t* v_ptr = u_ptr + uv_stride * u_slice_height; | 815 |
| 818 libyuv::I420Copy(y_ptr, stride, u_ptr, uv_stride, v_ptr, uv_stride, | 816 // Note that the case with odd |slice_height| is handled in a special way. |
| 819 i420_buffer->MutableDataY(), i420_buffer->StrideY(), | 817 // The chroma height contained in the payload is rounded down instead of |
| 820 i420_buffer->MutableDataU(), i420_buffer->StrideU(), | 818 // up, making it one row less than what we expect in WebRTC. Therefore, we |
| 821 i420_buffer->MutableDataV(), i420_buffer->StrideV(), | 819 // have to duplicate the last chroma rows for this case. Also, the offset |
| 822 width, height); | 820 // between the Y plane and the U plane is unintuitive for this case. See |
| | 821 // http://bugs.webrtc.org/6651 for more info. |
| | 822 const int chroma_width = (width + 1) / 2; |
| | 823 const int chroma_height = |
| | 824 (slice_height % 2 == 0) ? (height + 1) / 2 : height / 2; |
| | 825 const int u_offset = uv_stride * slice_height / 2; |
| | 826 const uint8_t* v_ptr = u_ptr + u_offset; |
| | 827 libyuv::CopyPlane(y_ptr, stride, |
| | 828 i420_buffer->MutableDataY(), i420_buffer->StrideY(), |
| | 829 width, height); |
| | 830 libyuv::CopyPlane(u_ptr, uv_stride, |
| | 831 i420_buffer->MutableDataU(), i420_buffer->StrideU(), |
| | 832 chroma_width, chroma_height); |
| | 833 libyuv::CopyPlane(v_ptr, uv_stride, |
| | 834 i420_buffer->MutableDataV(), i420_buffer->StrideV(), |
| | 835 chroma_width, chroma_height); |
| | 836 if (slice_height % 2 == 1) { |
| | 837 RTC_CHECK_EQ(height, slice_height); |
| | 838 // Duplicate the last chroma rows. |
| | 839 uint8_t* u_last_row_ptr = i420_buffer->MutableDataU() + |
| | 840 chroma_height * i420_buffer->StrideU(); |
| | 841 memcpy(u_last_row_ptr, u_last_row_ptr - i420_buffer->StrideU(), |
| | 842 i420_buffer->StrideU()); |
| | 843 uint8_t* v_last_row_ptr = i420_buffer->MutableDataV() + |
| | 844 chroma_height * i420_buffer->StrideV(); |
| | 845 memcpy(v_last_row_ptr, v_last_row_ptr - i420_buffer->StrideV(), |
| | 846 i420_buffer->StrideV()); |
| | 847 } |
| 823 } else { | 848 } else { |
| 824 // All other supported formats are nv12. | 849 // All other supported formats are nv12. |
| 825 const uint8_t* y_ptr = payload; | 850 const uint8_t* y_ptr = payload; |
| 826 const uint8_t* uv_ptr = y_ptr + stride * slice_height; | 851 const uint8_t* uv_ptr = y_ptr + stride * slice_height; |
| 827 libyuv::NV12ToI420(y_ptr, stride, uv_ptr, stride, | 852 libyuv::NV12ToI420(y_ptr, stride, uv_ptr, stride, |
| 828 i420_buffer->MutableDataY(), i420_buffer->StrideY(), | 853 i420_buffer->MutableDataY(), i420_buffer->StrideY(), |
| 829 i420_buffer->MutableDataU(), i420_buffer->StrideU(), | 854 i420_buffer->MutableDataU(), i420_buffer->StrideU(), |
| 830 i420_buffer->MutableDataV(), i420_buffer->StrideV(), | 855 i420_buffer->MutableDataV(), i420_buffer->StrideV(), |
| 831 width, height); | 856 width, height); |
| 832 } | 857 } |
| (...skipping 168 matching lines...) |
| 1001 webrtc::VideoDecoder* decoder) { | 1026 webrtc::VideoDecoder* decoder) { |
| 1002 ALOGD << "Destroy video decoder."; | 1027 ALOGD << "Destroy video decoder."; |
| 1003 delete decoder; | 1028 delete decoder; |
| 1004 } | 1029 } |
| 1005 | 1030 |
| 1006 const char* MediaCodecVideoDecoder::ImplementationName() const { | 1031 const char* MediaCodecVideoDecoder::ImplementationName() const { |
| 1007 return "MediaCodec"; | 1032 return "MediaCodec"; |
| 1008 } | 1033 } |
| 1009 | 1034 |
| 1010 } // namespace webrtc_jni | 1035 } // namespace webrtc_jni |
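A minimal standalone sketch (not part of the patch) of the odd slice_height arithmetic introduced in new lines 816-847. The frame dimensions, stride, and variable names below are hypothetical, chosen only to show why the decoder payload is one chroma row short, why the offset used to locate the V plane is computed as uv_stride * slice_height / 2, and how duplicating the last copied chroma row fills the remaining row of the destination I420 plane.

```cpp
#include <cassert>
#include <cstdint>
#include <cstring>
#include <vector>

int main() {
  // Hypothetical decoder output geometry (illustrative values only): an odd
  // slice height forces the special case handled by the patch.
  const int width = 640;
  const int height = 361;
  const int stride = 640;
  const int slice_height = 361;  // Odd, and equal to |height|, matching the
                                 // patch's RTC_CHECK_EQ(height, slice_height).
  const int uv_stride = stride / 2;

  // Chroma geometry as it appears in the decoder's byte buffer: the chroma
  // height is rounded *down*, one row short of the (height + 1) / 2 rows an
  // I420 buffer of this height needs.
  const int chroma_width = (width + 1) / 2;                     // 320
  const int chroma_height =
      (slice_height % 2 == 0) ? (height + 1) / 2 : height / 2;  // 180

  // The U -> V offset multiplies before dividing, so for an odd slice height
  // it is half a chroma row larger than uv_stride * (slice_height / 2).
  const int u_offset = uv_stride * slice_height / 2;            // 57760
  assert(u_offset == uv_stride * (slice_height / 2) + uv_stride / 2);

  // Destination U plane sized for the full I420 chroma height.
  const int dst_chroma_height = (height + 1) / 2;               // 181
  std::vector<uint8_t> dst_u(
      static_cast<size_t>(uv_stride) * dst_chroma_height);

  // After copying |chroma_height| rows from the decoder (omitted here),
  // duplicate the last copied row into the final, missing row - the same
  // memcpy pattern the patch applies to MutableDataU() and MutableDataV().
  uint8_t* last_row = dst_u.data() + chroma_height * uv_stride;
  std::memcpy(last_row, last_row - uv_stride, uv_stride);

  assert(chroma_height + 1 == dst_chroma_height);
  (void)chroma_width;  // Unused in this sketch; shown only for completeness.
  return 0;
}
```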