| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| (...skipping 21 matching lines...) |
| 32 #include "webrtc/rtc_base/timeutils.h" | 32 #include "webrtc/rtc_base/timeutils.h" |
| 33 #include "webrtc/sdk/android/src/jni/androidmediacodeccommon.h" | 33 #include "webrtc/sdk/android/src/jni/androidmediacodeccommon.h" |
| 34 #include "webrtc/sdk/android/src/jni/classreferenceholder.h" | 34 #include "webrtc/sdk/android/src/jni/classreferenceholder.h" |
| 35 #include "webrtc/sdk/android/src/jni/native_handle_impl.h" | 35 #include "webrtc/sdk/android/src/jni/native_handle_impl.h" |
| 36 #include "webrtc/sdk/android/src/jni/surfacetexturehelper_jni.h" | 36 #include "webrtc/sdk/android/src/jni/surfacetexturehelper_jni.h" |
| 37 | 37 |
| 38 using rtc::Bind; | 38 using rtc::Bind; |
| 39 using rtc::Thread; | 39 using rtc::Thread; |
| 40 using rtc::ThreadManager; | 40 using rtc::ThreadManager; |
| 41 | 41 |
| 42 using webrtc::CodecSpecificInfo; | 42 namespace webrtc { |
| 43 using webrtc::DecodedImageCallback; | 43 namespace jni { |
| 44 using webrtc::EncodedImage; | |
| 45 using webrtc::VideoFrame; | |
| 46 using webrtc::RTPFragmentationHeader; | |
| 47 using webrtc::VideoCodec; | |
| 48 using webrtc::VideoCodecType; | |
| 49 using webrtc::kVideoCodecH264; | |
| 50 using webrtc::kVideoCodecVP8; | |
| 51 using webrtc::kVideoCodecVP9; | |
| 52 | |
| 53 namespace webrtc_jni { | |
| 54 | 44 |
| 55 // Logging macros. | 45 // Logging macros. |
| 56 #define TAG_DECODER "MediaCodecVideoDecoder" | 46 #define TAG_DECODER "MediaCodecVideoDecoder" |
| 57 #ifdef TRACK_BUFFER_TIMING | 47 #ifdef TRACK_BUFFER_TIMING |
| 58 #define ALOGV(...) \ | 48 #define ALOGV(...) \ |
| 59 __android_log_print(ANDROID_LOG_VERBOSE, TAG_DECODER, __VA_ARGS__) | 49 __android_log_print(ANDROID_LOG_VERBOSE, TAG_DECODER, __VA_ARGS__) |
| 60 #else | 50 #else |
| 61 #define ALOGV(...) | 51 #define ALOGV(...) |
| 62 #endif | 52 #endif |
| 63 #define ALOGD LOG_TAG(rtc::LS_INFO, TAG_DECODER) | 53 #define ALOGD LOG_TAG(rtc::LS_INFO, TAG_DECODER) |
| 64 #define ALOGW LOG_TAG(rtc::LS_WARNING, TAG_DECODER) | 54 #define ALOGW LOG_TAG(rtc::LS_WARNING, TAG_DECODER) |
| 65 #define ALOGE LOG_TAG(rtc::LS_ERROR, TAG_DECODER) | 55 #define ALOGE LOG_TAG(rtc::LS_ERROR, TAG_DECODER) |
| 66 | 56 |
| 67 enum { kMaxWarningLogFrames = 2 }; | 57 enum { kMaxWarningLogFrames = 2 }; |
| 68 | 58 |
| 69 class MediaCodecVideoDecoder : public webrtc::VideoDecoder, | 59 class MediaCodecVideoDecoder : public VideoDecoder, public rtc::MessageHandler { |
| 70 public rtc::MessageHandler { | |
| 71 public: | 60 public: |
| 72 explicit MediaCodecVideoDecoder( | 61 explicit MediaCodecVideoDecoder( |
| 73 JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context); | 62 JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context); |
| 74 virtual ~MediaCodecVideoDecoder(); | 63 virtual ~MediaCodecVideoDecoder(); |
| 75 | 64 |
| 76 int32_t InitDecode(const VideoCodec* codecSettings, int32_t numberOfCores) | 65 int32_t InitDecode(const VideoCodec* codecSettings, int32_t numberOfCores) |
| 77 override; | 66 override; |
| 78 | 67 |
| 79 int32_t Decode( | 68 int32_t Decode( |
| 80 const EncodedImage& inputImage, bool missingFrames, | 69 const EncodedImage& inputImage, bool missingFrames, |
| (...skipping 33 matching lines...) |
| 114 | 103 |
| 115 // Render EGL context - owned by factory, should not be allocated/destroyed | 104 // Render EGL context - owned by factory, should not be allocated/destroyed |
| 116 // by VideoDecoder. | 105 // by VideoDecoder. |
| 117 jobject render_egl_context_; | 106 jobject render_egl_context_; |
| 118 | 107 |
| 119 bool key_frame_required_; | 108 bool key_frame_required_; |
| 120 bool inited_; | 109 bool inited_; |
| 121 bool sw_fallback_required_; | 110 bool sw_fallback_required_; |
| 122 bool use_surface_; | 111 bool use_surface_; |
| 123 VideoCodec codec_; | 112 VideoCodec codec_; |
| 124 webrtc::I420BufferPool decoded_frame_pool_; | 113 I420BufferPool decoded_frame_pool_; |
| 125 rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_; | 114 rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_; |
| 126 DecodedImageCallback* callback_; | 115 DecodedImageCallback* callback_; |
| 127 int frames_received_; // Number of frames received by decoder. | 116 int frames_received_; // Number of frames received by decoder. |
| 128 int frames_decoded_; // Number of frames decoded by decoder. | 117 int frames_decoded_; // Number of frames decoded by decoder. |
| 129 // Number of decoded frames for which log information is displayed. | 118 // Number of decoded frames for which log information is displayed. |
| 130 int frames_decoded_logged_; | 119 int frames_decoded_logged_; |
| 131 int64_t start_time_ms_; // Start time for statistics. | 120 int64_t start_time_ms_; // Start time for statistics. |
| 132 int current_frames_; // Number of frames in the current statistics interval. | 121 int current_frames_; // Number of frames in the current statistics interval. |
| 133 int current_bytes_; // Encoded bytes in the current statistics interval. | 122 int current_bytes_; // Encoded bytes in the current statistics interval. |
| 134 int current_decoding_time_ms_; // Overall decoding time in the current second | 123 int current_decoding_time_ms_; // Overall decoding time in the current second |
| 135 int current_delay_time_ms_; // Overall delay time in the current second. | 124 int current_delay_time_ms_; // Overall delay time in the current second. |
| 136 uint32_t max_pending_frames_; // Maximum number of pending input frames. | 125 uint32_t max_pending_frames_; // Maximum number of pending input frames. |
| 137 webrtc::H264BitstreamParser h264_bitstream_parser_; | 126 H264BitstreamParser h264_bitstream_parser_; |
| 138 std::deque<rtc::Optional<uint8_t>> pending_frame_qps_; | 127 std::deque<rtc::Optional<uint8_t>> pending_frame_qps_; |
| 139 | 128 |
| 140 // State that is constant for the lifetime of this object once the ctor | 129 // State that is constant for the lifetime of this object once the ctor |
| 141 // returns. | 130 // returns. |
| 142 std::unique_ptr<Thread> | 131 std::unique_ptr<Thread> |
| 143 codec_thread_; // Thread on which to operate MediaCodec. | 132 codec_thread_; // Thread on which to operate MediaCodec. |
| 144 ScopedGlobalRef<jclass> j_media_codec_video_decoder_class_; | 133 ScopedGlobalRef<jclass> j_media_codec_video_decoder_class_; |
| 145 ScopedGlobalRef<jobject> j_media_codec_video_decoder_; | 134 ScopedGlobalRef<jobject> j_media_codec_video_decoder_; |
| 146 jmethodID j_init_decode_method_; | 135 jmethodID j_init_decode_method_; |
| 147 jmethodID j_reset_method_; | 136 jmethodID j_reset_method_; |
| (...skipping 406 matching lines...) |
| 554 } | 543 } |
| 555 if (ret < 0) { | 544 if (ret < 0) { |
| 556 ALOGE << "InitDecode failure: " << ret << " - fallback to SW codec"; | 545 ALOGE << "InitDecode failure: " << ret << " - fallback to SW codec"; |
| 557 sw_fallback_required_ = true; | 546 sw_fallback_required_ = true; |
| 558 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; | 547 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; |
| 559 } | 548 } |
| 560 } | 549 } |
| 561 | 550 |
| 562 // Always start with a complete key frame. | 551 // Always start with a complete key frame. |
| 563 if (key_frame_required_) { | 552 if (key_frame_required_) { |
| 564 if (inputImage._frameType != webrtc::kVideoFrameKey) { | 553 if (inputImage._frameType != kVideoFrameKey) { |
| 565 ALOGE << "Decode() - key frame is required"; | 554 ALOGE << "Decode() - key frame is required"; |
| 566 return WEBRTC_VIDEO_CODEC_ERROR; | 555 return WEBRTC_VIDEO_CODEC_ERROR; |
| 567 } | 556 } |
| 568 if (!inputImage._completeFrame) { | 557 if (!inputImage._completeFrame) { |
| 569 ALOGE << "Decode() - complete frame is required"; | 558 ALOGE << "Decode() - complete frame is required"; |
| 570 return WEBRTC_VIDEO_CODEC_ERROR; | 559 return WEBRTC_VIDEO_CODEC_ERROR; |
| 571 } | 560 } |
| 572 key_frame_required_ = false; | 561 key_frame_required_ = false; |
| 573 } | 562 } |
| 574 if (inputImage._length == 0) { | 563 if (inputImage._length == 0) { |
| (...skipping 79 matching lines...) |
| 654 ". TS: " << presentation_timestamp_us / 1000 << | 643 ". TS: " << presentation_timestamp_us / 1000 << |
| 655 ". Size: " << inputImage._length; | 644 ". Size: " << inputImage._length; |
| 656 } | 645 } |
| 657 | 646 |
| 658 // Save input image timestamps for later output. | 647 // Save input image timestamps for later output. |
| 659 frames_received_++; | 648 frames_received_++; |
| 660 current_bytes_ += inputImage._length; | 649 current_bytes_ += inputImage._length; |
| 661 rtc::Optional<uint8_t> qp; | 650 rtc::Optional<uint8_t> qp; |
| 662 if (codecType_ == kVideoCodecVP8) { | 651 if (codecType_ == kVideoCodecVP8) { |
| 663 int qp_int; | 652 int qp_int; |
| 664 if (webrtc::vp8::GetQp(inputImage._buffer, inputImage._length, &qp_int)) { | 653 if (vp8::GetQp(inputImage._buffer, inputImage._length, &qp_int)) { |
| 665 qp = rtc::Optional<uint8_t>(qp_int); | 654 qp = rtc::Optional<uint8_t>(qp_int); |
| 666 } | 655 } |
| 667 } else if (codecType_ == kVideoCodecH264) { | 656 } else if (codecType_ == kVideoCodecH264) { |
| 668 h264_bitstream_parser_.ParseBitstream(inputImage._buffer, | 657 h264_bitstream_parser_.ParseBitstream(inputImage._buffer, |
| 669 inputImage._length); | 658 inputImage._length); |
| 670 int qp_int; | 659 int qp_int; |
| 671 if (h264_bitstream_parser_.GetLastSliceQp(&qp_int)) { | 660 if (h264_bitstream_parser_.GetLastSliceQp(&qp_int)) { |
| 672 qp = rtc::Optional<uint8_t>(qp_int); | 661 qp = rtc::Optional<uint8_t>(qp_int); |
| 673 } | 662 } |
| 674 } | 663 } |
| (...skipping 44 matching lines...) |
| 719 // No decoded frame ready. | 708 // No decoded frame ready. |
| 720 return true; | 709 return true; |
| 721 } | 710 } |
| 722 | 711 |
| 723 // Get decoded video frame properties. | 712 // Get decoded video frame properties. |
| 724 int color_format = GetIntField(jni, *j_media_codec_video_decoder_, | 713 int color_format = GetIntField(jni, *j_media_codec_video_decoder_, |
| 725 j_color_format_field_); | 714 j_color_format_field_); |
| 726 int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_); | 715 int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_); |
| 727 int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_); | 716 int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_); |
| 728 | 717 |
| 729 rtc::scoped_refptr<webrtc::VideoFrameBuffer> frame_buffer; | 718 rtc::scoped_refptr<VideoFrameBuffer> frame_buffer; |
| 730 int64_t presentation_timestamps_ms = 0; | 719 int64_t presentation_timestamps_ms = 0; |
| 731 int64_t output_timestamps_ms = 0; | 720 int64_t output_timestamps_ms = 0; |
| 732 int64_t output_ntp_timestamps_ms = 0; | 721 int64_t output_ntp_timestamps_ms = 0; |
| 733 int decode_time_ms = 0; | 722 int decode_time_ms = 0; |
| 734 int64_t frame_delayed_ms = 0; | 723 int64_t frame_delayed_ms = 0; |
| 735 if (use_surface_) { | 724 if (use_surface_) { |
| 736 // Extract data from Java DecodedTextureBuffer. | 725 // Extract data from Java DecodedTextureBuffer. |
| 737 presentation_timestamps_ms = GetLongField( | 726 presentation_timestamps_ms = GetLongField( |
| 738 jni, j_decoder_output_buffer, | 727 jni, j_decoder_output_buffer, |
| 739 j_texture_presentation_timestamp_ms_field_); | 728 j_texture_presentation_timestamp_ms_field_); |
| 740 output_timestamps_ms = GetLongField( | 729 output_timestamps_ms = GetLongField( |
| 741 jni, j_decoder_output_buffer, j_texture_timestamp_ms_field_); | 730 jni, j_decoder_output_buffer, j_texture_timestamp_ms_field_); |
| 742 output_ntp_timestamps_ms = GetLongField( | 731 output_ntp_timestamps_ms = GetLongField( |
| 743 jni, j_decoder_output_buffer, j_texture_ntp_timestamp_ms_field_); | 732 jni, j_decoder_output_buffer, j_texture_ntp_timestamp_ms_field_); |
| 744 decode_time_ms = GetLongField( | 733 decode_time_ms = GetLongField( |
| 745 jni, j_decoder_output_buffer, j_texture_decode_time_ms_field_); | 734 jni, j_decoder_output_buffer, j_texture_decode_time_ms_field_); |
| 746 | 735 |
| 747 const int texture_id = | 736 const int texture_id = |
| 748 GetIntField(jni, j_decoder_output_buffer, j_texture_id_field_); | 737 GetIntField(jni, j_decoder_output_buffer, j_texture_id_field_); |
| 749 if (texture_id != 0) { // |texture_id| == 0 represents a dropped frame. | 738 if (texture_id != 0) { // |texture_id| == 0 represents a dropped frame. |
| 750 const jfloatArray j_transform_matrix = | 739 const jfloatArray j_transform_matrix = |
| 751 reinterpret_cast<jfloatArray>(GetObjectField( | 740 reinterpret_cast<jfloatArray>(GetObjectField( |
| 752 jni, j_decoder_output_buffer, j_transform_matrix_field_)); | 741 jni, j_decoder_output_buffer, j_transform_matrix_field_)); |
| 753 frame_delayed_ms = GetLongField( | 742 frame_delayed_ms = GetLongField( |
| 754 jni, j_decoder_output_buffer, j_texture_frame_delay_ms_field_); | 743 jni, j_decoder_output_buffer, j_texture_frame_delay_ms_field_); |
| 755 | 744 |
| 756 // Create webrtc::VideoFrameBuffer with native texture handle. | 745 // Create VideoFrameBuffer with native texture handle. |
| 757 frame_buffer = surface_texture_helper_->CreateTextureFrame( | 746 frame_buffer = surface_texture_helper_->CreateTextureFrame( |
| 758 width, height, NativeHandleImpl(jni, texture_id, j_transform_matrix)); | 747 width, height, NativeHandleImpl(jni, texture_id, j_transform_matrix)); |
| 759 } else { | 748 } else { |
| 760 EnableFrameLogOnWarning(); | 749 EnableFrameLogOnWarning(); |
| 761 } | 750 } |
| 762 } else { | 751 } else { |
| 763 // Extract data from Java ByteBuffer and create output yuv420 frame - | 752 // Extract data from Java ByteBuffer and create output yuv420 frame - |
| 764 // for non surface decoding only. | 753 // for non surface decoding only. |
| 765 int stride = | 754 int stride = |
| 766 GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_); | 755 GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_); |
| (...skipping 31 matching lines...) |
| 798 jobject output_buffer = | 787 jobject output_buffer = |
| 799 jni->GetObjectArrayElement(output_buffers, output_buffer_index); | 788 jni->GetObjectArrayElement(output_buffers, output_buffer_index); |
| 800 uint8_t* payload = reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress( | 789 uint8_t* payload = reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress( |
| 801 output_buffer)); | 790 output_buffer)); |
| 802 if (CheckException(jni)) { | 791 if (CheckException(jni)) { |
| 803 return false; | 792 return false; |
| 804 } | 793 } |
| 805 payload += output_buffer_offset; | 794 payload += output_buffer_offset; |
| 806 | 795 |
| 807 // Create yuv420 frame. | 796 // Create yuv420 frame. |
| 808 rtc::scoped_refptr<webrtc::I420Buffer> i420_buffer = | 797 rtc::scoped_refptr<I420Buffer> i420_buffer = |
| 809 decoded_frame_pool_.CreateBuffer(width, height); | 798 decoded_frame_pool_.CreateBuffer(width, height); |
| 810 if (color_format == COLOR_FormatYUV420Planar) { | 799 if (color_format == COLOR_FormatYUV420Planar) { |
| 811 RTC_CHECK_EQ(0, stride % 2); | 800 RTC_CHECK_EQ(0, stride % 2); |
| 812 const int uv_stride = stride / 2; | 801 const int uv_stride = stride / 2; |
| 813 const uint8_t* y_ptr = payload; | 802 const uint8_t* y_ptr = payload; |
| 814 const uint8_t* u_ptr = y_ptr + stride * slice_height; | 803 const uint8_t* u_ptr = y_ptr + stride * slice_height; |
| 815 | 804 |
| 816 // Note that the case with odd |slice_height| is handled in a special way. | 805 // Note that the case with odd |slice_height| is handled in a special way. |
| 817 // The chroma height contained in the payload is rounded down instead of | 806 // The chroma height contained in the payload is rounded down instead of |
| 818 // up, making it one row less than what we expect in WebRTC. Therefore, we | 807 // up, making it one row less than what we expect in WebRTC. Therefore, we |
| (...skipping 77 matching lines...) |
| 896 " for last " << statistic_time_ms << " ms."; | 885 " for last " << statistic_time_ms << " ms."; |
| 897 start_time_ms_ = rtc::TimeMillis(); | 886 start_time_ms_ = rtc::TimeMillis(); |
| 898 current_frames_ = 0; | 887 current_frames_ = 0; |
| 899 current_bytes_ = 0; | 888 current_bytes_ = 0; |
| 900 current_decoding_time_ms_ = 0; | 889 current_decoding_time_ms_ = 0; |
| 901 current_delay_time_ms_ = 0; | 890 current_delay_time_ms_ = 0; |
| 902 } | 891 } |
| 903 | 892 |
| 904 // If the frame was dropped, frame_buffer is left as nullptr. | 893 // If the frame was dropped, frame_buffer is left as nullptr. |
| 905 if (frame_buffer) { | 894 if (frame_buffer) { |
| 906 VideoFrame decoded_frame(frame_buffer, 0, 0, webrtc::kVideoRotation_0); | 895 VideoFrame decoded_frame(frame_buffer, 0, 0, kVideoRotation_0); |
| 907 decoded_frame.set_timestamp(output_timestamps_ms); | 896 decoded_frame.set_timestamp(output_timestamps_ms); |
| 908 decoded_frame.set_ntp_time_ms(output_ntp_timestamps_ms); | 897 decoded_frame.set_ntp_time_ms(output_ntp_timestamps_ms); |
| 909 | 898 |
| 910 rtc::Optional<uint8_t> qp = pending_frame_qps_.front(); | 899 rtc::Optional<uint8_t> qp = pending_frame_qps_.front(); |
| 911 pending_frame_qps_.pop_front(); | 900 pending_frame_qps_.pop_front(); |
| 912 callback_->Decoded(decoded_frame, rtc::Optional<int32_t>(decode_time_ms), | 901 callback_->Decoded(decoded_frame, rtc::Optional<int32_t>(decode_time_ms), |
| 913 qp); | 902 qp); |
| 914 } | 903 } |
| 915 return true; | 904 return true; |
| 916 } | 905 } |
| (...skipping 80 matching lines...) |
| 997 if (egl_context_) { | 986 if (egl_context_) { |
| 998 jni->DeleteGlobalRef(egl_context_); | 987 jni->DeleteGlobalRef(egl_context_); |
| 999 egl_context_ = nullptr; | 988 egl_context_ = nullptr; |
| 1000 } | 989 } |
| 1001 egl_context_ = jni->NewGlobalRef(egl_context); | 990 egl_context_ = jni->NewGlobalRef(egl_context); |
| 1002 if (CheckException(jni)) { | 991 if (CheckException(jni)) { |
| 1003 ALOGE << "error calling NewGlobalRef for EGL Context."; | 992 ALOGE << "error calling NewGlobalRef for EGL Context."; |
| 1004 } | 993 } |
| 1005 } | 994 } |
| 1006 | 995 |
| 1007 webrtc::VideoDecoder* MediaCodecVideoDecoderFactory::CreateVideoDecoder( | 996 VideoDecoder* MediaCodecVideoDecoderFactory::CreateVideoDecoder( |
| 1008 VideoCodecType type) { | 997 VideoCodecType type) { |
| 1009 if (supported_codec_types_.empty()) { | 998 if (supported_codec_types_.empty()) { |
| 1010 ALOGW << "No HW video decoder for type " << (int)type; | 999 ALOGW << "No HW video decoder for type " << (int)type; |
| 1011 return nullptr; | 1000 return nullptr; |
| 1012 } | 1001 } |
| 1013 for (VideoCodecType codec_type : supported_codec_types_) { | 1002 for (VideoCodecType codec_type : supported_codec_types_) { |
| 1014 if (codec_type == type) { | 1003 if (codec_type == type) { |
| 1015 ALOGD << "Create HW video decoder for type " << (int)type; | 1004 ALOGD << "Create HW video decoder for type " << (int)type; |
| 1016 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 1005 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 1017 ScopedLocalRefFrame local_ref_frame(jni); | 1006 ScopedLocalRefFrame local_ref_frame(jni); |
| 1018 return new MediaCodecVideoDecoder(jni, type, egl_context_); | 1007 return new MediaCodecVideoDecoder(jni, type, egl_context_); |
| 1019 } | 1008 } |
| 1020 } | 1009 } |
| 1021 ALOGW << "Can not find HW video decoder for type " << (int)type; | 1010 ALOGW << "Can not find HW video decoder for type " << (int)type; |
| 1022 return nullptr; | 1011 return nullptr; |
| 1023 } | 1012 } |
| 1024 | 1013 |
| 1025 void MediaCodecVideoDecoderFactory::DestroyVideoDecoder( | 1014 void MediaCodecVideoDecoderFactory::DestroyVideoDecoder(VideoDecoder* decoder) { |
| 1026 webrtc::VideoDecoder* decoder) { | |
| 1027 ALOGD << "Destroy video decoder."; | 1015 ALOGD << "Destroy video decoder."; |
| 1028 delete decoder; | 1016 delete decoder; |
| 1029 } | 1017 } |
| 1030 | 1018 |
| 1031 const char* MediaCodecVideoDecoder::ImplementationName() const { | 1019 const char* MediaCodecVideoDecoder::ImplementationName() const { |
| 1032 return "MediaCodec"; | 1020 return "MediaCodec"; |
| 1033 } | 1021 } |
| 1034 | 1022 |
| 1035 } // namespace webrtc_jni | 1023 } // namespace jni |
| 1024 } // namespace webrtc |
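For reference, a minimal self-contained sketch of the namespace restructuring this CL applies (the `VideoDecoder` stub body and the `OldDecoder`/`NewDecoder` names are illustrative, not from the reviewed file): the old layout kept the JNI code in a flat `webrtc_jni` namespace and pulled in each `webrtc::` symbol with its own using-declaration, while the new layout nests `jni` inside `webrtc`, so those symbols resolve without qualification and the using-declarations disappear.

```cpp
#include <cstdio>

// Stub standing in for webrtc::VideoDecoder; only its namespace matters here.
namespace webrtc {
class VideoDecoder {
 public:
  virtual ~VideoDecoder() = default;
  virtual const char* ImplementationName() const = 0;
};
}  // namespace webrtc

// Old layout: flat namespace, each webrtc:: symbol imported explicitly.
namespace webrtc_jni {
using webrtc::VideoDecoder;

class OldDecoder : public VideoDecoder {
 public:
  const char* ImplementationName() const override { return "old"; }
};
}  // namespace webrtc_jni

// New layout: jni nested inside webrtc, so VideoDecoder needs neither
// qualification nor a using-declaration.
namespace webrtc {
namespace jni {
class NewDecoder : public VideoDecoder {
 public:
  const char* ImplementationName() const override { return "new"; }
};
}  // namespace jni
}  // namespace webrtc

int main() {
  webrtc_jni::OldDecoder old_decoder;
  webrtc::jni::NewDecoder new_decoder;
  std::printf("%s -> %s\n", old_decoder.ImplementationName(),
              new_decoder.ImplementationName());
  return 0;
}
```

This is why the diff can simply delete the block of using-declarations at old lines 42-51 and drop the `webrtc::` prefixes on types like `I420BufferPool`, `H264BitstreamParser`, and `kVideoRotation_0`: inside `namespace webrtc { namespace jni { ... } }` those names are found by ordinary unqualified lookup, while code outside the namespace still refers to the fully qualified `webrtc::jni::` names.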