| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| (...skipping 11 matching lines...) |
| 22 #include "third_party/libyuv/include/libyuv/video_common.h" | 22 #include "third_party/libyuv/include/libyuv/video_common.h" |
| 23 #include "webrtc/common_video/h264/h264_bitstream_parser.h" | 23 #include "webrtc/common_video/h264/h264_bitstream_parser.h" |
| 24 #include "webrtc/common_video/include/i420_buffer_pool.h" | 24 #include "webrtc/common_video/include/i420_buffer_pool.h" |
| 25 #include "webrtc/modules/video_coding/include/video_codec_interface.h" | 25 #include "webrtc/modules/video_coding/include/video_codec_interface.h" |
| 26 #include "webrtc/modules/video_coding/utility/vp8_header_parser.h" | 26 #include "webrtc/modules/video_coding/utility/vp8_header_parser.h" |
| 27 #include "webrtc/rtc_base/bind.h" | 27 #include "webrtc/rtc_base/bind.h" |
| 28 #include "webrtc/rtc_base/checks.h" | 28 #include "webrtc/rtc_base/checks.h" |
| 29 #include "webrtc/rtc_base/logging.h" | 29 #include "webrtc/rtc_base/logging.h" |
| 30 #include "webrtc/rtc_base/scoped_ref_ptr.h" | 30 #include "webrtc/rtc_base/scoped_ref_ptr.h" |
| 31 #include "webrtc/rtc_base/thread.h" | 31 #include "webrtc/rtc_base/thread.h" |
| 32 #include "webrtc/rtc_base/thread_checker.h" |
| 32 #include "webrtc/rtc_base/timeutils.h" | 33 #include "webrtc/rtc_base/timeutils.h" |
| 33 #include "webrtc/sdk/android/src/jni/androidmediacodeccommon.h" | 34 #include "webrtc/sdk/android/src/jni/androidmediacodeccommon.h" |
| 34 #include "webrtc/sdk/android/src/jni/classreferenceholder.h" | 35 #include "webrtc/sdk/android/src/jni/classreferenceholder.h" |
| 35 #include "webrtc/sdk/android/src/jni/native_handle_impl.h" | 36 #include "webrtc/sdk/android/src/jni/native_handle_impl.h" |
| 36 #include "webrtc/sdk/android/src/jni/surfacetexturehelper_jni.h" | 37 #include "webrtc/sdk/android/src/jni/surfacetexturehelper_jni.h" |
| 37 | 38 |
| | 39 // Logging macros. |
| | 40 #define TAG_DECODER "MediaCodecVideoDecoder" |
| | 41 #ifdef TRACK_BUFFER_TIMING |
| | 42 #define ALOGV(...) \ |
| | 43 __android_log_print(ANDROID_LOG_VERBOSE, TAG_DECODER, __VA_ARGS__) |
| | 44 #else |
| | 45 #define ALOGV(...) |
| | 46 #endif |
| | 47 #define ALOGD LOG_TAG(rtc::LS_INFO, TAG_DECODER) |
| | 48 #define ALOGW LOG_TAG(rtc::LS_WARNING, TAG_DECODER) |
| | 49 #define ALOGE LOG_TAG(rtc::LS_ERROR, TAG_DECODER) |
| | 50 |
| 38 using rtc::Bind; | 51 using rtc::Bind; |
| 39 using rtc::Thread; | 52 using rtc::Thread; |
| 40 using rtc::ThreadManager; | 53 using rtc::ThreadManager; |
| 41 | 54 |
| 42 using webrtc::CodecSpecificInfo; | 55 using webrtc::CodecSpecificInfo; |
| 43 using webrtc::DecodedImageCallback; | 56 using webrtc::DecodedImageCallback; |
| 44 using webrtc::EncodedImage; | 57 using webrtc::EncodedImage; |
| 45 using webrtc::VideoFrame; | 58 using webrtc::VideoFrame; |
| 46 using webrtc::RTPFragmentationHeader; | 59 using webrtc::RTPFragmentationHeader; |
| 47 using webrtc::VideoCodec; | 60 using webrtc::VideoCodec; |
| 48 using webrtc::VideoCodecType; | 61 using webrtc::VideoCodecType; |
| 49 using webrtc::kVideoCodecH264; | 62 using webrtc::kVideoCodecH264; |
| 50 using webrtc::kVideoCodecVP8; | 63 using webrtc::kVideoCodecVP8; |
| 51 using webrtc::kVideoCodecVP9; | 64 using webrtc::kVideoCodecVP9; |
| 52 | 65 |
| 53 namespace webrtc_jni { | 66 namespace webrtc_jni { |
| 54 | 67 |
| 55 // Logging macros. | |
| 56 #define TAG_DECODER "MediaCodecVideoDecoder" | |
| 57 #ifdef TRACK_BUFFER_TIMING | |
| 58 #define ALOGV(...) \ | |
| 59 __android_log_print(ANDROID_LOG_VERBOSE, TAG_DECODER, __VA_ARGS__) | |
| 60 #else | |
| 61 #define ALOGV(...) | |
| 62 #endif | |
| 63 #define ALOGD LOG_TAG(rtc::LS_INFO, TAG_DECODER) | |
| 64 #define ALOGW LOG_TAG(rtc::LS_WARNING, TAG_DECODER) | |
| 65 #define ALOGE LOG_TAG(rtc::LS_ERROR, TAG_DECODER) | |
| 66 | |
| 67 enum { kMaxWarningLogFrames = 2 }; | 68 enum { kMaxWarningLogFrames = 2 }; |
| 68 | 69 |
| 69 class MediaCodecVideoDecoder : public webrtc::VideoDecoder, | 70 class MediaCodecVideoDecoder : public webrtc::VideoDecoder { |
| 70 public rtc::MessageHandler { | |
| 71 public: | 71 public: |
| 72 explicit MediaCodecVideoDecoder( | 72 explicit MediaCodecVideoDecoder( |
| 73 JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context); | 73 JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context); |
| 74 virtual ~MediaCodecVideoDecoder(); | 74 virtual ~MediaCodecVideoDecoder(); |
| 75 | 75 |
| 76 int32_t InitDecode(const VideoCodec* codecSettings, int32_t numberOfCores) | 76 int32_t InitDecode(const VideoCodec* codecSettings, int32_t numberOfCores) |
| 77 override; | 77 override; |
| 78 | 78 |
| 79 int32_t Decode( | 79 int32_t Decode( |
| 80 const EncodedImage& inputImage, bool missingFrames, | 80 const EncodedImage& inputImage, bool missingFrames, |
| 81 const RTPFragmentationHeader* fragmentation, | 81 const RTPFragmentationHeader* fragmentation, |
| 82 const CodecSpecificInfo* codecSpecificInfo = NULL, | 82 const CodecSpecificInfo* codecSpecificInfo = NULL, |
| 83 int64_t renderTimeMs = -1) override; | 83 int64_t renderTimeMs = -1) override; |
| 84 | 84 |
| | 85 void PollDecodedFrames() override; |
| | 86 |
| 85 int32_t RegisterDecodeCompleteCallback(DecodedImageCallback* callback) | 87 int32_t RegisterDecodeCompleteCallback(DecodedImageCallback* callback) |
| 86 override; | 88 override; |
| 87 | 89 |
| 88 int32_t Release() override; | 90 int32_t Release() override; |
| 89 | 91 |
| 90 bool PrefersLateDecoding() const override { return true; } | 92 bool PrefersLateDecoding() const override { return true; } |
| 91 | 93 |
| 92 // rtc::MessageHandler implementation. | |
| 93 void OnMessage(rtc::Message* msg) override; | |
| 94 | |
| 95 const char* ImplementationName() const override; | 94 const char* ImplementationName() const override; |
| 96 | 95 |
| 97 private: | 96 private: |
| 98 // CHECK-fail if not running on |codec_thread_|. | 97 int32_t InitDecodeInternal(); |
| 99 void CheckOnCodecThread(); | 98 int32_t ResetDecode(); |
| 100 | |
| 101 int32_t InitDecodeOnCodecThread(); | |
| 102 int32_t ResetDecodeOnCodecThread(); | |
| 103 int32_t ReleaseOnCodecThread(); | |
| 104 int32_t DecodeOnCodecThread(const EncodedImage& inputImage); | |
| 105 // Deliver any outputs pending in the MediaCodec to our |callback_| and return | 99 // Deliver any outputs pending in the MediaCodec to our |callback_| and return |
| 106 // true on success. | 100 // true on success. |
| 107 bool DeliverPendingOutputs(JNIEnv* jni, int dequeue_timeout_us); | 101 bool DeliverPendingOutputs(JNIEnv* jni, int dequeue_timeout_us); |
| 108 int32_t ProcessHWErrorOnCodecThread(); | 102 int32_t ProcessHWError(); |
| 109 void EnableFrameLogOnWarning(); | 103 void EnableFrameLogOnWarning(); |
| 110 void ResetVariables(); | 104 void ResetVariables(); |
| 111 | 105 |
| 112 // Type of video codec. | 106 // Type of video codec. |
| 113 VideoCodecType codecType_; | 107 VideoCodecType codecType_; |
| 114 | 108 |
| 115 // Render EGL context - owned by factory, should not be allocated/destroyed | 109 // Render EGL context - owned by factory, should not be allocated/destroyed |
| 116 // by VideoDecoder. | 110 // by VideoDecoder. |
| 117 jobject render_egl_context_; | 111 jobject render_egl_context_; |
| 118 | 112 |
| (...skipping 13 matching lines...) |
| 132 int current_frames_; // Number of frames in the current statistics interval. | 126 int current_frames_; // Number of frames in the current statistics interval. |
| 133 int current_bytes_; // Encoded bytes in the current statistics interval. | 127 int current_bytes_; // Encoded bytes in the current statistics interval. |
| 134 int current_decoding_time_ms_; // Overall decoding time in the current second | 128 int current_decoding_time_ms_; // Overall decoding time in the current second |
| 135 int current_delay_time_ms_; // Overall delay time in the current second. | 129 int current_delay_time_ms_; // Overall delay time in the current second. |
| 136 uint32_t max_pending_frames_; // Maximum number of pending input frames. | 130 uint32_t max_pending_frames_; // Maximum number of pending input frames. |
| 137 webrtc::H264BitstreamParser h264_bitstream_parser_; | 131 webrtc::H264BitstreamParser h264_bitstream_parser_; |
| 138 std::deque<rtc::Optional<uint8_t>> pending_frame_qps_; | 132 std::deque<rtc::Optional<uint8_t>> pending_frame_qps_; |
| 139 | 133 |
| 140 // State that is constant for the lifetime of this object once the ctor | 134 // State that is constant for the lifetime of this object once the ctor |
| 141 // returns. | 135 // returns. |
| 142 std::unique_ptr<Thread> | 136 rtc::ThreadChecker decode_thread_checker_; |
| 143 codec_thread_; // Thread on which to operate MediaCodec. | |
| 144 ScopedGlobalRef<jclass> j_media_codec_video_decoder_class_; | 137 ScopedGlobalRef<jclass> j_media_codec_video_decoder_class_; |
| 145 ScopedGlobalRef<jobject> j_media_codec_video_decoder_; | 138 ScopedGlobalRef<jobject> j_media_codec_video_decoder_; |
| 146 jmethodID j_init_decode_method_; | 139 jmethodID j_init_decode_method_; |
| 147 jmethodID j_reset_method_; | 140 jmethodID j_reset_method_; |
| 148 jmethodID j_release_method_; | 141 jmethodID j_release_method_; |
| 149 jmethodID j_dequeue_input_buffer_method_; | 142 jmethodID j_dequeue_input_buffer_method_; |
| 150 jmethodID j_queue_input_buffer_method_; | 143 jmethodID j_queue_input_buffer_method_; |
| 151 jmethodID j_dequeue_byte_buffer_method_; | 144 jmethodID j_dequeue_byte_buffer_method_; |
| 152 jmethodID j_dequeue_texture_buffer_method_; | 145 jmethodID j_dequeue_texture_buffer_method_; |
| 153 jmethodID j_return_decoded_byte_buffer_method_; | 146 jmethodID j_return_decoded_byte_buffer_method_; |
| (...skipping 27 matching lines...) |
| 181 }; | 174 }; |
| 182 | 175 |
| 183 MediaCodecVideoDecoder::MediaCodecVideoDecoder(JNIEnv* jni, | 176 MediaCodecVideoDecoder::MediaCodecVideoDecoder(JNIEnv* jni, |
| 184 VideoCodecType codecType, | 177 VideoCodecType codecType, |
| 185 jobject render_egl_context) | 178 jobject render_egl_context) |
| 186 : codecType_(codecType), | 179 : codecType_(codecType), |
| 187 render_egl_context_(render_egl_context), | 180 render_egl_context_(render_egl_context), |
| 188 key_frame_required_(true), | 181 key_frame_required_(true), |
| 189 inited_(false), | 182 inited_(false), |
| 190 sw_fallback_required_(false), | 183 sw_fallback_required_(false), |
| 191 codec_thread_(Thread::Create()), | |
| 192 j_media_codec_video_decoder_class_( | 184 j_media_codec_video_decoder_class_( |
| 193 jni, | 185 jni, |
| 194 FindClass(jni, "org/webrtc/MediaCodecVideoDecoder")), | 186 FindClass(jni, "org/webrtc/MediaCodecVideoDecoder")), |
| 195 j_media_codec_video_decoder_( | 187 j_media_codec_video_decoder_( |
| 196 jni, | 188 jni, |
| 197 jni->NewObject(*j_media_codec_video_decoder_class_, | 189 jni->NewObject(*j_media_codec_video_decoder_class_, |
| 198 GetMethodID(jni, | 190 GetMethodID(jni, |
| 199 *j_media_codec_video_decoder_class_, | 191 *j_media_codec_video_decoder_class_, |
| 200 "<init>", | 192 "<init>", |
| 201 "()V"))) { | 193 "()V"))) { |
| 202 codec_thread_->SetName("MediaCodecVideoDecoder", NULL); | 194 decode_thread_checker_.DetachFromThread(); |
| 203 RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoDecoder"; | |
| 204 | 195 |
| 205 j_init_decode_method_ = GetMethodID( | 196 j_init_decode_method_ = GetMethodID( |
| 206 jni, *j_media_codec_video_decoder_class_, "initDecode", | 197 jni, *j_media_codec_video_decoder_class_, "initDecode", |
| 207 "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;" | 198 "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;" |
| 208 "IILorg/webrtc/SurfaceTextureHelper;)Z"); | 199 "IILorg/webrtc/SurfaceTextureHelper;)Z"); |
| 209 j_reset_method_ = | 200 j_reset_method_ = |
| 210 GetMethodID(jni, *j_media_codec_video_decoder_class_, "reset", "(II)V"); | 201 GetMethodID(jni, *j_media_codec_video_decoder_class_, "reset", "(II)V"); |
| 211 j_release_method_ = | 202 j_release_method_ = |
| 212 GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V"); | 203 GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V"); |
| 213 j_dequeue_input_buffer_method_ = GetMethodID( | 204 j_dequeue_input_buffer_method_ = GetMethodID( |
| (...skipping 68 matching lines...) |
| 282 AllowBlockingCalls(); | 273 AllowBlockingCalls(); |
| 283 } | 274 } |
| 284 | 275 |
| 285 MediaCodecVideoDecoder::~MediaCodecVideoDecoder() { | 276 MediaCodecVideoDecoder::~MediaCodecVideoDecoder() { |
| 286 // Call Release() to ensure no more callbacks to us after we are deleted. | 277 // Call Release() to ensure no more callbacks to us after we are deleted. |
| 287 Release(); | 278 Release(); |
| 288 } | 279 } |
| 289 | 280 |
| 290 int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst, | 281 int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst, |
| 291 int32_t numberOfCores) { | 282 int32_t numberOfCores) { |
| | 283 RTC_DCHECK(decode_thread_checker_.CalledOnValidThread()); |
| 292 ALOGD << "InitDecode."; | 284 ALOGD << "InitDecode."; |
| 293 if (inst == NULL) { | 285 if (inst == NULL) { |
| 294 ALOGE << "NULL VideoCodec instance"; | 286 ALOGE << "NULL VideoCodec instance"; |
| 295 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 287 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 296 } | 288 } |
| 297 // Factory should guard against other codecs being used with us. | 289 // Factory should guard against other codecs being used with us. |
| 298 RTC_CHECK(inst->codecType == codecType_) | 290 RTC_DCHECK(inst->codecType == codecType_) |
| 299 << "Unsupported codec " << inst->codecType << " for " << codecType_; | 291 << "Unsupported codec " << inst->codecType << " for " << codecType_; |
| 300 | 292 |
| 301 if (sw_fallback_required_) { | 293 if (sw_fallback_required_) { |
| 302 ALOGE << "InitDecode() - fallback to SW decoder"; | 294 ALOGE << "InitDecode() - fallback to SW decoder"; |
| 303 return WEBRTC_VIDEO_CODEC_OK; | 295 return WEBRTC_VIDEO_CODEC_OK; |
| 304 } | 296 } |
| 305 // Save VideoCodec instance for later. | 297 // Save VideoCodec instance for later. |
| 306 if (&codec_ != inst) { | 298 if (&codec_ != inst) { |
| 307 codec_ = *inst; | 299 codec_ = *inst; |
| 308 } | 300 } |
| 309 // If maxFramerate is not set then assume 30 fps. | 301 // If maxFramerate is not set then assume 30 fps. |
| 310 codec_.maxFramerate = (codec_.maxFramerate >= 1) ? codec_.maxFramerate : 30; | 302 codec_.maxFramerate = (codec_.maxFramerate >= 1) ? codec_.maxFramerate : 30; |
| 311 | 303 |
| 312 // Call Java init. | 304 // Call Java init. |
| 313 return codec_thread_->Invoke<int32_t>( | 305 return InitDecodeInternal(); |
| 314 RTC_FROM_HERE, | |
| 315 Bind(&MediaCodecVideoDecoder::InitDecodeOnCodecThread, this)); | |
| 316 } | 306 } |
| 317 | 307 |
| 318 void MediaCodecVideoDecoder::ResetVariables() { | 308 void MediaCodecVideoDecoder::ResetVariables() { |
| 319 CheckOnCodecThread(); | 309 RTC_DCHECK(decode_thread_checker_.CalledOnValidThread()); |
| 320 | 310 |
| 321 key_frame_required_ = true; | 311 key_frame_required_ = true; |
| 322 frames_received_ = 0; | 312 frames_received_ = 0; |
| 323 frames_decoded_ = 0; | 313 frames_decoded_ = 0; |
| 324 frames_decoded_logged_ = kMaxDecodedLogFrames; | 314 frames_decoded_logged_ = kMaxDecodedLogFrames; |
| 325 start_time_ms_ = rtc::TimeMillis(); | 315 start_time_ms_ = rtc::TimeMillis(); |
| 326 current_frames_ = 0; | 316 current_frames_ = 0; |
| 327 current_bytes_ = 0; | 317 current_bytes_ = 0; |
| 328 current_decoding_time_ms_ = 0; | 318 current_decoding_time_ms_ = 0; |
| 329 current_delay_time_ms_ = 0; | 319 current_delay_time_ms_ = 0; |
| 330 pending_frame_qps_.clear(); | 320 pending_frame_qps_.clear(); |
| 331 } | 321 } |
| 332 | 322 |
| 333 int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() { | 323 int32_t MediaCodecVideoDecoder::InitDecodeInternal() { |
| 334 CheckOnCodecThread(); | 324 RTC_DCHECK(decode_thread_checker_.CalledOnValidThread()); |
| 335 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 325 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 336 ScopedLocalRefFrame local_ref_frame(jni); | 326 ScopedLocalRefFrame local_ref_frame(jni); |
| 337 ALOGD << "InitDecodeOnCodecThread Type: " << (int)codecType_ << ". " | 327 ALOGD << "InitDecodeInternal Type: " << (int)codecType_ << ". " |
| 338 << codec_.width << " x " << codec_.height << ". Fps: " << | 328 << codec_.width << " x " << codec_.height |
| 339 (int)codec_.maxFramerate; | 329 << ". Fps: " << (int)codec_.maxFramerate; |
| 340 | 330 |
| 341 // Release previous codec first if it was allocated before. | 331 // Release previous codec first if it was allocated before. |
| 342 int ret_val = ReleaseOnCodecThread(); | 332 int ret_val = Release(); |
| 343 if (ret_val < 0) { | 333 if (ret_val < 0) { |
| 344 ALOGE << "Release failure: " << ret_val << " - fallback to SW codec"; | 334 ALOGE << "Release failure: " << ret_val << " - fallback to SW codec"; |
| 345 sw_fallback_required_ = true; | 335 sw_fallback_required_ = true; |
| 346 return WEBRTC_VIDEO_CODEC_ERROR; | 336 return WEBRTC_VIDEO_CODEC_ERROR; |
| 347 } | 337 } |
| 348 | 338 |
| 349 ResetVariables(); | 339 ResetVariables(); |
| 350 | 340 |
| 351 if (use_surface_) { | 341 if (use_surface_) { |
| 352 surface_texture_helper_ = SurfaceTextureHelper::create( | 342 surface_texture_helper_ = SurfaceTextureHelper::create( |
| (...skipping 45 matching lines...) |
| 398 for (size_t i = 0; i < num_input_buffers; ++i) { | 388 for (size_t i = 0; i < num_input_buffers; ++i) { |
| 399 input_buffers_[i] = | 389 input_buffers_[i] = |
| 400 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i)); | 390 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i)); |
| 401 if (CheckException(jni)) { | 391 if (CheckException(jni)) { |
| 402 ALOGE << "NewGlobalRef error - fallback to SW codec."; | 392 ALOGE << "NewGlobalRef error - fallback to SW codec."; |
| 403 sw_fallback_required_ = true; | 393 sw_fallback_required_ = true; |
| 404 return WEBRTC_VIDEO_CODEC_ERROR; | 394 return WEBRTC_VIDEO_CODEC_ERROR; |
| 405 } | 395 } |
| 406 } | 396 } |
| 407 | 397 |
| 408 codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollMs, this); | |
| 409 | |
| 410 return WEBRTC_VIDEO_CODEC_OK; | 398 return WEBRTC_VIDEO_CODEC_OK; |
| 411 } | 399 } |
| 412 | 400 |
| 413 int32_t MediaCodecVideoDecoder::ResetDecodeOnCodecThread() { | 401 int32_t MediaCodecVideoDecoder::ResetDecode() { |
| 414 CheckOnCodecThread(); | 402 RTC_DCHECK(decode_thread_checker_.CalledOnValidThread()); |
| 415 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 403 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 416 ScopedLocalRefFrame local_ref_frame(jni); | 404 ScopedLocalRefFrame local_ref_frame(jni); |
| 417 ALOGD << "ResetDecodeOnCodecThread Type: " << (int)codecType_ << ". " | 405 ALOGD << "ResetDecode Type: " << (int)codecType_ << ". " << codec_.width |
| 418 << codec_.width << " x " << codec_.height; | 406 << " x " << codec_.height; |
| 419 ALOGD << " Frames received: " << frames_received_ << | 407 ALOGD << " Frames received: " << frames_received_ << |
| 420 ". Frames decoded: " << frames_decoded_; | 408 ". Frames decoded: " << frames_decoded_; |
| 421 | 409 |
| 422 inited_ = false; | 410 inited_ = false; |
| 423 rtc::MessageQueueManager::Clear(this); | |
| 424 ResetVariables(); | 411 ResetVariables(); |
| 425 | 412 |
| 426 jni->CallVoidMethod( | 413 jni->CallVoidMethod( |
| 427 *j_media_codec_video_decoder_, | 414 *j_media_codec_video_decoder_, |
| 428 j_reset_method_, | 415 j_reset_method_, |
| 429 codec_.width, | 416 codec_.width, |
| 430 codec_.height); | 417 codec_.height); |
| 431 | 418 |
| 432 if (CheckException(jni)) { | 419 if (CheckException(jni)) { |
| 433 ALOGE << "Soft reset error - fallback to SW codec."; | 420 ALOGE << "Soft reset error - fallback to SW codec."; |
| 434 sw_fallback_required_ = true; | 421 sw_fallback_required_ = true; |
| 435 return WEBRTC_VIDEO_CODEC_ERROR; | 422 return WEBRTC_VIDEO_CODEC_ERROR; |
| 436 } | 423 } |
| 437 inited_ = true; | 424 inited_ = true; |
| 438 | 425 |
| 439 codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollMs, this); | |
| 440 | |
| 441 return WEBRTC_VIDEO_CODEC_OK; | 426 return WEBRTC_VIDEO_CODEC_OK; |
| 442 } | 427 } |
| 443 | 428 |
| 444 int32_t MediaCodecVideoDecoder::Release() { | 429 int32_t MediaCodecVideoDecoder::Release() { |
| 445 ALOGD << "DecoderRelease request"; | 430 ALOGD << "DecoderRelease request"; |
| 446 return codec_thread_->Invoke<int32_t>( | |
| 447 RTC_FROM_HERE, Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this)); | |
| 448 } | |
| 449 | |
| 450 int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() { | |
| 451 if (!inited_) { | 431 if (!inited_) { |
| 452 return WEBRTC_VIDEO_CODEC_OK; | 432 return WEBRTC_VIDEO_CODEC_OK; |
| 453 } | 433 } |
| 454 CheckOnCodecThread(); | |
| 455 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 434 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 456 ALOGD << "DecoderReleaseOnCodecThread: Frames received: " << | 435 ALOGD << "DecoderRelease: Frames received: " << frames_received_ |
| 457 frames_received_ << ". Frames decoded: " << frames_decoded_; | 436 << ". Frames decoded: " << frames_decoded_; |
| 458 ScopedLocalRefFrame local_ref_frame(jni); | 437 ScopedLocalRefFrame local_ref_frame(jni); |
| 459 for (size_t i = 0; i < input_buffers_.size(); i++) { | 438 for (size_t i = 0; i < input_buffers_.size(); i++) { |
| 460 jni->DeleteGlobalRef(input_buffers_[i]); | 439 jni->DeleteGlobalRef(input_buffers_[i]); |
| 461 } | 440 } |
| 462 input_buffers_.clear(); | 441 input_buffers_.clear(); |
| 463 jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_); | 442 jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_); |
| 464 surface_texture_helper_ = nullptr; | 443 surface_texture_helper_ = nullptr; |
| 465 inited_ = false; | 444 inited_ = false; |
| 466 rtc::MessageQueueManager::Clear(this); | |
| 467 if (CheckException(jni)) { | 445 if (CheckException(jni)) { |
| 468 ALOGE << "Decoder release exception"; | 446 ALOGE << "Decoder release exception"; |
| 469 return WEBRTC_VIDEO_CODEC_ERROR; | 447 return WEBRTC_VIDEO_CODEC_ERROR; |
| 470 } | 448 } |
| 471 ALOGD << "DecoderReleaseOnCodecThread done"; | 449 ALOGD << "DecoderRelease done"; |
| 472 return WEBRTC_VIDEO_CODEC_OK; | 450 return WEBRTC_VIDEO_CODEC_OK; |
| 473 } | 451 } |
| 474 | 452 |
| 475 void MediaCodecVideoDecoder::CheckOnCodecThread() { | |
| 476 RTC_CHECK(codec_thread_.get() == ThreadManager::Instance()->CurrentThread()) | |
| 477 << "Running on wrong thread!"; | |
| 478 } | |
| 479 | |
| 480 void MediaCodecVideoDecoder::EnableFrameLogOnWarning() { | 453 void MediaCodecVideoDecoder::EnableFrameLogOnWarning() { |
| 481 // Log next 2 output frames. | 454 // Log next 2 output frames. |
| | 455 static const int kMaxWarningLogFrames = 2; |
| 482 frames_decoded_logged_ = std::max( | 456 frames_decoded_logged_ = std::max( |
| 483 frames_decoded_logged_, frames_decoded_ + kMaxWarningLogFrames); | 457 frames_decoded_logged_, frames_decoded_ + kMaxWarningLogFrames); |
| 484 } | 458 } |
| 485 | 459 |
| 486 int32_t MediaCodecVideoDecoder::ProcessHWErrorOnCodecThread() { | 460 int32_t MediaCodecVideoDecoder::ProcessHWError() { |
| 487 CheckOnCodecThread(); | 461 RTC_DCHECK(decode_thread_checker_.CalledOnValidThread()); |
| 488 int ret_val = ReleaseOnCodecThread(); | 462 int ret_val = Release(); |
| 489 if (ret_val < 0) { | 463 if (ret_val < 0) { |
| 490 ALOGE << "ProcessHWError: Release failure"; | 464 ALOGE << "ProcessHWError: Release failure"; |
| 491 } | 465 } |
| 492 if (codecType_ == kVideoCodecH264) { | 466 if (codecType_ == kVideoCodecH264) { |
| 493 // For now there is no SW H.264 which can be used as fallback codec. | 467 // For now there is no SW H.264 which can be used as fallback codec. |
| 494 // So try to restart hw codec for now. | 468 // So try to restart hw codec for now. |
| 495 ret_val = InitDecodeOnCodecThread(); | 469 ret_val = InitDecodeInternal(); |
| 496 ALOGE << "Reset H.264 codec done. Status: " << ret_val; | 470 ALOGE << "Reset H.264 codec done. Status: " << ret_val; |
| 497 if (ret_val == WEBRTC_VIDEO_CODEC_OK) { | 471 if (ret_val == WEBRTC_VIDEO_CODEC_OK) { |
| 498 // H.264 codec was successfully reset - return regular error code. | 472 // H.264 codec was successfully reset - return regular error code. |
| 499 return WEBRTC_VIDEO_CODEC_ERROR; | 473 return WEBRTC_VIDEO_CODEC_ERROR; |
| 500 } else { | 474 } else { |
| 501 // Fail to restart H.264 codec - return error code which should stop the | 475 // Fail to restart H.264 codec - return error code which should stop the |
| 502 // call. | 476 // call. |
| 503 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; | 477 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; |
| 504 } | 478 } |
| 505 } else { | 479 } else { |
| 506 sw_fallback_required_ = true; | 480 sw_fallback_required_ = true; |
| 507 ALOGE << "Return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE"; | 481 ALOGE << "Return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE"; |
| 508 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; | 482 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; |
| 509 } | 483 } |
| 510 } | 484 } |
| 511 | 485 |
| 512 int32_t MediaCodecVideoDecoder::Decode( | 486 int32_t MediaCodecVideoDecoder::Decode( |
| 513 const EncodedImage& inputImage, | 487 const EncodedImage& inputImage, |
| 514 bool missingFrames, | 488 bool missingFrames, |
| 515 const RTPFragmentationHeader* fragmentation, | 489 const RTPFragmentationHeader* fragmentation, |
| 516 const CodecSpecificInfo* codecSpecificInfo, | 490 const CodecSpecificInfo* codecSpecificInfo, |
| 517 int64_t renderTimeMs) { | 491 int64_t renderTimeMs) { |
| | 492 RTC_DCHECK(decode_thread_checker_.CalledOnValidThread()); |
| | 493 RTC_DCHECK(callback_); |
| | 494 RTC_DCHECK(inited_); |
| | 495 |
| 518 if (sw_fallback_required_) { | 496 if (sw_fallback_required_) { |
| 519 ALOGE << "Decode() - fallback to SW codec"; | 497 ALOGE << "Decode() - fallback to SW codec"; |
| 520 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; | 498 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; |
| 521 } | 499 } |
| 522 if (callback_ == NULL) { | |
| 523 ALOGE << "Decode() - callback_ is NULL"; | |
| 524 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | |
| 525 } | |
| 526 if (inputImage._buffer == NULL && inputImage._length > 0) { | 500 if (inputImage._buffer == NULL && inputImage._length > 0) { |
| 527 ALOGE << "Decode() - inputImage is incorrect"; | 501 ALOGE << "Decode() - inputImage is incorrect"; |
| 528 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 502 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 529 } | 503 } |
| 530 if (!inited_) { | |
| 531 ALOGE << "Decode() - decoder is not initialized"; | |
| 532 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | |
| 533 } | |
| 534 | 504 |
| 535 // Check if encoded frame dimension has changed. | 505 // Check if encoded frame dimension has changed. |
| 536 if ((inputImage._encodedWidth * inputImage._encodedHeight > 0) && | 506 if ((inputImage._encodedWidth * inputImage._encodedHeight > 0) && |
| 537 (inputImage._encodedWidth != codec_.width || | 507 (inputImage._encodedWidth != codec_.width || |
| 538 inputImage._encodedHeight != codec_.height)) { | 508 inputImage._encodedHeight != codec_.height)) { |
| 539 ALOGW << "Input resolution changed from " << | 509 ALOGW << "Input resolution changed from " << |
| 540 codec_.width << " x " << codec_.height << " to " << | 510 codec_.width << " x " << codec_.height << " to " << |
| 541 inputImage._encodedWidth << " x " << inputImage._encodedHeight; | 511 inputImage._encodedWidth << " x " << inputImage._encodedHeight; |
| 542 codec_.width = inputImage._encodedWidth; | 512 codec_.width = inputImage._encodedWidth; |
| 543 codec_.height = inputImage._encodedHeight; | 513 codec_.height = inputImage._encodedHeight; |
| 544 int32_t ret; | 514 int32_t ret; |
| 545 if (use_surface_ && | 515 if (use_surface_ && |
| 546 (codecType_ == kVideoCodecVP8 || codecType_ == kVideoCodecH264)) { | 516 (codecType_ == kVideoCodecVP8 || codecType_ == kVideoCodecH264)) { |
| 547 // Soft codec reset - only for surface decoding. | 517 // Soft codec reset - only for surface decoding. |
| 548 ret = codec_thread_->Invoke<int32_t>( | 518 ret = ResetDecode(); |
| 549 RTC_FROM_HERE, | |
| 550 Bind(&MediaCodecVideoDecoder::ResetDecodeOnCodecThread, this)); | |
| 551 } else { | 519 } else { |
| 552 // Hard codec reset. | 520 // Hard codec reset. |
| 553 ret = InitDecode(&codec_, 1); | 521 ret = InitDecode(&codec_, 1); |
| 554 } | 522 } |
| 555 if (ret < 0) { | 523 if (ret < 0) { |
| 556 ALOGE << "InitDecode failure: " << ret << " - fallback to SW codec"; | 524 ALOGE << "InitDecode failure: " << ret << " - fallback to SW codec"; |
| 557 sw_fallback_required_ = true; | 525 sw_fallback_required_ = true; |
| 558 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; | 526 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; |
| 559 } | 527 } |
| 560 } | 528 } |
| 561 | 529 |
| 562 // Always start with a complete key frame. | 530 // Always start with a complete key frame. |
| 563 if (key_frame_required_) { | 531 if (key_frame_required_) { |
| 564 if (inputImage._frameType != webrtc::kVideoFrameKey) { | 532 if (inputImage._frameType != webrtc::kVideoFrameKey) { |
| 565 ALOGE << "Decode() - key frame is required"; | 533 ALOGE << "Decode() - key frame is required"; |
| 566 return WEBRTC_VIDEO_CODEC_ERROR; | 534 return WEBRTC_VIDEO_CODEC_ERROR; |
| 567 } | 535 } |
| 568 if (!inputImage._completeFrame) { | 536 if (!inputImage._completeFrame) { |
| 569 ALOGE << "Decode() - complete frame is required"; | 537 ALOGE << "Decode() - complete frame is required"; |
| 570 return WEBRTC_VIDEO_CODEC_ERROR; | 538 return WEBRTC_VIDEO_CODEC_ERROR; |
| 571 } | 539 } |
| 572 key_frame_required_ = false; | 540 key_frame_required_ = false; |
| 573 } | 541 } |
| 574 if (inputImage._length == 0) { | 542 if (inputImage._length == 0) { |
| 575 return WEBRTC_VIDEO_CODEC_ERROR; | 543 return WEBRTC_VIDEO_CODEC_ERROR; |
| 576 } | 544 } |
| 577 | 545 |
| 578 return codec_thread_->Invoke<int32_t>( | 546 RTC_DCHECK(decode_thread_checker_.CalledOnValidThread()); |
| 579 RTC_FROM_HERE, | |
| 580 Bind(&MediaCodecVideoDecoder::DecodeOnCodecThread, this, inputImage)); | |
| 581 } | |
| 582 | |
| 583 int32_t MediaCodecVideoDecoder::DecodeOnCodecThread( | |
| 584 const EncodedImage& inputImage) { | |
| 585 CheckOnCodecThread(); | |
| 586 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 547 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 587 ScopedLocalRefFrame local_ref_frame(jni); | 548 ScopedLocalRefFrame local_ref_frame(jni); |
| 588 | 549 |
| 589 // Try to drain the decoder and wait until output is not too | 550 // Try to drain the decoder and wait until output is not too |
| 590 // much behind the input. | 551 // much behind the input. |
| 591 if (codecType_ == kVideoCodecH264 && | 552 if (codecType_ == kVideoCodecH264 && |
| 592 frames_received_ > frames_decoded_ + max_pending_frames_) { | 553 frames_received_ > frames_decoded_ + max_pending_frames_) { |
| 593 // Print warning for H.264 only - for VP8/VP9 one frame delay is ok. | 554 // Print warning for H.264 only - for VP8/VP9 one frame delay is ok. |
| 594 ALOGW << "Decoder is too far behind. Try to drain. Received: " << | 555 ALOGW << "Decoder is too far behind. Try to drain. Received: " << |
| 595 frames_received_ << ". Decoded: " << frames_decoded_; | 556 frames_received_ << ". Decoded: " << frames_decoded_; |
| 596 EnableFrameLogOnWarning(); | 557 EnableFrameLogOnWarning(); |
| 597 } | 558 } |
| 598 const int64 drain_start = rtc::TimeMillis(); | 559 const int64 drain_start = rtc::TimeMillis(); |
| 599 while ((frames_received_ > frames_decoded_ + max_pending_frames_) && | 560 while ((frames_received_ > frames_decoded_ + max_pending_frames_) && |
| 600 (rtc::TimeMillis() - drain_start) < kMediaCodecTimeoutMs) { | 561 (rtc::TimeMillis() - drain_start) < kMediaCodecTimeoutMs) { |
| 601 if (!DeliverPendingOutputs(jni, kMediaCodecPollMs)) { | 562 if (!DeliverPendingOutputs(jni, kMediaCodecPollMs)) { |
| 602 ALOGE << "DeliverPendingOutputs error. Frames received: " << | 563 ALOGE << "DeliverPendingOutputs error. Frames received: " << |
| 603 frames_received_ << ". Frames decoded: " << frames_decoded_; | 564 frames_received_ << ". Frames decoded: " << frames_decoded_; |
| 604 return ProcessHWErrorOnCodecThread(); | 565 return ProcessHWError(); |
| 605 } | 566 } |
| 606 } | 567 } |
| 607 if (frames_received_ > frames_decoded_ + max_pending_frames_) { | 568 if (frames_received_ > frames_decoded_ + max_pending_frames_) { |
| 608 ALOGE << "Output buffer dequeue timeout. Frames received: " << | 569 ALOGE << "Output buffer dequeue timeout. Frames received: " << |
| 609 frames_received_ << ". Frames decoded: " << frames_decoded_; | 570 frames_received_ << ". Frames decoded: " << frames_decoded_; |
| 610 return ProcessHWErrorOnCodecThread(); | 571 return ProcessHWError(); |
| 611 } | 572 } |
| 612 | 573 |
| 613 // Get input buffer. | 574 // Get input buffer. |
| 614 int j_input_buffer_index = jni->CallIntMethod( | 575 int j_input_buffer_index = jni->CallIntMethod( |
| 615 *j_media_codec_video_decoder_, j_dequeue_input_buffer_method_); | 576 *j_media_codec_video_decoder_, j_dequeue_input_buffer_method_); |
| 616 if (CheckException(jni) || j_input_buffer_index < 0) { | 577 if (CheckException(jni) || j_input_buffer_index < 0) { |
| 617 ALOGE << "dequeueInputBuffer error: " << j_input_buffer_index << | 578 ALOGE << "dequeueInputBuffer error: " << j_input_buffer_index << |
| 618 ". Retry DeliverPendingOutputs."; | 579 ". Retry DeliverPendingOutputs."; |
| 619 EnableFrameLogOnWarning(); | 580 EnableFrameLogOnWarning(); |
| 620 // Try to drain the decoder. | 581 // Try to drain the decoder. |
| 621 if (!DeliverPendingOutputs(jni, kMediaCodecPollMs)) { | 582 if (!DeliverPendingOutputs(jni, kMediaCodecPollMs)) { |
| 622 ALOGE << "DeliverPendingOutputs error. Frames received: " << | 583 ALOGE << "DeliverPendingOutputs error. Frames received: " << |
| 623 frames_received_ << ". Frames decoded: " << frames_decoded_; | 584 frames_received_ << ". Frames decoded: " << frames_decoded_; |
| 624 return ProcessHWErrorOnCodecThread(); | 585 return ProcessHWError(); |
| 625 } | 586 } |
| 626 // Try dequeue input buffer one last time. | 587 // Try dequeue input buffer one last time. |
| 627 j_input_buffer_index = jni->CallIntMethod( | 588 j_input_buffer_index = jni->CallIntMethod( |
| 628 *j_media_codec_video_decoder_, j_dequeue_input_buffer_method_); | 589 *j_media_codec_video_decoder_, j_dequeue_input_buffer_method_); |
| 629 if (CheckException(jni) || j_input_buffer_index < 0) { | 590 if (CheckException(jni) || j_input_buffer_index < 0) { |
| 630 ALOGE << "dequeueInputBuffer critical error: " << j_input_buffer_index; | 591 ALOGE << "dequeueInputBuffer critical error: " << j_input_buffer_index; |
| 631 return ProcessHWErrorOnCodecThread(); | 592 return ProcessHWError(); |
| 632 } | 593 } |
| 633 } | 594 } |
| 634 | 595 |
| 635 // Copy encoded data to Java ByteBuffer. | 596 // Copy encoded data to Java ByteBuffer. |
| 636 jobject j_input_buffer = input_buffers_[j_input_buffer_index]; | 597 jobject j_input_buffer = input_buffers_[j_input_buffer_index]; |
| 637 uint8_t* buffer = | 598 uint8_t* buffer = |
| 638 reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer)); | 599 reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer)); |
| 639 RTC_CHECK(buffer) << "Indirect buffer??"; | 600 RTC_DCHECK(buffer) << "Indirect buffer??"; |
| 640 int64_t buffer_capacity = jni->GetDirectBufferCapacity(j_input_buffer); | 601 int64_t buffer_capacity = jni->GetDirectBufferCapacity(j_input_buffer); |
| 641 if (CheckException(jni) || buffer_capacity < inputImage._length) { | 602 if (CheckException(jni) || buffer_capacity < inputImage._length) { |
| 642 ALOGE << "Input frame size "<< inputImage._length << | 603 ALOGE << "Input frame size "<< inputImage._length << |
| 643 " is bigger than buffer size " << buffer_capacity; | 604 " is bigger than buffer size " << buffer_capacity; |
| 644 return ProcessHWErrorOnCodecThread(); | 605 return ProcessHWError(); |
| 645 } | 606 } |
| 646 jlong presentation_timestamp_us = static_cast<jlong>( | 607 jlong presentation_timestamp_us = static_cast<jlong>( |
| 647 static_cast<int64_t>(frames_received_) * 1000000 / codec_.maxFramerate); | 608 static_cast<int64_t>(frames_received_) * 1000000 / codec_.maxFramerate); |
| 648 memcpy(buffer, inputImage._buffer, inputImage._length); | 609 memcpy(buffer, inputImage._buffer, inputImage._length); |
| 649 | 610 |
| 650 if (frames_decoded_ < frames_decoded_logged_) { | 611 if (frames_decoded_ < frames_decoded_logged_) { |
| 651 ALOGD << "Decoder frame in # " << frames_received_ << | 612 ALOGD << "Decoder frame in # " << frames_received_ << |
| 652 ". Type: " << inputImage._frameType << | 613 ". Type: " << inputImage._frameType << |
| 653 ". Buffer # " << j_input_buffer_index << | 614 ". Buffer # " << j_input_buffer_index << |
| 654 ". TS: " << presentation_timestamp_us / 1000 << | 615 ". TS: " << presentation_timestamp_us / 1000 << |
| (...skipping 23 matching lines...) |
| 678 bool success = jni->CallBooleanMethod( | 639 bool success = jni->CallBooleanMethod( |
| 679 *j_media_codec_video_decoder_, | 640 *j_media_codec_video_decoder_, |
| 680 j_queue_input_buffer_method_, | 641 j_queue_input_buffer_method_, |
| 681 j_input_buffer_index, | 642 j_input_buffer_index, |
| 682 inputImage._length, | 643 inputImage._length, |
| 683 presentation_timestamp_us, | 644 presentation_timestamp_us, |
| 684 static_cast<int64_t> (inputImage._timeStamp), | 645 static_cast<int64_t> (inputImage._timeStamp), |
| 685 inputImage.ntp_time_ms_); | 646 inputImage.ntp_time_ms_); |
| 686 if (CheckException(jni) || !success) { | 647 if (CheckException(jni) || !success) { |
| 687 ALOGE << "queueInputBuffer error"; | 648 ALOGE << "queueInputBuffer error"; |
| 688 return ProcessHWErrorOnCodecThread(); | 649 return ProcessHWError(); |
| 689 } | 650 } |
| 690 | 651 |
| 691 // Try to drain the decoder | 652 // Try to drain the decoder |
| 692 if (!DeliverPendingOutputs(jni, 0)) { | 653 if (!DeliverPendingOutputs(jni, 0)) { |
| 693 ALOGE << "DeliverPendingOutputs error"; | 654 ALOGE << "DeliverPendingOutputs error"; |
| 694 return ProcessHWErrorOnCodecThread(); | 655 return ProcessHWError(); |
| 695 } | 656 } |
| 696 | 657 |
| 697 return WEBRTC_VIDEO_CODEC_OK; | 658 return WEBRTC_VIDEO_CODEC_OK; |
| 698 } | 659 } |
| 699 | 660 |
| 700 bool MediaCodecVideoDecoder::DeliverPendingOutputs( | 661 void MediaCodecVideoDecoder::PollDecodedFrames() { |
| 701 JNIEnv* jni, int dequeue_timeout_ms) { | 662 RTC_DCHECK(decode_thread_checker_.CalledOnValidThread()); |
| 702 CheckOnCodecThread(); | 663 |
| | 664 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| | 665 ScopedLocalRefFrame local_ref_frame(jni); |
| | 666 |
| | 667 if (!DeliverPendingOutputs(jni, 0)) { |
| | 668 ALOGE << "PollDecodedFrames: DeliverPendingOutputs error"; |
| | 669 ProcessHWError(); |
| | 670 } |
| | 671 } |
| | 672 |
| | 673 bool MediaCodecVideoDecoder::DeliverPendingOutputs(JNIEnv* jni, |
| | 674 int dequeue_timeout_ms) { |
| | 675 RTC_DCHECK(decode_thread_checker_.CalledOnValidThread()); |
| | 676 |
| 703 if (frames_received_ <= frames_decoded_) { | 677 if (frames_received_ <= frames_decoded_) { |
| 704 // No need to query for output buffers - decoder is drained. | 678 // No need to query for output buffers - decoder is drained. |
| 705 return true; | 679 return true; |
| 706 } | 680 } |
| 707 // Get decoder output. | 681 // Get decoder output. |
| 708 jobject j_decoder_output_buffer = | 682 jobject j_decoder_output_buffer = |
| 709 jni->CallObjectMethod(*j_media_codec_video_decoder_, | 683 jni->CallObjectMethod(*j_media_codec_video_decoder_, |
| 710 use_surface_ ? j_dequeue_texture_buffer_method_ | 684 use_surface_ ? j_dequeue_texture_buffer_method_ |
| 711 : j_dequeue_byte_buffer_method_, | 685 : j_dequeue_byte_buffer_method_, |
| 712 dequeue_timeout_ms); | 686 dequeue_timeout_ms); |
| (...skipping 88 matching lines...) |
| 801 output_buffer)); | 775 output_buffer)); |
| 802 if (CheckException(jni)) { | 776 if (CheckException(jni)) { |
| 803 return false; | 777 return false; |
| 804 } | 778 } |
| 805 payload += output_buffer_offset; | 779 payload += output_buffer_offset; |
| 806 | 780 |
| 807 // Create yuv420 frame. | 781 // Create yuv420 frame. |
| 808 rtc::scoped_refptr<webrtc::I420Buffer> i420_buffer = | 782 rtc::scoped_refptr<webrtc::I420Buffer> i420_buffer = |
| 809 decoded_frame_pool_.CreateBuffer(width, height); | 783 decoded_frame_pool_.CreateBuffer(width, height); |
| 810 if (color_format == COLOR_FormatYUV420Planar) { | 784 if (color_format == COLOR_FormatYUV420Planar) { |
| 811 RTC_CHECK_EQ(0, stride % 2); | 785 RTC_DCHECK_EQ(0, stride % 2); |
| 812 const int uv_stride = stride / 2; | 786 const int uv_stride = stride / 2; |
| 813 const uint8_t* y_ptr = payload; | 787 const uint8_t* y_ptr = payload; |
| 814 const uint8_t* u_ptr = y_ptr + stride * slice_height; | 788 const uint8_t* u_ptr = y_ptr + stride * slice_height; |
| 815 | 789 |
| 816 // Note that the case with odd |slice_height| is handled in a special way. | 790 // Note that the case with odd |slice_height| is handled in a special way. |
| 817 // The chroma height contained in the payload is rounded down instead of | 791 // The chroma height contained in the payload is rounded down instead of |
| 818 // up, making it one row less than what we expect in WebRTC. Therefore, we | 792 // up, making it one row less than what we expect in WebRTC. Therefore, we |
| 819 // have to duplicate the last chroma rows for this case. Also, the offset | 793 // have to duplicate the last chroma rows for this case. Also, the offset |
| 820 // between the Y plane and the U plane is unintuitive for this case. See | 794 // between the Y plane and the U plane is unintuitive for this case. See |
| 821 // http://bugs.webrtc.org/6651 for more info. | 795 // http://bugs.webrtc.org/6651 for more info. |
| 822 const int chroma_width = (width + 1) / 2; | 796 const int chroma_width = (width + 1) / 2; |
| 823 const int chroma_height = | 797 const int chroma_height = |
| 824 (slice_height % 2 == 0) ? (height + 1) / 2 : height / 2; | 798 (slice_height % 2 == 0) ? (height + 1) / 2 : height / 2; |
| 825 const int u_offset = uv_stride * slice_height / 2; | 799 const int u_offset = uv_stride * slice_height / 2; |
| 826 const uint8_t* v_ptr = u_ptr + u_offset; | 800 const uint8_t* v_ptr = u_ptr + u_offset; |
| 827 libyuv::CopyPlane(y_ptr, stride, | 801 libyuv::CopyPlane(y_ptr, stride, |
| 828 i420_buffer->MutableDataY(), i420_buffer->StrideY(), | 802 i420_buffer->MutableDataY(), i420_buffer->StrideY(), |
| 829 width, height); | 803 width, height); |
| 830 libyuv::CopyPlane(u_ptr, uv_stride, | 804 libyuv::CopyPlane(u_ptr, uv_stride, |
| 831 i420_buffer->MutableDataU(), i420_buffer->StrideU(), | 805 i420_buffer->MutableDataU(), i420_buffer->StrideU(), |
| 832 chroma_width, chroma_height); | 806 chroma_width, chroma_height); |
| 833 libyuv::CopyPlane(v_ptr, uv_stride, | 807 libyuv::CopyPlane(v_ptr, uv_stride, |
| 834 i420_buffer->MutableDataV(), i420_buffer->StrideV(), | 808 i420_buffer->MutableDataV(), i420_buffer->StrideV(), |
| 835 chroma_width, chroma_height); | 809 chroma_width, chroma_height); |
| 836 if (slice_height % 2 == 1) { | 810 if (slice_height % 2 == 1) { |
| 837 RTC_CHECK_EQ(height, slice_height); | 811 RTC_DCHECK_EQ(height, slice_height); |
| 838 // Duplicate the last chroma rows. | 812 // Duplicate the last chroma rows. |
| 839 uint8_t* u_last_row_ptr = i420_buffer->MutableDataU() + | 813 uint8_t* u_last_row_ptr = i420_buffer->MutableDataU() + |
| 840 chroma_height * i420_buffer->StrideU(); | 814 chroma_height * i420_buffer->StrideU(); |
| 841 memcpy(u_last_row_ptr, u_last_row_ptr - i420_buffer->StrideU(), | 815 memcpy(u_last_row_ptr, u_last_row_ptr - i420_buffer->StrideU(), |
| 842 i420_buffer->StrideU()); | 816 i420_buffer->StrideU()); |
| 843 uint8_t* v_last_row_ptr = i420_buffer->MutableDataV() + | 817 uint8_t* v_last_row_ptr = i420_buffer->MutableDataV() + |
| 844 chroma_height * i420_buffer->StrideV(); | 818 chroma_height * i420_buffer->StrideV(); |
| 845 memcpy(v_last_row_ptr, v_last_row_ptr - i420_buffer->StrideV(), | 819 memcpy(v_last_row_ptr, v_last_row_ptr - i420_buffer->StrideV(), |
| 846 i420_buffer->StrideV()); | 820 i420_buffer->StrideV()); |
| 847 } | 821 } |
| (...skipping 57 matching lines...) |
| 905 if (frame_buffer) { | 879 if (frame_buffer) { |
| 906 VideoFrame decoded_frame(frame_buffer, 0, 0, webrtc::kVideoRotation_0); | 880 VideoFrame decoded_frame(frame_buffer, 0, 0, webrtc::kVideoRotation_0); |
| 907 decoded_frame.set_timestamp(output_timestamps_ms); | 881 decoded_frame.set_timestamp(output_timestamps_ms); |
| 908 decoded_frame.set_ntp_time_ms(output_ntp_timestamps_ms); | 882 decoded_frame.set_ntp_time_ms(output_ntp_timestamps_ms); |
| 909 | 883 |
| 910 rtc::Optional<uint8_t> qp = pending_frame_qps_.front(); | 884 rtc::Optional<uint8_t> qp = pending_frame_qps_.front(); |
| 911 pending_frame_qps_.pop_front(); | 885 pending_frame_qps_.pop_front(); |
| 912 callback_->Decoded(decoded_frame, rtc::Optional<int32_t>(decode_time_ms), | 886 callback_->Decoded(decoded_frame, rtc::Optional<int32_t>(decode_time_ms), |
| 913 qp); | 887 qp); |
| 914 } | 888 } |
| | 889 |
| 915 return true; | 890 return true; |
| 916 } | 891 } |
| 917 | 892 |
| 918 int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback( | 893 int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback( |
| 919 DecodedImageCallback* callback) { | 894 DecodedImageCallback* callback) { |
| 920 callback_ = callback; | 895 callback_ = callback; |
| 921 return WEBRTC_VIDEO_CODEC_OK; | 896 return WEBRTC_VIDEO_CODEC_OK; |
| 922 } | 897 } |
| 923 | 898 |
| 924 void MediaCodecVideoDecoder::OnMessage(rtc::Message* msg) { | |
| 925 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | |
| 926 ScopedLocalRefFrame local_ref_frame(jni); | |
| 927 if (!inited_) { | |
| 928 return; | |
| 929 } | |
| 930 // We only ever send one message to |this| directly (not through a Bind()'d | |
| 931 // functor), so expect no ID/data. | |
| 932 RTC_CHECK(!msg->message_id) << "Unexpected message!"; | |
| 933 RTC_CHECK(!msg->pdata) << "Unexpected message!"; | |
| 934 CheckOnCodecThread(); | |
| 935 | |
| 936 if (!DeliverPendingOutputs(jni, 0)) { | |
| 937 ALOGE << "OnMessage: DeliverPendingOutputs error"; | |
| 938 ProcessHWErrorOnCodecThread(); | |
| 939 return; | |
| 940 } | |
| 941 codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollMs, this); | |
| 942 } | |
| 943 | |
| 944 MediaCodecVideoDecoderFactory::MediaCodecVideoDecoderFactory() | 899 MediaCodecVideoDecoderFactory::MediaCodecVideoDecoderFactory() |
| 945 : egl_context_(nullptr) { | 900 : egl_context_(nullptr) { |
| 946 ALOGD << "MediaCodecVideoDecoderFactory ctor"; | 901 ALOGD << "MediaCodecVideoDecoderFactory ctor"; |
| 947 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 902 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 948 ScopedLocalRefFrame local_ref_frame(jni); | 903 ScopedLocalRefFrame local_ref_frame(jni); |
| 949 jclass j_decoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoDecoder"); | 904 jclass j_decoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoDecoder"); |
| 950 supported_codec_types_.clear(); | 905 supported_codec_types_.clear(); |
| 951 | 906 |
| 952 bool is_vp8_hw_supported = jni->CallStaticBooleanMethod( | 907 bool is_vp8_hw_supported = jni->CallStaticBooleanMethod( |
| 953 j_decoder_class, | 908 j_decoder_class, |
| (...skipping 72 matching lines...) |
| 1026 webrtc::VideoDecoder* decoder) { | 981 webrtc::VideoDecoder* decoder) { |
| 1027 ALOGD << "Destroy video decoder."; | 982 ALOGD << "Destroy video decoder."; |
| 1028 delete decoder; | 983 delete decoder; |
| 1029 } | 984 } |
| 1030 | 985 |
| 1031 const char* MediaCodecVideoDecoder::ImplementationName() const { | 986 const char* MediaCodecVideoDecoder::ImplementationName() const { |
| 1032 return "MediaCodec"; | 987 return "MediaCodec"; |
| 1033 } | 988 } |
| 1034 | 989 |
| 1035 } // namespace webrtc_jni | 990 } // namespace webrtc_jni |
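The net effect of this change is that the decoder no longer owns a dedicated codec thread that drains MediaCodec output by posting delayed messages to itself (the removed rtc::MessageHandler/OnMessage() loop); instead it asserts single-threaded use with rtc::ThreadChecker and lets the caller drain outputs explicitly via the new PollDecodedFrames(). Below is a minimal, framework-free C++ sketch of that model; SingleThreadChecker and PollingDecoder are hypothetical stand-ins for illustration only, not the WebRTC classes in this CL.

```cpp
// Sketch of the "caller-driven polling + thread checker" model this CL
// adopts (hypothetical names; not the actual WebRTC API).
#include <cassert>
#include <iostream>
#include <thread>

class SingleThreadChecker {  // Stand-in for rtc::ThreadChecker.
 public:
  bool CalledOnValidThread() {
    if (!attached_) {
      // First call after DetachFromThread() attaches to the current thread,
      // mirroring how the decoder detaches in its constructor.
      thread_id_ = std::this_thread::get_id();
      attached_ = true;
    }
    return thread_id_ == std::this_thread::get_id();
  }
  void DetachFromThread() { attached_ = false; }

 private:
  bool attached_ = false;
  std::thread::id thread_id_;
};

class PollingDecoder {  // Hypothetical sketch, not MediaCodecVideoDecoder.
 public:
  PollingDecoder() { checker_.DetachFromThread(); }

  void Decode(int frame) {
    assert(checker_.CalledOnValidThread());  // RTC_DCHECK in the real code.
    ++frames_received_;
    std::cout << "queued frame " << frame << "\n";
  }

  // The owner calls this periodically on the same thread; it replaces the
  // old PostDelayed(kMediaCodecPollMs, this) + OnMessage() polling loop.
  void PollDecodedFrames() {
    assert(checker_.CalledOnValidThread());
    while (frames_decoded_ < frames_received_) {
      std::cout << "delivered frame " << ++frames_decoded_ << "\n";
    }
  }

 private:
  SingleThreadChecker checker_;
  int frames_received_ = 0;
  int frames_decoded_ = 0;
};

int main() {
  PollingDecoder decoder;
  // All calls stay on one thread; a second thread calling in would trip the
  // checker, just as the RTC_DCHECKs would in the new decoder code.
  std::thread decode_thread([&] {
    decoder.Decode(1);
    decoder.Decode(2);
    decoder.PollDecodedFrames();
  });
  decode_thread.join();
  return 0;
}
```

The design trade-off visible in the diff is the same as in the sketch: dropping the internal thread removes the blocking Invoke() hops and the self-posted poll messages, but output delivery now only happens when the owning thread calls Decode() or PollDecodedFrames().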