Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 /* | 1 /* |
| 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| (...skipping 18 matching lines...) Expand all Loading... | |
| 29 #include "webrtc/common_video/h264/h264_bitstream_parser.h" | 29 #include "webrtc/common_video/h264/h264_bitstream_parser.h" |
| 30 #include "webrtc/common_video/include/i420_buffer_pool.h" | 30 #include "webrtc/common_video/include/i420_buffer_pool.h" |
| 31 #include "webrtc/modules/video_coding/include/video_codec_interface.h" | 31 #include "webrtc/modules/video_coding/include/video_codec_interface.h" |
| 32 #include "webrtc/modules/video_coding/utility/vp8_header_parser.h" | 32 #include "webrtc/modules/video_coding/utility/vp8_header_parser.h" |
| 33 #include "webrtc/sdk/android/src/jni/androidmediacodeccommon.h" | 33 #include "webrtc/sdk/android/src/jni/androidmediacodeccommon.h" |
| 34 #include "webrtc/sdk/android/src/jni/classreferenceholder.h" | 34 #include "webrtc/sdk/android/src/jni/classreferenceholder.h" |
| 35 #include "webrtc/sdk/android/src/jni/native_handle_impl.h" | 35 #include "webrtc/sdk/android/src/jni/native_handle_impl.h" |
| 36 #include "webrtc/sdk/android/src/jni/surfacetexturehelper_jni.h" | 36 #include "webrtc/sdk/android/src/jni/surfacetexturehelper_jni.h" |
| 37 #include "webrtc/system_wrappers/include/logcat_trace_context.h" | 37 #include "webrtc/system_wrappers/include/logcat_trace_context.h" |
| 38 | 38 |
| 39 using rtc::Bind; | 39 // Logging macros. |
| 40 #define TAG_DECODER "MediaCodecVideoDecoder" | |
| 41 #ifdef TRACK_BUFFER_TIMING | |
| 42 #define ALOGV(...) | |
| 43 __android_log_print(ANDROID_LOG_VERBOSE, TAG_DECODER, __VA_ARGS__) | |
| 44 #else | |
| 45 #define ALOGV(...) | |
| 46 #endif | |
| 47 #define ALOGD LOG_TAG(rtc::LS_INFO, TAG_DECODER) | |
| 48 #define ALOGW LOG_TAG(rtc::LS_WARNING, TAG_DECODER) | |
| 49 #define ALOGE LOG_TAG(rtc::LS_ERROR, TAG_DECODER) | |
| 50 | |
| 51 using rtc::Bind; | |
|
sakal
2017/03/21 10:05:58
nit: indentation
tommi
2017/03/21 16:21:43
Done.
| |
| 40 using rtc::Thread; | 52 using rtc::Thread; |
| 41 using rtc::ThreadManager; | 53 using rtc::ThreadManager; |
| 42 | 54 |
| 43 using webrtc::CodecSpecificInfo; | 55 using webrtc::CodecSpecificInfo; |
| 44 using webrtc::DecodedImageCallback; | 56 using webrtc::DecodedImageCallback; |
| 45 using webrtc::EncodedImage; | 57 using webrtc::EncodedImage; |
| 46 using webrtc::VideoFrame; | 58 using webrtc::VideoFrame; |
| 47 using webrtc::RTPFragmentationHeader; | 59 using webrtc::RTPFragmentationHeader; |
| 48 using webrtc::VideoCodec; | 60 using webrtc::VideoCodec; |
| 49 using webrtc::VideoCodecType; | 61 using webrtc::VideoCodecType; |
| 50 using webrtc::kVideoCodecH264; | 62 using webrtc::kVideoCodecH264; |
| 51 using webrtc::kVideoCodecVP8; | 63 using webrtc::kVideoCodecVP8; |
| 52 using webrtc::kVideoCodecVP9; | 64 using webrtc::kVideoCodecVP9; |
| 53 | 65 |
| 54 namespace webrtc_jni { | 66 namespace webrtc_jni { |
| 55 | 67 |
| 56 // Logging macros. | 68 class MediaCodecVideoDecoder : public webrtc::VideoDecoder { |
| 57 #define TAG_DECODER "MediaCodecVideoDecoder" | |
| 58 #ifdef TRACK_BUFFER_TIMING | |
| 59 #define ALOGV(...) | |
| 60 __android_log_print(ANDROID_LOG_VERBOSE, TAG_DECODER, __VA_ARGS__) | |
| 61 #else | |
| 62 #define ALOGV(...) | |
| 63 #endif | |
| 64 #define ALOGD LOG_TAG(rtc::LS_INFO, TAG_DECODER) | |
| 65 #define ALOGW LOG_TAG(rtc::LS_WARNING, TAG_DECODER) | |
| 66 #define ALOGE LOG_TAG(rtc::LS_ERROR, TAG_DECODER) | |
| 67 | |
| 68 enum { kMaxWarningLogFrames = 2 }; | |
| 69 | |
| 70 class MediaCodecVideoDecoder : public webrtc::VideoDecoder, | |
| 71 public rtc::MessageHandler { | |
| 72 public: | 69 public: |
| 73 explicit MediaCodecVideoDecoder( | 70 explicit MediaCodecVideoDecoder( |
| 74 JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context); | 71 JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context); |
| 75 virtual ~MediaCodecVideoDecoder(); | 72 virtual ~MediaCodecVideoDecoder(); |
| 76 | 73 |
| 77 int32_t InitDecode(const VideoCodec* codecSettings, int32_t numberOfCores) | 74 int32_t InitDecode(const VideoCodec* codecSettings, int32_t numberOfCores) |
| 78 override; | 75 override; |
| 79 | 76 |
| 80 int32_t Decode( | 77 int32_t Decode( |
| 81 const EncodedImage& inputImage, bool missingFrames, | 78 const EncodedImage& inputImage, bool missingFrames, |
| 82 const RTPFragmentationHeader* fragmentation, | 79 const RTPFragmentationHeader* fragmentation, |
| 83 const CodecSpecificInfo* codecSpecificInfo = NULL, | 80 const CodecSpecificInfo* codecSpecificInfo = NULL, |
| 84 int64_t renderTimeMs = -1) override; | 81 int64_t renderTimeMs = -1) override; |
| 85 | 82 |
| 83 void PollDecodedFrames() override; | |
| 84 | |
| 86 int32_t RegisterDecodeCompleteCallback(DecodedImageCallback* callback) | 85 int32_t RegisterDecodeCompleteCallback(DecodedImageCallback* callback) |
| 87 override; | 86 override; |
| 88 | 87 |
| 89 int32_t Release() override; | 88 int32_t Release() override; |
| 90 | 89 |
| 91 bool PrefersLateDecoding() const override { return true; } | 90 bool PrefersLateDecoding() const override { return true; } |
| 92 | 91 |
| 93 // rtc::MessageHandler implementation. | |
| 94 void OnMessage(rtc::Message* msg) override; | |
| 95 | |
| 96 const char* ImplementationName() const override; | 92 const char* ImplementationName() const override; |
| 97 | 93 |
| 98 private: | 94 private: |
| 99 // CHECK-fail if not running on |codec_thread_|. | 95 struct DecodedFrame { |
| 100 void CheckOnCodecThread(); | 96 DecodedFrame(VideoFrame frame, |
| 97 int decode_time_ms, | |
| 98 int64_t timestamp, | |
| 99 int64_t ntp_timestamp, | |
| 100 rtc::Optional<uint8_t> qp) | |
| 101 : frame(std::move(frame)), | |
| 102 decode_time_ms(decode_time_ms), | |
| 103 qp(std::move(qp)) { | |
| 104 frame.set_timestamp(timestamp); | |
| 105 frame.set_ntp_time_ms(ntp_timestamp); | |
| 106 } | |
| 107 | |
| 108 VideoFrame frame; | |
| 109 int decode_time_ms; | |
| 110 rtc::Optional<uint8_t> qp; | |
| 111 }; | |
| 112 | |
| 113 // Returns true if running on |codec_thread_|. Used for DCHECKing. | |
| 114 bool IsOnCodecThread(); | |
| 101 | 115 |
| 102 int32_t InitDecodeOnCodecThread(); | 116 int32_t InitDecodeOnCodecThread(); |
| 103 int32_t ResetDecodeOnCodecThread(); | 117 int32_t ResetDecodeOnCodecThread(); |
| 104 int32_t ReleaseOnCodecThread(); | 118 int32_t ReleaseOnCodecThread(); |
| 105 int32_t DecodeOnCodecThread(const EncodedImage& inputImage); | 119 int32_t DecodeOnCodecThread(const EncodedImage& inputImage, |
| 120 std::vector<DecodedFrame>* frames); | |
| 121 void PollDecodedFramesOnCodecThread(std::vector<DecodedFrame>* frames); | |
| 106 // Deliver any outputs pending in the MediaCodec to our |callback_| and return | 122 // Deliver any outputs pending in the MediaCodec to our |callback_| and return |
| 107 // true on success. | 123 // true on success. |
| 108 bool DeliverPendingOutputs(JNIEnv* jni, int dequeue_timeout_us); | 124 bool DeliverPendingOutputs(JNIEnv* jni, |
| 125 int dequeue_timeout_us, | |
| 126 std::vector<DecodedFrame>* frames); | |
| 109 int32_t ProcessHWErrorOnCodecThread(); | 127 int32_t ProcessHWErrorOnCodecThread(); |
| 110 void EnableFrameLogOnWarning(); | 128 void EnableFrameLogOnWarning(); |
| 111 void ResetVariables(); | 129 void ResetVariables(); |
| 112 | 130 |
| 113 // Type of video codec. | 131 // Type of video codec. |
| 114 VideoCodecType codecType_; | 132 VideoCodecType codecType_; |
| 115 | 133 |
| 116 // Render EGL context - owned by factory, should not be allocated/destroyed | 134 // Render EGL context - owned by factory, should not be allocated/destroyed |
| 117 // by VideoDecoder. | 135 // by VideoDecoder. |
| 118 jobject render_egl_context_; | 136 jobject render_egl_context_; |
| (...skipping 53 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 172 jfieldID j_info_index_field_; | 190 jfieldID j_info_index_field_; |
| 173 jfieldID j_info_offset_field_; | 191 jfieldID j_info_offset_field_; |
| 174 jfieldID j_info_size_field_; | 192 jfieldID j_info_size_field_; |
| 175 jfieldID j_presentation_timestamp_ms_field_; | 193 jfieldID j_presentation_timestamp_ms_field_; |
| 176 jfieldID j_timestamp_ms_field_; | 194 jfieldID j_timestamp_ms_field_; |
| 177 jfieldID j_ntp_timestamp_ms_field_; | 195 jfieldID j_ntp_timestamp_ms_field_; |
| 178 jfieldID j_byte_buffer_decode_time_ms_field_; | 196 jfieldID j_byte_buffer_decode_time_ms_field_; |
| 179 | 197 |
| 180 // Global references; must be deleted in Release(). | 198 // Global references; must be deleted in Release(). |
| 181 std::vector<jobject> input_buffers_; | 199 std::vector<jobject> input_buffers_; |
| 200 | |
| 201 // Added to on the codec thread, frames are delivered on the decoder thread. | |
| 202 std::vector<DecodedFrame> decoded_frames_; | |
| 182 }; | 203 }; |
| 183 | 204 |
| 184 MediaCodecVideoDecoder::MediaCodecVideoDecoder( | 205 MediaCodecVideoDecoder::MediaCodecVideoDecoder( |
| 185 JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context) : | 206 JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context) : |
| 186 codecType_(codecType), | 207 codecType_(codecType), |
| 187 render_egl_context_(render_egl_context), | 208 render_egl_context_(render_egl_context), |
| 188 key_frame_required_(true), | 209 key_frame_required_(true), |
| 189 inited_(false), | 210 inited_(false), |
| 190 sw_fallback_required_(false), | 211 sw_fallback_required_(false), |
| 191 codec_thread_(new Thread()), | 212 codec_thread_(new Thread()), |
| 192 j_media_codec_video_decoder_class_( | 213 j_media_codec_video_decoder_class_( |
| 193 jni, | 214 jni, |
| 194 FindClass(jni, "org/webrtc/MediaCodecVideoDecoder")), | 215 FindClass(jni, "org/webrtc/MediaCodecVideoDecoder")), |
| 195 j_media_codec_video_decoder_( | 216 j_media_codec_video_decoder_( |
| 196 jni, | 217 jni, |
| 197 jni->NewObject(*j_media_codec_video_decoder_class_, | 218 jni->NewObject(*j_media_codec_video_decoder_class_, |
| 198 GetMethodID(jni, | 219 GetMethodID(jni, |
| 199 *j_media_codec_video_decoder_class_, | 220 *j_media_codec_video_decoder_class_, |
| 200 "<init>", | 221 "<init>", |
| 201 "()V"))) { | 222 "()V"))) { |
| 202 ScopedLocalRefFrame local_ref_frame(jni); | 223 ScopedLocalRefFrame local_ref_frame(jni); |
| 203 codec_thread_->SetName("MediaCodecVideoDecoder", NULL); | 224 codec_thread_->SetName("MediaCodecVideoDecoder", NULL); |
| 204 RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoDecoder"; | 225 RTC_CHECK(codec_thread_->Start()); |
| 205 | 226 |
| 206 j_init_decode_method_ = GetMethodID( | 227 j_init_decode_method_ = GetMethodID( |
| 207 jni, *j_media_codec_video_decoder_class_, "initDecode", | 228 jni, *j_media_codec_video_decoder_class_, "initDecode", |
| 208 "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;" | 229 "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;" |
| 209 "IILorg/webrtc/SurfaceTextureHelper;)Z"); | 230 "IILorg/webrtc/SurfaceTextureHelper;)Z"); |
| 210 j_reset_method_ = | 231 j_reset_method_ = |
| 211 GetMethodID(jni, *j_media_codec_video_decoder_class_, "reset", "(II)V"); | 232 GetMethodID(jni, *j_media_codec_video_decoder_class_, "reset", "(II)V"); |
| 212 j_release_method_ = | 233 j_release_method_ = |
| 213 GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V"); | 234 GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V"); |
| 214 j_dequeue_input_buffer_method_ = GetMethodID( | 235 j_dequeue_input_buffer_method_ = GetMethodID( |
| (...skipping 74 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 289 } | 310 } |
| 290 | 311 |
| 291 int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst, | 312 int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst, |
| 292 int32_t numberOfCores) { | 313 int32_t numberOfCores) { |
| 293 ALOGD << "InitDecode."; | 314 ALOGD << "InitDecode."; |
| 294 if (inst == NULL) { | 315 if (inst == NULL) { |
| 295 ALOGE << "NULL VideoCodec instance"; | 316 ALOGE << "NULL VideoCodec instance"; |
| 296 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 317 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 297 } | 318 } |
| 298 // Factory should guard against other codecs being used with us. | 319 // Factory should guard against other codecs being used with us. |
| 299 RTC_CHECK(inst->codecType == codecType_) | 320 RTC_DCHECK(inst->codecType == codecType_) |
| 300 << "Unsupported codec " << inst->codecType << " for " << codecType_; | 321 << "Unsupported codec " << inst->codecType << " for " << codecType_; |
| 301 | 322 |
| 302 if (sw_fallback_required_) { | 323 if (sw_fallback_required_) { |
| 303 ALOGE << "InitDecode() - fallback to SW decoder"; | 324 ALOGE << "InitDecode() - fallback to SW decoder"; |
| 304 return WEBRTC_VIDEO_CODEC_OK; | 325 return WEBRTC_VIDEO_CODEC_OK; |
| 305 } | 326 } |
| 306 // Save VideoCodec instance for later. | 327 // Save VideoCodec instance for later. |
| 307 if (&codec_ != inst) { | 328 if (&codec_ != inst) { |
| 308 codec_ = *inst; | 329 codec_ = *inst; |
| 309 } | 330 } |
| 310 // If maxFramerate is not set then assume 30 fps. | 331 // If maxFramerate is not set then assume 30 fps. |
| 311 codec_.maxFramerate = (codec_.maxFramerate >= 1) ? codec_.maxFramerate : 30; | 332 codec_.maxFramerate = (codec_.maxFramerate >= 1) ? codec_.maxFramerate : 30; |
| 312 | 333 |
| 313 // Call Java init. | 334 // Call Java init. |
| 314 return codec_thread_->Invoke<int32_t>( | 335 return codec_thread_->Invoke<int32_t>( |
| 315 RTC_FROM_HERE, | 336 RTC_FROM_HERE, |
| 316 Bind(&MediaCodecVideoDecoder::InitDecodeOnCodecThread, this)); | 337 Bind(&MediaCodecVideoDecoder::InitDecodeOnCodecThread, this)); |
| 317 } | 338 } |
| 318 | 339 |
| 319 void MediaCodecVideoDecoder::ResetVariables() { | 340 void MediaCodecVideoDecoder::ResetVariables() { |
| 320 CheckOnCodecThread(); | 341 RTC_DCHECK(IsOnCodecThread()); |
| 321 | 342 |
| 322 key_frame_required_ = true; | 343 key_frame_required_ = true; |
| 323 frames_received_ = 0; | 344 frames_received_ = 0; |
| 324 frames_decoded_ = 0; | 345 frames_decoded_ = 0; |
| 325 frames_decoded_logged_ = kMaxDecodedLogFrames; | 346 frames_decoded_logged_ = kMaxDecodedLogFrames; |
| 326 start_time_ms_ = rtc::TimeMillis(); | 347 start_time_ms_ = rtc::TimeMillis(); |
| 327 current_frames_ = 0; | 348 current_frames_ = 0; |
| 328 current_bytes_ = 0; | 349 current_bytes_ = 0; |
| 329 current_decoding_time_ms_ = 0; | 350 current_decoding_time_ms_ = 0; |
| 330 current_delay_time_ms_ = 0; | 351 current_delay_time_ms_ = 0; |
| 331 pending_frame_qps_.clear(); | 352 pending_frame_qps_.clear(); |
| 332 } | 353 } |
| 333 | 354 |
| 334 int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() { | 355 int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() { |
| 335 CheckOnCodecThread(); | 356 RTC_DCHECK(IsOnCodecThread()); |
| 336 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 357 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 337 ScopedLocalRefFrame local_ref_frame(jni); | 358 ScopedLocalRefFrame local_ref_frame(jni); |
| 338 ALOGD << "InitDecodeOnCodecThread Type: " << (int)codecType_ << ". " | 359 ALOGD << "InitDecodeOnCodecThread Type: " << (int)codecType_ << ". " |
| 339 << codec_.width << " x " << codec_.height << ". Fps: " << | 360 << codec_.width << " x " << codec_.height << ". Fps: " << |
| 340 (int)codec_.maxFramerate; | 361 (int)codec_.maxFramerate; |
| 341 | 362 |
| 342 // Release previous codec first if it was allocated before. | 363 // Release previous codec first if it was allocated before. |
| 343 int ret_val = ReleaseOnCodecThread(); | 364 int ret_val = ReleaseOnCodecThread(); |
| 344 if (ret_val < 0) { | 365 if (ret_val < 0) { |
| 345 ALOGE << "Release failure: " << ret_val << " - fallback to SW codec"; | 366 ALOGE << "Release failure: " << ret_val << " - fallback to SW codec"; |
| (...skipping 53 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 399 for (size_t i = 0; i < num_input_buffers; ++i) { | 420 for (size_t i = 0; i < num_input_buffers; ++i) { |
| 400 input_buffers_[i] = | 421 input_buffers_[i] = |
| 401 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i)); | 422 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i)); |
| 402 if (CheckException(jni)) { | 423 if (CheckException(jni)) { |
| 403 ALOGE << "NewGlobalRef error - fallback to SW codec."; | 424 ALOGE << "NewGlobalRef error - fallback to SW codec."; |
| 404 sw_fallback_required_ = true; | 425 sw_fallback_required_ = true; |
| 405 return WEBRTC_VIDEO_CODEC_ERROR; | 426 return WEBRTC_VIDEO_CODEC_ERROR; |
| 406 } | 427 } |
| 407 } | 428 } |
| 408 | 429 |
| 409 codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollMs, this); | |
| 410 | |
| 411 return WEBRTC_VIDEO_CODEC_OK; | 430 return WEBRTC_VIDEO_CODEC_OK; |
| 412 } | 431 } |
| 413 | 432 |
| 414 int32_t MediaCodecVideoDecoder::ResetDecodeOnCodecThread() { | 433 int32_t MediaCodecVideoDecoder::ResetDecodeOnCodecThread() { |
| 415 CheckOnCodecThread(); | 434 RTC_DCHECK(IsOnCodecThread()); |
| 416 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 435 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 417 ScopedLocalRefFrame local_ref_frame(jni); | 436 ScopedLocalRefFrame local_ref_frame(jni); |
| 418 ALOGD << "ResetDecodeOnCodecThread Type: " << (int)codecType_ << ". " | 437 ALOGD << "ResetDecodeOnCodecThread Type: " << (int)codecType_ << ". " |
| 419 << codec_.width << " x " << codec_.height; | 438 << codec_.width << " x " << codec_.height; |
| 420 ALOGD << " Frames received: " << frames_received_ << | 439 ALOGD << " Frames received: " << frames_received_ << |
| 421 ". Frames decoded: " << frames_decoded_; | 440 ". Frames decoded: " << frames_decoded_; |
| 422 | 441 |
| 423 inited_ = false; | 442 inited_ = false; |
| 424 rtc::MessageQueueManager::Clear(this); | |
| 425 ResetVariables(); | 443 ResetVariables(); |
| 426 | 444 |
| 427 jni->CallVoidMethod( | 445 jni->CallVoidMethod( |
| 428 *j_media_codec_video_decoder_, | 446 *j_media_codec_video_decoder_, |
| 429 j_reset_method_, | 447 j_reset_method_, |
| 430 codec_.width, | 448 codec_.width, |
| 431 codec_.height); | 449 codec_.height); |
| 432 | 450 |
| 433 if (CheckException(jni)) { | 451 if (CheckException(jni)) { |
| 434 ALOGE << "Soft reset error - fallback to SW codec."; | 452 ALOGE << "Soft reset error - fallback to SW codec."; |
| 435 sw_fallback_required_ = true; | 453 sw_fallback_required_ = true; |
| 436 return WEBRTC_VIDEO_CODEC_ERROR; | 454 return WEBRTC_VIDEO_CODEC_ERROR; |
| 437 } | 455 } |
| 438 inited_ = true; | 456 inited_ = true; |
| 439 | 457 |
| 440 codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollMs, this); | |
| 441 | |
| 442 return WEBRTC_VIDEO_CODEC_OK; | 458 return WEBRTC_VIDEO_CODEC_OK; |
| 443 } | 459 } |
| 444 | 460 |
| 445 int32_t MediaCodecVideoDecoder::Release() { | 461 int32_t MediaCodecVideoDecoder::Release() { |
| 446 ALOGD << "DecoderRelease request"; | 462 ALOGD << "DecoderRelease request"; |
| 447 return codec_thread_->Invoke<int32_t>( | 463 return codec_thread_->Invoke<int32_t>( |
| 448 RTC_FROM_HERE, Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this)); | 464 RTC_FROM_HERE, Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this)); |
| 449 } | 465 } |
| 450 | 466 |
| 451 int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() { | 467 int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() { |
| 468 RTC_DCHECK(IsOnCodecThread()); | |
| 452 if (!inited_) { | 469 if (!inited_) { |
| 453 return WEBRTC_VIDEO_CODEC_OK; | 470 return WEBRTC_VIDEO_CODEC_OK; |
| 454 } | 471 } |
| 455 CheckOnCodecThread(); | |
| 456 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 472 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 457 ALOGD << "DecoderReleaseOnCodecThread: Frames received: " << | 473 ALOGD << "DecoderReleaseOnCodecThread: Frames received: " << |
| 458 frames_received_ << ". Frames decoded: " << frames_decoded_; | 474 frames_received_ << ". Frames decoded: " << frames_decoded_; |
| 459 ScopedLocalRefFrame local_ref_frame(jni); | 475 ScopedLocalRefFrame local_ref_frame(jni); |
| 460 for (size_t i = 0; i < input_buffers_.size(); i++) { | 476 for (size_t i = 0; i < input_buffers_.size(); i++) { |
| 461 jni->DeleteGlobalRef(input_buffers_[i]); | 477 jni->DeleteGlobalRef(input_buffers_[i]); |
| 462 } | 478 } |
| 463 input_buffers_.clear(); | 479 input_buffers_.clear(); |
| 464 jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_); | 480 jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_); |
| 465 surface_texture_helper_ = nullptr; | 481 surface_texture_helper_ = nullptr; |
| 466 inited_ = false; | 482 inited_ = false; |
| 467 rtc::MessageQueueManager::Clear(this); | |
| 468 if (CheckException(jni)) { | 483 if (CheckException(jni)) { |
| 469 ALOGE << "Decoder release exception"; | 484 ALOGE << "Decoder release exception"; |
| 470 return WEBRTC_VIDEO_CODEC_ERROR; | 485 return WEBRTC_VIDEO_CODEC_ERROR; |
| 471 } | 486 } |
| 472 ALOGD << "DecoderReleaseOnCodecThread done"; | 487 ALOGD << "DecoderReleaseOnCodecThread done"; |
| 473 return WEBRTC_VIDEO_CODEC_OK; | 488 return WEBRTC_VIDEO_CODEC_OK; |
| 474 } | 489 } |
| 475 | 490 |
| 476 void MediaCodecVideoDecoder::CheckOnCodecThread() { | 491 bool MediaCodecVideoDecoder::IsOnCodecThread() { |
| 477 RTC_CHECK(codec_thread_.get() == ThreadManager::Instance()->CurrentThread()) | 492 return codec_thread_.get() == ThreadManager::Instance()->CurrentThread(); |
| 478 << "Running on wrong thread!"; | |
| 479 } | 493 } |
| 480 | 494 |
| 481 void MediaCodecVideoDecoder::EnableFrameLogOnWarning() { | 495 void MediaCodecVideoDecoder::EnableFrameLogOnWarning() { |
| 482 // Log next 2 output frames. | 496 // Log next 2 output frames. |
| 497 static const int kMaxWarningLogFrames = 2; | |
| 483 frames_decoded_logged_ = std::max( | 498 frames_decoded_logged_ = std::max( |
| 484 frames_decoded_logged_, frames_decoded_ + kMaxWarningLogFrames); | 499 frames_decoded_logged_, frames_decoded_ + kMaxWarningLogFrames); |
| 485 } | 500 } |
| 486 | 501 |
| 487 int32_t MediaCodecVideoDecoder::ProcessHWErrorOnCodecThread() { | 502 int32_t MediaCodecVideoDecoder::ProcessHWErrorOnCodecThread() { |
| 488 CheckOnCodecThread(); | 503 RTC_DCHECK(IsOnCodecThread()); |
| 489 int ret_val = ReleaseOnCodecThread(); | 504 int ret_val = ReleaseOnCodecThread(); |
| 490 if (ret_val < 0) { | 505 if (ret_val < 0) { |
| 491 ALOGE << "ProcessHWError: Release failure"; | 506 ALOGE << "ProcessHWError: Release failure"; |
| 492 } | 507 } |
| 493 if (codecType_ == kVideoCodecH264) { | 508 if (codecType_ == kVideoCodecH264) { |
| 494 // For now there is no SW H.264 which can be used as fallback codec. | 509 // For now there is no SW H.264 which can be used as fallback codec. |
| 495 // So try to restart hw codec for now. | 510 // So try to restart hw codec for now. |
| 496 ret_val = InitDecodeOnCodecThread(); | 511 ret_val = InitDecodeOnCodecThread(); |
| 497 ALOGE << "Reset H.264 codec done. Status: " << ret_val; | 512 ALOGE << "Reset H.264 codec done. Status: " << ret_val; |
| 498 if (ret_val == WEBRTC_VIDEO_CODEC_OK) { | 513 if (ret_val == WEBRTC_VIDEO_CODEC_OK) { |
| (...skipping 10 matching lines...) Expand all Loading... | |
| 509 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; | 524 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; |
| 510 } | 525 } |
| 511 } | 526 } |
| 512 | 527 |
| 513 int32_t MediaCodecVideoDecoder::Decode( | 528 int32_t MediaCodecVideoDecoder::Decode( |
| 514 const EncodedImage& inputImage, | 529 const EncodedImage& inputImage, |
| 515 bool missingFrames, | 530 bool missingFrames, |
| 516 const RTPFragmentationHeader* fragmentation, | 531 const RTPFragmentationHeader* fragmentation, |
| 517 const CodecSpecificInfo* codecSpecificInfo, | 532 const CodecSpecificInfo* codecSpecificInfo, |
| 518 int64_t renderTimeMs) { | 533 int64_t renderTimeMs) { |
| 534 RTC_DCHECK(callback_); | |
| 535 RTC_DCHECK(inited_); | |
| 536 | |
| 519 if (sw_fallback_required_) { | 537 if (sw_fallback_required_) { |
| 520 ALOGE << "Decode() - fallback to SW codec"; | 538 ALOGE << "Decode() - fallback to SW codec"; |
| 521 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; | 539 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; |
| 522 } | 540 } |
| 523 if (callback_ == NULL) { | |
|
sakal
2017/03/21 10:05:57
Is this behavior change intended / relevant for th
tommi
2017/03/21 16:21:43
It's intended. The callback should never be null.
| |
| 524 ALOGE << "Decode() - callback_ is NULL"; | |
| 525 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | |
| 526 } | |
| 527 if (inputImage._buffer == NULL && inputImage._length > 0) { | 541 if (inputImage._buffer == NULL && inputImage._length > 0) { |
| 528 ALOGE << "Decode() - inputImage is incorrect"; | 542 ALOGE << "Decode() - inputImage is incorrect"; |
| 529 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 543 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 530 } | 544 } |
| 531 if (!inited_) { | |
| 532 ALOGE << "Decode() - decoder is not initialized"; | |
| 533 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | |
| 534 } | |
| 535 | 545 |
| 536 // Check if encoded frame dimension has changed. | 546 // Check if encoded frame dimension has changed. |
| 537 if ((inputImage._encodedWidth * inputImage._encodedHeight > 0) && | 547 if ((inputImage._encodedWidth * inputImage._encodedHeight > 0) && |
| 538 (inputImage._encodedWidth != codec_.width || | 548 (inputImage._encodedWidth != codec_.width || |
| 539 inputImage._encodedHeight != codec_.height)) { | 549 inputImage._encodedHeight != codec_.height)) { |
| 540 ALOGW << "Input resolution changed from " << | 550 ALOGW << "Input resolution changed from " << |
| 541 codec_.width << " x " << codec_.height << " to " << | 551 codec_.width << " x " << codec_.height << " to " << |
| 542 inputImage._encodedWidth << " x " << inputImage._encodedHeight; | 552 inputImage._encodedWidth << " x " << inputImage._encodedHeight; |
| 543 codec_.width = inputImage._encodedWidth; | 553 codec_.width = inputImage._encodedWidth; |
| 544 codec_.height = inputImage._encodedHeight; | 554 codec_.height = inputImage._encodedHeight; |
| (...skipping 24 matching lines...) Expand all Loading... | |
| 569 if (!inputImage._completeFrame) { | 579 if (!inputImage._completeFrame) { |
| 570 ALOGE << "Decode() - complete frame is required"; | 580 ALOGE << "Decode() - complete frame is required"; |
| 571 return WEBRTC_VIDEO_CODEC_ERROR; | 581 return WEBRTC_VIDEO_CODEC_ERROR; |
| 572 } | 582 } |
| 573 key_frame_required_ = false; | 583 key_frame_required_ = false; |
| 574 } | 584 } |
| 575 if (inputImage._length == 0) { | 585 if (inputImage._length == 0) { |
| 576 return WEBRTC_VIDEO_CODEC_ERROR; | 586 return WEBRTC_VIDEO_CODEC_ERROR; |
| 577 } | 587 } |
| 578 | 588 |
| 579 return codec_thread_->Invoke<int32_t>( | 589 ALOGD << "Decode() - about to call DecodeOnCodecThread"; |
|
sakal
2017/03/21 10:05:57
nit: Please remove excessive logging.
tommi
2017/03/21 16:21:43
Done.
| |
| 590 std::vector<DecodedFrame> frames; | |
| 591 int32_t ret = codec_thread_->Invoke<int32_t>( | |
| 592 RTC_FROM_HERE, Bind(&MediaCodecVideoDecoder::DecodeOnCodecThread, this, | |
| 593 inputImage, &frames)); | |
| 594 ALOGD << "Decode() - after call to DecodeOnCodecThread"; | |
|
sakal
2017/03/21 10:05:58
nit: Please remove excessive logging.
tommi
2017/03/21 16:21:42
Done.
| |
| 595 for (auto& f : frames) | |
| 596 callback_->Decoded(f.frame, rtc::Optional<int32_t>(f.decode_time_ms), f.qp); | |
| 597 return ret; | |
| 598 } | |
| 599 | |
| 600 void MediaCodecVideoDecoder::PollDecodedFrames() { | |
| 601 RTC_DCHECK(callback_); | |
| 602 | |
| 603 std::vector<DecodedFrame> frames; | |
| 604 codec_thread_->Invoke<void>( | |
| 580 RTC_FROM_HERE, | 605 RTC_FROM_HERE, |
| 581 Bind(&MediaCodecVideoDecoder::DecodeOnCodecThread, this, inputImage)); | 606 Bind(&MediaCodecVideoDecoder::PollDecodedFramesOnCodecThread, this, |
| 607 &frames)); | |
| 608 | |
| 609 for (auto& f : frames) | |
| 610 callback_->Decoded(f.frame, rtc::Optional<int32_t>(f.decode_time_ms), f.qp); | |
| 582 } | 611 } |
| 583 | 612 |
| 584 int32_t MediaCodecVideoDecoder::DecodeOnCodecThread( | 613 int32_t MediaCodecVideoDecoder::DecodeOnCodecThread( |
| 585 const EncodedImage& inputImage) { | 614 const EncodedImage& inputImage, |
| 586 CheckOnCodecThread(); | 615 std::vector<DecodedFrame>* frames) { |
| 616 RTC_DCHECK(IsOnCodecThread()); | |
| 587 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 617 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 588 ScopedLocalRefFrame local_ref_frame(jni); | 618 ScopedLocalRefFrame local_ref_frame(jni); |
| 589 | 619 |
| 590 // Try to drain the decoder and wait until output is not too | 620 // Try to drain the decoder and wait until output is not too |
| 591 // much behind the input. | 621 // much behind the input. |
| 592 if (codecType_ == kVideoCodecH264 && | 622 if (codecType_ == kVideoCodecH264 && |
| 593 frames_received_ > frames_decoded_ + max_pending_frames_) { | 623 frames_received_ > frames_decoded_ + max_pending_frames_) { |
| 594 // Print warning for H.264 only - for VP8/VP9 one frame delay is ok. | 624 // Print warning for H.264 only - for VP8/VP9 one frame delay is ok. |
| 595 ALOGW << "Decoder is too far behind. Try to drain. Received: " << | 625 ALOGW << "Decoder is too far behind. Try to drain. Received: " << |
| 596 frames_received_ << ". Decoded: " << frames_decoded_; | 626 frames_received_ << ". Decoded: " << frames_decoded_; |
| 597 EnableFrameLogOnWarning(); | 627 EnableFrameLogOnWarning(); |
| 598 } | 628 } |
| 599 const int64 drain_start = rtc::TimeMillis(); | 629 const int64 drain_start = rtc::TimeMillis(); |
| 600 while ((frames_received_ > frames_decoded_ + max_pending_frames_) && | 630 while ((frames_received_ > frames_decoded_ + max_pending_frames_) && |
| 601 (rtc::TimeMillis() - drain_start) < kMediaCodecTimeoutMs) { | 631 (rtc::TimeMillis() - drain_start) < kMediaCodecTimeoutMs) { |
| 602 if (!DeliverPendingOutputs(jni, kMediaCodecPollMs)) { | 632 if (!DeliverPendingOutputs(jni, kMediaCodecPollMs, frames)) { |
| 603 ALOGE << "DeliverPendingOutputs error. Frames received: " << | 633 ALOGE << "DeliverPendingOutputs error. Frames received: " << |
| 604 frames_received_ << ". Frames decoded: " << frames_decoded_; | 634 frames_received_ << ". Frames decoded: " << frames_decoded_; |
| 605 return ProcessHWErrorOnCodecThread(); | 635 return ProcessHWErrorOnCodecThread(); |
| 606 } | 636 } |
| 607 } | 637 } |
| 608 if (frames_received_ > frames_decoded_ + max_pending_frames_) { | 638 if (frames_received_ > frames_decoded_ + max_pending_frames_) { |
| 609 ALOGE << "Output buffer dequeue timeout. Frames received: " << | 639 ALOGE << "Output buffer dequeue timeout. Frames received: " << |
| 610 frames_received_ << ". Frames decoded: " << frames_decoded_; | 640 frames_received_ << ". Frames decoded: " << frames_decoded_; |
| 611 return ProcessHWErrorOnCodecThread(); | 641 return ProcessHWErrorOnCodecThread(); |
| 612 } | 642 } |
| 613 | 643 |
| 614 // Get input buffer. | 644 // Get input buffer. |
| 615 int j_input_buffer_index = jni->CallIntMethod( | 645 int j_input_buffer_index = jni->CallIntMethod( |
| 616 *j_media_codec_video_decoder_, j_dequeue_input_buffer_method_); | 646 *j_media_codec_video_decoder_, j_dequeue_input_buffer_method_); |
| 617 if (CheckException(jni) || j_input_buffer_index < 0) { | 647 if (CheckException(jni) || j_input_buffer_index < 0) { |
| 618 ALOGE << "dequeueInputBuffer error: " << j_input_buffer_index << | 648 ALOGE << "dequeueInputBuffer error: " << j_input_buffer_index << |
| 619 ". Retry DeliverPendingOutputs."; | 649 ". Retry DeliverPendingOutputs."; |
| 620 EnableFrameLogOnWarning(); | 650 EnableFrameLogOnWarning(); |
| 621 // Try to drain the decoder. | 651 // Try to drain the decoder. |
| 622 if (!DeliverPendingOutputs(jni, kMediaCodecPollMs)) { | 652 if (!DeliverPendingOutputs(jni, kMediaCodecPollMs, frames)) { |
| 623 ALOGE << "DeliverPendingOutputs error. Frames received: " << | 653 ALOGE << "DeliverPendingOutputs error. Frames received: " << |
| 624 frames_received_ << ". Frames decoded: " << frames_decoded_; | 654 frames_received_ << ". Frames decoded: " << frames_decoded_; |
| 625 return ProcessHWErrorOnCodecThread(); | 655 return ProcessHWErrorOnCodecThread(); |
| 626 } | 656 } |
| 627 // Try dequeue input buffer one last time. | 657 // Try dequeue input buffer one last time. |
| 628 j_input_buffer_index = jni->CallIntMethod( | 658 j_input_buffer_index = jni->CallIntMethod( |
| 629 *j_media_codec_video_decoder_, j_dequeue_input_buffer_method_); | 659 *j_media_codec_video_decoder_, j_dequeue_input_buffer_method_); |
| 630 if (CheckException(jni) || j_input_buffer_index < 0) { | 660 if (CheckException(jni) || j_input_buffer_index < 0) { |
| 631 ALOGE << "dequeueInputBuffer critical error: " << j_input_buffer_index; | 661 ALOGE << "dequeueInputBuffer critical error: " << j_input_buffer_index; |
| 632 return ProcessHWErrorOnCodecThread(); | 662 return ProcessHWErrorOnCodecThread(); |
| 633 } | 663 } |
| 634 } | 664 } |
| 635 | 665 |
| 636 // Copy encoded data to Java ByteBuffer. | 666 // Copy encoded data to Java ByteBuffer. |
| 637 jobject j_input_buffer = input_buffers_[j_input_buffer_index]; | 667 jobject j_input_buffer = input_buffers_[j_input_buffer_index]; |
| 638 uint8_t* buffer = | 668 uint8_t* buffer = |
| 639 reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer)); | 669 reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer)); |
| 640 RTC_CHECK(buffer) << "Indirect buffer??"; | 670 RTC_DCHECK(buffer) << "Indirect buffer??"; |
| 641 int64_t buffer_capacity = jni->GetDirectBufferCapacity(j_input_buffer); | 671 int64_t buffer_capacity = jni->GetDirectBufferCapacity(j_input_buffer); |
| 642 if (CheckException(jni) || buffer_capacity < inputImage._length) { | 672 if (CheckException(jni) || buffer_capacity < inputImage._length) { |
| 643 ALOGE << "Input frame size "<< inputImage._length << | 673 ALOGE << "Input frame size "<< inputImage._length << |
| 644 " is bigger than buffer size " << buffer_capacity; | 674 " is bigger than buffer size " << buffer_capacity; |
| 645 return ProcessHWErrorOnCodecThread(); | 675 return ProcessHWErrorOnCodecThread(); |
| 646 } | 676 } |
| 647 jlong presentation_timestamp_us = static_cast<jlong>( | 677 jlong presentation_timestamp_us = static_cast<jlong>( |
| 648 static_cast<int64_t>(frames_received_) * 1000000 / codec_.maxFramerate); | 678 static_cast<int64_t>(frames_received_) * 1000000 / codec_.maxFramerate); |
| 649 memcpy(buffer, inputImage._buffer, inputImage._length); | 679 memcpy(buffer, inputImage._buffer, inputImage._length); |
| 650 | 680 |
| (...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 683 inputImage._length, | 713 inputImage._length, |
| 684 presentation_timestamp_us, | 714 presentation_timestamp_us, |
| 685 static_cast<int64_t> (inputImage._timeStamp), | 715 static_cast<int64_t> (inputImage._timeStamp), |
| 686 inputImage.ntp_time_ms_); | 716 inputImage.ntp_time_ms_); |
| 687 if (CheckException(jni) || !success) { | 717 if (CheckException(jni) || !success) { |
| 688 ALOGE << "queueInputBuffer error"; | 718 ALOGE << "queueInputBuffer error"; |
| 689 return ProcessHWErrorOnCodecThread(); | 719 return ProcessHWErrorOnCodecThread(); |
| 690 } | 720 } |
| 691 | 721 |
| 692 // Try to drain the decoder | 722 // Try to drain the decoder |
| 693 if (!DeliverPendingOutputs(jni, 0)) { | 723 if (!DeliverPendingOutputs(jni, 0, frames)) { |
| 694 ALOGE << "DeliverPendingOutputs error"; | 724 ALOGE << "DeliverPendingOutputs error"; |
| 695 return ProcessHWErrorOnCodecThread(); | 725 return ProcessHWErrorOnCodecThread(); |
| 696 } | 726 } |
| 697 | 727 |
| 698 return WEBRTC_VIDEO_CODEC_OK; | 728 return WEBRTC_VIDEO_CODEC_OK; |
| 699 } | 729 } |
| 700 | 730 |
| 731 void MediaCodecVideoDecoder::PollDecodedFramesOnCodecThread( | |
| 732 std::vector<DecodedFrame>* frames) { | |
| 733 RTC_DCHECK(IsOnCodecThread()); | |
| 734 | |
| 735 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | |
| 736 ScopedLocalRefFrame local_ref_frame(jni); | |
| 737 | |
| 738 if (!DeliverPendingOutputs(jni, 0, frames)) { | |
| 739 ALOGE << "PollDecodedFramesOnCodecThread: DeliverPendingOutputs error"; | |
| 740 ProcessHWErrorOnCodecThread(); | |
| 741 } | |
| 742 } | |
| 743 | |
| 701 bool MediaCodecVideoDecoder::DeliverPendingOutputs( | 744 bool MediaCodecVideoDecoder::DeliverPendingOutputs( |
| 702 JNIEnv* jni, int dequeue_timeout_ms) { | 745 JNIEnv* jni, |
| 703 CheckOnCodecThread(); | 746 int dequeue_timeout_ms, |
| 747 std::vector<DecodedFrame>* frames) { | |
| 748 RTC_DCHECK(IsOnCodecThread()); | |
| 704 if (frames_received_ <= frames_decoded_) { | 749 if (frames_received_ <= frames_decoded_) { |
| 705 // No need to query for output buffers - decoder is drained. | 750 // No need to query for output buffers - decoder is drained. |
| 706 return true; | 751 return true; |
| 707 } | 752 } |
| 708 // Get decoder output. | 753 // Get decoder output. |
| 709 jobject j_decoder_output_buffer = | 754 jobject j_decoder_output_buffer = |
| 710 jni->CallObjectMethod(*j_media_codec_video_decoder_, | 755 jni->CallObjectMethod(*j_media_codec_video_decoder_, |
| 711 use_surface_ ? j_dequeue_texture_buffer_method_ | 756 use_surface_ ? j_dequeue_texture_buffer_method_ |
| 712 : j_dequeue_byte_buffer_method_, | 757 : j_dequeue_byte_buffer_method_, |
| 713 dequeue_timeout_ms); | 758 dequeue_timeout_ms); |
| (...skipping 61 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 775 jni, j_decoder_output_buffer, j_info_size_field_); | 820 jni, j_decoder_output_buffer, j_info_size_field_); |
| 776 presentation_timestamps_ms = GetLongField( | 821 presentation_timestamps_ms = GetLongField( |
| 777 jni, j_decoder_output_buffer, j_presentation_timestamp_ms_field_); | 822 jni, j_decoder_output_buffer, j_presentation_timestamp_ms_field_); |
| 778 output_timestamps_ms = GetLongField( | 823 output_timestamps_ms = GetLongField( |
| 779 jni, j_decoder_output_buffer, j_timestamp_ms_field_); | 824 jni, j_decoder_output_buffer, j_timestamp_ms_field_); |
| 780 output_ntp_timestamps_ms = GetLongField( | 825 output_ntp_timestamps_ms = GetLongField( |
| 781 jni, j_decoder_output_buffer, j_ntp_timestamp_ms_field_); | 826 jni, j_decoder_output_buffer, j_ntp_timestamp_ms_field_); |
| 782 | 827 |
| 783 decode_time_ms = GetLongField(jni, j_decoder_output_buffer, | 828 decode_time_ms = GetLongField(jni, j_decoder_output_buffer, |
| 784 j_byte_buffer_decode_time_ms_field_); | 829 j_byte_buffer_decode_time_ms_field_); |
| 785 RTC_CHECK_GE(slice_height, height); | 830 RTC_DCHECK_GE(slice_height, height); |
|
sakal
2017/03/21 10:05:57
If this check fails it results in reading outside of the buffer.
tommi
2017/03/21 16:21:42
good point, changed back to a CHECK.
| |
| 786 | 831 |
| 787 if (output_buffer_size < width * height * 3 / 2) { | 832 if (output_buffer_size < width * height * 3 / 2) { |
| 788 ALOGE << "Insufficient output buffer size: " << output_buffer_size; | 833 ALOGE << "Insufficient output buffer size: " << output_buffer_size; |
| 789 return false; | 834 return false; |
| 790 } | 835 } |
| 791 if (output_buffer_size < stride * height * 3 / 2 && | 836 if (output_buffer_size < stride * height * 3 / 2 && |
| 792 slice_height == height && stride > width) { | 837 slice_height == height && stride > width) { |
| 793 // Some codecs (Exynos) incorrectly report stride information for | 838 // Some codecs (Exynos) incorrectly report stride information for |
| 794 // output byte buffer, so actual stride value need to be corrected. | 839 // output byte buffer, so actual stride value need to be corrected. |
| 795 stride = output_buffer_size * 2 / (height * 3); | 840 stride = output_buffer_size * 2 / (height * 3); |
| 796 } | 841 } |
| 797 jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField( | 842 jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField( |
| 798 jni, *j_media_codec_video_decoder_, j_output_buffers_field_)); | 843 jni, *j_media_codec_video_decoder_, j_output_buffers_field_)); |
| 799 jobject output_buffer = | 844 jobject output_buffer = |
| 800 jni->GetObjectArrayElement(output_buffers, output_buffer_index); | 845 jni->GetObjectArrayElement(output_buffers, output_buffer_index); |
| 801 uint8_t* payload = reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress( | 846 uint8_t* payload = reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress( |
| 802 output_buffer)); | 847 output_buffer)); |
| 803 if (CheckException(jni)) { | 848 if (CheckException(jni)) { |
| 804 return false; | 849 return false; |
| 805 } | 850 } |
| 806 payload += output_buffer_offset; | 851 payload += output_buffer_offset; |
| 807 | 852 |
| 808 // Create yuv420 frame. | 853 // Create yuv420 frame. |
| 809 rtc::scoped_refptr<webrtc::I420Buffer> i420_buffer = | 854 rtc::scoped_refptr<webrtc::I420Buffer> i420_buffer = |
| 810 decoded_frame_pool_.CreateBuffer(width, height); | 855 decoded_frame_pool_.CreateBuffer(width, height); |
| 811 if (color_format == COLOR_FormatYUV420Planar) { | 856 if (color_format == COLOR_FormatYUV420Planar) { |
| 812 RTC_CHECK_EQ(0, stride % 2); | 857 RTC_DCHECK_EQ(0, stride % 2); |
| 813 const int uv_stride = stride / 2; | 858 const int uv_stride = stride / 2; |
| 814 const uint8_t* y_ptr = payload; | 859 const uint8_t* y_ptr = payload; |
| 815 const uint8_t* u_ptr = y_ptr + stride * slice_height; | 860 const uint8_t* u_ptr = y_ptr + stride * slice_height; |
| 816 | 861 |
| 817 // Note that the case with odd |slice_height| is handled in a special way. | 862 // Note that the case with odd |slice_height| is handled in a special way. |
| 818 // The chroma height contained in the payload is rounded down instead of | 863 // The chroma height contained in the payload is rounded down instead of |
| 819 // up, making it one row less than what we expect in WebRTC. Therefore, we | 864 // up, making it one row less than what we expect in WebRTC. Therefore, we |
| 820 // have to duplicate the last chroma rows for this case. Also, the offset | 865 // have to duplicate the last chroma rows for this case. Also, the offset |
| 821 // between the Y plane and the U plane is unintuitive for this case. See | 866 // between the Y plane and the U plane is unintuitive for this case. See |
| 822 // http://bugs.webrtc.org/6651 for more info. | 867 // http://bugs.webrtc.org/6651 for more info. |
| 823 const int chroma_width = (width + 1) / 2; | 868 const int chroma_width = (width + 1) / 2; |
| 824 const int chroma_height = | 869 const int chroma_height = |
| 825 (slice_height % 2 == 0) ? (height + 1) / 2 : height / 2; | 870 (slice_height % 2 == 0) ? (height + 1) / 2 : height / 2; |
| 826 const int u_offset = uv_stride * slice_height / 2; | 871 const int u_offset = uv_stride * slice_height / 2; |
| 827 const uint8_t* v_ptr = u_ptr + u_offset; | 872 const uint8_t* v_ptr = u_ptr + u_offset; |
| 828 libyuv::CopyPlane(y_ptr, stride, | 873 libyuv::CopyPlane(y_ptr, stride, |
| 829 i420_buffer->MutableDataY(), i420_buffer->StrideY(), | 874 i420_buffer->MutableDataY(), i420_buffer->StrideY(), |
| 830 width, height); | 875 width, height); |
| 831 libyuv::CopyPlane(u_ptr, uv_stride, | 876 libyuv::CopyPlane(u_ptr, uv_stride, |
| 832 i420_buffer->MutableDataU(), i420_buffer->StrideU(), | 877 i420_buffer->MutableDataU(), i420_buffer->StrideU(), |
| 833 chroma_width, chroma_height); | 878 chroma_width, chroma_height); |
| 834 libyuv::CopyPlane(v_ptr, uv_stride, | 879 libyuv::CopyPlane(v_ptr, uv_stride, |
| 835 i420_buffer->MutableDataV(), i420_buffer->StrideV(), | 880 i420_buffer->MutableDataV(), i420_buffer->StrideV(), |
| 836 chroma_width, chroma_height); | 881 chroma_width, chroma_height); |
| 837 if (slice_height % 2 == 1) { | 882 if (slice_height % 2 == 1) { |
| 838 RTC_CHECK_EQ(height, slice_height); | 883 RTC_DCHECK_EQ(height, slice_height); |
| 839 // Duplicate the last chroma rows. | 884 // Duplicate the last chroma rows. |
| 840 uint8_t* u_last_row_ptr = i420_buffer->MutableDataU() + | 885 uint8_t* u_last_row_ptr = i420_buffer->MutableDataU() + |
| 841 chroma_height * i420_buffer->StrideU(); | 886 chroma_height * i420_buffer->StrideU(); |
| 842 memcpy(u_last_row_ptr, u_last_row_ptr - i420_buffer->StrideU(), | 887 memcpy(u_last_row_ptr, u_last_row_ptr - i420_buffer->StrideU(), |
| 843 i420_buffer->StrideU()); | 888 i420_buffer->StrideU()); |
| 844 uint8_t* v_last_row_ptr = i420_buffer->MutableDataV() + | 889 uint8_t* v_last_row_ptr = i420_buffer->MutableDataV() + |
| 845 chroma_height * i420_buffer->StrideV(); | 890 chroma_height * i420_buffer->StrideV(); |
| 846 memcpy(v_last_row_ptr, v_last_row_ptr - i420_buffer->StrideV(), | 891 memcpy(v_last_row_ptr, v_last_row_ptr - i420_buffer->StrideV(), |
| 847 i420_buffer->StrideV()); | 892 i420_buffer->StrideV()); |
| 848 } | 893 } |
| (...skipping 48 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 897 " for last " << statistic_time_ms << " ms."; | 942 " for last " << statistic_time_ms << " ms."; |
| 898 start_time_ms_ = rtc::TimeMillis(); | 943 start_time_ms_ = rtc::TimeMillis(); |
| 899 current_frames_ = 0; | 944 current_frames_ = 0; |
| 900 current_bytes_ = 0; | 945 current_bytes_ = 0; |
| 901 current_decoding_time_ms_ = 0; | 946 current_decoding_time_ms_ = 0; |
| 902 current_delay_time_ms_ = 0; | 947 current_delay_time_ms_ = 0; |
| 903 } | 948 } |
| 904 | 949 |
| 905 // If the frame was dropped, frame_buffer is left as nullptr. | 950 // If the frame was dropped, frame_buffer is left as nullptr. |
| 906 if (frame_buffer) { | 951 if (frame_buffer) { |
| 952 ALOGD << "DeliverPendingOutputs: Have frame on the codec thread."; | |
|
sakal
2017/03/21 10:05:57
This log seems a little excessive.
tommi
2017/03/21 16:21:42
Done.
| |
| 907 VideoFrame decoded_frame(frame_buffer, 0, 0, webrtc::kVideoRotation_0); | 953 VideoFrame decoded_frame(frame_buffer, 0, 0, webrtc::kVideoRotation_0); |
| 908 decoded_frame.set_timestamp(output_timestamps_ms); | 954 decoded_frame.set_timestamp(output_timestamps_ms); |
| 909 decoded_frame.set_ntp_time_ms(output_ntp_timestamps_ms); | 955 decoded_frame.set_ntp_time_ms(output_ntp_timestamps_ms); |
| 910 | 956 |
| 911 rtc::Optional<uint8_t> qp = pending_frame_qps_.front(); | 957 rtc::Optional<uint8_t> qp = pending_frame_qps_.front(); |
| 912 pending_frame_qps_.pop_front(); | 958 pending_frame_qps_.pop_front(); |
| 913 callback_->Decoded(decoded_frame, rtc::Optional<int32_t>(decode_time_ms), | 959 decoded_frames_.push_back(DecodedFrame(std::move(decoded_frame), |
| 914 qp); | 960 decode_time_ms, output_timestamps_ms, |
| 961 output_ntp_timestamps_ms, qp)); | |
| 962 ALOGD << "DeliverPendingOutputs: Decoded frame delivered."; | |
|
sakal
2017/03/21 10:05:58
Same here.
tommi
2017/03/21 16:21:43
Done.
| |
| 963 } | |
| 964 | |
| 965 if (frames) { | |
|
sakal
2017/03/21 10:05:57
I don't see frames ever actually being null. DCHECK instead?
tommi
2017/03/21 16:21:43
Done.
| |
| 966 frames->reserve(frames->size() + decoded_frames_.size()); | |
| 967 std::move(decoded_frames_.begin(), decoded_frames_.end(), | |
| 968 std::back_inserter(*frames)); | |
| 969 decoded_frames_.clear(); | |
| 915 } | 970 } |
| 916 return true; | 971 return true; |
| 917 } | 972 } |
| 918 | 973 |
| 919 int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback( | 974 int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback( |
| 920 DecodedImageCallback* callback) { | 975 DecodedImageCallback* callback) { |
| 921 callback_ = callback; | 976 callback_ = callback; |
| 922 return WEBRTC_VIDEO_CODEC_OK; | 977 return WEBRTC_VIDEO_CODEC_OK; |
| 923 } | 978 } |
| 924 | 979 |
| 925 void MediaCodecVideoDecoder::OnMessage(rtc::Message* msg) { | |
| 926 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | |
| 927 ScopedLocalRefFrame local_ref_frame(jni); | |
| 928 if (!inited_) { | |
| 929 return; | |
| 930 } | |
| 931 // We only ever send one message to |this| directly (not through a Bind()'d | |
| 932 // functor), so expect no ID/data. | |
| 933 RTC_CHECK(!msg->message_id) << "Unexpected message!"; | |
| 934 RTC_CHECK(!msg->pdata) << "Unexpected message!"; | |
| 935 CheckOnCodecThread(); | |
| 936 | |
| 937 if (!DeliverPendingOutputs(jni, 0)) { | |
| 938 ALOGE << "OnMessage: DeliverPendingOutputs error"; | |
| 939 ProcessHWErrorOnCodecThread(); | |
| 940 return; | |
| 941 } | |
| 942 codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollMs, this); | |
| 943 } | |
| 944 | |
| 945 MediaCodecVideoDecoderFactory::MediaCodecVideoDecoderFactory() | 980 MediaCodecVideoDecoderFactory::MediaCodecVideoDecoderFactory() |
| 946 : egl_context_(nullptr) { | 981 : egl_context_(nullptr) { |
| 947 ALOGD << "MediaCodecVideoDecoderFactory ctor"; | 982 ALOGD << "MediaCodecVideoDecoderFactory ctor"; |
| 948 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 983 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 949 ScopedLocalRefFrame local_ref_frame(jni); | 984 ScopedLocalRefFrame local_ref_frame(jni); |
| 950 jclass j_decoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoDecoder"); | 985 jclass j_decoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoDecoder"); |
| 951 supported_codec_types_.clear(); | 986 supported_codec_types_.clear(); |
| 952 | 987 |
| 953 bool is_vp8_hw_supported = jni->CallStaticBooleanMethod( | 988 bool is_vp8_hw_supported = jni->CallStaticBooleanMethod( |
| 954 j_decoder_class, | 989 j_decoder_class, |
| (...skipping 71 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1026 webrtc::VideoDecoder* decoder) { | 1061 webrtc::VideoDecoder* decoder) { |
| 1027 ALOGD << "Destroy video decoder."; | 1062 ALOGD << "Destroy video decoder."; |
| 1028 delete decoder; | 1063 delete decoder; |
| 1029 } | 1064 } |
| 1030 | 1065 |
| 1031 const char* MediaCodecVideoDecoder::ImplementationName() const { | 1066 const char* MediaCodecVideoDecoder::ImplementationName() const { |
| 1032 return "MediaCodec"; | 1067 return "MediaCodec"; |
| 1033 } | 1068 } |
| 1034 | 1069 |
| 1035 } // namespace webrtc_jni | 1070 } // namespace webrtc_jni |
| OLD | NEW |