OLD | NEW |
1 /* | 1 /* |
2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
11 #include <algorithm> | 11 #include <algorithm> |
12 #include <deque> | 12 #include <deque> |
13 #include <memory> | 13 #include <memory> |
14 #include <vector> | 14 #include <vector> |
15 | 15 |
16 // NOTICE: androidmediadecoder_jni.h must be included before | 16 // NOTICE: androidmediadecoder_jni.h must be included before |
17 // androidmediacodeccommon.h to avoid build errors. | 17 // androidmediacodeccommon.h to avoid build errors. |
18 #include "webrtc/sdk/android/src/jni/androidmediadecoder_jni.h" | 18 #include "webrtc/sdk/android/src/jni/androidmediadecoder_jni.h" |
19 | 19 |
20 #include "third_party/libyuv/include/libyuv/convert.h" | 20 #include "third_party/libyuv/include/libyuv/convert.h" |
21 #include "third_party/libyuv/include/libyuv/convert_from.h" | 21 #include "third_party/libyuv/include/libyuv/convert_from.h" |
22 #include "third_party/libyuv/include/libyuv/video_common.h" | 22 #include "third_party/libyuv/include/libyuv/video_common.h" |
23 #include "webrtc/base/bind.h" | 23 #include "webrtc/base/bind.h" |
24 #include "webrtc/base/checks.h" | 24 #include "webrtc/base/checks.h" |
25 #include "webrtc/base/logging.h" | 25 #include "webrtc/base/logging.h" |
26 #include "webrtc/base/scoped_ref_ptr.h" | 26 #include "webrtc/base/scoped_ref_ptr.h" |
27 #include "webrtc/base/thread.h" | 27 #include "webrtc/base/thread_checker.h" |
28 #include "webrtc/base/timeutils.h" | 28 #include "webrtc/base/timeutils.h" |
29 #include "webrtc/common_video/h264/h264_bitstream_parser.h" | 29 #include "webrtc/common_video/h264/h264_bitstream_parser.h" |
30 #include "webrtc/common_video/include/i420_buffer_pool.h" | 30 #include "webrtc/common_video/include/i420_buffer_pool.h" |
31 #include "webrtc/modules/video_coding/include/video_codec_interface.h" | 31 #include "webrtc/modules/video_coding/include/video_codec_interface.h" |
32 #include "webrtc/modules/video_coding/utility/vp8_header_parser.h" | 32 #include "webrtc/modules/video_coding/utility/vp8_header_parser.h" |
33 #include "webrtc/sdk/android/src/jni/androidmediacodeccommon.h" | 33 #include "webrtc/sdk/android/src/jni/androidmediacodeccommon.h" |
34 #include "webrtc/sdk/android/src/jni/classreferenceholder.h" | 34 #include "webrtc/sdk/android/src/jni/classreferenceholder.h" |
35 #include "webrtc/sdk/android/src/jni/native_handle_impl.h" | 35 #include "webrtc/sdk/android/src/jni/native_handle_impl.h" |
36 #include "webrtc/sdk/android/src/jni/surfacetexturehelper_jni.h" | 36 #include "webrtc/sdk/android/src/jni/surfacetexturehelper_jni.h" |
37 #include "webrtc/system_wrappers/include/logcat_trace_context.h" | 37 #include "webrtc/system_wrappers/include/logcat_trace_context.h" |
(...skipping 20 matching lines...) |
58 using webrtc::VideoFrame; | 58 using webrtc::VideoFrame; |
59 using webrtc::RTPFragmentationHeader; | 59 using webrtc::RTPFragmentationHeader; |
60 using webrtc::VideoCodec; | 60 using webrtc::VideoCodec; |
61 using webrtc::VideoCodecType; | 61 using webrtc::VideoCodecType; |
62 using webrtc::kVideoCodecH264; | 62 using webrtc::kVideoCodecH264; |
63 using webrtc::kVideoCodecVP8; | 63 using webrtc::kVideoCodecVP8; |
64 using webrtc::kVideoCodecVP9; | 64 using webrtc::kVideoCodecVP9; |
65 | 65 |
66 namespace webrtc_jni { | 66 namespace webrtc_jni { |
67 | 67 |
| 68 // All calls should be made on the decoding thread, except for Release() which |
| 69 // can be called after the decoder thread has been stopped. |
68 class MediaCodecVideoDecoder : public webrtc::VideoDecoder { | 70 class MediaCodecVideoDecoder : public webrtc::VideoDecoder { |
69 public: | 71 public: |
70 explicit MediaCodecVideoDecoder( | 72 explicit MediaCodecVideoDecoder( |
71 JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context); | 73 JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context); |
72 virtual ~MediaCodecVideoDecoder(); | 74 virtual ~MediaCodecVideoDecoder(); |
73 | 75 |
74 int32_t InitDecode(const VideoCodec* codecSettings, int32_t numberOfCores) | 76 int32_t InitDecode(const VideoCodec* codecSettings, int32_t numberOfCores) |
75 override; | 77 override; |
76 | 78 |
77 int32_t Decode( | 79 int32_t Decode( |
78 const EncodedImage& inputImage, bool missingFrames, | 80 const EncodedImage& inputImage, bool missingFrames, |
79 const RTPFragmentationHeader* fragmentation, | 81 const RTPFragmentationHeader* fragmentation, |
80 const CodecSpecificInfo* codecSpecificInfo = NULL, | 82 const CodecSpecificInfo* codecSpecificInfo = NULL, |
81 int64_t renderTimeMs = -1) override; | 83 int64_t renderTimeMs = -1) override; |
82 | 84 |
83 void PollDecodedFrames() override; | 85 void PollDecodedFrames() override; |
84 | 86 |
85 int32_t RegisterDecodeCompleteCallback(DecodedImageCallback* callback) | 87 int32_t RegisterDecodeCompleteCallback(DecodedImageCallback* callback) |
86 override; | 88 override; |
87 | 89 |
88 int32_t Release() override; | 90 int32_t Release() override; |
89 | 91 |
90 bool PrefersLateDecoding() const override { return true; } | 92 bool PrefersLateDecoding() const override { return true; } |
91 | 93 |
92 const char* ImplementationName() const override; | 94 const char* ImplementationName() const override; |
93 | 95 |
94 private: | 96 private: |
95 struct DecodedFrame { | 97 int32_t InitDecodeInternal(); |
96 DecodedFrame(VideoFrame frame, | 98 int32_t ResetDecode(); |
97 int decode_time_ms, | |
98 int64_t timestamp, | |
99 int64_t ntp_timestamp, | |
100 rtc::Optional<uint8_t> qp) | |
101 : frame(std::move(frame)), | |
102 decode_time_ms(decode_time_ms), | |
103 qp(std::move(qp)) { | |
104 frame.set_timestamp(timestamp); | |
105 frame.set_ntp_time_ms(ntp_timestamp); | |
106 } | |
107 | |
108 VideoFrame frame; | |
109 int decode_time_ms; | |
110 rtc::Optional<uint8_t> qp; | |
111 }; | |
112 | |
113 // Returns true if running on |codec_thread_|. Used for DCHECKing. | |
114 bool IsOnCodecThread(); | |
115 | |
116 int32_t InitDecodeOnCodecThread(); | |
117 int32_t ResetDecodeOnCodecThread(); | |
118 int32_t ReleaseOnCodecThread(); | |
119 int32_t DecodeOnCodecThread(const EncodedImage& inputImage, | |
120 std::vector<DecodedFrame>* frames); | |
121 void PollDecodedFramesOnCodecThread(std::vector<DecodedFrame>* frames); | |
122 // Deliver any outputs pending in the MediaCodec to our |callback_| and return | 99 // Deliver any outputs pending in the MediaCodec to our |callback_| and return |
123 // true on success. | 100 // true on success. |
124 bool DeliverPendingOutputs(JNIEnv* jni, | 101 bool DeliverPendingOutputs(JNIEnv* jni, int dequeue_timeout_us); |
125 int dequeue_timeout_us, | 102 int32_t ProcessHWError(); |
126 std::vector<DecodedFrame>* frames); | |
127 int32_t ProcessHWErrorOnCodecThread(); | |
128 void EnableFrameLogOnWarning(); | 103 void EnableFrameLogOnWarning(); |
129 void ResetVariables(); | 104 void ResetVariables(); |
130 | 105 |
131 // Type of video codec. | 106 // Type of video codec. |
132 VideoCodecType codecType_; | 107 VideoCodecType codecType_; |
133 | 108 |
134 // Render EGL context - owned by factory, should not be allocated/destroyed | 109 // Render EGL context - owned by factory, should not be allocated/destroyed |
135 // by VideoDecoder. | 110 // by VideoDecoder. |
136 jobject render_egl_context_; | 111 jobject render_egl_context_; |
137 | 112 |
(...skipping 13 matching lines...) |
151 int current_frames_; // Number of frames in the current statistics interval. | 126 int current_frames_; // Number of frames in the current statistics interval. |
152 int current_bytes_; // Encoded bytes in the current statistics interval. | 127 int current_bytes_; // Encoded bytes in the current statistics interval. |
153 int current_decoding_time_ms_; // Overall decoding time in the current second | 128 int current_decoding_time_ms_; // Overall decoding time in the current second |
154 int current_delay_time_ms_; // Overall delay time in the current second. | 129 int current_delay_time_ms_; // Overall delay time in the current second. |
155 uint32_t max_pending_frames_; // Maximum number of pending input frames. | 130 uint32_t max_pending_frames_; // Maximum number of pending input frames. |
156 webrtc::H264BitstreamParser h264_bitstream_parser_; | 131 webrtc::H264BitstreamParser h264_bitstream_parser_; |
157 std::deque<rtc::Optional<uint8_t>> pending_frame_qps_; | 132 std::deque<rtc::Optional<uint8_t>> pending_frame_qps_; |
158 | 133 |
159 // State that is constant for the lifetime of this object once the ctor | 134 // State that is constant for the lifetime of this object once the ctor |
160 // returns. | 135 // returns. |
161 std::unique_ptr<Thread> | 136 rtc::ThreadChecker decode_thread_checker_; |
162 codec_thread_; // Thread on which to operate MediaCodec. | |
163 ScopedGlobalRef<jclass> j_media_codec_video_decoder_class_; | 137 ScopedGlobalRef<jclass> j_media_codec_video_decoder_class_; |
164 ScopedGlobalRef<jobject> j_media_codec_video_decoder_; | 138 ScopedGlobalRef<jobject> j_media_codec_video_decoder_; |
165 jmethodID j_init_decode_method_; | 139 jmethodID j_init_decode_method_; |
166 jmethodID j_reset_method_; | 140 jmethodID j_reset_method_; |
167 jmethodID j_release_method_; | 141 jmethodID j_release_method_; |
168 jmethodID j_dequeue_input_buffer_method_; | 142 jmethodID j_dequeue_input_buffer_method_; |
169 jmethodID j_queue_input_buffer_method_; | 143 jmethodID j_queue_input_buffer_method_; |
170 jmethodID j_dequeue_byte_buffer_method_; | 144 jmethodID j_dequeue_byte_buffer_method_; |
171 jmethodID j_dequeue_texture_buffer_method_; | 145 jmethodID j_dequeue_texture_buffer_method_; |
172 jmethodID j_return_decoded_byte_buffer_method_; | 146 jmethodID j_return_decoded_byte_buffer_method_; |
(...skipping 17 matching lines...) |
190 jfieldID j_info_index_field_; | 164 jfieldID j_info_index_field_; |
191 jfieldID j_info_offset_field_; | 165 jfieldID j_info_offset_field_; |
192 jfieldID j_info_size_field_; | 166 jfieldID j_info_size_field_; |
193 jfieldID j_presentation_timestamp_ms_field_; | 167 jfieldID j_presentation_timestamp_ms_field_; |
194 jfieldID j_timestamp_ms_field_; | 168 jfieldID j_timestamp_ms_field_; |
195 jfieldID j_ntp_timestamp_ms_field_; | 169 jfieldID j_ntp_timestamp_ms_field_; |
196 jfieldID j_byte_buffer_decode_time_ms_field_; | 170 jfieldID j_byte_buffer_decode_time_ms_field_; |
197 | 171 |
198 // Global references; must be deleted in Release(). | 172 // Global references; must be deleted in Release(). |
199 std::vector<jobject> input_buffers_; | 173 std::vector<jobject> input_buffers_; |
200 | |
201 // Added to on the codec thread, frames are delivered on the decoder thread. | |
202 std::vector<DecodedFrame> decoded_frames_; | |
203 }; | 174 }; |
204 | 175 |
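The class declaration above drops the dedicated |codec_thread_| in favor of an rtc::ThreadChecker, so MediaCodec calls now run inline on whichever thread first calls into the decoder instead of being marshalled through Thread::Invoke(). A minimal sketch of that pattern, assuming only the rtc::ThreadChecker API used in this change (illustrative, not part of the CL):

#include "webrtc/base/checks.h"
#include "webrtc/base/thread_checker.h"

class SingleThreadChecked {
 public:
  SingleThreadChecked() {
    // Detach so the checker binds to the first thread that calls Work(),
    // mirroring decode_thread_checker_.DetachFromThread() in the ctor below.
    thread_checker_.DetachFromThread();
  }

  void Work() {
    // Every subsequent call must arrive on that same thread.
    RTC_DCHECK(thread_checker_.CalledOnValidThread());
    // ... real work runs inline; no Thread::Invoke() hop is needed ...
  }

 private:
  rtc::ThreadChecker thread_checker_;
};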
205 MediaCodecVideoDecoder::MediaCodecVideoDecoder( | 176 MediaCodecVideoDecoder::MediaCodecVideoDecoder( |
206 JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context) : | 177 JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context) : |
207 codecType_(codecType), | 178 codecType_(codecType), |
208 render_egl_context_(render_egl_context), | 179 render_egl_context_(render_egl_context), |
209 key_frame_required_(true), | 180 key_frame_required_(true), |
210 inited_(false), | 181 inited_(false), |
211 sw_fallback_required_(false), | 182 sw_fallback_required_(false), |
212 codec_thread_(new Thread()), | |
213 j_media_codec_video_decoder_class_( | 183 j_media_codec_video_decoder_class_( |
214 jni, | 184 jni, |
215 FindClass(jni, "org/webrtc/MediaCodecVideoDecoder")), | 185 FindClass(jni, "org/webrtc/MediaCodecVideoDecoder")), |
216 j_media_codec_video_decoder_( | 186 j_media_codec_video_decoder_( |
217 jni, | 187 jni, |
218 jni->NewObject(*j_media_codec_video_decoder_class_, | 188 jni->NewObject(*j_media_codec_video_decoder_class_, |
219 GetMethodID(jni, | 189 GetMethodID(jni, |
220 *j_media_codec_video_decoder_class_, | 190 *j_media_codec_video_decoder_class_, |
221 "<init>", | 191 "<init>", |
222 "()V"))) { | 192 "()V"))) { |
223 codec_thread_->SetName("MediaCodecVideoDecoder", NULL); | 193 decode_thread_checker_.DetachFromThread(); |
224 RTC_CHECK(codec_thread_->Start()); | |
225 | 194 |
226 j_init_decode_method_ = GetMethodID( | 195 j_init_decode_method_ = GetMethodID( |
227 jni, *j_media_codec_video_decoder_class_, "initDecode", | 196 jni, *j_media_codec_video_decoder_class_, "initDecode", |
228 "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;" | 197 "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;" |
229 "IILorg/webrtc/SurfaceTextureHelper;)Z"); | 198 "IILorg/webrtc/SurfaceTextureHelper;)Z"); |
230 j_reset_method_ = | 199 j_reset_method_ = |
231 GetMethodID(jni, *j_media_codec_video_decoder_class_, "reset", "(II)V"); | 200 GetMethodID(jni, *j_media_codec_video_decoder_class_, "reset", "(II)V"); |
232 j_release_method_ = | 201 j_release_method_ = |
233 GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V"); | 202 GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V"); |
234 j_dequeue_input_buffer_method_ = GetMethodID( | 203 j_dequeue_input_buffer_method_ = GetMethodID( |
(...skipping 68 matching lines...) |
303 AllowBlockingCalls(); | 272 AllowBlockingCalls(); |
304 } | 273 } |
305 | 274 |
306 MediaCodecVideoDecoder::~MediaCodecVideoDecoder() { | 275 MediaCodecVideoDecoder::~MediaCodecVideoDecoder() { |
307 // Call Release() to ensure no more callbacks to us after we are deleted. | 276 // Call Release() to ensure no more callbacks to us after we are deleted. |
308 Release(); | 277 Release(); |
309 } | 278 } |
310 | 279 |
311 int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst, | 280 int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst, |
312 int32_t numberOfCores) { | 281 int32_t numberOfCores) { |
| 282 RTC_DCHECK(decode_thread_checker_.CalledOnValidThread()); |
313 ALOGD << "InitDecode."; | 283 ALOGD << "InitDecode."; |
314 if (inst == NULL) { | 284 if (inst == NULL) { |
315 ALOGE << "NULL VideoCodec instance"; | 285 ALOGE << "NULL VideoCodec instance"; |
316 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 286 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
317 } | 287 } |
318 // Factory should guard against other codecs being used with us. | 288 // Factory should guard against other codecs being used with us. |
319 RTC_DCHECK(inst->codecType == codecType_) | 289 RTC_DCHECK(inst->codecType == codecType_) |
320 << "Unsupported codec " << inst->codecType << " for " << codecType_; | 290 << "Unsupported codec " << inst->codecType << " for " << codecType_; |
321 | 291 |
322 if (sw_fallback_required_) { | 292 if (sw_fallback_required_) { |
323 ALOGE << "InitDecode() - fallback to SW decoder"; | 293 ALOGE << "InitDecode() - fallback to SW decoder"; |
324 return WEBRTC_VIDEO_CODEC_OK; | 294 return WEBRTC_VIDEO_CODEC_OK; |
325 } | 295 } |
326 // Save VideoCodec instance for later. | 296 // Save VideoCodec instance for later. |
327 if (&codec_ != inst) { | 297 if (&codec_ != inst) { |
328 codec_ = *inst; | 298 codec_ = *inst; |
329 } | 299 } |
330 // If maxFramerate is not set then assume 30 fps. | 300 // If maxFramerate is not set then assume 30 fps. |
331 codec_.maxFramerate = (codec_.maxFramerate >= 1) ? codec_.maxFramerate : 30; | 301 codec_.maxFramerate = (codec_.maxFramerate >= 1) ? codec_.maxFramerate : 30; |
332 | 302 |
333 // Call Java init. | 303 // Call Java init. |
334 return codec_thread_->Invoke<int32_t>( | 304 return InitDecodeInternal(); |
335 RTC_FROM_HERE, | |
336 Bind(&MediaCodecVideoDecoder::InitDecodeOnCodecThread, this)); | |
337 } | 305 } |
338 | 306 |
339 void MediaCodecVideoDecoder::ResetVariables() { | 307 void MediaCodecVideoDecoder::ResetVariables() { |
340 RTC_DCHECK(IsOnCodecThread()); | 308 RTC_DCHECK(decode_thread_checker_.CalledOnValidThread()); |
341 | 309 |
342 key_frame_required_ = true; | 310 key_frame_required_ = true; |
343 frames_received_ = 0; | 311 frames_received_ = 0; |
344 frames_decoded_ = 0; | 312 frames_decoded_ = 0; |
345 frames_decoded_logged_ = kMaxDecodedLogFrames; | 313 frames_decoded_logged_ = kMaxDecodedLogFrames; |
346 start_time_ms_ = rtc::TimeMillis(); | 314 start_time_ms_ = rtc::TimeMillis(); |
347 current_frames_ = 0; | 315 current_frames_ = 0; |
348 current_bytes_ = 0; | 316 current_bytes_ = 0; |
349 current_decoding_time_ms_ = 0; | 317 current_decoding_time_ms_ = 0; |
350 current_delay_time_ms_ = 0; | 318 current_delay_time_ms_ = 0; |
351 pending_frame_qps_.clear(); | 319 pending_frame_qps_.clear(); |
352 } | 320 } |
353 | 321 |
354 int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() { | 322 int32_t MediaCodecVideoDecoder::InitDecodeInternal() { |
355 RTC_DCHECK(IsOnCodecThread()); | 323 RTC_DCHECK(decode_thread_checker_.CalledOnValidThread()); |
356 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 324 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
357 ScopedLocalRefFrame local_ref_frame(jni); | 325 ScopedLocalRefFrame local_ref_frame(jni); |
358 ALOGD << "InitDecodeOnCodecThread Type: " << (int)codecType_ << ". " | 326 ALOGD << "InitDecodeInternal Type: " << (int)codecType_ << ". " |
359 << codec_.width << " x " << codec_.height << ". Fps: " << | 327 << codec_.width << " x " << codec_.height |
360 (int)codec_.maxFramerate; | 328 << ". Fps: " << (int)codec_.maxFramerate; |
361 | 329 |
362 // Release previous codec first if it was allocated before. | 330 // Release previous codec first if it was allocated before. |
363 int ret_val = ReleaseOnCodecThread(); | 331 int ret_val = Release(); |
364 if (ret_val < 0) { | 332 if (ret_val < 0) { |
365 ALOGE << "Release failure: " << ret_val << " - fallback to SW codec"; | 333 ALOGE << "Release failure: " << ret_val << " - fallback to SW codec"; |
366 sw_fallback_required_ = true; | 334 sw_fallback_required_ = true; |
367 return WEBRTC_VIDEO_CODEC_ERROR; | 335 return WEBRTC_VIDEO_CODEC_ERROR; |
368 } | 336 } |
369 | 337 |
370 ResetVariables(); | 338 ResetVariables(); |
371 | 339 |
372 if (use_surface_) { | 340 if (use_surface_) { |
373 surface_texture_helper_ = SurfaceTextureHelper::create( | 341 surface_texture_helper_ = SurfaceTextureHelper::create( |
(...skipping 48 matching lines...) |
422 if (CheckException(jni)) { | 390 if (CheckException(jni)) { |
423 ALOGE << "NewGlobalRef error - fallback to SW codec."; | 391 ALOGE << "NewGlobalRef error - fallback to SW codec."; |
424 sw_fallback_required_ = true; | 392 sw_fallback_required_ = true; |
425 return WEBRTC_VIDEO_CODEC_ERROR; | 393 return WEBRTC_VIDEO_CODEC_ERROR; |
426 } | 394 } |
427 } | 395 } |
428 | 396 |
429 return WEBRTC_VIDEO_CODEC_OK; | 397 return WEBRTC_VIDEO_CODEC_OK; |
430 } | 398 } |
431 | 399 |
432 int32_t MediaCodecVideoDecoder::ResetDecodeOnCodecThread() { | 400 int32_t MediaCodecVideoDecoder::ResetDecode() { |
433 RTC_DCHECK(IsOnCodecThread()); | 401 RTC_DCHECK(decode_thread_checker_.CalledOnValidThread()); |
434 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 402 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
435 ScopedLocalRefFrame local_ref_frame(jni); | 403 ScopedLocalRefFrame local_ref_frame(jni); |
436 ALOGD << "ResetDecodeOnCodecThread Type: " << (int)codecType_ << ". " | 404 ALOGD << "ResetDecode Type: " << (int)codecType_ << ". " << codec_.width |
437 << codec_.width << " x " << codec_.height; | 405 << " x " << codec_.height; |
438 ALOGD << " Frames received: " << frames_received_ << | 406 ALOGD << " Frames received: " << frames_received_ << |
439 ". Frames decoded: " << frames_decoded_; | 407 ". Frames decoded: " << frames_decoded_; |
440 | 408 |
441 inited_ = false; | 409 inited_ = false; |
442 ResetVariables(); | 410 ResetVariables(); |
443 | 411 |
444 jni->CallVoidMethod( | 412 jni->CallVoidMethod( |
445 *j_media_codec_video_decoder_, | 413 *j_media_codec_video_decoder_, |
446 j_reset_method_, | 414 j_reset_method_, |
447 codec_.width, | 415 codec_.width, |
448 codec_.height); | 416 codec_.height); |
449 | 417 |
450 if (CheckException(jni)) { | 418 if (CheckException(jni)) { |
451 ALOGE << "Soft reset error - fallback to SW codec."; | 419 ALOGE << "Soft reset error - fallback to SW codec."; |
452 sw_fallback_required_ = true; | 420 sw_fallback_required_ = true; |
453 return WEBRTC_VIDEO_CODEC_ERROR; | 421 return WEBRTC_VIDEO_CODEC_ERROR; |
454 } | 422 } |
455 inited_ = true; | 423 inited_ = true; |
456 | 424 |
457 return WEBRTC_VIDEO_CODEC_OK; | 425 return WEBRTC_VIDEO_CODEC_OK; |
458 } | 426 } |
459 | 427 |
460 int32_t MediaCodecVideoDecoder::Release() { | 428 int32_t MediaCodecVideoDecoder::Release() { |
461 ALOGD << "DecoderRelease request"; | 429 ALOGD << "DecoderRelease request"; |
462 return codec_thread_->Invoke<int32_t>( | |
463 RTC_FROM_HERE, Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this)); | |
464 } | |
465 | |
466 int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() { | |
467 RTC_DCHECK(IsOnCodecThread()); | |
468 if (!inited_) { | 430 if (!inited_) { |
469 return WEBRTC_VIDEO_CODEC_OK; | 431 return WEBRTC_VIDEO_CODEC_OK; |
470 } | 432 } |
471 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 433 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
472 ALOGD << "DecoderReleaseOnCodecThread: Frames received: " << | 434 ALOGD << "DecoderRelease: Frames received: " << frames_received_ |
473 frames_received_ << ". Frames decoded: " << frames_decoded_; | 435 << ". Frames decoded: " << frames_decoded_; |
474 ScopedLocalRefFrame local_ref_frame(jni); | 436 ScopedLocalRefFrame local_ref_frame(jni); |
475 for (size_t i = 0; i < input_buffers_.size(); i++) { | 437 for (size_t i = 0; i < input_buffers_.size(); i++) { |
476 jni->DeleteGlobalRef(input_buffers_[i]); | 438 jni->DeleteGlobalRef(input_buffers_[i]); |
477 } | 439 } |
478 input_buffers_.clear(); | 440 input_buffers_.clear(); |
479 jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_); | 441 jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_); |
480 surface_texture_helper_ = nullptr; | 442 surface_texture_helper_ = nullptr; |
481 inited_ = false; | 443 inited_ = false; |
482 if (CheckException(jni)) { | 444 if (CheckException(jni)) { |
483 ALOGE << "Decoder release exception"; | 445 ALOGE << "Decoder release exception"; |
484 return WEBRTC_VIDEO_CODEC_ERROR; | 446 return WEBRTC_VIDEO_CODEC_ERROR; |
485 } | 447 } |
486 ALOGD << "DecoderReleaseOnCodecThread done"; | 448 ALOGD << "DecoderRelease done"; |
487 return WEBRTC_VIDEO_CODEC_OK; | 449 return WEBRTC_VIDEO_CODEC_OK; |
488 } | 450 } |
489 | 451 |
490 bool MediaCodecVideoDecoder::IsOnCodecThread() { | |
491 return codec_thread_.get() == ThreadManager::Instance()->CurrentThread(); | |
492 } | |
493 | |
494 void MediaCodecVideoDecoder::EnableFrameLogOnWarning() { | 452 void MediaCodecVideoDecoder::EnableFrameLogOnWarning() { |
495 // Log next 2 output frames. | 453 // Log next 2 output frames. |
496 static const int kMaxWarningLogFrames = 2; | 454 static const int kMaxWarningLogFrames = 2; |
497 frames_decoded_logged_ = std::max( | 455 frames_decoded_logged_ = std::max( |
498 frames_decoded_logged_, frames_decoded_ + kMaxWarningLogFrames); | 456 frames_decoded_logged_, frames_decoded_ + kMaxWarningLogFrames); |
499 } | 457 } |
500 | 458 |
501 int32_t MediaCodecVideoDecoder::ProcessHWErrorOnCodecThread() { | 459 int32_t MediaCodecVideoDecoder::ProcessHWError() { |
502 RTC_DCHECK(IsOnCodecThread()); | 460 RTC_DCHECK(decode_thread_checker_.CalledOnValidThread()); |
503 int ret_val = ReleaseOnCodecThread(); | 461 int ret_val = Release(); |
504 if (ret_val < 0) { | 462 if (ret_val < 0) { |
505 ALOGE << "ProcessHWError: Release failure"; | 463 ALOGE << "ProcessHWError: Release failure"; |
506 } | 464 } |
507 if (codecType_ == kVideoCodecH264) { | 465 if (codecType_ == kVideoCodecH264) { |
508 // For now there is no SW H.264 which can be used as fallback codec. | 466 // For now there is no SW H.264 which can be used as fallback codec. |
509 // So try to restart hw codec for now. | 467 // So try to restart hw codec for now. |
510 ret_val = InitDecodeOnCodecThread(); | 468 ret_val = InitDecodeInternal(); |
511 ALOGE << "Reset H.264 codec done. Status: " << ret_val; | 469 ALOGE << "Reset H.264 codec done. Status: " << ret_val; |
512 if (ret_val == WEBRTC_VIDEO_CODEC_OK) { | 470 if (ret_val == WEBRTC_VIDEO_CODEC_OK) { |
513 // H.264 codec was successfully reset - return regular error code. | 471 // H.264 codec was successfully reset - return regular error code. |
514 return WEBRTC_VIDEO_CODEC_ERROR; | 472 return WEBRTC_VIDEO_CODEC_ERROR; |
515 } else { | 473 } else { |
516 // Failed to restart H.264 codec - return error code which should stop the | 474 // Failed to restart H.264 codec - return error code which should stop the |
517 // call. | 475 // call. |
518 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; | 476 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; |
519 } | 477 } |
520 } else { | 478 } else { |
521 sw_fallback_required_ = true; | 479 sw_fallback_required_ = true; |
522 ALOGE << "Return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE"; | 480 ALOGE << "Return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE"; |
523 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; | 481 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; |
524 } | 482 } |
525 } | 483 } |
526 | 484 |
527 int32_t MediaCodecVideoDecoder::Decode( | 485 int32_t MediaCodecVideoDecoder::Decode( |
528 const EncodedImage& inputImage, | 486 const EncodedImage& inputImage, |
529 bool missingFrames, | 487 bool missingFrames, |
530 const RTPFragmentationHeader* fragmentation, | 488 const RTPFragmentationHeader* fragmentation, |
531 const CodecSpecificInfo* codecSpecificInfo, | 489 const CodecSpecificInfo* codecSpecificInfo, |
532 int64_t renderTimeMs) { | 490 int64_t renderTimeMs) { |
| 491 RTC_DCHECK(decode_thread_checker_.CalledOnValidThread()); |
533 RTC_DCHECK(callback_); | 492 RTC_DCHECK(callback_); |
534 RTC_DCHECK(inited_); | 493 RTC_DCHECK(inited_); |
535 | 494 |
536 if (sw_fallback_required_) { | 495 if (sw_fallback_required_) { |
537 ALOGE << "Decode() - fallback to SW codec"; | 496 ALOGE << "Decode() - fallback to SW codec"; |
538 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; | 497 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; |
539 } | 498 } |
540 if (inputImage._buffer == NULL && inputImage._length > 0) { | 499 if (inputImage._buffer == NULL && inputImage._length > 0) { |
541 ALOGE << "Decode() - inputImage is incorrect"; | 500 ALOGE << "Decode() - inputImage is incorrect"; |
542 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 501 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
543 } | 502 } |
544 | 503 |
545 // Check if encoded frame dimension has changed. | 504 // Check if encoded frame dimension has changed. |
546 if ((inputImage._encodedWidth * inputImage._encodedHeight > 0) && | 505 if ((inputImage._encodedWidth * inputImage._encodedHeight > 0) && |
547 (inputImage._encodedWidth != codec_.width || | 506 (inputImage._encodedWidth != codec_.width || |
548 inputImage._encodedHeight != codec_.height)) { | 507 inputImage._encodedHeight != codec_.height)) { |
549 ALOGW << "Input resolution changed from " << | 508 ALOGW << "Input resolution changed from " << |
550 codec_.width << " x " << codec_.height << " to " << | 509 codec_.width << " x " << codec_.height << " to " << |
551 inputImage._encodedWidth << " x " << inputImage._encodedHeight; | 510 inputImage._encodedWidth << " x " << inputImage._encodedHeight; |
552 codec_.width = inputImage._encodedWidth; | 511 codec_.width = inputImage._encodedWidth; |
553 codec_.height = inputImage._encodedHeight; | 512 codec_.height = inputImage._encodedHeight; |
554 int32_t ret; | 513 int32_t ret; |
555 if (use_surface_ && | 514 if (use_surface_ && |
556 (codecType_ == kVideoCodecVP8 || codecType_ == kVideoCodecH264)) { | 515 (codecType_ == kVideoCodecVP8 || codecType_ == kVideoCodecH264)) { |
557 // Soft codec reset - only for surface decoding. | 516 // Soft codec reset - only for surface decoding. |
558 ret = codec_thread_->Invoke<int32_t>( | 517 ret = ResetDecode(); |
559 RTC_FROM_HERE, | |
560 Bind(&MediaCodecVideoDecoder::ResetDecodeOnCodecThread, this)); | |
561 } else { | 518 } else { |
562 // Hard codec reset. | 519 // Hard codec reset. |
563 ret = InitDecode(&codec_, 1); | 520 ret = InitDecode(&codec_, 1); |
564 } | 521 } |
565 if (ret < 0) { | 522 if (ret < 0) { |
566 ALOGE << "InitDecode failure: " << ret << " - fallback to SW codec"; | 523 ALOGE << "InitDecode failure: " << ret << " - fallback to SW codec"; |
567 sw_fallback_required_ = true; | 524 sw_fallback_required_ = true; |
568 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; | 525 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; |
569 } | 526 } |
570 } | 527 } |
571 | 528 |
572 // Always start with a complete key frame. | 529 // Always start with a complete key frame. |
573 if (key_frame_required_) { | 530 if (key_frame_required_) { |
574 if (inputImage._frameType != webrtc::kVideoFrameKey) { | 531 if (inputImage._frameType != webrtc::kVideoFrameKey) { |
575 ALOGE << "Decode() - key frame is required"; | 532 ALOGE << "Decode() - key frame is required"; |
576 return WEBRTC_VIDEO_CODEC_ERROR; | 533 return WEBRTC_VIDEO_CODEC_ERROR; |
577 } | 534 } |
578 if (!inputImage._completeFrame) { | 535 if (!inputImage._completeFrame) { |
579 ALOGE << "Decode() - complete frame is required"; | 536 ALOGE << "Decode() - complete frame is required"; |
580 return WEBRTC_VIDEO_CODEC_ERROR; | 537 return WEBRTC_VIDEO_CODEC_ERROR; |
581 } | 538 } |
582 key_frame_required_ = false; | 539 key_frame_required_ = false; |
583 } | 540 } |
584 if (inputImage._length == 0) { | 541 if (inputImage._length == 0) { |
585 return WEBRTC_VIDEO_CODEC_ERROR; | 542 return WEBRTC_VIDEO_CODEC_ERROR; |
586 } | 543 } |
587 | 544 |
588 std::vector<DecodedFrame> frames; | 545 RTC_DCHECK(decode_thread_checker_.CalledOnValidThread()); |
589 int32_t ret = codec_thread_->Invoke<int32_t>( | |
590 RTC_FROM_HERE, Bind(&MediaCodecVideoDecoder::DecodeOnCodecThread, this, | |
591 inputImage, &frames)); | |
592 for (auto& f : frames) | |
593 callback_->Decoded(f.frame, rtc::Optional<int32_t>(f.decode_time_ms), f.qp); | |
594 return ret; | |
595 } | |
596 | |
597 void MediaCodecVideoDecoder::PollDecodedFrames() { | |
598 RTC_DCHECK(callback_); | |
599 | |
600 std::vector<DecodedFrame> frames; | |
601 codec_thread_->Invoke<void>( | |
602 RTC_FROM_HERE, | |
603 Bind(&MediaCodecVideoDecoder::PollDecodedFramesOnCodecThread, this, | |
604 &frames)); | |
605 | |
606 for (auto& f : frames) | |
607 callback_->Decoded(f.frame, rtc::Optional<int32_t>(f.decode_time_ms), f.qp); | |
608 } | |
609 | |
610 int32_t MediaCodecVideoDecoder::DecodeOnCodecThread( | |
611 const EncodedImage& inputImage, | |
612 std::vector<DecodedFrame>* frames) { | |
613 RTC_DCHECK(IsOnCodecThread()); | |
614 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 546 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
615 ScopedLocalRefFrame local_ref_frame(jni); | 547 ScopedLocalRefFrame local_ref_frame(jni); |
616 | 548 |
617 // Try to drain the decoder and wait until output is not too | 549 // Try to drain the decoder and wait until output is not too |
618 // much behind the input. | 550 // much behind the input. |
619 if (codecType_ == kVideoCodecH264 && | 551 if (codecType_ == kVideoCodecH264 && |
620 frames_received_ > frames_decoded_ + max_pending_frames_) { | 552 frames_received_ > frames_decoded_ + max_pending_frames_) { |
621 // Print warning for H.264 only - for VP8/VP9 one frame delay is ok. | 553 // Print warning for H.264 only - for VP8/VP9 one frame delay is ok. |
622 ALOGW << "Decoder is too far behind. Try to drain. Received: " << | 554 ALOGW << "Decoder is too far behind. Try to drain. Received: " << |
623 frames_received_ << ". Decoded: " << frames_decoded_; | 555 frames_received_ << ". Decoded: " << frames_decoded_; |
624 EnableFrameLogOnWarning(); | 556 EnableFrameLogOnWarning(); |
625 } | 557 } |
626 const int64 drain_start = rtc::TimeMillis(); | 558 const int64 drain_start = rtc::TimeMillis(); |
627 while ((frames_received_ > frames_decoded_ + max_pending_frames_) && | 559 while ((frames_received_ > frames_decoded_ + max_pending_frames_) && |
628 (rtc::TimeMillis() - drain_start) < kMediaCodecTimeoutMs) { | 560 (rtc::TimeMillis() - drain_start) < kMediaCodecTimeoutMs) { |
629 if (!DeliverPendingOutputs(jni, kMediaCodecPollMs, frames)) { | 561 if (!DeliverPendingOutputs(jni, kMediaCodecPollMs)) { |
630 ALOGE << "DeliverPendingOutputs error. Frames received: " << | 562 ALOGE << "DeliverPendingOutputs error. Frames received: " << |
631 frames_received_ << ". Frames decoded: " << frames_decoded_; | 563 frames_received_ << ". Frames decoded: " << frames_decoded_; |
632 return ProcessHWErrorOnCodecThread(); | 564 return ProcessHWError(); |
633 } | 565 } |
634 } | 566 } |
635 if (frames_received_ > frames_decoded_ + max_pending_frames_) { | 567 if (frames_received_ > frames_decoded_ + max_pending_frames_) { |
636 ALOGE << "Output buffer dequeue timeout. Frames received: " << | 568 ALOGE << "Output buffer dequeue timeout. Frames received: " << |
637 frames_received_ << ". Frames decoded: " << frames_decoded_; | 569 frames_received_ << ". Frames decoded: " << frames_decoded_; |
638 return ProcessHWErrorOnCodecThread(); | 570 return ProcessHWError(); |
639 } | 571 } |
640 | 572 |
641 // Get input buffer. | 573 // Get input buffer. |
642 int j_input_buffer_index = jni->CallIntMethod( | 574 int j_input_buffer_index = jni->CallIntMethod( |
643 *j_media_codec_video_decoder_, j_dequeue_input_buffer_method_); | 575 *j_media_codec_video_decoder_, j_dequeue_input_buffer_method_); |
644 if (CheckException(jni) || j_input_buffer_index < 0) { | 576 if (CheckException(jni) || j_input_buffer_index < 0) { |
645 ALOGE << "dequeueInputBuffer error: " << j_input_buffer_index << | 577 ALOGE << "dequeueInputBuffer error: " << j_input_buffer_index << |
646 ". Retry DeliverPendingOutputs."; | 578 ". Retry DeliverPendingOutputs."; |
647 EnableFrameLogOnWarning(); | 579 EnableFrameLogOnWarning(); |
648 // Try to drain the decoder. | 580 // Try to drain the decoder. |
649 if (!DeliverPendingOutputs(jni, kMediaCodecPollMs, frames)) { | 581 if (!DeliverPendingOutputs(jni, kMediaCodecPollMs)) { |
650 ALOGE << "DeliverPendingOutputs error. Frames received: " << | 582 ALOGE << "DeliverPendingOutputs error. Frames received: " << |
651 frames_received_ << ". Frames decoded: " << frames_decoded_; | 583 frames_received_ << ". Frames decoded: " << frames_decoded_; |
652 return ProcessHWErrorOnCodecThread(); | 584 return ProcessHWError(); |
653 } | 585 } |
654 // Try dequeue input buffer one last time. | 586 // Try dequeue input buffer one last time. |
655 j_input_buffer_index = jni->CallIntMethod( | 587 j_input_buffer_index = jni->CallIntMethod( |
656 *j_media_codec_video_decoder_, j_dequeue_input_buffer_method_); | 588 *j_media_codec_video_decoder_, j_dequeue_input_buffer_method_); |
657 if (CheckException(jni) || j_input_buffer_index < 0) { | 589 if (CheckException(jni) || j_input_buffer_index < 0) { |
658 ALOGE << "dequeueInputBuffer critical error: " << j_input_buffer_index; | 590 ALOGE << "dequeueInputBuffer critical error: " << j_input_buffer_index; |
659 return ProcessHWErrorOnCodecThread(); | 591 return ProcessHWError(); |
660 } | 592 } |
661 } | 593 } |
662 | 594 |
663 // Copy encoded data to Java ByteBuffer. | 595 // Copy encoded data to Java ByteBuffer. |
664 jobject j_input_buffer = input_buffers_[j_input_buffer_index]; | 596 jobject j_input_buffer = input_buffers_[j_input_buffer_index]; |
665 uint8_t* buffer = | 597 uint8_t* buffer = |
666 reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer)); | 598 reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer)); |
667 RTC_DCHECK(buffer) << "Indirect buffer??"; | 599 RTC_DCHECK(buffer) << "Indirect buffer??"; |
668 int64_t buffer_capacity = jni->GetDirectBufferCapacity(j_input_buffer); | 600 int64_t buffer_capacity = jni->GetDirectBufferCapacity(j_input_buffer); |
669 if (CheckException(jni) || buffer_capacity < inputImage._length) { | 601 if (CheckException(jni) || buffer_capacity < inputImage._length) { |
670 ALOGE << "Input frame size "<< inputImage._length << | 602 ALOGE << "Input frame size "<< inputImage._length << |
671 " is bigger than buffer size " << buffer_capacity; | 603 " is bigger than buffer size " << buffer_capacity; |
672 return ProcessHWErrorOnCodecThread(); | 604 return ProcessHWError(); |
673 } | 605 } |
674 jlong presentation_timestamp_us = static_cast<jlong>( | 606 jlong presentation_timestamp_us = static_cast<jlong>( |
675 static_cast<int64_t>(frames_received_) * 1000000 / codec_.maxFramerate); | 607 static_cast<int64_t>(frames_received_) * 1000000 / codec_.maxFramerate); |
676 memcpy(buffer, inputImage._buffer, inputImage._length); | 608 memcpy(buffer, inputImage._buffer, inputImage._length); |
677 | 609 |
678 if (frames_decoded_ < frames_decoded_logged_) { | 610 if (frames_decoded_ < frames_decoded_logged_) { |
679 ALOGD << "Decoder frame in # " << frames_received_ << | 611 ALOGD << "Decoder frame in # " << frames_received_ << |
680 ". Type: " << inputImage._frameType << | 612 ". Type: " << inputImage._frameType << |
681 ". Buffer # " << j_input_buffer_index << | 613 ". Buffer # " << j_input_buffer_index << |
682 ". TS: " << presentation_timestamp_us / 1000 << | 614 ". TS: " << presentation_timestamp_us / 1000 << |
(...skipping 23 matching lines...) Expand all Loading... |
706 bool success = jni->CallBooleanMethod( | 638 bool success = jni->CallBooleanMethod( |
707 *j_media_codec_video_decoder_, | 639 *j_media_codec_video_decoder_, |
708 j_queue_input_buffer_method_, | 640 j_queue_input_buffer_method_, |
709 j_input_buffer_index, | 641 j_input_buffer_index, |
710 inputImage._length, | 642 inputImage._length, |
711 presentation_timestamp_us, | 643 presentation_timestamp_us, |
712 static_cast<int64_t> (inputImage._timeStamp), | 644 static_cast<int64_t> (inputImage._timeStamp), |
713 inputImage.ntp_time_ms_); | 645 inputImage.ntp_time_ms_); |
714 if (CheckException(jni) || !success) { | 646 if (CheckException(jni) || !success) { |
715 ALOGE << "queueInputBuffer error"; | 647 ALOGE << "queueInputBuffer error"; |
716 return ProcessHWErrorOnCodecThread(); | 648 return ProcessHWError(); |
717 } | 649 } |
718 | 650 |
719 // Try to drain the decoder | 651 // Try to drain the decoder |
720 if (!DeliverPendingOutputs(jni, 0, frames)) { | 652 if (!DeliverPendingOutputs(jni, 0)) { |
721 ALOGE << "DeliverPendingOutputs error"; | 653 ALOGE << "DeliverPendingOutputs error"; |
722 return ProcessHWErrorOnCodecThread(); | 654 return ProcessHWError(); |
723 } | 655 } |
724 | 656 |
725 return WEBRTC_VIDEO_CODEC_OK; | 657 return WEBRTC_VIDEO_CODEC_OK; |
726 } | 658 } |
727 | 659 |
728 void MediaCodecVideoDecoder::PollDecodedFramesOnCodecThread( | 660 void MediaCodecVideoDecoder::PollDecodedFrames() { |
729 std::vector<DecodedFrame>* frames) { | 661 RTC_DCHECK(decode_thread_checker_.CalledOnValidThread()); |
730 RTC_DCHECK(IsOnCodecThread()); | |
731 | 662 |
732 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 663 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
733 ScopedLocalRefFrame local_ref_frame(jni); | 664 ScopedLocalRefFrame local_ref_frame(jni); |
734 | 665 |
735 if (!DeliverPendingOutputs(jni, 0, frames)) { | 666 if (!DeliverPendingOutputs(jni, 0)) { |
736 ALOGE << "PollDecodedFramesOnCodecThread: DeliverPendingOutputs error"; | 667 ALOGE << "PollDecodedFrames: DeliverPendingOutputs error"; |
737 ProcessHWErrorOnCodecThread(); | 668 ProcessHWError(); |
738 } | 669 } |
739 } | 670 } |
740 | 671 |
741 bool MediaCodecVideoDecoder::DeliverPendingOutputs( | 672 bool MediaCodecVideoDecoder::DeliverPendingOutputs(JNIEnv* jni, |
742 JNIEnv* jni, | 673 int dequeue_timeout_ms) { |
743 int dequeue_timeout_ms, | 674 RTC_DCHECK(decode_thread_checker_.CalledOnValidThread()); |
744 std::vector<DecodedFrame>* frames) { | |
745 RTC_DCHECK(IsOnCodecThread()); | |
746 RTC_DCHECK(frames); | |
747 | 675 |
748 if (frames_received_ <= frames_decoded_) { | 676 if (frames_received_ <= frames_decoded_) { |
749 // No need to query for output buffers - decoder is drained. | 677 // No need to query for output buffers - decoder is drained. |
750 return true; | 678 return true; |
751 } | 679 } |
752 // Get decoder output. | 680 // Get decoder output. |
753 jobject j_decoder_output_buffer = | 681 jobject j_decoder_output_buffer = |
754 jni->CallObjectMethod(*j_media_codec_video_decoder_, | 682 jni->CallObjectMethod(*j_media_codec_video_decoder_, |
755 use_surface_ ? j_dequeue_texture_buffer_method_ | 683 use_surface_ ? j_dequeue_texture_buffer_method_ |
756 : j_dequeue_byte_buffer_method_, | 684 : j_dequeue_byte_buffer_method_, |
(...skipping 190 matching lines...) |
947 } | 875 } |
948 | 876 |
949 // If the frame was dropped, frame_buffer is left as nullptr. | 877 // If the frame was dropped, frame_buffer is left as nullptr. |
950 if (frame_buffer) { | 878 if (frame_buffer) { |
951 VideoFrame decoded_frame(frame_buffer, 0, 0, webrtc::kVideoRotation_0); | 879 VideoFrame decoded_frame(frame_buffer, 0, 0, webrtc::kVideoRotation_0); |
952 decoded_frame.set_timestamp(output_timestamps_ms); | 880 decoded_frame.set_timestamp(output_timestamps_ms); |
953 decoded_frame.set_ntp_time_ms(output_ntp_timestamps_ms); | 881 decoded_frame.set_ntp_time_ms(output_ntp_timestamps_ms); |
954 | 882 |
955 rtc::Optional<uint8_t> qp = pending_frame_qps_.front(); | 883 rtc::Optional<uint8_t> qp = pending_frame_qps_.front(); |
956 pending_frame_qps_.pop_front(); | 884 pending_frame_qps_.pop_front(); |
957 decoded_frames_.push_back(DecodedFrame(std::move(decoded_frame), | 885 callback_->Decoded(decoded_frame, rtc::Optional<int32_t>(decode_time_ms), |
958 decode_time_ms, output_timestamps_ms, | 886 qp); |
959 output_ntp_timestamps_ms, qp)); | |
960 } | 887 } |
961 | 888 |
962 frames->reserve(frames->size() + decoded_frames_.size()); | |
963 std::move(decoded_frames_.begin(), decoded_frames_.end(), | |
964 std::back_inserter(*frames)); | |
965 decoded_frames_.clear(); | |
966 | |
967 return true; | 889 return true; |
968 } | 890 } |
969 | 891 |
970 int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback( | 892 int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback( |
971 DecodedImageCallback* callback) { | 893 DecodedImageCallback* callback) { |
972 callback_ = callback; | 894 callback_ = callback; |
973 return WEBRTC_VIDEO_CODEC_OK; | 895 return WEBRTC_VIDEO_CODEC_OK; |
974 } | 896 } |
975 | 897 |
976 MediaCodecVideoDecoderFactory::MediaCodecVideoDecoderFactory() | 898 MediaCodecVideoDecoderFactory::MediaCodecVideoDecoderFactory() |
(...skipping 81 matching lines...) |
1058 webrtc::VideoDecoder* decoder) { | 980 webrtc::VideoDecoder* decoder) { |
1059 ALOGD << "Destroy video decoder."; | 981 ALOGD << "Destroy video decoder."; |
1060 delete decoder; | 982 delete decoder; |
1061 } | 983 } |
1062 | 984 |
1063 const char* MediaCodecVideoDecoder::ImplementationName() const { | 985 const char* MediaCodecVideoDecoder::ImplementationName() const { |
1064 return "MediaCodec"; | 986 return "MediaCodec"; |
1065 } | 987 } |
1066 | 988 |
1067 } // namespace webrtc_jni | 989 } // namespace webrtc_jni |
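Taken together, the new contract is the one stated in the class comment: InitDecode(), Decode(), PollDecodedFrames() and RegisterDecodeCompleteCallback() all happen on the decoding thread, and only Release() may run after that thread has stopped. A caller-side sketch of that contract against this tree's webrtc::VideoDecoder interface, with a decoder such as one produced by MediaCodecVideoDecoderFactory (the helper name and the way |encoded_frame| is obtained are hypothetical):

// Runs entirely on the decoding thread; error handling omitted.
void RunDecoderOnce(webrtc::VideoDecoder* decoder,
                    const webrtc::VideoCodec& settings,
                    webrtc::DecodedImageCallback* callback,
                    const webrtc::EncodedImage& encoded_frame) {
  decoder->InitDecode(&settings, /*numberOfCores=*/1);
  decoder->RegisterDecodeCompleteCallback(callback);
  // Feed one encoded frame; decoded output reaches |callback| either here or
  // on a later PollDecodedFrames() call, always on this same thread.
  decoder->Decode(encoded_frame, /*missingFrames=*/false,
                  /*fragmentation=*/nullptr);
  decoder->PollDecodedFrames();
  // Release() is the one call allowed after the decoding thread has stopped.
  decoder->Release();
}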