/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include <algorithm>
#include <memory>
#include <vector>

// NOTICE: androidmediadecoder_jni.h must be included before
// androidmediacodeccommon.h to avoid build errors.
#include "webrtc/api/android/jni/androidmediadecoder_jni.h"

#include "third_party/libyuv/include/libyuv/convert.h"
#include "third_party/libyuv/include/libyuv/convert_from.h"
#include "third_party/libyuv/include/libyuv/video_common.h"
#include "webrtc/api/android/jni/androidmediacodeccommon.h"
#include "webrtc/api/android/jni/classreferenceholder.h"
#include "webrtc/api/android/jni/native_handle_impl.h"
#include "webrtc/api/android/jni/surfacetexturehelper_jni.h"
#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/base/thread.h"
#include "webrtc/base/timeutils.h"
#include "webrtc/common_video/include/i420_buffer_pool.h"
#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/system_wrappers/include/logcat_trace_context.h"

using rtc::Bind;
using rtc::Thread;
using rtc::ThreadManager;

using webrtc::CodecSpecificInfo;
using webrtc::DecodedImageCallback;
using webrtc::EncodedImage;
using webrtc::VideoFrame;
using webrtc::RTPFragmentationHeader;
using webrtc::VideoCodec;
using webrtc::VideoCodecType;
using webrtc::kVideoCodecH264;
using webrtc::kVideoCodecVP8;
using webrtc::kVideoCodecVP9;

namespace webrtc_jni {

// Logging macros.
#define TAG_DECODER "MediaCodecVideoDecoder"
#ifdef TRACK_BUFFER_TIMING
#define ALOGV(...) \
  __android_log_print(ANDROID_LOG_VERBOSE, TAG_DECODER, __VA_ARGS__)
#else
#define ALOGV(...)
#endif
#define ALOGD LOG_TAG(rtc::LS_INFO, TAG_DECODER)
#define ALOGW LOG_TAG(rtc::LS_WARNING, TAG_DECODER)
#define ALOGE LOG_TAG(rtc::LS_ERROR, TAG_DECODER)

// Maximum number of decoded frames for which detailed log info is printed
// after a decoder (re)start; referenced by ResetVariables() below.
enum { kMaxDecodedLogFrames = 10 };
enum { kMaxWarningLogFrames = 2 };
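// The first kMaxDecodedLogFrames decoded frames after a decoder (re)start are
// logged in detail; EnableFrameLogOnWarning() reopens that window for another
// kMaxWarningLogFrames frames whenever the decoder hits a suspicious state.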

class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
                               public rtc::MessageHandler {
 public:
  explicit MediaCodecVideoDecoder(
      JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context);
  virtual ~MediaCodecVideoDecoder();

  int32_t InitDecode(const VideoCodec* codecSettings, int32_t numberOfCores)
      override;

  int32_t Decode(
      const EncodedImage& inputImage, bool missingFrames,
      const RTPFragmentationHeader* fragmentation,
      const CodecSpecificInfo* codecSpecificInfo = NULL,
      int64_t renderTimeMs = -1) override;

  int32_t RegisterDecodeCompleteCallback(DecodedImageCallback* callback)
      override;

  int32_t Release() override;

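  // Returning true asks the receive pipeline to hand frames to this decoder
  // as close to render time as possible, which suits the asynchronous
  // MediaCodec output path.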
  bool PrefersLateDecoding() const override { return true; }

  // rtc::MessageHandler implementation.
  void OnMessage(rtc::Message* msg) override;

  const char* ImplementationName() const override;

 private:
  // CHECK-fail if not running on |codec_thread_|.
  void CheckOnCodecThread();

  int32_t InitDecodeOnCodecThread();
  int32_t ResetDecodeOnCodecThread();
  int32_t ReleaseOnCodecThread();
  int32_t DecodeOnCodecThread(const EncodedImage& inputImage);
  // Deliver any outputs pending in the MediaCodec to our |callback_| and
  // return true on success.
  bool DeliverPendingOutputs(JNIEnv* jni, int dequeue_timeout_ms);
  int32_t ProcessHWErrorOnCodecThread();
  void EnableFrameLogOnWarning();
  void ResetVariables();

  // Type of video codec.
  VideoCodecType codecType_;

  // Render EGL context - owned by factory, should not be allocated/destroyed
  // by VideoDecoder.
  jobject render_egl_context_;

  bool key_frame_required_;
  bool inited_;
  bool sw_fallback_required_;
  bool use_surface_;
  VideoCodec codec_;
  webrtc::I420BufferPool decoded_frame_pool_;
  rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_;
  DecodedImageCallback* callback_;
  int frames_received_;  // Number of frames received by decoder.
  int frames_decoded_;  // Number of frames decoded by decoder.
  // Number of decoded frames for which log information is displayed.
  int frames_decoded_logged_;
  int64_t start_time_ms_;  // Start time for statistics.
  int current_frames_;  // Number of frames in the current statistics interval.
  int current_bytes_;  // Encoded bytes in the current statistics interval.
  // Overall decoding time in the current statistics interval.
  int current_decoding_time_ms_;
  // Overall delay time in the current statistics interval.
  int current_delay_time_ms_;
  uint32_t max_pending_frames_;  // Maximum number of pending input frames.

  // State that is constant for the lifetime of this object once the ctor
  // returns.
  // Thread on which to operate MediaCodec.
  std::unique_ptr<Thread> codec_thread_;
  ScopedGlobalRef<jclass> j_media_codec_video_decoder_class_;
  ScopedGlobalRef<jobject> j_media_codec_video_decoder_;
  jmethodID j_init_decode_method_;
  jmethodID j_reset_method_;
  jmethodID j_release_method_;
  jmethodID j_dequeue_input_buffer_method_;
  jmethodID j_queue_input_buffer_method_;
  jmethodID j_dequeue_byte_buffer_method_;
  jmethodID j_dequeue_texture_buffer_method_;
  jmethodID j_return_decoded_byte_buffer_method_;
  // MediaCodecVideoDecoder fields.
  jfieldID j_input_buffers_field_;
  jfieldID j_output_buffers_field_;
  jfieldID j_color_format_field_;
  jfieldID j_width_field_;
  jfieldID j_height_field_;
  jfieldID j_stride_field_;
  jfieldID j_slice_height_field_;
  // MediaCodecVideoDecoder.DecodedTextureBuffer fields.
  jfieldID j_texture_id_field_;
  jfieldID j_transform_matrix_field_;
  jfieldID j_texture_presentation_timestamp_ms_field_;
  jfieldID j_texture_timestamp_ms_field_;
  jfieldID j_texture_ntp_timestamp_ms_field_;
  jfieldID j_texture_decode_time_ms_field_;
  jfieldID j_texture_frame_delay_ms_field_;
  // MediaCodecVideoDecoder.DecodedOutputBuffer fields.
  jfieldID j_info_index_field_;
  jfieldID j_info_offset_field_;
  jfieldID j_info_size_field_;
  jfieldID j_presentation_timestamp_ms_field_;
  jfieldID j_timestamp_ms_field_;
  jfieldID j_ntp_timestamp_ms_field_;
  jfieldID j_byte_buffer_decode_time_ms_field_;

  // Global references; must be deleted in Release().
  std::vector<jobject> input_buffers_;
};

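// All Java method and field IDs are looked up once in the constructor and
// cached in the members above; the Java decoder object itself is held through
// a global reference so it stays valid for calls made later from
// |codec_thread_|.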
MediaCodecVideoDecoder::MediaCodecVideoDecoder(
    JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context) :
    codecType_(codecType),
    render_egl_context_(render_egl_context),
    key_frame_required_(true),
    inited_(false),
    sw_fallback_required_(false),
    codec_thread_(new Thread()),
    j_media_codec_video_decoder_class_(
        jni,
        FindClass(jni, "org/webrtc/MediaCodecVideoDecoder")),
    j_media_codec_video_decoder_(
        jni,
        jni->NewObject(*j_media_codec_video_decoder_class_,
                       GetMethodID(jni,
                                   *j_media_codec_video_decoder_class_,
                                   "<init>",
                                   "()V"))) {
  ScopedLocalRefFrame local_ref_frame(jni);
  codec_thread_->SetName("MediaCodecVideoDecoder", NULL);
  RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoDecoder";

  j_init_decode_method_ = GetMethodID(
      jni, *j_media_codec_video_decoder_class_, "initDecode",
      "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;"
      "IILorg/webrtc/SurfaceTextureHelper;)Z");
  j_reset_method_ =
      GetMethodID(jni, *j_media_codec_video_decoder_class_, "reset", "(II)V");
  j_release_method_ =
      GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V");
  j_dequeue_input_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I");
  j_queue_input_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJJJ)Z");
  j_dequeue_byte_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer",
      "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer;");
  j_dequeue_texture_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_decoder_class_, "dequeueTextureBuffer",
      "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer;");
  j_return_decoded_byte_buffer_method_ =
      GetMethodID(jni, *j_media_codec_video_decoder_class_,
                  "returnDecodedOutputBuffer", "(I)V");

  j_input_buffers_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_,
      "inputBuffers", "[Ljava/nio/ByteBuffer;");
  j_output_buffers_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_,
      "outputBuffers", "[Ljava/nio/ByteBuffer;");
  j_color_format_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_, "colorFormat", "I");
  j_width_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_, "width", "I");
  j_height_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_, "height", "I");
  j_stride_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_, "stride", "I");
  j_slice_height_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I");

  jclass j_decoded_texture_buffer_class = FindClass(jni,
      "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer");
  j_texture_id_field_ = GetFieldID(
      jni, j_decoded_texture_buffer_class, "textureID", "I");
  j_transform_matrix_field_ = GetFieldID(
      jni, j_decoded_texture_buffer_class, "transformMatrix", "[F");
  j_texture_presentation_timestamp_ms_field_ = GetFieldID(
      jni, j_decoded_texture_buffer_class, "presentationTimeStampMs", "J");
  j_texture_timestamp_ms_field_ = GetFieldID(
      jni, j_decoded_texture_buffer_class, "timeStampMs", "J");
  j_texture_ntp_timestamp_ms_field_ = GetFieldID(
      jni, j_decoded_texture_buffer_class, "ntpTimeStampMs", "J");
  j_texture_decode_time_ms_field_ = GetFieldID(
      jni, j_decoded_texture_buffer_class, "decodeTimeMs", "J");
  j_texture_frame_delay_ms_field_ = GetFieldID(
      jni, j_decoded_texture_buffer_class, "frameDelayMs", "J");

  jclass j_decoded_output_buffer_class = FindClass(jni,
      "org/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer");
  j_info_index_field_ = GetFieldID(
      jni, j_decoded_output_buffer_class, "index", "I");
  j_info_offset_field_ = GetFieldID(
      jni, j_decoded_output_buffer_class, "offset", "I");
  j_info_size_field_ = GetFieldID(
      jni, j_decoded_output_buffer_class, "size", "I");
  j_presentation_timestamp_ms_field_ = GetFieldID(
      jni, j_decoded_output_buffer_class, "presentationTimeStampMs", "J");
  j_timestamp_ms_field_ = GetFieldID(
      jni, j_decoded_output_buffer_class, "timeStampMs", "J");
  j_ntp_timestamp_ms_field_ = GetFieldID(
      jni, j_decoded_output_buffer_class, "ntpTimeStampMs", "J");
  j_byte_buffer_decode_time_ms_field_ = GetFieldID(
      jni, j_decoded_output_buffer_class, "decodeTimeMs", "J");

  CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed";
  use_surface_ = (render_egl_context_ != NULL);
  ALOGD << "MediaCodecVideoDecoder ctor. Use surface: " << use_surface_;
  memset(&codec_, 0, sizeof(codec_));
  AllowBlockingCalls();
}

MediaCodecVideoDecoder::~MediaCodecVideoDecoder() {
  // Call Release() to ensure no more callbacks to us after we are deleted.
  Release();
}

int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst,
                                           int32_t numberOfCores) {
  ALOGD << "InitDecode.";
  if (inst == NULL) {
    ALOGE << "NULL VideoCodec instance";
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  // Factory should guard against other codecs being used with us.
  RTC_CHECK(inst->codecType == codecType_)
      << "Unsupported codec " << inst->codecType << " for " << codecType_;

  if (sw_fallback_required_) {
    ALOGE << "InitDecode() - fallback to SW decoder";
    return WEBRTC_VIDEO_CODEC_OK;
  }
  // Save VideoCodec instance for later.
  if (&codec_ != inst) {
    codec_ = *inst;
  }
  // If maxFramerate is not set then assume 30 fps.
  codec_.maxFramerate = (codec_.maxFramerate >= 1) ? codec_.maxFramerate : 30;

  // Call Java init.
  return codec_thread_->Invoke<int32_t>(
      RTC_FROM_HERE,
      Bind(&MediaCodecVideoDecoder::InitDecodeOnCodecThread, this));
}

void MediaCodecVideoDecoder::ResetVariables() {
  CheckOnCodecThread();

  key_frame_required_ = true;
  frames_received_ = 0;
  frames_decoded_ = 0;
  frames_decoded_logged_ = kMaxDecodedLogFrames;
  start_time_ms_ = rtc::TimeMillis();
  current_frames_ = 0;
  current_bytes_ = 0;
  current_decoding_time_ms_ = 0;
  current_delay_time_ms_ = 0;
}

int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  ALOGD << "InitDecodeOnCodecThread Type: " << (int)codecType_ << ". "
      << codec_.width << " x " << codec_.height << ". Fps: " <<
      (int)codec_.maxFramerate;

  // Release previous codec first if it was allocated before.
  int ret_val = ReleaseOnCodecThread();
  if (ret_val < 0) {
    ALOGE << "Release failure: " << ret_val << " - fallback to SW codec";
    sw_fallback_required_ = true;
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  ResetVariables();

  if (use_surface_) {
    surface_texture_helper_ = SurfaceTextureHelper::create(
        jni, "Decoder SurfaceTextureHelper", render_egl_context_);
    if (!surface_texture_helper_) {
      ALOGE << "Couldn't create SurfaceTextureHelper - fallback to SW codec";
      sw_fallback_required_ = true;
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
  }

  jobject j_video_codec_enum = JavaEnumFromIndexAndClassName(
      jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_);
  bool success = jni->CallBooleanMethod(
      *j_media_codec_video_decoder_,
      j_init_decode_method_,
      j_video_codec_enum,
      codec_.width,
      codec_.height,
      use_surface_ ? surface_texture_helper_->GetJavaSurfaceTextureHelper()
                   : nullptr);

  if (CheckException(jni) || !success) {
    ALOGE << "Codec initialization error - fallback to SW codec.";
    sw_fallback_required_ = true;
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  inited_ = true;

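  // Each codec type tolerates a different number of input frames queued inside
  // MediaCodec before DecodeOnCodecThread() forces a drain; the
  // kMaxPendingFrames* constants are declared alongside the other shared
  // MediaCodec constants in androidmediacodeccommon.h.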
  switch (codecType_) {
    case kVideoCodecVP8:
      max_pending_frames_ = kMaxPendingFramesVp8;
      break;
    case kVideoCodecVP9:
      max_pending_frames_ = kMaxPendingFramesVp9;
      break;
    case kVideoCodecH264:
      max_pending_frames_ = kMaxPendingFramesH264;
      break;
    default:
      max_pending_frames_ = 0;
  }
  ALOGD << "Maximum amount of pending frames: " << max_pending_frames_;

  jobjectArray input_buffers = (jobjectArray)GetObjectField(
      jni, *j_media_codec_video_decoder_, j_input_buffers_field_);
  size_t num_input_buffers = jni->GetArrayLength(input_buffers);
  input_buffers_.resize(num_input_buffers);
  for (size_t i = 0; i < num_input_buffers; ++i) {
    input_buffers_[i] =
        jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
    if (CheckException(jni)) {
      ALOGE << "NewGlobalRef error - fallback to SW codec.";
      sw_fallback_required_ = true;
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
  }

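  // Kick off the output polling loop: OnMessage() drains any pending outputs
  // and re-posts this message every kMediaCodecPollMs until the decoder is
  // released or reset.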
  codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollMs, this);

  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t MediaCodecVideoDecoder::ResetDecodeOnCodecThread() {
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  ALOGD << "ResetDecodeOnCodecThread Type: " << (int)codecType_ << ". "
      << codec_.width << " x " << codec_.height;
  ALOGD << "  Frames received: " << frames_received_ <<
      ". Frames decoded: " << frames_decoded_;

  inited_ = false;
  rtc::MessageQueueManager::Clear(this);
  ResetVariables();

  jni->CallVoidMethod(
      *j_media_codec_video_decoder_,
      j_reset_method_,
      codec_.width,
      codec_.height);

  if (CheckException(jni)) {
    ALOGE << "Soft reset error - fallback to SW codec.";
    sw_fallback_required_ = true;
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  inited_ = true;

  codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollMs, this);

  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t MediaCodecVideoDecoder::Release() {
  ALOGD << "DecoderRelease request";
  return codec_thread_->Invoke<int32_t>(
      RTC_FROM_HERE, Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this));
}

int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() {
  if (!inited_) {
    return WEBRTC_VIDEO_CODEC_OK;
  }
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ALOGD << "DecoderReleaseOnCodecThread: Frames received: " <<
      frames_received_ << ". Frames decoded: " << frames_decoded_;
  ScopedLocalRefFrame local_ref_frame(jni);
  for (size_t i = 0; i < input_buffers_.size(); i++) {
    jni->DeleteGlobalRef(input_buffers_[i]);
  }
  input_buffers_.clear();
  jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_);
  surface_texture_helper_ = nullptr;
  inited_ = false;
  rtc::MessageQueueManager::Clear(this);
  if (CheckException(jni)) {
    ALOGE << "Decoder release exception";
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  ALOGD << "DecoderReleaseOnCodecThread done";
  return WEBRTC_VIDEO_CODEC_OK;
}

void MediaCodecVideoDecoder::CheckOnCodecThread() {
  RTC_CHECK(codec_thread_.get() == ThreadManager::Instance()->CurrentThread())
      << "Running on wrong thread!";
}

void MediaCodecVideoDecoder::EnableFrameLogOnWarning() {
  // Log the next kMaxWarningLogFrames output frames.
  frames_decoded_logged_ = std::max(
      frames_decoded_logged_, frames_decoded_ + kMaxWarningLogFrames);
}

int32_t MediaCodecVideoDecoder::ProcessHWErrorOnCodecThread() {
  CheckOnCodecThread();
  int ret_val = ReleaseOnCodecThread();
  if (ret_val < 0) {
    ALOGE << "ProcessHWError: Release failure";
  }
  if (codecType_ == kVideoCodecH264) {
    // There is currently no SW H.264 decoder to fall back to, so try to
    // restart the HW codec instead.
    ret_val = InitDecodeOnCodecThread();
    ALOGE << "Reset H.264 codec done. Status: " << ret_val;
    if (ret_val == WEBRTC_VIDEO_CODEC_OK) {
      // H.264 codec was successfully reset - return regular error code.
      return WEBRTC_VIDEO_CODEC_ERROR;
    } else {
      // Failed to restart the H.264 codec - return an error code that should
      // stop the call.
      return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
    }
  } else {
    sw_fallback_required_ = true;
    ALOGE << "Return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE";
    return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
  }
}

int32_t MediaCodecVideoDecoder::Decode(
    const EncodedImage& inputImage,
    bool missingFrames,
    const RTPFragmentationHeader* fragmentation,
    const CodecSpecificInfo* codecSpecificInfo,
    int64_t renderTimeMs) {
  if (sw_fallback_required_) {
    ALOGE << "Decode() - fallback to SW codec";
    return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
  }
  if (callback_ == NULL) {
    ALOGE << "Decode() - callback_ is NULL";
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  if (inputImage._buffer == NULL && inputImage._length > 0) {
    ALOGE << "Decode() - inputImage is incorrect";
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  if (!inited_) {
    ALOGE << "Decode() - decoder is not initialized";
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }

  // Check if the encoded frame dimensions have changed.
  if ((inputImage._encodedWidth * inputImage._encodedHeight > 0) &&
      (inputImage._encodedWidth != codec_.width ||
      inputImage._encodedHeight != codec_.height)) {
    ALOGW << "Input resolution changed from " <<
        codec_.width << " x " << codec_.height << " to " <<
        inputImage._encodedWidth << " x " << inputImage._encodedHeight;
    codec_.width = inputImage._encodedWidth;
    codec_.height = inputImage._encodedHeight;
    int32_t ret;
    if (use_surface_ &&
        (codecType_ == kVideoCodecVP8 || codecType_ == kVideoCodecH264)) {
      // Soft codec reset - only for surface decoding.
      ret = codec_thread_->Invoke<int32_t>(
          RTC_FROM_HERE,
          Bind(&MediaCodecVideoDecoder::ResetDecodeOnCodecThread, this));
    } else {
      // Hard codec reset.
      ret = InitDecode(&codec_, 1);
    }
    if (ret < 0) {
      ALOGE << "InitDecode failure: " << ret << " - fallback to SW codec";
      sw_fallback_required_ = true;
      return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
    }
  }

  // Always start with a complete key frame.
  if (key_frame_required_) {
    if (inputImage._frameType != webrtc::kVideoFrameKey) {
      ALOGE << "Decode() - key frame is required";
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
    if (!inputImage._completeFrame) {
      ALOGE << "Decode() - complete frame is required";
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
    key_frame_required_ = false;
  }
  if (inputImage._length == 0) {
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  return codec_thread_->Invoke<int32_t>(
      RTC_FROM_HERE,
      Bind(&MediaCodecVideoDecoder::DecodeOnCodecThread, this, inputImage));
}

int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
    const EncodedImage& inputImage) {
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);

  // Try to drain the decoder and wait until output is not too
  // much behind the input.
  if (codecType_ == kVideoCodecH264 &&
      frames_received_ > frames_decoded_ + max_pending_frames_) {
    // Print warning for H.264 only - for VP8/VP9 one frame delay is OK.
    ALOGW << "Decoder is too far behind. Try to drain. Received: " <<
        frames_received_ << ". Decoded: " << frames_decoded_;
    EnableFrameLogOnWarning();
  }
  const int64_t drain_start = rtc::TimeMillis();
  while ((frames_received_ > frames_decoded_ + max_pending_frames_) &&
         (rtc::TimeMillis() - drain_start) < kMediaCodecTimeoutMs) {
    if (!DeliverPendingOutputs(jni, kMediaCodecPollMs)) {
      ALOGE << "DeliverPendingOutputs error. Frames received: " <<
          frames_received_ << ". Frames decoded: " << frames_decoded_;
      return ProcessHWErrorOnCodecThread();
    }
  }
  if (frames_received_ > frames_decoded_ + max_pending_frames_) {
    ALOGE << "Output buffer dequeue timeout. Frames received: " <<
        frames_received_ << ". Frames decoded: " << frames_decoded_;
    return ProcessHWErrorOnCodecThread();
  }

  // Get input buffer.
  int j_input_buffer_index = jni->CallIntMethod(
      *j_media_codec_video_decoder_, j_dequeue_input_buffer_method_);
  if (CheckException(jni) || j_input_buffer_index < 0) {
    ALOGE << "dequeueInputBuffer error: " << j_input_buffer_index <<
        ". Retry DeliverPendingOutputs.";
    EnableFrameLogOnWarning();
    // Try to drain the decoder.
    if (!DeliverPendingOutputs(jni, kMediaCodecPollMs)) {
      ALOGE << "DeliverPendingOutputs error. Frames received: " <<
          frames_received_ << ". Frames decoded: " << frames_decoded_;
      return ProcessHWErrorOnCodecThread();
    }
    // Try to dequeue an input buffer one last time.
    j_input_buffer_index = jni->CallIntMethod(
        *j_media_codec_video_decoder_, j_dequeue_input_buffer_method_);
    if (CheckException(jni) || j_input_buffer_index < 0) {
      ALOGE << "dequeueInputBuffer critical error: " << j_input_buffer_index;
      return ProcessHWErrorOnCodecThread();
    }
  }

  // Copy encoded data to the Java ByteBuffer.
  jobject j_input_buffer = input_buffers_[j_input_buffer_index];
  uint8_t* buffer =
      reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer));
  RTC_CHECK(buffer) << "Indirect buffer??";
  int64_t buffer_capacity = jni->GetDirectBufferCapacity(j_input_buffer);
  if (CheckException(jni) || buffer_capacity < inputImage._length) {
    ALOGE << "Input frame size " << inputImage._length <<
        " is bigger than buffer size " << buffer_capacity;
    return ProcessHWErrorOnCodecThread();
  }
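  // The presentation timestamp handed to MediaCodec is synthesized from the
  // frame count and the nominal framerate; the real RTP and NTP timestamps
  // travel through the Java side and come back attached to the decoded frame.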
  jlong presentation_timestamp_us = static_cast<jlong>(
      static_cast<int64_t>(frames_received_) * 1000000 / codec_.maxFramerate);
  memcpy(buffer, inputImage._buffer, inputImage._length);

  if (frames_decoded_ < frames_decoded_logged_) {
    ALOGD << "Decoder frame in # " << frames_received_ <<
        ". Type: " << inputImage._frameType <<
        ". Buffer # " << j_input_buffer_index <<
        ". TS: " << presentation_timestamp_us / 1000 <<
        ". Size: " << inputImage._length;
  }

  // Save input image timestamps for later output.
  frames_received_++;
  current_bytes_ += inputImage._length;

  // Feed input to the decoder.
  bool success = jni->CallBooleanMethod(
      *j_media_codec_video_decoder_,
      j_queue_input_buffer_method_,
      j_input_buffer_index,
      inputImage._length,
      presentation_timestamp_us,
      static_cast<int64_t>(inputImage._timeStamp),
      inputImage.ntp_time_ms_);
  if (CheckException(jni) || !success) {
    ALOGE << "queueInputBuffer error";
    return ProcessHWErrorOnCodecThread();
  }

  // Try to drain the decoder.
  if (!DeliverPendingOutputs(jni, 0)) {
    ALOGE << "DeliverPendingOutputs error";
    return ProcessHWErrorOnCodecThread();
  }

  return WEBRTC_VIDEO_CODEC_OK;
}

bool MediaCodecVideoDecoder::DeliverPendingOutputs(
    JNIEnv* jni, int dequeue_timeout_ms) {
  if (frames_received_ <= frames_decoded_) {
    // No need to query for output buffers - decoder is drained.
    return true;
  }
  // Get decoder output.
  jobject j_decoder_output_buffer =
      jni->CallObjectMethod(*j_media_codec_video_decoder_,
                            use_surface_ ? j_dequeue_texture_buffer_method_
                                         : j_dequeue_byte_buffer_method_,
                            dequeue_timeout_ms);

  if (CheckException(jni)) {
    ALOGE << "dequeueOutputBuffer() error";
    return false;
  }
  if (IsNull(jni, j_decoder_output_buffer)) {
    // No decoded frame ready.
    return true;
  }

  // Get decoded video frame properties.
  int color_format = GetIntField(jni, *j_media_codec_video_decoder_,
      j_color_format_field_);
  int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_);
  int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_);
  int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_);
  int slice_height = GetIntField(jni, *j_media_codec_video_decoder_,
      j_slice_height_field_);

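  // Each decoded frame carries three timestamps: the synthetic MediaCodec
  // presentation time (used only for logging), the original RTP timestamp, and
  // the NTP capture time, the latter two round-tripped through the Java
  // decoder and reattached to the output frame below.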
  rtc::scoped_refptr<webrtc::VideoFrameBuffer> frame_buffer;
  int64_t presentation_timestamps_ms = 0;
  int64_t output_timestamps_ms = 0;
  int64_t output_ntp_timestamps_ms = 0;
  int decode_time_ms = 0;
  int64_t frame_delayed_ms = 0;
  if (use_surface_) {
    // Extract data from the Java DecodedTextureBuffer.
    presentation_timestamps_ms = GetLongField(
        jni, j_decoder_output_buffer,
        j_texture_presentation_timestamp_ms_field_);
    output_timestamps_ms = GetLongField(
        jni, j_decoder_output_buffer, j_texture_timestamp_ms_field_);
    output_ntp_timestamps_ms = GetLongField(
        jni, j_decoder_output_buffer, j_texture_ntp_timestamp_ms_field_);
    decode_time_ms = GetLongField(
        jni, j_decoder_output_buffer, j_texture_decode_time_ms_field_);

    const int texture_id =
        GetIntField(jni, j_decoder_output_buffer, j_texture_id_field_);
    if (texture_id != 0) {  // |texture_id| == 0 represents a dropped frame.
      const jfloatArray j_transform_matrix =
          reinterpret_cast<jfloatArray>(GetObjectField(
              jni, j_decoder_output_buffer, j_transform_matrix_field_));
      frame_delayed_ms = GetLongField(
          jni, j_decoder_output_buffer, j_texture_frame_delay_ms_field_);

      // Create webrtc::VideoFrameBuffer with native texture handle.
      frame_buffer = surface_texture_helper_->CreateTextureFrame(
          width, height, NativeHandleImpl(jni, texture_id, j_transform_matrix));
    } else {
      EnableFrameLogOnWarning();
    }
  } else {
    // Extract data from the Java ByteBuffer and create an output YUV420
    // frame. This path is for non-surface decoding only.
    const int output_buffer_index = GetIntField(
        jni, j_decoder_output_buffer, j_info_index_field_);
    const int output_buffer_offset = GetIntField(
        jni, j_decoder_output_buffer, j_info_offset_field_);
    const int output_buffer_size = GetIntField(
        jni, j_decoder_output_buffer, j_info_size_field_);
    presentation_timestamps_ms = GetLongField(
        jni, j_decoder_output_buffer, j_presentation_timestamp_ms_field_);
    output_timestamps_ms = GetLongField(
        jni, j_decoder_output_buffer, j_timestamp_ms_field_);
    output_ntp_timestamps_ms = GetLongField(
        jni, j_decoder_output_buffer, j_ntp_timestamp_ms_field_);

    decode_time_ms = GetLongField(jni, j_decoder_output_buffer,
        j_byte_buffer_decode_time_ms_field_);

    if (output_buffer_size < width * height * 3 / 2) {
      ALOGE << "Insufficient output buffer size: " << output_buffer_size;
      return false;
    }
    if (output_buffer_size < stride * height * 3 / 2 &&
        slice_height == height && stride > width) {
      // Some codecs (Exynos) incorrectly report the stride of the output byte
      // buffer, so the actual stride value needs to be corrected.
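      // For a planar 4:2:0 buffer, size == stride * height * 3 / 2, so with
      // slice_height == height the effective stride can be recovered as
      // stride = size * 2 / (3 * height).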
      stride = output_buffer_size * 2 / (height * 3);
    }
    jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField(
        jni, *j_media_codec_video_decoder_, j_output_buffers_field_));
    jobject output_buffer =
        jni->GetObjectArrayElement(output_buffers, output_buffer_index);
    uint8_t* payload = reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(
        output_buffer));
    if (CheckException(jni)) {
      return false;
    }
    payload += output_buffer_offset;

    // Create YUV420 frame.
    frame_buffer = decoded_frame_pool_.CreateBuffer(width, height);
    if (color_format == COLOR_FormatYUV420Planar) {
      RTC_CHECK_EQ(0, stride % 2);
      RTC_CHECK_EQ(0, slice_height % 2);
      const int uv_stride = stride / 2;
      const int u_slice_height = slice_height / 2;
      const uint8_t* y_ptr = payload;
      const uint8_t* u_ptr = y_ptr + stride * slice_height;
      const uint8_t* v_ptr = u_ptr + uv_stride * u_slice_height;
      libyuv::I420Copy(y_ptr, stride,
                       u_ptr, uv_stride,
                       v_ptr, uv_stride,
                       frame_buffer->MutableDataY(),
                       frame_buffer->StrideY(),
                       frame_buffer->MutableDataU(),
                       frame_buffer->StrideU(),
                       frame_buffer->MutableDataV(),
                       frame_buffer->StrideV(),
                       width, height);
    } else {
      // All other supported formats are NV12.
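      // NV12 keeps a single interleaved UV plane after the Y plane;
      // libyuv::NV12ToI420 de-interleaves it into the separate U and V planes
      // of the pooled I420 buffer.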
      const uint8_t* y_ptr = payload;
      const uint8_t* uv_ptr = y_ptr + stride * slice_height;
      libyuv::NV12ToI420(
          y_ptr, stride,
          uv_ptr, stride,
          frame_buffer->MutableDataY(),
          frame_buffer->StrideY(),
          frame_buffer->MutableDataU(),
          frame_buffer->StrideU(),
          frame_buffer->MutableDataV(),
          frame_buffer->StrideV(),
          width, height);
    }
    // Return output byte buffer back to codec.
    jni->CallVoidMethod(
        *j_media_codec_video_decoder_,
        j_return_decoded_byte_buffer_method_,
        output_buffer_index);
    if (CheckException(jni)) {
      ALOGE << "returnDecodedOutputBuffer error";
      return false;
    }
  }
  if (frames_decoded_ < frames_decoded_logged_) {
    ALOGD << "Decoder frame out # " << frames_decoded_ <<
        ". " << width << " x " << height <<
        ". " << stride << " x " << slice_height <<
        ". Color: " << color_format <<
        ". TS: " << presentation_timestamps_ms <<
        ". DecTime: " << (int)decode_time_ms <<
        ". DelayTime: " << (int)frame_delayed_ms;
  }

  // Calculate and print decoding statistics - every 3 seconds.
  frames_decoded_++;
  current_frames_++;
  current_decoding_time_ms_ += decode_time_ms;
  current_delay_time_ms_ += frame_delayed_ms;
  int statistic_time_ms = rtc::TimeMillis() - start_time_ms_;
  if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
      current_frames_ > 0) {
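    // bytes * 8 / ms == bits/ms == kbits/s, so |current_bitrate| is already
    // in kbps; fps is rounded to the nearest integer.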
    int current_bitrate = current_bytes_ * 8 / statistic_time_ms;
    int current_fps =
        (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms;
    ALOGD << "Frames decoded: " << frames_decoded_ <<
        ". Received: " << frames_received_ <<
        ". Bitrate: " << current_bitrate << " kbps" <<
        ". Fps: " << current_fps <<
        ". DecTime: " << (current_decoding_time_ms_ / current_frames_) <<
        ". DelayTime: " << (current_delay_time_ms_ / current_frames_) <<
        " for last " << statistic_time_ms << " ms.";
    start_time_ms_ = rtc::TimeMillis();
    current_frames_ = 0;
    current_bytes_ = 0;
    current_decoding_time_ms_ = 0;
    current_delay_time_ms_ = 0;
  }

  // If the frame was dropped, frame_buffer is left as nullptr.
  if (frame_buffer) {
    VideoFrame decoded_frame(frame_buffer, 0, 0, webrtc::kVideoRotation_0);
    decoded_frame.set_timestamp(output_timestamps_ms);
    decoded_frame.set_ntp_time_ms(output_ntp_timestamps_ms);

    const int32_t callback_status =
        callback_->Decoded(decoded_frame, decode_time_ms);
    if (callback_status > 0) {
      ALOGE << "callback error";
    }
  }
  return true;
}

int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback(
    DecodedImageCallback* callback) {
  callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}

void MediaCodecVideoDecoder::OnMessage(rtc::Message* msg) {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  if (!inited_) {
    return;
  }
  // We only ever send one message to |this| directly (not through a Bind()'d
  // functor), so expect no ID/data.
  RTC_CHECK(!msg->message_id) << "Unexpected message!";
  RTC_CHECK(!msg->pdata) << "Unexpected message!";
  CheckOnCodecThread();

  if (!DeliverPendingOutputs(jni, 0)) {
    ALOGE << "OnMessage: DeliverPendingOutputs error";
    ProcessHWErrorOnCodecThread();
    return;
  }
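  // Re-arm the poll timer so outputs keep draining every kMediaCodecPollMs;
  // the chain is broken by MessageQueueManager::Clear() in release/reset or
  // by a HW error above.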
  codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollMs, this);
}

MediaCodecVideoDecoderFactory::MediaCodecVideoDecoderFactory()
    : egl_context_(nullptr) {
  ALOGD << "MediaCodecVideoDecoderFactory ctor";
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  jclass j_decoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoDecoder");
  supported_codec_types_.clear();

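  // Probe HW support for each codec type via the static Java helpers; a Java
  // exception during a probe is treated as "not supported".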
  bool is_vp8_hw_supported = jni->CallStaticBooleanMethod(
      j_decoder_class,
      GetStaticMethodID(jni, j_decoder_class, "isVp8HwSupported", "()Z"));
  if (CheckException(jni)) {
    is_vp8_hw_supported = false;
  }
  if (is_vp8_hw_supported) {
    ALOGD << "VP8 HW Decoder supported.";
    supported_codec_types_.push_back(kVideoCodecVP8);
  }

  bool is_vp9_hw_supported = jni->CallStaticBooleanMethod(
      j_decoder_class,
      GetStaticMethodID(jni, j_decoder_class, "isVp9HwSupported", "()Z"));
  if (CheckException(jni)) {
    is_vp9_hw_supported = false;
  }
  if (is_vp9_hw_supported) {
    ALOGD << "VP9 HW Decoder supported.";
    supported_codec_types_.push_back(kVideoCodecVP9);
  }

  bool is_h264_hw_supported = jni->CallStaticBooleanMethod(
      j_decoder_class,
      GetStaticMethodID(jni, j_decoder_class, "isH264HwSupported", "()Z"));
  if (CheckException(jni)) {
    is_h264_hw_supported = false;
  }
  if (is_h264_hw_supported) {
    ALOGD << "H264 HW Decoder supported.";
    supported_codec_types_.push_back(kVideoCodecH264);
  }
}

MediaCodecVideoDecoderFactory::~MediaCodecVideoDecoderFactory() {
  ALOGD << "MediaCodecVideoDecoderFactory dtor";
  if (egl_context_) {
    JNIEnv* jni = AttachCurrentThreadIfNeeded();
    jni->DeleteGlobalRef(egl_context_);
  }
}

void MediaCodecVideoDecoderFactory::SetEGLContext(
    JNIEnv* jni, jobject egl_context) {
  ALOGD << "MediaCodecVideoDecoderFactory::SetEGLContext";
  if (egl_context_) {
    jni->DeleteGlobalRef(egl_context_);
    egl_context_ = nullptr;
  }
  egl_context_ = jni->NewGlobalRef(egl_context);
  if (CheckException(jni)) {
    ALOGE << "error calling NewGlobalRef for EGL Context.";
  }
}

webrtc::VideoDecoder* MediaCodecVideoDecoderFactory::CreateVideoDecoder(
    VideoCodecType type) {
  if (supported_codec_types_.empty()) {
    ALOGW << "No HW video decoder for type " << (int)type;
    return nullptr;
  }
  for (VideoCodecType codec_type : supported_codec_types_) {
    if (codec_type == type) {
      ALOGD << "Create HW video decoder for type " << (int)type;
      return new MediaCodecVideoDecoder(AttachCurrentThreadIfNeeded(), type,
                                        egl_context_);
    }
  }
  ALOGW << "Cannot find HW video decoder for type " << (int)type;
  return nullptr;
}

void MediaCodecVideoDecoderFactory::DestroyVideoDecoder(
    webrtc::VideoDecoder* decoder) {
  ALOGD << "Destroy video decoder.";
  delete decoder;
}

const char* MediaCodecVideoDecoder::ImplementationName() const {
  return "MediaCodec";
}

}  // namespace webrtc_jni