OLD | NEW |
| (Empty) |
1 /* | |
2 * libjingle | |
3 * Copyright 2015 Google Inc. | |
4 * | |
5 * Redistribution and use in source and binary forms, with or without | |
6 * modification, are permitted provided that the following conditions are met: | |
7 * | |
8 * 1. Redistributions of source code must retain the above copyright notice, | |
9 * this list of conditions and the following disclaimer. | |
10 * 2. Redistributions in binary form must reproduce the above copyright notice, | |
11 * this list of conditions and the following disclaimer in the documentation | |
12 * and/or other materials provided with the distribution. | |
13 * 3. The name of the author may not be used to endorse or promote products | |
14 * derived from this software without specific prior written permission. | |
15 * | |
16 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED | |
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF | |
18 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO | |
19 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | |
20 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, | |
21 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; | |
22 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, | |
23 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR | |
24 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF | |
25 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | |
26 * | |
27 */ | |
28 | |
29 #include <algorithm> | |
30 #include <vector> | |
31 | |
32 #include "talk/app/webrtc/java/jni/androidmediadecoder_jni.h" | |
33 #include "talk/app/webrtc/java/jni/androidmediacodeccommon.h" | |
34 #include "talk/app/webrtc/java/jni/classreferenceholder.h" | |
35 #include "talk/app/webrtc/java/jni/native_handle_impl.h" | |
36 #include "talk/app/webrtc/java/jni/surfacetexturehelper_jni.h" | |
37 #include "webrtc/base/bind.h" | |
38 #include "webrtc/base/checks.h" | |
39 #include "webrtc/base/logging.h" | |
40 #include "webrtc/base/scoped_ref_ptr.h" | |
41 #include "webrtc/base/thread.h" | |
42 #include "webrtc/base/timeutils.h" | |
43 #include "webrtc/common_video/include/i420_buffer_pool.h" | |
44 #include "webrtc/modules/video_coding/include/video_codec_interface.h" | |
45 #include "webrtc/system_wrappers/include/logcat_trace_context.h" | |
46 #include "webrtc/system_wrappers/include/tick_util.h" | |
47 #include "third_party/libyuv/include/libyuv/convert.h" | |
48 #include "third_party/libyuv/include/libyuv/convert_from.h" | |
49 #include "third_party/libyuv/include/libyuv/video_common.h" | |
50 | |
51 using rtc::Bind; | |
52 using rtc::Thread; | |
53 using rtc::ThreadManager; | |
54 using rtc::scoped_ptr; | |
55 | |
56 using webrtc::CodecSpecificInfo; | |
57 using webrtc::DecodedImageCallback; | |
58 using webrtc::EncodedImage; | |
59 using webrtc::VideoFrame; | |
60 using webrtc::RTPFragmentationHeader; | |
61 using webrtc::TickTime; | |
62 using webrtc::VideoCodec; | |
63 using webrtc::VideoCodecType; | |
64 using webrtc::kVideoCodecH264; | |
65 using webrtc::kVideoCodecVP8; | |
66 using webrtc::kVideoCodecVP9; | |
67 | |
68 namespace webrtc_jni { | |
69 | |
70 // Logging macros. | |
71 #define TAG_DECODER "MediaCodecVideoDecoder" | |
72 #ifdef TRACK_BUFFER_TIMING | |
73 #define ALOGV(...) | |
74 __android_log_print(ANDROID_LOG_VERBOSE, TAG_DECODER, __VA_ARGS__) | |
75 #else | |
76 #define ALOGV(...) | |
77 #endif | |
78 #define ALOGD LOG_TAG(rtc::LS_INFO, TAG_DECODER) | |
79 #define ALOGW LOG_TAG(rtc::LS_WARNING, TAG_DECODER) | |
80 #define ALOGE LOG_TAG(rtc::LS_ERROR, TAG_DECODER) | |
81 | |
// Android hardware video decoder backed by the Java class
// org.webrtc.MediaCodecVideoDecoder via JNI. All MediaCodec interaction runs
// on |codec_thread_|; the public webrtc::VideoDecoder entry points marshal
// work onto that thread with rtc::Thread::Invoke.
class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
                               public rtc::MessageHandler {
 public:
  explicit MediaCodecVideoDecoder(
      JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context);
  virtual ~MediaCodecVideoDecoder();

  int32_t InitDecode(const VideoCodec* codecSettings, int32_t numberOfCores)
      override;

  int32_t Decode(
      const EncodedImage& inputImage, bool missingFrames,
      const RTPFragmentationHeader* fragmentation,
      const CodecSpecificInfo* codecSpecificInfo = NULL,
      int64_t renderTimeMs = -1) override;

  int32_t RegisterDecodeCompleteCallback(DecodedImageCallback* callback)
      override;

  int32_t Release() override;

  int32_t Reset() override;

  bool PrefersLateDecoding() const override { return true; }

  // rtc::MessageHandler implementation.
  void OnMessage(rtc::Message* msg) override;

  const char* ImplementationName() const override;

 private:
  // CHECK-fail if not running on |codec_thread_|.
  void CheckOnCodecThread();

  int32_t InitDecodeOnCodecThread();
  int32_t ReleaseOnCodecThread();
  int32_t DecodeOnCodecThread(const EncodedImage& inputImage);
  // Deliver any outputs pending in the MediaCodec to our |callback_| and return
  // true on success.
  // NOTE(review): the definition names this parameter dequeue_timeout_ms and
  // callers pass milliseconds (kMediaCodecPollMs) — the |_us| suffix here
  // looks stale; confirm and rename.
  bool DeliverPendingOutputs(JNIEnv* jni, int dequeue_timeout_us);
  int32_t ProcessHWErrorOnCodecThread();

  // Type of video codec.
  VideoCodecType codecType_;

  // Render EGL context - owned by factory, should not be allocated/destroyed
  // by VideoDecoder.
  jobject render_egl_context_;

  bool key_frame_required_;    // Next accepted input must be a full key frame.
  bool inited_;                // True between successful init and release.
  bool sw_fallback_required_;  // Set on HW failure; SW decoder should be used.
  bool use_surface_;           // True when an EGL context was supplied (texture output).
  VideoCodec codec_;
  webrtc::I420BufferPool decoded_frame_pool_;  // Recycles I420 output buffers.
  rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_;
  DecodedImageCallback* callback_;
  int frames_received_;  // Number of frames received by decoder.
  int frames_decoded_;  // Number of frames decoded by decoder.
  int64_t start_time_ms_;  // Start time for statistics.
  int current_frames_;  // Number of frames in the current statistics interval.
  int current_bytes_;  // Encoded bytes in the current statistics interval.
  int current_decoding_time_ms_;  // Overall decoding time in the current second
  uint32_t max_pending_frames_;  // Maximum number of pending input frames

  // State that is constant for the lifetime of this object once the ctor
  // returns.
  scoped_ptr<Thread> codec_thread_;  // Thread on which to operate MediaCodec.
  ScopedGlobalRef<jclass> j_media_codec_video_decoder_class_;
  ScopedGlobalRef<jobject> j_media_codec_video_decoder_;
  // Cached JNI method IDs on org.webrtc.MediaCodecVideoDecoder.
  jmethodID j_init_decode_method_;
  jmethodID j_release_method_;
  jmethodID j_dequeue_input_buffer_method_;
  jmethodID j_queue_input_buffer_method_;
  jmethodID j_dequeue_byte_buffer_method_;
  jmethodID j_dequeue_texture_buffer_method_;
  jmethodID j_return_decoded_byte_buffer_method_;
  // MediaCodecVideoDecoder fields.
  jfieldID j_input_buffers_field_;
  jfieldID j_output_buffers_field_;
  jfieldID j_color_format_field_;
  jfieldID j_width_field_;
  jfieldID j_height_field_;
  jfieldID j_stride_field_;
  jfieldID j_slice_height_field_;
  // MediaCodecVideoDecoder.DecodedTextureBuffer fields.
  jfieldID j_texture_id_field_;
  jfieldID j_transform_matrix_field_;
  jfieldID j_texture_timestamp_ms_field_;
  jfieldID j_texture_ntp_timestamp_ms_field_;
  jfieldID j_texture_decode_time_ms_field_;
  jfieldID j_texture_frame_delay_ms_field_;
  // MediaCodecVideoDecoder.DecodedOutputBuffer fields.
  jfieldID j_info_index_field_;
  jfieldID j_info_offset_field_;
  jfieldID j_info_size_field_;
  jfieldID j_info_timestamp_ms_field_;
  jfieldID j_info_ntp_timestamp_ms_field_;
  jfieldID j_byte_buffer_decode_time_ms_field_;

  // Global references; must be deleted in Release().
  std::vector<jobject> input_buffers_;
};
185 | |
186 MediaCodecVideoDecoder::MediaCodecVideoDecoder( | |
187 JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context) : | |
188 codecType_(codecType), | |
189 render_egl_context_(render_egl_context), | |
190 key_frame_required_(true), | |
191 inited_(false), | |
192 sw_fallback_required_(false), | |
193 codec_thread_(new Thread()), | |
194 j_media_codec_video_decoder_class_( | |
195 jni, | |
196 FindClass(jni, "org/webrtc/MediaCodecVideoDecoder")), | |
197 j_media_codec_video_decoder_( | |
198 jni, | |
199 jni->NewObject(*j_media_codec_video_decoder_class_, | |
200 GetMethodID(jni, | |
201 *j_media_codec_video_decoder_class_, | |
202 "<init>", | |
203 "()V"))) { | |
204 ScopedLocalRefFrame local_ref_frame(jni); | |
205 codec_thread_->SetName("MediaCodecVideoDecoder", NULL); | |
206 RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoDecoder"; | |
207 | |
208 j_init_decode_method_ = GetMethodID( | |
209 jni, *j_media_codec_video_decoder_class_, "initDecode", | |
210 "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;" | |
211 "IILorg/webrtc/SurfaceTextureHelper;)Z"); | |
212 j_release_method_ = | |
213 GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V"); | |
214 j_dequeue_input_buffer_method_ = GetMethodID( | |
215 jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I"); | |
216 j_queue_input_buffer_method_ = GetMethodID( | |
217 jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJJJ)Z"); | |
218 j_dequeue_byte_buffer_method_ = GetMethodID( | |
219 jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer", | |
220 "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer;"); | |
221 j_dequeue_texture_buffer_method_ = GetMethodID( | |
222 jni, *j_media_codec_video_decoder_class_, "dequeueTextureBuffer", | |
223 "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer;"); | |
224 j_return_decoded_byte_buffer_method_ = | |
225 GetMethodID(jni, *j_media_codec_video_decoder_class_, | |
226 "returnDecodedOutputBuffer", "(I)V"); | |
227 | |
228 j_input_buffers_field_ = GetFieldID( | |
229 jni, *j_media_codec_video_decoder_class_, | |
230 "inputBuffers", "[Ljava/nio/ByteBuffer;"); | |
231 j_output_buffers_field_ = GetFieldID( | |
232 jni, *j_media_codec_video_decoder_class_, | |
233 "outputBuffers", "[Ljava/nio/ByteBuffer;"); | |
234 j_color_format_field_ = GetFieldID( | |
235 jni, *j_media_codec_video_decoder_class_, "colorFormat", "I"); | |
236 j_width_field_ = GetFieldID( | |
237 jni, *j_media_codec_video_decoder_class_, "width", "I"); | |
238 j_height_field_ = GetFieldID( | |
239 jni, *j_media_codec_video_decoder_class_, "height", "I"); | |
240 j_stride_field_ = GetFieldID( | |
241 jni, *j_media_codec_video_decoder_class_, "stride", "I"); | |
242 j_slice_height_field_ = GetFieldID( | |
243 jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I"); | |
244 | |
245 jclass j_decoded_texture_buffer_class = FindClass(jni, | |
246 "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer"); | |
247 j_texture_id_field_ = GetFieldID( | |
248 jni, j_decoded_texture_buffer_class, "textureID", "I"); | |
249 j_transform_matrix_field_ = GetFieldID( | |
250 jni, j_decoded_texture_buffer_class, "transformMatrix", "[F"); | |
251 j_texture_timestamp_ms_field_ = GetFieldID( | |
252 jni, j_decoded_texture_buffer_class, "timeStampMs", "J"); | |
253 j_texture_ntp_timestamp_ms_field_ = GetFieldID( | |
254 jni, j_decoded_texture_buffer_class, "ntpTimeStampMs", "J"); | |
255 j_texture_decode_time_ms_field_ = GetFieldID( | |
256 jni, j_decoded_texture_buffer_class, "decodeTimeMs", "J"); | |
257 j_texture_frame_delay_ms_field_ = GetFieldID( | |
258 jni, j_decoded_texture_buffer_class, "frameDelayMs", "J"); | |
259 | |
260 jclass j_decoded_output_buffer_class = FindClass(jni, | |
261 "org/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer"); | |
262 j_info_index_field_ = GetFieldID( | |
263 jni, j_decoded_output_buffer_class, "index", "I"); | |
264 j_info_offset_field_ = GetFieldID( | |
265 jni, j_decoded_output_buffer_class, "offset", "I"); | |
266 j_info_size_field_ = GetFieldID( | |
267 jni, j_decoded_output_buffer_class, "size", "I"); | |
268 j_info_timestamp_ms_field_ = GetFieldID( | |
269 jni, j_decoded_output_buffer_class, "timeStampMs", "J"); | |
270 j_info_ntp_timestamp_ms_field_ = GetFieldID( | |
271 jni, j_decoded_output_buffer_class, "ntpTimeStampMs", "J"); | |
272 j_byte_buffer_decode_time_ms_field_ = GetFieldID( | |
273 jni, j_decoded_output_buffer_class, "decodeTimeMs", "J"); | |
274 | |
275 CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed"; | |
276 use_surface_ = (render_egl_context_ != NULL); | |
277 ALOGD << "MediaCodecVideoDecoder ctor. Use surface: " << use_surface_; | |
278 memset(&codec_, 0, sizeof(codec_)); | |
279 AllowBlockingCalls(); | |
280 } | |
281 | |
// Destructor: synchronously tears down the codec so no decode/poll work can
// run against a destroyed object.
MediaCodecVideoDecoder::~MediaCodecVideoDecoder() {
  // Call Release() to ensure no more callbacks to us after we are deleted.
  Release();
}
286 | |
287 int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst, | |
288 int32_t numberOfCores) { | |
289 ALOGD << "InitDecode."; | |
290 if (inst == NULL) { | |
291 ALOGE << "NULL VideoCodec instance"; | |
292 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | |
293 } | |
294 // Factory should guard against other codecs being used with us. | |
295 RTC_CHECK(inst->codecType == codecType_) | |
296 << "Unsupported codec " << inst->codecType << " for " << codecType_; | |
297 | |
298 if (sw_fallback_required_) { | |
299 ALOGE << "InitDecode() - fallback to SW decoder"; | |
300 return WEBRTC_VIDEO_CODEC_OK; | |
301 } | |
302 // Save VideoCodec instance for later. | |
303 if (&codec_ != inst) { | |
304 codec_ = *inst; | |
305 } | |
306 // If maxFramerate is not set then assume 30 fps. | |
307 codec_.maxFramerate = (codec_.maxFramerate >= 1) ? codec_.maxFramerate : 30; | |
308 | |
309 // Call Java init. | |
310 return codec_thread_->Invoke<int32_t>( | |
311 Bind(&MediaCodecVideoDecoder::InitDecodeOnCodecThread, this)); | |
312 } | |
313 | |
// Codec-thread half of InitDecode(): releases any previous codec, creates the
// SurfaceTextureHelper when surface decoding, calls the Java initDecode(),
// caches global refs to the input ByteBuffers, and schedules periodic polling.
// Any failure flips |sw_fallback_required_| so callers switch to SW decode.
int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  ALOGD << "InitDecodeOnCodecThread Type: " << (int)codecType_ << ". " 
      << codec_.width << " x " << codec_.height << ". Fps: " <<
      (int)codec_.maxFramerate;

  // Release previous codec first if it was allocated before.
  int ret_val = ReleaseOnCodecThread();
  if (ret_val < 0) {
    ALOGE << "Release failure: " << ret_val << " - fallback to SW codec";
    sw_fallback_required_ = true;
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  // Always start with a complete key frame.
  key_frame_required_ = true;
  frames_received_ = 0;
  frames_decoded_ = 0;

  jobject java_surface_texture_helper_ = nullptr;
  if (use_surface_) {
    // Create the Java-side SurfaceTextureHelper bound to the render EGL
    // context, then wrap it in the C++ helper for texture-frame creation.
    java_surface_texture_helper_ = jni->CallStaticObjectMethod(
        FindClass(jni, "org/webrtc/SurfaceTextureHelper"),
        GetStaticMethodID(jni,
                          FindClass(jni, "org/webrtc/SurfaceTextureHelper"),
                          "create",
                          "(Lorg/webrtc/EglBase$Context;)"
                          "Lorg/webrtc/SurfaceTextureHelper;"),
        render_egl_context_);
    RTC_CHECK(java_surface_texture_helper_ != nullptr);
    surface_texture_helper_ = new rtc::RefCountedObject<SurfaceTextureHelper>(
        jni, java_surface_texture_helper_);
  }

  jobject j_video_codec_enum = JavaEnumFromIndex(
      jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_);
  bool success = jni->CallBooleanMethod(
      *j_media_codec_video_decoder_,
      j_init_decode_method_,
      j_video_codec_enum,
      codec_.width,
      codec_.height,
      java_surface_texture_helper_);
  if (CheckException(jni) || !success) {
    ALOGE << "Codec initialization error - fallback to SW codec.";
    sw_fallback_required_ = true;
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  inited_ = true;

  // Per-codec cap on how far input may run ahead of output before we block
  // waiting for the decoder to drain (see DecodeOnCodecThread()).
  switch (codecType_) {
    case kVideoCodecVP8:
      max_pending_frames_ = kMaxPendingFramesVp8;
      break;
    case kVideoCodecVP9:
      max_pending_frames_ = kMaxPendingFramesVp9;
      break;
    case kVideoCodecH264:
      max_pending_frames_ = kMaxPendingFramesH264;
      break;
    default:
      max_pending_frames_ = 0;
  }
  // Reset the periodic decoding statistics.
  start_time_ms_ = GetCurrentTimeMs();
  current_frames_ = 0;
  current_bytes_ = 0;
  current_decoding_time_ms_ = 0;

  // Take global refs on the codec's input ByteBuffers so they stay valid
  // across JNI calls; released in ReleaseOnCodecThread().
  jobjectArray input_buffers = (jobjectArray)GetObjectField(
      jni, *j_media_codec_video_decoder_, j_input_buffers_field_);
  size_t num_input_buffers = jni->GetArrayLength(input_buffers);
  ALOGD << "Maximum amount of pending frames: " << max_pending_frames_;
  input_buffers_.resize(num_input_buffers);
  for (size_t i = 0; i < num_input_buffers; ++i) {
    input_buffers_[i] =
        jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
    if (CheckException(jni)) {
      ALOGE << "NewGlobalRef error - fallback to SW codec.";
      sw_fallback_required_ = true;
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
  }

  // Start the delayed poll loop serviced by OnMessage().
  codec_thread_->PostDelayed(kMediaCodecPollMs, this);

  return WEBRTC_VIDEO_CODEC_OK;
}
403 | |
404 int32_t MediaCodecVideoDecoder::Release() { | |
405 ALOGD << "DecoderRelease request"; | |
406 return codec_thread_->Invoke<int32_t>( | |
407 Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this)); | |
408 } | |
409 | |
// Codec-thread half of Release(): drops input-buffer global refs, releases
// the Java MediaCodec, clears the surface helper, and cancels the queued
// poll messages. Idempotent — a no-op when the decoder is not initialized.
int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() {
  if (!inited_) {
    return WEBRTC_VIDEO_CODEC_OK;
  }
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ALOGD << "DecoderReleaseOnCodecThread: Frames received: " <<
      frames_received_ << ". Frames decoded: " << frames_decoded_;
  ScopedLocalRefFrame local_ref_frame(jni);
  // Release the global refs taken in InitDecodeOnCodecThread().
  for (size_t i = 0; i < input_buffers_.size(); i++) {
    jni->DeleteGlobalRef(input_buffers_[i]);
  }
  input_buffers_.clear();
  jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_);
  surface_texture_helper_ = nullptr;
  inited_ = false;
  // Remove the pending poll message posted with PostDelayed().
  rtc::MessageQueueManager::Clear(this);
  if (CheckException(jni)) {
    ALOGE << "Decoder release exception";
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  ALOGD << "DecoderReleaseOnCodecThread done";
  return WEBRTC_VIDEO_CODEC_OK;
}
434 | |
435 void MediaCodecVideoDecoder::CheckOnCodecThread() { | |
436 RTC_CHECK(codec_thread_ == ThreadManager::Instance()->CurrentThread()) | |
437 << "Running on wrong thread!"; | |
438 } | |
439 | |
440 int32_t MediaCodecVideoDecoder::ProcessHWErrorOnCodecThread() { | |
441 CheckOnCodecThread(); | |
442 int ret_val = ReleaseOnCodecThread(); | |
443 if (ret_val < 0) { | |
444 ALOGE << "ProcessHWError: Release failure"; | |
445 } | |
446 if (codecType_ == kVideoCodecH264) { | |
447 // For now there is no SW H.264 which can be used as fallback codec. | |
448 // So try to restart hw codec for now. | |
449 ret_val = InitDecodeOnCodecThread(); | |
450 ALOGE << "Reset H.264 codec done. Status: " << ret_val; | |
451 if (ret_val == WEBRTC_VIDEO_CODEC_OK) { | |
452 // H.264 codec was succesfully reset - return regular error code. | |
453 return WEBRTC_VIDEO_CODEC_ERROR; | |
454 } else { | |
455 // Fail to restart H.264 codec - return error code which should stop the | |
456 // call. | |
457 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; | |
458 } | |
459 } else { | |
460 sw_fallback_required_ = true; | |
461 ALOGE << "Return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE"; | |
462 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; | |
463 } | |
464 } | |
465 | |
// Public decode entry point. Validates state and the input image, handles
// mid-stream resolution changes by re-running InitDecode, enforces the
// start-on-key-frame rule, then hands the frame to the codec thread.
int32_t MediaCodecVideoDecoder::Decode(
    const EncodedImage& inputImage,
    bool missingFrames,
    const RTPFragmentationHeader* fragmentation,
    const CodecSpecificInfo* codecSpecificInfo,
    int64_t renderTimeMs) {
  if (sw_fallback_required_) {
    ALOGE << "Decode() - fallback to SW codec";
    return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
  }
  if (callback_ == NULL) {
    ALOGE << "Decode() - callback_ is NULL";
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  if (inputImage._buffer == NULL && inputImage._length > 0) {
    ALOGE << "Decode() - inputImage is incorrect";
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  if (!inited_) {
    ALOGE << "Decode() - decoder is not initialized";
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }

  // Check if encoded frame dimension has changed.
  if ((inputImage._encodedWidth * inputImage._encodedHeight > 0) &&
      (inputImage._encodedWidth != codec_.width ||
      inputImage._encodedHeight != codec_.height)) {
    // Re-initialize with the new dimensions; InitDecode tolerates being
    // passed &codec_ itself.
    codec_.width = inputImage._encodedWidth;
    codec_.height = inputImage._encodedHeight;
    int32_t ret = InitDecode(&codec_, 1);
    if (ret < 0) {
      ALOGE << "InitDecode failure: " << ret << " - fallback to SW codec";
      sw_fallback_required_ = true;
      return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
    }
  }

  // Always start with a complete key frame.
  if (key_frame_required_) {
    if (inputImage._frameType != webrtc::kVideoFrameKey) {
      ALOGE << "Decode() - key frame is required";
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
    if (!inputImage._completeFrame) {
      ALOGE << "Decode() - complete frame is required";
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
    key_frame_required_ = false;
  }
  if (inputImage._length == 0) {
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  // Blocks until the codec thread has queued the frame.
  return codec_thread_->Invoke<int32_t>(Bind(
      &MediaCodecVideoDecoder::DecodeOnCodecThread, this, inputImage));
}
522 | |
523 int32_t MediaCodecVideoDecoder::DecodeOnCodecThread( | |
524 const EncodedImage& inputImage) { | |
525 CheckOnCodecThread(); | |
526 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | |
527 ScopedLocalRefFrame local_ref_frame(jni); | |
528 | |
529 // Try to drain the decoder and wait until output is not too | |
530 // much behind the input. | |
531 const int64 drain_start = GetCurrentTimeMs(); | |
532 while ((frames_received_ > frames_decoded_ + max_pending_frames_) && | |
533 (GetCurrentTimeMs() - drain_start) < kMediaCodecTimeoutMs) { | |
534 ALOGV("Received: %d. Decoded: %d. Wait for output...", | |
535 frames_received_, frames_decoded_); | |
536 if (!DeliverPendingOutputs(jni, kMediaCodecPollMs)) { | |
537 ALOGE << "DeliverPendingOutputs error. Frames received: " << | |
538 frames_received_ << ". Frames decoded: " << frames_decoded_; | |
539 return ProcessHWErrorOnCodecThread(); | |
540 } | |
541 } | |
542 if (frames_received_ > frames_decoded_ + max_pending_frames_) { | |
543 ALOGE << "Output buffer dequeue timeout. Frames received: " << | |
544 frames_received_ << ". Frames decoded: " << frames_decoded_; | |
545 return ProcessHWErrorOnCodecThread(); | |
546 } | |
547 | |
548 // Get input buffer. | |
549 int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_decoder_, | |
550 j_dequeue_input_buffer_method_); | |
551 if (CheckException(jni) || j_input_buffer_index < 0) { | |
552 ALOGE << "dequeueInputBuffer error"; | |
553 return ProcessHWErrorOnCodecThread(); | |
554 } | |
555 | |
556 // Copy encoded data to Java ByteBuffer. | |
557 jobject j_input_buffer = input_buffers_[j_input_buffer_index]; | |
558 uint8_t* buffer = | |
559 reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer)); | |
560 RTC_CHECK(buffer) << "Indirect buffer??"; | |
561 int64_t buffer_capacity = jni->GetDirectBufferCapacity(j_input_buffer); | |
562 if (CheckException(jni) || buffer_capacity < inputImage._length) { | |
563 ALOGE << "Input frame size "<< inputImage._length << | |
564 " is bigger than buffer size " << buffer_capacity; | |
565 return ProcessHWErrorOnCodecThread(); | |
566 } | |
567 jlong presentation_timestamp_us = | |
568 (frames_received_ * 1000000) / codec_.maxFramerate; | |
569 if (frames_decoded_ < kMaxDecodedLogFrames) { | |
570 ALOGD << "Decoder frame in # " << frames_received_ << ". Type: " | |
571 << inputImage._frameType << ". Buffer # " << | |
572 j_input_buffer_index << ". pTS: " | |
573 << (int)(presentation_timestamp_us / 1000) | |
574 << ". TS: " << inputImage._timeStamp | |
575 << ". Size: " << inputImage._length; | |
576 } | |
577 memcpy(buffer, inputImage._buffer, inputImage._length); | |
578 | |
579 // Save input image timestamps for later output. | |
580 frames_received_++; | |
581 current_bytes_ += inputImage._length; | |
582 | |
583 // Feed input to decoder. | |
584 bool success = jni->CallBooleanMethod( | |
585 *j_media_codec_video_decoder_, | |
586 j_queue_input_buffer_method_, | |
587 j_input_buffer_index, | |
588 inputImage._length, | |
589 presentation_timestamp_us, | |
590 static_cast<int64_t> (inputImage._timeStamp), | |
591 inputImage.ntp_time_ms_); | |
592 if (CheckException(jni) || !success) { | |
593 ALOGE << "queueInputBuffer error"; | |
594 return ProcessHWErrorOnCodecThread(); | |
595 } | |
596 | |
597 // Try to drain the decoder | |
598 if (!DeliverPendingOutputs(jni, 0)) { | |
599 ALOGE << "DeliverPendingOutputs error"; | |
600 return ProcessHWErrorOnCodecThread(); | |
601 } | |
602 | |
603 return WEBRTC_VIDEO_CODEC_OK; | |
604 } | |
605 | |
606 bool MediaCodecVideoDecoder::DeliverPendingOutputs( | |
607 JNIEnv* jni, int dequeue_timeout_ms) { | |
608 if (frames_received_ <= frames_decoded_) { | |
609 // No need to query for output buffers - decoder is drained. | |
610 return true; | |
611 } | |
612 // Get decoder output. | |
613 jobject j_decoder_output_buffer = | |
614 jni->CallObjectMethod(*j_media_codec_video_decoder_, | |
615 use_surface_ ? j_dequeue_texture_buffer_method_ | |
616 : j_dequeue_byte_buffer_method_, | |
617 dequeue_timeout_ms); | |
618 | |
619 if (CheckException(jni)) { | |
620 ALOGE << "dequeueOutputBuffer() error"; | |
621 return false; | |
622 } | |
623 if (IsNull(jni, j_decoder_output_buffer)) { | |
624 // No decoded frame ready. | |
625 return true; | |
626 } | |
627 | |
628 // Get decoded video frame properties. | |
629 int color_format = GetIntField(jni, *j_media_codec_video_decoder_, | |
630 j_color_format_field_); | |
631 int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_); | |
632 int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_); | |
633 int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_); | |
634 int slice_height = GetIntField(jni, *j_media_codec_video_decoder_, | |
635 j_slice_height_field_); | |
636 | |
637 rtc::scoped_refptr<webrtc::VideoFrameBuffer> frame_buffer; | |
638 int64_t output_timestamps_ms = 0; | |
639 int64_t output_ntp_timestamps_ms = 0; | |
640 int decode_time_ms = 0; | |
641 int64_t frame_delayed_ms = 0; | |
642 if (use_surface_) { | |
643 // Extract data from Java DecodedTextureBuffer. | |
644 const int texture_id = | |
645 GetIntField(jni, j_decoder_output_buffer, j_texture_id_field_); | |
646 if (texture_id != 0) { // |texture_id| == 0 represents a dropped frame. | |
647 const jfloatArray j_transform_matrix = | |
648 reinterpret_cast<jfloatArray>(GetObjectField( | |
649 jni, j_decoder_output_buffer, j_transform_matrix_field_)); | |
650 const int64_t timestamp_us = | |
651 GetLongField(jni, j_decoder_output_buffer, | |
652 j_texture_timestamp_ms_field_); | |
653 output_timestamps_ms = GetLongField(jni, j_decoder_output_buffer, | |
654 j_texture_timestamp_ms_field_); | |
655 output_ntp_timestamps_ms = | |
656 GetLongField(jni, j_decoder_output_buffer, | |
657 j_texture_ntp_timestamp_ms_field_); | |
658 decode_time_ms = GetLongField(jni, j_decoder_output_buffer, | |
659 j_texture_decode_time_ms_field_); | |
660 frame_delayed_ms = GetLongField(jni, j_decoder_output_buffer, | |
661 j_texture_frame_delay_ms_field_); | |
662 | |
663 // Create webrtc::VideoFrameBuffer with native texture handle. | |
664 frame_buffer = surface_texture_helper_->CreateTextureFrame( | |
665 width, height, NativeHandleImpl(jni, texture_id, j_transform_matrix)); | |
666 } | |
667 } else { | |
668 // Extract data from Java ByteBuffer and create output yuv420 frame - | |
669 // for non surface decoding only. | |
670 const int output_buffer_index = | |
671 GetIntField(jni, j_decoder_output_buffer, j_info_index_field_); | |
672 const int output_buffer_offset = | |
673 GetIntField(jni, j_decoder_output_buffer, j_info_offset_field_); | |
674 const int output_buffer_size = | |
675 GetIntField(jni, j_decoder_output_buffer, j_info_size_field_); | |
676 output_timestamps_ms = GetLongField(jni, j_decoder_output_buffer, | |
677 j_info_timestamp_ms_field_); | |
678 output_ntp_timestamps_ms = | |
679 GetLongField(jni, j_decoder_output_buffer, | |
680 j_info_ntp_timestamp_ms_field_); | |
681 | |
682 decode_time_ms = GetLongField(jni, j_decoder_output_buffer, | |
683 j_byte_buffer_decode_time_ms_field_); | |
684 | |
685 if (output_buffer_size < width * height * 3 / 2) { | |
686 ALOGE << "Insufficient output buffer size: " << output_buffer_size; | |
687 return false; | |
688 } | |
689 jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField( | |
690 jni, *j_media_codec_video_decoder_, j_output_buffers_field_)); | |
691 jobject output_buffer = | |
692 jni->GetObjectArrayElement(output_buffers, output_buffer_index); | |
693 uint8_t* payload = reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress( | |
694 output_buffer)); | |
695 if (CheckException(jni)) { | |
696 return false; | |
697 } | |
698 payload += output_buffer_offset; | |
699 | |
700 // Create yuv420 frame. | |
701 frame_buffer = decoded_frame_pool_.CreateBuffer(width, height); | |
702 if (color_format == COLOR_FormatYUV420Planar) { | |
703 RTC_CHECK_EQ(0, stride % 2); | |
704 RTC_CHECK_EQ(0, slice_height % 2); | |
705 const int uv_stride = stride / 2; | |
706 const int u_slice_height = slice_height / 2; | |
707 const uint8_t* y_ptr = payload; | |
708 const uint8_t* u_ptr = y_ptr + stride * slice_height; | |
709 const uint8_t* v_ptr = u_ptr + uv_stride * u_slice_height; | |
710 libyuv::I420Copy(y_ptr, stride, | |
711 u_ptr, uv_stride, | |
712 v_ptr, uv_stride, | |
713 frame_buffer->MutableData(webrtc::kYPlane), | |
714 frame_buffer->stride(webrtc::kYPlane), | |
715 frame_buffer->MutableData(webrtc::kUPlane), | |
716 frame_buffer->stride(webrtc::kUPlane), | |
717 frame_buffer->MutableData(webrtc::kVPlane), | |
718 frame_buffer->stride(webrtc::kVPlane), | |
719 width, height); | |
720 } else { | |
721 // All other supported formats are nv12. | |
722 const uint8_t* y_ptr = payload; | |
723 const uint8_t* uv_ptr = y_ptr + stride * slice_height; | |
724 libyuv::NV12ToI420( | |
725 y_ptr, stride, | |
726 uv_ptr, stride, | |
727 frame_buffer->MutableData(webrtc::kYPlane), | |
728 frame_buffer->stride(webrtc::kYPlane), | |
729 frame_buffer->MutableData(webrtc::kUPlane), | |
730 frame_buffer->stride(webrtc::kUPlane), | |
731 frame_buffer->MutableData(webrtc::kVPlane), | |
732 frame_buffer->stride(webrtc::kVPlane), | |
733 width, height); | |
734 } | |
735 // Return output byte buffer back to codec. | |
736 jni->CallVoidMethod( | |
737 *j_media_codec_video_decoder_, | |
738 j_return_decoded_byte_buffer_method_, | |
739 output_buffer_index); | |
740 if (CheckException(jni)) { | |
741 ALOGE << "returnDecodedOutputBuffer error"; | |
742 return false; | |
743 } | |
744 } | |
745 VideoFrame decoded_frame(frame_buffer, 0, 0, webrtc::kVideoRotation_0); | |
746 decoded_frame.set_timestamp(output_timestamps_ms); | |
747 decoded_frame.set_ntp_time_ms(output_ntp_timestamps_ms); | |
748 | |
749 if (frames_decoded_ < kMaxDecodedLogFrames) { | |
750 ALOGD << "Decoder frame out # " << frames_decoded_ << ". " << width << | |
751 " x " << height << ". " << stride << " x " << slice_height << | |
752 ". Color: " << color_format << ". TS:" << decoded_frame.timestamp() << | |
753 ". DecTime: " << (int)decode_time_ms << | |
754 ". DelayTime: " << (int)frame_delayed_ms; | |
755 } | |
756 | |
757 // Calculate and print decoding statistics - every 3 seconds. | |
758 frames_decoded_++; | |
759 current_frames_++; | |
760 current_decoding_time_ms_ += decode_time_ms; | |
761 int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_; | |
762 if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs && | |
763 current_frames_ > 0) { | |
764 ALOGD << "Decoded frames: " << frames_decoded_ << ". Received frames: " | |
765 << frames_received_ << ". Bitrate: " << | |
766 (current_bytes_ * 8 / statistic_time_ms) << " kbps, fps: " << | |
767 ((current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms) | |
768 << ". decTime: " << (current_decoding_time_ms_ / current_frames_) << | |
769 " for last " << statistic_time_ms << " ms."; | |
770 start_time_ms_ = GetCurrentTimeMs(); | |
771 current_frames_ = 0; | |
772 current_bytes_ = 0; | |
773 current_decoding_time_ms_ = 0; | |
774 } | |
775 | |
  // |IsZeroSize()| returns true when a frame has been dropped.
777 if (!decoded_frame.IsZeroSize()) { | |
778 // Callback - output decoded frame. | |
779 const int32_t callback_status = | |
780 callback_->Decoded(decoded_frame, decode_time_ms); | |
781 if (callback_status > 0) { | |
782 ALOGE << "callback error"; | |
783 } | |
784 } | |
785 return true; | |
786 } | |
787 | |
788 int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback( | |
789 DecodedImageCallback* callback) { | |
790 callback_ = callback; | |
791 return WEBRTC_VIDEO_CODEC_OK; | |
792 } | |
793 | |
794 int32_t MediaCodecVideoDecoder::Reset() { | |
795 ALOGD << "DecoderReset"; | |
796 if (!inited_) { | |
797 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | |
798 } | |
799 return InitDecode(&codec_, 1); | |
800 } | |
801 | |
802 void MediaCodecVideoDecoder::OnMessage(rtc::Message* msg) { | |
803 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | |
804 ScopedLocalRefFrame local_ref_frame(jni); | |
805 if (!inited_) { | |
806 return; | |
807 } | |
808 // We only ever send one message to |this| directly (not through a Bind()'d | |
809 // functor), so expect no ID/data. | |
810 RTC_CHECK(!msg->message_id) << "Unexpected message!"; | |
811 RTC_CHECK(!msg->pdata) << "Unexpected message!"; | |
812 CheckOnCodecThread(); | |
813 | |
814 if (!DeliverPendingOutputs(jni, 0)) { | |
815 ALOGE << "OnMessage: DeliverPendingOutputs error"; | |
816 ProcessHWErrorOnCodecThread(); | |
817 return; | |
818 } | |
819 codec_thread_->PostDelayed(kMediaCodecPollMs, this); | |
820 } | |
821 | |
822 MediaCodecVideoDecoderFactory::MediaCodecVideoDecoderFactory() : | |
823 render_egl_context_(NULL) { | |
824 ALOGD << "MediaCodecVideoDecoderFactory ctor"; | |
825 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | |
826 ScopedLocalRefFrame local_ref_frame(jni); | |
827 jclass j_decoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoDecoder"); | |
828 supported_codec_types_.clear(); | |
829 | |
830 bool is_vp8_hw_supported = jni->CallStaticBooleanMethod( | |
831 j_decoder_class, | |
832 GetStaticMethodID(jni, j_decoder_class, "isVp8HwSupported", "()Z")); | |
833 if (CheckException(jni)) { | |
834 is_vp8_hw_supported = false; | |
835 } | |
836 if (is_vp8_hw_supported) { | |
837 ALOGD << "VP8 HW Decoder supported."; | |
838 supported_codec_types_.push_back(kVideoCodecVP8); | |
839 } | |
840 | |
841 bool is_vp9_hw_supported = jni->CallStaticBooleanMethod( | |
842 j_decoder_class, | |
843 GetStaticMethodID(jni, j_decoder_class, "isVp9HwSupported", "()Z")); | |
844 if (CheckException(jni)) { | |
845 is_vp9_hw_supported = false; | |
846 } | |
847 if (is_vp9_hw_supported) { | |
848 ALOGD << "VP9 HW Decoder supported."; | |
849 supported_codec_types_.push_back(kVideoCodecVP9); | |
850 } | |
851 | |
852 bool is_h264_hw_supported = jni->CallStaticBooleanMethod( | |
853 j_decoder_class, | |
854 GetStaticMethodID(jni, j_decoder_class, "isH264HwSupported", "()Z")); | |
855 if (CheckException(jni)) { | |
856 is_h264_hw_supported = false; | |
857 } | |
858 if (is_h264_hw_supported) { | |
859 ALOGD << "H264 HW Decoder supported."; | |
860 supported_codec_types_.push_back(kVideoCodecH264); | |
861 } | |
862 } | |
863 | |
864 MediaCodecVideoDecoderFactory::~MediaCodecVideoDecoderFactory() { | |
865 ALOGD << "MediaCodecVideoDecoderFactory dtor"; | |
866 if (render_egl_context_) { | |
867 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | |
868 jni->DeleteGlobalRef(render_egl_context_); | |
869 render_egl_context_ = NULL; | |
870 } | |
871 } | |
872 | |
// Stores a global reference to the Java EGL context used for HW surface
// decoding. Any previously stored context is released first. If |render_egl_context|
// is null, fails validation, or is not an org.webrtc.EglBase$Context, the
// stored context ends up NULL and surface decoding is disabled.
void MediaCodecVideoDecoderFactory::SetEGLContext(
    JNIEnv* jni, jobject render_egl_context) {
  ALOGD << "MediaCodecVideoDecoderFactory::SetEGLContext";
  // Drop the previously stored context, if any, before adopting the new one.
  if (render_egl_context_) {
    jni->DeleteGlobalRef(render_egl_context_);
    render_egl_context_ = NULL;
  }
  if (!IsNull(jni, render_egl_context)) {
    // Promote to a global ref so the context outlives this local ref frame.
    render_egl_context_ = jni->NewGlobalRef(render_egl_context);
    if (CheckException(jni)) {
      ALOGE << "error calling NewGlobalRef for EGL Context.";
      render_egl_context_ = NULL;
    } else {
      // Validate the object type; discard the ref if it is not an
      // org.webrtc.EglBase$Context.
      jclass j_egl_context_class =
          FindClass(jni, "org/webrtc/EglBase$Context");
      if (!jni->IsInstanceOf(render_egl_context_, j_egl_context_class)) {
        ALOGE << "Wrong EGL Context.";
        jni->DeleteGlobalRef(render_egl_context_);
        render_egl_context_ = NULL;
      }
    }
  }
  if (render_egl_context_ == NULL) {
    ALOGW << "NULL VideoDecoder EGL context - HW surface decoding is disabled.";
  }
}
899 | |
900 webrtc::VideoDecoder* MediaCodecVideoDecoderFactory::CreateVideoDecoder( | |
901 VideoCodecType type) { | |
902 if (supported_codec_types_.empty()) { | |
903 ALOGW << "No HW video decoder for type " << (int)type; | |
904 return NULL; | |
905 } | |
906 for (VideoCodecType codec_type : supported_codec_types_) { | |
907 if (codec_type == type) { | |
908 ALOGD << "Create HW video decoder for type " << (int)type; | |
909 return new MediaCodecVideoDecoder( | |
910 AttachCurrentThreadIfNeeded(), type, render_egl_context_); | |
911 } | |
912 } | |
913 ALOGW << "Can not find HW video decoder for type " << (int)type; | |
914 return NULL; | |
915 } | |
916 | |
917 void MediaCodecVideoDecoderFactory::DestroyVideoDecoder( | |
918 webrtc::VideoDecoder* decoder) { | |
919 ALOGD << "Destroy video decoder."; | |
920 delete decoder; | |
921 } | |
922 | |
923 const char* MediaCodecVideoDecoder::ImplementationName() const { | |
924 return "MediaCodec"; | |
925 } | |
926 | |
927 } // namespace webrtc_jni | |
928 | |
OLD | NEW |