Chromium Code Reviews

Side by Side Diff: talk/app/webrtc/java/jni/androidmediadecoder_jni.cc

Issue 1422963003: Android MediaCodecVideoDecoder: Manage lifetime of texture frames (Closed)
Base URL: https://chromium.googlesource.com/external/webrtc.git@master
Patch Set: Created 5 years, 1 month ago
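
What the change does: the decoder previously kept raw JNI global references to the SurfaceTexture (surface_texture_ / previous_surface_texture_) and deleted them by hand in the destructor. This patch replaces them with a ref-counted helper, rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_, created in InitDecode, handed to the Java decoder via initDecode, and set to nullptr in ReleaseOnCodecThread, so that a decoded texture frame created through CreateTextureFrame can keep the texture resources alive for as long as it is in use. Below is a minimal, standalone C++ analogy of that lifetime pattern (plain std::shared_ptr standing in for rtc::scoped_refptr; the names are illustrative, not WebRTC code):

// Standalone analogy of the lifetime pattern introduced by this patch:
// a ref-counted helper created per decode session, shared with any frame
// that still references it, and destroyed only when the last user lets go.
#include <iostream>
#include <memory>

struct SurfaceTextureHelperLike {           // stand-in for SurfaceTextureHelper
  ~SurfaceTextureHelperLike() { std::cout << "helper destroyed\n"; }
};

struct DecoderLike {
  std::shared_ptr<SurfaceTextureHelperLike> helper;  // like surface_texture_helper_
  void InitDecode() { helper = std::make_shared<SurfaceTextureHelperLike>(); }
  std::shared_ptr<SurfaceTextureHelperLike> CreateTextureFrame() { return helper; }
  void Release() { helper.reset(); }                 // like surface_texture_helper_ = nullptr
};

int main() {
  DecoderLike decoder;
  decoder.InitDecode();
  auto frame = decoder.CreateTextureFrame();  // a delivered frame holds a reference
  decoder.Release();                          // the decoder lets go first...
  std::cout << "decoder released, frame still holds the helper\n";
  frame.reset();  // ...the helper is destroyed only when the frame is done
}
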
OLD | NEW
1 /* 1 /*
2 * libjingle 2 * libjingle
3 * Copyright 2015 Google Inc. 3 * Copyright 2015 Google Inc.
4 * 4 *
5 * Redistribution and use in source and binary forms, with or without 5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions are met: 6 * modification, are permitted provided that the following conditions are met:
7 * 7 *
8 * 1. Redistributions of source code must retain the above copyright notice, 8 * 1. Redistributions of source code must retain the above copyright notice,
9 * this list of conditions and the following disclaimer. 9 * this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright notice, 10 * 2. Redistributions in binary form must reproduce the above copyright notice,
(...skipping 15 matching lines...)
26 * 26 *
27 */ 27 */
28 28
29 #include <algorithm> 29 #include <algorithm>
30 #include <vector> 30 #include <vector>
31 31
32 #include "talk/app/webrtc/java/jni/androidmediadecoder_jni.h" 32 #include "talk/app/webrtc/java/jni/androidmediadecoder_jni.h"
33 #include "talk/app/webrtc/java/jni/androidmediacodeccommon.h" 33 #include "talk/app/webrtc/java/jni/androidmediacodeccommon.h"
34 #include "talk/app/webrtc/java/jni/classreferenceholder.h" 34 #include "talk/app/webrtc/java/jni/classreferenceholder.h"
35 #include "talk/app/webrtc/java/jni/native_handle_impl.h" 35 #include "talk/app/webrtc/java/jni/native_handle_impl.h"
36 #include "talk/app/webrtc/java/jni/surfacetexturehelper_jni.h"
36 #include "webrtc/base/bind.h" 37 #include "webrtc/base/bind.h"
37 #include "webrtc/base/checks.h" 38 #include "webrtc/base/checks.h"
38 #include "webrtc/base/logging.h" 39 #include "webrtc/base/logging.h"
39 #include "webrtc/base/scoped_ref_ptr.h" 40 #include "webrtc/base/scoped_ref_ptr.h"
40 #include "webrtc/base/thread.h" 41 #include "webrtc/base/thread.h"
41 #include "webrtc/base/timeutils.h" 42 #include "webrtc/base/timeutils.h"
42 #include "webrtc/common_video/interface/i420_buffer_pool.h" 43 #include "webrtc/common_video/interface/i420_buffer_pool.h"
43 #include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h" 44 #include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
44 #include "webrtc/system_wrappers/interface/logcat_trace_context.h" 45 #include "webrtc/system_wrappers/interface/logcat_trace_context.h"
45 #include "webrtc/system_wrappers/interface/tick_util.h" 46 #include "webrtc/system_wrappers/interface/tick_util.h"
(...skipping 58 matching lines...)
104 105
105 // Type of video codec. 106 // Type of video codec.
106 VideoCodecType codecType_; 107 VideoCodecType codecType_;
107 108
108 bool key_frame_required_; 109 bool key_frame_required_;
109 bool inited_; 110 bool inited_;
110 bool sw_fallback_required_; 111 bool sw_fallback_required_;
111 bool use_surface_; 112 bool use_surface_;
112 VideoCodec codec_; 113 VideoCodec codec_;
113 webrtc::I420BufferPool decoded_frame_pool_; 114 webrtc::I420BufferPool decoded_frame_pool_;
114 NativeHandleImpl native_handle_; 115 rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_;
115 DecodedImageCallback* callback_; 116 DecodedImageCallback* callback_;
116 int frames_received_; // Number of frames received by decoder. 117 int frames_received_; // Number of frames received by decoder.
117 int frames_decoded_; // Number of frames decoded by decoder. 118 int frames_decoded_; // Number of frames decoded by decoder.
118 int64_t start_time_ms_; // Start time for statistics. 119 int64_t start_time_ms_; // Start time for statistics.
119 int current_frames_; // Number of frames in the current statistics interval. 120 int current_frames_; // Number of frames in the current statistics interval.
120 int current_bytes_; // Encoded bytes in the current statistics interval. 121 int current_bytes_; // Encoded bytes in the current statistics interval.
121 int current_decoding_time_ms_; // Overall decoding time in the current second 122 int current_decoding_time_ms_; // Overall decoding time in the current second
122 uint32_t max_pending_frames_; // Maximum number of pending input frames 123 uint32_t max_pending_frames_; // Maximum number of pending input frames
123 std::vector<int32_t> timestamps_; 124 std::vector<int32_t> timestamps_;
124 std::vector<int64_t> ntp_times_ms_; 125 std::vector<int64_t> ntp_times_ms_;
(...skipping 12 matching lines...)
137 jmethodID j_dequeue_output_buffer_method_; 138 jmethodID j_dequeue_output_buffer_method_;
138 jmethodID j_return_decoded_byte_buffer_method_; 139 jmethodID j_return_decoded_byte_buffer_method_;
139 // MediaCodecVideoDecoder fields. 140 // MediaCodecVideoDecoder fields.
140 jfieldID j_input_buffers_field_; 141 jfieldID j_input_buffers_field_;
141 jfieldID j_output_buffers_field_; 142 jfieldID j_output_buffers_field_;
142 jfieldID j_color_format_field_; 143 jfieldID j_color_format_field_;
143 jfieldID j_width_field_; 144 jfieldID j_width_field_;
144 jfieldID j_height_field_; 145 jfieldID j_height_field_;
145 jfieldID j_stride_field_; 146 jfieldID j_stride_field_;
146 jfieldID j_slice_height_field_; 147 jfieldID j_slice_height_field_;
147 jfieldID j_surface_texture_field_;
148 // MediaCodecVideoDecoder.DecodedTextureBuffer fields. 148 // MediaCodecVideoDecoder.DecodedTextureBuffer fields.
149 jfieldID j_textureID_field_; 149 jfieldID j_textureID_field_;
150 jfieldID j_texture_presentation_timestamp_us_field_; 150 jfieldID j_transform_matrix_field_;
151 jfieldID j_texture_timestamp_ns_field_;
152 jfieldID j_texture_decode_time_ms_field_;
151 // MediaCodecVideoDecoder.DecodedByteBuffer fields. 153 // MediaCodecVideoDecoder.DecodedByteBuffer fields.
152 jfieldID j_info_index_field_; 154 jfieldID j_info_index_field_;
153 jfieldID j_info_offset_field_; 155 jfieldID j_info_offset_field_;
154 jfieldID j_info_size_field_; 156 jfieldID j_info_size_field_;
155 jfieldID j_info_presentation_timestamp_us_field_; 157 jfieldID j_info_presentation_timestamp_us_field_;
156 158
157 // Global references; must be deleted in Release(). 159 // Global references; must be deleted in Release().
158 std::vector<jobject> input_buffers_; 160 std::vector<jobject> input_buffers_;
159 jobject surface_texture_;
160 jobject previous_surface_texture_;
161 161
162 // Render EGL context - owned by factory, should not be allocated/destroyed 162 // Render EGL context - owned by factory, should not be allocated/destroyed
163 // by VideoDecoder. 163 // by VideoDecoder.
164 jobject render_egl_context_; 164 jobject render_egl_context_;
165 }; 165 };
166 166
167 MediaCodecVideoDecoder::MediaCodecVideoDecoder( 167 MediaCodecVideoDecoder::MediaCodecVideoDecoder(
168 JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context) : 168 JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context) :
169 codecType_(codecType), 169 codecType_(codecType),
170 render_egl_context_(render_egl_context), 170 render_egl_context_(render_egl_context),
171 key_frame_required_(true), 171 key_frame_required_(true),
172 inited_(false), 172 inited_(false),
173 sw_fallback_required_(false), 173 sw_fallback_required_(false),
174 surface_texture_(NULL),
175 previous_surface_texture_(NULL),
176 codec_thread_(new Thread()), 174 codec_thread_(new Thread()),
177 j_media_codec_video_decoder_class_( 175 j_media_codec_video_decoder_class_(
178 jni, 176 jni,
179 FindClass(jni, "org/webrtc/MediaCodecVideoDecoder")), 177 FindClass(jni, "org/webrtc/MediaCodecVideoDecoder")),
180 j_media_codec_video_decoder_( 178 j_media_codec_video_decoder_(
181 jni, 179 jni,
182 jni->NewObject(*j_media_codec_video_decoder_class_, 180 jni->NewObject(*j_media_codec_video_decoder_class_,
183 GetMethodID(jni, 181 GetMethodID(jni,
184 *j_media_codec_video_decoder_class_, 182 *j_media_codec_video_decoder_class_,
185 "<init>", 183 "<init>",
186 "()V"))) { 184 "()V"))) {
187 ScopedLocalRefFrame local_ref_frame(jni); 185 ScopedLocalRefFrame local_ref_frame(jni);
188 codec_thread_->SetName("MediaCodecVideoDecoder", NULL); 186 codec_thread_->SetName("MediaCodecVideoDecoder", NULL);
189 RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoDecoder"; 187 RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoDecoder";
190 188
191 j_init_decode_method_ = GetMethodID( 189 j_init_decode_method_ = GetMethodID(
192 jni, *j_media_codec_video_decoder_class_, "initDecode", 190 jni, *j_media_codec_video_decoder_class_, "initDecode",
193 "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;" 191 "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;"
194 "IILandroid/opengl/EGLContext;)Z"); 192 "IILorg/webrtc/SurfaceTextureHelper;)Z");
195 j_release_method_ = 193 j_release_method_ =
196 GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V"); 194 GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V");
197 j_dequeue_input_buffer_method_ = GetMethodID( 195 j_dequeue_input_buffer_method_ = GetMethodID(
198 jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I"); 196 jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I");
199 j_queue_input_buffer_method_ = GetMethodID( 197 j_queue_input_buffer_method_ = GetMethodID(
200 jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJ)Z"); 198 jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJ)Z");
201 j_dequeue_output_buffer_method_ = GetMethodID( 199 j_dequeue_output_buffer_method_ = GetMethodID(
202 jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer", 200 jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer",
203 "(I)Ljava/lang/Object;"); 201 "(I)Ljava/lang/Object;");
204 j_return_decoded_byte_buffer_method_ = 202 j_return_decoded_byte_buffer_method_ =
205 GetMethodID(jni, *j_media_codec_video_decoder_class_, 203 GetMethodID(jni, *j_media_codec_video_decoder_class_,
206 "returnDecodedByteBuffer", "(I)V"); 204 "returnDecodedByteBuffer", "(I)V");
207 205
208 j_input_buffers_field_ = GetFieldID( 206 j_input_buffers_field_ = GetFieldID(
209 jni, *j_media_codec_video_decoder_class_, 207 jni, *j_media_codec_video_decoder_class_,
210 "inputBuffers", "[Ljava/nio/ByteBuffer;"); 208 "inputBuffers", "[Ljava/nio/ByteBuffer;");
211 j_output_buffers_field_ = GetFieldID( 209 j_output_buffers_field_ = GetFieldID(
212 jni, *j_media_codec_video_decoder_class_, 210 jni, *j_media_codec_video_decoder_class_,
213 "outputBuffers", "[Ljava/nio/ByteBuffer;"); 211 "outputBuffers", "[Ljava/nio/ByteBuffer;");
214 j_color_format_field_ = GetFieldID( 212 j_color_format_field_ = GetFieldID(
215 jni, *j_media_codec_video_decoder_class_, "colorFormat", "I"); 213 jni, *j_media_codec_video_decoder_class_, "colorFormat", "I");
216 j_width_field_ = GetFieldID( 214 j_width_field_ = GetFieldID(
217 jni, *j_media_codec_video_decoder_class_, "width", "I"); 215 jni, *j_media_codec_video_decoder_class_, "width", "I");
218 j_height_field_ = GetFieldID( 216 j_height_field_ = GetFieldID(
219 jni, *j_media_codec_video_decoder_class_, "height", "I"); 217 jni, *j_media_codec_video_decoder_class_, "height", "I");
220 j_stride_field_ = GetFieldID( 218 j_stride_field_ = GetFieldID(
221 jni, *j_media_codec_video_decoder_class_, "stride", "I"); 219 jni, *j_media_codec_video_decoder_class_, "stride", "I");
222 j_slice_height_field_ = GetFieldID( 220 j_slice_height_field_ = GetFieldID(
223 jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I"); 221 jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I");
224 j_surface_texture_field_ = GetFieldID(
225 jni, *j_media_codec_video_decoder_class_, "surfaceTexture",
226 "Landroid/graphics/SurfaceTexture;");
227 222
228 jclass j_decoder_decoded_texture_buffer_class = FindClass(jni, 223 jclass j_decoder_decoded_texture_buffer_class = FindClass(jni,
229 "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer"); 224 "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer");
230 j_textureID_field_ = GetFieldID( 225 j_textureID_field_ = GetFieldID(
231 jni, j_decoder_decoded_texture_buffer_class, "textureID", "I"); 226 jni, j_decoder_decoded_texture_buffer_class, "textureID", "I");
232 j_texture_presentation_timestamp_us_field_ = 227 j_transform_matrix_field_ = GetFieldID(
233 GetFieldID(jni, j_decoder_decoded_texture_buffer_class, 228 jni, j_decoder_decoded_texture_buffer_class, "transformMatrix", "[F");
234 "presentationTimestampUs", "J"); 229 j_texture_timestamp_ns_field_ = GetFieldID(
230 jni, j_decoder_decoded_texture_buffer_class, "timestampNs", "J");
231 j_texture_decode_time_ms_field_ = GetFieldID(
232 jni, j_decoder_decoded_texture_buffer_class, "decodeTimeMs", "J");
235 233
236 jclass j_decoder_decoded_byte_buffer_class = FindClass(jni, 234 jclass j_decoder_decoded_byte_buffer_class = FindClass(jni,
237 "org/webrtc/MediaCodecVideoDecoder$DecodedByteBuffer"); 235 "org/webrtc/MediaCodecVideoDecoder$DecodedByteBuffer");
238 j_info_index_field_ = GetFieldID( 236 j_info_index_field_ = GetFieldID(
239 jni, j_decoder_decoded_byte_buffer_class, "index", "I"); 237 jni, j_decoder_decoded_byte_buffer_class, "index", "I");
240 j_info_offset_field_ = GetFieldID( 238 j_info_offset_field_ = GetFieldID(
241 jni, j_decoder_decoded_byte_buffer_class, "offset", "I"); 239 jni, j_decoder_decoded_byte_buffer_class, "offset", "I");
242 j_info_size_field_ = GetFieldID( 240 j_info_size_field_ = GetFieldID(
243 jni, j_decoder_decoded_byte_buffer_class, "size", "I"); 241 jni, j_decoder_decoded_byte_buffer_class, "size", "I");
244 j_info_presentation_timestamp_us_field_ = GetFieldID( 242 j_info_presentation_timestamp_us_field_ = GetFieldID(
245 jni, j_decoder_decoded_byte_buffer_class, "presentationTimestampUs", "J"); 243 jni, j_decoder_decoded_byte_buffer_class, "presentationTimestampUs", "J");
246 244
247 CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed"; 245 CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed";
248 use_surface_ = (render_egl_context_ != NULL); 246 use_surface_ = (render_egl_context_ != NULL);
249 ALOGD << "MediaCodecVideoDecoder ctor. Use surface: " << use_surface_; 247 ALOGD << "MediaCodecVideoDecoder ctor. Use surface: " << use_surface_;
250 memset(&codec_, 0, sizeof(codec_)); 248 memset(&codec_, 0, sizeof(codec_));
251 AllowBlockingCalls(); 249 AllowBlockingCalls();
252 } 250 }
253 251
254 MediaCodecVideoDecoder::~MediaCodecVideoDecoder() { 252 MediaCodecVideoDecoder::~MediaCodecVideoDecoder() {
255 // Call Release() to ensure no more callbacks to us after we are deleted. 253 // Call Release() to ensure no more callbacks to us after we are deleted.
256 Release(); 254 Release();
257 // Delete global references.
258 JNIEnv* jni = AttachCurrentThreadIfNeeded();
259 if (previous_surface_texture_ != NULL) {
260 jni->DeleteGlobalRef(previous_surface_texture_);
261 }
262 if (surface_texture_ != NULL) {
263 jni->DeleteGlobalRef(surface_texture_);
264 }
265 } 255 }
266 256
267 int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst, 257 int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst,
268 int32_t numberOfCores) { 258 int32_t numberOfCores) {
269 ALOGD << "InitDecode."; 259 ALOGD << "InitDecode.";
270 if (inst == NULL) { 260 if (inst == NULL) {
271 ALOGE << "NULL VideoCodec instance"; 261 ALOGE << "NULL VideoCodec instance";
272 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; 262 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
273 } 263 }
274 // Factory should guard against other codecs being used with us. 264 // Factory should guard against other codecs being used with us.
(...skipping 29 matching lines...)
304 ALOGE << "Release failure: " << ret_val << " - fallback to SW codec"; 294 ALOGE << "Release failure: " << ret_val << " - fallback to SW codec";
305 sw_fallback_required_ = true; 295 sw_fallback_required_ = true;
306 return WEBRTC_VIDEO_CODEC_ERROR; 296 return WEBRTC_VIDEO_CODEC_ERROR;
307 } 297 }
308 298
309 // Always start with a complete key frame. 299 // Always start with a complete key frame.
310 key_frame_required_ = true; 300 key_frame_required_ = true;
311 frames_received_ = 0; 301 frames_received_ = 0;
312 frames_decoded_ = 0; 302 frames_decoded_ = 0;
313 303
304 if (use_surface_) {
305 surface_texture_helper_ = new rtc::RefCountedObject<SurfaceTextureHelper>(
306 jni, render_egl_context_);
307 }
308
314 jobject j_video_codec_enum = JavaEnumFromIndex( 309 jobject j_video_codec_enum = JavaEnumFromIndex(
315 jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_); 310 jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_);
316 bool success = jni->CallBooleanMethod( 311 bool success = jni->CallBooleanMethod(
317 *j_media_codec_video_decoder_, 312 *j_media_codec_video_decoder_,
318 j_init_decode_method_, 313 j_init_decode_method_,
319 j_video_codec_enum, 314 j_video_codec_enum,
320 codec_.width, 315 codec_.width,
321 codec_.height, 316 codec_.height,
322 use_surface_ ? render_egl_context_ : nullptr); 317 use_surface_ ? surface_texture_helper_->GetJavaSurfaceTextureHelper()
318 : nullptr);
323 if (CheckException(jni) || !success) { 319 if (CheckException(jni) || !success) {
324 ALOGE << "Codec initialization error - fallback to SW codec."; 320 ALOGE << "Codec initialization error - fallback to SW codec.";
325 sw_fallback_required_ = true; 321 sw_fallback_required_ = true;
326 return WEBRTC_VIDEO_CODEC_ERROR; 322 return WEBRTC_VIDEO_CODEC_ERROR;
327 } 323 }
328 inited_ = true; 324 inited_ = true;
329 325
330 switch (codecType_) { 326 switch (codecType_) {
331 case kVideoCodecVP8: 327 case kVideoCodecVP8:
332 max_pending_frames_ = kMaxPendingFramesVp8; 328 max_pending_frames_ = kMaxPendingFramesVp8;
(...skipping 21 matching lines...)
354 for (size_t i = 0; i < num_input_buffers; ++i) { 350 for (size_t i = 0; i < num_input_buffers; ++i) {
355 input_buffers_[i] = 351 input_buffers_[i] =
356 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i)); 352 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
357 if (CheckException(jni)) { 353 if (CheckException(jni)) {
358 ALOGE << "NewGlobalRef error - fallback to SW codec."; 354 ALOGE << "NewGlobalRef error - fallback to SW codec.";
359 sw_fallback_required_ = true; 355 sw_fallback_required_ = true;
360 return WEBRTC_VIDEO_CODEC_ERROR; 356 return WEBRTC_VIDEO_CODEC_ERROR;
361 } 357 }
362 } 358 }
363 359
364 if (use_surface_) {
365 jobject surface_texture = GetObjectField(
366 jni, *j_media_codec_video_decoder_, j_surface_texture_field_);
367 if (previous_surface_texture_ != NULL) {
368 jni->DeleteGlobalRef(previous_surface_texture_);
369 }
370 previous_surface_texture_ = surface_texture_;
371 surface_texture_ = jni->NewGlobalRef(surface_texture);
372 }
373 codec_thread_->PostDelayed(kMediaCodecPollMs, this); 360 codec_thread_->PostDelayed(kMediaCodecPollMs, this);
374 361
375 return WEBRTC_VIDEO_CODEC_OK; 362 return WEBRTC_VIDEO_CODEC_OK;
376 } 363 }
377 364
378 int32_t MediaCodecVideoDecoder::Release() { 365 int32_t MediaCodecVideoDecoder::Release() {
379 ALOGD << "DecoderRelease request"; 366 ALOGD << "DecoderRelease request";
380 return codec_thread_->Invoke<int32_t>( 367 return codec_thread_->Invoke<int32_t>(
381 Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this)); 368 Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this));
382 } 369 }
383 370
384 int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() { 371 int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() {
385 if (!inited_) { 372 if (!inited_) {
386 return WEBRTC_VIDEO_CODEC_OK; 373 return WEBRTC_VIDEO_CODEC_OK;
387 } 374 }
388 CheckOnCodecThread(); 375 CheckOnCodecThread();
389 JNIEnv* jni = AttachCurrentThreadIfNeeded(); 376 JNIEnv* jni = AttachCurrentThreadIfNeeded();
390 ALOGD << "DecoderReleaseOnCodecThread: Frames received: " << frames_received_; 377 ALOGD << "DecoderReleaseOnCodecThread: Frames received: " << frames_received_;
391 ScopedLocalRefFrame local_ref_frame(jni); 378 ScopedLocalRefFrame local_ref_frame(jni);
392 for (size_t i = 0; i < input_buffers_.size(); i++) { 379 for (size_t i = 0; i < input_buffers_.size(); i++) {
393 jni->DeleteGlobalRef(input_buffers_[i]); 380 jni->DeleteGlobalRef(input_buffers_[i]);
394 } 381 }
395 input_buffers_.clear(); 382 input_buffers_.clear();
396 jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_); 383 jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_);
384 surface_texture_helper_ = nullptr;
397 inited_ = false; 385 inited_ = false;
398 rtc::MessageQueueManager::Clear(this); 386 rtc::MessageQueueManager::Clear(this);
399 if (CheckException(jni)) { 387 if (CheckException(jni)) {
400 ALOGE << "Decoder release exception"; 388 ALOGE << "Decoder release exception";
401 return WEBRTC_VIDEO_CODEC_ERROR; 389 return WEBRTC_VIDEO_CODEC_ERROR;
402 } 390 }
403 ALOGD << "DecoderReleaseOnCodecThread done"; 391 ALOGD << "DecoderReleaseOnCodecThread done";
404 return WEBRTC_VIDEO_CODEC_OK; 392 return WEBRTC_VIDEO_CODEC_OK;
405 } 393 }
406 394
(...skipping 87 matching lines...)
494 482
495 int32_t MediaCodecVideoDecoder::DecodeOnCodecThread( 483 int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
496 const EncodedImage& inputImage) { 484 const EncodedImage& inputImage) {
497 CheckOnCodecThread(); 485 CheckOnCodecThread();
498 JNIEnv* jni = AttachCurrentThreadIfNeeded(); 486 JNIEnv* jni = AttachCurrentThreadIfNeeded();
499 ScopedLocalRefFrame local_ref_frame(jni); 487 ScopedLocalRefFrame local_ref_frame(jni);
500 488
501 // Try to drain the decoder and wait until output is not too 489 // Try to drain the decoder and wait until output is not too
502 // much behind the input. 490 // much behind the input.
503 if (frames_received_ > frames_decoded_ + max_pending_frames_) { 491 if (frames_received_ > frames_decoded_ + max_pending_frames_) {
504 ALOGV("Received: %d. Decoded: %d. Wait for output...", 492 ALOGD << "Received: " << frames_received_ << ". Decoded: "
505 frames_received_, frames_decoded_); 493 << frames_decoded_ << ". Wait for output...";
506 if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs * 1000)) { 494 if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs)) {
507 ALOGE << "DeliverPendingOutputs error"; 495 ALOGE << "DeliverPendingOutputs error";
508 return ProcessHWErrorOnCodecThread(); 496 return ProcessHWErrorOnCodecThread();
509 } 497 }
510 if (frames_received_ > frames_decoded_ + max_pending_frames_) { 498 if (frames_received_ > frames_decoded_ + max_pending_frames_) {
511 ALOGE << "Output buffer dequeue timeout"; 499 ALOGE << "Output buffer dequeue timeout";
512 return ProcessHWErrorOnCodecThread(); 500 return ProcessHWErrorOnCodecThread();
513 } 501 }
514 } 502 }
515 503
516 // Get input buffer. 504 // Get input buffer.
(...skipping 42 matching lines...)
559 // Try to drain the decoder 547 // Try to drain the decoder
560 if (!DeliverPendingOutputs(jni, 0)) { 548 if (!DeliverPendingOutputs(jni, 0)) {
561 ALOGE << "DeliverPendingOutputs error"; 549 ALOGE << "DeliverPendingOutputs error";
562 return ProcessHWErrorOnCodecThread(); 550 return ProcessHWErrorOnCodecThread();
563 } 551 }
564 552
565 return WEBRTC_VIDEO_CODEC_OK; 553 return WEBRTC_VIDEO_CODEC_OK;
566 } 554 }
567 555
568 bool MediaCodecVideoDecoder::DeliverPendingOutputs( 556 bool MediaCodecVideoDecoder::DeliverPendingOutputs(
569 JNIEnv* jni, int dequeue_timeout_us) { 557 JNIEnv* jni, int dequeue_timeout_ms) {
570 if (frames_received_ <= frames_decoded_) { 558 if (frames_received_ <= frames_decoded_) {
571 // No need to query for output buffers - decoder is drained. 559 // No need to query for output buffers - decoder is drained.
572 return true; 560 return true;
573 } 561 }
574 // Get decoder output. 562 // Get decoder output.
575 jobject j_decoder_output_buffer = jni->CallObjectMethod( 563 jobject j_decoder_output_buffer = jni->CallObjectMethod(
576 *j_media_codec_video_decoder_, 564 *j_media_codec_video_decoder_,
577 j_dequeue_output_buffer_method_, 565 j_dequeue_output_buffer_method_,
578 dequeue_timeout_us); 566 dequeue_timeout_ms);
579 if (CheckException(jni)) { 567 if (CheckException(jni)) {
580 ALOGE << "dequeueOutputBuffer() error"; 568 ALOGE << "dequeueOutputBuffer() error";
581 return false; 569 return false;
582 } 570 }
583 if (IsNull(jni, j_decoder_output_buffer)) { 571 if (IsNull(jni, j_decoder_output_buffer)) {
584 // No decoded frame ready. 572 // No decoded frame ready.
585 return true; 573 return true;
586 } 574 }
587 575
588 // Get decoded video frame properties. 576 // Get decoded video frame properties.
589 int color_format = GetIntField(jni, *j_media_codec_video_decoder_, 577 int color_format = GetIntField(jni, *j_media_codec_video_decoder_,
590 j_color_format_field_); 578 j_color_format_field_);
591 int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_); 579 int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_);
592 int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_); 580 int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_);
593 int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_); 581 int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_);
594 int slice_height = GetIntField(jni, *j_media_codec_video_decoder_, 582 int slice_height = GetIntField(jni, *j_media_codec_video_decoder_,
595 j_slice_height_field_); 583 j_slice_height_field_);
596 584
597 rtc::scoped_refptr<webrtc::VideoFrameBuffer> frame_buffer; 585 rtc::scoped_refptr<webrtc::VideoFrameBuffer> frame_buffer;
598 long output_timestamps_ms = 0; 586 long output_timestamps_ms = 0;
587 int decode_time_ms = 0;
599 if (use_surface_) { 588 if (use_surface_) {
600 // Extract data from Java DecodedTextureBuffer. 589 // Extract data from Java DecodedTextureBuffer.
601 const int texture_id = 590 const int texture_id =
602 GetIntField(jni, j_decoder_output_buffer, j_textureID_field_); 591 GetIntField(jni, j_decoder_output_buffer, j_textureID_field_);
603 const int64_t timestamp_us = 592 const jfloatArray j_transform_matrix =
604 GetLongField(jni, j_decoder_output_buffer, 593 reinterpret_cast<jfloatArray>(GetObjectField(
605 j_texture_presentation_timestamp_us_field_); 594 jni, j_decoder_output_buffer, j_transform_matrix_field_));
606 output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec; 595 const int64_t timestamp_ns = GetLongField(jni, j_decoder_output_buffer,
596 j_texture_timestamp_ns_field_);
597 output_timestamps_ms = timestamp_ns / rtc::kNumNanosecsPerMillisec;
598
599 decode_time_ms = GetLongField(jni, j_decoder_output_buffer,
600 j_texture_decode_time_ms_field_);
601 output_timestamps_ms = timestamp_ns / rtc::kNumNanosecsPerMillisec;
magjed_webrtc 2015/10/28 11:57:16 This is a duplicated line.
perkj_webrtc 2015/10/28 21:12:39 Done.
602
607 // Create webrtc::VideoFrameBuffer with native texture handle. 603 // Create webrtc::VideoFrameBuffer with native texture handle.
608 native_handle_.SetTextureObject(surface_texture_, texture_id); 604 frame_buffer = surface_texture_helper_->CreateTextureFrame(
609 frame_buffer = new rtc::RefCountedObject<JniNativeHandleBuffer>( 605 width, height, NativeHandleImpl(jni, texture_id, j_transform_matrix));
610 &native_handle_, width, height);
611 } else { 606 } else {
612 // Extract data from Java ByteBuffer and create output yuv420 frame - 607 // Extract data from Java ByteBuffer and create output yuv420 frame -
613 // for non surface decoding only. 608 // for non surface decoding only.
614 const int output_buffer_index = 609 const int output_buffer_index =
615 GetIntField(jni, j_decoder_output_buffer, j_info_index_field_); 610 GetIntField(jni, j_decoder_output_buffer, j_info_index_field_);
616 const int output_buffer_offset = 611 const int output_buffer_offset =
617 GetIntField(jni, j_decoder_output_buffer, j_info_offset_field_); 612 GetIntField(jni, j_decoder_output_buffer, j_info_offset_field_);
618 const int output_buffer_size = 613 const int output_buffer_size =
619 GetIntField(jni, j_decoder_output_buffer, j_info_size_field_); 614 GetIntField(jni, j_decoder_output_buffer, j_info_size_field_);
620 const int64_t timestamp_us = GetLongField( 615 const int64_t timestamp_us = GetLongField(
(...skipping 64 matching lines...)
685 680
686 // Get frame timestamps from a queue. 681 // Get frame timestamps from a queue.
687 if (timestamps_.size() > 0) { 682 if (timestamps_.size() > 0) {
688 decoded_frame.set_timestamp(timestamps_.front()); 683 decoded_frame.set_timestamp(timestamps_.front());
689 timestamps_.erase(timestamps_.begin()); 684 timestamps_.erase(timestamps_.begin());
690 } 685 }
691 if (ntp_times_ms_.size() > 0) { 686 if (ntp_times_ms_.size() > 0) {
692 decoded_frame.set_ntp_time_ms(ntp_times_ms_.front()); 687 decoded_frame.set_ntp_time_ms(ntp_times_ms_.front());
693 ntp_times_ms_.erase(ntp_times_ms_.begin()); 688 ntp_times_ms_.erase(ntp_times_ms_.begin());
694 } 689 }
695 int64_t frame_decoding_time_ms = 0; 690 int64_t frame_delayed_ms = 0;
696 if (frame_rtc_times_ms_.size() > 0) { 691 if (frame_rtc_times_ms_.size() > 0) {
697 frame_decoding_time_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front(); 692 frame_delayed_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front();
698 frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin()); 693 frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
699 } 694 }
700 ALOGV("Decoder frame out # %d. %d x %d. %d x %d. Color: 0x%x. TS: %ld." 695 ALOGV("Decoder frame out # %d. %d x %d. %d x %d. Color: 0x%x. TS: %ld."
701 " DecTime: %lld", frames_decoded_, width, height, stride, slice_height, 696 " DelayTime: %lld", frames_decoded_, width, height, stride, slice_height,
magjed_webrtc 2015/10/28 11:57:16 I think you should print both latency and decode time.
perkj_webrtc 2015/10/28 21:12:39 Done.
702 color_format, output_timestamps_ms, frame_decoding_time_ms); 697 color_format, output_timestamps_ms, frame_delayed_ms);
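
The suggestion above is to report both values: decode_time_ms (the codec's own decode time, taken from the Java DecodedTextureBuffer) and frame_delayed_ms (how long the frame sat before being delivered). A hedged sketch of such a log line, using only variables already present in this patch; the wording in the landed patch set may differ:

    // Sketch only: prints both decode time and delay, per the review comment.
    ALOGV("Decoder frame out # %d. %d x %d. %d x %d. Color: 0x%x. TS: %ld."
          " DecTime: %d. DelayTime: %lld", frames_decoded_, width, height,
          stride, slice_height, color_format, output_timestamps_ms,
          decode_time_ms, frame_delayed_ms);
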
703 698
704 // Calculate and print decoding statistics - every 3 seconds. 699 // Calculate and print decoding statistics - every 3 seconds.
705 frames_decoded_++; 700 frames_decoded_++;
706 current_frames_++; 701 current_frames_++;
707 current_decoding_time_ms_ += frame_decoding_time_ms; 702 current_decoding_time_ms_ += decode_time_ms;
magjed_webrtc 2015/10/28 11:57:16 You need to set |decode_time_ms| for ByteBuffer output.
perkj_webrtc 2015/10/28 21:12:39 Done.
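
In this patch set decode_time_ms is only assigned in the surface path, so for byte-buffer output it stays 0 both in the statistics here and in the Decoded() callback below. One possible way to address the comment (an assumption about the follow-up, not code from this patch set) is to have the Java DecodedByteBuffer carry a decode time too and read it in the byte-buffer branch, mirroring the texture branch; the field ID below is hypothetical:

    // Hypothetical: assumes DecodedByteBuffer gains a long decodeTimeMs field
    // and a cached j_info_decode_time_ms_field_ jfieldID, like the texture path.
    decode_time_ms = GetLongField(jni, j_decoder_output_buffer,
                                  j_info_decode_time_ms_field_);
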
708 int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_; 703 int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
709 if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs && 704 if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
710 current_frames_ > 0) { 705 current_frames_ > 0) {
711 ALOGD << "Decoded frames: " << frames_decoded_ << ". Bitrate: " << 706 ALOGD << "Decoded frames: " << frames_decoded_ << ". Received frames: "
707 << frames_received_ << ". Bitrate: " <<
712 (current_bytes_ * 8 / statistic_time_ms) << " kbps, fps: " << 708 (current_bytes_ * 8 / statistic_time_ms) << " kbps, fps: " <<
713 ((current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms) 709 ((current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms)
714 << ". decTime: " << (current_decoding_time_ms_ / current_frames_) << 710 << ". decTime: " << (current_decoding_time_ms_ / current_frames_) <<
715 " for last " << statistic_time_ms << " ms."; 711 " for last " << statistic_time_ms << " ms.";
716 start_time_ms_ = GetCurrentTimeMs(); 712 start_time_ms_ = GetCurrentTimeMs();
717 current_frames_ = 0; 713 current_frames_ = 0;
718 current_bytes_ = 0; 714 current_bytes_ = 0;
719 current_decoding_time_ms_ = 0; 715 current_decoding_time_ms_ = 0;
720 } 716 }
721 717
722 // Callback - output decoded frame. 718 // Callback - output decoded frame.
723 const int32_t callback_status = callback_->Decoded(decoded_frame); 719 const int32_t callback_status =
720 callback_->Decoded(decoded_frame, decode_time_ms);
724 if (callback_status > 0) { 721 if (callback_status > 0) {
725 ALOGE << "callback error"; 722 ALOGE << "callback error";
726 } 723 }
727 724
728 return true; 725 return true;
729 } 726 }
730 727
731 int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback( 728 int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback(
732 DecodedImageCallback* callback) { 729 DecodedImageCallback* callback) {
733 callback_ = callback; 730 callback_ = callback;
(...skipping 112 matching lines...)
846 } 843 }
847 844
848 void MediaCodecVideoDecoderFactory::DestroyVideoDecoder( 845 void MediaCodecVideoDecoderFactory::DestroyVideoDecoder(
849 webrtc::VideoDecoder* decoder) { 846 webrtc::VideoDecoder* decoder) {
850 ALOGD << "Destroy video decoder."; 847 ALOGD << "Destroy video decoder.";
851 delete decoder; 848 delete decoder;
852 } 849 }
853 850
854 } // namespace webrtc_jni 851 } // namespace webrtc_jni
855 852