OLD | NEW |
1 /* | 1 /* |
2 * libjingle | 2 * libjingle |
3 * Copyright 2015 Google Inc. | 3 * Copyright 2015 Google Inc. |
4 * | 4 * |
5 * Redistribution and use in source and binary forms, with or without | 5 * Redistribution and use in source and binary forms, with or without |
6 * modification, are permitted provided that the following conditions are met: | 6 * modification, are permitted provided that the following conditions are met: |
7 * | 7 * |
8 * 1. Redistributions of source code must retain the above copyright notice, | 8 * 1. Redistributions of source code must retain the above copyright notice, |
9 * this list of conditions and the following disclaimer. | 9 * this list of conditions and the following disclaimer. |
10 * 2. Redistributions in binary form must reproduce the above copyright notice, | 10 * 2. Redistributions in binary form must reproduce the above copyright notice, |
(...skipping 15 matching lines...) Expand all Loading... |
26 * | 26 * |
27 */ | 27 */ |
28 | 28 |
29 #include <algorithm> | 29 #include <algorithm> |
30 #include <vector> | 30 #include <vector> |
31 | 31 |
32 #include "talk/app/webrtc/java/jni/androidmediadecoder_jni.h" | 32 #include "talk/app/webrtc/java/jni/androidmediadecoder_jni.h" |
33 #include "talk/app/webrtc/java/jni/androidmediacodeccommon.h" | 33 #include "talk/app/webrtc/java/jni/androidmediacodeccommon.h" |
34 #include "talk/app/webrtc/java/jni/classreferenceholder.h" | 34 #include "talk/app/webrtc/java/jni/classreferenceholder.h" |
35 #include "talk/app/webrtc/java/jni/native_handle_impl.h" | 35 #include "talk/app/webrtc/java/jni/native_handle_impl.h" |
| 36 #include "talk/app/webrtc/java/jni/surfacetexturehelper_jni.h" |
36 #include "webrtc/base/bind.h" | 37 #include "webrtc/base/bind.h" |
37 #include "webrtc/base/checks.h" | 38 #include "webrtc/base/checks.h" |
38 #include "webrtc/base/logging.h" | 39 #include "webrtc/base/logging.h" |
39 #include "webrtc/base/scoped_ref_ptr.h" | 40 #include "webrtc/base/scoped_ref_ptr.h" |
40 #include "webrtc/base/thread.h" | 41 #include "webrtc/base/thread.h" |
41 #include "webrtc/base/timeutils.h" | 42 #include "webrtc/base/timeutils.h" |
42 #include "webrtc/common_video/include/i420_buffer_pool.h" | 43 #include "webrtc/common_video/include/i420_buffer_pool.h" |
43 #include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h" | 44 #include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h" |
44 #include "webrtc/system_wrappers/include/logcat_trace_context.h" | 45 #include "webrtc/system_wrappers/include/logcat_trace_context.h" |
45 #include "webrtc/system_wrappers/include/tick_util.h" | 46 #include "webrtc/system_wrappers/include/tick_util.h" |
(...skipping 59 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
105 | 106 |
106 // Type of video codec. | 107 // Type of video codec. |
107 VideoCodecType codecType_; | 108 VideoCodecType codecType_; |
108 | 109 |
109 bool key_frame_required_; | 110 bool key_frame_required_; |
110 bool inited_; | 111 bool inited_; |
111 bool sw_fallback_required_; | 112 bool sw_fallback_required_; |
112 bool use_surface_; | 113 bool use_surface_; |
113 VideoCodec codec_; | 114 VideoCodec codec_; |
114 webrtc::I420BufferPool decoded_frame_pool_; | 115 webrtc::I420BufferPool decoded_frame_pool_; |
115 NativeHandleImpl native_handle_; | 116 rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_; |
116 DecodedImageCallback* callback_; | 117 DecodedImageCallback* callback_; |
117 int frames_received_; // Number of frames received by decoder. | 118 int frames_received_; // Number of frames received by decoder. |
118 int frames_decoded_; // Number of frames decoded by decoder. | 119 int frames_decoded_; // Number of frames decoded by decoder. |
119 int64_t start_time_ms_; // Start time for statistics. | 120 int64_t start_time_ms_; // Start time for statistics. |
120 int current_frames_; // Number of frames in the current statistics interval. | 121 int current_frames_; // Number of frames in the current statistics interval. |
121 int current_bytes_; // Encoded bytes in the current statistics interval. | 122 int current_bytes_; // Encoded bytes in the current statistics interval. |
122 int current_decoding_time_ms_; // Overall decoding time in the current second | 123 int current_decoding_time_ms_; // Overall decoding time in the current second |
123 uint32_t max_pending_frames_; // Maximum number of pending input frames | 124 uint32_t max_pending_frames_; // Maximum number of pending input frames |
124 std::vector<int32_t> timestamps_; | |
125 std::vector<int64_t> ntp_times_ms_; | |
126 std::vector<int64_t> frame_rtc_times_ms_; // Time when video frame is sent to | |
127 // decoder input. | |
128 | 125 |
129 // State that is constant for the lifetime of this object once the ctor | 126 // State that is constant for the lifetime of this object once the ctor |
130 // returns. | 127 // returns. |
131 scoped_ptr<Thread> codec_thread_; // Thread on which to operate MediaCodec. | 128 scoped_ptr<Thread> codec_thread_; // Thread on which to operate MediaCodec. |
132 ScopedGlobalRef<jclass> j_media_codec_video_decoder_class_; | 129 ScopedGlobalRef<jclass> j_media_codec_video_decoder_class_; |
133 ScopedGlobalRef<jobject> j_media_codec_video_decoder_; | 130 ScopedGlobalRef<jobject> j_media_codec_video_decoder_; |
134 jmethodID j_init_decode_method_; | 131 jmethodID j_init_decode_method_; |
135 jmethodID j_release_method_; | 132 jmethodID j_release_method_; |
136 jmethodID j_dequeue_input_buffer_method_; | 133 jmethodID j_dequeue_input_buffer_method_; |
137 jmethodID j_queue_input_buffer_method_; | 134 jmethodID j_queue_input_buffer_method_; |
138 jmethodID j_dequeue_output_buffer_method_; | 135 jmethodID j_dequeue_byte_buffer_method_; |
| 136 jmethodID j_dequeue_texture_buffer_method_; |
139 jmethodID j_return_decoded_byte_buffer_method_; | 137 jmethodID j_return_decoded_byte_buffer_method_; |
140 // MediaCodecVideoDecoder fields. | 138 // MediaCodecVideoDecoder fields. |
141 jfieldID j_input_buffers_field_; | 139 jfieldID j_input_buffers_field_; |
142 jfieldID j_output_buffers_field_; | 140 jfieldID j_output_buffers_field_; |
143 jfieldID j_color_format_field_; | 141 jfieldID j_color_format_field_; |
144 jfieldID j_width_field_; | 142 jfieldID j_width_field_; |
145 jfieldID j_height_field_; | 143 jfieldID j_height_field_; |
146 jfieldID j_stride_field_; | 144 jfieldID j_stride_field_; |
147 jfieldID j_slice_height_field_; | 145 jfieldID j_slice_height_field_; |
148 jfieldID j_surface_texture_field_; | |
149 // MediaCodecVideoDecoder.DecodedTextureBuffer fields. | 146 // MediaCodecVideoDecoder.DecodedTextureBuffer fields. |
150 jfieldID j_textureID_field_; | 147 jfieldID j_texture_id_field_; |
151 jfieldID j_texture_presentation_timestamp_us_field_; | 148 jfieldID j_transform_matrix_field_; |
152 // MediaCodecVideoDecoder.DecodedByteBuffer fields. | 149 jfieldID j_texture_timestamp_ms_field_; |
| 150 jfieldID j_texture_ntp_timestamp_ms_field_; |
| 151 jfieldID j_texture_decode_time_ms_field_; |
| 152 jfieldID j_texture_frame_delay_ms_field_; |
| 153 // MediaCodecVideoDecoder.DecodedOutputBuffer fields. |
153 jfieldID j_info_index_field_; | 154 jfieldID j_info_index_field_; |
154 jfieldID j_info_offset_field_; | 155 jfieldID j_info_offset_field_; |
155 jfieldID j_info_size_field_; | 156 jfieldID j_info_size_field_; |
156 jfieldID j_info_presentation_timestamp_us_field_; | 157 jfieldID j_info_timestamp_ms_field_; |
| 158 jfieldID j_info_ntp_timestamp_ms_field_; |
| 159 jfieldID j_byte_buffer_decode_time_ms_field_; |
157 | 160 |
158 // Global references; must be deleted in Release(). | 161 // Global references; must be deleted in Release(). |
159 std::vector<jobject> input_buffers_; | 162 std::vector<jobject> input_buffers_; |
160 jobject surface_texture_; | |
161 jobject previous_surface_texture_; | |
162 | 163 |
163 // Render EGL context - owned by factory, should not be allocated/destroyed | 164 // Render EGL context - owned by factory, should not be allocated/destroyed |
164 // by VideoDecoder. | 165 // by VideoDecoder. |
165 jobject render_egl_context_; | 166 jobject render_egl_context_; |
166 }; | 167 }; |
167 | 168 |
168 MediaCodecVideoDecoder::MediaCodecVideoDecoder( | 169 MediaCodecVideoDecoder::MediaCodecVideoDecoder( |
169 JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context) : | 170 JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context) : |
170 codecType_(codecType), | 171 codecType_(codecType), |
171 render_egl_context_(render_egl_context), | 172 render_egl_context_(render_egl_context), |
172 key_frame_required_(true), | 173 key_frame_required_(true), |
173 inited_(false), | 174 inited_(false), |
174 sw_fallback_required_(false), | 175 sw_fallback_required_(false), |
175 surface_texture_(NULL), | |
176 previous_surface_texture_(NULL), | |
177 codec_thread_(new Thread()), | 176 codec_thread_(new Thread()), |
178 j_media_codec_video_decoder_class_( | 177 j_media_codec_video_decoder_class_( |
179 jni, | 178 jni, |
180 FindClass(jni, "org/webrtc/MediaCodecVideoDecoder")), | 179 FindClass(jni, "org/webrtc/MediaCodecVideoDecoder")), |
181 j_media_codec_video_decoder_( | 180 j_media_codec_video_decoder_( |
182 jni, | 181 jni, |
183 jni->NewObject(*j_media_codec_video_decoder_class_, | 182 jni->NewObject(*j_media_codec_video_decoder_class_, |
184 GetMethodID(jni, | 183 GetMethodID(jni, |
185 *j_media_codec_video_decoder_class_, | 184 *j_media_codec_video_decoder_class_, |
186 "<init>", | 185 "<init>", |
187 "()V"))) { | 186 "()V"))) { |
188 ScopedLocalRefFrame local_ref_frame(jni); | 187 ScopedLocalRefFrame local_ref_frame(jni); |
189 codec_thread_->SetName("MediaCodecVideoDecoder", NULL); | 188 codec_thread_->SetName("MediaCodecVideoDecoder", NULL); |
190 RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoDecoder"; | 189 RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoDecoder"; |
191 | 190 |
192 j_init_decode_method_ = GetMethodID( | 191 j_init_decode_method_ = GetMethodID( |
193 jni, *j_media_codec_video_decoder_class_, "initDecode", | 192 jni, *j_media_codec_video_decoder_class_, "initDecode", |
194 "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;" | 193 "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;" |
195 "IILjavax/microedition/khronos/egl/EGLContext;)Z"); | 194 "IILorg/webrtc/SurfaceTextureHelper;)Z"); |
196 j_release_method_ = | 195 j_release_method_ = |
197 GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V"); | 196 GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V"); |
198 j_dequeue_input_buffer_method_ = GetMethodID( | 197 j_dequeue_input_buffer_method_ = GetMethodID( |
199 jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I"); | 198 jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I"); |
200 j_queue_input_buffer_method_ = GetMethodID( | 199 j_queue_input_buffer_method_ = GetMethodID( |
201 jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJ)Z"); | 200 jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJJJ)Z"); |
202 j_dequeue_output_buffer_method_ = GetMethodID( | 201 j_dequeue_byte_buffer_method_ = GetMethodID( |
203 jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer", | 202 jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer", |
204 "(I)Ljava/lang/Object;"); | 203 "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer;"); |
| 204 j_dequeue_texture_buffer_method_ = GetMethodID( |
| 205 jni, *j_media_codec_video_decoder_class_, "dequeueTextureBuffer", |
| 206 "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer;"); |
205 j_return_decoded_byte_buffer_method_ = | 207 j_return_decoded_byte_buffer_method_ = |
206 GetMethodID(jni, *j_media_codec_video_decoder_class_, | 208 GetMethodID(jni, *j_media_codec_video_decoder_class_, |
207 "returnDecodedByteBuffer", "(I)V"); | 209 "returnDecodedOutputBuffer", "(I)V"); |
208 | 210 |
209 j_input_buffers_field_ = GetFieldID( | 211 j_input_buffers_field_ = GetFieldID( |
210 jni, *j_media_codec_video_decoder_class_, | 212 jni, *j_media_codec_video_decoder_class_, |
211 "inputBuffers", "[Ljava/nio/ByteBuffer;"); | 213 "inputBuffers", "[Ljava/nio/ByteBuffer;"); |
212 j_output_buffers_field_ = GetFieldID( | 214 j_output_buffers_field_ = GetFieldID( |
213 jni, *j_media_codec_video_decoder_class_, | 215 jni, *j_media_codec_video_decoder_class_, |
214 "outputBuffers", "[Ljava/nio/ByteBuffer;"); | 216 "outputBuffers", "[Ljava/nio/ByteBuffer;"); |
215 j_color_format_field_ = GetFieldID( | 217 j_color_format_field_ = GetFieldID( |
216 jni, *j_media_codec_video_decoder_class_, "colorFormat", "I"); | 218 jni, *j_media_codec_video_decoder_class_, "colorFormat", "I"); |
217 j_width_field_ = GetFieldID( | 219 j_width_field_ = GetFieldID( |
218 jni, *j_media_codec_video_decoder_class_, "width", "I"); | 220 jni, *j_media_codec_video_decoder_class_, "width", "I"); |
219 j_height_field_ = GetFieldID( | 221 j_height_field_ = GetFieldID( |
220 jni, *j_media_codec_video_decoder_class_, "height", "I"); | 222 jni, *j_media_codec_video_decoder_class_, "height", "I"); |
221 j_stride_field_ = GetFieldID( | 223 j_stride_field_ = GetFieldID( |
222 jni, *j_media_codec_video_decoder_class_, "stride", "I"); | 224 jni, *j_media_codec_video_decoder_class_, "stride", "I"); |
223 j_slice_height_field_ = GetFieldID( | 225 j_slice_height_field_ = GetFieldID( |
224 jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I"); | 226 jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I"); |
225 j_surface_texture_field_ = GetFieldID( | |
226 jni, *j_media_codec_video_decoder_class_, "surfaceTexture", | |
227 "Landroid/graphics/SurfaceTexture;"); | |
228 | 227 |
229 jclass j_decoder_decoded_texture_buffer_class = FindClass(jni, | 228 jclass j_decoded_texture_buffer_class = FindClass(jni, |
230 "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer"); | 229 "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer"); |
231 j_textureID_field_ = GetFieldID( | 230 j_texture_id_field_ = GetFieldID( |
232 jni, j_decoder_decoded_texture_buffer_class, "textureID", "I"); | 231 jni, j_decoded_texture_buffer_class, "textureID", "I"); |
233 j_texture_presentation_timestamp_us_field_ = | 232 j_transform_matrix_field_ = GetFieldID( |
234 GetFieldID(jni, j_decoder_decoded_texture_buffer_class, | 233 jni, j_decoded_texture_buffer_class, "transformMatrix", "[F"); |
235 "presentationTimestampUs", "J"); | 234 j_texture_timestamp_ms_field_ = GetFieldID( |
| 235 jni, j_decoded_texture_buffer_class, "timeStampMs", "J"); |
| 236 j_texture_ntp_timestamp_ms_field_ = GetFieldID( |
| 237 jni, j_decoded_texture_buffer_class, "ntpTimeStampMs", "J"); |
| 238 j_texture_decode_time_ms_field_ = GetFieldID( |
| 239 jni, j_decoded_texture_buffer_class, "decodeTimeMs", "J"); |
| 240 j_texture_frame_delay_ms_field_ = GetFieldID( |
| 241 jni, j_decoded_texture_buffer_class, "frameDelayMs", "J"); |
236 | 242 |
237 jclass j_decoder_decoded_byte_buffer_class = FindClass(jni, | 243 jclass j_decoded_output_buffer_class = FindClass(jni, |
238 "org/webrtc/MediaCodecVideoDecoder$DecodedByteBuffer"); | 244 "org/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer"); |
239 j_info_index_field_ = GetFieldID( | 245 j_info_index_field_ = GetFieldID( |
240 jni, j_decoder_decoded_byte_buffer_class, "index", "I"); | 246 jni, j_decoded_output_buffer_class, "index", "I"); |
241 j_info_offset_field_ = GetFieldID( | 247 j_info_offset_field_ = GetFieldID( |
242 jni, j_decoder_decoded_byte_buffer_class, "offset", "I"); | 248 jni, j_decoded_output_buffer_class, "offset", "I"); |
243 j_info_size_field_ = GetFieldID( | 249 j_info_size_field_ = GetFieldID( |
244 jni, j_decoder_decoded_byte_buffer_class, "size", "I"); | 250 jni, j_decoded_output_buffer_class, "size", "I"); |
245 j_info_presentation_timestamp_us_field_ = GetFieldID( | 251 j_info_timestamp_ms_field_ = GetFieldID( |
246 jni, j_decoder_decoded_byte_buffer_class, "presentationTimestampUs", "J"); | 252 jni, j_decoded_output_buffer_class, "timeStampMs", "J"); |
| 253 j_info_ntp_timestamp_ms_field_ = GetFieldID( |
| 254 jni, j_decoded_output_buffer_class, "ntpTimeStampMs", "J"); |
| 255 j_byte_buffer_decode_time_ms_field_ = GetFieldID( |
| 256 jni, j_decoded_output_buffer_class, "decodeTimeMs", "J"); |
247 | 257 |
248 CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed"; | 258 CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed"; |
249 use_surface_ = (render_egl_context_ != NULL); | 259 use_surface_ = (render_egl_context_ != NULL); |
250 ALOGD << "MediaCodecVideoDecoder ctor. Use surface: " << use_surface_; | 260 ALOGD << "MediaCodecVideoDecoder ctor. Use surface: " << use_surface_; |
251 memset(&codec_, 0, sizeof(codec_)); | 261 memset(&codec_, 0, sizeof(codec_)); |
252 AllowBlockingCalls(); | 262 AllowBlockingCalls(); |
253 } | 263 } |
254 | 264 |
255 MediaCodecVideoDecoder::~MediaCodecVideoDecoder() { | 265 MediaCodecVideoDecoder::~MediaCodecVideoDecoder() { |
256 // Call Release() to ensure no more callbacks to us after we are deleted. | 266 // Call Release() to ensure no more callbacks to us after we are deleted. |
257 Release(); | 267 Release(); |
258 // Delete global references. | |
259 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | |
260 if (previous_surface_texture_ != NULL) { | |
261 jni->DeleteGlobalRef(previous_surface_texture_); | |
262 } | |
263 if (surface_texture_ != NULL) { | |
264 jni->DeleteGlobalRef(surface_texture_); | |
265 } | |
266 } | 268 } |
267 | 269 |
268 int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst, | 270 int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst, |
269 int32_t numberOfCores) { | 271 int32_t numberOfCores) { |
270 ALOGD << "InitDecode."; | 272 ALOGD << "InitDecode."; |
271 if (inst == NULL) { | 273 if (inst == NULL) { |
272 ALOGE << "NULL VideoCodec instance"; | 274 ALOGE << "NULL VideoCodec instance"; |
273 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 275 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
274 } | 276 } |
275 // Factory should guard against other codecs being used with us. | 277 // Factory should guard against other codecs being used with us. |
(...skipping 30 matching lines...) Expand all Loading... |
306 ALOGE << "Release failure: " << ret_val << " - fallback to SW codec"; | 308 ALOGE << "Release failure: " << ret_val << " - fallback to SW codec"; |
307 sw_fallback_required_ = true; | 309 sw_fallback_required_ = true; |
308 return WEBRTC_VIDEO_CODEC_ERROR; | 310 return WEBRTC_VIDEO_CODEC_ERROR; |
309 } | 311 } |
310 | 312 |
311 // Always start with a complete key frame. | 313 // Always start with a complete key frame. |
312 key_frame_required_ = true; | 314 key_frame_required_ = true; |
313 frames_received_ = 0; | 315 frames_received_ = 0; |
314 frames_decoded_ = 0; | 316 frames_decoded_ = 0; |
315 | 317 |
| 318 if (use_surface_) { |
| 319 surface_texture_helper_ = new rtc::RefCountedObject<SurfaceTextureHelper>( |
| 320 jni, render_egl_context_); |
| 321 } |
| 322 |
316 jobject j_video_codec_enum = JavaEnumFromIndex( | 323 jobject j_video_codec_enum = JavaEnumFromIndex( |
317 jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_); | 324 jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_); |
318 bool success = jni->CallBooleanMethod( | 325 bool success = jni->CallBooleanMethod( |
319 *j_media_codec_video_decoder_, | 326 *j_media_codec_video_decoder_, |
320 j_init_decode_method_, | 327 j_init_decode_method_, |
321 j_video_codec_enum, | 328 j_video_codec_enum, |
322 codec_.width, | 329 codec_.width, |
323 codec_.height, | 330 codec_.height, |
324 use_surface_ ? render_egl_context_ : nullptr); | 331 use_surface_ ? surface_texture_helper_->GetJavaSurfaceTextureHelper() |
| 332 : nullptr); |
325 if (CheckException(jni) || !success) { | 333 if (CheckException(jni) || !success) { |
326 ALOGE << "Codec initialization error - fallback to SW codec."; | 334 ALOGE << "Codec initialization error - fallback to SW codec."; |
327 sw_fallback_required_ = true; | 335 sw_fallback_required_ = true; |
328 return WEBRTC_VIDEO_CODEC_ERROR; | 336 return WEBRTC_VIDEO_CODEC_ERROR; |
329 } | 337 } |
330 inited_ = true; | 338 inited_ = true; |
331 | 339 |
332 switch (codecType_) { | 340 switch (codecType_) { |
333 case kVideoCodecVP8: | 341 case kVideoCodecVP8: |
334 max_pending_frames_ = kMaxPendingFramesVp8; | 342 max_pending_frames_ = kMaxPendingFramesVp8; |
335 break; | 343 break; |
336 case kVideoCodecVP9: | 344 case kVideoCodecVP9: |
337 max_pending_frames_ = kMaxPendingFramesVp9; | 345 max_pending_frames_ = kMaxPendingFramesVp9; |
338 break; | 346 break; |
339 case kVideoCodecH264: | 347 case kVideoCodecH264: |
340 max_pending_frames_ = kMaxPendingFramesH264; | 348 max_pending_frames_ = kMaxPendingFramesH264; |
341 break; | 349 break; |
342 default: | 350 default: |
343 max_pending_frames_ = 0; | 351 max_pending_frames_ = 0; |
344 } | 352 } |
345 start_time_ms_ = GetCurrentTimeMs(); | 353 start_time_ms_ = GetCurrentTimeMs(); |
346 current_frames_ = 0; | 354 current_frames_ = 0; |
347 current_bytes_ = 0; | 355 current_bytes_ = 0; |
348 current_decoding_time_ms_ = 0; | 356 current_decoding_time_ms_ = 0; |
349 timestamps_.clear(); | |
350 ntp_times_ms_.clear(); | |
351 frame_rtc_times_ms_.clear(); | |
352 | 357 |
353 jobjectArray input_buffers = (jobjectArray)GetObjectField( | 358 jobjectArray input_buffers = (jobjectArray)GetObjectField( |
354 jni, *j_media_codec_video_decoder_, j_input_buffers_field_); | 359 jni, *j_media_codec_video_decoder_, j_input_buffers_field_); |
355 size_t num_input_buffers = jni->GetArrayLength(input_buffers); | 360 size_t num_input_buffers = jni->GetArrayLength(input_buffers); |
356 ALOGD << "Maximum amount of pending frames: " << max_pending_frames_; | 361 ALOGD << "Maximum amount of pending frames: " << max_pending_frames_; |
357 input_buffers_.resize(num_input_buffers); | 362 input_buffers_.resize(num_input_buffers); |
358 for (size_t i = 0; i < num_input_buffers; ++i) { | 363 for (size_t i = 0; i < num_input_buffers; ++i) { |
359 input_buffers_[i] = | 364 input_buffers_[i] = |
360 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i)); | 365 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i)); |
361 if (CheckException(jni)) { | 366 if (CheckException(jni)) { |
362 ALOGE << "NewGlobalRef error - fallback to SW codec."; | 367 ALOGE << "NewGlobalRef error - fallback to SW codec."; |
363 sw_fallback_required_ = true; | 368 sw_fallback_required_ = true; |
364 return WEBRTC_VIDEO_CODEC_ERROR; | 369 return WEBRTC_VIDEO_CODEC_ERROR; |
365 } | 370 } |
366 } | 371 } |
367 | 372 |
368 if (use_surface_) { | |
369 jobject surface_texture = GetObjectField( | |
370 jni, *j_media_codec_video_decoder_, j_surface_texture_field_); | |
371 if (previous_surface_texture_ != NULL) { | |
372 jni->DeleteGlobalRef(previous_surface_texture_); | |
373 } | |
374 previous_surface_texture_ = surface_texture_; | |
375 surface_texture_ = jni->NewGlobalRef(surface_texture); | |
376 } | |
377 codec_thread_->PostDelayed(kMediaCodecPollMs, this); | 373 codec_thread_->PostDelayed(kMediaCodecPollMs, this); |
378 | 374 |
379 return WEBRTC_VIDEO_CODEC_OK; | 375 return WEBRTC_VIDEO_CODEC_OK; |
380 } | 376 } |
381 | 377 |
382 int32_t MediaCodecVideoDecoder::Release() { | 378 int32_t MediaCodecVideoDecoder::Release() { |
383 ALOGD << "DecoderRelease request"; | 379 ALOGD << "DecoderRelease request"; |
384 return codec_thread_->Invoke<int32_t>( | 380 return codec_thread_->Invoke<int32_t>( |
385 Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this)); | 381 Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this)); |
386 } | 382 } |
387 | 383 |
388 int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() { | 384 int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() { |
389 if (!inited_) { | 385 if (!inited_) { |
390 return WEBRTC_VIDEO_CODEC_OK; | 386 return WEBRTC_VIDEO_CODEC_OK; |
391 } | 387 } |
392 CheckOnCodecThread(); | 388 CheckOnCodecThread(); |
393 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 389 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
394 ALOGD << "DecoderReleaseOnCodecThread: Frames received: " << | 390 ALOGD << "DecoderReleaseOnCodecThread: Frames received: " << |
395 frames_received_ << ". Frames decoded: " << frames_decoded_; | 391 frames_received_ << ". Frames decoded: " << frames_decoded_; |
396 ScopedLocalRefFrame local_ref_frame(jni); | 392 ScopedLocalRefFrame local_ref_frame(jni); |
397 for (size_t i = 0; i < input_buffers_.size(); i++) { | 393 for (size_t i = 0; i < input_buffers_.size(); i++) { |
398 jni->DeleteGlobalRef(input_buffers_[i]); | 394 jni->DeleteGlobalRef(input_buffers_[i]); |
399 } | 395 } |
400 input_buffers_.clear(); | 396 input_buffers_.clear(); |
401 jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_); | 397 jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_); |
| 398 surface_texture_helper_ = nullptr; |
402 inited_ = false; | 399 inited_ = false; |
403 rtc::MessageQueueManager::Clear(this); | 400 rtc::MessageQueueManager::Clear(this); |
404 if (CheckException(jni)) { | 401 if (CheckException(jni)) { |
405 ALOGE << "Decoder release exception"; | 402 ALOGE << "Decoder release exception"; |
406 return WEBRTC_VIDEO_CODEC_ERROR; | 403 return WEBRTC_VIDEO_CODEC_ERROR; |
407 } | 404 } |
408 ALOGD << "DecoderReleaseOnCodecThread done"; | 405 ALOGD << "DecoderReleaseOnCodecThread done"; |
409 return WEBRTC_VIDEO_CODEC_OK; | 406 return WEBRTC_VIDEO_CODEC_OK; |
410 } | 407 } |
411 | 408 |
(...skipping 86 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
498 } | 495 } |
499 | 496 |
500 int32_t MediaCodecVideoDecoder::DecodeOnCodecThread( | 497 int32_t MediaCodecVideoDecoder::DecodeOnCodecThread( |
501 const EncodedImage& inputImage) { | 498 const EncodedImage& inputImage) { |
502 CheckOnCodecThread(); | 499 CheckOnCodecThread(); |
503 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 500 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
504 ScopedLocalRefFrame local_ref_frame(jni); | 501 ScopedLocalRefFrame local_ref_frame(jni); |
505 | 502 |
506 // Try to drain the decoder and wait until output is not too | 503 // Try to drain the decoder and wait until output is not too |
507 // much behind the input. | 504 // much behind the input. |
508 if (frames_received_ > frames_decoded_ + max_pending_frames_) { | 505 const int64 drain_start = GetCurrentTimeMs(); |
| 506 while ((frames_received_ > frames_decoded_ + max_pending_frames_) && |
| 507 (GetCurrentTimeMs() - drain_start) < kMediaCodecTimeoutMs) { |
509 ALOGV("Received: %d. Decoded: %d. Wait for output...", | 508 ALOGV("Received: %d. Decoded: %d. Wait for output...", |
510 frames_received_, frames_decoded_); | 509 frames_received_, frames_decoded_); |
511 if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs * 1000)) { | 510 if (!DeliverPendingOutputs(jni, kMediaCodecPollMs)) { |
512 ALOGE << "DeliverPendingOutputs error. Frames received: " << | 511 ALOGE << "DeliverPendingOutputs error. Frames received: " << |
513 frames_received_ << ". Frames decoded: " << frames_decoded_; | 512 frames_received_ << ". Frames decoded: " << frames_decoded_; |
514 return ProcessHWErrorOnCodecThread(); | 513 return ProcessHWErrorOnCodecThread(); |
515 } | 514 } |
516 if (frames_received_ > frames_decoded_ + max_pending_frames_) { | 515 } |
517 ALOGE << "Output buffer dequeue timeout. Frames received: " << | 516 if (frames_received_ > frames_decoded_ + max_pending_frames_) { |
518 frames_received_ << ". Frames decoded: " << frames_decoded_; | 517 ALOGE << "Output buffer dequeue timeout. Frames received: " << |
519 return ProcessHWErrorOnCodecThread(); | 518 frames_received_ << ". Frames decoded: " << frames_decoded_; |
520 } | 519 return ProcessHWErrorOnCodecThread(); |
521 } | 520 } |
522 | 521 |
523 // Get input buffer. | 522 // Get input buffer. |
524 int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_decoder_, | 523 int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_decoder_, |
525 j_dequeue_input_buffer_method_); | 524 j_dequeue_input_buffer_method_); |
526 if (CheckException(jni) || j_input_buffer_index < 0) { | 525 if (CheckException(jni) || j_input_buffer_index < 0) { |
527 ALOGE << "dequeueInputBuffer error"; | 526 ALOGE << "dequeueInputBuffer error"; |
528 return ProcessHWErrorOnCodecThread(); | 527 return ProcessHWErrorOnCodecThread(); |
529 } | 528 } |
530 | 529 |
531 // Copy encoded data to Java ByteBuffer. | 530 // Copy encoded data to Java ByteBuffer. |
532 jobject j_input_buffer = input_buffers_[j_input_buffer_index]; | 531 jobject j_input_buffer = input_buffers_[j_input_buffer_index]; |
533 uint8_t* buffer = | 532 uint8_t* buffer = |
534 reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer)); | 533 reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer)); |
535 RTC_CHECK(buffer) << "Indirect buffer??"; | 534 RTC_CHECK(buffer) << "Indirect buffer??"; |
536 int64_t buffer_capacity = jni->GetDirectBufferCapacity(j_input_buffer); | 535 int64_t buffer_capacity = jni->GetDirectBufferCapacity(j_input_buffer); |
537 if (CheckException(jni) || buffer_capacity < inputImage._length) { | 536 if (CheckException(jni) || buffer_capacity < inputImage._length) { |
538 ALOGE << "Input frame size "<< inputImage._length << | 537 ALOGE << "Input frame size "<< inputImage._length << |
539 " is bigger than buffer size " << buffer_capacity; | 538 " is bigger than buffer size " << buffer_capacity; |
540 return ProcessHWErrorOnCodecThread(); | 539 return ProcessHWErrorOnCodecThread(); |
541 } | 540 } |
542 jlong timestamp_us = (frames_received_ * 1000000) / codec_.maxFramerate; | 541 jlong presentation_timestamp_us = |
| 542 (frames_received_ * 1000000) / codec_.maxFramerate; |
543 if (frames_decoded_ < kMaxDecodedLogFrames) { | 543 if (frames_decoded_ < kMaxDecodedLogFrames) { |
544 ALOGD << "Decoder frame in # " << frames_received_ << ". Type: " | 544 ALOGD << "Decoder frame in # " << frames_received_ << ". Type: " |
545 << inputImage._frameType << ". Buffer # " << | 545 << inputImage._frameType << ". Buffer # " << |
546 j_input_buffer_index << ". TS: " << (int)(timestamp_us / 1000) | 546 j_input_buffer_index << ". TS: " |
| 547 << (int)(presentation_timestamp_us / 1000) |
547 << ". Size: " << inputImage._length; | 548 << ". Size: " << inputImage._length; |
548 } | 549 } |
549 memcpy(buffer, inputImage._buffer, inputImage._length); | 550 memcpy(buffer, inputImage._buffer, inputImage._length); |
550 | 551 |
551 // Save input image timestamps for later output. | 552 // Save input image timestamps for later output. |
552 frames_received_++; | 553 frames_received_++; |
553 current_bytes_ += inputImage._length; | 554 current_bytes_ += inputImage._length; |
554 timestamps_.push_back(inputImage._timeStamp); | |
555 ntp_times_ms_.push_back(inputImage.ntp_time_ms_); | |
556 frame_rtc_times_ms_.push_back(GetCurrentTimeMs()); | |
557 | 555 |
558 // Feed input to decoder. | 556 // Feed input to decoder. |
559 bool success = jni->CallBooleanMethod(*j_media_codec_video_decoder_, | 557 bool success = jni->CallBooleanMethod( |
560 j_queue_input_buffer_method_, | 558 *j_media_codec_video_decoder_, |
561 j_input_buffer_index, | 559 j_queue_input_buffer_method_, |
562 inputImage._length, | 560 j_input_buffer_index, |
563 timestamp_us); | 561 inputImage._length, |
| 562 presentation_timestamp_us, |
| 563 static_cast<int64_t> (inputImage._timeStamp), |
| 564 inputImage.ntp_time_ms_); |
564 if (CheckException(jni) || !success) { | 565 if (CheckException(jni) || !success) { |
565 ALOGE << "queueInputBuffer error"; | 566 ALOGE << "queueInputBuffer error"; |
566 return ProcessHWErrorOnCodecThread(); | 567 return ProcessHWErrorOnCodecThread(); |
567 } | 568 } |
568 | 569 |
569 // Try to drain the decoder | 570 // Try to drain the decoder |
570 if (!DeliverPendingOutputs(jni, 0)) { | 571 if (!DeliverPendingOutputs(jni, 0)) { |
571 ALOGE << "DeliverPendingOutputs error"; | 572 ALOGE << "DeliverPendingOutputs error"; |
572 return ProcessHWErrorOnCodecThread(); | 573 return ProcessHWErrorOnCodecThread(); |
573 } | 574 } |
574 | 575 |
575 return WEBRTC_VIDEO_CODEC_OK; | 576 return WEBRTC_VIDEO_CODEC_OK; |
576 } | 577 } |
577 | 578 |
578 bool MediaCodecVideoDecoder::DeliverPendingOutputs( | 579 bool MediaCodecVideoDecoder::DeliverPendingOutputs( |
579 JNIEnv* jni, int dequeue_timeout_us) { | 580 JNIEnv* jni, int dequeue_timeout_ms) { |
580 if (frames_received_ <= frames_decoded_) { | 581 if (frames_received_ <= frames_decoded_) { |
581 // No need to query for output buffers - decoder is drained. | 582 // No need to query for output buffers - decoder is drained. |
582 return true; | 583 return true; |
583 } | 584 } |
584 // Get decoder output. | 585 // Get decoder output. |
585 jobject j_decoder_output_buffer = jni->CallObjectMethod( | 586 jobject j_decoder_output_buffer = |
586 *j_media_codec_video_decoder_, | 587 jni->CallObjectMethod(*j_media_codec_video_decoder_, |
587 j_dequeue_output_buffer_method_, | 588 use_surface_ ? j_dequeue_texture_buffer_method_ |
588 dequeue_timeout_us); | 589 : j_dequeue_byte_buffer_method_, |
| 590 dequeue_timeout_ms); |
| 591 |
589 if (CheckException(jni)) { | 592 if (CheckException(jni)) { |
590 ALOGE << "dequeueOutputBuffer() error"; | 593 ALOGE << "dequeueOutputBuffer() error"; |
591 return false; | 594 return false; |
592 } | 595 } |
593 if (IsNull(jni, j_decoder_output_buffer)) { | 596 if (IsNull(jni, j_decoder_output_buffer)) { |
594 // No decoded frame ready. | 597 // No decoded frame ready. |
595 return true; | 598 return true; |
596 } | 599 } |
597 | 600 |
598 // Get decoded video frame properties. | 601 // Get decoded video frame properties. |
599 int color_format = GetIntField(jni, *j_media_codec_video_decoder_, | 602 int color_format = GetIntField(jni, *j_media_codec_video_decoder_, |
600 j_color_format_field_); | 603 j_color_format_field_); |
601 int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_); | 604 int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_); |
602 int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_); | 605 int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_); |
603 int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_); | 606 int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_); |
604 int slice_height = GetIntField(jni, *j_media_codec_video_decoder_, | 607 int slice_height = GetIntField(jni, *j_media_codec_video_decoder_, |
605 j_slice_height_field_); | 608 j_slice_height_field_); |
606 | 609 |
607 rtc::scoped_refptr<webrtc::VideoFrameBuffer> frame_buffer; | 610 rtc::scoped_refptr<webrtc::VideoFrameBuffer> frame_buffer; |
608 long output_timestamps_ms = 0; | 611 int64_t output_timestamps_ms = 0; |
| 612 int64_t output_ntp_timestamps_ms = 0; |
| 613 int decode_time_ms = 0; |
| 614 int64_t frame_delayed_ms = 0; |
609 if (use_surface_) { | 615 if (use_surface_) { |
610 // Extract data from Java DecodedTextureBuffer. | 616 // Extract data from Java DecodedTextureBuffer. |
611 const int texture_id = | 617 const int texture_id = |
612 GetIntField(jni, j_decoder_output_buffer, j_textureID_field_); | 618 GetIntField(jni, j_decoder_output_buffer, j_texture_id_field_); |
613 const int64_t timestamp_us = | 619 if (texture_id != 0) { // |texture_id| == 0 represents a dropped frame. |
614 GetLongField(jni, j_decoder_output_buffer, | 620 const jfloatArray j_transform_matrix = |
615 j_texture_presentation_timestamp_us_field_); | 621 reinterpret_cast<jfloatArray>(GetObjectField( |
616 output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec; | 622 jni, j_decoder_output_buffer, j_transform_matrix_field_)); |
617 // Create webrtc::VideoFrameBuffer with native texture handle. | 623 const int64_t timestamp_us = |
618 native_handle_.SetTextureObject(surface_texture_, texture_id); | 624 GetLongField(jni, j_decoder_output_buffer, |
619 frame_buffer = new rtc::RefCountedObject<JniNativeHandleBuffer>( | 625 j_texture_timestamp_ms_field_); |
620 &native_handle_, width, height); | 626 output_timestamps_ms = GetLongField(jni, j_decoder_output_buffer, |
| 627 j_texture_timestamp_ms_field_); |
| 628 output_ntp_timestamps_ms = |
| 629 GetLongField(jni, j_decoder_output_buffer, |
| 630 j_texture_ntp_timestamp_ms_field_); |
| 631 decode_time_ms = GetLongField(jni, j_decoder_output_buffer, |
| 632 j_texture_decode_time_ms_field_); |
| 633 frame_delayed_ms = GetLongField(jni, j_decoder_output_buffer, |
| 634 j_texture_frame_delay_ms_field_); |
| 635 |
| 636 // Create webrtc::VideoFrameBuffer with native texture handle. |
| 637 frame_buffer = surface_texture_helper_->CreateTextureFrame( |
| 638 width, height, NativeHandleImpl(jni, texture_id, j_transform_matrix)); |
| 639 } |
621 } else { | 640 } else { |
622 // Extract data from Java ByteBuffer and create output yuv420 frame - | 641 // Extract data from Java ByteBuffer and create output yuv420 frame - |
623 // for non surface decoding only. | 642 // for non surface decoding only. |
624 const int output_buffer_index = | 643 const int output_buffer_index = |
625 GetIntField(jni, j_decoder_output_buffer, j_info_index_field_); | 644 GetIntField(jni, j_decoder_output_buffer, j_info_index_field_); |
626 const int output_buffer_offset = | 645 const int output_buffer_offset = |
627 GetIntField(jni, j_decoder_output_buffer, j_info_offset_field_); | 646 GetIntField(jni, j_decoder_output_buffer, j_info_offset_field_); |
628 const int output_buffer_size = | 647 const int output_buffer_size = |
629 GetIntField(jni, j_decoder_output_buffer, j_info_size_field_); | 648 GetIntField(jni, j_decoder_output_buffer, j_info_size_field_); |
630 const int64_t timestamp_us = GetLongField( | 649 output_timestamps_ms = GetLongField(jni, j_decoder_output_buffer, |
631 jni, j_decoder_output_buffer, j_info_presentation_timestamp_us_field_); | 650 j_info_timestamp_ms_field_); |
632 output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec; | 651 output_ntp_timestamps_ms = |
| 652 GetLongField(jni, j_decoder_output_buffer, |
| 653 j_info_ntp_timestamp_ms_field_); |
| 654 |
| 655 decode_time_ms = GetLongField(jni, j_decoder_output_buffer, |
| 656 j_byte_buffer_decode_time_ms_field_); |
633 | 657 |
634 if (output_buffer_size < width * height * 3 / 2) { | 658 if (output_buffer_size < width * height * 3 / 2) { |
635 ALOGE << "Insufficient output buffer size: " << output_buffer_size; | 659 ALOGE << "Insufficient output buffer size: " << output_buffer_size; |
636 return false; | 660 return false; |
637 } | 661 } |
638 jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField( | 662 jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField( |
639 jni, *j_media_codec_video_decoder_, j_output_buffers_field_)); | 663 jni, *j_media_codec_video_decoder_, j_output_buffers_field_)); |
640 jobject output_buffer = | 664 jobject output_buffer = |
641 jni->GetObjectArrayElement(output_buffers, output_buffer_index); | 665 jni->GetObjectArrayElement(output_buffers, output_buffer_index); |
642 uint8_t* payload = reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress( | 666 uint8_t* payload = reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress( |
(...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
680 frame_buffer->MutableData(webrtc::kVPlane), | 704 frame_buffer->MutableData(webrtc::kVPlane), |
681 frame_buffer->stride(webrtc::kVPlane), | 705 frame_buffer->stride(webrtc::kVPlane), |
682 width, height); | 706 width, height); |
683 } | 707 } |
684 // Return output byte buffer back to codec. | 708 // Return output byte buffer back to codec. |
685 jni->CallVoidMethod( | 709 jni->CallVoidMethod( |
686 *j_media_codec_video_decoder_, | 710 *j_media_codec_video_decoder_, |
687 j_return_decoded_byte_buffer_method_, | 711 j_return_decoded_byte_buffer_method_, |
688 output_buffer_index); | 712 output_buffer_index); |
689 if (CheckException(jni)) { | 713 if (CheckException(jni)) { |
690 ALOGE << "returnDecodedByteBuffer error"; | 714 ALOGE << "returnDecodedOutputBuffer error"; |
691 return false; | 715 return false; |
692 } | 716 } |
693 } | 717 } |
694 VideoFrame decoded_frame(frame_buffer, 0, 0, webrtc::kVideoRotation_0); | 718 VideoFrame decoded_frame(frame_buffer, 0, 0, webrtc::kVideoRotation_0); |
| 719 decoded_frame.set_timestamp(output_timestamps_ms); |
| 720 decoded_frame.set_ntp_time_ms(output_ntp_timestamps_ms); |
695 | 721 |
696 // Get frame timestamps from a queue. | |
697 if (timestamps_.size() > 0) { | |
698 decoded_frame.set_timestamp(timestamps_.front()); | |
699 timestamps_.erase(timestamps_.begin()); | |
700 } | |
701 if (ntp_times_ms_.size() > 0) { | |
702 decoded_frame.set_ntp_time_ms(ntp_times_ms_.front()); | |
703 ntp_times_ms_.erase(ntp_times_ms_.begin()); | |
704 } | |
705 int64_t frame_decoding_time_ms = 0; | |
706 if (frame_rtc_times_ms_.size() > 0) { | |
707 frame_decoding_time_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front(); | |
708 frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin()); | |
709 } | |
710 if (frames_decoded_ < kMaxDecodedLogFrames) { | 722 if (frames_decoded_ < kMaxDecodedLogFrames) { |
711 ALOGD << "Decoder frame out # " << frames_decoded_ << ". " << width << | 723 ALOGD << "Decoder frame out # " << frames_decoded_ << ". " << width << |
712 " x " << height << ". " << stride << " x " << slice_height << | 724 " x " << height << ". " << stride << " x " << slice_height << |
713 ". Color: " << color_format << ". TS:" << (int)output_timestamps_ms << | 725 ". Color: " << color_format << ". TS:" << (int)output_timestamps_ms << |
714 ". DecTime: " << (int)frame_decoding_time_ms; | 726 ". DecTime: " << (int)decode_time_ms << |
| 727 ". DelayTime: " << (int)frame_delayed_ms; |
715 } | 728 } |
716 | 729 |
717 // Calculate and print decoding statistics - every 3 seconds. | 730 // Calculate and print decoding statistics - every 3 seconds. |
718 frames_decoded_++; | 731 frames_decoded_++; |
719 current_frames_++; | 732 current_frames_++; |
720 current_decoding_time_ms_ += frame_decoding_time_ms; | 733 current_decoding_time_ms_ += decode_time_ms; |
721 int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_; | 734 int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_; |
722 if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs && | 735 if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs && |
723 current_frames_ > 0) { | 736 current_frames_ > 0) { |
724 ALOGD << "Decoded frames: " << frames_decoded_ << ". Bitrate: " << | 737 ALOGD << "Decoded frames: " << frames_decoded_ << ". Received frames: " |
| 738 << frames_received_ << ". Bitrate: " << |
725 (current_bytes_ * 8 / statistic_time_ms) << " kbps, fps: " << | 739 (current_bytes_ * 8 / statistic_time_ms) << " kbps, fps: " << |
726 ((current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms) | 740 ((current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms) |
727 << ". decTime: " << (current_decoding_time_ms_ / current_frames_) << | 741 << ". decTime: " << (current_decoding_time_ms_ / current_frames_) << |
728 " for last " << statistic_time_ms << " ms."; | 742 " for last " << statistic_time_ms << " ms."; |
729 start_time_ms_ = GetCurrentTimeMs(); | 743 start_time_ms_ = GetCurrentTimeMs(); |
730 current_frames_ = 0; | 744 current_frames_ = 0; |
731 current_bytes_ = 0; | 745 current_bytes_ = 0; |
732 current_decoding_time_ms_ = 0; | 746 current_decoding_time_ms_ = 0; |
733 } | 747 } |
734 | 748 |
735   // Callback - output decoded frame. | 749   // |IsZeroSize()| returns true when a frame has been dropped. |
736 const int32_t callback_status = callback_->Decoded(decoded_frame); | 750 if (!decoded_frame.IsZeroSize()) { |
737 if (callback_status > 0) { | 751 // Callback - output decoded frame. |
738 ALOGE << "callback error"; | 752 const int32_t callback_status = |
| 753 callback_->Decoded(decoded_frame, decode_time_ms); |
| 754 if (callback_status > 0) { |
| 755 ALOGE << "callback error"; |
| 756 } |
739 } | 757 } |
740 | |
741 return true; | 758 return true; |
742 } | 759 } |
743 | 760 |
744 int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback( | 761 int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback( |
745 DecodedImageCallback* callback) { | 762 DecodedImageCallback* callback) { |
746 callback_ = callback; | 763 callback_ = callback; |
747 return WEBRTC_VIDEO_CODEC_OK; | 764 return WEBRTC_VIDEO_CODEC_OK; |
748 } | 765 } |
749 | 766 |
750 int32_t MediaCodecVideoDecoder::Reset() { | 767 int32_t MediaCodecVideoDecoder::Reset() { |
(...skipping 120 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
871 } | 888 } |
872 | 889 |
873 void MediaCodecVideoDecoderFactory::DestroyVideoDecoder( | 890 void MediaCodecVideoDecoderFactory::DestroyVideoDecoder( |
874 webrtc::VideoDecoder* decoder) { | 891 webrtc::VideoDecoder* decoder) { |
875 ALOGD << "Destroy video decoder."; | 892 ALOGD << "Destroy video decoder."; |
876 delete decoder; | 893 delete decoder; |
877 } | 894 } |
878 | 895 |
879 } // namespace webrtc_jni | 896 } // namespace webrtc_jni |
880 | 897 |
OLD | NEW |