OLD | NEW |
1 /* | 1 /* |
2 * libjingle | 2 * libjingle |
3 * Copyright 2015 Google Inc. | 3 * Copyright 2015 Google Inc. |
4 * | 4 * |
5 * Redistribution and use in source and binary forms, with or without | 5 * Redistribution and use in source and binary forms, with or without |
6 * modification, are permitted provided that the following conditions are met: | 6 * modification, are permitted provided that the following conditions are met: |
7 * | 7 * |
8 * 1. Redistributions of source code must retain the above copyright notice, | 8 * 1. Redistributions of source code must retain the above copyright notice, |
9 * this list of conditions and the following disclaimer. | 9 * this list of conditions and the following disclaimer. |
10 * 2. Redistributions in binary form must reproduce the above copyright notice, | 10 * 2. Redistributions in binary form must reproduce the above copyright notice, |
(...skipping 15 matching lines...) Expand all Loading... |
26 * | 26 * |
27 */ | 27 */ |
28 | 28 |
29 #include <algorithm> | 29 #include <algorithm> |
30 #include <vector> | 30 #include <vector> |
31 | 31 |
32 #include "talk/app/webrtc/java/jni/androidmediadecoder_jni.h" | 32 #include "talk/app/webrtc/java/jni/androidmediadecoder_jni.h" |
33 #include "talk/app/webrtc/java/jni/androidmediacodeccommon.h" | 33 #include "talk/app/webrtc/java/jni/androidmediacodeccommon.h" |
34 #include "talk/app/webrtc/java/jni/classreferenceholder.h" | 34 #include "talk/app/webrtc/java/jni/classreferenceholder.h" |
35 #include "talk/app/webrtc/java/jni/native_handle_impl.h" | 35 #include "talk/app/webrtc/java/jni/native_handle_impl.h" |
36 #include "talk/app/webrtc/java/jni/surfacetexturehelper_jni.h" | |
37 #include "webrtc/base/bind.h" | 36 #include "webrtc/base/bind.h" |
38 #include "webrtc/base/checks.h" | 37 #include "webrtc/base/checks.h" |
39 #include "webrtc/base/logging.h" | 38 #include "webrtc/base/logging.h" |
40 #include "webrtc/base/scoped_ref_ptr.h" | 39 #include "webrtc/base/scoped_ref_ptr.h" |
41 #include "webrtc/base/thread.h" | 40 #include "webrtc/base/thread.h" |
42 #include "webrtc/base/timeutils.h" | 41 #include "webrtc/base/timeutils.h" |
43 #include "webrtc/common_video/interface/i420_buffer_pool.h" | 42 #include "webrtc/common_video/interface/i420_buffer_pool.h" |
44 #include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h" | 43 #include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h" |
45 #include "webrtc/system_wrappers/include/logcat_trace_context.h" | 44 #include "webrtc/system_wrappers/include/logcat_trace_context.h" |
46 #include "webrtc/system_wrappers/include/tick_util.h" | 45 #include "webrtc/system_wrappers/include/tick_util.h" |
(...skipping 59 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
106 | 105 |
107 // Type of video codec. | 106 // Type of video codec. |
108 VideoCodecType codecType_; | 107 VideoCodecType codecType_; |
109 | 108 |
110 bool key_frame_required_; | 109 bool key_frame_required_; |
111 bool inited_; | 110 bool inited_; |
112 bool sw_fallback_required_; | 111 bool sw_fallback_required_; |
113 bool use_surface_; | 112 bool use_surface_; |
114 VideoCodec codec_; | 113 VideoCodec codec_; |
115 webrtc::I420BufferPool decoded_frame_pool_; | 114 webrtc::I420BufferPool decoded_frame_pool_; |
116 rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_; | 115 NativeHandleImpl native_handle_; |
117 DecodedImageCallback* callback_; | 116 DecodedImageCallback* callback_; |
118 int frames_received_; // Number of frames received by decoder. | 117 int frames_received_; // Number of frames received by decoder. |
119 int frames_decoded_; // Number of frames decoded by decoder. | 118 int frames_decoded_; // Number of frames decoded by decoder. |
120 int64_t start_time_ms_; // Start time for statistics. | 119 int64_t start_time_ms_; // Start time for statistics. |
121 int current_frames_; // Number of frames in the current statistics interval. | 120 int current_frames_; // Number of frames in the current statistics interval. |
122 int current_bytes_; // Encoded bytes in the current statistics interval. | 121 int current_bytes_; // Encoded bytes in the current statistics interval. |
123 int current_decoding_time_ms_; // Overall decoding time in the current second | 122 int current_decoding_time_ms_; // Overall decoding time in the current second |
124 uint32_t max_pending_frames_; // Maximum number of pending input frames | 123 uint32_t max_pending_frames_; // Maximum number of pending input frames |
125 std::vector<int32_t> timestamps_; | 124 std::vector<int32_t> timestamps_; |
126 std::vector<int64_t> ntp_times_ms_; | 125 std::vector<int64_t> ntp_times_ms_; |
| 126 std::vector<int64_t> frame_rtc_times_ms_; // Time when video frame is sent to |
| 127 // decoder input. |
127 | 128 |
128 // State that is constant for the lifetime of this object once the ctor | 129 // State that is constant for the lifetime of this object once the ctor |
129 // returns. | 130 // returns. |
130 scoped_ptr<Thread> codec_thread_; // Thread on which to operate MediaCodec. | 131 scoped_ptr<Thread> codec_thread_; // Thread on which to operate MediaCodec. |
131 ScopedGlobalRef<jclass> j_media_codec_video_decoder_class_; | 132 ScopedGlobalRef<jclass> j_media_codec_video_decoder_class_; |
132 ScopedGlobalRef<jobject> j_media_codec_video_decoder_; | 133 ScopedGlobalRef<jobject> j_media_codec_video_decoder_; |
133 jmethodID j_init_decode_method_; | 134 jmethodID j_init_decode_method_; |
134 jmethodID j_release_method_; | 135 jmethodID j_release_method_; |
135 jmethodID j_dequeue_input_buffer_method_; | 136 jmethodID j_dequeue_input_buffer_method_; |
136 jmethodID j_queue_input_buffer_method_; | 137 jmethodID j_queue_input_buffer_method_; |
137 jmethodID j_dequeue_byte_buffer_method_; | 138 jmethodID j_dequeue_output_buffer_method_; |
138 jmethodID j_dequeue_texture_buffer_method_; | |
139 jmethodID j_return_decoded_byte_buffer_method_; | 139 jmethodID j_return_decoded_byte_buffer_method_; |
140 // MediaCodecVideoDecoder fields. | 140 // MediaCodecVideoDecoder fields. |
141 jfieldID j_input_buffers_field_; | 141 jfieldID j_input_buffers_field_; |
142 jfieldID j_output_buffers_field_; | 142 jfieldID j_output_buffers_field_; |
143 jfieldID j_color_format_field_; | 143 jfieldID j_color_format_field_; |
144 jfieldID j_width_field_; | 144 jfieldID j_width_field_; |
145 jfieldID j_height_field_; | 145 jfieldID j_height_field_; |
146 jfieldID j_stride_field_; | 146 jfieldID j_stride_field_; |
147 jfieldID j_slice_height_field_; | 147 jfieldID j_slice_height_field_; |
| 148 jfieldID j_surface_texture_field_; |
148 // MediaCodecVideoDecoder.DecodedTextureBuffer fields. | 149 // MediaCodecVideoDecoder.DecodedTextureBuffer fields. |
149 jfieldID j_texture_id_field_; | 150 jfieldID j_textureID_field_; |
150 jfieldID j_transform_matrix_field_; | |
151 jfieldID j_texture_presentation_timestamp_us_field_; | 151 jfieldID j_texture_presentation_timestamp_us_field_; |
152 jfieldID j_texture_decode_time_ms_field_; | 152 // MediaCodecVideoDecoder.DecodedByteBuffer fields. |
153 jfieldID j_texture_frame_delay_ms_field_; | |
154 // MediaCodecVideoDecoder.DecodedOutputBuffer fields. | |
155 jfieldID j_info_index_field_; | 153 jfieldID j_info_index_field_; |
156 jfieldID j_info_offset_field_; | 154 jfieldID j_info_offset_field_; |
157 jfieldID j_info_size_field_; | 155 jfieldID j_info_size_field_; |
158 jfieldID j_info_presentation_timestamp_us_field_; | 156 jfieldID j_info_presentation_timestamp_us_field_; |
159 jfieldID j_byte_buffer_decode_time_ms_field_; | |
160 | 157 |
161 // Global references; must be deleted in Release(). | 158 // Global references; must be deleted in Release(). |
162 std::vector<jobject> input_buffers_; | 159 std::vector<jobject> input_buffers_; |
| 160 jobject surface_texture_; |
| 161 jobject previous_surface_texture_; |
163 | 162 |
164 // Render EGL context - owned by factory, should not be allocated/destroyed | 163 // Render EGL context - owned by factory, should not be allocated/destroyed |
165 // by VideoDecoder. | 164 // by VideoDecoder. |
166 jobject render_egl_context_; | 165 jobject render_egl_context_; |
167 }; | 166 }; |
168 | 167 |
169 MediaCodecVideoDecoder::MediaCodecVideoDecoder( | 168 MediaCodecVideoDecoder::MediaCodecVideoDecoder( |
170 JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context) : | 169 JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context) : |
171 codecType_(codecType), | 170 codecType_(codecType), |
172 render_egl_context_(render_egl_context), | 171 render_egl_context_(render_egl_context), |
173 key_frame_required_(true), | 172 key_frame_required_(true), |
174 inited_(false), | 173 inited_(false), |
175 sw_fallback_required_(false), | 174 sw_fallback_required_(false), |
| 175 surface_texture_(NULL), |
| 176 previous_surface_texture_(NULL), |
176 codec_thread_(new Thread()), | 177 codec_thread_(new Thread()), |
177 j_media_codec_video_decoder_class_( | 178 j_media_codec_video_decoder_class_( |
178 jni, | 179 jni, |
179 FindClass(jni, "org/webrtc/MediaCodecVideoDecoder")), | 180 FindClass(jni, "org/webrtc/MediaCodecVideoDecoder")), |
180 j_media_codec_video_decoder_( | 181 j_media_codec_video_decoder_( |
181 jni, | 182 jni, |
182 jni->NewObject(*j_media_codec_video_decoder_class_, | 183 jni->NewObject(*j_media_codec_video_decoder_class_, |
183 GetMethodID(jni, | 184 GetMethodID(jni, |
184 *j_media_codec_video_decoder_class_, | 185 *j_media_codec_video_decoder_class_, |
185 "<init>", | 186 "<init>", |
186 "()V"))) { | 187 "()V"))) { |
187 ScopedLocalRefFrame local_ref_frame(jni); | 188 ScopedLocalRefFrame local_ref_frame(jni); |
188 codec_thread_->SetName("MediaCodecVideoDecoder", NULL); | 189 codec_thread_->SetName("MediaCodecVideoDecoder", NULL); |
189 RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoDecoder"; | 190 RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoDecoder"; |
190 | 191 |
191 j_init_decode_method_ = GetMethodID( | 192 j_init_decode_method_ = GetMethodID( |
192 jni, *j_media_codec_video_decoder_class_, "initDecode", | 193 jni, *j_media_codec_video_decoder_class_, "initDecode", |
193 "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;" | 194 "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;" |
194 "IILorg/webrtc/SurfaceTextureHelper;)Z"); | 195 "IILjavax/microedition/khronos/egl/EGLContext;)Z"); |
195 j_release_method_ = | 196 j_release_method_ = |
196 GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V"); | 197 GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V"); |
197 j_dequeue_input_buffer_method_ = GetMethodID( | 198 j_dequeue_input_buffer_method_ = GetMethodID( |
198 jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I"); | 199 jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I"); |
199 j_queue_input_buffer_method_ = GetMethodID( | 200 j_queue_input_buffer_method_ = GetMethodID( |
200 jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJ)Z"); | 201 jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJ)Z"); |
201 j_dequeue_byte_buffer_method_ = GetMethodID( | 202 j_dequeue_output_buffer_method_ = GetMethodID( |
202 jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer", | 203 jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer", |
203 "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer;"); | 204 "(I)Ljava/lang/Object;"); |
204 j_dequeue_texture_buffer_method_ = GetMethodID( | |
205 jni, *j_media_codec_video_decoder_class_, "dequeueTextureBuffer", | |
206 "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer;"); | |
207 j_return_decoded_byte_buffer_method_ = | 205 j_return_decoded_byte_buffer_method_ = |
208 GetMethodID(jni, *j_media_codec_video_decoder_class_, | 206 GetMethodID(jni, *j_media_codec_video_decoder_class_, |
209 "returnDecodedOutputBuffer", "(I)V"); | 207 "returnDecodedByteBuffer", "(I)V"); |
210 | 208 |
211 j_input_buffers_field_ = GetFieldID( | 209 j_input_buffers_field_ = GetFieldID( |
212 jni, *j_media_codec_video_decoder_class_, | 210 jni, *j_media_codec_video_decoder_class_, |
213 "inputBuffers", "[Ljava/nio/ByteBuffer;"); | 211 "inputBuffers", "[Ljava/nio/ByteBuffer;"); |
214 j_output_buffers_field_ = GetFieldID( | 212 j_output_buffers_field_ = GetFieldID( |
215 jni, *j_media_codec_video_decoder_class_, | 213 jni, *j_media_codec_video_decoder_class_, |
216 "outputBuffers", "[Ljava/nio/ByteBuffer;"); | 214 "outputBuffers", "[Ljava/nio/ByteBuffer;"); |
217 j_color_format_field_ = GetFieldID( | 215 j_color_format_field_ = GetFieldID( |
218 jni, *j_media_codec_video_decoder_class_, "colorFormat", "I"); | 216 jni, *j_media_codec_video_decoder_class_, "colorFormat", "I"); |
219 j_width_field_ = GetFieldID( | 217 j_width_field_ = GetFieldID( |
220 jni, *j_media_codec_video_decoder_class_, "width", "I"); | 218 jni, *j_media_codec_video_decoder_class_, "width", "I"); |
221 j_height_field_ = GetFieldID( | 219 j_height_field_ = GetFieldID( |
222 jni, *j_media_codec_video_decoder_class_, "height", "I"); | 220 jni, *j_media_codec_video_decoder_class_, "height", "I"); |
223 j_stride_field_ = GetFieldID( | 221 j_stride_field_ = GetFieldID( |
224 jni, *j_media_codec_video_decoder_class_, "stride", "I"); | 222 jni, *j_media_codec_video_decoder_class_, "stride", "I"); |
225 j_slice_height_field_ = GetFieldID( | 223 j_slice_height_field_ = GetFieldID( |
226 jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I"); | 224 jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I"); |
| 225 j_surface_texture_field_ = GetFieldID( |
| 226 jni, *j_media_codec_video_decoder_class_, "surfaceTexture", |
| 227 "Landroid/graphics/SurfaceTexture;"); |
227 | 228 |
228 jclass j_decoded_texture_buffer_class = FindClass(jni, | 229 jclass j_decoder_decoded_texture_buffer_class = FindClass(jni, |
229 "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer"); | 230 "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer"); |
230 j_texture_id_field_ = GetFieldID( | 231 j_textureID_field_ = GetFieldID( |
231 jni, j_decoded_texture_buffer_class, "textureID", "I"); | 232 jni, j_decoder_decoded_texture_buffer_class, "textureID", "I"); |
232 j_transform_matrix_field_ = GetFieldID( | 233 j_texture_presentation_timestamp_us_field_ = |
233 jni, j_decoded_texture_buffer_class, "transformMatrix", "[F"); | 234 GetFieldID(jni, j_decoder_decoded_texture_buffer_class, |
234 j_texture_presentation_timestamp_us_field_ = GetFieldID( | 235 "presentationTimestampUs", "J"); |
235 jni, j_decoded_texture_buffer_class, "presentationTimestampUs", "J"); | |
236 j_texture_decode_time_ms_field_ = GetFieldID( | |
237 jni, j_decoded_texture_buffer_class, "decodeTimeMs", "J"); | |
238 j_texture_frame_delay_ms_field_ = GetFieldID( | |
239 jni, j_decoded_texture_buffer_class, "frameDelayMs", "J"); | |
240 | 236 |
241 jclass j_decoded_output_buffer_class = FindClass(jni, | 237 jclass j_decoder_decoded_byte_buffer_class = FindClass(jni, |
242 "org/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer"); | 238 "org/webrtc/MediaCodecVideoDecoder$DecodedByteBuffer"); |
243 j_info_index_field_ = GetFieldID( | 239 j_info_index_field_ = GetFieldID( |
244 jni, j_decoded_output_buffer_class, "index", "I"); | 240 jni, j_decoder_decoded_byte_buffer_class, "index", "I"); |
245 j_info_offset_field_ = GetFieldID( | 241 j_info_offset_field_ = GetFieldID( |
246 jni, j_decoded_output_buffer_class, "offset", "I"); | 242 jni, j_decoder_decoded_byte_buffer_class, "offset", "I"); |
247 j_info_size_field_ = GetFieldID( | 243 j_info_size_field_ = GetFieldID( |
248 jni, j_decoded_output_buffer_class, "size", "I"); | 244 jni, j_decoder_decoded_byte_buffer_class, "size", "I"); |
249 j_info_presentation_timestamp_us_field_ = GetFieldID( | 245 j_info_presentation_timestamp_us_field_ = GetFieldID( |
250 jni, j_decoded_output_buffer_class, "presentationTimestampUs", "J"); | 246 jni, j_decoder_decoded_byte_buffer_class, "presentationTimestampUs", "J"); |
251 j_byte_buffer_decode_time_ms_field_ = GetFieldID( | |
252 jni, j_decoded_output_buffer_class, "decodeTimeMs", "J"); | |
253 | 247 |
254 CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed"; | 248 CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed"; |
255 use_surface_ = (render_egl_context_ != NULL); | 249 use_surface_ = (render_egl_context_ != NULL); |
256 ALOGD << "MediaCodecVideoDecoder ctor. Use surface: " << use_surface_; | 250 ALOGD << "MediaCodecVideoDecoder ctor. Use surface: " << use_surface_; |
257 memset(&codec_, 0, sizeof(codec_)); | 251 memset(&codec_, 0, sizeof(codec_)); |
258 AllowBlockingCalls(); | 252 AllowBlockingCalls(); |
259 } | 253 } |
260 | 254 |
261 MediaCodecVideoDecoder::~MediaCodecVideoDecoder() { | 255 MediaCodecVideoDecoder::~MediaCodecVideoDecoder() { |
262 // Call Release() to ensure no more callbacks to us after we are deleted. | 256 // Call Release() to ensure no more callbacks to us after we are deleted. |
263 Release(); | 257 Release(); |
| 258 // Delete global references. |
| 259 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 260 if (previous_surface_texture_ != NULL) { |
| 261 jni->DeleteGlobalRef(previous_surface_texture_); |
| 262 } |
| 263 if (surface_texture_ != NULL) { |
| 264 jni->DeleteGlobalRef(surface_texture_); |
| 265 } |
264 } | 266 } |
265 | 267 |
266 int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst, | 268 int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst, |
267 int32_t numberOfCores) { | 269 int32_t numberOfCores) { |
268 ALOGD << "InitDecode."; | 270 ALOGD << "InitDecode."; |
269 if (inst == NULL) { | 271 if (inst == NULL) { |
270 ALOGE << "NULL VideoCodec instance"; | 272 ALOGE << "NULL VideoCodec instance"; |
271 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 273 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
272 } | 274 } |
273 // Factory should guard against other codecs being used with us. | 275 // Factory should guard against other codecs being used with us. |
(...skipping 30 matching lines...) Expand all Loading... |
304 ALOGE << "Release failure: " << ret_val << " - fallback to SW codec"; | 306 ALOGE << "Release failure: " << ret_val << " - fallback to SW codec"; |
305 sw_fallback_required_ = true; | 307 sw_fallback_required_ = true; |
306 return WEBRTC_VIDEO_CODEC_ERROR; | 308 return WEBRTC_VIDEO_CODEC_ERROR; |
307 } | 309 } |
308 | 310 |
309 // Always start with a complete key frame. | 311 // Always start with a complete key frame. |
310 key_frame_required_ = true; | 312 key_frame_required_ = true; |
311 frames_received_ = 0; | 313 frames_received_ = 0; |
312 frames_decoded_ = 0; | 314 frames_decoded_ = 0; |
313 | 315 |
314 if (use_surface_) { | |
315 surface_texture_helper_ = new rtc::RefCountedObject<SurfaceTextureHelper>( | |
316 jni, render_egl_context_); | |
317 } | |
318 | |
319 jobject j_video_codec_enum = JavaEnumFromIndex( | 316 jobject j_video_codec_enum = JavaEnumFromIndex( |
320 jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_); | 317 jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_); |
321 bool success = jni->CallBooleanMethod( | 318 bool success = jni->CallBooleanMethod( |
322 *j_media_codec_video_decoder_, | 319 *j_media_codec_video_decoder_, |
323 j_init_decode_method_, | 320 j_init_decode_method_, |
324 j_video_codec_enum, | 321 j_video_codec_enum, |
325 codec_.width, | 322 codec_.width, |
326 codec_.height, | 323 codec_.height, |
327 use_surface_ ? surface_texture_helper_->GetJavaSurfaceTextureHelper() | 324 use_surface_ ? render_egl_context_ : nullptr); |
328 : nullptr); | |
329 if (CheckException(jni) || !success) { | 325 if (CheckException(jni) || !success) { |
330 ALOGE << "Codec initialization error - fallback to SW codec."; | 326 ALOGE << "Codec initialization error - fallback to SW codec."; |
331 sw_fallback_required_ = true; | 327 sw_fallback_required_ = true; |
332 return WEBRTC_VIDEO_CODEC_ERROR; | 328 return WEBRTC_VIDEO_CODEC_ERROR; |
333 } | 329 } |
334 inited_ = true; | 330 inited_ = true; |
335 | 331 |
336 switch (codecType_) { | 332 switch (codecType_) { |
337 case kVideoCodecVP8: | 333 case kVideoCodecVP8: |
338 max_pending_frames_ = kMaxPendingFramesVp8; | 334 max_pending_frames_ = kMaxPendingFramesVp8; |
339 break; | 335 break; |
340 case kVideoCodecVP9: | 336 case kVideoCodecVP9: |
341 max_pending_frames_ = kMaxPendingFramesVp9; | 337 max_pending_frames_ = kMaxPendingFramesVp9; |
342 break; | 338 break; |
343 case kVideoCodecH264: | 339 case kVideoCodecH264: |
344 max_pending_frames_ = kMaxPendingFramesH264; | 340 max_pending_frames_ = kMaxPendingFramesH264; |
345 break; | 341 break; |
346 default: | 342 default: |
347 max_pending_frames_ = 0; | 343 max_pending_frames_ = 0; |
348 } | 344 } |
349 start_time_ms_ = GetCurrentTimeMs(); | 345 start_time_ms_ = GetCurrentTimeMs(); |
350 current_frames_ = 0; | 346 current_frames_ = 0; |
351 current_bytes_ = 0; | 347 current_bytes_ = 0; |
352 current_decoding_time_ms_ = 0; | 348 current_decoding_time_ms_ = 0; |
353 timestamps_.clear(); | 349 timestamps_.clear(); |
354 ntp_times_ms_.clear(); | 350 ntp_times_ms_.clear(); |
| 351 frame_rtc_times_ms_.clear(); |
355 | 352 |
356 jobjectArray input_buffers = (jobjectArray)GetObjectField( | 353 jobjectArray input_buffers = (jobjectArray)GetObjectField( |
357 jni, *j_media_codec_video_decoder_, j_input_buffers_field_); | 354 jni, *j_media_codec_video_decoder_, j_input_buffers_field_); |
358 size_t num_input_buffers = jni->GetArrayLength(input_buffers); | 355 size_t num_input_buffers = jni->GetArrayLength(input_buffers); |
359 ALOGD << "Maximum amount of pending frames: " << max_pending_frames_; | 356 ALOGD << "Maximum amount of pending frames: " << max_pending_frames_; |
360 input_buffers_.resize(num_input_buffers); | 357 input_buffers_.resize(num_input_buffers); |
361 for (size_t i = 0; i < num_input_buffers; ++i) { | 358 for (size_t i = 0; i < num_input_buffers; ++i) { |
362 input_buffers_[i] = | 359 input_buffers_[i] = |
363 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i)); | 360 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i)); |
364 if (CheckException(jni)) { | 361 if (CheckException(jni)) { |
365 ALOGE << "NewGlobalRef error - fallback to SW codec."; | 362 ALOGE << "NewGlobalRef error - fallback to SW codec."; |
366 sw_fallback_required_ = true; | 363 sw_fallback_required_ = true; |
367 return WEBRTC_VIDEO_CODEC_ERROR; | 364 return WEBRTC_VIDEO_CODEC_ERROR; |
368 } | 365 } |
369 } | 366 } |
370 | 367 |
| 368 if (use_surface_) { |
| 369 jobject surface_texture = GetObjectField( |
| 370 jni, *j_media_codec_video_decoder_, j_surface_texture_field_); |
| 371 if (previous_surface_texture_ != NULL) { |
| 372 jni->DeleteGlobalRef(previous_surface_texture_); |
| 373 } |
| 374 previous_surface_texture_ = surface_texture_; |
| 375 surface_texture_ = jni->NewGlobalRef(surface_texture); |
| 376 } |
371 codec_thread_->PostDelayed(kMediaCodecPollMs, this); | 377 codec_thread_->PostDelayed(kMediaCodecPollMs, this); |
372 | 378 |
373 return WEBRTC_VIDEO_CODEC_OK; | 379 return WEBRTC_VIDEO_CODEC_OK; |
374 } | 380 } |
375 | 381 |
376 int32_t MediaCodecVideoDecoder::Release() { | 382 int32_t MediaCodecVideoDecoder::Release() { |
377 ALOGD << "DecoderRelease request"; | 383 ALOGD << "DecoderRelease request"; |
378 return codec_thread_->Invoke<int32_t>( | 384 return codec_thread_->Invoke<int32_t>( |
379 Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this)); | 385 Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this)); |
380 } | 386 } |
381 | 387 |
382 int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() { | 388 int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() { |
383 if (!inited_) { | 389 if (!inited_) { |
384 return WEBRTC_VIDEO_CODEC_OK; | 390 return WEBRTC_VIDEO_CODEC_OK; |
385 } | 391 } |
386 CheckOnCodecThread(); | 392 CheckOnCodecThread(); |
387 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 393 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
388 ALOGD << "DecoderReleaseOnCodecThread: Frames received: " << | 394 ALOGD << "DecoderReleaseOnCodecThread: Frames received: " << |
389 frames_received_ << ". Frames decoded: " << frames_decoded_; | 395 frames_received_ << ". Frames decoded: " << frames_decoded_; |
390 ScopedLocalRefFrame local_ref_frame(jni); | 396 ScopedLocalRefFrame local_ref_frame(jni); |
391 for (size_t i = 0; i < input_buffers_.size(); i++) { | 397 for (size_t i = 0; i < input_buffers_.size(); i++) { |
392 jni->DeleteGlobalRef(input_buffers_[i]); | 398 jni->DeleteGlobalRef(input_buffers_[i]); |
393 } | 399 } |
394 input_buffers_.clear(); | 400 input_buffers_.clear(); |
395 jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_); | 401 jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_); |
396 surface_texture_helper_ = nullptr; | |
397 inited_ = false; | 402 inited_ = false; |
398 rtc::MessageQueueManager::Clear(this); | 403 rtc::MessageQueueManager::Clear(this); |
399 if (CheckException(jni)) { | 404 if (CheckException(jni)) { |
400 ALOGE << "Decoder release exception"; | 405 ALOGE << "Decoder release exception"; |
401 return WEBRTC_VIDEO_CODEC_ERROR; | 406 return WEBRTC_VIDEO_CODEC_ERROR; |
402 } | 407 } |
403 ALOGD << "DecoderReleaseOnCodecThread done"; | 408 ALOGD << "DecoderReleaseOnCodecThread done"; |
404 return WEBRTC_VIDEO_CODEC_OK; | 409 return WEBRTC_VIDEO_CODEC_OK; |
405 } | 410 } |
406 | 411 |
(...skipping 89 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
496 const EncodedImage& inputImage) { | 501 const EncodedImage& inputImage) { |
497 CheckOnCodecThread(); | 502 CheckOnCodecThread(); |
498 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 503 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
499 ScopedLocalRefFrame local_ref_frame(jni); | 504 ScopedLocalRefFrame local_ref_frame(jni); |
500 | 505 |
501 // Try to drain the decoder and wait until output is not too | 506 // Try to drain the decoder and wait until output is not too |
502 // much behind the input. | 507 // much behind the input. |
503 if (frames_received_ > frames_decoded_ + max_pending_frames_) { | 508 if (frames_received_ > frames_decoded_ + max_pending_frames_) { |
504 ALOGV("Received: %d. Decoded: %d. Wait for output...", | 509 ALOGV("Received: %d. Decoded: %d. Wait for output...", |
505 frames_received_, frames_decoded_); | 510 frames_received_, frames_decoded_); |
506 if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs)) { | 511 if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs * 1000)) { |
507 ALOGE << "DeliverPendingOutputs error. Frames received: " << | 512 ALOGE << "DeliverPendingOutputs error. Frames received: " << |
508 frames_received_ << ". Frames decoded: " << frames_decoded_; | 513 frames_received_ << ". Frames decoded: " << frames_decoded_; |
509 return ProcessHWErrorOnCodecThread(); | 514 return ProcessHWErrorOnCodecThread(); |
510 } | 515 } |
511 if (frames_received_ > frames_decoded_ + max_pending_frames_) { | 516 if (frames_received_ > frames_decoded_ + max_pending_frames_) { |
512 ALOGE << "Output buffer dequeue timeout. Frames received: " << | 517 ALOGE << "Output buffer dequeue timeout. Frames received: " << |
513 frames_received_ << ". Frames decoded: " << frames_decoded_; | 518 frames_received_ << ". Frames decoded: " << frames_decoded_; |
514 return ProcessHWErrorOnCodecThread(); | 519 return ProcessHWErrorOnCodecThread(); |
515 } | 520 } |
516 } | 521 } |
(...skipping 24 matching lines...) Expand all Loading... |
541 j_input_buffer_index << ". TS: " << (int)(timestamp_us / 1000) | 546 j_input_buffer_index << ". TS: " << (int)(timestamp_us / 1000) |
542 << ". Size: " << inputImage._length; | 547 << ". Size: " << inputImage._length; |
543 } | 548 } |
544 memcpy(buffer, inputImage._buffer, inputImage._length); | 549 memcpy(buffer, inputImage._buffer, inputImage._length); |
545 | 550 |
546 // Save input image timestamps for later output. | 551 // Save input image timestamps for later output. |
547 frames_received_++; | 552 frames_received_++; |
548 current_bytes_ += inputImage._length; | 553 current_bytes_ += inputImage._length; |
549 timestamps_.push_back(inputImage._timeStamp); | 554 timestamps_.push_back(inputImage._timeStamp); |
550 ntp_times_ms_.push_back(inputImage.ntp_time_ms_); | 555 ntp_times_ms_.push_back(inputImage.ntp_time_ms_); |
| 556 frame_rtc_times_ms_.push_back(GetCurrentTimeMs()); |
551 | 557 |
552 // Feed input to decoder. | 558 // Feed input to decoder. |
553 bool success = jni->CallBooleanMethod(*j_media_codec_video_decoder_, | 559 bool success = jni->CallBooleanMethod(*j_media_codec_video_decoder_, |
554 j_queue_input_buffer_method_, | 560 j_queue_input_buffer_method_, |
555 j_input_buffer_index, | 561 j_input_buffer_index, |
556 inputImage._length, | 562 inputImage._length, |
557 timestamp_us); | 563 timestamp_us); |
558 if (CheckException(jni) || !success) { | 564 if (CheckException(jni) || !success) { |
559 ALOGE << "queueInputBuffer error"; | 565 ALOGE << "queueInputBuffer error"; |
560 return ProcessHWErrorOnCodecThread(); | 566 return ProcessHWErrorOnCodecThread(); |
561 } | 567 } |
562 | 568 |
563 // Try to drain the decoder | 569 // Try to drain the decoder |
564 if (!DeliverPendingOutputs(jni, 0)) { | 570 if (!DeliverPendingOutputs(jni, 0)) { |
565 ALOGE << "DeliverPendingOutputs error"; | 571 ALOGE << "DeliverPendingOutputs error"; |
566 return ProcessHWErrorOnCodecThread(); | 572 return ProcessHWErrorOnCodecThread(); |
567 } | 573 } |
568 | 574 |
569 return WEBRTC_VIDEO_CODEC_OK; | 575 return WEBRTC_VIDEO_CODEC_OK; |
570 } | 576 } |
571 | 577 |
572 bool MediaCodecVideoDecoder::DeliverPendingOutputs( | 578 bool MediaCodecVideoDecoder::DeliverPendingOutputs( |
573 JNIEnv* jni, int dequeue_timeout_ms) { | 579 JNIEnv* jni, int dequeue_timeout_us) { |
574 if (frames_received_ <= frames_decoded_) { | 580 if (frames_received_ <= frames_decoded_) { |
575 // No need to query for output buffers - decoder is drained. | 581 // No need to query for output buffers - decoder is drained. |
576 return true; | 582 return true; |
577 } | 583 } |
578 // Get decoder output. | 584 // Get decoder output. |
579 jobject j_decoder_output_buffer = | 585 jobject j_decoder_output_buffer = jni->CallObjectMethod( |
580 jni->CallObjectMethod(*j_media_codec_video_decoder_, | 586 *j_media_codec_video_decoder_, |
581 use_surface_ ? j_dequeue_texture_buffer_method_ | 587 j_dequeue_output_buffer_method_, |
582 : j_dequeue_byte_buffer_method_, | 588 dequeue_timeout_us); |
583 dequeue_timeout_ms); | |
584 | |
585 if (CheckException(jni)) { | 589 if (CheckException(jni)) { |
586 ALOGE << "dequeueOutputBuffer() error"; | 590 ALOGE << "dequeueOutputBuffer() error"; |
587 return false; | 591 return false; |
588 } | 592 } |
589 if (IsNull(jni, j_decoder_output_buffer)) { | 593 if (IsNull(jni, j_decoder_output_buffer)) { |
590 // No decoded frame ready. | 594 // No decoded frame ready. |
591 return true; | 595 return true; |
592 } | 596 } |
593 | 597 |
594 // Get decoded video frame properties. | 598 // Get decoded video frame properties. |
595 int color_format = GetIntField(jni, *j_media_codec_video_decoder_, | 599 int color_format = GetIntField(jni, *j_media_codec_video_decoder_, |
596 j_color_format_field_); | 600 j_color_format_field_); |
597 int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_); | 601 int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_); |
598 int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_); | 602 int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_); |
599 int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_); | 603 int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_); |
600 int slice_height = GetIntField(jni, *j_media_codec_video_decoder_, | 604 int slice_height = GetIntField(jni, *j_media_codec_video_decoder_, |
601 j_slice_height_field_); | 605 j_slice_height_field_); |
602 | 606 |
603 rtc::scoped_refptr<webrtc::VideoFrameBuffer> frame_buffer; | 607 rtc::scoped_refptr<webrtc::VideoFrameBuffer> frame_buffer; |
604 int64_t output_timestamps_ms = 0; | 608 long output_timestamps_ms = 0; |
605 int decode_time_ms = 0; | |
606 int64_t frame_delayed_ms = 0; | |
607 if (use_surface_) { | 609 if (use_surface_) { |
608 // Extract data from Java DecodedTextureBuffer. | 610 // Extract data from Java DecodedTextureBuffer. |
609 const int texture_id = | 611 const int texture_id = |
610 GetIntField(jni, j_decoder_output_buffer, j_texture_id_field_); | 612 GetIntField(jni, j_decoder_output_buffer, j_textureID_field_); |
611 if (texture_id != 0) { // |texture_id| == 0 represents a dropped frame. | 613 const int64_t timestamp_us = |
612 const jfloatArray j_transform_matrix = | 614 GetLongField(jni, j_decoder_output_buffer, |
613 reinterpret_cast<jfloatArray>(GetObjectField( | 615 j_texture_presentation_timestamp_us_field_); |
614 jni, j_decoder_output_buffer, j_transform_matrix_field_)); | 616 output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec; |
615 const int64_t timestamp_us = | 617 // Create webrtc::VideoFrameBuffer with native texture handle. |
616 GetLongField(jni, j_decoder_output_buffer, | 618 native_handle_.SetTextureObject(surface_texture_, texture_id); |
617 j_texture_presentation_timestamp_us_field_); | 619 frame_buffer = new rtc::RefCountedObject<JniNativeHandleBuffer>( |
618 output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec; | 620 &native_handle_, width, height); |
619 decode_time_ms = GetLongField(jni, j_decoder_output_buffer, | |
620 j_texture_decode_time_ms_field_); | |
621 frame_delayed_ms = GetLongField(jni, j_decoder_output_buffer, | |
622 j_texture_frame_delay_ms_field_); | |
623 | |
624 // Create webrtc::VideoFrameBuffer with native texture handle. | |
625 frame_buffer = surface_texture_helper_->CreateTextureFrame( | |
626 width, height, NativeHandleImpl(jni, texture_id, j_transform_matrix)); | |
627 } | |
628 } else { | 621 } else { |
629 // Extract data from Java ByteBuffer and create output yuv420 frame - | 622 // Extract data from Java ByteBuffer and create output yuv420 frame - |
630 // for non surface decoding only. | 623 // for non surface decoding only. |
631 const int output_buffer_index = | 624 const int output_buffer_index = |
632 GetIntField(jni, j_decoder_output_buffer, j_info_index_field_); | 625 GetIntField(jni, j_decoder_output_buffer, j_info_index_field_); |
633 const int output_buffer_offset = | 626 const int output_buffer_offset = |
634 GetIntField(jni, j_decoder_output_buffer, j_info_offset_field_); | 627 GetIntField(jni, j_decoder_output_buffer, j_info_offset_field_); |
635 const int output_buffer_size = | 628 const int output_buffer_size = |
636 GetIntField(jni, j_decoder_output_buffer, j_info_size_field_); | 629 GetIntField(jni, j_decoder_output_buffer, j_info_size_field_); |
637 const int64_t timestamp_us = GetLongField( | 630 const int64_t timestamp_us = GetLongField( |
638 jni, j_decoder_output_buffer, j_info_presentation_timestamp_us_field_); | 631 jni, j_decoder_output_buffer, j_info_presentation_timestamp_us_field_); |
639 output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec; | 632 output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec; |
640 decode_time_ms = GetLongField(jni, j_decoder_output_buffer, | |
641 j_byte_buffer_decode_time_ms_field_); | |
642 | 633 |
643 if (output_buffer_size < width * height * 3 / 2) { | 634 if (output_buffer_size < width * height * 3 / 2) { |
644 ALOGE << "Insufficient output buffer size: " << output_buffer_size; | 635 ALOGE << "Insufficient output buffer size: " << output_buffer_size; |
645 return false; | 636 return false; |
646 } | 637 } |
647 jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField( | 638 jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField( |
648 jni, *j_media_codec_video_decoder_, j_output_buffers_field_)); | 639 jni, *j_media_codec_video_decoder_, j_output_buffers_field_)); |
649 jobject output_buffer = | 640 jobject output_buffer = |
650 jni->GetObjectArrayElement(output_buffers, output_buffer_index); | 641 jni->GetObjectArrayElement(output_buffers, output_buffer_index); |
651 uint8_t* payload = reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress( | 642 uint8_t* payload = reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress( |
(...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
689 frame_buffer->MutableData(webrtc::kVPlane), | 680 frame_buffer->MutableData(webrtc::kVPlane), |
690 frame_buffer->stride(webrtc::kVPlane), | 681 frame_buffer->stride(webrtc::kVPlane), |
691 width, height); | 682 width, height); |
692 } | 683 } |
693 // Return output byte buffer back to codec. | 684 // Return output byte buffer back to codec. |
694 jni->CallVoidMethod( | 685 jni->CallVoidMethod( |
695 *j_media_codec_video_decoder_, | 686 *j_media_codec_video_decoder_, |
696 j_return_decoded_byte_buffer_method_, | 687 j_return_decoded_byte_buffer_method_, |
697 output_buffer_index); | 688 output_buffer_index); |
698 if (CheckException(jni)) { | 689 if (CheckException(jni)) { |
699 ALOGE << "returnDecodedOutputBuffer error"; | 690 ALOGE << "returnDecodedByteBuffer error"; |
700 return false; | 691 return false; |
701 } | 692 } |
702 } | 693 } |
703 VideoFrame decoded_frame(frame_buffer, 0, 0, webrtc::kVideoRotation_0); | 694 VideoFrame decoded_frame(frame_buffer, 0, 0, webrtc::kVideoRotation_0); |
704 | 695 |
705 // Get frame timestamps from a queue. | 696 // Get frame timestamps from a queue. |
706 if (timestamps_.size() > 0) { | 697 if (timestamps_.size() > 0) { |
707 decoded_frame.set_timestamp(timestamps_.front()); | 698 decoded_frame.set_timestamp(timestamps_.front()); |
708 timestamps_.erase(timestamps_.begin()); | 699 timestamps_.erase(timestamps_.begin()); |
709 } | 700 } |
710 if (ntp_times_ms_.size() > 0) { | 701 if (ntp_times_ms_.size() > 0) { |
711 decoded_frame.set_ntp_time_ms(ntp_times_ms_.front()); | 702 decoded_frame.set_ntp_time_ms(ntp_times_ms_.front()); |
712 ntp_times_ms_.erase(ntp_times_ms_.begin()); | 703 ntp_times_ms_.erase(ntp_times_ms_.begin()); |
713 } | 704 } |
714 | 705 int64_t frame_decoding_time_ms = 0; |
| 706 if (frame_rtc_times_ms_.size() > 0) { |
| 707 frame_decoding_time_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front(); |
| 708 frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin()); |
| 709 } |
715 if (frames_decoded_ < kMaxDecodedLogFrames) { | 710 if (frames_decoded_ < kMaxDecodedLogFrames) { |
716 ALOGD << "Decoder frame out # " << frames_decoded_ << ". " << width << | 711 ALOGD << "Decoder frame out # " << frames_decoded_ << ". " << width << |
717 " x " << height << ". " << stride << " x " << slice_height << | 712 " x " << height << ". " << stride << " x " << slice_height << |
718 ". Color: " << color_format << ". TS:" << (int)output_timestamps_ms << | 713 ". Color: " << color_format << ". TS:" << (int)output_timestamps_ms << |
719 ". DecTime: " << (int)decode_time_ms << | 714 ". DecTime: " << (int)frame_decoding_time_ms; |
720 ". DelayTime: " << (int)frame_delayed_ms; | |
721 } | 715 } |
722 | 716 |
723 // Calculate and print decoding statistics - every 3 seconds. | 717 // Calculate and print decoding statistics - every 3 seconds. |
724 frames_decoded_++; | 718 frames_decoded_++; |
725 current_frames_++; | 719 current_frames_++; |
726 current_decoding_time_ms_ += decode_time_ms; | 720 current_decoding_time_ms_ += frame_decoding_time_ms; |
727 int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_; | 721 int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_; |
728 if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs && | 722 if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs && |
729 current_frames_ > 0) { | 723 current_frames_ > 0) { |
730 ALOGD << "Decoded frames: " << frames_decoded_ << ". Received frames: " | 724 ALOGD << "Decoded frames: " << frames_decoded_ << ". Bitrate: " << |
731 << frames_received_ << ". Bitrate: " << | |
732 (current_bytes_ * 8 / statistic_time_ms) << " kbps, fps: " << | 725 (current_bytes_ * 8 / statistic_time_ms) << " kbps, fps: " << |
733 ((current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms) | 726 ((current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms) |
734 << ". decTime: " << (current_decoding_time_ms_ / current_frames_) << | 727 << ". decTime: " << (current_decoding_time_ms_ / current_frames_) << |
735 " for last " << statistic_time_ms << " ms."; | 728 " for last " << statistic_time_ms << " ms."; |
736 start_time_ms_ = GetCurrentTimeMs(); | 729 start_time_ms_ = GetCurrentTimeMs(); |
737 current_frames_ = 0; | 730 current_frames_ = 0; |
738 current_bytes_ = 0; | 731 current_bytes_ = 0; |
739 current_decoding_time_ms_ = 0; | 732 current_decoding_time_ms_ = 0; |
740 } | 733 } |
741 | 734 |
742   // |IsZeroSize()| returns true when a frame has been dropped. | 735   // Callback - output decoded frame. |
743 if (!decoded_frame.IsZeroSize()) { | 736 const int32_t callback_status = callback_->Decoded(decoded_frame); |
744 // Callback - output decoded frame. | 737 if (callback_status > 0) { |
745 const int32_t callback_status = | 738 ALOGE << "callback error"; |
746 callback_->Decoded(decoded_frame, decode_time_ms); | |
747 if (callback_status > 0) { | |
748 ALOGE << "callback error"; | |
749 } | |
750 } | 739 } |
| 740 |
751 return true; | 741 return true; |
752 } | 742 } |
753 | 743 |
754 int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback( | 744 int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback( |
755 DecodedImageCallback* callback) { | 745 DecodedImageCallback* callback) { |
756 callback_ = callback; | 746 callback_ = callback; |
757 return WEBRTC_VIDEO_CODEC_OK; | 747 return WEBRTC_VIDEO_CODEC_OK; |
758 } | 748 } |
759 | 749 |
760 int32_t MediaCodecVideoDecoder::Reset() { | 750 int32_t MediaCodecVideoDecoder::Reset() { |
(...skipping 120 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
881 } | 871 } |
882 | 872 |
883 void MediaCodecVideoDecoderFactory::DestroyVideoDecoder( | 873 void MediaCodecVideoDecoderFactory::DestroyVideoDecoder( |
884 webrtc::VideoDecoder* decoder) { | 874 webrtc::VideoDecoder* decoder) { |
885 ALOGD << "Destroy video decoder."; | 875 ALOGD << "Destroy video decoder."; |
886 delete decoder; | 876 delete decoder; |
887 } | 877 } |
888 | 878 |
889 } // namespace webrtc_jni | 879 } // namespace webrtc_jni |
890 | 880 |
OLD | NEW |