| OLD | NEW |
| 1 /* | 1 /* |
| 2 * libjingle | 2 * libjingle |
| 3 * Copyright 2015 Google Inc. | 3 * Copyright 2015 Google Inc. |
| 4 * | 4 * |
| 5 * Redistribution and use in source and binary forms, with or without | 5 * Redistribution and use in source and binary forms, with or without |
| 6 * modification, are permitted provided that the following conditions are met: | 6 * modification, are permitted provided that the following conditions are met: |
| 7 * | 7 * |
| 8 * 1. Redistributions of source code must retain the above copyright notice, | 8 * 1. Redistributions of source code must retain the above copyright notice, |
| 9 * this list of conditions and the following disclaimer. | 9 * this list of conditions and the following disclaimer. |
| 10 * 2. Redistributions in binary form must reproduce the above copyright notice, | 10 * 2. Redistributions in binary form must reproduce the above copyright notice, |
| (...skipping 14 matching lines...) |
| 25 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 25 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 26 * | 26 * |
| 27 */ | 27 */ |
| 28 | 28 |
| 29 #include <vector> | 29 #include <vector> |
| 30 | 30 |
| 31 #include "talk/app/webrtc/java/jni/androidmediadecoder_jni.h" | 31 #include "talk/app/webrtc/java/jni/androidmediadecoder_jni.h" |
| 32 #include "talk/app/webrtc/java/jni/androidmediacodeccommon.h" | 32 #include "talk/app/webrtc/java/jni/androidmediacodeccommon.h" |
| 33 #include "talk/app/webrtc/java/jni/classreferenceholder.h" | 33 #include "talk/app/webrtc/java/jni/classreferenceholder.h" |
| 34 #include "talk/app/webrtc/java/jni/native_handle_impl.h" | 34 #include "talk/app/webrtc/java/jni/native_handle_impl.h" |
| 35 #include "talk/app/webrtc/java/jni/surfacetexturehelper_jni.h" |
| 35 #include "webrtc/base/bind.h" | 36 #include "webrtc/base/bind.h" |
| 36 #include "webrtc/base/checks.h" | 37 #include "webrtc/base/checks.h" |
| 37 #include "webrtc/base/logging.h" | 38 #include "webrtc/base/logging.h" |
| 38 #include "webrtc/base/scoped_ref_ptr.h" | 39 #include "webrtc/base/scoped_ref_ptr.h" |
| 39 #include "webrtc/base/thread.h" | 40 #include "webrtc/base/thread.h" |
| 40 #include "webrtc/base/timeutils.h" | 41 #include "webrtc/base/timeutils.h" |
| 41 #include "webrtc/common_video/interface/i420_buffer_pool.h" | 42 #include "webrtc/common_video/interface/i420_buffer_pool.h" |
| 42 #include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h" | 43 #include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h" |
| 43 #include "webrtc/system_wrappers/interface/logcat_trace_context.h" | 44 #include "webrtc/system_wrappers/interface/logcat_trace_context.h" |
| 44 #include "webrtc/system_wrappers/interface/tick_util.h" | 45 #include "webrtc/system_wrappers/interface/tick_util.h" |
| (...skipping 58 matching lines...) |
| 103 | 104 |
| 104 // Type of video codec. | 105 // Type of video codec. |
| 105 VideoCodecType codecType_; | 106 VideoCodecType codecType_; |
| 106 | 107 |
| 107 bool key_frame_required_; | 108 bool key_frame_required_; |
| 108 bool inited_; | 109 bool inited_; |
| 109 bool sw_fallback_required_; | 110 bool sw_fallback_required_; |
| 110 bool use_surface_; | 111 bool use_surface_; |
| 111 VideoCodec codec_; | 112 VideoCodec codec_; |
| 112 webrtc::I420BufferPool decoded_frame_pool_; | 113 webrtc::I420BufferPool decoded_frame_pool_; |
| 113 NativeHandleImpl native_handle_; | 114 rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_; |
| 114 DecodedImageCallback* callback_; | 115 DecodedImageCallback* callback_; |
| 115 int frames_received_; // Number of frames received by decoder. | 116 int frames_received_; // Number of frames received by decoder. |
| 116 int frames_decoded_; // Number of frames decoded by decoder. | 117 int frames_decoded_; // Number of frames decoded by decoder. |
| 117 int64_t start_time_ms_; // Start time for statistics. | 118 int64_t start_time_ms_; // Start time for statistics. |
| 118 int current_frames_; // Number of frames in the current statistics interval. | 119 int current_frames_; // Number of frames in the current statistics interval. |
| 119 int current_bytes_; // Encoded bytes in the current statistics interval. | 120 int current_bytes_; // Encoded bytes in the current statistics interval. |
| 120 int current_decoding_time_ms_; // Overall decoding time in the current second | 121 int current_decoding_time_ms_; // Overall decoding time in the current second |
| 121 uint32_t max_pending_frames_; // Maximum number of pending input frames | 122 uint32_t max_pending_frames_; // Maximum number of pending input frames |
| 122 std::vector<int32_t> timestamps_; | 123 std::vector<int32_t> timestamps_; |
| 123 std::vector<int64_t> ntp_times_ms_; | 124 std::vector<int64_t> ntp_times_ms_; |
| (...skipping 12 matching lines...) |
| 136 jmethodID j_dequeue_output_buffer_method_; | 137 jmethodID j_dequeue_output_buffer_method_; |
| 137 jmethodID j_return_decoded_byte_buffer_method_; | 138 jmethodID j_return_decoded_byte_buffer_method_; |
| 138 // MediaCodecVideoDecoder fields. | 139 // MediaCodecVideoDecoder fields. |
| 139 jfieldID j_input_buffers_field_; | 140 jfieldID j_input_buffers_field_; |
| 140 jfieldID j_output_buffers_field_; | 141 jfieldID j_output_buffers_field_; |
| 141 jfieldID j_color_format_field_; | 142 jfieldID j_color_format_field_; |
| 142 jfieldID j_width_field_; | 143 jfieldID j_width_field_; |
| 143 jfieldID j_height_field_; | 144 jfieldID j_height_field_; |
| 144 jfieldID j_stride_field_; | 145 jfieldID j_stride_field_; |
| 145 jfieldID j_slice_height_field_; | 146 jfieldID j_slice_height_field_; |
| 146 jfieldID j_surface_texture_field_; | |
| 147 // MediaCodecVideoDecoder.DecodedTextureBuffer fields. | 147 // MediaCodecVideoDecoder.DecodedTextureBuffer fields. |
| 148 jfieldID j_textureID_field_; | 148 jfieldID j_textureID_field_; |
| 149 jfieldID j_texture_presentation_timestamp_us_field_; | 149 jfieldID j_transform_matrix_field_; |
| 150 jfieldID j_texture_timestamp_ns_field_; |
| 150 // MediaCodecVideoDecoder.DecodedByteBuffer fields. | 151 // MediaCodecVideoDecoder.DecodedByteBuffer fields. |
| 151 jfieldID j_info_index_field_; | 152 jfieldID j_info_index_field_; |
| 152 jfieldID j_info_offset_field_; | 153 jfieldID j_info_offset_field_; |
| 153 jfieldID j_info_size_field_; | 154 jfieldID j_info_size_field_; |
| 154 jfieldID j_info_presentation_timestamp_us_field_; | 155 jfieldID j_info_presentation_timestamp_us_field_; |
| 155 | 156 |
| 156 // Global references; must be deleted in Release(). | 157 // Global references; must be deleted in Release(). |
| 157 std::vector<jobject> input_buffers_; | 158 std::vector<jobject> input_buffers_; |
| 158 jobject surface_texture_; | |
| 159 jobject previous_surface_texture_; | |
| 160 | 159 |
| 161 // Render EGL context - owned by factory, should not be allocated/destroyed | 160 // Render EGL context - owned by factory, should not be allocated/destroyed |
| 162 // by VideoDecoder. | 161 // by VideoDecoder. |
| 163 jobject render_egl_context_; | 162 jobject render_egl_context_; |
| 164 }; | 163 }; |
| 165 | 164 |
| 166 MediaCodecVideoDecoder::MediaCodecVideoDecoder( | 165 MediaCodecVideoDecoder::MediaCodecVideoDecoder( |
| 167 JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context) : | 166 JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context) : |
| 168 codecType_(codecType), | 167 codecType_(codecType), |
| 169 render_egl_context_(render_egl_context), | 168 render_egl_context_(render_egl_context), |
| 170 key_frame_required_(true), | 169 key_frame_required_(true), |
| 171 inited_(false), | 170 inited_(false), |
| 172 sw_fallback_required_(false), | 171 sw_fallback_required_(false), |
| 173 surface_texture_(NULL), | |
| 174 previous_surface_texture_(NULL), | |
| 175 codec_thread_(new Thread()), | 172 codec_thread_(new Thread()), |
| 176 j_media_codec_video_decoder_class_( | 173 j_media_codec_video_decoder_class_( |
| 177 jni, | 174 jni, |
| 178 FindClass(jni, "org/webrtc/MediaCodecVideoDecoder")), | 175 FindClass(jni, "org/webrtc/MediaCodecVideoDecoder")), |
| 179 j_media_codec_video_decoder_( | 176 j_media_codec_video_decoder_( |
| 180 jni, | 177 jni, |
| 181 jni->NewObject(*j_media_codec_video_decoder_class_, | 178 jni->NewObject(*j_media_codec_video_decoder_class_, |
| 182 GetMethodID(jni, | 179 GetMethodID(jni, |
| 183 *j_media_codec_video_decoder_class_, | 180 *j_media_codec_video_decoder_class_, |
| 184 "<init>", | 181 "<init>", |
| 185 "()V"))) { | 182 "()V"))) { |
| 186 ScopedLocalRefFrame local_ref_frame(jni); | 183 ScopedLocalRefFrame local_ref_frame(jni); |
| 187 codec_thread_->SetName("MediaCodecVideoDecoder", NULL); | 184 codec_thread_->SetName("MediaCodecVideoDecoder", NULL); |
| 188 RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoDecoder"; | 185 RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoDecoder"; |
| 189 | 186 |
| 190 j_init_decode_method_ = GetMethodID( | 187 j_init_decode_method_ = GetMethodID( |
| 191 jni, *j_media_codec_video_decoder_class_, "initDecode", | 188 jni, *j_media_codec_video_decoder_class_, "initDecode", |
| 192 "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;" | 189 "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;" |
| 193 "IILandroid/opengl/EGLContext;)Z"); | 190 "IILorg/webrtc/SurfaceTextureHelper;)Z"); |
| 194 j_release_method_ = | 191 j_release_method_ = |
| 195 GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V"); | 192 GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V"); |
| 196 j_dequeue_input_buffer_method_ = GetMethodID( | 193 j_dequeue_input_buffer_method_ = GetMethodID( |
| 197 jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I"); | 194 jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I"); |
| 198 j_queue_input_buffer_method_ = GetMethodID( | 195 j_queue_input_buffer_method_ = GetMethodID( |
| 199 jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJ)Z"); | 196 jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJ)Z"); |
| 200 j_dequeue_output_buffer_method_ = GetMethodID( | 197 j_dequeue_output_buffer_method_ = GetMethodID( |
| 201 jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer", | 198 jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer", |
| 202 "(I)Ljava/lang/Object;"); | 199 "(I)Ljava/lang/Object;"); |
| 203 j_return_decoded_byte_buffer_method_ = | 200 j_return_decoded_byte_buffer_method_ = |
| 204 GetMethodID(jni, *j_media_codec_video_decoder_class_, | 201 GetMethodID(jni, *j_media_codec_video_decoder_class_, |
| 205 "returnDecodedByteBuffer", "(I)V"); | 202 "returnDecodedByteBuffer", "(I)V"); |
| 206 | 203 |
| 207 j_input_buffers_field_ = GetFieldID( | 204 j_input_buffers_field_ = GetFieldID( |
| 208 jni, *j_media_codec_video_decoder_class_, | 205 jni, *j_media_codec_video_decoder_class_, |
| 209 "inputBuffers", "[Ljava/nio/ByteBuffer;"); | 206 "inputBuffers", "[Ljava/nio/ByteBuffer;"); |
| 210 j_output_buffers_field_ = GetFieldID( | 207 j_output_buffers_field_ = GetFieldID( |
| 211 jni, *j_media_codec_video_decoder_class_, | 208 jni, *j_media_codec_video_decoder_class_, |
| 212 "outputBuffers", "[Ljava/nio/ByteBuffer;"); | 209 "outputBuffers", "[Ljava/nio/ByteBuffer;"); |
| 213 j_color_format_field_ = GetFieldID( | 210 j_color_format_field_ = GetFieldID( |
| 214 jni, *j_media_codec_video_decoder_class_, "colorFormat", "I"); | 211 jni, *j_media_codec_video_decoder_class_, "colorFormat", "I"); |
| 215 j_width_field_ = GetFieldID( | 212 j_width_field_ = GetFieldID( |
| 216 jni, *j_media_codec_video_decoder_class_, "width", "I"); | 213 jni, *j_media_codec_video_decoder_class_, "width", "I"); |
| 217 j_height_field_ = GetFieldID( | 214 j_height_field_ = GetFieldID( |
| 218 jni, *j_media_codec_video_decoder_class_, "height", "I"); | 215 jni, *j_media_codec_video_decoder_class_, "height", "I"); |
| 219 j_stride_field_ = GetFieldID( | 216 j_stride_field_ = GetFieldID( |
| 220 jni, *j_media_codec_video_decoder_class_, "stride", "I"); | 217 jni, *j_media_codec_video_decoder_class_, "stride", "I"); |
| 221 j_slice_height_field_ = GetFieldID( | 218 j_slice_height_field_ = GetFieldID( |
| 222 jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I"); | 219 jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I"); |
| 223 j_surface_texture_field_ = GetFieldID( | |
| 224 jni, *j_media_codec_video_decoder_class_, "surfaceTexture", | |
| 225 "Landroid/graphics/SurfaceTexture;"); | |
| 226 | 220 |
| 227 jclass j_decoder_decoded_texture_buffer_class = FindClass(jni, | 221 jclass j_decoder_decoded_texture_buffer_class = FindClass(jni, |
| 228 "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer"); | 222 "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer"); |
| 229 j_textureID_field_ = GetFieldID( | 223 j_textureID_field_ = GetFieldID( |
| 230 jni, j_decoder_decoded_texture_buffer_class, "textureID", "I"); | 224 jni, j_decoder_decoded_texture_buffer_class, "textureID", "I"); |
| 231 j_texture_presentation_timestamp_us_field_ = | 225 j_transform_matrix_field_ = GetFieldID( |
| 232 GetFieldID(jni, j_decoder_decoded_texture_buffer_class, | 226 jni, j_decoder_decoded_texture_buffer_class, "transformMatrix", "[F"); |
| 233 "presentationTimestampUs", "J"); | 227 j_texture_timestamp_ns_field_ = GetFieldID( |
| 228 jni, j_decoder_decoded_texture_buffer_class, "timestampNs", "J"); |
| 234 | 229 |
| 235 jclass j_decoder_decoded_byte_buffer_class = FindClass(jni, | 230 jclass j_decoder_decoded_byte_buffer_class = FindClass(jni, |
| 236 "org/webrtc/MediaCodecVideoDecoder$DecodedByteBuffer"); | 231 "org/webrtc/MediaCodecVideoDecoder$DecodedByteBuffer"); |
| 237 j_info_index_field_ = GetFieldID( | 232 j_info_index_field_ = GetFieldID( |
| 238 jni, j_decoder_decoded_byte_buffer_class, "index", "I"); | 233 jni, j_decoder_decoded_byte_buffer_class, "index", "I"); |
| 239 j_info_offset_field_ = GetFieldID( | 234 j_info_offset_field_ = GetFieldID( |
| 240 jni, j_decoder_decoded_byte_buffer_class, "offset", "I"); | 235 jni, j_decoder_decoded_byte_buffer_class, "offset", "I"); |
| 241 j_info_size_field_ = GetFieldID( | 236 j_info_size_field_ = GetFieldID( |
| 242 jni, j_decoder_decoded_byte_buffer_class, "size", "I"); | 237 jni, j_decoder_decoded_byte_buffer_class, "size", "I"); |
| 243 j_info_presentation_timestamp_us_field_ = GetFieldID( | 238 j_info_presentation_timestamp_us_field_ = GetFieldID( |
| 244 jni, j_decoder_decoded_byte_buffer_class, "presentationTimestampUs", "J"); | 239 jni, j_decoder_decoded_byte_buffer_class, "presentationTimestampUs", "J"); |
| 245 | 240 |
| 246 CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed"; | 241 CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed"; |
| 247 use_surface_ = (render_egl_context_ != NULL); | 242 use_surface_ = (render_egl_context_ != NULL); |
| 248 ALOGD("MediaCodecVideoDecoder ctor. Use surface: %d", use_surface_); | 243 ALOGD("MediaCodecVideoDecoder ctor. Use surface: %d", use_surface_); |
| 249 memset(&codec_, 0, sizeof(codec_)); | 244 memset(&codec_, 0, sizeof(codec_)); |
| 250 AllowBlockingCalls(); | 245 AllowBlockingCalls(); |
| 251 } | 246 } |
| 252 | 247 |
| 253 MediaCodecVideoDecoder::~MediaCodecVideoDecoder() { | 248 MediaCodecVideoDecoder::~MediaCodecVideoDecoder() { |
| 254 // Call Release() to ensure no more callbacks to us after we are deleted. | 249 // Call Release() to ensure no more callbacks to us after we are deleted. |
| 255 Release(); | 250 Release(); |
| 256 // Delete global references. | |
| 257 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | |
| 258 if (previous_surface_texture_ != NULL) { | |
| 259 jni->DeleteGlobalRef(previous_surface_texture_); | |
| 260 } | |
| 261 if (surface_texture_ != NULL) { | |
| 262 jni->DeleteGlobalRef(surface_texture_); | |
| 263 } | |
| 264 } | 251 } |
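Note on the lifetime change above: the destructor no longer deletes SurfaceTexture global references by hand because the new surface_texture_helper_ member is reference counted and is simply dropped in ReleaseOnCodecThread further down. A minimal sketch of that WebRTC ref-counting idiom follows, with a hypothetical Helper class standing in for SurfaceTextureHelper and assuming only the webrtc/base headers already present in this tree:

#include "webrtc/base/refcount.h"
#include "webrtc/base/scoped_ref_ptr.h"

// Hypothetical stand-in for SurfaceTextureHelper (the real class also takes a
// JNIEnv* and an EGL context in its constructor).
class Helper : public rtc::RefCountInterface {
 public:
  void DoWork() {}

 protected:
  ~Helper() override {}
};

void Example() {
  // RefCountedObject<Helper> supplies the AddRef()/Release() implementation.
  rtc::scoped_refptr<Helper> helper(new rtc::RefCountedObject<Helper>());
  helper->DoWork();
  helper = nullptr;  // Last reference dropped; the object deletes itself.
}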
| 265 | 252 |
| 266 int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst, | 253 int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst, |
| 267 int32_t numberOfCores) { | 254 int32_t numberOfCores) { |
| 268 ALOGD("InitDecode."); | 255 ALOGD("InitDecode."); |
| 269 if (inst == NULL) { | 256 if (inst == NULL) { |
| 270 ALOGE("NULL VideoCodec instance"); | 257 ALOGE("NULL VideoCodec instance"); |
| 271 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 258 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 272 } | 259 } |
| 273 // Factory should guard against other codecs being used with us. | 260 // Factory should guard against other codecs being used with us. |
| (...skipping 29 matching lines...) |
| 303 ALOGE("Release failure: %d - fallback to SW codec", ret_val); | 290 ALOGE("Release failure: %d - fallback to SW codec", ret_val); |
| 304 sw_fallback_required_ = true; | 291 sw_fallback_required_ = true; |
| 305 return WEBRTC_VIDEO_CODEC_ERROR; | 292 return WEBRTC_VIDEO_CODEC_ERROR; |
| 306 } | 293 } |
| 307 | 294 |
| 308 // Always start with a complete key frame. | 295 // Always start with a complete key frame. |
| 309 key_frame_required_ = true; | 296 key_frame_required_ = true; |
| 310 frames_received_ = 0; | 297 frames_received_ = 0; |
| 311 frames_decoded_ = 0; | 298 frames_decoded_ = 0; |
| 312 | 299 |
| 300 if (use_surface_) { |
| 301 surface_texture_helper_ = new rtc::RefCountedObject<SurfaceTextureHelper>( |
| 302 jni, render_egl_context_); |
| 303 } |
| 304 |
| 313 jobject j_video_codec_enum = JavaEnumFromIndex( | 305 jobject j_video_codec_enum = JavaEnumFromIndex( |
| 314 jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_); | 306 jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_); |
| 315 bool success = jni->CallBooleanMethod( | 307 bool success = jni->CallBooleanMethod( |
| 316 *j_media_codec_video_decoder_, | 308 *j_media_codec_video_decoder_, |
| 317 j_init_decode_method_, | 309 j_init_decode_method_, |
| 318 j_video_codec_enum, | 310 j_video_codec_enum, |
| 319 codec_.width, | 311 codec_.width, |
| 320 codec_.height, | 312 codec_.height, |
| 321 use_surface_ ? render_egl_context_ : nullptr); | 313 use_surface_ ? surface_texture_helper_->GetJavaSurfaceTextureHelper() |
| 314 : nullptr); |
| 322 if (CheckException(jni) || !success) { | 315 if (CheckException(jni) || !success) { |
| 323 ALOGE("Codec initialization error - fallback to SW codec."); | 316 ALOGE("Codec initialization error - fallback to SW codec."); |
| 324 sw_fallback_required_ = true; | 317 sw_fallback_required_ = true; |
| 325 return WEBRTC_VIDEO_CODEC_ERROR; | 318 return WEBRTC_VIDEO_CODEC_ERROR; |
| 326 } | 319 } |
| 327 inited_ = true; | 320 inited_ = true; |
| 328 | 321 |
| 329 switch (codecType_) { | 322 switch (codecType_) { |
| 330 case kVideoCodecVP8: | 323 case kVideoCodecVP8: |
| 331 max_pending_frames_ = kMaxPendingFramesVp8; | 324 max_pending_frames_ = kMaxPendingFramesVp8; |
| (...skipping 19 matching lines...) |
| 351 for (size_t i = 0; i < num_input_buffers; ++i) { | 344 for (size_t i = 0; i < num_input_buffers; ++i) { |
| 352 input_buffers_[i] = | 345 input_buffers_[i] = |
| 353 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i)); | 346 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i)); |
| 354 if (CheckException(jni)) { | 347 if (CheckException(jni)) { |
| 355 ALOGE("NewGlobalRef error - fallback to SW codec."); | 348 ALOGE("NewGlobalRef error - fallback to SW codec."); |
| 356 sw_fallback_required_ = true; | 349 sw_fallback_required_ = true; |
| 357 return WEBRTC_VIDEO_CODEC_ERROR; | 350 return WEBRTC_VIDEO_CODEC_ERROR; |
| 358 } | 351 } |
| 359 } | 352 } |
| 360 | 353 |
| 361 if (use_surface_) { | |
| 362 jobject surface_texture = GetObjectField( | |
| 363 jni, *j_media_codec_video_decoder_, j_surface_texture_field_); | |
| 364 if (previous_surface_texture_ != NULL) { | |
| 365 jni->DeleteGlobalRef(previous_surface_texture_); | |
| 366 } | |
| 367 previous_surface_texture_ = surface_texture_; | |
| 368 surface_texture_ = jni->NewGlobalRef(surface_texture); | |
| 369 } | |
| 370 codec_thread_->PostDelayed(kMediaCodecPollMs, this); | 354 codec_thread_->PostDelayed(kMediaCodecPollMs, this); |
| 371 | 355 |
| 372 return WEBRTC_VIDEO_CODEC_OK; | 356 return WEBRTC_VIDEO_CODEC_OK; |
| 373 } | 357 } |
| 374 | 358 |
| 375 int32_t MediaCodecVideoDecoder::Release() { | 359 int32_t MediaCodecVideoDecoder::Release() { |
| 376 ALOGD("DecoderRelease request"); | 360 ALOGD("DecoderRelease request"); |
| 377 return codec_thread_->Invoke<int32_t>( | 361 return codec_thread_->Invoke<int32_t>( |
| 378 Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this)); | 362 Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this)); |
| 379 } | 363 } |
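Release() uses the same thread-hop idiom as the rest of this file: the work is bound to a member function and run synchronously on codec_thread_ via Invoke. A minimal, self-contained sketch of that rtc::Bind / rtc::Thread::Invoke pattern, with a hypothetical Worker class and assuming the bind.h and thread.h headers this file already includes:

#include "webrtc/base/bind.h"
#include "webrtc/base/thread.h"

class Worker {
 public:
  explicit Worker(rtc::Thread* codec_thread) : codec_thread_(codec_thread) {}

  // Blocks the calling thread until ReleaseInternal() has run on
  // codec_thread_, then returns its result (mirrors Release() above).
  int32_t Release() {
    return codec_thread_->Invoke<int32_t>(
        rtc::Bind(&Worker::ReleaseInternal, this));
  }

 private:
  int32_t ReleaseInternal() { return 0; }  // Runs on codec_thread_.

  rtc::Thread* codec_thread_;
};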
| 380 | 364 |
| 381 int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() { | 365 int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() { |
| 382 if (!inited_) { | 366 if (!inited_) { |
| 383 return WEBRTC_VIDEO_CODEC_OK; | 367 return WEBRTC_VIDEO_CODEC_OK; |
| 384 } | 368 } |
| 385 CheckOnCodecThread(); | 369 CheckOnCodecThread(); |
| 386 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 370 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 387 ALOGD("DecoderReleaseOnCodecThread: Frames received: %d.", frames_received_); | 371 ALOGD("DecoderReleaseOnCodecThread: Frames received: %d.", frames_received_); |
| 388 ScopedLocalRefFrame local_ref_frame(jni); | 372 ScopedLocalRefFrame local_ref_frame(jni); |
| 389 for (size_t i = 0; i < input_buffers_.size(); i++) { | 373 for (size_t i = 0; i < input_buffers_.size(); i++) { |
| 390 jni->DeleteGlobalRef(input_buffers_[i]); | 374 jni->DeleteGlobalRef(input_buffers_[i]); |
| 391 } | 375 } |
| 392 input_buffers_.clear(); | 376 input_buffers_.clear(); |
| 393 jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_); | 377 jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_); |
| 378 surface_texture_helper_ = nullptr; |
| 394 inited_ = false; | 379 inited_ = false; |
| 395 rtc::MessageQueueManager::Clear(this); | 380 rtc::MessageQueueManager::Clear(this); |
| 396 if (CheckException(jni)) { | 381 if (CheckException(jni)) { |
| 397 ALOGE("Decoder release exception"); | 382 ALOGE("Decoder release exception"); |
| 398 return WEBRTC_VIDEO_CODEC_ERROR; | 383 return WEBRTC_VIDEO_CODEC_ERROR; |
| 399 } | 384 } |
| 400 return WEBRTC_VIDEO_CODEC_OK; | 385 return WEBRTC_VIDEO_CODEC_OK; |
| 401 } | 386 } |
| 402 | 387 |
| 403 void MediaCodecVideoDecoder::CheckOnCodecThread() { | 388 void MediaCodecVideoDecoder::CheckOnCodecThread() { |
| (...skipping 88 matching lines...) |
| 492 const EncodedImage& inputImage) { | 477 const EncodedImage& inputImage) { |
| 493 CheckOnCodecThread(); | 478 CheckOnCodecThread(); |
| 494 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 479 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 495 ScopedLocalRefFrame local_ref_frame(jni); | 480 ScopedLocalRefFrame local_ref_frame(jni); |
| 496 | 481 |
| 497 // Try to drain the decoder and wait until output is not too | 482 // Try to drain the decoder and wait until output is not too |
| 498 // much behind the input. | 483 // much behind the input. |
| 499 if (frames_received_ > frames_decoded_ + max_pending_frames_) { | 484 if (frames_received_ > frames_decoded_ + max_pending_frames_) { |
| 500 ALOGV("Received: %d. Decoded: %d. Wait for output...", | 485 ALOGV("Received: %d. Decoded: %d. Wait for output...", |
| 501 frames_received_, frames_decoded_); | 486 frames_received_, frames_decoded_); |
| 502 if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs * 1000)) { | 487 if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs)) { |
| 503 ALOGE("DeliverPendingOutputs error"); | 488 ALOGE("DeliverPendingOutputs error"); |
| 504 return ProcessHWErrorOnCodecThread(); | 489 return ProcessHWErrorOnCodecThread(); |
| 505 } | 490 } |
| 506 if (frames_received_ > frames_decoded_ + max_pending_frames_) { | 491 if (frames_received_ > frames_decoded_ + max_pending_frames_) { |
| 507 ALOGE("Output buffer dequeue timeout"); | 492 ALOGE("Output buffer dequeue timeout"); |
| 508 return ProcessHWErrorOnCodecThread(); | 493 return ProcessHWErrorOnCodecThread(); |
| 509 } | 494 } |
| 510 } | 495 } |
| 511 | 496 |
| 512 // Get input buffer. | 497 // Get input buffer. |
| (...skipping 42 matching lines...) |
| 555 // Try to drain the decoder | 540 // Try to drain the decoder |
| 556 if (!DeliverPendingOutputs(jni, 0)) { | 541 if (!DeliverPendingOutputs(jni, 0)) { |
| 557 ALOGE("DeliverPendingOutputs error"); | 542 ALOGE("DeliverPendingOutputs error"); |
| 558 return ProcessHWErrorOnCodecThread(); | 543 return ProcessHWErrorOnCodecThread(); |
| 559 } | 544 } |
| 560 | 545 |
| 561 return WEBRTC_VIDEO_CODEC_OK; | 546 return WEBRTC_VIDEO_CODEC_OK; |
| 562 } | 547 } |
| 563 | 548 |
| 564 bool MediaCodecVideoDecoder::DeliverPendingOutputs( | 549 bool MediaCodecVideoDecoder::DeliverPendingOutputs( |
| 565 JNIEnv* jni, int dequeue_timeout_us) { | 550 JNIEnv* jni, int dequeue_timeout_ms) { |
| 566 if (frames_received_ <= frames_decoded_) { | 551 if (frames_received_ <= frames_decoded_) { |
| 567 // No need to query for output buffers - decoder is drained. | 552 // No need to query for output buffers - decoder is drained. |
| 568 return true; | 553 return true; |
| 569 } | 554 } |
| 570 // Get decoder output. | 555 // Get decoder output. |
| 571 jobject j_decoder_output_buffer = jni->CallObjectMethod( | 556 jobject j_decoder_output_buffer = jni->CallObjectMethod( |
| 572 *j_media_codec_video_decoder_, | 557 *j_media_codec_video_decoder_, |
| 573 j_dequeue_output_buffer_method_, | 558 j_dequeue_output_buffer_method_, |
| 574 dequeue_timeout_us); | 559 dequeue_timeout_ms); |
| 575 if (CheckException(jni)) { | 560 if (CheckException(jni)) { |
| 576 ALOGE("dequeueOutputBuffer() error"); | 561 ALOGE("dequeueOutputBuffer() error"); |
| 577 return false; | 562 return false; |
| 578 } | 563 } |
| 579 if (IsNull(jni, j_decoder_output_buffer)) { | 564 if (IsNull(jni, j_decoder_output_buffer)) { |
| 580 // No decoded frame ready. | 565 // No decoded frame ready. |
| 581 return true; | 566 return true; |
| 582 } | 567 } |
| 583 | 568 |
| 584 // Get decoded video frame properties. | 569 // Get decoded video frame properties. |
| 585 int color_format = GetIntField(jni, *j_media_codec_video_decoder_, | 570 int color_format = GetIntField(jni, *j_media_codec_video_decoder_, |
| 586 j_color_format_field_); | 571 j_color_format_field_); |
| 587 int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_); | 572 int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_); |
| 588 int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_); | 573 int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_); |
| 589 int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_); | 574 int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_); |
| 590 int slice_height = GetIntField(jni, *j_media_codec_video_decoder_, | 575 int slice_height = GetIntField(jni, *j_media_codec_video_decoder_, |
| 591 j_slice_height_field_); | 576 j_slice_height_field_); |
| 592 | 577 |
| 593 rtc::scoped_refptr<webrtc::VideoFrameBuffer> frame_buffer; | 578 rtc::scoped_refptr<webrtc::VideoFrameBuffer> frame_buffer; |
| 594 long output_timestamps_ms = 0; | 579 long output_timestamps_ms = 0; |
| 595 if (use_surface_) { | 580 if (use_surface_) { |
| 596 // Extract data from Java DecodedTextureBuffer. | 581 // Extract data from Java DecodedTextureBuffer. |
| 597 const int texture_id = | 582 const int texture_id = |
| 598 GetIntField(jni, j_decoder_output_buffer, j_textureID_field_); | 583 GetIntField(jni, j_decoder_output_buffer, j_textureID_field_); |
| 599 const int64_t timestamp_us = | 584 const jfloatArray j_transform_matrix = |
| 600 GetLongField(jni, j_decoder_output_buffer, | 585 reinterpret_cast<jfloatArray>(GetObjectField( |
| 601 j_texture_presentation_timestamp_us_field_); | 586 jni, j_decoder_output_buffer, j_transform_matrix_field_)); |
| 602 output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec; | 587 const int64_t timestamp_ns = GetLongField(jni, j_decoder_output_buffer, |
| 588 j_texture_timestamp_ns_field_); |
| 589 output_timestamps_ms = timestamp_ns / rtc::kNumNanosecsPerMillisec; |
| 603 // Create webrtc::VideoFrameBuffer with native texture handle. | 590 // Create webrtc::VideoFrameBuffer with native texture handle. |
| 604 native_handle_.SetTextureObject(surface_texture_, texture_id); | 591 frame_buffer = surface_texture_helper_->CreateTextureFrame( |
| 605 frame_buffer = new rtc::RefCountedObject<JniNativeHandleBuffer>( | 592 width, height, NativeHandleImpl(jni, texture_id, j_transform_matrix)); |
| 606 &native_handle_, width, height); | |
| 607 } else { | 593 } else { |
| 608 // Extract data from Java ByteBuffer and create output yuv420 frame - | 594 // Extract data from Java ByteBuffer and create output yuv420 frame - |
| 609 // for non surface decoding only. | 595 // for non surface decoding only. |
| 610 const int output_buffer_index = | 596 const int output_buffer_index = |
| 611 GetIntField(jni, j_decoder_output_buffer, j_info_index_field_); | 597 GetIntField(jni, j_decoder_output_buffer, j_info_index_field_); |
| 612 const int output_buffer_offset = | 598 const int output_buffer_offset = |
| 613 GetIntField(jni, j_decoder_output_buffer, j_info_offset_field_); | 599 GetIntField(jni, j_decoder_output_buffer, j_info_offset_field_); |
| 614 const int output_buffer_size = | 600 const int output_buffer_size = |
| 615 GetIntField(jni, j_decoder_output_buffer, j_info_size_field_); | 601 GetIntField(jni, j_decoder_output_buffer, j_info_size_field_); |
| 616 const int64_t timestamp_us = GetLongField( | 602 const int64_t timestamp_us = GetLongField( |
| (...skipping 223 matching lines...) |
| 840 return NULL; | 826 return NULL; |
| 841 } | 827 } |
| 842 | 828 |
| 843 void MediaCodecVideoDecoderFactory::DestroyVideoDecoder( | 829 void MediaCodecVideoDecoderFactory::DestroyVideoDecoder( |
| 844 webrtc::VideoDecoder* decoder) { | 830 webrtc::VideoDecoder* decoder) { |
| 845 delete decoder; | 831 delete decoder; |
| 846 } | 832 } |
| 847 | 833 |
| 848 } // namespace webrtc_jni | 834 } // namespace webrtc_jni |
| 849 | 835 |