Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 /* | 1 /* |
| 2 * libjingle | 2 * libjingle |
| 3 * Copyright 2015 Google Inc. | 3 * Copyright 2015 Google Inc. |
| 4 * | 4 * |
| 5 * Redistribution and use in source and binary forms, with or without | 5 * Redistribution and use in source and binary forms, with or without |
| 6 * modification, are permitted provided that the following conditions are met: | 6 * modification, are permitted provided that the following conditions are met: |
| 7 * | 7 * |
| 8 * 1. Redistributions of source code must retain the above copyright notice, | 8 * 1. Redistributions of source code must retain the above copyright notice, |
| 9 * this list of conditions and the following disclaimer. | 9 * this list of conditions and the following disclaimer. |
| 10 * 2. Redistributions in binary form must reproduce the above copyright notice, | 10 * 2. Redistributions in binary form must reproduce the above copyright notice, |
| (...skipping 15 matching lines...) | |
| 26 * | 26 * |
| 27 */ | 27 */ |
| 28 | 28 |
| 29 #include <algorithm> | 29 #include <algorithm> |
| 30 #include <vector> | 30 #include <vector> |
| 31 | 31 |
| 32 #include "talk/app/webrtc/java/jni/androidmediadecoder_jni.h" | 32 #include "talk/app/webrtc/java/jni/androidmediadecoder_jni.h" |
| 33 #include "talk/app/webrtc/java/jni/androidmediacodeccommon.h" | 33 #include "talk/app/webrtc/java/jni/androidmediacodeccommon.h" |
| 34 #include "talk/app/webrtc/java/jni/classreferenceholder.h" | 34 #include "talk/app/webrtc/java/jni/classreferenceholder.h" |
| 35 #include "talk/app/webrtc/java/jni/native_handle_impl.h" | 35 #include "talk/app/webrtc/java/jni/native_handle_impl.h" |
| 36 #include "talk/app/webrtc/java/jni/surfacetexturehelper_jni.h" | |
| 36 #include "webrtc/base/bind.h" | 37 #include "webrtc/base/bind.h" |
| 37 #include "webrtc/base/checks.h" | 38 #include "webrtc/base/checks.h" |
| 38 #include "webrtc/base/logging.h" | 39 #include "webrtc/base/logging.h" |
| 39 #include "webrtc/base/scoped_ref_ptr.h" | 40 #include "webrtc/base/scoped_ref_ptr.h" |
| 40 #include "webrtc/base/thread.h" | 41 #include "webrtc/base/thread.h" |
| 41 #include "webrtc/base/timeutils.h" | 42 #include "webrtc/base/timeutils.h" |
| 42 #include "webrtc/common_video/interface/i420_buffer_pool.h" | 43 #include "webrtc/common_video/interface/i420_buffer_pool.h" |
| 43 #include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h" | 44 #include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h" |
| 44 #include "webrtc/system_wrappers/interface/logcat_trace_context.h" | 45 #include "webrtc/system_wrappers/interface/logcat_trace_context.h" |
| 45 #include "webrtc/system_wrappers/interface/tick_util.h" | 46 #include "webrtc/system_wrappers/interface/tick_util.h" |
| (...skipping 58 matching lines...) | |
| 104 | 105 |
| 105 // Type of video codec. | 106 // Type of video codec. |
| 106 VideoCodecType codecType_; | 107 VideoCodecType codecType_; |
| 107 | 108 |
| 108 bool key_frame_required_; | 109 bool key_frame_required_; |
| 109 bool inited_; | 110 bool inited_; |
| 110 bool sw_fallback_required_; | 111 bool sw_fallback_required_; |
| 111 bool use_surface_; | 112 bool use_surface_; |
| 112 VideoCodec codec_; | 113 VideoCodec codec_; |
| 113 webrtc::I420BufferPool decoded_frame_pool_; | 114 webrtc::I420BufferPool decoded_frame_pool_; |
| 114 NativeHandleImpl native_handle_; | 115 rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_; |
| 115 DecodedImageCallback* callback_; | 116 DecodedImageCallback* callback_; |
| 116 int frames_received_; // Number of frames received by decoder. | 117 int frames_received_; // Number of frames received by decoder. |
| 117 int frames_decoded_; // Number of frames decoded by decoder. | 118 int frames_decoded_; // Number of frames decoded by decoder. |
| 118 int64_t start_time_ms_; // Start time for statistics. | 119 int64_t start_time_ms_; // Start time for statistics. |
| 119 int current_frames_; // Number of frames in the current statistics interval. | 120 int current_frames_; // Number of frames in the current statistics interval. |
| 120 int current_bytes_; // Encoded bytes in the current statistics interval. | 121 int current_bytes_; // Encoded bytes in the current statistics interval. |
| 121 int current_decoding_time_ms_; // Overall decoding time in the current second | 122 int current_decoding_time_ms_; // Overall decoding time in the current second |
| 122 uint32_t max_pending_frames_; // Maximum number of pending input frames | 123 uint32_t max_pending_frames_; // Maximum number of pending input frames |
| 123 std::vector<int32_t> timestamps_; | 124 std::vector<int32_t> timestamps_; |
| 124 std::vector<int64_t> ntp_times_ms_; | 125 std::vector<int64_t> ntp_times_ms_; |
| 125 std::vector<int64_t> frame_rtc_times_ms_; // Time when video frame is sent to | 126 std::vector<int64_t> frame_rtc_times_ms_; // Time when video frame is sent to |
| 126 // decoder input. | 127 // decoder input. |
| 127 | 128 |
| 128 // State that is constant for the lifetime of this object once the ctor | 129 // State that is constant for the lifetime of this object once the ctor |
| 129 // returns. | 130 // returns. |
| 130 scoped_ptr<Thread> codec_thread_; // Thread on which to operate MediaCodec. | 131 scoped_ptr<Thread> codec_thread_; // Thread on which to operate MediaCodec. |
| 131 ScopedGlobalRef<jclass> j_media_codec_video_decoder_class_; | 132 ScopedGlobalRef<jclass> j_media_codec_video_decoder_class_; |
| 132 ScopedGlobalRef<jobject> j_media_codec_video_decoder_; | 133 ScopedGlobalRef<jobject> j_media_codec_video_decoder_; |
| 133 jmethodID j_init_decode_method_; | 134 jmethodID j_init_decode_method_; |
| 134 jmethodID j_release_method_; | 135 jmethodID j_release_method_; |
| 135 jmethodID j_dequeue_input_buffer_method_; | 136 jmethodID j_dequeue_input_buffer_method_; |
| 136 jmethodID j_queue_input_buffer_method_; | 137 jmethodID j_queue_input_buffer_method_; |
| 137 jmethodID j_dequeue_output_buffer_method_; | 138 jmethodID j_dequeue_byte_buffer_method_; |
| 139 jmethodID j_dequeue_texture_buffer_method_; | |
| 138 jmethodID j_return_decoded_byte_buffer_method_; | 140 jmethodID j_return_decoded_byte_buffer_method_; |
| 139 // MediaCodecVideoDecoder fields. | 141 // MediaCodecVideoDecoder fields. |
| 140 jfieldID j_input_buffers_field_; | 142 jfieldID j_input_buffers_field_; |
| 141 jfieldID j_output_buffers_field_; | 143 jfieldID j_output_buffers_field_; |
| 142 jfieldID j_color_format_field_; | 144 jfieldID j_color_format_field_; |
| 143 jfieldID j_width_field_; | 145 jfieldID j_width_field_; |
| 144 jfieldID j_height_field_; | 146 jfieldID j_height_field_; |
| 145 jfieldID j_stride_field_; | 147 jfieldID j_stride_field_; |
| 146 jfieldID j_slice_height_field_; | 148 jfieldID j_slice_height_field_; |
| 147 jfieldID j_surface_texture_field_; | |
| 148 // MediaCodecVideoDecoder.DecodedTextureBuffer fields. | 149 // MediaCodecVideoDecoder.DecodedTextureBuffer fields. |
| 149 jfieldID j_textureID_field_; | 150 jfieldID j_textureID_field_; |
| 150 jfieldID j_texture_presentation_timestamp_us_field_; | 151 jfieldID j_transform_matrix_field_; |
| 152 jfieldID j_texture_timestamp_ns_field_; | |
| 153 jfieldID j_texture_decode_time_ms_field_; | |
| 154 jfieldID j_texture_width_field_; | |
| 155 jfieldID j_texture_height_field_; | |
| 151 // MediaCodecVideoDecoder.DecodedByteBuffer fields. | 156 // MediaCodecVideoDecoder.DecodedByteBuffer fields. |
| 152 jfieldID j_info_index_field_; | 157 jfieldID j_info_index_field_; |
| 153 jfieldID j_info_offset_field_; | 158 jfieldID j_info_offset_field_; |
| 154 jfieldID j_info_size_field_; | 159 jfieldID j_info_size_field_; |
| 155 jfieldID j_info_presentation_timestamp_us_field_; | 160 jfieldID j_info_presentation_timestamp_us_field_; |
| 156 | 161 |
| 157 // Global references; must be deleted in Release(). | 162 // Global references; must be deleted in Release(). |
| 158 std::vector<jobject> input_buffers_; | 163 std::vector<jobject> input_buffers_; |
| 159 jobject surface_texture_; | |
| 160 jobject previous_surface_texture_; | |
| 161 | 164 |
| 162 // Render EGL context - owned by factory, should not be allocated/destroyed | 165 // Render EGL context - owned by factory, should not be allocated/destroyed |
| 163 // by VideoDecoder. | 166 // by VideoDecoder. |
| 164 jobject render_egl_context_; | 167 jobject render_egl_context_; |
| 165 }; | 168 }; |
| 166 | 169 |
| 167 MediaCodecVideoDecoder::MediaCodecVideoDecoder( | 170 MediaCodecVideoDecoder::MediaCodecVideoDecoder( |
| 168 JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context) : | 171 JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context) : |
| 169 codecType_(codecType), | 172 codecType_(codecType), |
| 170 render_egl_context_(render_egl_context), | 173 render_egl_context_(render_egl_context), |
| 171 key_frame_required_(true), | 174 key_frame_required_(true), |
| 172 inited_(false), | 175 inited_(false), |
| 173 sw_fallback_required_(false), | 176 sw_fallback_required_(false), |
| 174 surface_texture_(NULL), | |
| 175 previous_surface_texture_(NULL), | |
| 176 codec_thread_(new Thread()), | 177 codec_thread_(new Thread()), |
| 177 j_media_codec_video_decoder_class_( | 178 j_media_codec_video_decoder_class_( |
| 178 jni, | 179 jni, |
| 179 FindClass(jni, "org/webrtc/MediaCodecVideoDecoder")), | 180 FindClass(jni, "org/webrtc/MediaCodecVideoDecoder")), |
| 180 j_media_codec_video_decoder_( | 181 j_media_codec_video_decoder_( |
| 181 jni, | 182 jni, |
| 182 jni->NewObject(*j_media_codec_video_decoder_class_, | 183 jni->NewObject(*j_media_codec_video_decoder_class_, |
| 183 GetMethodID(jni, | 184 GetMethodID(jni, |
| 184 *j_media_codec_video_decoder_class_, | 185 *j_media_codec_video_decoder_class_, |
| 185 "<init>", | 186 "<init>", |
| 186 "()V"))) { | 187 "()V"))) { |
| 187 ScopedLocalRefFrame local_ref_frame(jni); | 188 ScopedLocalRefFrame local_ref_frame(jni); |
| 188 codec_thread_->SetName("MediaCodecVideoDecoder", NULL); | 189 codec_thread_->SetName("MediaCodecVideoDecoder", NULL); |
| 189 RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoDecoder"; | 190 RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoDecoder"; |
| 190 | 191 |
| 191 j_init_decode_method_ = GetMethodID( | 192 j_init_decode_method_ = GetMethodID( |
| 192 jni, *j_media_codec_video_decoder_class_, "initDecode", | 193 jni, *j_media_codec_video_decoder_class_, "initDecode", |
| 193 "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;" | 194 "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;" |
| 194 "IILandroid/opengl/EGLContext;)Z"); | 195 "IILorg/webrtc/SurfaceTextureHelper;)Z"); |
| 195 j_release_method_ = | 196 j_release_method_ = |
| 196 GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V"); | 197 GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V"); |
| 197 j_dequeue_input_buffer_method_ = GetMethodID( | 198 j_dequeue_input_buffer_method_ = GetMethodID( |
| 198 jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I"); | 199 jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I"); |
| 199 j_queue_input_buffer_method_ = GetMethodID( | 200 j_queue_input_buffer_method_ = GetMethodID( |
| 200 jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJ)Z"); | 201 jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJ)Z"); |
| 201 j_dequeue_output_buffer_method_ = GetMethodID( | 202 j_dequeue_byte_buffer_method_ = GetMethodID( |
| 202 jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer", | 203 jni, *j_media_codec_video_decoder_class_, "dequeueByteBuffer", |
| 203 "(I)Ljava/lang/Object;"); | 204 "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecodedByteBuffer;"); |
| 205 j_dequeue_texture_buffer_method_ = GetMethodID( | |
| 206 jni, *j_media_codec_video_decoder_class_, "dequeueTextureBuffer", | |
| **magjed_webrtc** 2015/10/29 09:44:13: this is a 5 space indent, remove one space | |
| **perkj_webrtc** 2015/10/29 19:26:44: Done. | |
| 207 "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer;"); | |
| 204 j_return_decoded_byte_buffer_method_ = | 208 j_return_decoded_byte_buffer_method_ = |
| 205 GetMethodID(jni, *j_media_codec_video_decoder_class_, | 209 GetMethodID(jni, *j_media_codec_video_decoder_class_, |
| 206 "returnDecodedByteBuffer", "(I)V"); | 210 "returnDecodedByteBuffer", "(I)V"); |
| 207 | 211 |
| 208 j_input_buffers_field_ = GetFieldID( | 212 j_input_buffers_field_ = GetFieldID( |
| 209 jni, *j_media_codec_video_decoder_class_, | 213 jni, *j_media_codec_video_decoder_class_, |
| 210 "inputBuffers", "[Ljava/nio/ByteBuffer;"); | 214 "inputBuffers", "[Ljava/nio/ByteBuffer;"); |
| 211 j_output_buffers_field_ = GetFieldID( | 215 j_output_buffers_field_ = GetFieldID( |
| 212 jni, *j_media_codec_video_decoder_class_, | 216 jni, *j_media_codec_video_decoder_class_, |
| 213 "outputBuffers", "[Ljava/nio/ByteBuffer;"); | 217 "outputBuffers", "[Ljava/nio/ByteBuffer;"); |
| 214 j_color_format_field_ = GetFieldID( | 218 j_color_format_field_ = GetFieldID( |
| 215 jni, *j_media_codec_video_decoder_class_, "colorFormat", "I"); | 219 jni, *j_media_codec_video_decoder_class_, "colorFormat", "I"); |
| 216 j_width_field_ = GetFieldID( | 220 j_width_field_ = GetFieldID( |
| 217 jni, *j_media_codec_video_decoder_class_, "width", "I"); | 221 jni, *j_media_codec_video_decoder_class_, "width", "I"); |
| 218 j_height_field_ = GetFieldID( | 222 j_height_field_ = GetFieldID( |
| 219 jni, *j_media_codec_video_decoder_class_, "height", "I"); | 223 jni, *j_media_codec_video_decoder_class_, "height", "I"); |
| 220 j_stride_field_ = GetFieldID( | 224 j_stride_field_ = GetFieldID( |
| 221 jni, *j_media_codec_video_decoder_class_, "stride", "I"); | 225 jni, *j_media_codec_video_decoder_class_, "stride", "I"); |
| 222 j_slice_height_field_ = GetFieldID( | 226 j_slice_height_field_ = GetFieldID( |
| 223 jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I"); | 227 jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I"); |
| 224 j_surface_texture_field_ = GetFieldID( | |
| 225 jni, *j_media_codec_video_decoder_class_, "surfaceTexture", | |
| 226 "Landroid/graphics/SurfaceTexture;"); | |
| 227 | 228 |
| 228 jclass j_decoder_decoded_texture_buffer_class = FindClass(jni, | 229 jclass j_decoder_decoded_texture_buffer_class = FindClass(jni, |
| 229 "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer"); | 230 "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer"); |
| 230 j_textureID_field_ = GetFieldID( | 231 j_textureID_field_ = GetFieldID( |
| 231 jni, j_decoder_decoded_texture_buffer_class, "textureID", "I"); | 232 jni, j_decoder_decoded_texture_buffer_class, "textureID", "I"); |
| 232 j_texture_presentation_timestamp_us_field_ = | 233 j_transform_matrix_field_ = GetFieldID( |
| 233 GetFieldID(jni, j_decoder_decoded_texture_buffer_class, | 234 jni, j_decoder_decoded_texture_buffer_class, "transformMatrix", "[F"); |
| 234 "presentationTimestampUs", "J"); | 235 j_texture_timestamp_ns_field_ = GetFieldID( |
| 236 jni, j_decoder_decoded_texture_buffer_class, "timestampNs", "J"); | |
| 237 j_texture_decode_time_ms_field_ = GetFieldID( | |
| 238 jni, j_decoder_decoded_texture_buffer_class, "decodeTimeMs", "J"); | |
| 239 j_texture_width_field_ = GetFieldID( | |
| 240 jni, j_decoder_decoded_texture_buffer_class, "width", "I"); | |
| 241 j_texture_height_field_ = GetFieldID( | |
| 242 jni, j_decoder_decoded_texture_buffer_class, "height", "I"); | |
| 235 | 243 |
| 236 jclass j_decoder_decoded_byte_buffer_class = FindClass(jni, | 244 jclass j_decoder_decoded_byte_buffer_class = FindClass(jni, |
| 237 "org/webrtc/MediaCodecVideoDecoder$DecodedByteBuffer"); | 245 "org/webrtc/MediaCodecVideoDecoder$DecodedByteBuffer"); |
| 238 j_info_index_field_ = GetFieldID( | 246 j_info_index_field_ = GetFieldID( |
| 239 jni, j_decoder_decoded_byte_buffer_class, "index", "I"); | 247 jni, j_decoder_decoded_byte_buffer_class, "index", "I"); |
| 240 j_info_offset_field_ = GetFieldID( | 248 j_info_offset_field_ = GetFieldID( |
| 241 jni, j_decoder_decoded_byte_buffer_class, "offset", "I"); | 249 jni, j_decoder_decoded_byte_buffer_class, "offset", "I"); |
| 242 j_info_size_field_ = GetFieldID( | 250 j_info_size_field_ = GetFieldID( |
| 243 jni, j_decoder_decoded_byte_buffer_class, "size", "I"); | 251 jni, j_decoder_decoded_byte_buffer_class, "size", "I"); |
| 244 j_info_presentation_timestamp_us_field_ = GetFieldID( | 252 j_info_presentation_timestamp_us_field_ = GetFieldID( |
| 245 jni, j_decoder_decoded_byte_buffer_class, "presentationTimestampUs", "J"); | 253 jni, j_decoder_decoded_byte_buffer_class, "presentationTimestampUs", "J"); |
| 246 | 254 |
| 247 CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed"; | 255 CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed"; |
| 248 use_surface_ = (render_egl_context_ != NULL); | 256 use_surface_ = (render_egl_context_ != NULL); |
| 249 ALOGD << "MediaCodecVideoDecoder ctor. Use surface: " << use_surface_; | 257 ALOGD << "MediaCodecVideoDecoder ctor. Use surface: " << use_surface_; |
| 250 memset(&codec_, 0, sizeof(codec_)); | 258 memset(&codec_, 0, sizeof(codec_)); |
| 251 AllowBlockingCalls(); | 259 AllowBlockingCalls(); |
| 252 } | 260 } |
| 253 | 261 |
| 254 MediaCodecVideoDecoder::~MediaCodecVideoDecoder() { | 262 MediaCodecVideoDecoder::~MediaCodecVideoDecoder() { |
| 255 // Call Release() to ensure no more callbacks to us after we are deleted. | 263 // Call Release() to ensure no more callbacks to us after we are deleted. |
| 256 Release(); | 264 Release(); |
| 257 // Delete global references. | |
| 258 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | |
| 259 if (previous_surface_texture_ != NULL) { | |
| 260 jni->DeleteGlobalRef(previous_surface_texture_); | |
| 261 } | |
| 262 if (surface_texture_ != NULL) { | |
| 263 jni->DeleteGlobalRef(surface_texture_); | |
| 264 } | |
| 265 } | 265 } |
| 266 | 266 |
| 267 int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst, | 267 int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst, |
| 268 int32_t numberOfCores) { | 268 int32_t numberOfCores) { |
| 269 ALOGD << "InitDecode."; | 269 ALOGD << "InitDecode."; |
| 270 if (inst == NULL) { | 270 if (inst == NULL) { |
| 271 ALOGE << "NULL VideoCodec instance"; | 271 ALOGE << "NULL VideoCodec instance"; |
| 272 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 272 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 273 } | 273 } |
| 274 // Factory should guard against other codecs being used with us. | 274 // Factory should guard against other codecs being used with us. |
| (...skipping 29 matching lines...) | |
| 304 ALOGE << "Release failure: " << ret_val << " - fallback to SW codec"; | 304 ALOGE << "Release failure: " << ret_val << " - fallback to SW codec"; |
| 305 sw_fallback_required_ = true; | 305 sw_fallback_required_ = true; |
| 306 return WEBRTC_VIDEO_CODEC_ERROR; | 306 return WEBRTC_VIDEO_CODEC_ERROR; |
| 307 } | 307 } |
| 308 | 308 |
| 309 // Always start with a complete key frame. | 309 // Always start with a complete key frame. |
| 310 key_frame_required_ = true; | 310 key_frame_required_ = true; |
| 311 frames_received_ = 0; | 311 frames_received_ = 0; |
| 312 frames_decoded_ = 0; | 312 frames_decoded_ = 0; |
| 313 | 313 |
| 314 if (use_surface_) { | |
| 315 surface_texture_helper_ = new rtc::RefCountedObject<SurfaceTextureHelper>( | |
| 316 jni, render_egl_context_); | |
| 317 } | |
| 318 | |
| 314 jobject j_video_codec_enum = JavaEnumFromIndex( | 319 jobject j_video_codec_enum = JavaEnumFromIndex( |
| 315 jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_); | 320 jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_); |
| 316 bool success = jni->CallBooleanMethod( | 321 bool success = jni->CallBooleanMethod( |
| 317 *j_media_codec_video_decoder_, | 322 *j_media_codec_video_decoder_, |
| 318 j_init_decode_method_, | 323 j_init_decode_method_, |
| 319 j_video_codec_enum, | 324 j_video_codec_enum, |
| 320 codec_.width, | 325 codec_.width, |
| 321 codec_.height, | 326 codec_.height, |
| 322 use_surface_ ? render_egl_context_ : nullptr); | 327 use_surface_ ? surface_texture_helper_->GetJavaSurfaceTextureHelper() |
| 328 : nullptr); | |
| 323 if (CheckException(jni) || !success) { | 329 if (CheckException(jni) || !success) { |
| 324 ALOGE << "Codec initialization error - fallback to SW codec."; | 330 ALOGE << "Codec initialization error - fallback to SW codec."; |
| 325 sw_fallback_required_ = true; | 331 sw_fallback_required_ = true; |
| 326 return WEBRTC_VIDEO_CODEC_ERROR; | 332 return WEBRTC_VIDEO_CODEC_ERROR; |
| 327 } | 333 } |
| 328 inited_ = true; | 334 inited_ = true; |
| 329 | 335 |
| 330 switch (codecType_) { | 336 switch (codecType_) { |
| 331 case kVideoCodecVP8: | 337 case kVideoCodecVP8: |
| 332 max_pending_frames_ = kMaxPendingFramesVp8; | 338 max_pending_frames_ = kMaxPendingFramesVp8; |
| (...skipping 21 matching lines...) | |
| 354 for (size_t i = 0; i < num_input_buffers; ++i) { | 360 for (size_t i = 0; i < num_input_buffers; ++i) { |
| 355 input_buffers_[i] = | 361 input_buffers_[i] = |
| 356 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i)); | 362 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i)); |
| 357 if (CheckException(jni)) { | 363 if (CheckException(jni)) { |
| 358 ALOGE << "NewGlobalRef error - fallback to SW codec."; | 364 ALOGE << "NewGlobalRef error - fallback to SW codec."; |
| 359 sw_fallback_required_ = true; | 365 sw_fallback_required_ = true; |
| 360 return WEBRTC_VIDEO_CODEC_ERROR; | 366 return WEBRTC_VIDEO_CODEC_ERROR; |
| 361 } | 367 } |
| 362 } | 368 } |
| 363 | 369 |
| 364 if (use_surface_) { | |
| 365 jobject surface_texture = GetObjectField( | |
| 366 jni, *j_media_codec_video_decoder_, j_surface_texture_field_); | |
| 367 if (previous_surface_texture_ != NULL) { | |
| 368 jni->DeleteGlobalRef(previous_surface_texture_); | |
| 369 } | |
| 370 previous_surface_texture_ = surface_texture_; | |
| 371 surface_texture_ = jni->NewGlobalRef(surface_texture); | |
| 372 } | |
| 373 codec_thread_->PostDelayed(kMediaCodecPollMs, this); | 370 codec_thread_->PostDelayed(kMediaCodecPollMs, this); |
| 374 | 371 |
| 375 return WEBRTC_VIDEO_CODEC_OK; | 372 return WEBRTC_VIDEO_CODEC_OK; |
| 376 } | 373 } |
| 377 | 374 |
| 378 int32_t MediaCodecVideoDecoder::Release() { | 375 int32_t MediaCodecVideoDecoder::Release() { |
| 379 ALOGD << "DecoderRelease request"; | 376 ALOGD << "DecoderRelease request"; |
| 380 return codec_thread_->Invoke<int32_t>( | 377 return codec_thread_->Invoke<int32_t>( |
| 381 Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this)); | 378 Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this)); |
| 382 } | 379 } |
| 383 | 380 |
| 384 int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() { | 381 int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() { |
| 385 if (!inited_) { | 382 if (!inited_) { |
| 386 return WEBRTC_VIDEO_CODEC_OK; | 383 return WEBRTC_VIDEO_CODEC_OK; |
| 387 } | 384 } |
| 388 CheckOnCodecThread(); | 385 CheckOnCodecThread(); |
| 389 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 386 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 390 ALOGD << "DecoderReleaseOnCodecThread: Frames received: " << frames_received_; | 387 ALOGD << "DecoderReleaseOnCodecThread: Frames received: " << frames_received_; |
| 391 ScopedLocalRefFrame local_ref_frame(jni); | 388 ScopedLocalRefFrame local_ref_frame(jni); |
| 392 for (size_t i = 0; i < input_buffers_.size(); i++) { | 389 for (size_t i = 0; i < input_buffers_.size(); i++) { |
| 393 jni->DeleteGlobalRef(input_buffers_[i]); | 390 jni->DeleteGlobalRef(input_buffers_[i]); |
| 394 } | 391 } |
| 395 input_buffers_.clear(); | 392 input_buffers_.clear(); |
| 396 jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_); | 393 jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_); |
| 394 surface_texture_helper_ = nullptr; | |
| 397 inited_ = false; | 395 inited_ = false; |
| 398 rtc::MessageQueueManager::Clear(this); | 396 rtc::MessageQueueManager::Clear(this); |
| 399 if (CheckException(jni)) { | 397 if (CheckException(jni)) { |
| 400 ALOGE << "Decoder release exception"; | 398 ALOGE << "Decoder release exception"; |
| 401 return WEBRTC_VIDEO_CODEC_ERROR; | 399 return WEBRTC_VIDEO_CODEC_ERROR; |
| 402 } | 400 } |
| 403 ALOGD << "DecoderReleaseOnCodecThread done"; | 401 ALOGD << "DecoderReleaseOnCodecThread done"; |
| 404 return WEBRTC_VIDEO_CODEC_OK; | 402 return WEBRTC_VIDEO_CODEC_OK; |
| 405 } | 403 } |
| 406 | 404 |
| (...skipping 87 matching lines...) | |
| 494 | 492 |
| 495 int32_t MediaCodecVideoDecoder::DecodeOnCodecThread( | 493 int32_t MediaCodecVideoDecoder::DecodeOnCodecThread( |
| 496 const EncodedImage& inputImage) { | 494 const EncodedImage& inputImage) { |
| 497 CheckOnCodecThread(); | 495 CheckOnCodecThread(); |
| 498 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 496 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 499 ScopedLocalRefFrame local_ref_frame(jni); | 497 ScopedLocalRefFrame local_ref_frame(jni); |
| 500 | 498 |
| 501 // Try to drain the decoder and wait until output is not too | 499 // Try to drain the decoder and wait until output is not too |
| 502 // much behind the input. | 500 // much behind the input. |
| 503 if (frames_received_ > frames_decoded_ + max_pending_frames_) { | 501 if (frames_received_ > frames_decoded_ + max_pending_frames_) { |
| 504 ALOGV("Received: %d. Decoded: %d. Wait for output...", | 502 ALOGD << "Received: " << frames_received_ << ". Decoded: " |
| 505 frames_received_, frames_decoded_); | 503 << frames_decoded_ << ". Wait for output..."; |
| 506 if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs * 1000)) { | 504 if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs)) { |
| 507 ALOGE << "DeliverPendingOutputs error"; | 505 ALOGE << "DeliverPendingOutputs error"; |
| 508 return ProcessHWErrorOnCodecThread(); | 506 return ProcessHWErrorOnCodecThread(); |
| 509 } | 507 } |
| 510 if (frames_received_ > frames_decoded_ + max_pending_frames_) { | 508 if (frames_received_ > frames_decoded_ + max_pending_frames_) { |
| 511 ALOGE << "Output buffer dequeue timeout"; | 509 ALOGE << "Output buffer dequeue timeout"; |
| 512 return ProcessHWErrorOnCodecThread(); | 510 return ProcessHWErrorOnCodecThread(); |
| 513 } | 511 } |
| 514 } | 512 } |
| 515 | 513 |
| 516 // Get input buffer. | 514 // Get input buffer. |
| (...skipping 42 matching lines...) | |
| 559 // Try to drain the decoder | 557 // Try to drain the decoder |
| 560 if (!DeliverPendingOutputs(jni, 0)) { | 558 if (!DeliverPendingOutputs(jni, 0)) { |
| 561 ALOGE << "DeliverPendingOutputs error"; | 559 ALOGE << "DeliverPendingOutputs error"; |
| 562 return ProcessHWErrorOnCodecThread(); | 560 return ProcessHWErrorOnCodecThread(); |
| 563 } | 561 } |
| 564 | 562 |
| 565 return WEBRTC_VIDEO_CODEC_OK; | 563 return WEBRTC_VIDEO_CODEC_OK; |
| 566 } | 564 } |
| 567 | 565 |
| 568 bool MediaCodecVideoDecoder::DeliverPendingOutputs( | 566 bool MediaCodecVideoDecoder::DeliverPendingOutputs( |
| 569 JNIEnv* jni, int dequeue_timeout_us) { | 567 JNIEnv* jni, int dequeue_timeout_ms) { |
| 570 if (frames_received_ <= frames_decoded_) { | 568 if (frames_received_ <= frames_decoded_) { |
| 571 // No need to query for output buffers - decoder is drained. | 569 // No need to query for output buffers - decoder is drained. |
| 572 return true; | 570 return true; |
| 573 } | 571 } |
| 574 // Get decoder output. | 572 // Get decoder output. |
| 575 jobject j_decoder_output_buffer = jni->CallObjectMethod( | 573 jobject j_decoder_output_buffer = |
| **magjed_webrtc** 2015/10/29 09:44:12: You can write this like: jobject j_decoder_output_ | |
| **perkj_webrtc** 2015/10/29 19:26:44: Done. | |
| *(See the first sketch after the diff for one reading of this suggestion.)* | |
| 574 (use_surface_ ? | |
| 575 jni->CallObjectMethod( | |
| 576 *j_media_codec_video_decoder_, | |
| 577 j_dequeue_texture_buffer_method_, | |
| 578 dequeue_timeout_ms) : | |
| 579 jni->CallObjectMethod( | |
| 576 *j_media_codec_video_decoder_, | 580 *j_media_codec_video_decoder_, |
| 577 j_dequeue_output_buffer_method_, | 581 j_dequeue_byte_buffer_method_, |
| 578 dequeue_timeout_us); | 582 dequeue_timeout_ms)); |
| 579 if (CheckException(jni)) { | 583 if (CheckException(jni)) { |
| 580 ALOGE << "dequeueOutputBuffer() error"; | 584 ALOGE << "dequeueOutputBuffer() error"; |
| 581 return false; | 585 return false; |
| 582 } | 586 } |
| 583 if (IsNull(jni, j_decoder_output_buffer)) { | 587 if (IsNull(jni, j_decoder_output_buffer)) { |
| 584 // No decoded frame ready. | 588 // No decoded frame ready. |
| 585 return true; | 589 return true; |
| 586 } | 590 } |
| 587 | 591 |
| 588 // Get decoded video frame properties. | |
| 589 int color_format = GetIntField(jni, *j_media_codec_video_decoder_, | |
| 590 j_color_format_field_); | |
| 591 int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_); | |
| 592 int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_); | |
| 593 int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_); | |
| 594 int slice_height = GetIntField(jni, *j_media_codec_video_decoder_, | |
| 595 j_slice_height_field_); | |
| 596 | |
| 597 rtc::scoped_refptr<webrtc::VideoFrameBuffer> frame_buffer; | 592 rtc::scoped_refptr<webrtc::VideoFrameBuffer> frame_buffer; |
| 598 long output_timestamps_ms = 0; | 593 long output_timestamps_ms = 0; |
| 594 int decode_time_ms = 0; | |
| 599 if (use_surface_) { | 595 if (use_surface_) { |
| 600 // Extract data from Java DecodedTextureBuffer. | 596 // Extract data from Java DecodedTextureBuffer. |
| 601 const int texture_id = | 597 const int texture_id = |
| 602 GetIntField(jni, j_decoder_output_buffer, j_textureID_field_); | 598 GetIntField(jni, j_decoder_output_buffer, j_textureID_field_); |
| 603 const int64_t timestamp_us = | 599 const jfloatArray j_transform_matrix = |
| 604 GetLongField(jni, j_decoder_output_buffer, | 600 reinterpret_cast<jfloatArray>(GetObjectField( |
| 605 j_texture_presentation_timestamp_us_field_); | 601 jni, j_decoder_output_buffer, j_transform_matrix_field_)); |
| 606 output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec; | 602 const int64_t timestamp_ns = GetLongField(jni, j_decoder_output_buffer, |
| 603 j_texture_timestamp_ns_field_); | |
| 604 output_timestamps_ms = timestamp_ns / rtc::kNumNanosecsPerMillisec; | |
| 605 | |
| 606 decode_time_ms = GetLongField(jni, j_decoder_output_buffer, | |
| 607 j_texture_decode_time_ms_field_); | |
| 608 const int width = GetIntField(jni, j_decoder_output_buffer, | |
| 609 j_texture_width_field_); | |
| 610 const int height = GetIntField(jni, j_decoder_output_buffer, | |
| 611 j_texture_height_field_); | |
| 612 | |
| 607 // Create webrtc::VideoFrameBuffer with native texture handle. | 613 // Create webrtc::VideoFrameBuffer with native texture handle. |
| 608 native_handle_.SetTextureObject(surface_texture_, texture_id); | 614 frame_buffer = surface_texture_helper_->CreateTextureFrame( |
| 609 frame_buffer = new rtc::RefCountedObject<JniNativeHandleBuffer>( | 615 width, height, NativeHandleImpl(jni, texture_id, j_transform_matrix)); |
| 610 &native_handle_, width, height); | |
| 611 } else { | 616 } else { |
| 617 // Get decoded video frame properties. | |
| 618 const int color_format = GetIntField(jni, *j_media_codec_video_decoder_, | |
| 619 j_color_format_field_); | |
| 620 const int width = | |
| 621 GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_); | |
| 622 const int height = | |
| 623 GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_); | |
| 624 const int stride = | |
| 625 GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_); | |
| 626 const int slice_height = GetIntField(jni, *j_media_codec_video_decoder_, | |
| 627 j_slice_height_field_); | |
| 612 // Extract data from Java ByteBuffer and create output yuv420 frame - | 628 // Extract data from Java ByteBuffer and create output yuv420 frame - |
| 613 // for non surface decoding only. | 629 // for non surface decoding only. |
| 614 const int output_buffer_index = | 630 const int output_buffer_index = |
| 615 GetIntField(jni, j_decoder_output_buffer, j_info_index_field_); | 631 GetIntField(jni, j_decoder_output_buffer, j_info_index_field_); |
| 616 const int output_buffer_offset = | 632 const int output_buffer_offset = |
| 617 GetIntField(jni, j_decoder_output_buffer, j_info_offset_field_); | 633 GetIntField(jni, j_decoder_output_buffer, j_info_offset_field_); |
| 618 const int output_buffer_size = | 634 const int output_buffer_size = |
| 619 GetIntField(jni, j_decoder_output_buffer, j_info_size_field_); | 635 GetIntField(jni, j_decoder_output_buffer, j_info_size_field_); |
| 620 const int64_t timestamp_us = GetLongField( | 636 const int64_t timestamp_us = GetLongField( |
| 621 jni, j_decoder_output_buffer, j_info_presentation_timestamp_us_field_); | 637 jni, j_decoder_output_buffer, j_info_presentation_timestamp_us_field_); |
| 622 output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec; | 638 output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec; |
| 623 | 639 decode_time_ms = (!frame_rtc_times_ms_.empty()) ? |
| 640 GetCurrentTimeMs() - frame_rtc_times_ms_.front() : | |
| 641 0; | |
| 624 if (output_buffer_size < width * height * 3 / 2) { | 642 if (output_buffer_size < width * height * 3 / 2) { |
| 625 ALOGE << "Insufficient output buffer size: " << output_buffer_size; | 643 ALOGE << "Insufficient output buffer size: " << output_buffer_size; |
| 626 return false; | 644 return false; |
| 627 } | 645 } |
| 628 jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField( | 646 jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField( |
| 629 jni, *j_media_codec_video_decoder_, j_output_buffers_field_)); | 647 jni, *j_media_codec_video_decoder_, j_output_buffers_field_)); |
| 630 jobject output_buffer = | 648 jobject output_buffer = |
| 631 jni->GetObjectArrayElement(output_buffers, output_buffer_index); | 649 jni->GetObjectArrayElement(output_buffers, output_buffer_index); |
| 632 uint8_t* payload = reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress( | 650 uint8_t* payload = reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress( |
| 633 output_buffer)); | 651 output_buffer)); |
| (...skipping 51 matching lines...) | |
| 685 | 703 |
| 686 // Get frame timestamps from a queue. | 704 // Get frame timestamps from a queue. |
| 687 if (timestamps_.size() > 0) { | 705 if (timestamps_.size() > 0) { |
| 688 decoded_frame.set_timestamp(timestamps_.front()); | 706 decoded_frame.set_timestamp(timestamps_.front()); |
| 689 timestamps_.erase(timestamps_.begin()); | 707 timestamps_.erase(timestamps_.begin()); |
| 690 } | 708 } |
| 691 if (ntp_times_ms_.size() > 0) { | 709 if (ntp_times_ms_.size() > 0) { |
| 692 decoded_frame.set_ntp_time_ms(ntp_times_ms_.front()); | 710 decoded_frame.set_ntp_time_ms(ntp_times_ms_.front()); |
| 693 ntp_times_ms_.erase(ntp_times_ms_.begin()); | 711 ntp_times_ms_.erase(ntp_times_ms_.begin()); |
| 694 } | 712 } |
| 695 int64_t frame_decoding_time_ms = 0; | 713 int64_t frame_delayed_ms = 0; |
| 696 if (frame_rtc_times_ms_.size() > 0) { | 714 if (frame_rtc_times_ms_.size() > 0) { |
| 697 frame_decoding_time_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front(); | 715 frame_delayed_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front(); |
| 698 frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin()); | 716 frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin()); |
| 699 } | 717 } |
| 700 ALOGV("Decoder frame out # %d. %d x %d. %d x %d. Color: 0x%x. TS: %ld." | 718 ALOGV("Decoder frame out # %d. %d x %d. %d x %d. Color: 0x%x. TS: %ld." |
| **magjed_webrtc** 2015/10/29 09:44:12: \|slice_height\| etc is not defined here anymore. Ma | |
| **perkj_webrtc** 2015/10/29 19:26:44: Changed back to read width and height as fields fr | |
| *(See the second sketch after the diff.)* | |
| 702 color_format, output_timestamps_ms, frame_decoding_time_ms); | 720 stride, slice_height, color_format, output_timestamps_ms, decode_time_ms, |
| 721 frame_delayed_ms); | |
| 703 | 722 |
| 704 // Calculate and print decoding statistics - every 3 seconds. | 723 // Calculate and print decoding statistics - every 3 seconds. |
| 705 frames_decoded_++; | 724 frames_decoded_++; |
| 706 current_frames_++; | 725 current_frames_++; |
| 707 current_decoding_time_ms_ += frame_decoding_time_ms; | 726 current_decoding_time_ms_ += decode_time_ms; |
| 708 int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_; | 727 int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_; |
| 709 if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs && | 728 if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs && |
| 710 current_frames_ > 0) { | 729 current_frames_ > 0) { |
| 711 ALOGD << "Decoded frames: " << frames_decoded_ << ". Bitrate: " << | 730 ALOGD << "Decoded frames: " << frames_decoded_ << ". Received frames: " |
| 731 << frames_received_ << ". Bitrate: " << | |
| 712 (current_bytes_ * 8 / statistic_time_ms) << " kbps, fps: " << | 732 (current_bytes_ * 8 / statistic_time_ms) << " kbps, fps: " << |
| 713 ((current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms) | 733 ((current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms) |
| 714 << ". decTime: " << (current_decoding_time_ms_ / current_frames_) << | 734 << ". decTime: " << (current_decoding_time_ms_ / current_frames_) << |
| 715 " for last " << statistic_time_ms << " ms."; | 735 " for last " << statistic_time_ms << " ms."; |
| 716 start_time_ms_ = GetCurrentTimeMs(); | 736 start_time_ms_ = GetCurrentTimeMs(); |
| 717 current_frames_ = 0; | 737 current_frames_ = 0; |
| 718 current_bytes_ = 0; | 738 current_bytes_ = 0; |
| 719 current_decoding_time_ms_ = 0; | 739 current_decoding_time_ms_ = 0; |
| 720 } | 740 } |
| 721 | 741 |
| 722 // Callback - output decoded frame. | 742 // Callback - output decoded frame. |
| 723 const int32_t callback_status = callback_->Decoded(decoded_frame); | 743 const int32_t callback_status = |
| 744 callback_->Decoded(decoded_frame, decode_time_ms); | |
| 724 if (callback_status > 0) { | 745 if (callback_status > 0) { |
| 725 ALOGE << "callback error"; | 746 ALOGE << "callback error"; |
| 726 } | 747 } |
| 727 | 748 |
| 728 return true; | 749 return true; |
| 729 } | 750 } |
| 730 | 751 |
| 731 int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback( | 752 int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback( |
| 732 DecodedImageCallback* callback) { | 753 DecodedImageCallback* callback) { |
| 733 callback_ = callback; | 754 callback_ = callback; |
| (...skipping 112 matching lines...) | |
| 846 } | 867 } |
| 847 | 868 |
| 848 void MediaCodecVideoDecoderFactory::DestroyVideoDecoder( | 869 void MediaCodecVideoDecoderFactory::DestroyVideoDecoder( |
| 849 webrtc::VideoDecoder* decoder) { | 870 webrtc::VideoDecoder* decoder) { |
| 850 ALOGD << "Destroy video decoder."; | 871 ALOGD << "Destroy video decoder."; |
| 851 delete decoder; | 872 delete decoder; |
| 852 } | 873 } |
| 853 | 874 |
| 854 } // namespace webrtc_jni | 875 } // namespace webrtc_jni |
| 855 | 876 |
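
Two short sketches follow, expanding on the review comments above. The first concerns magjed_webrtc's truncated suggestion at the `dequeueTextureBuffer`/`dequeueByteBuffer` call in `DeliverPendingOutputs`: instead of repeating `CallObjectMethod` in both arms of a conditional, the `jmethodID` can be chosen first and the JNI call made once. This is a minimal sketch of one plausible reading of that comment, not the code that actually landed; the member names (`use_surface_`, `j_dequeue_texture_buffer_method_`, `j_dequeue_byte_buffer_method_`, `j_media_codec_video_decoder_`) are taken from the patch, and the exact form committed after "Done." may differ.

```cpp
// Sketch only (fragment of DeliverPendingOutputs): pick the jmethodID up
// front so CallObjectMethod appears exactly once.
const jmethodID dequeue_method = use_surface_
    ? j_dequeue_texture_buffer_method_
    : j_dequeue_byte_buffer_method_;
jobject j_decoder_output_buffer = jni->CallObjectMethod(
    *j_media_codec_video_decoder_, dequeue_method, dequeue_timeout_ms);
if (CheckException(jni)) {
  ALOGE << "dequeueOutputBuffer() error";
  return false;
}
```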
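
The second sketch relates to the comment that `slice_height` (and the other frame properties) are no longer defined at the `ALOGV` line once they were moved into the byte-buffer branch. perkj_webrtc's reply ("Changed back to read width and height as fields fr…") points at re-reading the values from the Java decoder object just before logging. A hedged sketch of that idea, reusing the field IDs already cached in the constructor; the `log_*` local names are illustrative, and whether the final patch logs all of these values is not shown here.

```cpp
// Sketch only: re-read the frame properties from the Java decoder object so
// the statistics log compiles for both the texture and byte-buffer paths.
const int log_width =
    GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_);
const int log_height =
    GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_);
const int log_stride =
    GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_);
const int log_slice_height =
    GetIntField(jni, *j_media_codec_video_decoder_, j_slice_height_field_);
const int log_color_format =
    GetIntField(jni, *j_media_codec_video_decoder_, j_color_format_field_);
ALOGV("Decoder frame out # %d. %d x %d. %d x %d. Color: 0x%x. TS: %ld."
      " DecodeTime: %lld. DelayTime: %lld.", frames_decoded_, log_width,
      log_height, log_stride, log_slice_height, log_color_format,
      output_timestamps_ms, static_cast<long long>(decode_time_ms),
      static_cast<long long>(frame_delayed_ms));
```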