OLD | NEW |
---|---|
1 /* | 1 /* |
2 * libjingle | 2 * libjingle |
3 * Copyright 2015 Google Inc. | 3 * Copyright 2015 Google Inc. |
4 * | 4 * |
5 * Redistribution and use in source and binary forms, with or without | 5 * Redistribution and use in source and binary forms, with or without |
6 * modification, are permitted provided that the following conditions are met: | 6 * modification, are permitted provided that the following conditions are met: |
7 * | 7 * |
8 * 1. Redistributions of source code must retain the above copyright notice, | 8 * 1. Redistributions of source code must retain the above copyright notice, |
9 * this list of conditions and the following disclaimer. | 9 * this list of conditions and the following disclaimer. |
10 * 2. Redistributions in binary form must reproduce the above copyright notice, | 10 * 2. Redistributions in binary form must reproduce the above copyright notice, |
(...skipping 18 matching lines...) Expand all Loading... | |
29 #include <vector> | 29 #include <vector> |
30 | 30 |
31 #include "talk/app/webrtc/java/jni/androidmediadecoder_jni.h" | 31 #include "talk/app/webrtc/java/jni/androidmediadecoder_jni.h" |
32 #include "talk/app/webrtc/java/jni/androidmediacodeccommon.h" | 32 #include "talk/app/webrtc/java/jni/androidmediacodeccommon.h" |
33 #include "talk/app/webrtc/java/jni/classreferenceholder.h" | 33 #include "talk/app/webrtc/java/jni/classreferenceholder.h" |
34 #include "talk/app/webrtc/java/jni/native_handle_impl.h" | 34 #include "talk/app/webrtc/java/jni/native_handle_impl.h" |
35 #include "webrtc/base/bind.h" | 35 #include "webrtc/base/bind.h" |
36 #include "webrtc/base/checks.h" | 36 #include "webrtc/base/checks.h" |
37 #include "webrtc/base/logging.h" | 37 #include "webrtc/base/logging.h" |
38 #include "webrtc/base/thread.h" | 38 #include "webrtc/base/thread.h" |
39 #include "webrtc/common_video/interface/video_frame_buffer.h" | |
39 #include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h" | 40 #include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h" |
40 #include "webrtc/system_wrappers/interface/logcat_trace_context.h" | 41 #include "webrtc/system_wrappers/interface/logcat_trace_context.h" |
41 #include "webrtc/system_wrappers/interface/tick_util.h" | 42 #include "webrtc/system_wrappers/interface/tick_util.h" |
42 #include "third_party/libyuv/include/libyuv/convert.h" | 43 #include "third_party/libyuv/include/libyuv/convert.h" |
43 #include "third_party/libyuv/include/libyuv/convert_from.h" | 44 #include "third_party/libyuv/include/libyuv/convert_from.h" |
44 #include "third_party/libyuv/include/libyuv/video_common.h" | 45 #include "third_party/libyuv/include/libyuv/video_common.h" |
45 | 46 |
46 using rtc::Bind; | 47 using rtc::Bind; |
47 using rtc::Thread; | 48 using rtc::Thread; |
48 using rtc::ThreadManager; | 49 using rtc::ThreadManager; |
(...skipping 30 matching lines...) Expand all Loading... | |
79 | 80 |
80 int32_t RegisterDecodeCompleteCallback(DecodedImageCallback* callback) | 81 int32_t RegisterDecodeCompleteCallback(DecodedImageCallback* callback) |
81 override; | 82 override; |
82 | 83 |
83 int32_t Release() override; | 84 int32_t Release() override; |
84 | 85 |
85 int32_t Reset() override; | 86 int32_t Reset() override; |
86 // rtc::MessageHandler implementation. | 87 // rtc::MessageHandler implementation. |
87 void OnMessage(rtc::Message* msg) override; | 88 void OnMessage(rtc::Message* msg) override; |
88 | 89 |
90 void OnTextureFrame(int width, | |
91 int height, | |
92 int64_t timestamp_ns, | |
93 const NativeHandleImpl& native_handle); | |
94 void ReturnTextureFrame(); | |
95 | |
89 private: | 96 private: |
90 // CHECK-fail if not running on |codec_thread_|. | 97 // CHECK-fail if not running on |codec_thread_|. |
91 void CheckOnCodecThread(); | 98 void CheckOnCodecThread(); |
92 | 99 |
93 int32_t InitDecodeOnCodecThread(); | 100 int32_t InitDecodeOnCodecThread(); |
94 int32_t ReleaseOnCodecThread(); | 101 int32_t ReleaseOnCodecThread(); |
95 int32_t DecodeOnCodecThread(const EncodedImage& inputImage); | 102 int32_t DecodeOnCodecThread(const EncodedImage& inputImage); |
96 // Deliver any outputs pending in the MediaCodec to our |callback_| and return | 103 // Deliver any outputs pending in the MediaCodec to our |callback_| and return |
97 // true on success. | 104 // true on success. |
98 bool DeliverPendingOutputs(JNIEnv* jni, int dequeue_timeout_us); | 105 bool DeliverPendingOutputs(JNIEnv* jni, int dequeue_timeout_us); |
106 void DeliverFrame(VideoFrame* frame); | |
107 void OnTextureFrameOnCodecThread(int width, | |
108 int height, | |
109 int64_t timestamp_ns, | |
110 const NativeHandleImpl& native_handle); | |
111 void ReturnTextureFrameOnCodecThread(); | |
99 int32_t ProcessHWErrorOnCodecThread(); | 112 int32_t ProcessHWErrorOnCodecThread(); |
100 | 113 |
101 // Type of video codec. | 114 // Type of video codec. |
102 VideoCodecType codecType_; | 115 VideoCodecType codecType_; |
103 | 116 |
104 bool key_frame_required_; | 117 bool key_frame_required_; |
105 bool inited_; | 118 bool inited_; |
106 bool sw_fallback_required_; | 119 bool sw_fallback_required_; |
107 bool use_surface_; | 120 bool use_surface_; |
108 VideoCodec codec_; | 121 VideoCodec codec_; |
122 // TODO(magjed): Use frame pool instead of |decoded_image_|. | |
perkj_webrtc
2015/09/22 12:40:37
I would skip this comment or do it now.
| |
109 VideoFrame decoded_image_; | 123 VideoFrame decoded_image_; |
110 NativeHandleImpl native_handle_; | |
111 DecodedImageCallback* callback_; | 124 DecodedImageCallback* callback_; |
112 int frames_received_; // Number of frames received by decoder. | 125 int frames_received_; // Number of frames received by decoder. |
113 int frames_decoded_; // Number of frames decoded by decoder. | 126 int frames_decoded_; // Number of frames decoded by decoder. |
114 int64_t start_time_ms_; // Start time for statistics. | 127 int64_t start_time_ms_; // Start time for statistics. |
115 int current_frames_; // Number of frames in the current statistics interval. | 128 int current_frames_; // Number of frames in the current statistics interval. |
116 int current_bytes_; // Encoded bytes in the current statistics interval. | 129 int current_bytes_; // Encoded bytes in the current statistics interval. |
117 int current_decoding_time_ms_; // Overall decoding time in the current second | 130 int current_decoding_time_ms_; // Overall decoding time in the current second |
118 uint32_t max_pending_frames_; // Maximum number of pending input frames | 131 uint32_t max_pending_frames_; // Maximum number of pending input frames |
119 std::vector<int32_t> timestamps_; | 132 std::vector<int32_t> timestamps_; |
120 std::vector<int64_t> ntp_times_ms_; | 133 std::vector<int64_t> ntp_times_ms_; |
121 std::vector<int64_t> frame_rtc_times_ms_; // Time when video frame is sent to | 134 std::vector<int64_t> frame_rtc_times_ms_; // Time when video frame is sent to |
122 // decoder input. | 135 // decoder input. |
123 int32_t output_timestamp_; // Last output frame timestamp from timestamps_ Q. | |
124 int64_t output_ntp_time_ms_; // Last output frame ntp time from | |
125 // ntp_times_ms_ queue. | |
126 | 136 |
127 // State that is constant for the lifetime of this object once the ctor | 137 // State that is constant for the lifetime of this object once the ctor |
128 // returns. | 138 // returns. |
129 scoped_ptr<Thread> codec_thread_; // Thread on which to operate MediaCodec. | 139 scoped_ptr<Thread> codec_thread_; // Thread on which to operate MediaCodec. |
130 ScopedGlobalRef<jclass> j_media_codec_video_decoder_class_; | 140 ScopedGlobalRef<jclass> j_media_codec_video_decoder_class_; |
131 ScopedGlobalRef<jobject> j_media_codec_video_decoder_; | 141 ScopedGlobalRef<jobject> j_media_codec_video_decoder_; |
132 jmethodID j_init_decode_method_; | 142 jmethodID j_init_decode_method_; |
133 jmethodID j_release_method_; | 143 jmethodID j_release_method_; |
134 jmethodID j_dequeue_input_buffer_method_; | 144 jmethodID j_dequeue_input_buffer_method_; |
135 jmethodID j_queue_input_buffer_method_; | 145 jmethodID j_queue_input_buffer_method_; |
136 jmethodID j_dequeue_output_buffer_method_; | 146 jmethodID j_dequeue_output_buffer_method_; |
147 jmethodID j_return_texture_frame_method_; | |
137 jmethodID j_release_output_buffer_method_; | 148 jmethodID j_release_output_buffer_method_; |
138 // MediaCodecVideoDecoder fields. | 149 // MediaCodecVideoDecoder fields. |
139 jfieldID j_input_buffers_field_; | 150 jfieldID j_input_buffers_field_; |
140 jfieldID j_output_buffers_field_; | 151 jfieldID j_output_buffers_field_; |
141 jfieldID j_color_format_field_; | 152 jfieldID j_color_format_field_; |
142 jfieldID j_width_field_; | 153 jfieldID j_width_field_; |
143 jfieldID j_height_field_; | 154 jfieldID j_height_field_; |
144 jfieldID j_stride_field_; | 155 jfieldID j_stride_field_; |
145 jfieldID j_slice_height_field_; | 156 jfieldID j_slice_height_field_; |
146 jfieldID j_surface_texture_field_; | |
147 jfieldID j_textureID_field_; | |
148 // MediaCodecVideoDecoder.DecoderOutputBufferInfo fields. | 157 // MediaCodecVideoDecoder.DecoderOutputBufferInfo fields. |
149 jfieldID j_info_index_field_; | 158 jfieldID j_info_index_field_; |
150 jfieldID j_info_offset_field_; | 159 jfieldID j_info_offset_field_; |
151 jfieldID j_info_size_field_; | 160 jfieldID j_info_size_field_; |
152 jfieldID j_info_presentation_timestamp_us_field_; | 161 jfieldID j_info_presentation_timestamp_us_field_; |
153 | 162 |
154 // Global references; must be deleted in Release(). | 163 // Global references; must be deleted in Release(). |
155 std::vector<jobject> input_buffers_; | 164 std::vector<jobject> input_buffers_; |
156 jobject surface_texture_; | |
157 jobject previous_surface_texture_; | |
158 | 165 |
159 // Render EGL context - owned by factory, should not be allocated/destroyed | 166 // Render EGL context - owned by factory, should not be allocated/destroyed |
160 // by VideoDecoder. | 167 // by VideoDecoder. |
161 jobject render_egl_context_; | 168 jobject render_egl_context_; |
162 }; | 169 }; |
163 | 170 |
171 namespace { | |
172 | |
173 class AndroidTextureBuffer : public webrtc::NativeHandleBuffer { | |
174 public: | |
175 AndroidTextureBuffer(int width, | |
176 int height, | |
177 MediaCodecVideoDecoder* decoder, | |
178 const NativeHandleImpl& native_handle) | |
179 : webrtc::NativeHandleBuffer(&native_handle_, width, height), | |
180 native_handle_(native_handle), | |
181 decoder_(decoder) {} | |
182 | |
183 ~AndroidTextureBuffer() { | |
184 decoder_->ReturnTextureFrame(); | |
185 } | |
186 | |
187 rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override { | |
188 // TODO(magjed): Implement this. | |
perkj_webrtc
2015/09/22 12:40:37
CHECK(false) - should never be called.
| |
189 return nullptr; | |
190 } | |
191 | |
192 private: | |
193 NativeHandleImpl native_handle_; | |
194 MediaCodecVideoDecoder*const decoder_; | |
perkj_webrtc
2015/09/22 12:40:37
There is nothing that guarantees the lifetime of de
| |
195 }; | |
196 | |
197 } // anonymous namespace | |
198 | |
164 MediaCodecVideoDecoder::MediaCodecVideoDecoder( | 199 MediaCodecVideoDecoder::MediaCodecVideoDecoder( |
165 JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context) : | 200 JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context) : |
166 codecType_(codecType), | 201 codecType_(codecType), |
167 render_egl_context_(render_egl_context), | 202 render_egl_context_(render_egl_context), |
168 key_frame_required_(true), | 203 key_frame_required_(true), |
169 inited_(false), | 204 inited_(false), |
170 sw_fallback_required_(false), | 205 sw_fallback_required_(false), |
171 surface_texture_(NULL), | |
172 previous_surface_texture_(NULL), | |
173 codec_thread_(new Thread()), | 206 codec_thread_(new Thread()), |
174 j_media_codec_video_decoder_class_( | 207 j_media_codec_video_decoder_class_( |
175 jni, | 208 jni, |
176 FindClass(jni, "org/webrtc/MediaCodecVideoDecoder")), | 209 FindClass(jni, "org/webrtc/MediaCodecVideoDecoder")), |
177 j_media_codec_video_decoder_( | 210 j_media_codec_video_decoder_( |
178 jni, | 211 jni, |
179 jni->NewObject(*j_media_codec_video_decoder_class_, | 212 jni->NewObject(*j_media_codec_video_decoder_class_, |
180 GetMethodID(jni, | 213 GetMethodID(jni, |
181 *j_media_codec_video_decoder_class_, | 214 *j_media_codec_video_decoder_class_, |
182 "<init>", | 215 "<init>", |
183 "()V"))) { | 216 "()V"))) { |
184 ScopedLocalRefFrame local_ref_frame(jni); | 217 ScopedLocalRefFrame local_ref_frame(jni); |
185 codec_thread_->SetName("MediaCodecVideoDecoder", NULL); | 218 codec_thread_->SetName("MediaCodecVideoDecoder", NULL); |
186 RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoDecoder"; | 219 RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoDecoder"; |
187 | 220 |
188 j_init_decode_method_ = GetMethodID( | 221 j_init_decode_method_ = GetMethodID( |
189 jni, *j_media_codec_video_decoder_class_, "initDecode", | 222 jni, *j_media_codec_video_decoder_class_, "initDecode", |
190 "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;" | 223 "(JLorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;" |
191 "IILandroid/opengl/EGLContext;)Z"); | 224 "IILandroid/opengl/EGLContext;)Z"); |
192 j_release_method_ = | 225 j_release_method_ = |
193 GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V"); | 226 GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V"); |
194 j_dequeue_input_buffer_method_ = GetMethodID( | 227 j_dequeue_input_buffer_method_ = GetMethodID( |
195 jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I"); | 228 jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I"); |
196 j_queue_input_buffer_method_ = GetMethodID( | 229 j_queue_input_buffer_method_ = GetMethodID( |
197 jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJ)Z"); | 230 jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJ)Z"); |
198 j_dequeue_output_buffer_method_ = GetMethodID( | 231 j_dequeue_output_buffer_method_ = GetMethodID( |
199 jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer", | 232 jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer", |
200 "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo;"); | 233 "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo;"); |
234 j_return_texture_frame_method_ = GetMethodID( | |
235 jni, *j_media_codec_video_decoder_class_, "returnTextureFrame", "()V"); | |
201 j_release_output_buffer_method_ = GetMethodID( | 236 j_release_output_buffer_method_ = GetMethodID( |
202 jni, *j_media_codec_video_decoder_class_, "releaseOutputBuffer", "(I)Z"); | 237 jni, *j_media_codec_video_decoder_class_, "releaseOutputBuffer", "(I)Z"); |
203 | 238 |
204 j_input_buffers_field_ = GetFieldID( | 239 j_input_buffers_field_ = GetFieldID( |
205 jni, *j_media_codec_video_decoder_class_, | 240 jni, *j_media_codec_video_decoder_class_, |
206 "inputBuffers", "[Ljava/nio/ByteBuffer;"); | 241 "inputBuffers", "[Ljava/nio/ByteBuffer;"); |
207 j_output_buffers_field_ = GetFieldID( | 242 j_output_buffers_field_ = GetFieldID( |
208 jni, *j_media_codec_video_decoder_class_, | 243 jni, *j_media_codec_video_decoder_class_, |
209 "outputBuffers", "[Ljava/nio/ByteBuffer;"); | 244 "outputBuffers", "[Ljava/nio/ByteBuffer;"); |
210 j_color_format_field_ = GetFieldID( | 245 j_color_format_field_ = GetFieldID( |
211 jni, *j_media_codec_video_decoder_class_, "colorFormat", "I"); | 246 jni, *j_media_codec_video_decoder_class_, "colorFormat", "I"); |
212 j_width_field_ = GetFieldID( | 247 j_width_field_ = GetFieldID( |
213 jni, *j_media_codec_video_decoder_class_, "width", "I"); | 248 jni, *j_media_codec_video_decoder_class_, "width", "I"); |
214 j_height_field_ = GetFieldID( | 249 j_height_field_ = GetFieldID( |
215 jni, *j_media_codec_video_decoder_class_, "height", "I"); | 250 jni, *j_media_codec_video_decoder_class_, "height", "I"); |
216 j_stride_field_ = GetFieldID( | 251 j_stride_field_ = GetFieldID( |
217 jni, *j_media_codec_video_decoder_class_, "stride", "I"); | 252 jni, *j_media_codec_video_decoder_class_, "stride", "I"); |
218 j_slice_height_field_ = GetFieldID( | 253 j_slice_height_field_ = GetFieldID( |
219 jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I"); | 254 jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I"); |
220 j_textureID_field_ = GetFieldID( | |
221 jni, *j_media_codec_video_decoder_class_, "textureID", "I"); | |
222 j_surface_texture_field_ = GetFieldID( | |
223 jni, *j_media_codec_video_decoder_class_, "surfaceTexture", | |
224 "Landroid/graphics/SurfaceTexture;"); | |
225 | 255 |
226 jclass j_decoder_output_buffer_info_class = FindClass(jni, | 256 jclass j_decoder_output_buffer_info_class = FindClass(jni, |
227 "org/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo"); | 257 "org/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo"); |
228 j_info_index_field_ = GetFieldID( | 258 j_info_index_field_ = GetFieldID( |
229 jni, j_decoder_output_buffer_info_class, "index", "I"); | 259 jni, j_decoder_output_buffer_info_class, "index", "I"); |
230 j_info_offset_field_ = GetFieldID( | 260 j_info_offset_field_ = GetFieldID( |
231 jni, j_decoder_output_buffer_info_class, "offset", "I"); | 261 jni, j_decoder_output_buffer_info_class, "offset", "I"); |
232 j_info_size_field_ = GetFieldID( | 262 j_info_size_field_ = GetFieldID( |
233 jni, j_decoder_output_buffer_info_class, "size", "I"); | 263 jni, j_decoder_output_buffer_info_class, "size", "I"); |
234 j_info_presentation_timestamp_us_field_ = GetFieldID( | 264 j_info_presentation_timestamp_us_field_ = GetFieldID( |
235 jni, j_decoder_output_buffer_info_class, "presentationTimestampUs", "J"); | 265 jni, j_decoder_output_buffer_info_class, "presentationTimestampUs", "J"); |
236 | 266 |
237 CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed"; | 267 CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed"; |
238 use_surface_ = (render_egl_context_ != NULL); | 268 use_surface_ = (render_egl_context_ != NULL); |
239 ALOGD("MediaCodecVideoDecoder ctor. Use surface: %d", use_surface_); | 269 ALOGD("MediaCodecVideoDecoder ctor. Use surface: %d", use_surface_); |
240 memset(&codec_, 0, sizeof(codec_)); | 270 memset(&codec_, 0, sizeof(codec_)); |
241 AllowBlockingCalls(); | 271 AllowBlockingCalls(); |
242 } | 272 } |
243 | 273 |
244 MediaCodecVideoDecoder::~MediaCodecVideoDecoder() { | 274 MediaCodecVideoDecoder::~MediaCodecVideoDecoder() { |
245 // Call Release() to ensure no more callbacks to us after we are deleted. | 275 // Call Release() to ensure no more callbacks to us after we are deleted. |
246 Release(); | 276 Release(); |
247 // Delete global references. | |
248 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | |
249 if (previous_surface_texture_ != NULL) { | |
250 jni->DeleteGlobalRef(previous_surface_texture_); | |
251 } | |
252 if (surface_texture_ != NULL) { | |
253 jni->DeleteGlobalRef(surface_texture_); | |
254 } | |
255 } | 277 } |
256 | 278 |
257 int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst, | 279 int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst, |
258 int32_t numberOfCores) { | 280 int32_t numberOfCores) { |
259 ALOGD("InitDecode."); | 281 ALOGD("InitDecode."); |
260 if (inst == NULL) { | 282 if (inst == NULL) { |
261 ALOGE("NULL VideoCodec instance"); | 283 ALOGE("NULL VideoCodec instance"); |
262 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 284 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
263 } | 285 } |
264 // Factory should guard against other codecs being used with us. | 286 // Factory should guard against other codecs being used with us. |
(...skipping 34 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
299 // Always start with a complete key frame. | 321 // Always start with a complete key frame. |
300 key_frame_required_ = true; | 322 key_frame_required_ = true; |
301 frames_received_ = 0; | 323 frames_received_ = 0; |
302 frames_decoded_ = 0; | 324 frames_decoded_ = 0; |
303 | 325 |
304 jobject j_video_codec_enum = JavaEnumFromIndex( | 326 jobject j_video_codec_enum = JavaEnumFromIndex( |
305 jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_); | 327 jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_); |
306 bool success = jni->CallBooleanMethod( | 328 bool success = jni->CallBooleanMethod( |
307 *j_media_codec_video_decoder_, | 329 *j_media_codec_video_decoder_, |
308 j_init_decode_method_, | 330 j_init_decode_method_, |
331 jlongFromPointer(this), | |
309 j_video_codec_enum, | 332 j_video_codec_enum, |
310 codec_.width, | 333 codec_.width, |
311 codec_.height, | 334 codec_.height, |
312 use_surface_ ? render_egl_context_ : nullptr); | 335 use_surface_ ? render_egl_context_ : nullptr); |
313 if (CheckException(jni) || !success) { | 336 if (CheckException(jni) || !success) { |
314 ALOGE("Codec initialization error - fallback to SW codec."); | 337 ALOGE("Codec initialization error - fallback to SW codec."); |
315 sw_fallback_required_ = true; | 338 sw_fallback_required_ = true; |
316 return WEBRTC_VIDEO_CODEC_ERROR; | 339 return WEBRTC_VIDEO_CODEC_ERROR; |
317 } | 340 } |
318 inited_ = true; | 341 inited_ = true; |
319 | 342 |
320 switch (codecType_) { | 343 switch (codecType_) { |
321 case kVideoCodecVP8: | 344 case kVideoCodecVP8: |
322 max_pending_frames_ = kMaxPendingFramesVp8; | 345 max_pending_frames_ = kMaxPendingFramesVp8; |
323 break; | 346 break; |
324 case kVideoCodecH264: | 347 case kVideoCodecH264: |
325 max_pending_frames_ = kMaxPendingFramesH264; | 348 max_pending_frames_ = kMaxPendingFramesH264; |
326 break; | 349 break; |
327 default: | 350 default: |
328 max_pending_frames_ = 0; | 351 max_pending_frames_ = 0; |
329 } | 352 } |
330 start_time_ms_ = GetCurrentTimeMs(); | 353 start_time_ms_ = GetCurrentTimeMs(); |
331 current_frames_ = 0; | 354 current_frames_ = 0; |
332 current_bytes_ = 0; | 355 current_bytes_ = 0; |
333 current_decoding_time_ms_ = 0; | 356 current_decoding_time_ms_ = 0; |
334 output_timestamp_ = 0; | 357 decoded_image_.set_timestamp(0); |
335 output_ntp_time_ms_ = 0; | 358 decoded_image_.set_ntp_time_ms(0); |
336 timestamps_.clear(); | 359 timestamps_.clear(); |
337 ntp_times_ms_.clear(); | 360 ntp_times_ms_.clear(); |
338 frame_rtc_times_ms_.clear(); | 361 frame_rtc_times_ms_.clear(); |
339 | 362 |
340 jobjectArray input_buffers = (jobjectArray)GetObjectField( | 363 jobjectArray input_buffers = (jobjectArray)GetObjectField( |
341 jni, *j_media_codec_video_decoder_, j_input_buffers_field_); | 364 jni, *j_media_codec_video_decoder_, j_input_buffers_field_); |
342 size_t num_input_buffers = jni->GetArrayLength(input_buffers); | 365 size_t num_input_buffers = jni->GetArrayLength(input_buffers); |
343 input_buffers_.resize(num_input_buffers); | 366 input_buffers_.resize(num_input_buffers); |
344 for (size_t i = 0; i < num_input_buffers; ++i) { | 367 for (size_t i = 0; i < num_input_buffers; ++i) { |
345 input_buffers_[i] = | 368 input_buffers_[i] = |
346 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i)); | 369 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i)); |
347 if (CheckException(jni)) { | 370 if (CheckException(jni)) { |
348 ALOGE("NewGlobalRef error - fallback to SW codec."); | 371 ALOGE("NewGlobalRef error - fallback to SW codec."); |
349 sw_fallback_required_ = true; | 372 sw_fallback_required_ = true; |
350 return WEBRTC_VIDEO_CODEC_ERROR; | 373 return WEBRTC_VIDEO_CODEC_ERROR; |
351 } | 374 } |
352 } | 375 } |
353 | 376 |
354 if (use_surface_) { | |
355 jobject surface_texture = GetObjectField( | |
356 jni, *j_media_codec_video_decoder_, j_surface_texture_field_); | |
357 if (previous_surface_texture_ != NULL) { | |
358 jni->DeleteGlobalRef(previous_surface_texture_); | |
359 } | |
360 previous_surface_texture_ = surface_texture_; | |
361 surface_texture_ = jni->NewGlobalRef(surface_texture); | |
362 } | |
363 codec_thread_->PostDelayed(kMediaCodecPollMs, this); | 377 codec_thread_->PostDelayed(kMediaCodecPollMs, this); |
364 | 378 |
365 return WEBRTC_VIDEO_CODEC_OK; | 379 return WEBRTC_VIDEO_CODEC_OK; |
366 } | 380 } |
367 | 381 |
368 int32_t MediaCodecVideoDecoder::Release() { | 382 int32_t MediaCodecVideoDecoder::Release() { |
369 ALOGD("DecoderRelease request"); | 383 ALOGD("DecoderRelease request"); |
370 return codec_thread_->Invoke<int32_t>( | 384 return codec_thread_->Invoke<int32_t>( |
371 Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this)); | 385 Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this)); |
372 } | 386 } |
(...skipping 174 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
547 | 561 |
548 // Try to drain the decoder | 562 // Try to drain the decoder |
549 if (!DeliverPendingOutputs(jni, 0)) { | 563 if (!DeliverPendingOutputs(jni, 0)) { |
550 ALOGE("DeliverPendingOutputs error"); | 564 ALOGE("DeliverPendingOutputs error"); |
551 return ProcessHWErrorOnCodecThread(); | 565 return ProcessHWErrorOnCodecThread(); |
552 } | 566 } |
553 | 567 |
554 return WEBRTC_VIDEO_CODEC_OK; | 568 return WEBRTC_VIDEO_CODEC_OK; |
555 } | 569 } |
556 | 570 |
571 void MediaCodecVideoDecoder::DeliverFrame(VideoFrame* frame) { | |
572 // Pop timestamps from queues and update |frame|. | |
perkj_webrtc
2015/09/22 12:40:37
please CheckOnCodecThread();
| |
573 if (!timestamps_.empty()) { | |
574 frame->set_timestamp(timestamps_.front()); | |
575 timestamps_.erase(timestamps_.begin()); | |
576 } | |
577 if (!ntp_times_ms_.empty()) { | |
578 frame->set_ntp_time_ms(ntp_times_ms_.front()); | |
579 ntp_times_ms_.erase(ntp_times_ms_.begin()); | |
580 } | |
581 | |
582 const int32_t callback_status = callback_->Decoded(*frame); | |
583 if (callback_status > 0) { | |
584 ALOGE("callback error"); | |
585 } | |
586 } | |
587 | |
557 bool MediaCodecVideoDecoder::DeliverPendingOutputs( | 588 bool MediaCodecVideoDecoder::DeliverPendingOutputs( |
558 JNIEnv* jni, int dequeue_timeout_us) { | 589 JNIEnv* jni, int dequeue_timeout_us) { |
559 if (frames_received_ <= frames_decoded_) { | 590 if (frames_received_ <= frames_decoded_) { |
560 // No need to query for output buffers - decoder is drained. | 591 // No need to query for output buffers - decoder is drained. |
561 return true; | 592 return true; |
562 } | 593 } |
563 // Get decoder output. | 594 // Get decoder output. |
564 jobject j_decoder_output_buffer_info = jni->CallObjectMethod( | 595 jobject j_decoder_output_buffer_info = jni->CallObjectMethod( |
565 *j_media_codec_video_decoder_, | 596 *j_media_codec_video_decoder_, |
566 j_dequeue_output_buffer_method_, | 597 j_dequeue_output_buffer_method_, |
(...skipping 23 matching lines...) Expand all Loading... | |
590 } | 621 } |
591 | 622 |
592 // Get decoded video frame properties. | 623 // Get decoded video frame properties. |
593 int color_format = GetIntField(jni, *j_media_codec_video_decoder_, | 624 int color_format = GetIntField(jni, *j_media_codec_video_decoder_, |
594 j_color_format_field_); | 625 j_color_format_field_); |
595 int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_); | 626 int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_); |
596 int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_); | 627 int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_); |
597 int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_); | 628 int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_); |
598 int slice_height = GetIntField(jni, *j_media_codec_video_decoder_, | 629 int slice_height = GetIntField(jni, *j_media_codec_video_decoder_, |
599 j_slice_height_field_); | 630 j_slice_height_field_); |
600 int texture_id = GetIntField(jni, *j_media_codec_video_decoder_, | |
601 j_textureID_field_); | |
602 | 631 |
603 // Extract data from Java ByteBuffer and create output yuv420 frame - | 632 // Extract data from Java ByteBuffer and create output yuv420 frame - |
604 // for non surface decoding only. | 633 // for non surface decoding only. |
605 if (!use_surface_) { | 634 if (!use_surface_) { |
606 if (output_buffer_size < width * height * 3 / 2) { | 635 if (output_buffer_size < width * height * 3 / 2) { |
607 ALOGE("Insufficient output buffer size: %d", output_buffer_size); | 636 ALOGE("Insufficient output buffer size: %d", output_buffer_size); |
608 return false; | 637 return false; |
609 } | 638 } |
610 jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField( | 639 jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField( |
611 jni, *j_media_codec_video_decoder_, j_output_buffers_field_)); | 640 jni, *j_media_codec_video_decoder_, j_output_buffers_field_)); |
(...skipping 24 matching lines...) Expand all Loading... | |
636 decoded_image_.buffer(webrtc::kYPlane), | 665 decoded_image_.buffer(webrtc::kYPlane), |
637 decoded_image_.stride(webrtc::kYPlane), | 666 decoded_image_.stride(webrtc::kYPlane), |
638 decoded_image_.buffer(webrtc::kUPlane), | 667 decoded_image_.buffer(webrtc::kUPlane), |
639 decoded_image_.stride(webrtc::kUPlane), | 668 decoded_image_.stride(webrtc::kUPlane), |
640 decoded_image_.buffer(webrtc::kVPlane), | 669 decoded_image_.buffer(webrtc::kVPlane), |
641 decoded_image_.stride(webrtc::kVPlane), | 670 decoded_image_.stride(webrtc::kVPlane), |
642 width, height); | 671 width, height); |
643 } | 672 } |
644 } | 673 } |
645 | 674 |
646 // Get frame timestamps from a queue. | |
647 if (timestamps_.size() > 0) { | |
648 output_timestamp_ = timestamps_.front(); | |
649 timestamps_.erase(timestamps_.begin()); | |
650 } | |
651 if (ntp_times_ms_.size() > 0) { | |
652 output_ntp_time_ms_ = ntp_times_ms_.front(); | |
653 ntp_times_ms_.erase(ntp_times_ms_.begin()); | |
654 } | |
655 int64_t frame_decoding_time_ms = 0; | 675 int64_t frame_decoding_time_ms = 0; |
656 if (frame_rtc_times_ms_.size() > 0) { | 676 if (frame_rtc_times_ms_.size() > 0) { |
657 frame_decoding_time_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front(); | 677 frame_decoding_time_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front(); |
658 frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin()); | 678 frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin()); |
659 } | 679 } |
660 ALOGV("Decoder frame out # %d. %d x %d. %d x %d. Color: 0x%x. TS: %ld." | 680 ALOGV("Decoder frame out # %d. %d x %d. %d x %d. Color: 0x%x. TS: %ld." |
661 " DecTime: %lld", frames_decoded_, width, height, stride, slice_height, | 681 " DecTime: %lld", frames_decoded_, width, height, stride, slice_height, |
662 color_format, output_timestamps_ms, frame_decoding_time_ms); | 682 color_format, output_timestamps_ms, frame_decoding_time_ms); |
663 | 683 |
664 // Return output buffer back to codec. | 684 // Return output buffer back to codec. |
(...skipping 17 matching lines...) Expand all Loading... | |
682 current_bytes_ * 8 / statistic_time_ms, | 702 current_bytes_ * 8 / statistic_time_ms, |
683 (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms, | 703 (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms, |
684 current_decoding_time_ms_ / current_frames_, statistic_time_ms); | 704 current_decoding_time_ms_ / current_frames_, statistic_time_ms); |
685 start_time_ms_ = GetCurrentTimeMs(); | 705 start_time_ms_ = GetCurrentTimeMs(); |
686 current_frames_ = 0; | 706 current_frames_ = 0; |
687 current_bytes_ = 0; | 707 current_bytes_ = 0; |
688 current_decoding_time_ms_ = 0; | 708 current_decoding_time_ms_ = 0; |
689 } | 709 } |
690 | 710 |
691 // Callback - output decoded frame. | 711 // Callback - output decoded frame. |
692 int32_t callback_status = WEBRTC_VIDEO_CODEC_OK; | |
693 if (use_surface_) { | 712 if (use_surface_) { |
694 native_handle_.SetTextureObject(surface_texture_, texture_id); | 713 // We will receive a callback in OnTextureFrame() when the texture frame is |
695 VideoFrame texture_image(new rtc::RefCountedObject<JniNativeHandleBuffer>( | 714 // ready to deliver. |
696 &native_handle_, width, height), | |
697 output_timestamp_, 0, webrtc::kVideoRotation_0); | |
698 texture_image.set_ntp_time_ms(output_ntp_time_ms_); | |
699 callback_status = callback_->Decoded(texture_image); | |
700 } else { | 715 } else { |
701 decoded_image_.set_timestamp(output_timestamp_); | 716 DeliverFrame(&decoded_image_); |
702 decoded_image_.set_ntp_time_ms(output_ntp_time_ms_); | |
703 callback_status = callback_->Decoded(decoded_image_); | |
704 } | |
705 if (callback_status > 0) { | |
706 ALOGE("callback error"); | |
707 } | 717 } |
708 | 718 |
709 return true; | 719 return true; |
710 } | 720 } |
711 | 721 |
722 void MediaCodecVideoDecoder::OnTextureFrame( | |
723 int width, | |
724 int height, | |
725 int64_t timestamp_ns, | |
726 const NativeHandleImpl& native_handle) { | |
727 codec_thread_->Invoke<void>( | |
perkj_webrtc
2015/09/22 12:40:37
CheckOnCodecThread ?
Can we skip this communicati
AlexG
2015/09/28 20:06:21
I agree with perkj@ - keep polling strategy - it w
| |
728 Bind(&MediaCodecVideoDecoder::OnTextureFrameOnCodecThread, this, | |
729 width, height, timestamp_ns, native_handle)); | |
730 } | |
731 | |
732 void MediaCodecVideoDecoder::OnTextureFrameOnCodecThread( | |
733 int width, | |
734 int height, | |
735 int64_t timestamp_ns, | |
736 const NativeHandleImpl& native_handle) { | |
737 VideoFrame texture_frame(new rtc::RefCountedObject<AndroidTextureBuffer>( | |
738 width, height, this, native_handle), | |
739 0, 0, webrtc::kVideoRotation_0); | |
740 DeliverFrame(&texture_frame); | |
741 } | |
742 | |
743 void MediaCodecVideoDecoder::ReturnTextureFrame() { | |
744 codec_thread_->Invoke<void>( | |
745 Bind(&MediaCodecVideoDecoder::ReturnTextureFrameOnCodecThread, this)); | |
746 } | |
747 | |
748 void MediaCodecVideoDecoder::ReturnTextureFrameOnCodecThread() { | |
749 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | |
750 jni->CallVoidMethod(*j_media_codec_video_decoder_, | |
751 j_return_texture_frame_method_); | |
752 } | |
753 | |
712 int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback( | 754 int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback( |
713 DecodedImageCallback* callback) { | 755 DecodedImageCallback* callback) { |
714 callback_ = callback; | 756 callback_ = callback; |
715 return WEBRTC_VIDEO_CODEC_OK; | 757 return WEBRTC_VIDEO_CODEC_OK; |
716 } | 758 } |
717 | 759 |
718 int32_t MediaCodecVideoDecoder::Reset() { | 760 int32_t MediaCodecVideoDecoder::Reset() { |
719 ALOGD("DecoderReset"); | 761 ALOGD("DecoderReset"); |
720 if (!inited_) { | 762 if (!inited_) { |
721 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 763 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
(...skipping 102 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
824 } | 866 } |
825 ALOGE("Can not find HW video decoder for type %d.", (int)type); | 867 ALOGE("Can not find HW video decoder for type %d.", (int)type); |
826 return NULL; | 868 return NULL; |
827 } | 869 } |
828 | 870 |
829 void MediaCodecVideoDecoderFactory::DestroyVideoDecoder( | 871 void MediaCodecVideoDecoderFactory::DestroyVideoDecoder( |
830 webrtc::VideoDecoder* decoder) { | 872 webrtc::VideoDecoder* decoder) { |
831 delete decoder; | 873 delete decoder; |
832 } | 874 } |
833 | 875 |
876 JOW(void, MediaCodecVideoDecoder_nativeOnTextureFrame)( | |
877 JNIEnv* jni, | |
878 jclass, | |
879 jlong j_decoder_ptr, | |
880 jint j_width, | |
881 jint j_height, | |
882 jint j_oes_texture_id, | |
883 jfloatArray j_transform_matrix, | |
884 jlong j_timestamp_ns) { | |
885 reinterpret_cast<MediaCodecVideoDecoder*>(j_decoder_ptr) | |
AlexG
2015/09/28 20:06:21
How do you ensure that j_decoder_ptr is still vali
| |
886 ->OnTextureFrame(j_width, j_height, j_timestamp_ns, | |
887 NativeHandleImpl(jni, j_oes_texture_id, | |
888 j_transform_matrix)); | |
889 } | |
890 | |
834 } // namespace webrtc_jni | 891 } // namespace webrtc_jni |
835 | 892 |
OLD | NEW |