Chromium Code Reviews
| OLD | NEW |
|---|---|
| 1 /* | 1 /* |
| 2 * libjingle | 2 * libjingle |
| 3 * Copyright 2015 Google Inc. | 3 * Copyright 2015 Google Inc. |
| 4 * | 4 * |
| 5 * Redistribution and use in source and binary forms, with or without | 5 * Redistribution and use in source and binary forms, with or without |
| 6 * modification, are permitted provided that the following conditions are met: | 6 * modification, are permitted provided that the following conditions are met: |
| 7 * | 7 * |
| 8 * 1. Redistributions of source code must retain the above copyright notice, | 8 * 1. Redistributions of source code must retain the above copyright notice, |
| 9 * this list of conditions and the following disclaimer. | 9 * this list of conditions and the following disclaimer. |
| 10 * 2. Redistributions in binary form must reproduce the above copyright notice, | 10 * 2. Redistributions in binary form must reproduce the above copyright notice, |
| (...skipping 18 matching lines...) | |
| 29 #include <vector> | 29 #include <vector> |
| 30 | 30 |
| 31 #include "talk/app/webrtc/java/jni/androidmediadecoder_jni.h" | 31 #include "talk/app/webrtc/java/jni/androidmediadecoder_jni.h" |
| 32 #include "talk/app/webrtc/java/jni/androidmediacodeccommon.h" | 32 #include "talk/app/webrtc/java/jni/androidmediacodeccommon.h" |
| 33 #include "talk/app/webrtc/java/jni/classreferenceholder.h" | 33 #include "talk/app/webrtc/java/jni/classreferenceholder.h" |
| 34 #include "talk/app/webrtc/java/jni/native_handle_impl.h" | 34 #include "talk/app/webrtc/java/jni/native_handle_impl.h" |
| 35 #include "webrtc/base/bind.h" | 35 #include "webrtc/base/bind.h" |
| 36 #include "webrtc/base/checks.h" | 36 #include "webrtc/base/checks.h" |
| 37 #include "webrtc/base/logging.h" | 37 #include "webrtc/base/logging.h" |
| 38 #include "webrtc/base/thread.h" | 38 #include "webrtc/base/thread.h" |
| 39 #include "webrtc/common_video/interface/video_frame_buffer.h" | |
| 39 #include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h" | 40 #include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h" |
| 40 #include "webrtc/system_wrappers/interface/logcat_trace_context.h" | 41 #include "webrtc/system_wrappers/interface/logcat_trace_context.h" |
| 41 #include "webrtc/system_wrappers/interface/tick_util.h" | 42 #include "webrtc/system_wrappers/interface/tick_util.h" |
| 42 #include "third_party/libyuv/include/libyuv/convert.h" | 43 #include "third_party/libyuv/include/libyuv/convert.h" |
| 43 #include "third_party/libyuv/include/libyuv/convert_from.h" | 44 #include "third_party/libyuv/include/libyuv/convert_from.h" |
| 44 #include "third_party/libyuv/include/libyuv/video_common.h" | 45 #include "third_party/libyuv/include/libyuv/video_common.h" |
| 45 | 46 |
| 46 using rtc::Bind; | 47 using rtc::Bind; |
| 47 using rtc::Thread; | 48 using rtc::Thread; |
| 48 using rtc::ThreadManager; | 49 using rtc::ThreadManager; |
| (...skipping 30 matching lines...) | |
| 79 | 80 |
| 80 int32_t RegisterDecodeCompleteCallback(DecodedImageCallback* callback) | 81 int32_t RegisterDecodeCompleteCallback(DecodedImageCallback* callback) |
| 81 override; | 82 override; |
| 82 | 83 |
| 83 int32_t Release() override; | 84 int32_t Release() override; |
| 84 | 85 |
| 85 int32_t Reset() override; | 86 int32_t Reset() override; |
| 86 // rtc::MessageHandler implementation. | 87 // rtc::MessageHandler implementation. |
| 87 void OnMessage(rtc::Message* msg) override; | 88 void OnMessage(rtc::Message* msg) override; |
| 88 | 89 |
| 90 void OnTextureFrame(int width, | |
| 91 int height, | |
| 92 int64_t timestamp_ns, | |
| 93 const NativeHandleImpl& native_handle); | |
| 94 void ReturnTextureFrame(); | |

magjed_webrtc (2015/09/21 18:29:13):
Self review: Make ReturnTextureFrame() private.

magjed_webrtc (2015/09/22 09:47:36):
MediaCodecVideoDecoder is an implementation detail
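One way to act on the self-review note above would be to keep ReturnTextureFrame() in the private section and befriend AndroidTextureBuffer, its only caller in this patch. This is a hypothetical sketch, not part of the CL; the base classes and members are assumed from the surrounding file.

```cpp
// Hypothetical alternative (not in this CL): ReturnTextureFrame() moved to the
// private section; AndroidTextureBuffer is befriended so it can still return
// the texture when the last reference to the frame buffer is dropped.
class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
                               public rtc::MessageHandler {
 public:
  void OnTextureFrame(int width,
                      int height,
                      int64_t timestamp_ns,
                      const NativeHandleImpl& native_handle);

 private:
  friend class AndroidTextureBuffer;  // Only caller of ReturnTextureFrame().
  void ReturnTextureFrame();
  // ... remaining declarations as in the patch ...
};
```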
| 95 | |
| 89 private: | 96 private: |
| 90 // CHECK-fail if not running on |codec_thread_|. | 97 // CHECK-fail if not running on |codec_thread_|. |
| 91 void CheckOnCodecThread(); | 98 void CheckOnCodecThread(); |
| 92 | 99 |
| 93 int32_t InitDecodeOnCodecThread(); | 100 int32_t InitDecodeOnCodecThread(); |
| 94 int32_t ReleaseOnCodecThread(); | 101 int32_t ReleaseOnCodecThread(); |
| 95 int32_t DecodeOnCodecThread(const EncodedImage& inputImage); | 102 int32_t DecodeOnCodecThread(const EncodedImage& inputImage); |
| 96 // Deliver any outputs pending in the MediaCodec to our |callback_| and return | 103 // Deliver any outputs pending in the MediaCodec to our |callback_| and return |
| 97 // true on success. | 104 // true on success. |
| 98 bool DeliverPendingOutputs(JNIEnv* jni, int dequeue_timeout_us); | 105 bool DeliverPendingOutputs(JNIEnv* jni, int dequeue_timeout_us); |
| 106 void OnTextureFrameOnCodecThread(int width, | |
| 107 int height, | |
| 108 int64_t timestamp_ns, | |
| 109 const NativeHandleImpl& native_handle); | |
| 110 void ReturnTextureFrameOnCodecThread(); | |
| 99 int32_t ProcessHWErrorOnCodecThread(); | 111 int32_t ProcessHWErrorOnCodecThread(); |
| 100 | 112 |
| 101 // Type of video codec. | 113 // Type of video codec. |
| 102 VideoCodecType codecType_; | 114 VideoCodecType codecType_; |
| 103 | 115 |
| 104 bool key_frame_required_; | 116 bool key_frame_required_; |
| 105 bool inited_; | 117 bool inited_; |
| 106 bool sw_fallback_required_; | 118 bool sw_fallback_required_; |
| 107 bool use_surface_; | 119 bool use_surface_; |
| 108 VideoCodec codec_; | 120 VideoCodec codec_; |
| 109 VideoFrame decoded_image_; | 121 VideoFrame decoded_image_; |
| 110 NativeHandleImpl native_handle_; | |
| 111 DecodedImageCallback* callback_; | 122 DecodedImageCallback* callback_; |
| 112 int frames_received_; // Number of frames received by decoder. | 123 int frames_received_; // Number of frames received by decoder. |
| 113 int frames_decoded_; // Number of frames decoded by decoder. | 124 int frames_decoded_; // Number of frames decoded by decoder. |
| 114 int64_t start_time_ms_; // Start time for statistics. | 125 int64_t start_time_ms_; // Start time for statistics. |
| 115 int current_frames_; // Number of frames in the current statistics interval. | 126 int current_frames_; // Number of frames in the current statistics interval. |
| 116 int current_bytes_; // Encoded bytes in the current statistics interval. | 127 int current_bytes_; // Encoded bytes in the current statistics interval. |
| 117 int current_decoding_time_ms_; // Overall decoding time in the current second | 128 int current_decoding_time_ms_; // Overall decoding time in the current second |
| 118 uint32_t max_pending_frames_; // Maximum number of pending input frames | 129 uint32_t max_pending_frames_; // Maximum number of pending input frames |
| 119 std::vector<int32_t> timestamps_; | 130 std::vector<int32_t> timestamps_; |
| 120 std::vector<int64_t> ntp_times_ms_; | 131 std::vector<int64_t> ntp_times_ms_; |
| 121 std::vector<int64_t> frame_rtc_times_ms_; // Time when video frame is sent to | 132 std::vector<int64_t> frame_rtc_times_ms_; // Time when video frame is sent to |
| 122 // decoder input. | 133 // decoder input. |
| 123 int32_t output_timestamp_; // Last output frame timestamp from timestamps_ Q. | 134 int32_t output_timestamp_; // Last output frame timestamp from timestamps_ Q. |
| 124 int64_t output_ntp_time_ms_; // Last output frame ntp time from | 135 int64_t output_ntp_time_ms_; // Last output frame ntp time from |
| 125 // ntp_times_ms_ queue. | 136 // ntp_times_ms_ queue. |
| 126 | 137 |
| 127 // State that is constant for the lifetime of this object once the ctor | 138 // State that is constant for the lifetime of this object once the ctor |
| 128 // returns. | 139 // returns. |
| 129 scoped_ptr<Thread> codec_thread_; // Thread on which to operate MediaCodec. | 140 scoped_ptr<Thread> codec_thread_; // Thread on which to operate MediaCodec. |
| 130 ScopedGlobalRef<jclass> j_media_codec_video_decoder_class_; | 141 ScopedGlobalRef<jclass> j_media_codec_video_decoder_class_; |
| 131 ScopedGlobalRef<jobject> j_media_codec_video_decoder_; | 142 ScopedGlobalRef<jobject> j_media_codec_video_decoder_; |
| 132 jmethodID j_init_decode_method_; | 143 jmethodID j_init_decode_method_; |
| 133 jmethodID j_release_method_; | 144 jmethodID j_release_method_; |
| 134 jmethodID j_dequeue_input_buffer_method_; | 145 jmethodID j_dequeue_input_buffer_method_; |
| 135 jmethodID j_queue_input_buffer_method_; | 146 jmethodID j_queue_input_buffer_method_; |
| 136 jmethodID j_dequeue_output_buffer_method_; | 147 jmethodID j_dequeue_output_buffer_method_; |
| 148 jmethodID j_return_texture_frame_method_; | |
| 137 jmethodID j_release_output_buffer_method_; | 149 jmethodID j_release_output_buffer_method_; |
| 138 // MediaCodecVideoDecoder fields. | 150 // MediaCodecVideoDecoder fields. |
| 139 jfieldID j_input_buffers_field_; | 151 jfieldID j_input_buffers_field_; |
| 140 jfieldID j_output_buffers_field_; | 152 jfieldID j_output_buffers_field_; |
| 141 jfieldID j_color_format_field_; | 153 jfieldID j_color_format_field_; |
| 142 jfieldID j_width_field_; | 154 jfieldID j_width_field_; |
| 143 jfieldID j_height_field_; | 155 jfieldID j_height_field_; |
| 144 jfieldID j_stride_field_; | 156 jfieldID j_stride_field_; |
| 145 jfieldID j_slice_height_field_; | 157 jfieldID j_slice_height_field_; |
| 146 jfieldID j_surface_texture_field_; | |
| 147 jfieldID j_textureID_field_; | |
| 148 // MediaCodecVideoDecoder.DecoderOutputBufferInfo fields. | 158 // MediaCodecVideoDecoder.DecoderOutputBufferInfo fields. |
| 149 jfieldID j_info_index_field_; | 159 jfieldID j_info_index_field_; |
| 150 jfieldID j_info_offset_field_; | 160 jfieldID j_info_offset_field_; |
| 151 jfieldID j_info_size_field_; | 161 jfieldID j_info_size_field_; |
| 152 jfieldID j_info_presentation_timestamp_us_field_; | 162 jfieldID j_info_presentation_timestamp_us_field_; |
| 153 | 163 |
| 154 // Global references; must be deleted in Release(). | 164 // Global references; must be deleted in Release(). |
| 155 std::vector<jobject> input_buffers_; | 165 std::vector<jobject> input_buffers_; |
| 156 jobject surface_texture_; | |
| 157 jobject previous_surface_texture_; | |
| 158 | 166 |
| 159 // Render EGL context - owned by factory, should not be allocated/destroyed | 167 // Render EGL context - owned by factory, should not be allocated/destroyed |
| 160 // by VideoDecoder. | 168 // by VideoDecoder. |
| 161 jobject render_egl_context_; | 169 jobject render_egl_context_; |
| 162 }; | 170 }; |
| 163 | 171 |
| 164 MediaCodecVideoDecoder::MediaCodecVideoDecoder( | 172 MediaCodecVideoDecoder::MediaCodecVideoDecoder( |
| 165 JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context) : | 173 JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context) : |
| 166 codecType_(codecType), | 174 codecType_(codecType), |
| 167 render_egl_context_(render_egl_context), | 175 render_egl_context_(render_egl_context), |
| 168 key_frame_required_(true), | 176 key_frame_required_(true), |
| 169 inited_(false), | 177 inited_(false), |
| 170 sw_fallback_required_(false), | 178 sw_fallback_required_(false), |
| 171 surface_texture_(NULL), | |
| 172 previous_surface_texture_(NULL), | |
| 173 codec_thread_(new Thread()), | 179 codec_thread_(new Thread()), |
| 174 j_media_codec_video_decoder_class_( | 180 j_media_codec_video_decoder_class_( |
| 175 jni, | 181 jni, |
| 176 FindClass(jni, "org/webrtc/MediaCodecVideoDecoder")), | 182 FindClass(jni, "org/webrtc/MediaCodecVideoDecoder")), |
| 177 j_media_codec_video_decoder_( | 183 j_media_codec_video_decoder_( |
| 178 jni, | 184 jni, |
| 179 jni->NewObject(*j_media_codec_video_decoder_class_, | 185 jni->NewObject(*j_media_codec_video_decoder_class_, |
| 180 GetMethodID(jni, | 186 GetMethodID(jni, |
| 181 *j_media_codec_video_decoder_class_, | 187 *j_media_codec_video_decoder_class_, |
| 182 "<init>", | 188 "<init>", |
| 183 "()V"))) { | 189 "()V"))) { |
| 184 ScopedLocalRefFrame local_ref_frame(jni); | 190 ScopedLocalRefFrame local_ref_frame(jni); |
| 185 codec_thread_->SetName("MediaCodecVideoDecoder", NULL); | 191 codec_thread_->SetName("MediaCodecVideoDecoder", NULL); |
| 186 RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoDecoder"; | 192 RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoDecoder"; |
| 187 | 193 |
| 188 j_init_decode_method_ = GetMethodID( | 194 j_init_decode_method_ = GetMethodID( |
| 189 jni, *j_media_codec_video_decoder_class_, "initDecode", | 195 jni, *j_media_codec_video_decoder_class_, "initDecode", |
| 190 "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;" | 196 "(JLorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;" |
| 191 "IILandroid/opengl/EGLContext;)Z"); | 197 "IILandroid/opengl/EGLContext;)Z"); |
| 192 j_release_method_ = | 198 j_release_method_ = |
| 193 GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V"); | 199 GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V"); |
| 194 j_dequeue_input_buffer_method_ = GetMethodID( | 200 j_dequeue_input_buffer_method_ = GetMethodID( |
| 195 jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I"); | 201 jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I"); |
| 196 j_queue_input_buffer_method_ = GetMethodID( | 202 j_queue_input_buffer_method_ = GetMethodID( |
| 197 jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJ)Z"); | 203 jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJ)Z"); |
| 198 j_dequeue_output_buffer_method_ = GetMethodID( | 204 j_dequeue_output_buffer_method_ = GetMethodID( |
| 199 jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer", | 205 jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer", |
| 200 "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo;"); | 206 "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo;"); |
| 207 j_return_texture_frame_method_ = GetMethodID( | |
| 208 jni, *j_media_codec_video_decoder_class_, "returnTextureFrame", "()V"); | |
| 201 j_release_output_buffer_method_ = GetMethodID( | 209 j_release_output_buffer_method_ = GetMethodID( |
| 202 jni, *j_media_codec_video_decoder_class_, "releaseOutputBuffer", "(I)Z"); | 210 jni, *j_media_codec_video_decoder_class_, "releaseOutputBuffer", "(I)Z"); |
| 203 | 211 |
| 204 j_input_buffers_field_ = GetFieldID( | 212 j_input_buffers_field_ = GetFieldID( |
| 205 jni, *j_media_codec_video_decoder_class_, | 213 jni, *j_media_codec_video_decoder_class_, |
| 206 "inputBuffers", "[Ljava/nio/ByteBuffer;"); | 214 "inputBuffers", "[Ljava/nio/ByteBuffer;"); |
| 207 j_output_buffers_field_ = GetFieldID( | 215 j_output_buffers_field_ = GetFieldID( |
| 208 jni, *j_media_codec_video_decoder_class_, | 216 jni, *j_media_codec_video_decoder_class_, |
| 209 "outputBuffers", "[Ljava/nio/ByteBuffer;"); | 217 "outputBuffers", "[Ljava/nio/ByteBuffer;"); |
| 210 j_color_format_field_ = GetFieldID( | 218 j_color_format_field_ = GetFieldID( |
| 211 jni, *j_media_codec_video_decoder_class_, "colorFormat", "I"); | 219 jni, *j_media_codec_video_decoder_class_, "colorFormat", "I"); |
| 212 j_width_field_ = GetFieldID( | 220 j_width_field_ = GetFieldID( |
| 213 jni, *j_media_codec_video_decoder_class_, "width", "I"); | 221 jni, *j_media_codec_video_decoder_class_, "width", "I"); |
| 214 j_height_field_ = GetFieldID( | 222 j_height_field_ = GetFieldID( |
| 215 jni, *j_media_codec_video_decoder_class_, "height", "I"); | 223 jni, *j_media_codec_video_decoder_class_, "height", "I"); |
| 216 j_stride_field_ = GetFieldID( | 224 j_stride_field_ = GetFieldID( |
| 217 jni, *j_media_codec_video_decoder_class_, "stride", "I"); | 225 jni, *j_media_codec_video_decoder_class_, "stride", "I"); |
| 218 j_slice_height_field_ = GetFieldID( | 226 j_slice_height_field_ = GetFieldID( |
| 219 jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I"); | 227 jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I"); |
| 220 j_textureID_field_ = GetFieldID( | |
| 221 jni, *j_media_codec_video_decoder_class_, "textureID", "I"); | |
| 222 j_surface_texture_field_ = GetFieldID( | |
| 223 jni, *j_media_codec_video_decoder_class_, "surfaceTexture", | |
| 224 "Landroid/graphics/SurfaceTexture;"); | |
| 225 | 228 |
| 226 jclass j_decoder_output_buffer_info_class = FindClass(jni, | 229 jclass j_decoder_output_buffer_info_class = FindClass(jni, |
| 227 "org/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo"); | 230 "org/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo"); |
| 228 j_info_index_field_ = GetFieldID( | 231 j_info_index_field_ = GetFieldID( |
| 229 jni, j_decoder_output_buffer_info_class, "index", "I"); | 232 jni, j_decoder_output_buffer_info_class, "index", "I"); |
| 230 j_info_offset_field_ = GetFieldID( | 233 j_info_offset_field_ = GetFieldID( |
| 231 jni, j_decoder_output_buffer_info_class, "offset", "I"); | 234 jni, j_decoder_output_buffer_info_class, "offset", "I"); |
| 232 j_info_size_field_ = GetFieldID( | 235 j_info_size_field_ = GetFieldID( |
| 233 jni, j_decoder_output_buffer_info_class, "size", "I"); | 236 jni, j_decoder_output_buffer_info_class, "size", "I"); |
| 234 j_info_presentation_timestamp_us_field_ = GetFieldID( | 237 j_info_presentation_timestamp_us_field_ = GetFieldID( |
| 235 jni, j_decoder_output_buffer_info_class, "presentationTimestampUs", "J"); | 238 jni, j_decoder_output_buffer_info_class, "presentationTimestampUs", "J"); |
| 236 | 239 |
| 237 CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed"; | 240 CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed"; |
| 238 use_surface_ = (render_egl_context_ != NULL); | 241 use_surface_ = (render_egl_context_ != NULL); |
| 239 ALOGD("MediaCodecVideoDecoder ctor. Use surface: %d", use_surface_); | 242 ALOGD("MediaCodecVideoDecoder ctor. Use surface: %d", use_surface_); |
| 240 memset(&codec_, 0, sizeof(codec_)); | 243 memset(&codec_, 0, sizeof(codec_)); |
| 241 AllowBlockingCalls(); | 244 AllowBlockingCalls(); |
| 242 } | 245 } |
| 243 | 246 |
| 244 MediaCodecVideoDecoder::~MediaCodecVideoDecoder() { | 247 MediaCodecVideoDecoder::~MediaCodecVideoDecoder() { |
| 245 // Call Release() to ensure no more callbacks to us after we are deleted. | 248 // Call Release() to ensure no more callbacks to us after we are deleted. |
| 246 Release(); | 249 Release(); |
| 247 // Delete global references. | |
| 248 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | |
| 249 if (previous_surface_texture_ != NULL) { | |
| 250 jni->DeleteGlobalRef(previous_surface_texture_); | |
| 251 } | |
| 252 if (surface_texture_ != NULL) { | |
| 253 jni->DeleteGlobalRef(surface_texture_); | |
| 254 } | |
| 255 } | 250 } |
| 256 | 251 |
| 257 int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst, | 252 int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst, |
| 258 int32_t numberOfCores) { | 253 int32_t numberOfCores) { |
| 259 ALOGD("InitDecode."); | 254 ALOGD("InitDecode."); |
| 260 if (inst == NULL) { | 255 if (inst == NULL) { |
| 261 ALOGE("NULL VideoCodec instance"); | 256 ALOGE("NULL VideoCodec instance"); |
| 262 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 257 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 263 } | 258 } |
| 264 // Factory should guard against other codecs being used with us. | 259 // Factory should guard against other codecs being used with us. |
| (...skipping 34 matching lines...) | |
| 299 // Always start with a complete key frame. | 294 // Always start with a complete key frame. |
| 300 key_frame_required_ = true; | 295 key_frame_required_ = true; |
| 301 frames_received_ = 0; | 296 frames_received_ = 0; |
| 302 frames_decoded_ = 0; | 297 frames_decoded_ = 0; |
| 303 | 298 |
| 304 jobject j_video_codec_enum = JavaEnumFromIndex( | 299 jobject j_video_codec_enum = JavaEnumFromIndex( |
| 305 jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_); | 300 jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_); |
| 306 bool success = jni->CallBooleanMethod( | 301 bool success = jni->CallBooleanMethod( |
| 307 *j_media_codec_video_decoder_, | 302 *j_media_codec_video_decoder_, |
| 308 j_init_decode_method_, | 303 j_init_decode_method_, |
| 304 jlongFromPointer(this), | |
| 309 j_video_codec_enum, | 305 j_video_codec_enum, |
| 310 codec_.width, | 306 codec_.width, |
| 311 codec_.height, | 307 codec_.height, |
| 312 use_surface_ ? render_egl_context_ : nullptr); | 308 use_surface_ ? render_egl_context_ : nullptr); |
| 313 if (CheckException(jni) || !success) { | 309 if (CheckException(jni) || !success) { |
| 314 ALOGE("Codec initialization error - fallback to SW codec."); | 310 ALOGE("Codec initialization error - fallback to SW codec."); |
| 315 sw_fallback_required_ = true; | 311 sw_fallback_required_ = true; |
| 316 return WEBRTC_VIDEO_CODEC_ERROR; | 312 return WEBRTC_VIDEO_CODEC_ERROR; |
| 317 } | 313 } |
| 318 inited_ = true; | 314 inited_ = true; |
| (...skipping 25 matching lines...) | |
| 344 for (size_t i = 0; i < num_input_buffers; ++i) { | 340 for (size_t i = 0; i < num_input_buffers; ++i) { |
| 345 input_buffers_[i] = | 341 input_buffers_[i] = |
| 346 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i)); | 342 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i)); |
| 347 if (CheckException(jni)) { | 343 if (CheckException(jni)) { |
| 348 ALOGE("NewGlobalRef error - fallback to SW codec."); | 344 ALOGE("NewGlobalRef error - fallback to SW codec."); |
| 349 sw_fallback_required_ = true; | 345 sw_fallback_required_ = true; |
| 350 return WEBRTC_VIDEO_CODEC_ERROR; | 346 return WEBRTC_VIDEO_CODEC_ERROR; |
| 351 } | 347 } |
| 352 } | 348 } |
| 353 | 349 |
| 354 if (use_surface_) { | |
| 355 jobject surface_texture = GetObjectField( | |
| 356 jni, *j_media_codec_video_decoder_, j_surface_texture_field_); | |
| 357 if (previous_surface_texture_ != NULL) { | |
| 358 jni->DeleteGlobalRef(previous_surface_texture_); | |
| 359 } | |
| 360 previous_surface_texture_ = surface_texture_; | |
| 361 surface_texture_ = jni->NewGlobalRef(surface_texture); | |
| 362 } | |
| 363 codec_thread_->PostDelayed(kMediaCodecPollMs, this); | 350 codec_thread_->PostDelayed(kMediaCodecPollMs, this); |
| 364 | 351 |
| 365 return WEBRTC_VIDEO_CODEC_OK; | 352 return WEBRTC_VIDEO_CODEC_OK; |
| 366 } | 353 } |
| 367 | 354 |
| 368 int32_t MediaCodecVideoDecoder::Release() { | 355 int32_t MediaCodecVideoDecoder::Release() { |
| 369 ALOGD("DecoderRelease request"); | 356 ALOGD("DecoderRelease request"); |
| 370 return codec_thread_->Invoke<int32_t>( | 357 return codec_thread_->Invoke<int32_t>( |
| 371 Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this)); | 358 Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this)); |
| 372 } | 359 } |
| (...skipping 217 matching lines...) | |
| 590 } | 577 } |
| 591 | 578 |
| 592 // Get decoded video frame properties. | 579 // Get decoded video frame properties. |
| 593 int color_format = GetIntField(jni, *j_media_codec_video_decoder_, | 580 int color_format = GetIntField(jni, *j_media_codec_video_decoder_, |
| 594 j_color_format_field_); | 581 j_color_format_field_); |
| 595 int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_); | 582 int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_); |
| 596 int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_); | 583 int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_); |
| 597 int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_); | 584 int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_); |
| 598 int slice_height = GetIntField(jni, *j_media_codec_video_decoder_, | 585 int slice_height = GetIntField(jni, *j_media_codec_video_decoder_, |
| 599 j_slice_height_field_); | 586 j_slice_height_field_); |
| 600 int texture_id = GetIntField(jni, *j_media_codec_video_decoder_, | |
| 601 j_textureID_field_); | |
| 602 | 587 |
| 603 // Extract data from Java ByteBuffer and create output yuv420 frame - | 588 // Extract data from Java ByteBuffer and create output yuv420 frame - |
| 604 // for non surface decoding only. | 589 // for non surface decoding only. |
| 605 if (!use_surface_) { | 590 if (!use_surface_) { |
| 606 if (output_buffer_size < width * height * 3 / 2) { | 591 if (output_buffer_size < width * height * 3 / 2) { |
| 607 ALOGE("Insufficient output buffer size: %d", output_buffer_size); | 592 ALOGE("Insufficient output buffer size: %d", output_buffer_size); |
| 608 return false; | 593 return false; |
| 609 } | 594 } |
| 610 jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField( | 595 jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField( |
| 611 jni, *j_media_codec_video_decoder_, j_output_buffers_field_)); | 596 jni, *j_media_codec_video_decoder_, j_output_buffers_field_)); |
| (...skipping 70 matching lines...) | |
| 682 current_bytes_ * 8 / statistic_time_ms, | 667 current_bytes_ * 8 / statistic_time_ms, |
| 683 (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms, | 668 (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms, |
| 684 current_decoding_time_ms_ / current_frames_, statistic_time_ms); | 669 current_decoding_time_ms_ / current_frames_, statistic_time_ms); |
| 685 start_time_ms_ = GetCurrentTimeMs(); | 670 start_time_ms_ = GetCurrentTimeMs(); |
| 686 current_frames_ = 0; | 671 current_frames_ = 0; |
| 687 current_bytes_ = 0; | 672 current_bytes_ = 0; |
| 688 current_decoding_time_ms_ = 0; | 673 current_decoding_time_ms_ = 0; |
| 689 } | 674 } |
| 690 | 675 |
| 691 // Callback - output decoded frame. | 676 // Callback - output decoded frame. |
| 692 int32_t callback_status = WEBRTC_VIDEO_CODEC_OK; | |
| 693 if (use_surface_) { | 677 if (use_surface_) { |
| 694 native_handle_.SetTextureObject(surface_texture_, texture_id); | 678 // We will receive a callback in OnTextureFrame() when the texture frame is |
| 695 VideoFrame texture_image(new rtc::RefCountedObject<JniNativeHandleBuffer>( | 679 // ready. |
| 696 &native_handle_, width, height), | 680 // TODO: Should probably move the timestamp updates (output_timestamp_, |
| 697 output_timestamp_, 0, webrtc::kVideoRotation_0); | 681 // output_ntp_time_ms_, etc) into a separate function, and call it for |
| 698 texture_image.set_ntp_time_ms(output_ntp_time_ms_); | 682 // !use_surface_ in this function, and otherwise in OnTextureFrame(). |
| 699 callback_status = callback_->Decoded(texture_image); | |
| 700 } else { | 683 } else { |
| 701 decoded_image_.set_timestamp(output_timestamp_); | 684 decoded_image_.set_timestamp(output_timestamp_); |
| 702 decoded_image_.set_ntp_time_ms(output_ntp_time_ms_); | 685 decoded_image_.set_ntp_time_ms(output_ntp_time_ms_); |
| 703 callback_status = callback_->Decoded(decoded_image_); | 686 const int32_t callback_status = callback_->Decoded(decoded_image_); |
| 704 } | 687 if (callback_status > 0) { |
| 705 if (callback_status > 0) { | 688 ALOGE("callback error"); |
| 706 ALOGE("callback error"); | 689 } |
| 707 } | 690 } |
| 708 | 691 |
| 709 return true; | 692 return true; |
| 710 } | 693 } |
| 711 | 694 |
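A minimal sketch of the helper hinted at by the TODO above, so the byte-buffer path in DeliverPendingOutputs() and the texture path in OnTextureFrameOnCodecThread() could share the timestamp bookkeeping. The name ApplyOutputTimestamps is hypothetical and not part of the CL.

```cpp
// Hypothetical helper (name invented): centralizes output timestamp handling,
// using the existing output_timestamp_ and output_ntp_time_ms_ members.
void MediaCodecVideoDecoder::ApplyOutputTimestamps(webrtc::VideoFrame* frame) {
  frame->set_timestamp(output_timestamp_);      // Last RTP timestamp from timestamps_ queue.
  frame->set_ntp_time_ms(output_ntp_time_ms_);  // Last NTP time from ntp_times_ms_ queue.
}
```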
| 695 class AndroidTextureBuffer : public webrtc::NativeHandleBuffer { | |
| 696 public: | |
| 697 AndroidTextureBuffer(int width, | |
| 698 int height, | |
| 699 MediaCodecVideoDecoder* decoder, | |
| 700 const NativeHandleImpl& native_handle) | |
| 701 : webrtc::NativeHandleBuffer(&native_handle_, width, height), | |
| 702 native_handle_(native_handle), | |
| 703 decoder_(decoder) {} | |
| 704 | |
| 705 ~AndroidTextureBuffer() { | |
| 706 decoder_->ReturnTextureFrame(); | |
| 707 } | |
| 708 | |
| 709 rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override { | |
| 710 // TODO(magjed): Implement this. | |
| 711 return nullptr; | |
| 712 } | |
| 713 | |
| 714 private: | |
| 715 NativeHandleImpl native_handle_; | |
| 716 MediaCodecVideoDecoder*const decoder_; | |
| 717 }; | |
| 718 | |
| 719 void MediaCodecVideoDecoder::OnTextureFrame( | |
| 720 int width, | |
| 721 int height, | |
| 722 int64_t timestamp_ns, | |
| 723 const NativeHandleImpl& native_handle) { | |
| 724 codec_thread_->Invoke<void>( | |
| 725 Bind(&MediaCodecVideoDecoder::OnTextureFrameOnCodecThread, this, | |
| 726 width, height, timestamp_ns, native_handle)); | |
| 727 } | |
| 728 | |
| 729 void MediaCodecVideoDecoder::OnTextureFrameOnCodecThread( | |
| 730 int width, | |
| 731 int height, | |
| 732 int64_t timestamp_ns, | |
| 733 const NativeHandleImpl& native_handle) { | |
| 734 VideoFrame texture_image(new rtc::RefCountedObject<AndroidTextureBuffer>( | |
| 735 width, height, this, native_handle), | |
| 736 output_timestamp_, 0, webrtc::kVideoRotation_0); | |
| 737 texture_image.set_ntp_time_ms(output_ntp_time_ms_); | |
| 738 const int32_t callback_status = callback_->Decoded(texture_image); | |
| 739 if (callback_status > 0) { | |
| 740 ALOGE("callback error"); | |
| 741 } | |
| 742 } | |
| 743 | |
| 744 void MediaCodecVideoDecoder::ReturnTextureFrame() { | |
| 745 codec_thread_->Invoke<void>( | |
| 746 Bind(&MediaCodecVideoDecoder::ReturnTextureFrameOnCodecThread, this)); | |
| 747 } | |
| 748 | |
| 749 void MediaCodecVideoDecoder::ReturnTextureFrameOnCodecThread() { | |
| 750 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | |
| 751 jni->CallVoidMethod(*j_media_codec_video_decoder_, | |
| 752 j_return_texture_frame_method_); | |
| 753 } | |
| 754 | |
| 712 int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback( | 755 int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback( |
| 713 DecodedImageCallback* callback) { | 756 DecodedImageCallback* callback) { |
| 714 callback_ = callback; | 757 callback_ = callback; |
| 715 return WEBRTC_VIDEO_CODEC_OK; | 758 return WEBRTC_VIDEO_CODEC_OK; |
| 716 } | 759 } |
| 717 | 760 |
| 718 int32_t MediaCodecVideoDecoder::Reset() { | 761 int32_t MediaCodecVideoDecoder::Reset() { |
| 719 ALOGD("DecoderReset"); | 762 ALOGD("DecoderReset"); |
| 720 if (!inited_) { | 763 if (!inited_) { |
| 721 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 764 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
| (...skipping 102 matching lines...) | |
| 824 } | 867 } |
| 825 ALOGE("Can not find HW video decoder for type %d.", (int)type); | 868 ALOGE("Can not find HW video decoder for type %d.", (int)type); |
| 826 return NULL; | 869 return NULL; |
| 827 } | 870 } |
| 828 | 871 |
| 829 void MediaCodecVideoDecoderFactory::DestroyVideoDecoder( | 872 void MediaCodecVideoDecoderFactory::DestroyVideoDecoder( |
| 830 webrtc::VideoDecoder* decoder) { | 873 webrtc::VideoDecoder* decoder) { |
| 831 delete decoder; | 874 delete decoder; |
| 832 } | 875 } |
| 833 | 876 |
| 877 JOW(void, MediaCodecVideoDecoder_nativeOnTextureFrame)( | |
| 878 JNIEnv* jni, | |
| 879 jclass, | |
| 880 jlong j_decoder_ptr, | |
| 881 jint j_width, | |
| 882 jint j_height, | |
| 883 jint j_oes_texture_id, | |
| 884 jfloatArray j_transform_matrix, | |
| 885 jlong j_timestamp_ns) { | |
| 886 reinterpret_cast<MediaCodecVideoDecoder*>(j_decoder_ptr) | |
| 887 ->OnTextureFrame(j_width, j_height, j_timestamp_ns, | |
| 888 NativeHandleImpl(jni, j_oes_texture_id, | |
| 889 j_transform_matrix)); | |
| 890 } | |
| 891 | |
| 834 } // namespace webrtc_jni | 892 } // namespace webrtc_jni |
| 835 | 893 |
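The new jlong argument added to initDecode() (passed as jlongFromPointer(this)) and the reinterpret_cast in MediaCodecVideoDecoder_nativeOnTextureFrame() form a plain native-pointer round-trip through Java. A minimal sketch of that pattern, with illustrative function names only:

```cpp
// Sketch of the pointer round-trip this CL relies on (names illustrative):
// the C++ decoder address travels to Java as a jlong and comes back unchanged,
// so the static JNI entry point can recover the decoder instance.
jlong NativeHandleForJava(MediaCodecVideoDecoder* decoder) {
  return jlongFromPointer(decoder);  // Handed to Java in initDecode().
}

MediaCodecVideoDecoder* DecoderFromNativeHandle(jlong j_decoder_ptr) {
  // Java passes the same value back in nativeOnTextureFrame().
  return reinterpret_cast<MediaCodecVideoDecoder*>(j_decoder_ptr);
}
```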