| Index: talk/app/webrtc/java/jni/androidmediadecoder_jni.cc |
| diff --git a/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc b/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc |
| index a67dd502ffc433dac2e5b63e2d5a8f079aee66c9..e4b35b8420e09bd05623c027fd7b8cf0cf446168 100644 |
| --- a/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc |
| +++ b/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc |
| @@ -36,6 +36,7 @@ |
| #include "webrtc/base/checks.h" |
| #include "webrtc/base/logging.h" |
| #include "webrtc/base/thread.h" |
| +#include "webrtc/common_video/interface/video_frame_buffer.h" |
| #include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h" |
| #include "webrtc/system_wrappers/interface/logcat_trace_context.h" |
| #include "webrtc/system_wrappers/interface/tick_util.h" |
| @@ -86,6 +87,12 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder, |
| // rtc::MessageHandler implementation. |
| void OnMessage(rtc::Message* msg) override; |
| + void OnTextureFrame(int width, |
| + int height, |
| + int64_t timestamp_ns, |
| + const NativeHandleImpl& native_handle); |
| + void ReturnTextureFrame(); |
| + |
| private: |
| // CHECK-fail if not running on |codec_thread_|. |
| void CheckOnCodecThread(); |
| @@ -96,6 +103,12 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder, |
| // Deliver any outputs pending in the MediaCodec to our |callback_| and return |
| // true on success. |
| bool DeliverPendingOutputs(JNIEnv* jni, int dequeue_timeout_us); |
| + void DeliverFrame(VideoFrame* frame); |
| + void OnTextureFrameOnCodecThread(int width, |
| + int height, |
| + int64_t timestamp_ns, |
| + const NativeHandleImpl& native_handle); |
| + void ReturnTextureFrameOnCodecThread(); |
| int32_t ProcessHWErrorOnCodecThread(); |
| // Type of video codec. |
| @@ -106,8 +119,8 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder, |
| bool sw_fallback_required_; |
| bool use_surface_; |
| VideoCodec codec_; |
| + // TODO(magjed): Use frame pool instead of |decoded_image_|. |
|
perkj_webrtc
2015/09/22 12:40:37
I would skip this comment or do it now.
|
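The TODO above is about recycling per-frame buffers rather than reusing the single |decoded_image_| member. A minimal, self-contained sketch of the frame-pool idea in plain C++ (the Buffer and BufferPool types here are hypothetical illustrations, not webrtc classes and not part of this patch):

    #include <cstddef>
    #include <cstdint>
    #include <memory>
    #include <vector>

    // Hypothetical pixel buffer; stands in for an I420 frame buffer.
    struct Buffer {
      explicit Buffer(size_t size) : data(size) {}
      std::vector<uint8_t> data;
    };

    class BufferPool {
     public:
      // Returns a buffer of |size| bytes, reusing one whose only remaining
      // reference is the pool's own; otherwise allocates a new one.
      std::shared_ptr<Buffer> Get(size_t size) {
        for (const auto& buffer : buffers_) {
          if (buffer.use_count() == 1 && buffer->data.size() == size)
            return buffer;
        }
        buffers_.push_back(std::make_shared<Buffer>(size));
        return buffers_.back();
      }

     private:
      std::vector<std::shared_ptr<Buffer>> buffers_;
    };

With a pool like this, each decoded frame would get its own buffer, so a renderer that still holds the previous frame is not overwritten by the next delivery.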
| VideoFrame decoded_image_; |
| - NativeHandleImpl native_handle_; |
| DecodedImageCallback* callback_; |
| int frames_received_; // Number of frames received by decoder. |
| int frames_decoded_; // Number of frames decoded by decoder. |
| @@ -120,9 +133,6 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder, |
| std::vector<int64_t> ntp_times_ms_; |
| std::vector<int64_t> frame_rtc_times_ms_; // Time when video frame is sent to |
| // decoder input. |
| - int32_t output_timestamp_; // Last output frame timestamp from timestamps_ Q. |
| - int64_t output_ntp_time_ms_; // Last output frame ntp time from |
| - // ntp_times_ms_ queue. |
| // State that is constant for the lifetime of this object once the ctor |
| // returns. |
| @@ -134,6 +144,7 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder, |
| jmethodID j_dequeue_input_buffer_method_; |
| jmethodID j_queue_input_buffer_method_; |
| jmethodID j_dequeue_output_buffer_method_; |
| + jmethodID j_return_texture_frame_method_; |
| jmethodID j_release_output_buffer_method_; |
| // MediaCodecVideoDecoder fields. |
| jfieldID j_input_buffers_field_; |
| @@ -143,8 +154,6 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder, |
| jfieldID j_height_field_; |
| jfieldID j_stride_field_; |
| jfieldID j_slice_height_field_; |
| - jfieldID j_surface_texture_field_; |
| - jfieldID j_textureID_field_; |
| // MediaCodecVideoDecoder.DecoderOutputBufferInfo fields. |
| jfieldID j_info_index_field_; |
| jfieldID j_info_offset_field_; |
| @@ -153,14 +162,40 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder, |
| // Global references; must be deleted in Release(). |
| std::vector<jobject> input_buffers_; |
| - jobject surface_texture_; |
| - jobject previous_surface_texture_; |
| // Render EGL context - owned by factory, should not be allocated/destroyed |
| // by VideoDecoder. |
| jobject render_egl_context_; |
| }; |
| +namespace { |
| + |
| +class AndroidTextureBuffer : public webrtc::NativeHandleBuffer { |
| + public: |
| + AndroidTextureBuffer(int width, |
| + int height, |
| + MediaCodecVideoDecoder* decoder, |
| + const NativeHandleImpl& native_handle) |
| + : webrtc::NativeHandleBuffer(&native_handle_, width, height), |
| + native_handle_(native_handle), |
| + decoder_(decoder) {} |
| + |
| + ~AndroidTextureBuffer() { |
| + decoder_->ReturnTextureFrame(); |
| + } |
| + |
| + rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override { |
| + // TODO(magjed): Implement this. |
|
perkj_webrtc
2015/09/22 12:40:37
CHECK(false) - should never be called.
|
| + return nullptr; |
| + } |
| + |
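Applying perkj's suggestion above, the stub could fail loudly instead of returning null, for example (a sketch only; the exact macro spelling depends on webrtc/base/checks.h at this revision, which the file already includes):

    rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override {
      // Texture buffers are never converted to I420 in this decoder, so any
      // call here indicates a bug in the caller.
      CHECK(false);
      return nullptr;
    }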
| + private: |
| + NativeHandleImpl native_handle_; |
| + MediaCodecVideoDecoder* const decoder_; |
|
perkj_webrtc
2015/09/22 12:40:37
There is nothing that guarantee the lifetime of de
|
| +}; |
| + |
| +} // anonymous namespace |
| + |
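One possible way to address perkj's lifetime concern about |decoder_| above (a sketch only, not part of this patch; synchronization between Detach() and ReturnTextureFrame() is omitted for brevity): route the return call through a small ref-counted proxy that the decoder detaches before it is destroyed, so a texture buffer that outlives the decoder becomes a no-op instead of a call through a dangling pointer.

    // Hypothetical helper, instantiated via rtc::RefCountedObject.
    // AndroidTextureBuffer would hold a rtc::scoped_refptr<TextureFrameReturner>
    // instead of a raw MediaCodecVideoDecoder pointer.
    class TextureFrameReturner : public rtc::RefCountInterface {
     public:
      explicit TextureFrameReturner(MediaCodecVideoDecoder* decoder)
          : decoder_(decoder) {}
      // Called from ~MediaCodecVideoDecoder() before the decoder goes away.
      void Detach() { decoder_ = nullptr; }
      void ReturnTextureFrame() {
        if (decoder_ != nullptr)
          decoder_->ReturnTextureFrame();
      }

     private:
      MediaCodecVideoDecoder* decoder_;
    };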
| MediaCodecVideoDecoder::MediaCodecVideoDecoder( |
| JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context) : |
| codecType_(codecType), |
| @@ -168,8 +203,6 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder( |
| key_frame_required_(true), |
| inited_(false), |
| sw_fallback_required_(false), |
| - surface_texture_(NULL), |
| - previous_surface_texture_(NULL), |
| codec_thread_(new Thread()), |
| j_media_codec_video_decoder_class_( |
| jni, |
| @@ -187,7 +220,7 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder( |
| j_init_decode_method_ = GetMethodID( |
| jni, *j_media_codec_video_decoder_class_, "initDecode", |
| - "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;" |
| + "(JLorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;" |
| "IILandroid/opengl/EGLContext;)Z"); |
| j_release_method_ = |
| GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V"); |
| @@ -198,6 +231,8 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder( |
| j_dequeue_output_buffer_method_ = GetMethodID( |
| jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer", |
| "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo;"); |
| + j_return_texture_frame_method_ = GetMethodID( |
| + jni, *j_media_codec_video_decoder_class_, "returnTextureFrame", "()V"); |
| j_release_output_buffer_method_ = GetMethodID( |
| jni, *j_media_codec_video_decoder_class_, "releaseOutputBuffer", "(I)Z"); |
| @@ -217,11 +252,6 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder( |
| jni, *j_media_codec_video_decoder_class_, "stride", "I"); |
| j_slice_height_field_ = GetFieldID( |
| jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I"); |
| - j_textureID_field_ = GetFieldID( |
| - jni, *j_media_codec_video_decoder_class_, "textureID", "I"); |
| - j_surface_texture_field_ = GetFieldID( |
| - jni, *j_media_codec_video_decoder_class_, "surfaceTexture", |
| - "Landroid/graphics/SurfaceTexture;"); |
| jclass j_decoder_output_buffer_info_class = FindClass(jni, |
| "org/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo"); |
| @@ -244,14 +274,6 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder( |
| MediaCodecVideoDecoder::~MediaCodecVideoDecoder() { |
| // Call Release() to ensure no more callbacks to us after we are deleted. |
| Release(); |
| - // Delete global references. |
| - JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| - if (previous_surface_texture_ != NULL) { |
| - jni->DeleteGlobalRef(previous_surface_texture_); |
| - } |
| - if (surface_texture_ != NULL) { |
| - jni->DeleteGlobalRef(surface_texture_); |
| - } |
| } |
| int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst, |
| @@ -306,6 +328,7 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() { |
| bool success = jni->CallBooleanMethod( |
| *j_media_codec_video_decoder_, |
| j_init_decode_method_, |
| + jlongFromPointer(this), |
| j_video_codec_enum, |
| codec_.width, |
| codec_.height, |
| @@ -331,8 +354,8 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() { |
| current_frames_ = 0; |
| current_bytes_ = 0; |
| current_decoding_time_ms_ = 0; |
| - output_timestamp_ = 0; |
| - output_ntp_time_ms_ = 0; |
| + decoded_image_.set_timestamp(0); |
| + decoded_image_.set_ntp_time_ms(0); |
| timestamps_.clear(); |
| ntp_times_ms_.clear(); |
| frame_rtc_times_ms_.clear(); |
| @@ -351,15 +374,6 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() { |
| } |
| } |
| - if (use_surface_) { |
| - jobject surface_texture = GetObjectField( |
| - jni, *j_media_codec_video_decoder_, j_surface_texture_field_); |
| - if (previous_surface_texture_ != NULL) { |
| - jni->DeleteGlobalRef(previous_surface_texture_); |
| - } |
| - previous_surface_texture_ = surface_texture_; |
| - surface_texture_ = jni->NewGlobalRef(surface_texture); |
| - } |
| codec_thread_->PostDelayed(kMediaCodecPollMs, this); |
| return WEBRTC_VIDEO_CODEC_OK; |
| @@ -554,6 +568,23 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread( |
| return WEBRTC_VIDEO_CODEC_OK; |
| } |
| +void MediaCodecVideoDecoder::DeliverFrame(VideoFrame* frame) { |
| + // Pop timestamps from queues and update |frame|. |
|
perkj_webrtc
2015/09/22 12:40:37
please CheckOnCodecThread();
|
| + if (!timestamps_.empty()) { |
| + frame->set_timestamp(timestamps_.front()); |
| + timestamps_.erase(timestamps_.begin()); |
| + } |
| + if (!ntp_times_ms_.empty()) { |
| + frame->set_ntp_time_ms(ntp_times_ms_.front()); |
| + ntp_times_ms_.erase(ntp_times_ms_.begin()); |
| + } |
| + |
| + const int32_t callback_status = callback_->Decoded(*frame); |
| + if (callback_status > 0) { |
| + ALOGE("callback error"); |
| + } |
| +} |
| + |
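With perkj's request above applied, DeliverFrame() would assert its threading up front; CheckOnCodecThread() is the existing helper declared earlier in this class. A sketch of just the top of the function, the rest unchanged from the patch:

    void MediaCodecVideoDecoder::DeliverFrame(VideoFrame* frame) {
      // |timestamps_|, |ntp_times_ms_| and |callback_| are only touched on the
      // codec thread, so CHECK-fail if called from anywhere else.
      CheckOnCodecThread();
      // Pop timestamps from queues and update |frame|.
      if (!timestamps_.empty()) {
        frame->set_timestamp(timestamps_.front());
        timestamps_.erase(timestamps_.begin());
      }
      // ... remainder as in the patch above.
    }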
| bool MediaCodecVideoDecoder::DeliverPendingOutputs( |
| JNIEnv* jni, int dequeue_timeout_us) { |
| if (frames_received_ <= frames_decoded_) { |
| @@ -597,8 +628,6 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs( |
| int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_); |
| int slice_height = GetIntField(jni, *j_media_codec_video_decoder_, |
| j_slice_height_field_); |
| - int texture_id = GetIntField(jni, *j_media_codec_video_decoder_, |
| - j_textureID_field_); |
| // Extract data from Java ByteBuffer and create output yuv420 frame - |
| // for non surface decoding only. |
| @@ -643,15 +672,6 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs( |
| } |
| } |
| - // Get frame timestamps from a queue. |
| - if (timestamps_.size() > 0) { |
| - output_timestamp_ = timestamps_.front(); |
| - timestamps_.erase(timestamps_.begin()); |
| - } |
| - if (ntp_times_ms_.size() > 0) { |
| - output_ntp_time_ms_ = ntp_times_ms_.front(); |
| - ntp_times_ms_.erase(ntp_times_ms_.begin()); |
| - } |
| int64_t frame_decoding_time_ms = 0; |
| if (frame_rtc_times_ms_.size() > 0) { |
| frame_decoding_time_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front(); |
| @@ -689,26 +709,48 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs( |
| } |
| // Callback - output decoded frame. |
| - int32_t callback_status = WEBRTC_VIDEO_CODEC_OK; |
| if (use_surface_) { |
| - native_handle_.SetTextureObject(surface_texture_, texture_id); |
| - VideoFrame texture_image(new rtc::RefCountedObject<JniNativeHandleBuffer>( |
| - &native_handle_, width, height), |
| - output_timestamp_, 0, webrtc::kVideoRotation_0); |
| - texture_image.set_ntp_time_ms(output_ntp_time_ms_); |
| - callback_status = callback_->Decoded(texture_image); |
| + // We will receive a callback in OnTextureFrame() when the texture frame is |
| + // ready to deliver. |
| } else { |
| - decoded_image_.set_timestamp(output_timestamp_); |
| - decoded_image_.set_ntp_time_ms(output_ntp_time_ms_); |
| - callback_status = callback_->Decoded(decoded_image_); |
| - } |
| - if (callback_status > 0) { |
| - ALOGE("callback error"); |
| + DeliverFrame(&decoded_image_); |
| } |
| return true; |
| } |
| +void MediaCodecVideoDecoder::OnTextureFrame( |
| + int width, |
| + int height, |
| + int64_t timestamp_ns, |
| + const NativeHandleImpl& native_handle) { |
| + codec_thread_->Invoke<void>( |
|
perkj_webrtc
2015/09/22 12:40:37
CheckOnCodecThread ?
Can we skip this communicati
AlexG
2015/09/28 20:06:21
I agree with perkj@ - keep polling strategy - it w
|
| + Bind(&MediaCodecVideoDecoder::OnTextureFrameOnCodecThread, this, |
| + width, height, timestamp_ns, native_handle)); |
| +} |
| + |
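For the reviewers' preference to keep the polling strategy rather than a per-frame cross-thread Invoke, a rough sketch of that alternative (the PendingTextureFrame struct and the |pending_frame_lock_| and |pending_texture_frames_| members are hypothetical, not part of this patch): the JNI callback only records the frame, and the codec thread picks it up from DeliverPendingOutputs(), which is already polled every kMediaCodecPollMs.

    // Hypothetical queue entry; |pending_frame_lock_| (an rtc::CriticalSection
    // from webrtc/base/criticalsection.h) and |pending_texture_frames_| would
    // be new members of MediaCodecVideoDecoder.
    struct PendingTextureFrame {
      int width;
      int height;
      int64_t timestamp_ns;
      NativeHandleImpl native_handle;
    };

    void MediaCodecVideoDecoder::OnTextureFrame(
        int width, int height, int64_t timestamp_ns,
        const NativeHandleImpl& native_handle) {
      // Called from the Java side; only record the frame here.
      rtc::CritScope lock(&pending_frame_lock_);
      pending_texture_frames_.push_back(
          {width, height, timestamp_ns, native_handle});
    }

DeliverPendingOutputs() would then, on the codec thread, swap the queue out under the same lock and run DeliverFrame() for each entry, keeping all delivery on |codec_thread_| without blocking the caller of OnTextureFrame().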
| +void MediaCodecVideoDecoder::OnTextureFrameOnCodecThread( |
| + int width, |
| + int height, |
| + int64_t timestamp_ns, |
| + const NativeHandleImpl& native_handle) { |
| + VideoFrame texture_frame(new rtc::RefCountedObject<AndroidTextureBuffer>( |
| + width, height, this, native_handle), |
| + 0, 0, webrtc::kVideoRotation_0); |
| + DeliverFrame(&texture_frame); |
| +} |
| + |
| +void MediaCodecVideoDecoder::ReturnTextureFrame() { |
| + codec_thread_->Invoke<void>( |
| + Bind(&MediaCodecVideoDecoder::ReturnTextureFrameOnCodecThread, this)); |
| +} |
| + |
| +void MediaCodecVideoDecoder::ReturnTextureFrameOnCodecThread() { |
| + JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| + jni->CallVoidMethod(*j_media_codec_video_decoder_, |
| + j_return_texture_frame_method_); |
| +} |
| + |
| int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback( |
| DecodedImageCallback* callback) { |
| callback_ = callback; |
| @@ -831,5 +873,20 @@ void MediaCodecVideoDecoderFactory::DestroyVideoDecoder( |
| delete decoder; |
| } |
| +JOW(void, MediaCodecVideoDecoder_nativeOnTextureFrame)( |
| + JNIEnv* jni, |
| + jclass, |
| + jlong j_decoder_ptr, |
| + jint j_width, |
| + jint j_height, |
| + jint j_oes_texture_id, |
| + jfloatArray j_transform_matrix, |
| + jlong j_timestamp_ns) { |
| + reinterpret_cast<MediaCodecVideoDecoder*>(j_decoder_ptr) |
|
AlexG
2015/09/28 20:06:21
How do you ensure that j_decoder_ptr is still vali
|
| + ->OnTextureFrame(j_width, j_height, j_timestamp_ns, |
| + NativeHandleImpl(jni, j_oes_texture_id, |
| + j_transform_matrix)); |
| +} |
| + |
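Regarding AlexG's question about |j_decoder_ptr| validity: a minimal guard at the top of the JOW function could look like the sketch below, under the assumption (not shown in this patch) that the Java side passes 0 once the native decoder has been released. This alone is not sufficient; the Java decoder must also stop invoking nativeOnTextureFrame after release() returns, otherwise a stale non-zero pointer can still be dereferenced.

    // Sketch: drop late texture frames instead of dereferencing a possibly
    // stale native pointer.
    if (j_decoder_ptr == 0) {
      ALOGE("Texture frame after decoder release; dropping.");
      return;
    }
    reinterpret_cast<MediaCodecVideoDecoder*>(j_decoder_ptr)
        ->OnTextureFrame(j_width, j_height, j_timestamp_ns,
                         NativeHandleImpl(jni, j_oes_texture_id,
                                          j_transform_matrix));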
| } // namespace webrtc_jni |