Chromium Code Reviews| Index: talk/app/webrtc/java/jni/androidmediadecoder_jni.cc |
| diff --git a/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc b/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc |
| index dce5a22e8262ef3ee2329b09d85d63553714e294..0b2c6a7a3072e1717c3de8536a9cb0e10bfd646e 100644 |
| --- a/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc |
| +++ b/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc |
| @@ -33,6 +33,7 @@ |
| #include "talk/app/webrtc/java/jni/androidmediacodeccommon.h" |
| #include "talk/app/webrtc/java/jni/classreferenceholder.h" |
| #include "talk/app/webrtc/java/jni/native_handle_impl.h" |
| +#include "talk/app/webrtc/java/jni/surfacetexturehelper_jni.h" |
| #include "webrtc/base/bind.h" |
| #include "webrtc/base/checks.h" |
| #include "webrtc/base/logging.h" |
| @@ -111,7 +112,7 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder, |
| bool use_surface_; |
| VideoCodec codec_; |
| webrtc::I420BufferPool decoded_frame_pool_; |
| - NativeHandleImpl native_handle_; |
| + rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_; |
| DecodedImageCallback* callback_; |
| int frames_received_; // Number of frames received by decoder. |
| int frames_decoded_; // Number of frames decoded by decoder. |
| @@ -144,10 +145,11 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder, |
| jfieldID j_height_field_; |
| jfieldID j_stride_field_; |
| jfieldID j_slice_height_field_; |
| - jfieldID j_surface_texture_field_; |
| // MediaCodecVideoDecoder.DecodedTextureBuffer fields. |
| jfieldID j_textureID_field_; |
| - jfieldID j_texture_presentation_timestamp_us_field_; |
| + jfieldID j_transform_matrix_field_; |
| + jfieldID j_texture_timestamp_ns_field_; |
| + jfieldID j_texture_decode_time_ms_field_; |
| // MediaCodecVideoDecoder.DecodedByteBuffer fields. |
| jfieldID j_info_index_field_; |
| jfieldID j_info_offset_field_; |
| @@ -156,8 +158,6 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder, |
| // Global references; must be deleted in Release(). |
| std::vector<jobject> input_buffers_; |
| - jobject surface_texture_; |
| - jobject previous_surface_texture_; |
| // Render EGL context - owned by factory, should not be allocated/destroyed |
| // by VideoDecoder. |
| @@ -171,8 +171,6 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder( |
| key_frame_required_(true), |
| inited_(false), |
| sw_fallback_required_(false), |
| - surface_texture_(NULL), |
| - previous_surface_texture_(NULL), |
| codec_thread_(new Thread()), |
| j_media_codec_video_decoder_class_( |
| jni, |
| @@ -191,7 +189,7 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder( |
| j_init_decode_method_ = GetMethodID( |
| jni, *j_media_codec_video_decoder_class_, "initDecode", |
| "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;" |
| - "IILandroid/opengl/EGLContext;)Z"); |
| + "IILorg/webrtc/SurfaceTextureHelper;)Z"); |
| j_release_method_ = |
| GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V"); |
| j_dequeue_input_buffer_method_ = GetMethodID( |
| @@ -221,17 +219,17 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder( |
| jni, *j_media_codec_video_decoder_class_, "stride", "I"); |
| j_slice_height_field_ = GetFieldID( |
| jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I"); |
| - j_surface_texture_field_ = GetFieldID( |
| - jni, *j_media_codec_video_decoder_class_, "surfaceTexture", |
| - "Landroid/graphics/SurfaceTexture;"); |
| jclass j_decoder_decoded_texture_buffer_class = FindClass(jni, |
| "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer"); |
| j_textureID_field_ = GetFieldID( |
| jni, j_decoder_decoded_texture_buffer_class, "textureID", "I"); |
| - j_texture_presentation_timestamp_us_field_ = |
| - GetFieldID(jni, j_decoder_decoded_texture_buffer_class, |
| - "presentationTimestampUs", "J"); |
| + j_transform_matrix_field_ = GetFieldID( |
| + jni, j_decoder_decoded_texture_buffer_class, "transformMatrix", "[F"); |
| + j_texture_timestamp_ns_field_ = GetFieldID( |
| + jni, j_decoder_decoded_texture_buffer_class, "timestampNs", "J"); |
| + j_texture_decode_time_ms_field_ = GetFieldID( |
| + jni, j_decoder_decoded_texture_buffer_class, "decodeTimeMs", "J"); |
| jclass j_decoder_decoded_byte_buffer_class = FindClass(jni, |
| "org/webrtc/MediaCodecVideoDecoder$DecodedByteBuffer"); |
| @@ -254,14 +252,6 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder( |
| MediaCodecVideoDecoder::~MediaCodecVideoDecoder() { |
| // Call Release() to ensure no more callbacks to us after we are deleted. |
| Release(); |
| - // Delete global references. |
| - JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| - if (previous_surface_texture_ != NULL) { |
| - jni->DeleteGlobalRef(previous_surface_texture_); |
| - } |
| - if (surface_texture_ != NULL) { |
| - jni->DeleteGlobalRef(surface_texture_); |
| - } |
| } |
| int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst, |
| @@ -311,6 +301,11 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() { |
| frames_received_ = 0; |
| frames_decoded_ = 0; |
| + if (use_surface_) { |
| + surface_texture_helper_ = new rtc::RefCountedObject<SurfaceTextureHelper>( |
| + jni, render_egl_context_); |
| + } |
| + |
| jobject j_video_codec_enum = JavaEnumFromIndex( |
| jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_); |
| bool success = jni->CallBooleanMethod( |
| @@ -319,7 +314,8 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() { |
| j_video_codec_enum, |
| codec_.width, |
| codec_.height, |
| - use_surface_ ? render_egl_context_ : nullptr); |
| + use_surface_ ? surface_texture_helper_->GetJavaSurfaceTextureHelper() |
| + : nullptr); |
| if (CheckException(jni) || !success) { |
| ALOGE << "Codec initialization error - fallback to SW codec."; |
| sw_fallback_required_ = true; |
| @@ -361,15 +357,6 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() { |
| } |
| } |
| - if (use_surface_) { |
| - jobject surface_texture = GetObjectField( |
| - jni, *j_media_codec_video_decoder_, j_surface_texture_field_); |
| - if (previous_surface_texture_ != NULL) { |
| - jni->DeleteGlobalRef(previous_surface_texture_); |
| - } |
| - previous_surface_texture_ = surface_texture_; |
| - surface_texture_ = jni->NewGlobalRef(surface_texture); |
| - } |
| codec_thread_->PostDelayed(kMediaCodecPollMs, this); |
| return WEBRTC_VIDEO_CODEC_OK; |
| @@ -394,6 +381,7 @@ int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() { |
| } |
| input_buffers_.clear(); |
| jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_); |
| + surface_texture_helper_ = nullptr; |
| inited_ = false; |
| rtc::MessageQueueManager::Clear(this); |
| if (CheckException(jni)) { |
| @@ -501,9 +489,9 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread( |
| // Try to drain the decoder and wait until output is not too |
| // much behind the input. |
| if (frames_received_ > frames_decoded_ + max_pending_frames_) { |
| - ALOGV("Received: %d. Decoded: %d. Wait for output...", |
| - frames_received_, frames_decoded_); |
| - if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs * 1000)) { |
| + ALOGD << "Received: " << frames_received_ << ". Decoded: " |
| + << frames_decoded_ << ". Wait for output..."; |
| + if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs)) { |
| ALOGE << "DeliverPendingOutputs error"; |
| return ProcessHWErrorOnCodecThread(); |
| } |
| @@ -566,7 +554,7 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread( |
| } |
| bool MediaCodecVideoDecoder::DeliverPendingOutputs( |
| - JNIEnv* jni, int dequeue_timeout_us) { |
| + JNIEnv* jni, int dequeue_timeout_ms) { |
| if (frames_received_ <= frames_decoded_) { |
| // No need to query for output buffers - decoder is drained. |
| return true; |
| @@ -575,7 +563,7 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs( |
| jobject j_decoder_output_buffer = jni->CallObjectMethod( |
| *j_media_codec_video_decoder_, |
| j_dequeue_output_buffer_method_, |
| - dequeue_timeout_us); |
| + dequeue_timeout_ms); |
| if (CheckException(jni)) { |
| ALOGE << "dequeueOutputBuffer() error"; |
| return false; |
| @@ -596,18 +584,25 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs( |
| rtc::scoped_refptr<webrtc::VideoFrameBuffer> frame_buffer; |
| long output_timestamps_ms = 0; |
| + int decode_time_ms = 0; |
| if (use_surface_) { |
| // Extract data from Java DecodedTextureBuffer. |
| const int texture_id = |
| GetIntField(jni, j_decoder_output_buffer, j_textureID_field_); |
| - const int64_t timestamp_us = |
| - GetLongField(jni, j_decoder_output_buffer, |
| - j_texture_presentation_timestamp_us_field_); |
| - output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec; |
| + const jfloatArray j_transform_matrix = |
| + reinterpret_cast<jfloatArray>(GetObjectField( |
| + jni, j_decoder_output_buffer, j_transform_matrix_field_)); |
| + const int64_t timestamp_ns = GetLongField(jni, j_decoder_output_buffer, |
| + j_texture_timestamp_ns_field_); |
| + output_timestamps_ms = timestamp_ns / rtc::kNumNanosecsPerMillisec; |
| + |
| + decode_time_ms = GetLongField(jni, j_decoder_output_buffer, |
| + j_texture_decode_time_ms_field_); |
| + output_timestamps_ms = timestamp_ns / rtc::kNumNanosecsPerMillisec; |
|
magjed_webrtc
2015/10/28 11:57:16
This is a duplicated line.
perkj_webrtc
2015/10/28 21:12:39
Done.
|
| + |
| // Create webrtc::VideoFrameBuffer with native texture handle. |
| - native_handle_.SetTextureObject(surface_texture_, texture_id); |
| - frame_buffer = new rtc::RefCountedObject<JniNativeHandleBuffer>( |
| - &native_handle_, width, height); |
| + frame_buffer = surface_texture_helper_->CreateTextureFrame( |
| + width, height, NativeHandleImpl(jni, texture_id, j_transform_matrix)); |
| } else { |
| // Extract data from Java ByteBuffer and create output yuv420 frame - |
| // for non surface decoding only. |
| @@ -692,23 +687,24 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs( |
| decoded_frame.set_ntp_time_ms(ntp_times_ms_.front()); |
| ntp_times_ms_.erase(ntp_times_ms_.begin()); |
| } |
| - int64_t frame_decoding_time_ms = 0; |
| + int64_t frame_delayed_ms = 0; |
| if (frame_rtc_times_ms_.size() > 0) { |
| - frame_decoding_time_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front(); |
| + frame_delayed_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front(); |
| frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin()); |
| } |
| ALOGV("Decoder frame out # %d. %d x %d. %d x %d. Color: 0x%x. TS: %ld." |
| - " DecTime: %lld", frames_decoded_, width, height, stride, slice_height, |
| - color_format, output_timestamps_ms, frame_decoding_time_ms); |
| + " DelayTime: %lld", frames_decoded_, width, height, stride, slice_height, |
|
magjed_webrtc
2015/10/28 11:57:16
I think you should print both latency and decode time.
perkj_webrtc
2015/10/28 21:12:39
Done.
|
| + color_format, output_timestamps_ms, frame_delayed_ms); |
| // Calculate and print decoding statistics - every 3 seconds. |
| frames_decoded_++; |
| current_frames_++; |
| - current_decoding_time_ms_ += frame_decoding_time_ms; |
| + current_decoding_time_ms_ += decode_time_ms; |
|
magjed_webrtc
2015/10/28 11:57:16
You need to set |decode_time_ms| for ByteBuffer output as well.
perkj_webrtc
2015/10/28 21:12:39
Done.
|
| int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_; |
| if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs && |
| current_frames_ > 0) { |
| - ALOGD << "Decoded frames: " << frames_decoded_ << ". Bitrate: " << |
| + ALOGD << "Decoded frames: " << frames_decoded_ << ". Received frames: " |
| + << frames_received_ << ". Bitrate: " << |
| (current_bytes_ * 8 / statistic_time_ms) << " kbps, fps: " << |
| ((current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms) |
| << ". decTime: " << (current_decoding_time_ms_ / current_frames_) << |
| @@ -720,7 +716,8 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs( |
| } |
| // Callback - output decoded frame. |
| - const int32_t callback_status = callback_->Decoded(decoded_frame); |
| + const int32_t callback_status = |
| + callback_->Decoded(decoded_frame, decode_time_ms); |
| if (callback_status > 0) { |
| ALOGE << "callback error"; |
| } |