Index: talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
diff --git a/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc b/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
index ed5ba0b478edd0df334db0e1eca929e101312415..dce5a22e8262ef3ee2329b09d85d63553714e294 100644
--- a/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
+++ b/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
@@ -33,7 +33,6 @@
 #include "talk/app/webrtc/java/jni/androidmediacodeccommon.h"
 #include "talk/app/webrtc/java/jni/classreferenceholder.h"
 #include "talk/app/webrtc/java/jni/native_handle_impl.h"
-#include "talk/app/webrtc/java/jni/surfacetexturehelper_jni.h"
 #include "webrtc/base/bind.h"
 #include "webrtc/base/checks.h"
 #include "webrtc/base/logging.h"
@@ -112,7 +111,7 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
   bool use_surface_;
   VideoCodec codec_;
   webrtc::I420BufferPool decoded_frame_pool_;
-  rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_;
+  NativeHandleImpl native_handle_;
   DecodedImageCallback* callback_;
   int frames_received_;  // Number of frames received by decoder.
   int frames_decoded_;  // Number of frames decoded by decoder.
@@ -145,10 +144,10 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
   jfieldID j_height_field_;
   jfieldID j_stride_field_;
   jfieldID j_slice_height_field_;
+  jfieldID j_surface_texture_field_;
   // MediaCodecVideoDecoder.DecodedTextureBuffer fields.
   jfieldID j_textureID_field_;
-  jfieldID j_transform_matrix_field_;
-  jfieldID j_texture_timestamp_ns_field_;
+  jfieldID j_texture_presentation_timestamp_us_field_;
   // MediaCodecVideoDecoder.DecodedByteBuffer fields.
   jfieldID j_info_index_field_;
   jfieldID j_info_offset_field_;
@@ -157,6 +156,8 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
 
   // Global references; must be deleted in Release().
   std::vector<jobject> input_buffers_;
+  jobject surface_texture_;
+  jobject previous_surface_texture_;
 
   // Render EGL context - owned by factory, should not be allocated/destroyed
   // by VideoDecoder.
@@ -170,6 +171,8 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
       key_frame_required_(true),
       inited_(false),
       sw_fallback_required_(false),
+      surface_texture_(NULL),
+      previous_surface_texture_(NULL),
       codec_thread_(new Thread()),
       j_media_codec_video_decoder_class_(
           jni,
@@ -188,7 +191,7 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
   j_init_decode_method_ = GetMethodID(
       jni, *j_media_codec_video_decoder_class_, "initDecode",
       "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;"
-      "IILorg/webrtc/SurfaceTextureHelper;)Z");
+      "IILandroid/opengl/EGLContext;)Z");
   j_release_method_ =
       GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V");
   j_dequeue_input_buffer_method_ = GetMethodID(
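
Side note: the descriptor string passed to GetMethodID must match the Java declaration character for character, or the lookup returns NULL with a pending exception. A minimal sketch of how the new descriptor decomposes; the helper function is hypothetical (the real code caches the ID in the constructor as above):

    #include <cassert>
    #include <jni.h>

    // "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;" -> codec type enum
    // "II"                                                  -> width, height
    // "Landroid/opengl/EGLContext;"                         -> shared context
    // ")Z"                                                  -> boolean return
    jmethodID LookupInitDecode(JNIEnv* jni, jclass decoder_class) {
      jmethodID m = jni->GetMethodID(
          decoder_class, "initDecode",
          "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;"
          "IILandroid/opengl/EGLContext;)Z");
      assert(m != NULL);  // NULL means the Java-side signature drifted.
      return m;
    }
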
@@ -218,15 +221,17 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
       jni, *j_media_codec_video_decoder_class_, "stride", "I");
   j_slice_height_field_ = GetFieldID(
       jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I");
+  j_surface_texture_field_ = GetFieldID(
+      jni, *j_media_codec_video_decoder_class_, "surfaceTexture",
+      "Landroid/graphics/SurfaceTexture;");
 
   jclass j_decoder_decoded_texture_buffer_class = FindClass(jni,
       "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer");
   j_textureID_field_ = GetFieldID(
       jni, j_decoder_decoded_texture_buffer_class, "textureID", "I");
-  j_transform_matrix_field_ = GetFieldID(
-      jni, j_decoder_decoded_texture_buffer_class, "transformMatrix", "[F");
-  j_texture_timestamp_ns_field_ = GetFieldID(
-      jni, j_decoder_decoded_texture_buffer_class, "timestampNs", "J");
+  j_texture_presentation_timestamp_us_field_ =
+      GetFieldID(jni, j_decoder_decoded_texture_buffer_class,
+                 "presentationTimestampUs", "J");
 
   jclass j_decoder_decoded_byte_buffer_class = FindClass(jni,
       "org/webrtc/MediaCodecVideoDecoder$DecodedByteBuffer");
@@ -249,6 +254,14 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
 MediaCodecVideoDecoder::~MediaCodecVideoDecoder() {
   // Call Release() to ensure no more callbacks to us after we are deleted.
   Release();
+  // Delete global references.
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  if (previous_surface_texture_ != NULL) {
+    jni->DeleteGlobalRef(previous_surface_texture_);
+  }
+  if (surface_texture_ != NULL) {
+    jni->DeleteGlobalRef(surface_texture_);
+  }
 }
 
 int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst,
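
The destructor can run on an arbitrary thread, so it attaches to the JVM before dropping the global references; without DeleteGlobalRef the SurfaceTexture objects would never be collected. A minimal sketch of this lifecycle as a hypothetical standalone holder (not part of the patch), using AttachCurrentThreadIfNeeded from jni_helpers.h:

    #include "talk/app/webrtc/java/jni/jni_helpers.h"  // AttachCurrentThreadIfNeeded

    // Hypothetical RAII wrapper mirroring the NewGlobalRef/DeleteGlobalRef
    // pairing the decoder does by hand.
    class ScopedGlobalRef {
     public:
      ScopedGlobalRef(JNIEnv* jni, jobject local_ref)
          : ref_(jni->NewGlobalRef(local_ref)) {}
      ~ScopedGlobalRef() {
        // May be destroyed on any thread, so (re)attach before deleting.
        JNIEnv* jni = AttachCurrentThreadIfNeeded();
        if (ref_ != NULL) {
          jni->DeleteGlobalRef(ref_);
        }
      }
      jobject get() const { return ref_; }
     private:
      jobject ref_;
    };
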
@@ -298,11 +311,6 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
   frames_received_ = 0;
   frames_decoded_ = 0;
 
-  if (use_surface_) {
-    surface_texture_helper_ = new rtc::RefCountedObject<SurfaceTextureHelper>(
-        jni, render_egl_context_);
-  }
-
   jobject j_video_codec_enum = JavaEnumFromIndex(
       jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_);
   bool success = jni->CallBooleanMethod(
@@ -311,8 +319,7 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
       j_video_codec_enum,
       codec_.width,
       codec_.height,
-      use_surface_ ? surface_texture_helper_->GetJavaSurfaceTextureHelper()
-                   : nullptr);
+      use_surface_ ? render_egl_context_ : nullptr);
   if (CheckException(jni) || !success) {
     ALOGE << "Codec initialization error - fallback to SW codec.";
     sw_fallback_required_ = true;
@@ -354,6 +361,15 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
     }
   }
 
+  if (use_surface_) {
+    jobject surface_texture = GetObjectField(
+        jni, *j_media_codec_video_decoder_, j_surface_texture_field_);
+    if (previous_surface_texture_ != NULL) {
+      jni->DeleteGlobalRef(previous_surface_texture_);
+    }
+    previous_surface_texture_ = surface_texture_;
+    surface_texture_ = jni->NewGlobalRef(surface_texture);
+  }
 
   codec_thread_->PostDelayed(kMediaCodecPollMs, this);
   return WEBRTC_VIDEO_CODEC_OK;
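
Note the two-generation rotation above: frames delivered by the previous codec instance may still reference the old SurfaceTexture, so only the texture from two InitDecode calls ago is released. A sketch of that pattern in isolation, assuming the same member semantics as surface_texture_ / previous_surface_texture_:

    // Sketch: rotate SurfaceTexture global refs across re-initialization.
    void RotateTextureRefs(JNIEnv* jni, jobject fresh_local_ref,
                           jobject* current, jobject* previous) {
      if (*previous != NULL) {
        jni->DeleteGlobalRef(*previous);  // Two generations old; safe to drop.
      }
      *previous = *current;  // May still back frames in flight.
      *current = jni->NewGlobalRef(fresh_local_ref);
    }
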
@@ -378,7 +394,6 @@ int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() {
   }
   input_buffers_.clear();
   jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_);
-  surface_texture_helper_ = nullptr;
   inited_ = false;
   rtc::MessageQueueManager::Clear(this);
   if (CheckException(jni)) {
@@ -488,7 +503,7 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
   if (frames_received_ > frames_decoded_ + max_pending_frames_) {
     ALOGV("Received: %d. Decoded: %d. Wait for output...",
           frames_received_, frames_decoded_);
-    if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs)) {
+    if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs * 1000)) {
       ALOGE << "DeliverPendingOutputs error";
       return ProcessHWErrorOnCodecThread();
     }
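
The timeout now crosses JNI in microseconds because the Java side ultimately forwards it to MediaCodec.dequeueOutputBuffer(), whose timeout parameter is in microseconds. The conversion, spelled out:

    // ms -> us at the call site; the constant itself stays in milliseconds.
    const int dequeue_timeout_us = kMediaCodecTimeoutMs * 1000;
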
@@ -551,7 +566,7 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
 }
 
 bool MediaCodecVideoDecoder::DeliverPendingOutputs(
-    JNIEnv* jni, int dequeue_timeout_ms) {
+    JNIEnv* jni, int dequeue_timeout_us) {
   if (frames_received_ <= frames_decoded_) {
     // No need to query for output buffers - decoder is drained.
     return true;
@@ -560,7 +575,7 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
   jobject j_decoder_output_buffer = jni->CallObjectMethod(
       *j_media_codec_video_decoder_,
       j_dequeue_output_buffer_method_,
-      dequeue_timeout_ms);
+      dequeue_timeout_us);
   if (CheckException(jni)) {
     ALOGE << "dequeueOutputBuffer() error";
     return false;
@@ -585,15 +600,14 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
     // Extract data from Java DecodedTextureBuffer.
     const int texture_id =
         GetIntField(jni, j_decoder_output_buffer, j_textureID_field_);
-    const jfloatArray j_transform_matrix =
-        reinterpret_cast<jfloatArray>(GetObjectField(
-            jni, j_decoder_output_buffer, j_transform_matrix_field_));
-    const int64_t timestamp_ns = GetLongField(jni, j_decoder_output_buffer,
-                                              j_texture_timestamp_ns_field_);
-    output_timestamps_ms = timestamp_ns / rtc::kNumNanosecsPerMillisec;
+    const int64_t timestamp_us =
+        GetLongField(jni, j_decoder_output_buffer,
+                     j_texture_presentation_timestamp_us_field_);
+    output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec;
     // Create webrtc::VideoFrameBuffer with native texture handle.
-    frame_buffer = surface_texture_helper_->CreateTextureFrame(
-        width, height, NativeHandleImpl(jni, texture_id, j_transform_matrix));
+    native_handle_.SetTextureObject(surface_texture_, texture_id);
+    frame_buffer = new rtc::RefCountedObject<JniNativeHandleBuffer>(
+        &native_handle_, width, height);
   } else {
     // Extract data from Java ByteBuffer and create output yuv420 frame -
     // for non surface decoding only.
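
With the SurfaceTextureHelper gone, the texture path reads the decoder's presentation timestamp, reported in microseconds like MediaCodec.BufferInfo.presentationTimeUs, and scales it to milliseconds. A self-contained sketch of that conversion, matching the output_timestamps_ms computation above:

    #include "webrtc/base/timeutils.h"  // rtc::kNumMicrosecsPerMillisec

    int64_t PresentationUsToMs(int64_t timestamp_us) {
      return timestamp_us / rtc::kNumMicrosecsPerMillisec;  // 1000 us per ms.
    }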