Index: talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
diff --git a/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc b/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
index f859410acb011ee137387610e814d1fdf71c268d..aa58f06802bb24d40214b1f5ecc2ca10d5e57a87 100644
--- a/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
+++ b/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
@@ -32,6 +32,7 @@
 #include "talk/app/webrtc/java/jni/androidmediacodeccommon.h"
 #include "talk/app/webrtc/java/jni/classreferenceholder.h"
 #include "talk/app/webrtc/java/jni/native_handle_impl.h"
+#include "talk/app/webrtc/java/jni/surfacetexturehelper_jni.h"
 #include "webrtc/base/bind.h"
 #include "webrtc/base/checks.h"
 #include "webrtc/base/logging.h"
@@ -110,7 +111,7 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
   bool use_surface_;
   VideoCodec codec_;
   webrtc::I420BufferPool decoded_frame_pool_;
-  NativeHandleImpl native_handle_;
+  rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_;
   DecodedImageCallback* callback_;
   int frames_received_;  // Number of frames received by decoder.
   int frames_decoded_;  // Number of frames decoded by decoder.
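Note: the raw NativeHandleImpl member above (together with the manually managed SurfaceTexture global references removed further down) is replaced by a ref-counted SurfaceTextureHelper. The patch only shows the helper's call sites; the following is a rough sketch of the interface this diff assumes, with names taken from those call sites and everything else hypothetical (the real declaration lives in talk/app/webrtc/java/jni/surfacetexturehelper_jni.h, included in the first hunk):

    // Sketch only - not the actual header; limited to the members this
    // patch exercises.
    class SurfaceTextureHelper : public rtc::RefCountInterface {
     public:
      // Wraps a Java org.webrtc.SurfaceTextureHelper created for the given
      // render EGL context.
      SurfaceTextureHelper(JNIEnv* jni, jobject render_egl_context);

      // The Java helper object handed to MediaCodecVideoDecoder.initDecode().
      jobject GetJavaSurfaceTextureHelper() const;

      // Wraps a decoded OES texture (plus its transform matrix) in a
      // webrtc::VideoFrameBuffer; presumably the texture is handed back to
      // the helper when the buffer is released.
      rtc::scoped_refptr<webrtc::VideoFrameBuffer> CreateTextureFrame(
          int width, int height, const NativeHandleImpl& native_handle);
    };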
@@ -143,10 +144,10 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
   jfieldID j_height_field_;
   jfieldID j_stride_field_;
   jfieldID j_slice_height_field_;
-  jfieldID j_surface_texture_field_;
   // MediaCodecVideoDecoder.DecodedTextureBuffer fields.
   jfieldID j_textureID_field_;
-  jfieldID j_texture_presentation_timestamp_us_field_;
+  jfieldID j_transform_matrix_field_;
+  jfieldID j_texture_timestamp_ns_field_;
   // MediaCodecVideoDecoder.DecodedByteBuffer fields.
   jfieldID j_info_index_field_;
   jfieldID j_info_offset_field_;
@@ -155,8 +156,6 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
 
   // Global references; must be deleted in Release().
   std::vector<jobject> input_buffers_;
-  jobject surface_texture_;
-  jobject previous_surface_texture_;
 
   // Render EGL context - owned by factory, should not be allocated/destroyed
   // by VideoDecoder.
@@ -170,8 +169,6 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
     key_frame_required_(true),
     inited_(false),
     sw_fallback_required_(false),
-    surface_texture_(NULL),
-    previous_surface_texture_(NULL),
     codec_thread_(new Thread()),
     j_media_codec_video_decoder_class_(
         jni,
@@ -190,7 +187,7 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
   j_init_decode_method_ = GetMethodID(
       jni, *j_media_codec_video_decoder_class_, "initDecode",
       "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;"
-      "IILandroid/opengl/EGLContext;)Z");
+      "IILorg/webrtc/SurfaceTextureHelper;)Z");
   j_release_method_ =
       GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V");
   j_dequeue_input_buffer_method_ = GetMethodID(
@@ -220,17 +217,15 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
       jni, *j_media_codec_video_decoder_class_, "stride", "I");
   j_slice_height_field_ = GetFieldID(
       jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I");
-  j_surface_texture_field_ = GetFieldID(
-      jni, *j_media_codec_video_decoder_class_, "surfaceTexture",
-      "Landroid/graphics/SurfaceTexture;");
 
   jclass j_decoder_decoded_texture_buffer_class = FindClass(jni,
       "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer");
   j_textureID_field_ = GetFieldID(
       jni, j_decoder_decoded_texture_buffer_class, "textureID", "I");
-  j_texture_presentation_timestamp_us_field_ =
-      GetFieldID(jni, j_decoder_decoded_texture_buffer_class,
-                 "presentationTimestampUs", "J");
+  j_transform_matrix_field_ = GetFieldID(
+      jni, j_decoder_decoded_texture_buffer_class, "transformMatrix", "[F");
+  j_texture_timestamp_ns_field_ = GetFieldID(
+      jni, j_decoder_decoded_texture_buffer_class, "timestampNs", "J");
 
   jclass j_decoder_decoded_byte_buffer_class = FindClass(jni,
       "org/webrtc/MediaCodecVideoDecoder$DecodedByteBuffer");
@@ -253,14 +248,6 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
 MediaCodecVideoDecoder::~MediaCodecVideoDecoder() {
   // Call Release() to ensure no more callbacks to us after we are deleted.
   Release();
-  // Delete global references.
-  JNIEnv* jni = AttachCurrentThreadIfNeeded();
-  if (previous_surface_texture_ != NULL) {
-    jni->DeleteGlobalRef(previous_surface_texture_);
-  }
-  if (surface_texture_ != NULL) {
-    jni->DeleteGlobalRef(surface_texture_);
-  }
 }
 
 int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst,
@@ -310,6 +297,11 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
   frames_received_ = 0;
   frames_decoded_ = 0;
 
+  if (use_surface_) {
+    surface_texture_helper_ = new rtc::RefCountedObject<SurfaceTextureHelper>(
+        jni, render_egl_context_);
+  }
+
   jobject j_video_codec_enum = JavaEnumFromIndex(
       jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_);
   bool success = jni->CallBooleanMethod(
@@ -318,7 +310,8 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
       j_video_codec_enum,
       codec_.width,
       codec_.height,
-      use_surface_ ? render_egl_context_ : nullptr);
+      use_surface_ ? surface_texture_helper_->GetJavaSurfaceTextureHelper()
+                   : nullptr);
   if (CheckException(jni) || !success) {
     ALOGE("Codec initialization error - fallback to SW codec.");
     sw_fallback_required_ = true;
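Note: together with the GetMethodID change above ("IILandroid/opengl/EGLContext;)Z" becomes "IILorg/webrtc/SurfaceTextureHelper;)Z"), the Java peer's initDecode() now receives the org.webrtc.SurfaceTextureHelper instead of a raw EGLContext. The helper is created only when use_surface_ is true, so the ternary never dereferences a null surface_texture_helper_. Decoding the new JNI descriptor, for reference (standard JNI notation, not part of the patch):

    // (Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;IILorg/webrtc/SurfaceTextureHelper;)Z
    //   => boolean initDecode(VideoCodecType, int, int, SurfaceTextureHelper)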
@@ -358,15 +351,6 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
     }
   }
 
-  if (use_surface_) {
-    jobject surface_texture = GetObjectField(
-        jni, *j_media_codec_video_decoder_, j_surface_texture_field_);
-    if (previous_surface_texture_ != NULL) {
-      jni->DeleteGlobalRef(previous_surface_texture_);
-    }
-    previous_surface_texture_ = surface_texture_;
-    surface_texture_ = jni->NewGlobalRef(surface_texture);
-  }
   codec_thread_->PostDelayed(kMediaCodecPollMs, this);
 
   return WEBRTC_VIDEO_CODEC_OK;
@@ -391,6 +375,7 @@ int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() {
   }
   input_buffers_.clear();
   jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_);
+  surface_texture_helper_ = nullptr;
   inited_ = false;
   rtc::MessageQueueManager::Clear(this);
   if (CheckException(jni)) {
@@ -596,14 +581,15 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
     // Extract data from Java DecodedTextureBuffer.
     const int texture_id =
         GetIntField(jni, j_decoder_output_buffer, j_textureID_field_);
-    const int64_t timestamp_us =
-        GetLongField(jni, j_decoder_output_buffer,
-                     j_texture_presentation_timestamp_us_field_);
-    output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec;
+    const jfloatArray j_transform_matrix =
+        reinterpret_cast<jfloatArray>(GetObjectField(
+            jni, j_decoder_output_buffer, j_transform_matrix_field_));
+    const int64_t timestamp_ns = GetLongField(jni, j_decoder_output_buffer,
+                                              j_texture_timestamp_ns_field_);
+    output_timestamps_ms = timestamp_ns / rtc::kNumNanosecsPerMillisec;
     // Create webrtc::VideoFrameBuffer with native texture handle.
-    native_handle_.SetTextureObject(surface_texture_, texture_id);
-    frame_buffer = new rtc::RefCountedObject<JniNativeHandleBuffer>(
-        &native_handle_, width, height);
+    frame_buffer = surface_texture_helper_->CreateTextureFrame(
+        width, height, NativeHandleImpl(jni, texture_id, j_transform_matrix));
   } else {
     // Extract data from Java ByteBuffer and create output yuv420 frame -
     // for non surface decoding only.
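Note on the last hunk: DecodedTextureBuffer now carries the SurfaceTexture transform matrix and a timestamp in nanoseconds (SurfaceTexture timestamps are reported in ns), so the divisor changes from rtc::kNumMicrosecsPerMillisec to rtc::kNumNanosecsPerMillisec, and the frame buffer is built through SurfaceTextureHelper::CreateTextureFrame() from a NativeHandleImpl holding the texture id and matrix. A rough sketch of what that constructor is assumed to do (the real NativeHandleImpl is declared in talk/app/webrtc/java/jni/native_handle_impl.h; the struct and field names below are illustrative only):

    // Sketch only: copy the texture id and the 16-float transform matrix out
    // of the jfloatArray so the handle stays valid once the JNI local
    // reference is gone.
    struct NativeHandleSketch {
      int oes_texture_id;
      float sampling_matrix[16];

      NativeHandleSketch(JNIEnv* jni, jint texture_id, jfloatArray matrix)
          : oes_texture_id(texture_id) {
        jni->GetFloatArrayRegion(matrix, 0, 16, sampling_matrix);
      }
    };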
|
|