Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(391)

Unified Diff: talk/app/webrtc/java/jni/androidmediadecoder_jni.cc

Issue 1422963003: Android MediaCodecVideoDecoder: Manage lifetime of texture frames (Closed) Base URL: https://chromium.googlesource.com/external/webrtc.git@master
Patch Set: Addressed comments and further refactored. Created 5 years, 2 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View side-by-side diff with in-line comments
Download patch
Index: talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
diff --git a/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc b/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
index dce5a22e8262ef3ee2329b09d85d63553714e294..2646ffd89281ee42b4f86203ab3795c1e678a3a5 100644
--- a/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
+++ b/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
@@ -33,6 +33,7 @@
#include "talk/app/webrtc/java/jni/androidmediacodeccommon.h"
#include "talk/app/webrtc/java/jni/classreferenceholder.h"
#include "talk/app/webrtc/java/jni/native_handle_impl.h"
+#include "talk/app/webrtc/java/jni/surfacetexturehelper_jni.h"
#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
@@ -111,7 +112,7 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
bool use_surface_;
VideoCodec codec_;
webrtc::I420BufferPool decoded_frame_pool_;
- NativeHandleImpl native_handle_;
+ rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_;
DecodedImageCallback* callback_;
int frames_received_; // Number of frames received by decoder.
int frames_decoded_; // Number of frames decoded by decoder.
@@ -134,7 +135,8 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
jmethodID j_release_method_;
jmethodID j_dequeue_input_buffer_method_;
jmethodID j_queue_input_buffer_method_;
- jmethodID j_dequeue_output_buffer_method_;
+ jmethodID j_dequeue_byte_buffer_method_;
+ jmethodID j_dequeue_texture_buffer_method_;
jmethodID j_return_decoded_byte_buffer_method_;
// MediaCodecVideoDecoder fields.
jfieldID j_input_buffers_field_;
@@ -144,10 +146,13 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
jfieldID j_height_field_;
jfieldID j_stride_field_;
jfieldID j_slice_height_field_;
- jfieldID j_surface_texture_field_;
// MediaCodecVideoDecoder.DecodedTextureBuffer fields.
jfieldID j_textureID_field_;
- jfieldID j_texture_presentation_timestamp_us_field_;
+ jfieldID j_transform_matrix_field_;
+ jfieldID j_texture_timestamp_ns_field_;
+ jfieldID j_texture_decode_time_ms_field_;
+ jfieldID j_texture_width_field_;
+ jfieldID j_texture_height_field_;
// MediaCodecVideoDecoder.DecodedByteBuffer fields.
jfieldID j_info_index_field_;
jfieldID j_info_offset_field_;
@@ -156,8 +161,6 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
// Global references; must be deleted in Release().
std::vector<jobject> input_buffers_;
- jobject surface_texture_;
- jobject previous_surface_texture_;
// Render EGL context - owned by factory, should not be allocated/destroyed
// by VideoDecoder.
@@ -171,8 +174,6 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
key_frame_required_(true),
inited_(false),
sw_fallback_required_(false),
- surface_texture_(NULL),
- previous_surface_texture_(NULL),
codec_thread_(new Thread()),
j_media_codec_video_decoder_class_(
jni,
@@ -191,16 +192,19 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
j_init_decode_method_ = GetMethodID(
jni, *j_media_codec_video_decoder_class_, "initDecode",
"(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;"
- "IILandroid/opengl/EGLContext;)Z");
+ "IILorg/webrtc/SurfaceTextureHelper;)Z");
j_release_method_ =
GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V");
j_dequeue_input_buffer_method_ = GetMethodID(
jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I");
j_queue_input_buffer_method_ = GetMethodID(
jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJ)Z");
- j_dequeue_output_buffer_method_ = GetMethodID(
- jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer",
- "(I)Ljava/lang/Object;");
+ j_dequeue_byte_buffer_method_ = GetMethodID(
+ jni, *j_media_codec_video_decoder_class_, "dequeueByteBuffer",
+ "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecodedByteBuffer;");
+ j_dequeue_texture_buffer_method_ = GetMethodID(
+ jni, *j_media_codec_video_decoder_class_, "dequeueTextureBuffer",
magjed_webrtc 2015/10/29 09:44:13 this is a 5 space indent, remove one space
perkj_webrtc 2015/10/29 19:26:44 Done.
+ "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer;");
j_return_decoded_byte_buffer_method_ =
GetMethodID(jni, *j_media_codec_video_decoder_class_,
"returnDecodedByteBuffer", "(I)V");
@@ -221,17 +225,21 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
jni, *j_media_codec_video_decoder_class_, "stride", "I");
j_slice_height_field_ = GetFieldID(
jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I");
- j_surface_texture_field_ = GetFieldID(
- jni, *j_media_codec_video_decoder_class_, "surfaceTexture",
- "Landroid/graphics/SurfaceTexture;");
jclass j_decoder_decoded_texture_buffer_class = FindClass(jni,
"org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer");
j_textureID_field_ = GetFieldID(
jni, j_decoder_decoded_texture_buffer_class, "textureID", "I");
- j_texture_presentation_timestamp_us_field_ =
- GetFieldID(jni, j_decoder_decoded_texture_buffer_class,
- "presentationTimestampUs", "J");
+ j_transform_matrix_field_ = GetFieldID(
+ jni, j_decoder_decoded_texture_buffer_class, "transformMatrix", "[F");
+ j_texture_timestamp_ns_field_ = GetFieldID(
+ jni, j_decoder_decoded_texture_buffer_class, "timestampNs", "J");
+ j_texture_decode_time_ms_field_ = GetFieldID(
+ jni, j_decoder_decoded_texture_buffer_class, "decodeTimeMs", "J");
+ j_texture_width_field_ = GetFieldID(
+ jni, j_decoder_decoded_texture_buffer_class, "width", "I");
+ j_texture_height_field_ = GetFieldID(
+ jni, j_decoder_decoded_texture_buffer_class, "height", "I");
jclass j_decoder_decoded_byte_buffer_class = FindClass(jni,
"org/webrtc/MediaCodecVideoDecoder$DecodedByteBuffer");
@@ -254,14 +262,6 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
MediaCodecVideoDecoder::~MediaCodecVideoDecoder() {
// Call Release() to ensure no more callbacks to us after we are deleted.
Release();
- // Delete global references.
- JNIEnv* jni = AttachCurrentThreadIfNeeded();
- if (previous_surface_texture_ != NULL) {
- jni->DeleteGlobalRef(previous_surface_texture_);
- }
- if (surface_texture_ != NULL) {
- jni->DeleteGlobalRef(surface_texture_);
- }
}
int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst,
@@ -311,6 +311,11 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
frames_received_ = 0;
frames_decoded_ = 0;
+ if (use_surface_) {
+ surface_texture_helper_ = new rtc::RefCountedObject<SurfaceTextureHelper>(
+ jni, render_egl_context_);
+ }
+
jobject j_video_codec_enum = JavaEnumFromIndex(
jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_);
bool success = jni->CallBooleanMethod(
@@ -319,7 +324,8 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
j_video_codec_enum,
codec_.width,
codec_.height,
- use_surface_ ? render_egl_context_ : nullptr);
+ use_surface_ ? surface_texture_helper_->GetJavaSurfaceTextureHelper()
+ : nullptr);
if (CheckException(jni) || !success) {
ALOGE << "Codec initialization error - fallback to SW codec.";
sw_fallback_required_ = true;
@@ -361,15 +367,6 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
}
}
- if (use_surface_) {
- jobject surface_texture = GetObjectField(
- jni, *j_media_codec_video_decoder_, j_surface_texture_field_);
- if (previous_surface_texture_ != NULL) {
- jni->DeleteGlobalRef(previous_surface_texture_);
- }
- previous_surface_texture_ = surface_texture_;
- surface_texture_ = jni->NewGlobalRef(surface_texture);
- }
codec_thread_->PostDelayed(kMediaCodecPollMs, this);
return WEBRTC_VIDEO_CODEC_OK;
@@ -394,6 +391,7 @@ int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() {
}
input_buffers_.clear();
jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_);
+ surface_texture_helper_ = nullptr;
inited_ = false;
rtc::MessageQueueManager::Clear(this);
if (CheckException(jni)) {
@@ -501,9 +499,9 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
// Try to drain the decoder and wait until output is not too
// much behind the input.
if (frames_received_ > frames_decoded_ + max_pending_frames_) {
- ALOGV("Received: %d. Decoded: %d. Wait for output...",
- frames_received_, frames_decoded_);
- if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs * 1000)) {
+ ALOGD << "Received: " << frames_received_ << ". Decoded: "
+ << frames_decoded_ << ". Wait for output...";
+ if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs)) {
ALOGE << "DeliverPendingOutputs error";
return ProcessHWErrorOnCodecThread();
}
@@ -566,16 +564,22 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
}
bool MediaCodecVideoDecoder::DeliverPendingOutputs(
- JNIEnv* jni, int dequeue_timeout_us) {
+ JNIEnv* jni, int dequeue_timeout_ms) {
if (frames_received_ <= frames_decoded_) {
// No need to query for output buffers - decoder is drained.
return true;
}
// Get decoder output.
- jobject j_decoder_output_buffer = jni->CallObjectMethod(
+ jobject j_decoder_output_buffer =
magjed_webrtc 2015/10/29 09:44:12 You can write this like: jobject j_decoder_output_
perkj_webrtc 2015/10/29 19:26:44 Done.
+ (use_surface_ ?
+ jni->CallObjectMethod(
+ *j_media_codec_video_decoder_,
+ j_dequeue_texture_buffer_method_,
+ dequeue_timeout_ms) :
+ jni->CallObjectMethod(
*j_media_codec_video_decoder_,
- j_dequeue_output_buffer_method_,
- dequeue_timeout_us);
+ j_dequeue_byte_buffer_method_,
+ dequeue_timeout_ms));
if (CheckException(jni)) {
ALOGE << "dequeueOutputBuffer() error";
return false;
@@ -585,30 +589,42 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
return true;
}
- // Get decoded video frame properties.
- int color_format = GetIntField(jni, *j_media_codec_video_decoder_,
- j_color_format_field_);
- int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_);
- int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_);
- int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_);
- int slice_height = GetIntField(jni, *j_media_codec_video_decoder_,
- j_slice_height_field_);
-
rtc::scoped_refptr<webrtc::VideoFrameBuffer> frame_buffer;
long output_timestamps_ms = 0;
+ int decode_time_ms = 0;
if (use_surface_) {
// Extract data from Java DecodedTextureBuffer.
const int texture_id =
GetIntField(jni, j_decoder_output_buffer, j_textureID_field_);
- const int64_t timestamp_us =
- GetLongField(jni, j_decoder_output_buffer,
- j_texture_presentation_timestamp_us_field_);
- output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec;
+ const jfloatArray j_transform_matrix =
+ reinterpret_cast<jfloatArray>(GetObjectField(
+ jni, j_decoder_output_buffer, j_transform_matrix_field_));
+ const int64_t timestamp_ns = GetLongField(jni, j_decoder_output_buffer,
+ j_texture_timestamp_ns_field_);
+ output_timestamps_ms = timestamp_ns / rtc::kNumNanosecsPerMillisec;
+
+ decode_time_ms = GetLongField(jni, j_decoder_output_buffer,
+ j_texture_decode_time_ms_field_);
+ const int width = GetIntField(jni, j_decoder_output_buffer,
+ j_texture_width_field_);
+ const int height = GetIntField(jni, j_decoder_output_buffer,
+ j_texture_height_field_);
+
// Create webrtc::VideoFrameBuffer with native texture handle.
- native_handle_.SetTextureObject(surface_texture_, texture_id);
- frame_buffer = new rtc::RefCountedObject<JniNativeHandleBuffer>(
- &native_handle_, width, height);
+ frame_buffer = surface_texture_helper_->CreateTextureFrame(
+ width, height, NativeHandleImpl(jni, texture_id, j_transform_matrix));
} else {
+ // Get decoded video frame properties.
+ const int color_format = GetIntField(jni, *j_media_codec_video_decoder_,
+ j_color_format_field_);
+ const int width =
+ GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_);
+ const int height =
+ GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_);
+ const int stride =
+ GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_);
+ const int slice_height = GetIntField(jni, *j_media_codec_video_decoder_,
+ j_slice_height_field_);
// Extract data from Java ByteBuffer and create output yuv420 frame -
// for non surface decoding only.
const int output_buffer_index =
@@ -620,7 +636,9 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
const int64_t timestamp_us = GetLongField(
jni, j_decoder_output_buffer, j_info_presentation_timestamp_us_field_);
output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec;
-
+ decode_time_ms = (!frame_rtc_times_ms_.empty()) ?
+ GetCurrentTimeMs() - frame_rtc_times_ms_.front() :
+ 0;
if (output_buffer_size < width * height * 3 / 2) {
ALOGE << "Insufficient output buffer size: " << output_buffer_size;
return false;
@@ -692,23 +710,25 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
decoded_frame.set_ntp_time_ms(ntp_times_ms_.front());
ntp_times_ms_.erase(ntp_times_ms_.begin());
}
- int64_t frame_decoding_time_ms = 0;
+ int64_t frame_delayed_ms = 0;
if (frame_rtc_times_ms_.size() > 0) {
- frame_decoding_time_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front();
+ frame_delayed_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front();
frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
}
ALOGV("Decoder frame out # %d. %d x %d. %d x %d. Color: 0x%x. TS: %ld."
magjed_webrtc 2015/10/29 09:44:12 |slice_height| etc is not defined here anymore. Ma
perkj_webrtc 2015/10/29 19:26:44 Changed back to read width and height as fields from
- " DecTime: %lld", frames_decoded_, width, height, stride, slice_height,
- color_format, output_timestamps_ms, frame_decoding_time_ms);
+ " DecodeTime: %lld. DelayTime: %lld.", frames_decoded_, width, height,
+ stride, slice_height, color_format, output_timestamps_ms, decode_time_ms,
+ frame_delayed_ms);
// Calculate and print decoding statistics - every 3 seconds.
frames_decoded_++;
current_frames_++;
- current_decoding_time_ms_ += frame_decoding_time_ms;
+ current_decoding_time_ms_ += decode_time_ms;
int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
current_frames_ > 0) {
- ALOGD << "Decoded frames: " << frames_decoded_ << ". Bitrate: " <<
+ ALOGD << "Decoded frames: " << frames_decoded_ << ". Received frames: "
+ << frames_received_ << ". Bitrate: " <<
(current_bytes_ * 8 / statistic_time_ms) << " kbps, fps: " <<
((current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms)
<< ". decTime: " << (current_decoding_time_ms_ / current_frames_) <<
@@ -720,7 +740,8 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
}
// Callback - output decoded frame.
- const int32_t callback_status = callback_->Decoded(decoded_frame);
+ const int32_t callback_status =
+ callback_->Decoded(decoded_frame, decode_time_ms);
if (callback_status > 0) {
ALOGE << "callback error";
}

Powered by Google App Engine
This is Rietveld 408576698