Index: talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
diff --git a/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc b/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
index dce5a22e8262ef3ee2329b09d85d63553714e294..a59a2e9606dbca371b6ec6761f6065861f6820c6 100644
--- a/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
+++ b/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
@@ -96,7 +96,7 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
   int32_t InitDecodeOnCodecThread();
   int32_t ReleaseOnCodecThread();
-  int32_t DecodeOnCodecThread(const EncodedImage& inputImage);
+  int32_t DecodeOnCodecThread(const EncodedImage* inputImage);
   // Deliver any outputs pending in the MediaCodec to our |callback_| and return
   // true on success.
   bool DeliverPendingOutputs(JNIEnv* jni, int dequeue_timeout_us);
@@ -489,11 +489,11 @@ int32_t MediaCodecVideoDecoder::Decode(
   }
   return codec_thread_->Invoke<int32_t>(Bind(
-      &MediaCodecVideoDecoder::DecodeOnCodecThread, this, inputImage));
+      &MediaCodecVideoDecoder::DecodeOnCodecThread, this, &inputImage));
 }
 int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
-    const EncodedImage& inputImage) {
+    const EncodedImage* inputImage) {
   CheckOnCodecThread();
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
   ScopedLocalRefFrame local_ref_frame(jni);
@@ -527,29 +527,29 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
       reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer));
   RTC_CHECK(buffer) << "Indirect buffer??";
   int64_t buffer_capacity = jni->GetDirectBufferCapacity(j_input_buffer);
-  if (CheckException(jni) || buffer_capacity < inputImage._length) {
-    ALOGE << "Input frame size "<< inputImage._length <<
+  if (CheckException(jni) || buffer_capacity < inputImage->_length) {
+    ALOGE << "Input frame size "<< inputImage->_length <<
         " is bigger than buffer size " << buffer_capacity;
     return ProcessHWErrorOnCodecThread();
   }
   jlong timestamp_us = (frames_received_ * 1000000) / codec_.maxFramerate;
   ALOGV("Decoder frame in # %d. Type: %d. Buffer # %d. TS: %lld. Size: %d",
-      frames_received_, inputImage._frameType, j_input_buffer_index,
-      timestamp_us / 1000, inputImage._length);
-  memcpy(buffer, inputImage._buffer, inputImage._length);
+      frames_received_, inputImage->_frameType, j_input_buffer_index,
+      timestamp_us / 1000, inputImage->_length);
+  memcpy(buffer, inputImage->_buffer, inputImage->_length);
   // Save input image timestamps for later output.
   frames_received_++;
-  current_bytes_ += inputImage._length;
-  timestamps_.push_back(inputImage._timeStamp);
-  ntp_times_ms_.push_back(inputImage.ntp_time_ms_);
+  current_bytes_ += inputImage->_length;
+  timestamps_.push_back(inputImage->_timeStamp);
+  ntp_times_ms_.push_back(inputImage->ntp_time_ms_);
   frame_rtc_times_ms_.push_back(GetCurrentTimeMs());
   // Feed input to decoder.
   bool success = jni->CallBooleanMethod(*j_media_codec_video_decoder_,
                                         j_queue_input_buffer_method_,
                                         j_input_buffer_index,
-                                        inputImage._length,
+                                        inputImage->_length,
                                         timestamp_us);
   if (CheckException(jni) || !success) {
     ALOGE << "queueInputBuffer error";