Index: webrtc/api/java/jni/androidmediaencoder_jni.cc
diff --git a/webrtc/api/java/jni/androidmediaencoder_jni.cc b/webrtc/api/java/jni/androidmediaencoder_jni.cc
index 540a18f83813602cac212b85d3d26dc6acc3219f..47b91e3b15d9341b3e7bf1a6a7ccceeadc64ef2b 100644
--- a/webrtc/api/java/jni/androidmediaencoder_jni.cc
+++ b/webrtc/api/java/jni/androidmediaencoder_jni.cc
@@ -145,7 +145,8 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
   bool MaybeReconfigureEncoderOnCodecThread(const webrtc::VideoFrame& frame);
   int32_t EncodeOnCodecThread(
       const webrtc::VideoFrame& input_image,
-      const std::vector<webrtc::FrameType>* frame_types);
+      const std::vector<webrtc::FrameType>* frame_types,
+      const int64_t frame_input_time_ms);
   bool EncodeByteBufferOnCodecThread(JNIEnv* jni,
       bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index);
   bool EncodeTextureOnCodecThread(JNIEnv* jni,
@@ -423,7 +424,7 @@ int32_t MediaCodecVideoEncoder::Encode(
     const std::vector<webrtc::FrameType>* frame_types) {
   return codec_thread_->Invoke<int32_t>(
       RTC_FROM_HERE, Bind(&MediaCodecVideoEncoder::EncodeOnCodecThread, this,
-                          frame, frame_types));
+                          frame, frame_types, rtc::TimeMillis()));
 }

 int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallback(
@@ -598,7 +599,8 @@ int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(

 int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
     const webrtc::VideoFrame& frame,
-    const std::vector<webrtc::FrameType>* frame_types) {
+    const std::vector<webrtc::FrameType>* frame_types,
+    const int64_t frame_input_time_ms) {
   RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
   ScopedLocalRefFrame local_ref_frame(jni);
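With the two hunks above, the encode-time clock starts on the calling thread: Encode() samples rtc::TimeMillis() before the blocking Invoke, and EncodeOnCodecThread() receives that value as frame_input_time_ms instead of sampling the clock itself. Time spent waiting for the codec thread, and any per-frame work done before the buffer is queued, is now charged to the frame's measured encode time. A minimal standalone sketch of the pattern, using std::chrono in place of rtc::TimeMillis() and a plain call in place of codec_thread_->Invoke (all names here are illustrative, not part of the patch):

    #include <chrono>
    #include <cstdint>
    #include <iostream>

    // Stand-in for rtc::TimeMillis(): monotonic milliseconds.
    static int64_t NowMs() {
      return std::chrono::duration_cast<std::chrono::milliseconds>(
                 std::chrono::steady_clock::now().time_since_epoch())
          .count();
    }

    // Hypothetical encoder skeleton. The caller samples the clock once and
    // forwards the value, so the codec thread never re-samples it.
    class Encoder {
     public:
      int32_t Encode() {
        // Sampled on the calling thread, before any thread hop.
        return EncodeOnCodecThread(NowMs());
      }

     private:
      int32_t EncodeOnCodecThread(int64_t frame_input_time_ms) {
        // frame_input_time_ms already includes any queueing delay.
        std::cout << "frame entered encoder at " << frame_input_time_ms
                  << " ms\n";
        return 0;
      }
    };

    int main() {
      Encoder encoder;
      return encoder.Encode();
    }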
@@ -697,7 +699,6 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
     return WEBRTC_VIDEO_CODEC_ERROR;
   }

-  const int64_t time_before_calling_encode = rtc::TimeMillis();
   const bool key_frame =
       frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame;
   bool encode_status = true;
@@ -708,12 +709,18 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
     if (j_input_buffer_index == -1) {
       // Video codec falls behind - no input buffer available.
       ALOGW << "Encoder drop frame - no input buffers available";
-      current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
-      frames_dropped_media_encoder_++;
-      OnDroppedFrameOnCodecThread();
+      if (frames_received_ > 1) {
+        current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
+        frames_dropped_media_encoder_++;
+        OnDroppedFrameOnCodecThread();
+      } else {
+        // Input buffers are not ready after codec initialization, HW is
+        // still allocating them - this is expected and should not result
+        // in a dropped-frame report.
+        frames_received_ = 0;
+      }
       return WEBRTC_VIDEO_CODEC_OK;  // TODO(fischman): see webrtc bug 2887.
-    }
-    if (j_input_buffer_index == -2) {
+    } else if (j_input_buffer_index == -2) {
       ResetCodecOnCodecThread();
       return WEBRTC_VIDEO_CODEC_ERROR;
     }
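The new branch separates a codec that has genuinely fallen behind from the expected startup case: right after MediaCodec initialization the hardware may still be allocating its input buffers, so a failed dequeue on the very first frame is not reported as a drop; instead frames_received_ is reset to 0 so the retried frame counts as the first one again. A distilled sketch of that decision, with hypothetical names standing in for the member variables:

    #include <iostream>

    // Hypothetical distillation of the branch above. Returns true if the
    // failed input-buffer dequeue should be reported as a dropped frame.
    bool ShouldReportDrop(int& frames_received, int& frames_dropped) {
      if (frames_received > 1) {
        // Steady state: the codec really is out of input buffers.
        ++frames_dropped;
        return true;
      }
      // First frame after init: buffers are still being allocated by the HW.
      // Forget that the frame was received so the next attempt starts fresh.
      frames_received = 0;
      return false;
    }

    int main() {
      int received = 1, dropped = 0;
      std::cout << ShouldReportDrop(received, dropped) << "\n";  // 0: startup
      received = 10;
      std::cout << ShouldReportDrop(received, dropped) << "\n";  // 1: real drop
      return 0;
    }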
@@ -731,7 +738,7 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(

   // Save input image timestamps for later output.
   input_frame_infos_.emplace_back(
-      time_before_calling_encode, input_frame.timestamp(),
+      frame_input_time_ms, input_frame.timestamp(),
       input_frame.render_time_ms(), input_frame.rotation());

   last_input_timestamp_ms_ =
@@ -944,6 +951,7 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
     bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info);

     // Get frame timestamps from a queue - for non config frames only.
+    int64_t encoding_start_time_ms = 0;
     int64_t frame_encoding_time_ms = 0;
     last_output_timestamp_ms_ =
         GetOutputBufferInfoPresentationTimestampUs(jni, j_output_buffer_info) /
@@ -953,8 +961,7 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
       output_timestamp_ = frame_info.frame_timestamp;
       output_render_time_ms_ = frame_info.frame_render_time_ms;
       output_rotation_ = frame_info.rotation;
-      frame_encoding_time_ms =
-          rtc::TimeMillis() - frame_info.encode_start_time;
+      encoding_start_time_ms = frame_info.encode_start_time;
       input_frame_infos_.pop_front();
     }
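input_frame_infos_ is a FIFO: an entry is pushed when a frame is handed to MediaCodec and popped when the matching output buffer appears, which is how input-side metadata (timestamps, rotation, and now the caller-side input time) is re-associated with each encoded frame. After this patch the output path only records the stored start time; the subtraction is deferred to the log site (see the @@ -1100 hunk below). A self-contained sketch of the bookkeeping, with a hypothetical FrameInfo type mirroring the patch's InputFrameInfo:

    #include <cstdint>
    #include <deque>

    // Hypothetical mirror of the patch's per-frame bookkeeping.
    struct FrameInfo {
      int64_t encode_start_time_ms;  // caller-side input time (this patch)
      uint32_t rtp_timestamp;
      int64_t render_time_ms;
    };

    class FrameInfoQueue {
     public:
      // Input path: called right after the frame is queued to the codec.
      void OnFrameIn(int64_t input_time_ms, uint32_t rtp_ts, int64_t render_ms) {
        infos_.push_back({input_time_ms, rtp_ts, render_ms});
      }

      // Output path: returns the stored start time. Computing the elapsed
      // encode time is deferred until logging, so it also covers delivery
      // of the encoded frame.
      int64_t OnFrameOut() {
        const int64_t start_ms = infos_.front().encode_start_time_ms;
        infos_.pop_front();
        return start_ms;
      }

     private:
      std::deque<FrameInfo> infos_;
    };

    int main() {
      FrameInfoQueue q;
      q.OnFrameIn(/*input_time_ms=*/1000, /*rtp_ts=*/90000, /*render_ms=*/1000);
      return q.OnFrameOut() == 1000 ? 0 : 1;
    }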
@@ -964,17 +971,6 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
         jni->GetDirectBufferAddress(j_output_buffer));
     CHECK_EXCEPTION(jni);

-    if (frames_encoded_ < kMaxEncodedLogFrames) {
-      int current_latency =
-          (int)(last_input_timestamp_ms_ - last_output_timestamp_ms_);
-      ALOGD << "Encoder frame out # " << frames_encoded_ <<
-          ". Key: " << key_frame <<
-          ". Size: " << payload_size <<
-          ". TS: " << (int)last_output_timestamp_ms_ <<
-          ". Latency: " << current_latency <<
-          ". EncTime: " << frame_encoding_time_ms;
-    }
-
     // Callback - return encoded frame.
     int32_t callback_status = 0;
     if (callback_) {
@@ -1100,6 +1096,21 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
       return false;
     }

+    // Print per frame statistics.
+    if (encoding_start_time_ms > 0) {
+      frame_encoding_time_ms = rtc::TimeMillis() - encoding_start_time_ms;
+    }
+    if (frames_encoded_ < kMaxEncodedLogFrames) {
+      int current_latency =
+          (int)(last_input_timestamp_ms_ - last_output_timestamp_ms_);
+      ALOGD << "Encoder frame out # " << frames_encoded_ <<
+          ". Key: " << key_frame <<
+          ". Size: " << payload_size <<
+          ". TS: " << (int)last_output_timestamp_ms_ <<
+          ". Latency: " << current_latency <<
+          ". EncTime: " << frame_encoding_time_ms;
+    }
+
     // Calculate and print encoding statistics - every 3 seconds.
     frames_encoded_++;
     current_frames_++;