Index: webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc
diff --git a/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc b/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc
index 92406ec107c0b4fc8365f0b7e26e2eec69efa223..49f25b0c963a124cde205f4b9c775a8b5df9a32f 100644
--- a/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc
+++ b/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc
@@ -242,11 +242,13 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder {
InputFrameInfo(int64_t encode_start_time,
int32_t frame_timestamp,
int64_t frame_render_time_ms,
- webrtc::VideoRotation rotation)
+ webrtc::VideoRotation rotation,
+ webrtc::VideoContentTypeId content_type)
: encode_start_time(encode_start_time),
frame_timestamp(frame_timestamp),
frame_render_time_ms(frame_render_time_ms),
- rotation(rotation) {}
+ rotation(rotation),
+ content_type(content_type) {}

// Time when video frame is sent to encoder input.
const int64_t encode_start_time;
@@ -254,6 +256,7 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder {
const int32_t frame_timestamp;
const int64_t frame_render_time_ms;
const webrtc::VideoRotation rotation;
+ const webrtc::VideoContentTypeId content_type;
};
std::list<InputFrameInfo> input_frame_infos_;
int32_t output_timestamp_;  // Last output frame timestamp from
@@ -262,6 +265,7 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder {
// |input_frame_infos_|.
webrtc::VideoRotation output_rotation_;  // Last output frame rotation from
// |input_frame_infos_|.
+ webrtc::VideoContentTypeId output_content_type_;
// Frame size in bytes fed to MediaCodec.
int yuv_size_;
// True only when between a callback_->OnEncodedImage() call return a positive
@@ -709,7 +713,8 @@ int32_t MediaCodecVideoEncoder::Encode(
frame.video_frame_buffer());
VideoFrame input_frame(input_buffer, frame.timestamp(),
- frame.render_time_ms(), frame.rotation());
+ frame.render_time_ms(), frame.rotation(),
+ frame.content_type());
if (!MaybeReconfigureEncoder(input_frame)) {
ALOGE << "Failed to reconfigure encoder.";
@@ -756,7 +761,8 @@ int32_t MediaCodecVideoEncoder::Encode(
// Save input image timestamps for later output.
input_frame_infos_.emplace_back(frame_input_time_ms, input_frame.timestamp(),
input_frame.render_time_ms(),
- input_frame.rotation());
+ input_frame.rotation(),
+ input_frame.content_type());

last_input_timestamp_ms_ =
current_timestamp_us_ / rtc::kNumMicrosecsPerMillisec;
@@ -1007,6 +1013,7 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
output_timestamp_ = frame_info.frame_timestamp;
output_render_time_ms_ = frame_info.frame_render_time_ms;
output_rotation_ = frame_info.rotation;
+ output_content_type_ = frame_info.content_type;
encoding_start_time_ms = frame_info.encode_start_time;
input_frame_infos_.pop_front();
}
@@ -1035,6 +1042,7 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
image->_timeStamp = output_timestamp_;
image->capture_time_ms_ = output_render_time_ms_;
image->rotation_ = output_rotation_;
+ image->content_type_ = output_content_type_;
image->_frameType =
(key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta);
image->_completeFrame = true;