OLD | NEW |
1 /* | 1 /* |
2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
(...skipping 227 matching lines...)
238 int current_encoding_time_ms_; // Overall encoding time in the current second | 238 int current_encoding_time_ms_; // Overall encoding time in the current second |
239 int64_t last_input_timestamp_ms_; // Timestamp of last received yuv frame. | 239 int64_t last_input_timestamp_ms_; // Timestamp of last received yuv frame. |
240 int64_t last_output_timestamp_ms_; // Timestamp of last encoded frame. | 240 int64_t last_output_timestamp_ms_; // Timestamp of last encoded frame. |
241 // Holds the task while the polling loop is paused. | 241 // Holds the task while the polling loop is paused. |
242 std::unique_ptr<rtc::QueuedTask> encode_task_; | 242 std::unique_ptr<rtc::QueuedTask> encode_task_; |
243 | 243 |
244 struct InputFrameInfo { | 244 struct InputFrameInfo { |
245 InputFrameInfo(int64_t encode_start_time, | 245 InputFrameInfo(int64_t encode_start_time, |
246 int32_t frame_timestamp, | 246 int32_t frame_timestamp, |
247 int64_t frame_render_time_ms, | 247 int64_t frame_render_time_ms, |
248 webrtc::VideoRotation rotation) | 248 webrtc::VideoRotation rotation, |
| 249 webrtc::VideoContentType content_type) |
249 : encode_start_time(encode_start_time), | 250 : encode_start_time(encode_start_time), |
250 frame_timestamp(frame_timestamp), | 251 frame_timestamp(frame_timestamp), |
251 frame_render_time_ms(frame_render_time_ms), | 252 frame_render_time_ms(frame_render_time_ms), |
252 rotation(rotation) {} | 253 rotation(rotation), |
| 254 content_type(content_type) {} |
253 // Time when video frame is sent to encoder input. | 255 // Time when video frame is sent to encoder input. |
254 const int64_t encode_start_time; | 256 const int64_t encode_start_time; |
255 | 257 |
256 // Input frame information. | 258 // Input frame information. |
257 const int32_t frame_timestamp; | 259 const int32_t frame_timestamp; |
258 const int64_t frame_render_time_ms; | 260 const int64_t frame_render_time_ms; |
259 const webrtc::VideoRotation rotation; | 261 const webrtc::VideoRotation rotation; |
| 262 const webrtc::VideoContentType content_type; |
260 }; | 263 }; |
261 std::list<InputFrameInfo> input_frame_infos_; | 264 std::list<InputFrameInfo> input_frame_infos_; |
262 int32_t output_timestamp_; // Last output frame timestamp from | 265 int32_t output_timestamp_; // Last output frame timestamp from |
263 // |input_frame_infos_|. | 266 // |input_frame_infos_|. |
264 int64_t output_render_time_ms_; // Last output frame render time from | 267 int64_t output_render_time_ms_; // Last output frame render time from |
265 // |input_frame_infos_|. | 268 // |input_frame_infos_|. |
266 webrtc::VideoRotation output_rotation_; // Last output frame rotation from | 269 webrtc::VideoRotation output_rotation_; // Last output frame rotation from |
267 // |input_frame_infos_|. | 270 // |input_frame_infos_|. |
| 271 webrtc::VideoContentType output_content_type_; |
268 // Frame size in bytes fed to MediaCodec. | 272 // Frame size in bytes fed to MediaCodec. |
269 int yuv_size_; | 273 int yuv_size_; |
270 // True only between a callback_->OnEncodedImage() call returning a positive | 274 // True only between a callback_->OnEncodedImage() call returning a positive |
271 // value and the next Encode() call being ignored. | 275 // value and the next Encode() call being ignored. |
272 bool drop_next_input_frame_; | 276 bool drop_next_input_frame_; |
273 bool scale_; | 277 bool scale_; |
274 // Global references; must be deleted in Release(). | 278 // Global references; must be deleted in Release(). |
275 std::vector<jobject> input_buffers_; | 279 std::vector<jobject> input_buffers_; |
276 webrtc::H264BitstreamParser h264_bitstream_parser_; | 280 webrtc::H264BitstreamParser h264_bitstream_parser_; |
277 | 281 |
(...skipping 430 matching lines...)
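The new content_type member added above rides along with the rest of the per-frame metadata: Encode() pushes an InputFrameInfo onto input_frame_infos_, and when MediaCodec delivers the matching output buffer the front entry is popped and copied into the output_* fields. The standalone sketch below only illustrates that FIFO bookkeeping pattern; the type and member names are simplified stand-ins, not the actual encoder class.

```cpp
#include <cstdint>
#include <list>

// Simplified mirror of webrtc::VideoContentType (assumption for this sketch).
enum class VideoContentType { UNSPECIFIED, SCREENSHARE };

// Metadata captured when a frame is handed to the codec.
struct InputFrameInfo {
  int64_t encode_start_time;
  int32_t frame_timestamp;
  int64_t frame_render_time_ms;
  VideoContentType content_type;
};

class EncoderBookkeeping {
 public:
  // Called from Encode(): remember the metadata for the frame just queued.
  void OnEncode(int64_t now_ms, int32_t rtp_timestamp, int64_t render_ms,
                VideoContentType type) {
    infos_.push_back({now_ms, rtp_timestamp, render_ms, type});
  }

  // Called when an output buffer arrives. Frames come out in the order they
  // were queued, so the front entry corresponds to this output.
  InputFrameInfo OnOutput() {
    InputFrameInfo info = infos_.front();
    infos_.pop_front();
    return info;
  }

 private:
  std::list<InputFrameInfo> infos_;
};
```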
708 } | 712 } |
709 frames_dropped_media_encoder_++; | 713 frames_dropped_media_encoder_++; |
710 return WEBRTC_VIDEO_CODEC_OK; | 714 return WEBRTC_VIDEO_CODEC_OK; |
711 } | 715 } |
712 consecutive_full_queue_frame_drops_ = 0; | 716 consecutive_full_queue_frame_drops_ = 0; |
713 | 717 |
714 rtc::scoped_refptr<webrtc::VideoFrameBuffer> input_buffer( | 718 rtc::scoped_refptr<webrtc::VideoFrameBuffer> input_buffer( |
715 frame.video_frame_buffer()); | 719 frame.video_frame_buffer()); |
716 | 720 |
717 VideoFrame input_frame(input_buffer, frame.timestamp(), | 721 VideoFrame input_frame(input_buffer, frame.timestamp(), |
718 frame.render_time_ms(), frame.rotation()); | 722 frame.render_time_ms(), frame.rotation(), |
| 723 frame.content_type()); |
719 | 724 |
720 if (!MaybeReconfigureEncoder(input_frame)) { | 725 if (!MaybeReconfigureEncoder(input_frame)) { |
721 ALOGE << "Failed to reconfigure encoder."; | 726 ALOGE << "Failed to reconfigure encoder."; |
722 return WEBRTC_VIDEO_CODEC_ERROR; | 727 return WEBRTC_VIDEO_CODEC_ERROR; |
723 } | 728 } |
724 | 729 |
725 const bool key_frame = | 730 const bool key_frame = |
726 frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame; | 731 frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame; |
727 bool encode_status = true; | 732 bool encode_status = true; |
728 if (!input_frame.video_frame_buffer()->native_handle()) { | 733 if (!input_frame.video_frame_buffer()->native_handle()) { |
(...skipping 26 matching lines...)
755 } | 760 } |
756 | 761 |
757 if (!encode_status) { | 762 if (!encode_status) { |
758 ALOGE << "Failed encode frame with timestamp: " << input_frame.timestamp(); | 763 ALOGE << "Failed encode frame with timestamp: " << input_frame.timestamp(); |
759 return ProcessHWErrorOnEncode(); | 764 return ProcessHWErrorOnEncode(); |
760 } | 765 } |
761 | 766 |
762 // Save input image timestamps for later output. | 767 // Save input image timestamps for later output. |
763 input_frame_infos_.emplace_back(frame_input_time_ms, input_frame.timestamp(), | 768 input_frame_infos_.emplace_back(frame_input_time_ms, input_frame.timestamp(), |
764 input_frame.render_time_ms(), | 769 input_frame.render_time_ms(), |
765 input_frame.rotation()); | 770 input_frame.rotation(), |
| 771 input_frame.content_type()); |
766 | 772 |
767 last_input_timestamp_ms_ = | 773 last_input_timestamp_ms_ = |
768 current_timestamp_us_ / rtc::kNumMicrosecsPerMillisec; | 774 current_timestamp_us_ / rtc::kNumMicrosecsPerMillisec; |
769 | 775 |
770 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; | 776 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; |
771 | 777 |
772 // Start the polling loop if it is not started. | 778 // Start the polling loop if it is not started. |
773 if (encode_task_) { | 779 if (encode_task_) { |
774 rtc::TaskQueue::Current()->PostDelayedTask(std::move(encode_task_), | 780 rtc::TaskQueue::Current()->PostDelayedTask(std::move(encode_task_), |
775 kMediaCodecPollMs); | 781 kMediaCodecPollMs); |
(...skipping 230 matching lines...)
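Besides the content-type plumbing, the Encode() hunk above also shows the encoder's synthetic timestamp bookkeeping: current_timestamp_us_ advances by one frame interval per accepted frame, and last_input_timestamp_ms_ is derived from it. The toy program below just demonstrates that arithmetic; the constants are assumed values mirroring rtc::kNumMicrosecsPerSec and rtc::kNumMicrosecsPerMillisec, and at 30 fps each queued frame ends up 33333 us apart.

```cpp
#include <cstdint>
#include <cstdio>

constexpr int64_t kNumMicrosecsPerSec = 1000000;     // assumed value
constexpr int64_t kNumMicrosecsPerMillisec = 1000;   // assumed value

int main() {
  int64_t current_timestamp_us = 0;
  const int last_set_fps = 30;
  for (int i = 0; i < 3; ++i) {
    // Millisecond timestamp recorded for the frame being queued.
    int64_t last_input_timestamp_ms =
        current_timestamp_us / kNumMicrosecsPerMillisec;
    std::printf("frame %d queued at %lld ms\n", i,
                static_cast<long long>(last_input_timestamp_ms));
    // Advance the synthetic clock by one frame interval (+33333 us at 30 fps).
    current_timestamp_us += kNumMicrosecsPerSec / last_set_fps;
  }
  return 0;
}
```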
1006 int64_t encoding_start_time_ms = 0; | 1012 int64_t encoding_start_time_ms = 0; |
1007 int64_t frame_encoding_time_ms = 0; | 1013 int64_t frame_encoding_time_ms = 0; |
1008 last_output_timestamp_ms_ = | 1014 last_output_timestamp_ms_ = |
1009 GetOutputBufferInfoPresentationTimestampUs(jni, j_output_buffer_info) / | 1015 GetOutputBufferInfoPresentationTimestampUs(jni, j_output_buffer_info) / |
1010 rtc::kNumMicrosecsPerMillisec; | 1016 rtc::kNumMicrosecsPerMillisec; |
1011 if (!input_frame_infos_.empty()) { | 1017 if (!input_frame_infos_.empty()) { |
1012 const InputFrameInfo& frame_info = input_frame_infos_.front(); | 1018 const InputFrameInfo& frame_info = input_frame_infos_.front(); |
1013 output_timestamp_ = frame_info.frame_timestamp; | 1019 output_timestamp_ = frame_info.frame_timestamp; |
1014 output_render_time_ms_ = frame_info.frame_render_time_ms; | 1020 output_render_time_ms_ = frame_info.frame_render_time_ms; |
1015 output_rotation_ = frame_info.rotation; | 1021 output_rotation_ = frame_info.rotation; |
| 1022 output_content_type_ = frame_info.content_type; |
1016 encoding_start_time_ms = frame_info.encode_start_time; | 1023 encoding_start_time_ms = frame_info.encode_start_time; |
1017 input_frame_infos_.pop_front(); | 1024 input_frame_infos_.pop_front(); |
1018 } | 1025 } |
1019 | 1026 |
1020 // Extract payload. | 1027 // Extract payload. |
1021 size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer); | 1028 size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer); |
1022 uint8_t* payload = reinterpret_cast<uint8_t*>( | 1029 uint8_t* payload = reinterpret_cast<uint8_t*>( |
1023 jni->GetDirectBufferAddress(j_output_buffer)); | 1030 jni->GetDirectBufferAddress(j_output_buffer)); |
1024 if (CheckException(jni)) { | 1031 if (CheckException(jni)) { |
1025 ALOGE << "Exception in get direct buffer address."; | 1032 ALOGE << "Exception in get direct buffer address."; |
1026 ProcessHWError(true /* reset_if_fallback_unavailable */); | 1033 ProcessHWError(true /* reset_if_fallback_unavailable */); |
1027 return WEBRTC_VIDEO_CODEC_ERROR; | 1034 return WEBRTC_VIDEO_CODEC_ERROR; |
1028 } | 1035 } |
1029 | 1036 |
1030 // Callback - return encoded frame. | 1037 // Callback - return encoded frame. |
1031 const VideoCodecType codec_type = GetCodecType(); | 1038 const VideoCodecType codec_type = GetCodecType(); |
1032 webrtc::EncodedImageCallback::Result callback_result( | 1039 webrtc::EncodedImageCallback::Result callback_result( |
1033 webrtc::EncodedImageCallback::Result::OK); | 1040 webrtc::EncodedImageCallback::Result::OK); |
1034 if (callback_) { | 1041 if (callback_) { |
1035 std::unique_ptr<webrtc::EncodedImage> image( | 1042 std::unique_ptr<webrtc::EncodedImage> image( |
1036 new webrtc::EncodedImage(payload, payload_size, payload_size)); | 1043 new webrtc::EncodedImage(payload, payload_size, payload_size)); |
1037 image->_encodedWidth = width_; | 1044 image->_encodedWidth = width_; |
1038 image->_encodedHeight = height_; | 1045 image->_encodedHeight = height_; |
1039 image->_timeStamp = output_timestamp_; | 1046 image->_timeStamp = output_timestamp_; |
1040 image->capture_time_ms_ = output_render_time_ms_; | 1047 image->capture_time_ms_ = output_render_time_ms_; |
1041 image->rotation_ = output_rotation_; | 1048 image->rotation_ = output_rotation_; |
| 1049 image->content_type_ = output_content_type_; |
1042 image->_frameType = | 1050 image->_frameType = |
1043 (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta); | 1051 (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta); |
1044 image->_completeFrame = true; | 1052 image->_completeFrame = true; |
1045 webrtc::CodecSpecificInfo info; | 1053 webrtc::CodecSpecificInfo info; |
1046 memset(&info, 0, sizeof(info)); | 1054 memset(&info, 0, sizeof(info)); |
1047 info.codecType = codec_type; | 1055 info.codecType = codec_type; |
1048 if (codec_type == kVideoCodecVP8) { | 1056 if (codec_type == kVideoCodecVP8) { |
1049 info.codecSpecific.VP8.pictureId = picture_id_; | 1057 info.codecSpecific.VP8.pictureId = picture_id_; |
1050 info.codecSpecific.VP8.nonReference = false; | 1058 info.codecSpecific.VP8.nonReference = false; |
1051 info.codecSpecific.VP8.simulcastIdx = 0; | 1059 info.codecSpecific.VP8.simulcastIdx = 0; |
(...skipping 290 matching lines...)
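The output-delivery hunk above extracts the encoded payload from a Java direct ByteBuffer, reading its address and capacity through standard JNI calls so no copy is needed before wrapping it in an EncodedImage. The sketch below shows that pattern in isolation; ExtractPayload and EncodedPayload are illustrative helpers, not part of the real file, and error handling is reduced to a boolean.

```cpp
#include <jni.h>
#include <cstddef>
#include <cstdint>

// Minimal stand-in for the wrapped payload (hypothetical helper type).
struct EncodedPayload {
  uint8_t* data = nullptr;
  size_t size = 0;
};

// Read the backing storage of a direct ByteBuffer without copying.
bool ExtractPayload(JNIEnv* jni, jobject j_output_buffer, EncodedPayload* out) {
  void* address = jni->GetDirectBufferAddress(j_output_buffer);
  jlong capacity = jni->GetDirectBufferCapacity(j_output_buffer);
  if (address == nullptr || capacity < 0 || jni->ExceptionCheck()) {
    return false;  // Not a direct buffer, or a pending Java exception.
  }
  out->data = reinterpret_cast<uint8_t*>(address);
  out->size = static_cast<size_t>(capacity);
  return true;
}
```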
1342 } | 1350 } |
1343 } | 1351 } |
1344 | 1352 |
1345 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( | 1353 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( |
1346 webrtc::VideoEncoder* encoder) { | 1354 webrtc::VideoEncoder* encoder) { |
1347 ALOGD << "Destroy video encoder."; | 1355 ALOGD << "Destroy video encoder."; |
1348 delete encoder; | 1356 delete encoder; |
1349 } | 1357 } |
1350 | 1358 |
1351 } // namespace webrtc_jni | 1359 } // namespace webrtc_jni |