OLD | NEW |
---|---|
1 /* | 1 /* |
2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
11 // NOTICE: androidmediaencoder_jni.h must be included before | 11 // NOTICE: androidmediaencoder_jni.h must be included before |
12 // androidmediacodeccommon.h to avoid build errors. | 12 // androidmediacodeccommon.h to avoid build errors. |
13 #include "webrtc/api/java/jni/androidmediaencoder_jni.h" | 13 #include "webrtc/api/java/jni/androidmediaencoder_jni.h" |
14 | 14 |
15 #include <algorithm> | |
pbos-webrtc
2016/04/14 14:29:57
Not needed, right?
perkj_webrtc
2016/04/15 12:37:39
git cl lint says it's needed.
| |
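For context on the exchange above: cpplint asks for `<algorithm>` to be included directly whenever a translation unit uses `std::max` or `std::min`. A minimal sketch of the kind of call site that triggers the warning (the exact usage in this file is an assumption, not taken from the diff):

```cpp
#include <algorithm>  // Without this, cpplint reports: "Add #include <algorithm> for min".

// Hypothetical example of the pattern that makes the lint check fire.
static int ClampFramerate(int requested_fps, int max_supported_fps) {
  return std::min(requested_fps, max_supported_fps);
}
```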
16 #include <list> | |
17 | |
15 #include "third_party/libyuv/include/libyuv/convert.h" | 18 #include "third_party/libyuv/include/libyuv/convert.h" |
16 #include "third_party/libyuv/include/libyuv/convert_from.h" | 19 #include "third_party/libyuv/include/libyuv/convert_from.h" |
17 #include "third_party/libyuv/include/libyuv/video_common.h" | 20 #include "third_party/libyuv/include/libyuv/video_common.h" |
18 #include "webrtc/api/java/jni/androidmediacodeccommon.h" | 21 #include "webrtc/api/java/jni/androidmediacodeccommon.h" |
19 #include "webrtc/api/java/jni/classreferenceholder.h" | 22 #include "webrtc/api/java/jni/classreferenceholder.h" |
20 #include "webrtc/api/java/jni/native_handle_impl.h" | 23 #include "webrtc/api/java/jni/native_handle_impl.h" |
21 #include "webrtc/base/bind.h" | 24 #include "webrtc/base/bind.h" |
22 #include "webrtc/base/checks.h" | 25 #include "webrtc/base/checks.h" |
23 #include "webrtc/base/logging.h" | 26 #include "webrtc/base/logging.h" |
24 #include "webrtc/base/thread.h" | 27 #include "webrtc/base/thread.h" |
(...skipping 189 matching lines...) | |
214 uint16_t picture_id_; | 217 uint16_t picture_id_; |
215 enum libyuv::FourCC encoder_fourcc_; // Encoder color space format. | 218 enum libyuv::FourCC encoder_fourcc_; // Encoder color space format. |
216 int last_set_bitrate_kbps_; // Last-requested bitrate in kbps. | 219 int last_set_bitrate_kbps_; // Last-requested bitrate in kbps. |
217 int last_set_fps_; // Last-requested frame rate. | 220 int last_set_fps_; // Last-requested frame rate. |
218 int64_t current_timestamp_us_; // Current frame timestamps in us. | 221 int64_t current_timestamp_us_; // Current frame timestamps in us. |
219 int frames_received_; // Number of frames received by encoder. | 222 int frames_received_; // Number of frames received by encoder. |
220 int frames_encoded_; // Number of frames encoded by encoder. | 223 int frames_encoded_; // Number of frames encoded by encoder. |
221 int frames_dropped_media_encoder_; // Number of frames dropped by encoder. | 224 int frames_dropped_media_encoder_; // Number of frames dropped by encoder. |
222 // Number of dropped frames caused by full queue. | 225 // Number of dropped frames caused by full queue. |
223 int consecutive_full_queue_frame_drops_; | 226 int consecutive_full_queue_frame_drops_; |
224 int frames_in_queue_; // Number of frames in encoder queue. | 227 int frames_in_queue_; // Number of frames in encoder queue. |
magjed_webrtc
2016/04/15 09:43:30
This looks identical to input_frame_infos_.size().
perkj_webrtc
2016/04/15 12:37:39
Done.
| |
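The point of the comment above is that the hand-maintained counter duplicates state already held by the container. A minimal sketch of the simplification the "Done" refers to, assuming the counter is dropped and readers query the list instead (the accessor name is hypothetical):

```cpp
// Before (two pieces of state to keep in sync by hand):
//   frames_in_queue_++;   // when a frame is queued in EncodeOnCodecThread()
//   frames_in_queue_--;   // when a frame is dequeued in DeliverPendingOutputs()
//
// After: derive the queue depth from the container itself, so the two can never drift.
size_t FramesInQueue() const {
  return input_frame_infos_.size();
}
```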
225 int64_t stat_start_time_ms_; // Start time for statistics. | 228 int64_t stat_start_time_ms_; // Start time for statistics. |
226 int current_frames_; // Number of frames in the current statistics interval. | 229 int current_frames_; // Number of frames in the current statistics interval. |
227 int current_bytes_; // Encoded bytes in the current statistics interval. | 230 int current_bytes_; // Encoded bytes in the current statistics interval. |
228 int current_acc_qp_; // Accumulated QP in the current statistics interval. | 231 int current_acc_qp_; // Accumulated QP in the current statistics interval. |
229 int current_encoding_time_ms_; // Overall encoding time in the current second | 232 int current_encoding_time_ms_; // Overall encoding time in the current second |
230 int64_t last_input_timestamp_ms_; // Timestamp of last received yuv frame. | 233 int64_t last_input_timestamp_ms_; // Timestamp of last received yuv frame. |
231 int64_t last_output_timestamp_ms_; // Timestamp of last encoded frame. | 234 int64_t last_output_timestamp_ms_; // Timestamp of last encoded frame. |
232 std::vector<int32_t> timestamps_; // Video frames timestamp queue. | 235 |
233 std::vector<int64_t> render_times_ms_; // Video frames render time queue. | 236 struct InputFrameInfo { |
237 InputFrameInfo(int32_t timestamp, | |
238 int64_t render_time_ms, | |
239 webrtc::VideoRotation rotation) | |
240 : timestamp(timestamp), | |
241 render_time_ms(render_time_ms), | |
242 rotation(rotation) {} | |
243 const int32_t timestamp; | |
244 const int64_t render_time_ms; | |
245 const webrtc::VideoRotation rotation; | |
246 }; | |
247 std::list<InputFrameInfo> input_frame_infos_; | |
234 std::vector<int64_t> frame_rtc_times_ms_; // Time when video frame is sent to | 248 std::vector<int64_t> frame_rtc_times_ms_; // Time when video frame is sent to |
magjed_webrtc
2016/04/15 09:43:30
Can you add a rtc_time member in InputFrameInfo an
perkj_webrtc
2016/04/15 12:37:39
Done.
| |
235 // encoder input. | 249 // encoder input. |
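The (truncated) suggestion above asks for the encode-start time to move into InputFrameInfo so that the parallel frame_rtc_times_ms_ vector can go away as well. A hedged sketch of what that follow-up might look like; the member name encode_start_ms is an assumption, and the types come from the includes already in this file:

```cpp
struct InputFrameInfo {
  InputFrameInfo(int64_t encode_start_ms,
                 int32_t timestamp,
                 int64_t render_time_ms,
                 webrtc::VideoRotation rotation)
      : encode_start_ms(encode_start_ms),
        timestamp(timestamp),
        render_time_ms(render_time_ms),
        rotation(rotation) {}
  // Time when the frame was handed to MediaCodec; replaces the parallel
  // frame_rtc_times_ms_ vector so all per-frame bookkeeping lives in one queue.
  const int64_t encode_start_ms;
  const int32_t timestamp;
  const int64_t render_time_ms;
  const webrtc::VideoRotation rotation;
};
```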
236 int32_t output_timestamp_; // Last output frame timestamp from timestamps_ Q. | 250 int32_t output_timestamp_; // Last output frame timestamp from |
magjed_webrtc
2016/04/15 09:43:30
Can you replace |output_timestamp_|, |output_rende
perkj_webrtc
2016/04/15 12:37:39
I could but that would mean a slight behaviour diff
| |
251 // |input_frame_infos_|. | |
237 int64_t output_render_time_ms_; // Last output frame render time from | 252 int64_t output_render_time_ms_; // Last output frame render time from |
238 // render_times_ms_ queue. | 253 // |input_frame_infos_|. |
254 webrtc::VideoRotation output_rotation_; // Last output frame rotation from | |
255 // |input_frame_infos_|. | |
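For reference, the alternative floated in the thread above would drop the three output_* members and read the front of |input_frame_infos_| directly when the encoded frame is delivered in DeliverPendingOutputs(). A sketch of that variant (hypothetical, not what this CL does); the behaviour difference the reply alludes to is presumably that an output produced while the queue is empty could no longer reuse the last known values:

```cpp
// Hypothetical variant without output_timestamp_ / output_render_time_ms_ /
// output_rotation_: pull the metadata straight off the queue when building
// the EncodedImage. If MediaCodec emits an output while the queue is empty
// there is nothing to fall back on, which is the behaviour change the
// current code avoids by caching the last popped values.
if (!input_frame_infos_.empty()) {
  const InputFrameInfo& frame_info = input_frame_infos_.front();
  image->_timeStamp = frame_info.timestamp;
  image->capture_time_ms_ = frame_info.render_time_ms;
  image->rotation_ = frame_info.rotation;
  input_frame_infos_.pop_front();
}
```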
239 // Frame size in bytes fed to MediaCodec. | 256 // Frame size in bytes fed to MediaCodec. |
240 int yuv_size_; | 257 int yuv_size_; |
241 // True only when between a callback_->Encoded() call return a positive value | 258 // True only when between a callback_->Encoded() call return a positive value |
242 // and the next Encode() call being ignored. | 259 // and the next Encode() call being ignored. |
243 bool drop_next_input_frame_; | 260 bool drop_next_input_frame_; |
244 // Global references; must be deleted in Release(). | 261 // Global references; must be deleted in Release(). |
245 std::vector<jobject> input_buffers_; | 262 std::vector<jobject> input_buffers_; |
246 QualityScaler quality_scaler_; | 263 QualityScaler quality_scaler_; |
247 // Dynamic resolution change, off by default. | 264 // Dynamic resolution change, off by default. |
248 bool scale_; | 265 bool scale_; |
(...skipping 267 matching lines...) | |
516 current_timestamp_us_ = 0; | 533 current_timestamp_us_ = 0; |
517 stat_start_time_ms_ = GetCurrentTimeMs(); | 534 stat_start_time_ms_ = GetCurrentTimeMs(); |
518 current_frames_ = 0; | 535 current_frames_ = 0; |
519 current_bytes_ = 0; | 536 current_bytes_ = 0; |
520 current_acc_qp_ = 0; | 537 current_acc_qp_ = 0; |
521 current_encoding_time_ms_ = 0; | 538 current_encoding_time_ms_ = 0; |
522 last_input_timestamp_ms_ = -1; | 539 last_input_timestamp_ms_ = -1; |
523 last_output_timestamp_ms_ = -1; | 540 last_output_timestamp_ms_ = -1; |
524 output_timestamp_ = 0; | 541 output_timestamp_ = 0; |
525 output_render_time_ms_ = 0; | 542 output_render_time_ms_ = 0; |
526 timestamps_.clear(); | 543 input_frame_infos_.clear(); |
527 render_times_ms_.clear(); | |
528 frame_rtc_times_ms_.clear(); | 544 frame_rtc_times_ms_.clear(); |
529 drop_next_input_frame_ = false; | 545 drop_next_input_frame_ = false; |
530 use_surface_ = use_surface; | 546 use_surface_ = use_surface; |
531 picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF; | 547 picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF; |
532 gof_.SetGofInfoVP9(webrtc::TemporalStructureMode::kTemporalStructureMode1); | 548 gof_.SetGofInfoVP9(webrtc::TemporalStructureMode::kTemporalStructureMode1); |
533 tl0_pic_idx_ = static_cast<uint8_t>(rand()); | 549 tl0_pic_idx_ = static_cast<uint8_t>(rand()); |
534 gof_idx_ = 0; | 550 gof_idx_ = 0; |
535 last_frame_received_ms_ = -1; | 551 last_frame_received_ms_ = -1; |
536 frames_received_since_last_key_ = kMinKeyFrameInterval; | 552 frames_received_since_last_key_ = kMinKeyFrameInterval; |
537 | 553 |
(...skipping 190 matching lines...) | |
728 ALOGE << "Failed encode frame with timestamp: " << input_frame.timestamp(); | 744 ALOGE << "Failed encode frame with timestamp: " << input_frame.timestamp(); |
729 ResetCodecOnCodecThread(); | 745 ResetCodecOnCodecThread(); |
730 return WEBRTC_VIDEO_CODEC_ERROR; | 746 return WEBRTC_VIDEO_CODEC_ERROR; |
731 } | 747 } |
732 | 748 |
733 last_input_timestamp_ms_ = | 749 last_input_timestamp_ms_ = |
734 current_timestamp_us_ / rtc::kNumMicrosecsPerMillisec; | 750 current_timestamp_us_ / rtc::kNumMicrosecsPerMillisec; |
735 frames_in_queue_++; | 751 frames_in_queue_++; |
736 | 752 |
737 // Save input image timestamps for later output | 753 // Save input image timestamps for later output |
738 timestamps_.push_back(input_frame.timestamp()); | 754 |
magjed_webrtc
2016/04/15 09:43:30
nit: remove empty line
perkj_webrtc
2016/04/15 12:37:39
Done.
| |
739 render_times_ms_.push_back(input_frame.render_time_ms()); | 755 input_frame_infos_.push_back(InputFrameInfo(input_frame.timestamp(), |
magjed_webrtc
2016/04/15 09:43:30
emplace_back is allowed now according to http://ch
perkj_webrtc
2016/04/15 12:37:39
Done.
| |
756 input_frame.render_time_ms(), | |
757 input_frame.rotation())); | |
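The "Done" in the thread above refers to replacing the push_back of a temporary with emplace_back, which constructs the InputFrameInfo in place inside the list; roughly (the exact argument list in the later patch set may differ):

```cpp
// Constructs the element directly in the list node instead of building a
// temporary InputFrameInfo and copying it, as the push_back form above does.
input_frame_infos_.emplace_back(input_frame.timestamp(),
                                input_frame.render_time_ms(),
                                input_frame.rotation());
```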
740 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; | 758 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; |
741 | 759 |
742 if (!DeliverPendingOutputs(jni)) { | 760 if (!DeliverPendingOutputs(jni)) { |
743 ALOGE << "Failed deliver pending outputs."; | 761 ALOGE << "Failed deliver pending outputs."; |
744 ResetCodecOnCodecThread(); | 762 ResetCodecOnCodecThread(); |
745 return WEBRTC_VIDEO_CODEC_ERROR; | 763 return WEBRTC_VIDEO_CODEC_ERROR; |
746 } | 764 } |
747 return WEBRTC_VIDEO_CODEC_OK; | 765 return WEBRTC_VIDEO_CODEC_OK; |
748 } | 766 } |
749 | 767 |
(...skipping 185 matching lines...) | |
935 // Get key and config frame flags. | 953 // Get key and config frame flags. |
936 jobject j_output_buffer = | 954 jobject j_output_buffer = |
937 GetOutputBufferInfoBuffer(jni, j_output_buffer_info); | 955 GetOutputBufferInfoBuffer(jni, j_output_buffer_info); |
938 bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info); | 956 bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info); |
939 | 957 |
940 // Get frame timestamps from a queue - for non config frames only. | 958 // Get frame timestamps from a queue - for non config frames only. |
941 int64_t frame_encoding_time_ms = 0; | 959 int64_t frame_encoding_time_ms = 0; |
942 last_output_timestamp_ms_ = | 960 last_output_timestamp_ms_ = |
943 GetOutputBufferInfoPresentationTimestampUs(jni, j_output_buffer_info) / | 961 GetOutputBufferInfoPresentationTimestampUs(jni, j_output_buffer_info) / |
944 1000; | 962 1000; |
945 if (frames_in_queue_ > 0) { | 963 if (frames_in_queue_ > 0) { |
magjed_webrtc
2016/04/15 09:43:30
Can you CHECK_GT(frames_in_queue_, 0) here? It loo
perkj_webrtc
2016/04/15 12:37:39
as discussed - I don't want to in this CL. The comm
| |
946 output_timestamp_ = timestamps_.front(); | 964 const InputFrameInfo& frame_info = input_frame_infos_.front(); |
947 timestamps_.erase(timestamps_.begin()); | 965 output_timestamp_ = frame_info.timestamp; |
948 output_render_time_ms_ = render_times_ms_.front(); | 966 output_render_time_ms_ = frame_info.render_time_ms; |
949 render_times_ms_.erase(render_times_ms_.begin()); | 967 output_rotation_ = frame_info.rotation; |
968 input_frame_infos_.pop_front(); | |
950 frame_encoding_time_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front(); | 969 frame_encoding_time_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front(); |
951 frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin()); | 970 frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin()); |
952 frames_in_queue_--; | 971 frames_in_queue_--; |
953 } | 972 } |
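The suggestion in the thread above is to promote the condition to an invariant check rather than silently skipping the metadata update when the queue is empty. A sketch of that alternative using the check macros from webrtc/base/checks.h (explicitly not applied in this CL, per the reply):

```cpp
// Suggested alternative (not landed here): treat an empty queue at output
// time as a programming error instead of skipping the metadata update.
RTC_CHECK_GT(frames_in_queue_, 0);
const InputFrameInfo& frame_info = input_frame_infos_.front();
output_timestamp_ = frame_info.timestamp;
output_render_time_ms_ = frame_info.render_time_ms;
output_rotation_ = frame_info.rotation;
input_frame_infos_.pop_front();
```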
954 | 973 |
955 // Extract payload. | 974 // Extract payload. |
956 size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer); | 975 size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer); |
957 uint8_t* payload = reinterpret_cast<uint8_t*>( | 976 uint8_t* payload = reinterpret_cast<uint8_t*>( |
958 jni->GetDirectBufferAddress(j_output_buffer)); | 977 jni->GetDirectBufferAddress(j_output_buffer)); |
959 CHECK_EXCEPTION(jni); | 978 CHECK_EXCEPTION(jni); |
(...skipping 11 matching lines...) | |
971 | 990 |
972 // Callback - return encoded frame. | 991 // Callback - return encoded frame. |
973 int32_t callback_status = 0; | 992 int32_t callback_status = 0; |
974 if (callback_) { | 993 if (callback_) { |
975 scoped_ptr<webrtc::EncodedImage> image( | 994 scoped_ptr<webrtc::EncodedImage> image( |
976 new webrtc::EncodedImage(payload, payload_size, payload_size)); | 995 new webrtc::EncodedImage(payload, payload_size, payload_size)); |
977 image->_encodedWidth = width_; | 996 image->_encodedWidth = width_; |
978 image->_encodedHeight = height_; | 997 image->_encodedHeight = height_; |
979 image->_timeStamp = output_timestamp_; | 998 image->_timeStamp = output_timestamp_; |
980 image->capture_time_ms_ = output_render_time_ms_; | 999 image->capture_time_ms_ = output_render_time_ms_; |
1000 image->rotation_ = output_rotation_; | |
981 image->_frameType = | 1001 image->_frameType = |
982 (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta); | 1002 (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta); |
983 image->_completeFrame = true; | 1003 image->_completeFrame = true; |
984 image->adapt_reason_.quality_resolution_downscales = | 1004 image->adapt_reason_.quality_resolution_downscales = |
985 scale_ ? quality_scaler_.downscale_shift() : -1; | 1005 scale_ ? quality_scaler_.downscale_shift() : -1; |
986 | 1006 |
987 webrtc::CodecSpecificInfo info; | 1007 webrtc::CodecSpecificInfo info; |
988 memset(&info, 0, sizeof(info)); | 1008 memset(&info, 0, sizeof(info)); |
989 info.codecType = codecType_; | 1009 info.codecType = codecType_; |
990 if (codecType_ == kVideoCodecVP8) { | 1010 if (codecType_ == kVideoCodecVP8) { |
(...skipping 269 matching lines...) | |
1260 } | 1280 } |
1261 | 1281 |
1262 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( | 1282 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( |
1263 webrtc::VideoEncoder* encoder) { | 1283 webrtc::VideoEncoder* encoder) { |
1264 ALOGD << "Destroy video encoder."; | 1284 ALOGD << "Destroy video encoder."; |
1265 delete encoder; | 1285 delete encoder; |
1266 } | 1286 } |
1267 | 1287 |
1268 } // namespace webrtc_jni | 1288 } // namespace webrtc_jni |
1269 | 1289 |