OLD | NEW |
1 /* | 1 /* |
2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
11 // NOTICE: androidmediaencoder_jni.h must be included before | 11 // NOTICE: androidmediaencoder_jni.h must be included before |
12 // androidmediacodeccommon.h to avoid build errors. | 12 // androidmediacodeccommon.h to avoid build errors. |
13 #include "webrtc/sdk/android/src/jni/androidmediaencoder_jni.h" | 13 #include "webrtc/sdk/android/src/jni/androidmediaencoder_jni.h" |
14 | 14 |
15 #include <algorithm> | 15 #include <algorithm> |
| 16 #include <list> |
16 #include <memory> | 17 #include <memory> |
17 #include <list> | 18 #include <string> |
| 19 #include <utility> |
18 | 20 |
19 #include "third_party/libyuv/include/libyuv/convert.h" | 21 #include "third_party/libyuv/include/libyuv/convert.h" |
20 #include "third_party/libyuv/include/libyuv/convert_from.h" | 22 #include "third_party/libyuv/include/libyuv/convert_from.h" |
21 #include "third_party/libyuv/include/libyuv/video_common.h" | 23 #include "third_party/libyuv/include/libyuv/video_common.h" |
22 #include "webrtc/sdk/android/src/jni/androidmediacodeccommon.h" | 24 #include "webrtc/api/video_codecs/video_encoder.h" |
23 #include "webrtc/sdk/android/src/jni/classreferenceholder.h" | |
24 #include "webrtc/sdk/android/src/jni/native_handle_impl.h" | |
25 #include "webrtc/base/bind.h" | 25 #include "webrtc/base/bind.h" |
26 #include "webrtc/base/checks.h" | 26 #include "webrtc/base/checks.h" |
27 #include "webrtc/base/logging.h" | 27 #include "webrtc/base/logging.h" |
28 #include "webrtc/base/sequenced_task_checker.h" | 28 #include "webrtc/base/sequenced_task_checker.h" |
29 #include "webrtc/base/task_queue.h" | 29 #include "webrtc/base/task_queue.h" |
30 #include "webrtc/base/thread.h" | 30 #include "webrtc/base/thread.h" |
31 #include "webrtc/base/timeutils.h" | 31 #include "webrtc/base/timeutils.h" |
32 #include "webrtc/base/weak_ptr.h" | 32 #include "webrtc/base/weak_ptr.h" |
33 #include "webrtc/common_types.h" | 33 #include "webrtc/common_types.h" |
34 #include "webrtc/common_video/h264/h264_bitstream_parser.h" | 34 #include "webrtc/common_video/h264/h264_bitstream_parser.h" |
35 #include "webrtc/common_video/h264/h264_common.h" | 35 #include "webrtc/common_video/h264/h264_common.h" |
36 #include "webrtc/common_video/h264/profile_level_id.h" | 36 #include "webrtc/common_video/h264/profile_level_id.h" |
37 #include "webrtc/media/engine/internalencoderfactory.h" | 37 #include "webrtc/media/engine/internalencoderfactory.h" |
38 #include "webrtc/modules/video_coding/include/video_codec_interface.h" | 38 #include "webrtc/modules/video_coding/include/video_codec_interface.h" |
39 #include "webrtc/modules/video_coding/utility/quality_scaler.h" | 39 #include "webrtc/modules/video_coding/utility/quality_scaler.h" |
40 #include "webrtc/modules/video_coding/utility/vp8_header_parser.h" | 40 #include "webrtc/modules/video_coding/utility/vp8_header_parser.h" |
| 41 #include "webrtc/sdk/android/src/jni/androidmediacodeccommon.h" |
| 42 #include "webrtc/sdk/android/src/jni/classreferenceholder.h" |
| 43 #include "webrtc/sdk/android/src/jni/native_handle_impl.h" |
41 #include "webrtc/system_wrappers/include/field_trial.h" | 44 #include "webrtc/system_wrappers/include/field_trial.h" |
42 #include "webrtc/system_wrappers/include/logcat_trace_context.h" | 45 #include "webrtc/system_wrappers/include/logcat_trace_context.h" |
43 #include "webrtc/video_encoder.h" | |
44 | 46 |
45 using rtc::Bind; | 47 using rtc::Bind; |
46 using rtc::Thread; | 48 using rtc::Thread; |
47 using rtc::ThreadManager; | 49 using rtc::ThreadManager; |
48 | 50 |
49 using webrtc::CodecSpecificInfo; | 51 using webrtc::CodecSpecificInfo; |
50 using webrtc::EncodedImage; | 52 using webrtc::EncodedImage; |
51 using webrtc::VideoFrame; | 53 using webrtc::VideoFrame; |
52 using webrtc::RTPFragmentationHeader; | 54 using webrtc::RTPFragmentationHeader; |
53 using webrtc::VideoCodec; | 55 using webrtc::VideoCodec; |
(...skipping 62 matching lines...)
116 int64_t /* rtt */) override; | 118 int64_t /* rtt */) override; |
117 int32_t SetRateAllocation(const webrtc::BitrateAllocation& rate_allocation, | 119 int32_t SetRateAllocation(const webrtc::BitrateAllocation& rate_allocation, |
118 uint32_t frame_rate) override; | 120 uint32_t frame_rate) override; |
119 | 121 |
120 bool SupportsNativeHandle() const override { return egl_context_ != nullptr; } | 122 bool SupportsNativeHandle() const override { return egl_context_ != nullptr; } |
121 const char* ImplementationName() const override; | 123 const char* ImplementationName() const override; |
122 | 124 |
123 private: | 125 private: |
124 class EncodeTask : public rtc::QueuedTask { | 126 class EncodeTask : public rtc::QueuedTask { |
125 public: | 127 public: |
126 EncodeTask(rtc::WeakPtr<MediaCodecVideoEncoder> encoder); | 128 explicit EncodeTask(rtc::WeakPtr<MediaCodecVideoEncoder> encoder); |
127 bool Run() override; | 129 bool Run() override; |
128 | 130 |
129 private: | 131 private: |
130 rtc::WeakPtr<MediaCodecVideoEncoder> encoder_; | 132 rtc::WeakPtr<MediaCodecVideoEncoder> encoder_; |
131 }; | 133 }; |
132 | 134 |
133 // ResetCodec() calls Release() and InitEncodeInternal() in an attempt to | 135 // ResetCodec() calls Release() and InitEncodeInternal() in an attempt to |
134 // restore the codec to an operable state. Necessary after all manner of | 136 // restore the codec to an operable state. Necessary after all manner of |
135 // OMX-layer errors. Returns true if the codec was reset successfully. | 137 // OMX-layer errors. Returns true if the codec was reset successfully. |
136 bool ResetCodec(); | 138 bool ResetCodec(); |
(...skipping 90 matching lines...)
227 int last_set_fps_; // Last-requested frame rate. | 229 int last_set_fps_; // Last-requested frame rate. |
228 int64_t current_timestamp_us_; // Current frame timestamp in us. | 230 int64_t current_timestamp_us_; // Current frame timestamp in us. |
229 int frames_received_; // Number of frames received by encoder. | 231 int frames_received_; // Number of frames received by encoder. |
230 int frames_encoded_; // Number of frames encoded by encoder. | 232 int frames_encoded_; // Number of frames encoded by encoder. |
231 int frames_dropped_media_encoder_; // Number of frames dropped by encoder. | 233 int frames_dropped_media_encoder_; // Number of frames dropped by encoder. |
232 // Number of dropped frames caused by full queue. | 234 // Number of dropped frames caused by full queue. |
233 int consecutive_full_queue_frame_drops_; | 235 int consecutive_full_queue_frame_drops_; |
234 int64_t stat_start_time_ms_; // Start time for statistics. | 236 int64_t stat_start_time_ms_; // Start time for statistics. |
235 int current_frames_; // Number of frames in the current statistics interval. | 237 int current_frames_; // Number of frames in the current statistics interval. |
236 int current_bytes_; // Encoded bytes in the current statistics interval. | 238 int current_bytes_; // Encoded bytes in the current statistics interval. |
237 int current_acc_qp_; // Accumulated QP in the current statistics interval. | 239 int current_acc_qp_; // Accumulated QP in the current statistics interval. |
238 int current_encoding_time_ms_; // Overall encoding time in the current second | 240 int current_encoding_time_ms_; // Overall encoding time in the current second |
239 int64_t last_input_timestamp_ms_; // Timestamp of last received yuv frame. | 241 int64_t last_input_timestamp_ms_; // Timestamp of last received yuv frame. |
240 int64_t last_output_timestamp_ms_; // Timestamp of last encoded frame. | 242 int64_t last_output_timestamp_ms_; // Timestamp of last encoded frame. |
241 // Holds the task while the polling loop is paused. | 243 // Holds the task while the polling loop is paused. |
242 std::unique_ptr<rtc::QueuedTask> encode_task_; | 244 std::unique_ptr<rtc::QueuedTask> encode_task_; |
243 | 245 |
244 struct InputFrameInfo { | 246 struct InputFrameInfo { |
245 InputFrameInfo(int64_t encode_start_time, | 247 InputFrameInfo(int64_t encode_start_time, |
246 int32_t frame_timestamp, | 248 int32_t frame_timestamp, |
247 int64_t frame_render_time_ms, | 249 int64_t frame_render_time_ms, |
248 webrtc::VideoRotation rotation) | 250 webrtc::VideoRotation rotation) |
249 : encode_start_time(encode_start_time), | 251 : encode_start_time(encode_start_time), |
250 frame_timestamp(frame_timestamp), | 252 frame_timestamp(frame_timestamp), |
251 frame_render_time_ms(frame_render_time_ms), | 253 frame_render_time_ms(frame_render_time_ms), |
252 rotation(rotation) {} | 254 rotation(rotation) {} |
253 // Time when video frame is sent to encoder input. | 255 // Time when video frame is sent to encoder input. |
254 const int64_t encode_start_time; | 256 const int64_t encode_start_time; |
255 | 257 |
256 // Input frame information. | 258 // Input frame information. |
257 const int32_t frame_timestamp; | 259 const int32_t frame_timestamp; |
258 const int64_t frame_render_time_ms; | 260 const int64_t frame_render_time_ms; |
259 const webrtc::VideoRotation rotation; | 261 const webrtc::VideoRotation rotation; |
260 }; | 262 }; |
261 std::list<InputFrameInfo> input_frame_infos_; | 263 std::list<InputFrameInfo> input_frame_infos_; |
262 int32_t output_timestamp_; // Last output frame timestamp from | 264 int32_t output_timestamp_; // Last output frame timestamp from |
263 // |input_frame_infos_|. | 265 // |input_frame_infos_|. |
264 int64_t output_render_time_ms_; // Last output frame render time from | 266 int64_t output_render_time_ms_; // Last output frame render time from |
265 // |input_frame_infos_|. | 267 // |input_frame_infos_|. |
266 webrtc::VideoRotation output_rotation_; // Last output frame rotation from | 268 webrtc::VideoRotation output_rotation_; // Last output frame rotation from |
267 // |input_frame_infos_|. | 269 // |input_frame_infos_|. |
268 // Frame size in bytes fed to MediaCodec. | 270 // Frame size in bytes fed to MediaCodec. |
269 int yuv_size_; | 271 int yuv_size_; |
270 // True only in the interval between a callback_->OnEncodedImage() call | 272 // True only in the interval between a callback_->OnEncodedImage() call |
271 // returning a positive value and the next Encode() call, which is ignored. | 273 // returning a positive value and the next Encode() call, which is ignored. |
272 bool drop_next_input_frame_; | 274 bool drop_next_input_frame_; |
273 bool scale_; | 275 bool scale_; |
274 // Global references; must be deleted in Release(). | 276 // Global references; must be deleted in Release(). |
275 std::vector<jobject> input_buffers_; | 277 std::vector<jobject> input_buffers_; |
276 webrtc::H264BitstreamParser h264_bitstream_parser_; | 278 webrtc::H264BitstreamParser h264_bitstream_parser_; |
277 | 279 |
278 // VP9 variables to populate codec specific structure. | 280 // VP9 variables to populate codec specific structure. |
279 webrtc::GofInfoVP9 gof_; // Contains each frame's temporal information for | 281 webrtc::GofInfoVP9 gof_; // Contains each frame's temporal information for |
280 // non-flexible VP9 mode. | 282 // non-flexible VP9 mode. |
281 uint8_t tl0_pic_idx_; | 283 uint8_t tl0_pic_idx_; |
282 size_t gof_idx_; | 284 size_t gof_idx_; |
283 | 285 |
284 // EGL context - owned by factory, should not be allocated/destroyed | 286 // EGL context - owned by factory, should not be allocated/destroyed |
285 // by MediaCodecVideoEncoder. | 287 // by MediaCodecVideoEncoder. |
286 jobject egl_context_; | 288 jobject egl_context_; |
287 | 289 |
288 // Temporary fix for VP8. | 290 // Temporary fix for VP8. |
289 // Sends a key frame if frames are widely spaced apart (possibly | 291 // Sends a key frame if frames are widely spaced apart (possibly |
290 // corresponding to a large image change). | 292 // corresponding to a large image change). |
(...skipping 221 matching lines...)
512 bool use_surface) { | 514 bool use_surface) { |
513 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_); | 515 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_); |
514 if (sw_fallback_required_) { | 516 if (sw_fallback_required_) { |
515 return WEBRTC_VIDEO_CODEC_OK; | 517 return WEBRTC_VIDEO_CODEC_OK; |
516 } | 518 } |
517 RTC_CHECK(!use_surface || egl_context_ != nullptr) << "EGL context not set."; | 519 RTC_CHECK(!use_surface || egl_context_ != nullptr) << "EGL context not set."; |
518 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 520 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
519 ScopedLocalRefFrame local_ref_frame(jni); | 521 ScopedLocalRefFrame local_ref_frame(jni); |
520 | 522 |
521 const VideoCodecType codec_type = GetCodecType(); | 523 const VideoCodecType codec_type = GetCodecType(); |
522 ALOGD << "InitEncodeInternal Type: " << (int)codec_type << ", " << width | 524 ALOGD << "InitEncodeInternal Type: " << static_cast<int>(codec_type) << ", " |
523 << " x " << height << ". Bitrate: " << kbps << " kbps. Fps: " << fps; | 525 << width << " x " << height << ". Bitrate: " << kbps |
| 526 << " kbps. Fps: " << fps; |
524 if (kbps == 0) { | 527 if (kbps == 0) { |
525 kbps = last_set_bitrate_kbps_; | 528 kbps = last_set_bitrate_kbps_; |
526 } | 529 } |
527 if (fps == 0) { | 530 if (fps == 0) { |
528 fps = MAX_VIDEO_FPS; | 531 fps = MAX_VIDEO_FPS; |
529 } | 532 } |
530 | 533 |
531 width_ = width; | 534 width_ = width; |
532 height_ = height; | 535 height_ = height; |
533 last_set_bitrate_kbps_ = kbps; | 536 last_set_bitrate_kbps_ = kbps; |
534 last_set_fps_ = (fps < MAX_VIDEO_FPS) ? fps : MAX_VIDEO_FPS; | 537 last_set_fps_ = (fps < MAX_VIDEO_FPS) ? fps : MAX_VIDEO_FPS; |
535 yuv_size_ = width_ * height_ * 3 / 2; | 538 yuv_size_ = width_ * height_ * 3 / 2; |
536 frames_received_ = 0; | 539 frames_received_ = 0; |
537 frames_encoded_ = 0; | 540 frames_encoded_ = 0; |
538 frames_dropped_media_encoder_ = 0; | 541 frames_dropped_media_encoder_ = 0; |
539 consecutive_full_queue_frame_drops_ = 0; | 542 consecutive_full_queue_frame_drops_ = 0; |
540 current_timestamp_us_ = 0; | 543 current_timestamp_us_ = 0; |
541 stat_start_time_ms_ = rtc::TimeMillis(); | 544 stat_start_time_ms_ = rtc::TimeMillis(); |
542 current_frames_ = 0; | 545 current_frames_ = 0; |
543 current_bytes_ = 0; | 546 current_bytes_ = 0; |
544 current_acc_qp_ = 0; | 547 current_acc_qp_ = 0; |
545 current_encoding_time_ms_ = 0; | 548 current_encoding_time_ms_ = 0; |
546 last_input_timestamp_ms_ = -1; | 549 last_input_timestamp_ms_ = -1; |
547 last_output_timestamp_ms_ = -1; | 550 last_output_timestamp_ms_ = -1; |
548 output_timestamp_ = 0; | 551 output_timestamp_ = 0; |
549 output_render_time_ms_ = 0; | 552 output_render_time_ms_ = 0; |
550 input_frame_infos_.clear(); | 553 input_frame_infos_.clear(); |
551 drop_next_input_frame_ = false; | 554 drop_next_input_frame_ = false; |
552 use_surface_ = use_surface; | 555 use_surface_ = use_surface; |
553 picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF; | 556 // TODO(ilnik): Use rand_r() instead to avoid LINT warnings below. |
| 557 picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF; // NOLINT |
554 gof_.SetGofInfoVP9(webrtc::TemporalStructureMode::kTemporalStructureMode1); | 558 gof_.SetGofInfoVP9(webrtc::TemporalStructureMode::kTemporalStructureMode1); |
555 tl0_pic_idx_ = static_cast<uint8_t>(rand()); | 559 tl0_pic_idx_ = static_cast<uint8_t>(rand()); // NOLINT |
556 gof_idx_ = 0; | 560 gof_idx_ = 0; |
557 last_frame_received_ms_ = -1; | 561 last_frame_received_ms_ = -1; |
558 frames_received_since_last_key_ = kMinKeyFrameInterval; | 562 frames_received_since_last_key_ = kMinKeyFrameInterval; |
559 | 563 |
560 // We enforce no extra stride/padding in the format creation step. | 564 // We enforce no extra stride/padding in the format creation step. |
561 jobject j_video_codec_enum = JavaEnumFromIndexAndClassName( | 565 jobject j_video_codec_enum = JavaEnumFromIndexAndClassName( |
562 jni, "MediaCodecVideoEncoder$VideoCodecType", codec_type); | 566 jni, "MediaCodecVideoEncoder$VideoCodecType", codec_type); |
563 const bool encode_status = jni->CallBooleanMethod( | 567 const bool encode_status = jni->CallBooleanMethod( |
564 *j_media_codec_video_encoder_, j_init_encode_method_, | 568 *j_media_codec_video_encoder_, j_init_encode_method_, |
565 j_video_codec_enum, width, height, kbps, fps, | 569 j_video_codec_enum, width, height, kbps, fps, |
(...skipping 103 matching lines...)
669 | 673 |
670 frames_received_++; | 674 frames_received_++; |
671 if (!DeliverPendingOutputs(jni)) { | 675 if (!DeliverPendingOutputs(jni)) { |
672 if (!ProcessHWError(true /* reset_if_fallback_unavailable */)) { | 676 if (!ProcessHWError(true /* reset_if_fallback_unavailable */)) { |
673 return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE | 677 return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE |
674 : WEBRTC_VIDEO_CODEC_ERROR; | 678 : WEBRTC_VIDEO_CODEC_ERROR; |
675 } | 679 } |
676 } | 680 } |
677 if (frames_encoded_ < kMaxEncodedLogFrames) { | 681 if (frames_encoded_ < kMaxEncodedLogFrames) { |
678 ALOGD << "Encoder frame in # " << (frames_received_ - 1) | 682 ALOGD << "Encoder frame in # " << (frames_received_ - 1) |
679 << ". TS: " << (int)(current_timestamp_us_ / 1000) | 683 << ". TS: " << static_cast<int>(current_timestamp_us_ / 1000) |
680 << ". Q: " << input_frame_infos_.size() << ". Fps: " << last_set_fps_ | 684 << ". Q: " << input_frame_infos_.size() << ". Fps: " << last_set_fps_ |
681 << ". Kbps: " << last_set_bitrate_kbps_; | 685 << ". Kbps: " << last_set_bitrate_kbps_; |
682 } | 686 } |
683 | 687 |
684 if (drop_next_input_frame_) { | 688 if (drop_next_input_frame_) { |
685 ALOGW << "Encoder drop frame - failed callback."; | 689 ALOGW << "Encoder drop frame - failed callback."; |
686 drop_next_input_frame_ = false; | 690 drop_next_input_frame_ = false; |
687 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; | 691 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; |
688 frames_dropped_media_encoder_++; | 692 frames_dropped_media_encoder_++; |
689 return WEBRTC_VIDEO_CODEC_OK; | 693 return WEBRTC_VIDEO_CODEC_OK; |
690 } | 694 } |
691 | 695 |
692 RTC_CHECK(frame_types->size() == 1) << "Unexpected stream count"; | 696 RTC_CHECK(frame_types->size() == 1) << "Unexpected stream count"; |
693 | 697 |
694 // Check if we accumulated too many frames in encoder input buffers and drop | 698 // Check if we accumulated too many frames in encoder input buffers and drop |
695 // frame if so. | 699 // frame if so. |
696 if (input_frame_infos_.size() > MAX_ENCODER_Q_SIZE) { | 700 if (input_frame_infos_.size() > MAX_ENCODER_Q_SIZE) { |
697 ALOGD << "Already " << input_frame_infos_.size() | 701 ALOGD << "Already " << input_frame_infos_.size() |
698 << " frames in the queue, dropping" | 702 << " frames in the queue, dropping" |
699 << ". TS: " << (int)(current_timestamp_us_ / 1000) | 703 << ". TS: " << static_cast<int>(current_timestamp_us_ / 1000) |
700 << ". Fps: " << last_set_fps_ | 704 << ". Fps: " << last_set_fps_ |
701 << ". Consecutive drops: " << consecutive_full_queue_frame_drops_; | 705 << ". Consecutive drops: " << consecutive_full_queue_frame_drops_; |
702 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; | 706 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; |
703 consecutive_full_queue_frame_drops_++; | 707 consecutive_full_queue_frame_drops_++; |
704 if (consecutive_full_queue_frame_drops_ >= | 708 if (consecutive_full_queue_frame_drops_ >= |
705 ENCODER_STALL_FRAMEDROP_THRESHOLD) { | 709 ENCODER_STALL_FRAMEDROP_THRESHOLD) { |
706 ALOGE << "Encoder got stuck."; | 710 ALOGE << "Encoder got stuck."; |
707 return ProcessHWErrorOnEncode(); | 711 return ProcessHWErrorOnEncode(); |
708 } | 712 } |
709 frames_dropped_media_encoder_++; | 713 frames_dropped_media_encoder_++; |
(...skipping 422 matching lines...)
1132 if (CheckException(jni) || !success) { | 1136 if (CheckException(jni) || !success) { |
1133 ProcessHWError(true /* reset_if_fallback_unavailable */); | 1137 ProcessHWError(true /* reset_if_fallback_unavailable */); |
1134 return false; | 1138 return false; |
1135 } | 1139 } |
1136 | 1140 |
1137 // Print per frame statistics. | 1141 // Print per frame statistics. |
1138 if (encoding_start_time_ms > 0) { | 1142 if (encoding_start_time_ms > 0) { |
1139 frame_encoding_time_ms = rtc::TimeMillis() - encoding_start_time_ms; | 1143 frame_encoding_time_ms = rtc::TimeMillis() - encoding_start_time_ms; |
1140 } | 1144 } |
1141 if (frames_encoded_ < kMaxEncodedLogFrames) { | 1145 if (frames_encoded_ < kMaxEncodedLogFrames) { |
1142 int current_latency = | 1146 int current_latency = static_cast<int>(last_input_timestamp_ms_ - |
1143 (int)(last_input_timestamp_ms_ - last_output_timestamp_ms_); | 1147 last_output_timestamp_ms_); |
1144 ALOGD << "Encoder frame out # " << frames_encoded_ << | 1148 ALOGD << "Encoder frame out # " << frames_encoded_ |
1145 ". Key: " << key_frame << | 1149 << ". Key: " << key_frame << ". Size: " << payload_size |
1146 ". Size: " << payload_size << | 1150 << ". TS: " << static_cast<int>(last_output_timestamp_ms_) |
1147 ". TS: " << (int)last_output_timestamp_ms_ << | 1151 << ". Latency: " << current_latency |
1148 ". Latency: " << current_latency << | 1152 << ". EncTime: " << frame_encoding_time_ms; |
1149 ". EncTime: " << frame_encoding_time_ms; | |
1150 } | 1153 } |
1151 | 1154 |
1152 // Calculate and print encoding statistics - every 3 seconds. | 1155 // Calculate and print encoding statistics - every 3 seconds. |
1153 frames_encoded_++; | 1156 frames_encoded_++; |
1154 current_frames_++; | 1157 current_frames_++; |
1155 current_bytes_ += payload_size; | 1158 current_bytes_ += payload_size; |
1156 current_encoding_time_ms_ += frame_encoding_time_ms; | 1159 current_encoding_time_ms_ += frame_encoding_time_ms; |
1157 LogStatistics(false); | 1160 LogStatistics(false); |
1158 | 1161 |
1159 // Errors in callback_result are currently ignored. | 1162 // Errors in callback_result are currently ignored. |
(...skipping 182 matching lines...)
1342 } | 1345 } |
1343 } | 1346 } |
1344 | 1347 |
1345 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( | 1348 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( |
1346 webrtc::VideoEncoder* encoder) { | 1349 webrtc::VideoEncoder* encoder) { |
1347 ALOGD << "Destroy video encoder."; | 1350 ALOGD << "Destroy video encoder."; |
1348 delete encoder; | 1351 delete encoder; |
1349 } | 1352 } |
1350 | 1353 |
1351 } // namespace webrtc_jni | 1354 } // namespace webrtc_jni |