| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| 11 // NOTICE: androidmediaencoder_jni.h must be included before | 11 // NOTICE: androidmediaencoder_jni.h must be included before |
| 12 // androidmediacodeccommon.h to avoid build errors. | 12 // androidmediacodeccommon.h to avoid build errors. |
| 13 #include "webrtc/sdk/android/src/jni/androidmediaencoder_jni.h" | 13 #include "webrtc/sdk/android/src/jni/androidmediaencoder_jni.h" |
| 14 | 14 |
| 15 #include <algorithm> | 15 #include <algorithm> |
| | 16 #include <memory> |
| 16 #include <list> | 17 #include <list> |
| 17 #include <memory> | |
| 18 #include <string> | |
| 19 #include <utility> | |
| 20 | 18 |
| 21 #include "third_party/libyuv/include/libyuv/convert.h" | 19 #include "third_party/libyuv/include/libyuv/convert.h" |
| 22 #include "third_party/libyuv/include/libyuv/convert_from.h" | 20 #include "third_party/libyuv/include/libyuv/convert_from.h" |
| 23 #include "third_party/libyuv/include/libyuv/video_common.h" | 21 #include "third_party/libyuv/include/libyuv/video_common.h" |
| 24 #include "webrtc/api/video_codecs/video_encoder.h" | 22 #include "webrtc/sdk/android/src/jni/androidmediacodeccommon.h" |
| 23 #include "webrtc/sdk/android/src/jni/classreferenceholder.h" |
| 24 #include "webrtc/sdk/android/src/jni/native_handle_impl.h" |
| 25 #include "webrtc/base/bind.h" | 25 #include "webrtc/base/bind.h" |
| 26 #include "webrtc/base/checks.h" | 26 #include "webrtc/base/checks.h" |
| 27 #include "webrtc/base/logging.h" | 27 #include "webrtc/base/logging.h" |
| 28 #include "webrtc/base/sequenced_task_checker.h" | 28 #include "webrtc/base/sequenced_task_checker.h" |
| 29 #include "webrtc/base/task_queue.h" | 29 #include "webrtc/base/task_queue.h" |
| 30 #include "webrtc/base/thread.h" | 30 #include "webrtc/base/thread.h" |
| 31 #include "webrtc/base/timeutils.h" | 31 #include "webrtc/base/timeutils.h" |
| 32 #include "webrtc/base/weak_ptr.h" | 32 #include "webrtc/base/weak_ptr.h" |
| 33 #include "webrtc/common_types.h" | 33 #include "webrtc/common_types.h" |
| 34 #include "webrtc/common_video/h264/h264_bitstream_parser.h" | 34 #include "webrtc/common_video/h264/h264_bitstream_parser.h" |
| 35 #include "webrtc/common_video/h264/h264_common.h" | 35 #include "webrtc/common_video/h264/h264_common.h" |
| 36 #include "webrtc/common_video/h264/profile_level_id.h" | 36 #include "webrtc/common_video/h264/profile_level_id.h" |
| 37 #include "webrtc/media/engine/internalencoderfactory.h" | 37 #include "webrtc/media/engine/internalencoderfactory.h" |
| 38 #include "webrtc/modules/video_coding/include/video_codec_interface.h" | 38 #include "webrtc/modules/video_coding/include/video_codec_interface.h" |
| 39 #include "webrtc/modules/video_coding/utility/quality_scaler.h" | 39 #include "webrtc/modules/video_coding/utility/quality_scaler.h" |
| 40 #include "webrtc/modules/video_coding/utility/vp8_header_parser.h" | 40 #include "webrtc/modules/video_coding/utility/vp8_header_parser.h" |
| 41 #include "webrtc/sdk/android/src/jni/androidmediacodeccommon.h" | |
| 42 #include "webrtc/sdk/android/src/jni/classreferenceholder.h" | |
| 43 #include "webrtc/sdk/android/src/jni/native_handle_impl.h" | |
| 44 #include "webrtc/system_wrappers/include/field_trial.h" | 41 #include "webrtc/system_wrappers/include/field_trial.h" |
| 45 #include "webrtc/system_wrappers/include/logcat_trace_context.h" | 42 #include "webrtc/system_wrappers/include/logcat_trace_context.h" |
| 43 #include "webrtc/video_encoder.h" |
| 46 | 44 |
| 47 using rtc::Bind; | 45 using rtc::Bind; |
| 48 using rtc::Thread; | 46 using rtc::Thread; |
| 49 using rtc::ThreadManager; | 47 using rtc::ThreadManager; |
| 50 | 48 |
| 51 using webrtc::CodecSpecificInfo; | 49 using webrtc::CodecSpecificInfo; |
| 52 using webrtc::EncodedImage; | 50 using webrtc::EncodedImage; |
| 53 using webrtc::VideoFrame; | 51 using webrtc::VideoFrame; |
| 54 using webrtc::RTPFragmentationHeader; | 52 using webrtc::RTPFragmentationHeader; |
| 55 using webrtc::VideoCodec; | 53 using webrtc::VideoCodec; |
| (...skipping 62 matching lines...) |
| 118 int64_t /* rtt */) override; | 116 int64_t /* rtt */) override; |
| 119 int32_t SetRateAllocation(const webrtc::BitrateAllocation& rate_allocation, | 117 int32_t SetRateAllocation(const webrtc::BitrateAllocation& rate_allocation, |
| 120 uint32_t frame_rate) override; | 118 uint32_t frame_rate) override; |
| 121 | 119 |
| 122 bool SupportsNativeHandle() const override { return egl_context_ != nullptr; } | 120 bool SupportsNativeHandle() const override { return egl_context_ != nullptr; } |
| 123 const char* ImplementationName() const override; | 121 const char* ImplementationName() const override; |
| 124 | 122 |
| 125 private: | 123 private: |
| 126 class EncodeTask : public rtc::QueuedTask { | 124 class EncodeTask : public rtc::QueuedTask { |
| 127 public: | 125 public: |
| 128 explicit EncodeTask(rtc::WeakPtr<MediaCodecVideoEncoder> encoder); | 126 EncodeTask(rtc::WeakPtr<MediaCodecVideoEncoder> encoder); |
| 129 bool Run() override; | 127 bool Run() override; |
| 130 | 128 |
| 131 private: | 129 private: |
| 132 rtc::WeakPtr<MediaCodecVideoEncoder> encoder_; | 130 rtc::WeakPtr<MediaCodecVideoEncoder> encoder_; |
| 133 }; | 131 }; |
| 134 | 132 |
| 135 // ResetCodec() calls Release() and InitEncodeInternal() in an attempt to | 133 // ResetCodec() calls Release() and InitEncodeInternal() in an attempt to |
| 136 // restore the codec to an operable state. Necessary after all manner of | 134 // restore the codec to an operable state. Necessary after all manner of |
| 137 // OMX-layer errors. Returns true if the codec was reset successfully. | 135 // OMX-layer errors. Returns true if the codec was reset successfully. |
| 138 bool ResetCodec(); | 136 bool ResetCodec(); |
| (...skipping 90 matching lines...) |
| 229 int last_set_fps_; // Last-requested frame rate. | 227 int last_set_fps_; // Last-requested frame rate. |
| 230 int64_t current_timestamp_us_; // Current frame timestamps in us. | 228 int64_t current_timestamp_us_; // Current frame timestamps in us. |
| 231 int frames_received_; // Number of frames received by encoder. | 229 int frames_received_; // Number of frames received by encoder. |
| 232 int frames_encoded_; // Number of frames encoded by encoder. | 230 int frames_encoded_; // Number of frames encoded by encoder. |
| 233 int frames_dropped_media_encoder_; // Number of frames dropped by encoder. | 231 int frames_dropped_media_encoder_; // Number of frames dropped by encoder. |
| 234 // Number of dropped frames caused by full queue. | 232 // Number of dropped frames caused by full queue. |
| 235 int consecutive_full_queue_frame_drops_; | 233 int consecutive_full_queue_frame_drops_; |
| 236 int64_t stat_start_time_ms_; // Start time for statistics. | 234 int64_t stat_start_time_ms_; // Start time for statistics. |
| 237 int current_frames_; // Number of frames in the current statistics interval. | 235 int current_frames_; // Number of frames in the current statistics interval. |
| 238 int current_bytes_; // Encoded bytes in the current statistics interval. | 236 int current_bytes_; // Encoded bytes in the current statistics interval. |
| 239 int current_acc_qp_; // Accumulated QP in the current statistics interval. | 237 int current_acc_qp_; // Accumulated QP in the current statistics interval. |
| 240 int current_encoding_time_ms_; // Overall encoding time in the current second | 238 int current_encoding_time_ms_; // Overall encoding time in the current second |
| 241 int64_t last_input_timestamp_ms_; // Timestamp of last received yuv frame. | 239 int64_t last_input_timestamp_ms_; // Timestamp of last received yuv frame. |
| 242 int64_t last_output_timestamp_ms_; // Timestamp of last encoded frame. | 240 int64_t last_output_timestamp_ms_; // Timestamp of last encoded frame. |
| 243 // Holds the task while the polling loop is paused. | 241 // Holds the task while the polling loop is paused. |
| 244 std::unique_ptr<rtc::QueuedTask> encode_task_; | 242 std::unique_ptr<rtc::QueuedTask> encode_task_; |
| 245 | 243 |
| 246 struct InputFrameInfo { | 244 struct InputFrameInfo { |
| 247 InputFrameInfo(int64_t encode_start_time, | 245 InputFrameInfo(int64_t encode_start_time, |
| 248 int32_t frame_timestamp, | 246 int32_t frame_timestamp, |
| 249 int64_t frame_render_time_ms, | 247 int64_t frame_render_time_ms, |
| 250 webrtc::VideoRotation rotation) | 248 webrtc::VideoRotation rotation) |
| 251 : encode_start_time(encode_start_time), | 249 : encode_start_time(encode_start_time), |
| 252 frame_timestamp(frame_timestamp), | 250 frame_timestamp(frame_timestamp), |
| 253 frame_render_time_ms(frame_render_time_ms), | 251 frame_render_time_ms(frame_render_time_ms), |
| 254 rotation(rotation) {} | 252 rotation(rotation) {} |
| 255 // Time when video frame is sent to encoder input. | 253 // Time when video frame is sent to encoder input. |
| 256 const int64_t encode_start_time; | 254 const int64_t encode_start_time; |
| 257 | 255 |
| 258 // Input frame information. | 256 // Input frame information. |
| 259 const int32_t frame_timestamp; | 257 const int32_t frame_timestamp; |
| 260 const int64_t frame_render_time_ms; | 258 const int64_t frame_render_time_ms; |
| 261 const webrtc::VideoRotation rotation; | 259 const webrtc::VideoRotation rotation; |
| 262 }; | 260 }; |
| 263 std::list<InputFrameInfo> input_frame_infos_; | 261 std::list<InputFrameInfo> input_frame_infos_; |
| 264 int32_t output_timestamp_; // Last output frame timestamp from | 262 int32_t output_timestamp_; // Last output frame timestamp from |
| 265 // |input_frame_infos_|. | 263 // |input_frame_infos_|. |
| 266 int64_t output_render_time_ms_; // Last output frame render time from | 264 int64_t output_render_time_ms_; // Last output frame render time from |
| 267 // |input_frame_infos_|. | 265 // |input_frame_infos_|. |
| 268 webrtc::VideoRotation output_rotation_; // Last output frame rotation from | 266 webrtc::VideoRotation output_rotation_; // Last output frame rotation from |
| 269 // |input_frame_infos_|. | 267 // |input_frame_infos_|. |
| 270 // Frame size in bytes fed to MediaCodec. | 268 // Frame size in bytes fed to MediaCodec. |
| 271 int yuv_size_; | 269 int yuv_size_; |
| 272 // True only when between a callback_->OnEncodedImage() call return a positive | 270 // True only when between a callback_->OnEncodedImage() call return a positive |
| 273 // value and the next Encode() call being ignored. | 271 // value and the next Encode() call being ignored. |
| 274 bool drop_next_input_frame_; | 272 bool drop_next_input_frame_; |
| 275 bool scale_; | 273 bool scale_; |
| 276 // Global references; must be deleted in Release(). | 274 // Global references; must be deleted in Release(). |
| 277 std::vector<jobject> input_buffers_; | 275 std::vector<jobject> input_buffers_; |
| 278 webrtc::H264BitstreamParser h264_bitstream_parser_; | 276 webrtc::H264BitstreamParser h264_bitstream_parser_; |
| 279 | 277 |
| 280 // VP9 variables to populate codec specific structure. | 278 // VP9 variables to populate codec specific structure. |
| 281 webrtc::GofInfoVP9 gof_; // Contains each frame's temporal information for | 279 webrtc::GofInfoVP9 gof_; // Contains each frame's temporal information for |
| 282 // non-flexible VP9 mode. | 280 // non-flexible VP9 mode. |
| 283 uint8_t tl0_pic_idx_; | 281 uint8_t tl0_pic_idx_; |
| 284 size_t gof_idx_; | 282 size_t gof_idx_; |
| 285 | 283 |
| 286 // EGL context - owned by factory, should not be allocated/destroyed | 284 // EGL context - owned by factory, should not be allocated/destroyed |
| 287 // by MediaCodecVideoEncoder. | 285 // by MediaCodecVideoEncoder. |
| 288 jobject egl_context_; | 286 jobject egl_context_; |
| 289 | 287 |
| 290 // Temporary fix for VP8. | 288 // Temporary fix for VP8. |
| 291 // Sends a key frame if frames are largely spaced apart (possibly | 289 // Sends a key frame if frames are largely spaced apart (possibly |
| 292 // corresponding to a large image change). | 290 // corresponding to a large image change). |
| (...skipping 221 matching lines...) |
| 514 bool use_surface) { | 512 bool use_surface) { |
| 515 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_); | 513 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_); |
| 516 if (sw_fallback_required_) { | 514 if (sw_fallback_required_) { |
| 517 return WEBRTC_VIDEO_CODEC_OK; | 515 return WEBRTC_VIDEO_CODEC_OK; |
| 518 } | 516 } |
| 519 RTC_CHECK(!use_surface || egl_context_ != nullptr) << "EGL context not set."; | 517 RTC_CHECK(!use_surface || egl_context_ != nullptr) << "EGL context not set."; |
| 520 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 518 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 521 ScopedLocalRefFrame local_ref_frame(jni); | 519 ScopedLocalRefFrame local_ref_frame(jni); |
| 522 | 520 |
| 523 const VideoCodecType codec_type = GetCodecType(); | 521 const VideoCodecType codec_type = GetCodecType(); |
| 524 ALOGD << "InitEncodeInternal Type: " << static_cast<int>(codec_type) << ", " | 522 ALOGD << "InitEncodeInternal Type: " << (int)codec_type << ", " << width |
| 525 << width << " x " << height << ". Bitrate: " << kbps | 523 << " x " << height << ". Bitrate: " << kbps << " kbps. Fps: " << fps; |
| 526 << " kbps. Fps: " << fps; | |
| 527 if (kbps == 0) { | 524 if (kbps == 0) { |
| 528 kbps = last_set_bitrate_kbps_; | 525 kbps = last_set_bitrate_kbps_; |
| 529 } | 526 } |
| 530 if (fps == 0) { | 527 if (fps == 0) { |
| 531 fps = MAX_VIDEO_FPS; | 528 fps = MAX_VIDEO_FPS; |
| 532 } | 529 } |
| 533 | 530 |
| 534 width_ = width; | 531 width_ = width; |
| 535 height_ = height; | 532 height_ = height; |
| 536 last_set_bitrate_kbps_ = kbps; | 533 last_set_bitrate_kbps_ = kbps; |
| 537 last_set_fps_ = (fps < MAX_VIDEO_FPS) ? fps : MAX_VIDEO_FPS; | 534 last_set_fps_ = (fps < MAX_VIDEO_FPS) ? fps : MAX_VIDEO_FPS; |
| 538 yuv_size_ = width_ * height_ * 3 / 2; | 535 yuv_size_ = width_ * height_ * 3 / 2; |
| 539 frames_received_ = 0; | 536 frames_received_ = 0; |
| 540 frames_encoded_ = 0; | 537 frames_encoded_ = 0; |
| 541 frames_dropped_media_encoder_ = 0; | 538 frames_dropped_media_encoder_ = 0; |
| 542 consecutive_full_queue_frame_drops_ = 0; | 539 consecutive_full_queue_frame_drops_ = 0; |
| 543 current_timestamp_us_ = 0; | 540 current_timestamp_us_ = 0; |
| 544 stat_start_time_ms_ = rtc::TimeMillis(); | 541 stat_start_time_ms_ = rtc::TimeMillis(); |
| 545 current_frames_ = 0; | 542 current_frames_ = 0; |
| 546 current_bytes_ = 0; | 543 current_bytes_ = 0; |
| 547 current_acc_qp_ = 0; | 544 current_acc_qp_ = 0; |
| 548 current_encoding_time_ms_ = 0; | 545 current_encoding_time_ms_ = 0; |
| 549 last_input_timestamp_ms_ = -1; | 546 last_input_timestamp_ms_ = -1; |
| 550 last_output_timestamp_ms_ = -1; | 547 last_output_timestamp_ms_ = -1; |
| 551 output_timestamp_ = 0; | 548 output_timestamp_ = 0; |
| 552 output_render_time_ms_ = 0; | 549 output_render_time_ms_ = 0; |
| 553 input_frame_infos_.clear(); | 550 input_frame_infos_.clear(); |
| 554 drop_next_input_frame_ = false; | 551 drop_next_input_frame_ = false; |
| 555 use_surface_ = use_surface; | 552 use_surface_ = use_surface; |
| 556 // TODO(ilnik): Use rand_r() instead to avoid LINT warnings below. | 553 picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF; |
| 557 picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF; // NOLINT | |
| 558 gof_.SetGofInfoVP9(webrtc::TemporalStructureMode::kTemporalStructureMode1); | 554 gof_.SetGofInfoVP9(webrtc::TemporalStructureMode::kTemporalStructureMode1); |
| 559 tl0_pic_idx_ = static_cast<uint8_t>(rand()); // NOLINT | 555 tl0_pic_idx_ = static_cast<uint8_t>(rand()); |
| 560 gof_idx_ = 0; | 556 gof_idx_ = 0; |
| 561 last_frame_received_ms_ = -1; | 557 last_frame_received_ms_ = -1; |
| 562 frames_received_since_last_key_ = kMinKeyFrameInterval; | 558 frames_received_since_last_key_ = kMinKeyFrameInterval; |
| 563 | 559 |
| 564 // We enforce no extra stride/padding in the format creation step. | 560 // We enforce no extra stride/padding in the format creation step. |
| 565 jobject j_video_codec_enum = JavaEnumFromIndexAndClassName( | 561 jobject j_video_codec_enum = JavaEnumFromIndexAndClassName( |
| 566 jni, "MediaCodecVideoEncoder$VideoCodecType", codec_type); | 562 jni, "MediaCodecVideoEncoder$VideoCodecType", codec_type); |
| 567 const bool encode_status = jni->CallBooleanMethod( | 563 const bool encode_status = jni->CallBooleanMethod( |
| 568 *j_media_codec_video_encoder_, j_init_encode_method_, | 564 *j_media_codec_video_encoder_, j_init_encode_method_, |
| 569 j_video_codec_enum, width, height, kbps, fps, | 565 j_video_codec_enum, width, height, kbps, fps, |
| (...skipping 103 matching lines...) |
| 673 | 669 |
| 674 frames_received_++; | 670 frames_received_++; |
| 675 if (!DeliverPendingOutputs(jni)) { | 671 if (!DeliverPendingOutputs(jni)) { |
| 676 if (!ProcessHWError(true /* reset_if_fallback_unavailable */)) { | 672 if (!ProcessHWError(true /* reset_if_fallback_unavailable */)) { |
| 677 return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE | 673 return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE |
| 678 : WEBRTC_VIDEO_CODEC_ERROR; | 674 : WEBRTC_VIDEO_CODEC_ERROR; |
| 679 } | 675 } |
| 680 } | 676 } |
| 681 if (frames_encoded_ < kMaxEncodedLogFrames) { | 677 if (frames_encoded_ < kMaxEncodedLogFrames) { |
| 682 ALOGD << "Encoder frame in # " << (frames_received_ - 1) | 678 ALOGD << "Encoder frame in # " << (frames_received_ - 1) |
| 683 << ". TS: " << static_cast<int>(current_timestamp_us_ / 1000) | 679 << ". TS: " << (int)(current_timestamp_us_ / 1000) |
| 684 << ". Q: " << input_frame_infos_.size() << ". Fps: " << last_set_fps_ | 680 << ". Q: " << input_frame_infos_.size() << ". Fps: " << last_set_fps_ |
| 685 << ". Kbps: " << last_set_bitrate_kbps_; | 681 << ". Kbps: " << last_set_bitrate_kbps_; |
| 686 } | 682 } |
| 687 | 683 |
| 688 if (drop_next_input_frame_) { | 684 if (drop_next_input_frame_) { |
| 689 ALOGW << "Encoder drop frame - failed callback."; | 685 ALOGW << "Encoder drop frame - failed callback."; |
| 690 drop_next_input_frame_ = false; | 686 drop_next_input_frame_ = false; |
| 691 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; | 687 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; |
| 692 frames_dropped_media_encoder_++; | 688 frames_dropped_media_encoder_++; |
| 693 return WEBRTC_VIDEO_CODEC_OK; | 689 return WEBRTC_VIDEO_CODEC_OK; |
| 694 } | 690 } |
| 695 | 691 |
| 696 RTC_CHECK(frame_types->size() == 1) << "Unexpected stream count"; | 692 RTC_CHECK(frame_types->size() == 1) << "Unexpected stream count"; |
| 697 | 693 |
| 698 // Check if we accumulated too many frames in encoder input buffers and drop | 694 // Check if we accumulated too many frames in encoder input buffers and drop |
| 699 // frame if so. | 695 // frame if so. |
| 700 if (input_frame_infos_.size() > MAX_ENCODER_Q_SIZE) { | 696 if (input_frame_infos_.size() > MAX_ENCODER_Q_SIZE) { |
| 701 ALOGD << "Already " << input_frame_infos_.size() | 697 ALOGD << "Already " << input_frame_infos_.size() |
| 702 << " frames in the queue, dropping" | 698 << " frames in the queue, dropping" |
| 703 << ". TS: " << static_cast<int>(current_timestamp_us_ / 1000) | 699 << ". TS: " << (int)(current_timestamp_us_ / 1000) |
| 704 << ". Fps: " << last_set_fps_ | 700 << ". Fps: " << last_set_fps_ |
| 705 << ". Consecutive drops: " << consecutive_full_queue_frame_drops_; | 701 << ". Consecutive drops: " << consecutive_full_queue_frame_drops_; |
| 706 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; | 702 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; |
| 707 consecutive_full_queue_frame_drops_++; | 703 consecutive_full_queue_frame_drops_++; |
| 708 if (consecutive_full_queue_frame_drops_ >= | 704 if (consecutive_full_queue_frame_drops_ >= |
| 709 ENCODER_STALL_FRAMEDROP_THRESHOLD) { | 705 ENCODER_STALL_FRAMEDROP_THRESHOLD) { |
| 710 ALOGE << "Encoder got stuck."; | 706 ALOGE << "Encoder got stuck."; |
| 711 return ProcessHWErrorOnEncode(); | 707 return ProcessHWErrorOnEncode(); |
| 712 } | 708 } |
| 713 frames_dropped_media_encoder_++; | 709 frames_dropped_media_encoder_++; |
| (...skipping 422 matching lines...) |
| 1136 if (CheckException(jni) || !success) { | 1132 if (CheckException(jni) || !success) { |
| 1137 ProcessHWError(true /* reset_if_fallback_unavailable */); | 1133 ProcessHWError(true /* reset_if_fallback_unavailable */); |
| 1138 return false; | 1134 return false; |
| 1139 } | 1135 } |
| 1140 | 1136 |
| 1141 // Print per frame statistics. | 1137 // Print per frame statistics. |
| 1142 if (encoding_start_time_ms > 0) { | 1138 if (encoding_start_time_ms > 0) { |
| 1143 frame_encoding_time_ms = rtc::TimeMillis() - encoding_start_time_ms; | 1139 frame_encoding_time_ms = rtc::TimeMillis() - encoding_start_time_ms; |
| 1144 } | 1140 } |
| 1145 if (frames_encoded_ < kMaxEncodedLogFrames) { | 1141 if (frames_encoded_ < kMaxEncodedLogFrames) { |
| 1146 int current_latency = static_cast<int>(last_input_timestamp_ms_ - | 1142 int current_latency = |
| 1147 last_output_timestamp_ms_); | 1143 (int)(last_input_timestamp_ms_ - last_output_timestamp_ms_); |
| 1148 ALOGD << "Encoder frame out # " << frames_encoded_ | 1144 ALOGD << "Encoder frame out # " << frames_encoded_ << |
| 1149 << ". Key: " << key_frame << ". Size: " << payload_size | 1145 ". Key: " << key_frame << |
| 1150 << ". TS: " << static_cast<int>(last_output_timestamp_ms_) | 1146 ". Size: " << payload_size << |
| 1151 << ". Latency: " << current_latency | 1147 ". TS: " << (int)last_output_timestamp_ms_ << |
| 1152 << ". EncTime: " << frame_encoding_time_ms; | 1148 ". Latency: " << current_latency << |
| 1149 ". EncTime: " << frame_encoding_time_ms; |
| 1153 } | 1150 } |
| 1154 | 1151 |
| 1155 // Calculate and print encoding statistics - every 3 seconds. | 1152 // Calculate and print encoding statistics - every 3 seconds. |
| 1156 frames_encoded_++; | 1153 frames_encoded_++; |
| 1157 current_frames_++; | 1154 current_frames_++; |
| 1158 current_bytes_ += payload_size; | 1155 current_bytes_ += payload_size; |
| 1159 current_encoding_time_ms_ += frame_encoding_time_ms; | 1156 current_encoding_time_ms_ += frame_encoding_time_ms; |
| 1160 LogStatistics(false); | 1157 LogStatistics(false); |
| 1161 | 1158 |
| 1162 // Errors in callback_result are currently ignored. | 1159 // Errors in callback_result are currently ignored. |
| (...skipping 182 matching lines...) |
| 1345 } | 1342 } |
| 1346 } | 1343 } |
| 1347 | 1344 |
| 1348 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( | 1345 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( |
| 1349 webrtc::VideoEncoder* encoder) { | 1346 webrtc::VideoEncoder* encoder) { |
| 1350 ALOGD << "Destroy video encoder."; | 1347 ALOGD << "Destroy video encoder."; |
| 1351 delete encoder; | 1348 delete encoder; |
| 1352 } | 1349 } |
| 1353 | 1350 |
| 1354 } // namespace webrtc_jni | 1351 } // namespace webrtc_jni |
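
The EncodeTask rows above rely on one pattern worth noting: each queued encode task holds only a rtc::WeakPtr to the MediaCodecVideoEncoder, so a task that runs after Release() finds a null pointer and becomes a no-op instead of touching a destroyed encoder. The sketch below shows the same idea in standard, self-contained C++, using std::weak_ptr and a plain std::function queue in place of rtc::WeakPtr and rtc::TaskQueue; the Encoder, MakeEncodeTask, and EncodeQueued names are illustrative stand-ins, not WebRTC APIs.

#include <functional>
#include <iostream>
#include <memory>
#include <queue>

// Stand-in for the encoder; only the piece an encode task would touch.
struct Encoder {
  void EncodeQueued() { std::cout << "encode one queued frame\n"; }
};

// Stand-in for building an encode task: capture a weak handle, not an
// owning one, so a task that outlives the encoder silently does nothing.
std::function<void()> MakeEncodeTask(std::weak_ptr<Encoder> weak_encoder) {
  return [weak_encoder]() {
    if (std::shared_ptr<Encoder> encoder = weak_encoder.lock()) {
      encoder->EncodeQueued();
    }
    // else: encoder already released; drop the task.
  };
}

int main() {
  std::queue<std::function<void()>> task_queue;  // stand-in for a task queue

  auto encoder = std::make_shared<Encoder>();
  task_queue.push(MakeEncodeTask(encoder));
  task_queue.push(MakeEncodeTask(encoder));

  // One task runs while the encoder is alive...
  task_queue.front()();
  task_queue.pop();

  encoder.reset();  // ...then the encoder is released.

  // The remaining task fires after release and is a harmless no-op.
  task_queue.front()();
  task_queue.pop();
  return 0;
}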