OLD | NEW |
---|---|
1 /* | 1 /* |
2 * libjingle | 2 * libjingle |
3 * Copyright 2015 Google Inc. | 3 * Copyright 2015 Google Inc. |
4 * | 4 * |
5 * Redistribution and use in source and binary forms, with or without | 5 * Redistribution and use in source and binary forms, with or without |
6 * modification, are permitted provided that the following conditions are met: | 6 * modification, are permitted provided that the following conditions are met: |
7 * | 7 * |
8 * 1. Redistributions of source code must retain the above copyright notice, | 8 * 1. Redistributions of source code must retain the above copyright notice, |
9 * this list of conditions and the following disclaimer. | 9 * this list of conditions and the following disclaimer. |
10 * 2. Redistributions in binary form must reproduce the above copyright notice, | 10 * 2. Redistributions in binary form must reproduce the above copyright notice, |
(...skipping 59 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
70 #define MAX_VIDEO_WIDTH 1280 | 70 #define MAX_VIDEO_WIDTH 1280 |
71 #define MAX_VIDEO_HEIGHT 1280 | 71 #define MAX_VIDEO_HEIGHT 1280 |
72 // Maximum supported HW video encoder fps. | 72 // Maximum supported HW video encoder fps. |
73 #define MAX_VIDEO_FPS 30 | 73 #define MAX_VIDEO_FPS 30 |
74 // Maximum allowed fps value in SetRates() call. | 74 // Maximum allowed fps value in SetRates() call. |
75 #define MAX_ALLOWED_VIDEO_FPS 60 | 75 #define MAX_ALLOWED_VIDEO_FPS 60 |
76 // Maximum allowed frames in encoder input queue. | 76 // Maximum allowed frames in encoder input queue. |
77 #define MAX_ENCODER_Q_SIZE 2 | 77 #define MAX_ENCODER_Q_SIZE 2 |
78 // Maximum allowed latency in ms. | 78 // Maximum allowed latency in ms. |
79 #define MAX_ENCODER_LATENCY_MS 70 | 79 #define MAX_ENCODER_LATENCY_MS 70 |
80 | 80 // Maximum number of dropped frames caused by full encoder queue - exceeding |
81 // this threshold means that encoder probably got stuck and needs to be reset. | |
82 #define ENCODER_STALL_FRAMEDROP_THRESHOLD 60 | |
81 | 83 |
82 // Logging macros. | 84 // Logging macros. |
83 #define TAG_ENCODER "MediaCodecVideoEncoder" | 85 #define TAG_ENCODER "MediaCodecVideoEncoder" |
84 #ifdef TRACK_BUFFER_TIMING | 86 #ifdef TRACK_BUFFER_TIMING |
85 #define ALOGV(...) | 87 #define ALOGV(...) |
86 __android_log_print(ANDROID_LOG_VERBOSE, TAG_ENCODER, __VA_ARGS__) | 88 __android_log_print(ANDROID_LOG_VERBOSE, TAG_ENCODER, __VA_ARGS__) |
87 #else | 89 #else |
88 #define ALOGV(...) | 90 #define ALOGV(...) |
89 #endif | 91 #endif |
90 #define ALOGD LOG_TAG(rtc::LS_INFO, TAG_ENCODER) | 92 #define ALOGD LOG_TAG(rtc::LS_INFO, TAG_ENCODER) |
(...skipping 123 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
214 bool inited_; | 216 bool inited_; |
215 bool use_surface_; | 217 bool use_surface_; |
216 uint16_t picture_id_; | 218 uint16_t picture_id_; |
217 enum libyuv::FourCC encoder_fourcc_; // Encoder color space format. | 219 enum libyuv::FourCC encoder_fourcc_; // Encoder color space format. |
218 int last_set_bitrate_kbps_; // Last-requested bitrate in kbps. | 220 int last_set_bitrate_kbps_; // Last-requested bitrate in kbps. |
219 int last_set_fps_; // Last-requested frame rate. | 221 int last_set_fps_; // Last-requested frame rate. |
220 int64_t current_timestamp_us_; // Current frame timestamps in us. | 222 int64_t current_timestamp_us_; // Current frame timestamps in us. |
221 int frames_received_; // Number of frames received by encoder. | 223 int frames_received_; // Number of frames received by encoder. |
222 int frames_encoded_; // Number of frames encoded by encoder. | 224 int frames_encoded_; // Number of frames encoded by encoder. |
223 int frames_dropped_; // Number of frames dropped by encoder. | 225 int frames_dropped_; // Number of frames dropped by encoder. |
226 // Number of dropped frames caused by full queue. | |
227 int consecutive_full_queue_frame_drops_; | |
224 int frames_in_queue_; // Number of frames in encoder queue. | 228 int frames_in_queue_; // Number of frames in encoder queue. |
225 int64_t start_time_ms_; // Start time for statistics. | 229 int64_t start_time_ms_; // Start time for statistics. |
226 int current_frames_; // Number of frames in the current statistics interval. | 230 int current_frames_; // Number of frames in the current statistics interval. |
227 int current_bytes_; // Encoded bytes in the current statistics interval. | 231 int current_bytes_; // Encoded bytes in the current statistics interval. |
228 int current_acc_qp_; // Accumulated QP in the current statistics interval. | 232 int current_acc_qp_; // Accumulated QP in the current statistics interval. |
229 int current_encoding_time_ms_; // Overall encoding time in the current second | 233 int current_encoding_time_ms_; // Overall encoding time in the current second |
230 int64_t last_input_timestamp_ms_; // Timestamp of last received yuv frame. | 234 int64_t last_input_timestamp_ms_; // Timestamp of last received yuv frame. |
231 int64_t last_output_timestamp_ms_; // Timestamp of last encoded frame. | 235 int64_t last_output_timestamp_ms_; // Timestamp of last encoded frame. |
232 std::vector<int32_t> timestamps_; // Video frames timestamp queue. | 236 std::vector<int32_t> timestamps_; // Video frames timestamp queue. |
233 std::vector<int64_t> render_times_ms_; // Video frames render time queue. | 237 std::vector<int64_t> render_times_ms_; // Video frames render time queue. |
(...skipping 129 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
363 scale_ = (codecType_ != kVideoCodecVP9) && (webrtc::field_trial::FindFullName( | 367 scale_ = (codecType_ != kVideoCodecVP9) && (webrtc::field_trial::FindFullName( |
364 "WebRTC-MediaCodecVideoEncoder-AutomaticResize") == "Enabled"); | 368 "WebRTC-MediaCodecVideoEncoder-AutomaticResize") == "Enabled"); |
365 ALOGD << "Encoder automatic resize " << (scale_ ? "enabled" : "disabled"); | 369 ALOGD << "Encoder automatic resize " << (scale_ ? "enabled" : "disabled"); |
366 if (scale_) { | 370 if (scale_) { |
367 if (codecType_ == kVideoCodecVP8) { | 371 if (codecType_ == kVideoCodecVP8) { |
368 // QP is obtained from VP8-bitstream for HW, so the QP corresponds to the | 372 // QP is obtained from VP8-bitstream for HW, so the QP corresponds to the |
369 // (internal) range: [0, 127]. And we cannot change QP_max in HW, so it is | 373 // (internal) range: [0, 127]. And we cannot change QP_max in HW, so it is |
370 // always = 127. Note that in SW, QP is that of the user-level range [0, | 374 // always = 127. Note that in SW, QP is that of the user-level range [0, |
371 // 63]. | 375 // 63]. |
372 const int kMaxQp = 127; | 376 const int kMaxQp = 127; |
373 // TODO(pbos): Investigate whether high-QP thresholds make sense for VP8. | 377 const int kBadQpThreshold = 95; |
374 // This effectively disables high QP as VP8 QP can't go above this | 378 quality_scaler_.Init( |
375 // threshold. | 379 kMaxQp / kLowQpThresholdDenominator, kBadQpThreshold, false); |
jackychen_
2016/01/26 15:03:57
LGTM.
You found framerate reduction bad for VP8
AlexG
2016/01/26 20:34:18
It is not bad, but I thought there is less differe
| |
376 const int kDisabledBadQpThreshold = kMaxQp + 1; | |
377 quality_scaler_.Init(kMaxQp / kLowQpThresholdDenominator, | |
378 kDisabledBadQpThreshold, true); | |
379 } else if (codecType_ == kVideoCodecH264) { | 380 } else if (codecType_ == kVideoCodecH264) { |
380 // H264 QP is in the range [0, 51]. | 381 // H264 QP is in the range [0, 51]. |
381 const int kMaxQp = 51; | 382 const int kMaxQp = 51; |
382 const int kBadQpThreshold = 40; | 383 const int kBadQpThreshold = 40; |
383 quality_scaler_.Init(kMaxQp / kLowQpThresholdDenominator, kBadQpThreshold, | 384 quality_scaler_.Init( |
384 false); | 385 kMaxQp / kLowQpThresholdDenominator, kBadQpThreshold, false); |
385 } else { | 386 } else { |
386 // When adding codec support to additional hardware codecs, also configure | 387 // When adding codec support to additional hardware codecs, also configure |
387 // their QP thresholds for scaling. | 388 // their QP thresholds for scaling. |
388 RTC_NOTREACHED() << "Unsupported codec without configured QP thresholds."; | 389 RTC_NOTREACHED() << "Unsupported codec without configured QP thresholds."; |
389 } | 390 } |
390 quality_scaler_.SetMinResolution(kMinWidth, kMinHeight); | 391 quality_scaler_.SetMinResolution(kMinWidth, kMinHeight); |
391 quality_scaler_.ReportFramerate(codec_settings->maxFramerate); | 392 quality_scaler_.ReportFramerate(codec_settings->maxFramerate); |
392 } | 393 } |
393 return codec_thread_->Invoke<int32_t>( | 394 return codec_thread_->Invoke<int32_t>( |
394 Bind(&MediaCodecVideoEncoder::InitEncodeOnCodecThread, | 395 Bind(&MediaCodecVideoEncoder::InitEncodeOnCodecThread, |
(...skipping 27 matching lines...) Expand all Loading... | |
422 Bind(&MediaCodecVideoEncoder::ReleaseOnCodecThread, this)); | 423 Bind(&MediaCodecVideoEncoder::ReleaseOnCodecThread, this)); |
423 } | 424 } |
424 | 425 |
425 int32_t MediaCodecVideoEncoder::SetChannelParameters(uint32_t /* packet_loss */, | 426 int32_t MediaCodecVideoEncoder::SetChannelParameters(uint32_t /* packet_loss */, |
426 int64_t /* rtt */) { | 427 int64_t /* rtt */) { |
427 return WEBRTC_VIDEO_CODEC_OK; | 428 return WEBRTC_VIDEO_CODEC_OK; |
428 } | 429 } |
429 | 430 |
430 int32_t MediaCodecVideoEncoder::SetRates(uint32_t new_bit_rate, | 431 int32_t MediaCodecVideoEncoder::SetRates(uint32_t new_bit_rate, |
431 uint32_t frame_rate) { | 432 uint32_t frame_rate) { |
432 if (scale_) | |
433 quality_scaler_.ReportFramerate(frame_rate); | |
434 | |
435 return codec_thread_->Invoke<int32_t>( | 433 return codec_thread_->Invoke<int32_t>( |
436 Bind(&MediaCodecVideoEncoder::SetRatesOnCodecThread, | 434 Bind(&MediaCodecVideoEncoder::SetRatesOnCodecThread, |
437 this, | 435 this, |
438 new_bit_rate, | 436 new_bit_rate, |
439 frame_rate)); | 437 frame_rate)); |
440 } | 438 } |
441 | 439 |
442 void MediaCodecVideoEncoder::OnMessage(rtc::Message* msg) { | 440 void MediaCodecVideoEncoder::OnMessage(rtc::Message* msg) { |
443 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 441 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
444 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 442 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
(...skipping 46 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
491 } | 489 } |
492 | 490 |
493 width_ = width; | 491 width_ = width; |
494 height_ = height; | 492 height_ = height; |
495 last_set_bitrate_kbps_ = kbps; | 493 last_set_bitrate_kbps_ = kbps; |
496 last_set_fps_ = (fps < MAX_VIDEO_FPS) ? fps : MAX_VIDEO_FPS; | 494 last_set_fps_ = (fps < MAX_VIDEO_FPS) ? fps : MAX_VIDEO_FPS; |
497 yuv_size_ = width_ * height_ * 3 / 2; | 495 yuv_size_ = width_ * height_ * 3 / 2; |
498 frames_received_ = 0; | 496 frames_received_ = 0; |
499 frames_encoded_ = 0; | 497 frames_encoded_ = 0; |
500 frames_dropped_ = 0; | 498 frames_dropped_ = 0; |
499 consecutive_full_queue_frame_drops_ = 0; | |
501 frames_in_queue_ = 0; | 500 frames_in_queue_ = 0; |
502 current_timestamp_us_ = 0; | 501 current_timestamp_us_ = 0; |
503 start_time_ms_ = GetCurrentTimeMs(); | 502 start_time_ms_ = GetCurrentTimeMs(); |
504 current_frames_ = 0; | 503 current_frames_ = 0; |
505 current_bytes_ = 0; | 504 current_bytes_ = 0; |
506 current_acc_qp_ = 0; | 505 current_acc_qp_ = 0; |
507 current_encoding_time_ms_ = 0; | 506 current_encoding_time_ms_ = 0; |
508 last_input_timestamp_ms_ = -1; | 507 last_input_timestamp_ms_ = -1; |
509 last_output_timestamp_ms_ = -1; | 508 last_output_timestamp_ms_ = -1; |
510 output_timestamp_ = 0; | 509 output_timestamp_ = 0; |
(...skipping 82 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
593 if (frames_encoded_ < kMaxEncodedLogFrames) { | 592 if (frames_encoded_ < kMaxEncodedLogFrames) { |
594 ALOGD << "Encoder frame in # " << (frames_received_ - 1) << ". TS: " << | 593 ALOGD << "Encoder frame in # " << (frames_received_ - 1) << ". TS: " << |
595 (int)(current_timestamp_us_ / 1000) << ". Q: " << frames_in_queue_ << | 594 (int)(current_timestamp_us_ / 1000) << ". Q: " << frames_in_queue_ << |
596 ". Fps: " << last_set_fps_ << ". Kbps: " << last_set_bitrate_kbps_; | 595 ". Fps: " << last_set_fps_ << ". Kbps: " << last_set_bitrate_kbps_; |
597 } | 596 } |
598 | 597 |
599 if (drop_next_input_frame_) { | 598 if (drop_next_input_frame_) { |
600 ALOGW << "Encoder drop frame - failed callback."; | 599 ALOGW << "Encoder drop frame - failed callback."; |
601 drop_next_input_frame_ = false; | 600 drop_next_input_frame_ = false; |
602 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; | 601 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; |
602 frames_dropped_++; | |
pbos-webrtc
2016/01/26 14:48:57
move frames_dropped_ into OnDroppedFrame() ?
AlexG
2016/01/26 20:34:18
OnDroppedFrame is an override - I think it may be
pbos-webrtc
2016/01/27 15:55:49
Should we rename frames_dropped_ to frames_dropped
| |
603 OnDroppedFrame(); | 603 OnDroppedFrame(); |
604 return WEBRTC_VIDEO_CODEC_OK; | 604 return WEBRTC_VIDEO_CODEC_OK; |
605 } | 605 } |
606 | 606 |
607 RTC_CHECK(frame_types->size() == 1) << "Unexpected stream count"; | 607 RTC_CHECK(frame_types->size() == 1) << "Unexpected stream count"; |
608 | 608 |
609 // Check if we accumulated too many frames in encoder input buffers | 609 // Check if we accumulated too many frames in encoder input buffers |
610 // or the encoder latency exceeds 70 ms and drop frame if so. | 610 // or the encoder latency exceeds 70 ms and drop frame if so. |
611 if (frames_in_queue_ > 0 && last_input_timestamp_ms_ >= 0) { | 611 if (frames_in_queue_ > 0 && last_input_timestamp_ms_ >= 0) { |
612 int encoder_latency_ms = last_input_timestamp_ms_ - | 612 int encoder_latency_ms = last_input_timestamp_ms_ - |
613 last_output_timestamp_ms_; | 613 last_output_timestamp_ms_; |
614 if (frames_in_queue_ > MAX_ENCODER_Q_SIZE || | 614 if (frames_in_queue_ > MAX_ENCODER_Q_SIZE || |
615 encoder_latency_ms > MAX_ENCODER_LATENCY_MS) { | 615 encoder_latency_ms > MAX_ENCODER_LATENCY_MS) { |
616 ALOGD << "Drop frame - encoder is behind by " << encoder_latency_ms << | 616 ALOGD << "Drop frame - encoder is behind by " << encoder_latency_ms << |
617 " ms. Q size: " << frames_in_queue_; | 617 " ms. Q size: " << frames_in_queue_ << ". Consecutive drops: " << |
618 consecutive_full_queue_frame_drops_; | |
618 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; | 619 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; |
620 consecutive_full_queue_frame_drops_++; | |
621 if (consecutive_full_queue_frame_drops_ >= | |
622 ENCODER_STALL_FRAMEDROP_THRESHOLD) { | |
623 ALOGE << "Encoder got stuck. Reset."; | |
624 ResetCodecOnCodecThread(); | |
625 return WEBRTC_VIDEO_CODEC_ERROR; | |
626 } | |
627 frames_dropped_++; | |
619 OnDroppedFrame(); | 628 OnDroppedFrame(); |
620 return WEBRTC_VIDEO_CODEC_OK; | 629 return WEBRTC_VIDEO_CODEC_OK; |
621 } | 630 } |
622 } | 631 } |
632 consecutive_full_queue_frame_drops_ = 0; | |
623 | 633 |
624 VideoFrame input_frame = frame; | 634 VideoFrame input_frame = frame; |
625 if (scale_) { | 635 if (scale_) { |
626 // Check framerate before spatial resolution change. | 636 // Check framerate before spatial resolution change. |
627 quality_scaler_.OnEncodeFrame(frame); | 637 quality_scaler_.OnEncodeFrame(frame); |
628 const webrtc::QualityScaler::Resolution scaled_resolution = | 638 const webrtc::QualityScaler::Resolution scaled_resolution = |
629 quality_scaler_.GetScaledResolution(); | 639 quality_scaler_.GetScaledResolution(); |
630 if (scaled_resolution.width != frame.width() || | 640 if (scaled_resolution.width != frame.width() || |
631 scaled_resolution.height != frame.height()) { | 641 scaled_resolution.height != frame.height()) { |
632 if (frame.native_handle() != nullptr) { | 642 if (frame.native_handle() != nullptr) { |
(...skipping 20 matching lines...) Expand all Loading... | |
653 | 663 |
654 const bool key_frame = frame_types->front() != webrtc::kVideoFrameDelta; | 664 const bool key_frame = frame_types->front() != webrtc::kVideoFrameDelta; |
655 bool encode_status = true; | 665 bool encode_status = true; |
656 if (!input_frame.native_handle()) { | 666 if (!input_frame.native_handle()) { |
657 int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_, | 667 int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_, |
658 j_dequeue_input_buffer_method_); | 668 j_dequeue_input_buffer_method_); |
659 CHECK_EXCEPTION(jni); | 669 CHECK_EXCEPTION(jni); |
660 if (j_input_buffer_index == -1) { | 670 if (j_input_buffer_index == -1) { |
661 // Video codec falls behind - no input buffer available. | 671 // Video codec falls behind - no input buffer available. |
662 ALOGW << "Encoder drop frame - no input buffers available"; | 672 ALOGW << "Encoder drop frame - no input buffers available"; |
673 frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin()); | |
663 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; | 674 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; |
664 frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin()); | 675 frames_dropped_++; |
676 OnDroppedFrame(); | |
665 return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887. | 677 return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887. |
666 } | 678 } |
667 if (j_input_buffer_index == -2) { | 679 if (j_input_buffer_index == -2) { |
668 ResetCodecOnCodecThread(); | 680 ResetCodecOnCodecThread(); |
669 return WEBRTC_VIDEO_CODEC_ERROR; | 681 return WEBRTC_VIDEO_CODEC_ERROR; |
670 } | 682 } |
671 encode_status = EncodeByteBufferOnCodecThread(jni, key_frame, input_frame, | 683 encode_status = EncodeByteBufferOnCodecThread(jni, key_frame, input_frame, |
672 j_input_buffer_index); | 684 j_input_buffer_index); |
673 } else { | 685 } else { |
674 encode_status = EncodeTextureOnCodecThread(jni, key_frame, input_frame); | 686 encode_status = EncodeTextureOnCodecThread(jni, key_frame, input_frame); |
(...skipping 133 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
808 | 820 |
809 int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate, | 821 int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate, |
810 uint32_t frame_rate) { | 822 uint32_t frame_rate) { |
811 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 823 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
812 frame_rate = (frame_rate < MAX_ALLOWED_VIDEO_FPS) ? | 824 frame_rate = (frame_rate < MAX_ALLOWED_VIDEO_FPS) ? |
813 frame_rate : MAX_ALLOWED_VIDEO_FPS; | 825 frame_rate : MAX_ALLOWED_VIDEO_FPS; |
814 if (last_set_bitrate_kbps_ == new_bit_rate && | 826 if (last_set_bitrate_kbps_ == new_bit_rate && |
815 last_set_fps_ == frame_rate) { | 827 last_set_fps_ == frame_rate) { |
816 return WEBRTC_VIDEO_CODEC_OK; | 828 return WEBRTC_VIDEO_CODEC_OK; |
817 } | 829 } |
830 if (scale_) { | |
831 quality_scaler_.ReportFramerate(frame_rate); | |
832 } | |
818 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 833 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
819 ScopedLocalRefFrame local_ref_frame(jni); | 834 ScopedLocalRefFrame local_ref_frame(jni); |
820 if (new_bit_rate > 0) { | 835 if (new_bit_rate > 0) { |
821 last_set_bitrate_kbps_ = new_bit_rate; | 836 last_set_bitrate_kbps_ = new_bit_rate; |
822 } | 837 } |
823 if (frame_rate > 0) { | 838 if (frame_rate > 0) { |
824 last_set_fps_ = frame_rate; | 839 last_set_fps_ = frame_rate; |
825 } | 840 } |
826 bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_, | 841 bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_, |
827 j_set_rates_method_, | 842 j_set_rates_method_, |
(...skipping 265 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1093 if (head[3] != 0x01) { // got 000000xx | 1108 if (head[3] != 0x01) { // got 000000xx |
1094 head++; // xx != 1, continue searching. | 1109 head++; // xx != 1, continue searching. |
1095 continue; | 1110 continue; |
1096 } | 1111 } |
1097 return (int32_t)(head - buffer); | 1112 return (int32_t)(head - buffer); |
1098 } | 1113 } |
1099 return -1; | 1114 return -1; |
1100 } | 1115 } |
1101 | 1116 |
1102 void MediaCodecVideoEncoder::OnDroppedFrame() { | 1117 void MediaCodecVideoEncoder::OnDroppedFrame() { |
1103 frames_dropped_++; | |
1104 // Report dropped frame to quality_scaler_. | 1118 // Report dropped frame to quality_scaler_. |
1105 if (scale_) | 1119 if (scale_) |
1106 quality_scaler_.ReportDroppedFrame(); | 1120 quality_scaler_.ReportDroppedFrame(); |
1107 } | 1121 } |
1108 | 1122 |
1109 int MediaCodecVideoEncoder::GetTargetFramerate() { | 1123 int MediaCodecVideoEncoder::GetTargetFramerate() { |
1110 return scale_ ? quality_scaler_.GetTargetFramerate() : -1; | 1124 return scale_ ? quality_scaler_.GetTargetFramerate() : -1; |
1111 } | 1125 } |
1112 | 1126 |
1113 const char* MediaCodecVideoEncoder::ImplementationName() const { | 1127 const char* MediaCodecVideoEncoder::ImplementationName() const { |
(...skipping 92 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1206 } | 1220 } |
1207 | 1221 |
1208 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( | 1222 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( |
1209 webrtc::VideoEncoder* encoder) { | 1223 webrtc::VideoEncoder* encoder) { |
1210 ALOGD << "Destroy video encoder."; | 1224 ALOGD << "Destroy video encoder."; |
1211 delete encoder; | 1225 delete encoder; |
1212 } | 1226 } |
1213 | 1227 |
1214 } // namespace webrtc_jni | 1228 } // namespace webrtc_jni |
1215 | 1229 |
OLD | NEW |