OLD | NEW |
---|---|
1 /* | 1 /* |
2 * libjingle | 2 * libjingle |
3 * Copyright 2015 Google Inc. | 3 * Copyright 2015 Google Inc. |
4 * | 4 * |
5 * Redistribution and use in source and binary forms, with or without | 5 * Redistribution and use in source and binary forms, with or without |
6 * modification, are permitted provided that the following conditions are met: | 6 * modification, are permitted provided that the following conditions are met: |
7 * | 7 * |
8 * 1. Redistributions of source code must retain the above copyright notice, | 8 * 1. Redistributions of source code must retain the above copyright notice, |
9 * this list of conditions and the following disclaimer. | 9 * this list of conditions and the following disclaimer. |
10 * 2. Redistributions in binary form must reproduce the above copyright notice, | 10 * 2. Redistributions in binary form must reproduce the above copyright notice, |
(...skipping 53 matching lines...) | |
64 | 64 |
65 // H.264 start code length. | 65 // H.264 start code length. |
66 #define H264_SC_LENGTH 4 | 66 #define H264_SC_LENGTH 4 |
67 // Maximum allowed NALUs in one output frame. | 67 // Maximum allowed NALUs in one output frame. |
68 #define MAX_NALUS_PERFRAME 32 | 68 #define MAX_NALUS_PERFRAME 32 |
69 // Maximum supported HW video encoder resolution. | 69 // Maximum supported HW video encoder resolution. |
70 #define MAX_VIDEO_WIDTH 1280 | 70 #define MAX_VIDEO_WIDTH 1280 |
71 #define MAX_VIDEO_HEIGHT 1280 | 71 #define MAX_VIDEO_HEIGHT 1280 |
72 // Maximum supported HW video encoder fps. | 72 // Maximum supported HW video encoder fps. |
73 #define MAX_VIDEO_FPS 30 | 73 #define MAX_VIDEO_FPS 30 |
74 // Maximum allowed fps value in SetRates() call. | |
75 #define MAX_ALLOWED_VIDEO_FPS 60 | |
76 // Maximum allowed frames in encoder input queue. | |
77 #define MAX_ENCODER_Q_SIZE 2 | |
78 // Maximum allowed latency in ms. | |
79 #define MAX_LATENCY_MS 70 | |
80 | |
81 | |
82 // Logging macros. | |
83 #define TAG_ENCODER "MediaCodecVideoEncoder" | |
84 #ifdef TRACK_BUFFER_TIMING | |
85 #define ALOGV(...) \ |
86   __android_log_print(ANDROID_LOG_VERBOSE, TAG_ENCODER, __VA_ARGS__) |
87 #else | |
88 #define ALOGV(...) | |
89 #endif | |
90 #define ALOGD LOG_TAG(rtc::LS_INFO, TAG_ENCODER) | |
91 #define ALOGW LOG_TAG(rtc::LS_WARNING, TAG_ENCODER) | |
92 #define ALOGE LOG_TAG(rtc::LS_ERROR, TAG_ENCODER) | |
74 | 93 |
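
A note on the two logging styles above: ALOGV is printf-style and compiles away entirely unless TRACK_BUFFER_TIMING is defined, while ALOGD/ALOGW/ALOGE wrap rtc's stream-style logger via LOG_TAG. That asymmetry is why this CL rewrites the per-frame ALOGV calls further down as ALOGD stream insertions. A minimal self-contained sketch of the ALOGV pattern (Android NDK logging assumed; LogFrameIn is a hypothetical call site):

```cpp
#include <android/log.h>

#define TAG_ENCODER "MediaCodecVideoEncoder"

#ifdef TRACK_BUFFER_TIMING
// Debug builds: forward printf-style arguments to the Android log.
#define ALOGV(...) \
  __android_log_print(ANDROID_LOG_VERBOSE, TAG_ENCODER, __VA_ARGS__)
#else
// Default builds: expand to nothing, so call sites compile away.
#define ALOGV(...)
#endif

// Hypothetical call site: formats only when TRACK_BUFFER_TIMING is set.
void LogFrameIn(int frame_index, int queue_size) {
  ALOGV("Encoder frame in # %d. Q: %d", frame_index, queue_size);
}
```
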
75 // MediaCodecVideoEncoder is a webrtc::VideoEncoder implementation that uses | 94 // MediaCodecVideoEncoder is a webrtc::VideoEncoder implementation that uses |
76 // Android's MediaCodec SDK API behind the scenes to implement (hopefully) | 95 // Android's MediaCodec SDK API behind the scenes to implement (hopefully) |
77 // HW-backed video encode. This C++ class is implemented as a very thin shim, | 96 // HW-backed video encode. This C++ class is implemented as a very thin shim, |
78 // delegating all of the interesting work to org.webrtc.MediaCodecVideoEncoder. | 97 // delegating all of the interesting work to org.webrtc.MediaCodecVideoEncoder. |
79 // MediaCodecVideoEncoder is created, operated, and destroyed on a single | 98 // MediaCodecVideoEncoder is created, operated, and destroyed on a single |
80 // thread, currently the libjingle Worker thread. | 99 // thread, currently the libjingle Worker thread. |
81 class MediaCodecVideoEncoder : public webrtc::VideoEncoder, | 100 class MediaCodecVideoEncoder : public webrtc::VideoEncoder, |
82 public rtc::MessageHandler { | 101 public rtc::MessageHandler { |
83 public: | 102 public: |
(...skipping 115 matching lines...) | |
199 int last_set_bitrate_kbps_; // Last-requested bitrate in kbps. | 218 int last_set_bitrate_kbps_; // Last-requested bitrate in kbps. |
200 int last_set_fps_; // Last-requested frame rate. | 219 int last_set_fps_; // Last-requested frame rate. |
201 int64_t current_timestamp_us_; // Current frame timestamps in us. | 220 int64_t current_timestamp_us_; // Current frame timestamps in us. |
202 int frames_received_; // Number of frames received by encoder. | 221 int frames_received_; // Number of frames received by encoder. |
203 int frames_encoded_; // Number of frames encoded by encoder. | 222 int frames_encoded_; // Number of frames encoded by encoder. |
204 int frames_dropped_; // Number of frames dropped by encoder. | 223 int frames_dropped_; // Number of frames dropped by encoder. |
205 int frames_in_queue_; // Number of frames in encoder queue. | 224 int frames_in_queue_; // Number of frames in encoder queue. |
206 int64_t start_time_ms_; // Start time for statistics. | 225 int64_t start_time_ms_; // Start time for statistics. |
207 int current_frames_; // Number of frames in the current statistics interval. | 226 int current_frames_; // Number of frames in the current statistics interval. |
208 int current_bytes_; // Encoded bytes in the current statistics interval. | 227 int current_bytes_; // Encoded bytes in the current statistics interval. |
228 int current_acc_qp_; // Accumulated QP in the current statistics interval. | |
209 int current_encoding_time_ms_; // Overall encoding time in the current second | 229 int current_encoding_time_ms_; // Overall encoding time in the current second |
210 int64_t last_input_timestamp_ms_; // Timestamp of last received yuv frame. | 230 int64_t last_input_timestamp_ms_; // Timestamp of last received yuv frame. |
211 int64_t last_output_timestamp_ms_; // Timestamp of last encoded frame. | 231 int64_t last_output_timestamp_ms_; // Timestamp of last encoded frame. |
212 std::vector<int32_t> timestamps_; // Video frames timestamp queue. | 232 std::vector<int32_t> timestamps_; // Video frames timestamp queue. |
213 std::vector<int64_t> render_times_ms_; // Video frames render time queue. | 233 std::vector<int64_t> render_times_ms_; // Video frames render time queue. |
214 std::vector<int64_t> frame_rtc_times_ms_; // Time when video frame is sent to | 234 std::vector<int64_t> frame_rtc_times_ms_; // Time when video frame is sent to |
215 // encoder input. | 235 // encoder input. |
216 int32_t output_timestamp_; // Last output frame timestamp from timestamps_ Q. | 236 int32_t output_timestamp_; // Last output frame timestamp from timestamps_ Q. |
217 int64_t output_render_time_ms_; // Last output frame render time from | 237 int64_t output_render_time_ms_; // Last output frame render time from |
218 // render_times_ms_ queue. | 238 // render_times_ms_ queue. |
(...skipping 241 matching lines...) | |
460 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 480 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
461 ScopedLocalRefFrame local_ref_frame(jni); | 481 ScopedLocalRefFrame local_ref_frame(jni); |
462 | 482 |
463 ALOGD << "InitEncodeOnCodecThread Type: " << (int)codecType_ << ", " << | 483 ALOGD << "InitEncodeOnCodecThread Type: " << (int)codecType_ << ", " << |
464 width << " x " << height << ". Bitrate: " << kbps << | 484 width << " x " << height << ". Bitrate: " << kbps << |
465 " kbps. Fps: " << fps; | 485 " kbps. Fps: " << fps; |
466 if (kbps == 0) { | 486 if (kbps == 0) { |
467 kbps = last_set_bitrate_kbps_; | 487 kbps = last_set_bitrate_kbps_; |
468 } | 488 } |
469 if (fps == 0) { | 489 if (fps == 0) { |
470 fps = last_set_fps_; | 490 fps = MAX_VIDEO_FPS; |
471 } | 491 } |
472 | 492 |
473 width_ = width; | 493 width_ = width; |
474 height_ = height; | 494 height_ = height; |
475 last_set_bitrate_kbps_ = kbps; | 495 last_set_bitrate_kbps_ = kbps; |
476 last_set_fps_ = fps; | 496 last_set_fps_ = (fps < MAX_VIDEO_FPS) ? fps : MAX_VIDEO_FPS; |
477 yuv_size_ = width_ * height_ * 3 / 2; | 497 yuv_size_ = width_ * height_ * 3 / 2; |
478 frames_received_ = 0; | 498 frames_received_ = 0; |
479 frames_encoded_ = 0; | 499 frames_encoded_ = 0; |
480 frames_dropped_ = 0; | 500 frames_dropped_ = 0; |
481 frames_in_queue_ = 0; | 501 frames_in_queue_ = 0; |
482 current_timestamp_us_ = 0; | 502 current_timestamp_us_ = 0; |
483 start_time_ms_ = GetCurrentTimeMs(); | 503 start_time_ms_ = GetCurrentTimeMs(); |
484 current_frames_ = 0; | 504 current_frames_ = 0; |
485 current_bytes_ = 0; | 505 current_bytes_ = 0; |
506 current_acc_qp_ = 0; | |
486 current_encoding_time_ms_ = 0; | 507 current_encoding_time_ms_ = 0; |
487 last_input_timestamp_ms_ = -1; | 508 last_input_timestamp_ms_ = -1; |
488 last_output_timestamp_ms_ = -1; | 509 last_output_timestamp_ms_ = -1; |
489 output_timestamp_ = 0; | 510 output_timestamp_ = 0; |
490 output_render_time_ms_ = 0; | 511 output_render_time_ms_ = 0; |
491 timestamps_.clear(); | 512 timestamps_.clear(); |
492 render_times_ms_.clear(); | 513 render_times_ms_.clear(); |
493 frame_rtc_times_ms_.clear(); | 514 frame_rtc_times_ms_.clear(); |
494 drop_next_input_frame_ = false; | 515 drop_next_input_frame_ = false; |
495 use_surface_ = use_surface; | 516 use_surface_ = use_surface; |
(...skipping 66 matching lines...) | |
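
The init path above changes two fallbacks: a zero kbps still reuses last_set_bitrate_kbps_, but a zero fps now defaults to MAX_VIDEO_FPS rather than the cached last_set_fps_, and whatever fps is chosen is capped at MAX_VIDEO_FPS before being stored. A small sketch of that resolution logic (ResolveInitParams is a hypothetical helper, not in the CL):

```cpp
#include <algorithm>

constexpr int kMaxVideoFps = 30;  // mirrors MAX_VIDEO_FPS

// Hypothetical helper tracing the new InitEncodeOnCodecThread fallbacks.
// Zero means "unspecified" for both parameters.
void ResolveInitParams(int* kbps, int* fps, int last_set_bitrate_kbps) {
  if (*kbps == 0)
    *kbps = last_set_bitrate_kbps;  // Reuse the last requested bitrate.
  if (*fps == 0)
    *fps = kMaxVideoFps;  // New behavior: fall back to the HW maximum.
  *fps = std::min(*fps, kMaxVideoFps);  // Stored fps is capped either way.
}
```
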
562 | 583 |
563 if (!inited_) { | 584 if (!inited_) { |
564 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 585 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
565 } | 586 } |
566 | 587 |
567 frames_received_++; | 588 frames_received_++; |
568 if (!DeliverPendingOutputs(jni)) { | 589 if (!DeliverPendingOutputs(jni)) { |
569 if (!ResetCodecOnCodecThread()) | 590 if (!ResetCodecOnCodecThread()) |
570 return WEBRTC_VIDEO_CODEC_ERROR; | 591 return WEBRTC_VIDEO_CODEC_ERROR; |
571 } | 592 } |
593 if (frames_encoded_ < kMaxEncodedLogFrames) { | |
594 ALOGD << "Encoder frame in # " << (frames_received_ - 1) << ". TS: " << | |
595 (int)(current_timestamp_us_ / 1000) << ". Q: " << frames_in_queue_ << | |
596 ". Fps: " << last_set_fps_ << ". Kbps: " << last_set_bitrate_kbps_; | |
597 } | |
572 | 598 |
573 if (drop_next_input_frame_) { | 599 if (drop_next_input_frame_) { |
574 ALOGW << "Encoder drop frame - failed callback."; | 600 ALOGW << "Encoder drop frame - failed callback."; |
575 drop_next_input_frame_ = false; | 601 drop_next_input_frame_ = false; |
602 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; | |
603 frames_dropped_++; | |
604 // Report dropped frame to quality_scaler_. | |
605 OnDroppedFrame(); | |
576 return WEBRTC_VIDEO_CODEC_OK; | 606 return WEBRTC_VIDEO_CODEC_OK; |
577 } | 607 } |
578 | 608 |
579 RTC_CHECK(frame_types->size() == 1) << "Unexpected stream count"; | 609 RTC_CHECK(frame_types->size() == 1) << "Unexpected stream count"; |
580 | 610 |
611 // Check if we accumulated too many frames in encoder input buffers | |
612 // or the encoder latency exceeds MAX_LATENCY_MS and drop the frame if so. |
613 if (frames_in_queue_ > 0 && last_input_timestamp_ms_ >= 0) { | |
614 int encoder_latency_ms = last_input_timestamp_ms_ - | |
615 last_output_timestamp_ms_; | |
616 if (frames_in_queue_ > MAX_ENCODER_Q_SIZE || | |
617 encoder_latency_ms > MAX_LATENCY_MS) { | |
618 ALOGD << "Drop frame - encoder is behind by " << encoder_latency_ms << | |
619 " ms. Q size: " << frames_in_queue_; | |
620 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; | |
621 frames_dropped_++; | |
pbos-webrtc
2016/01/15 10:54:04
Can we move frames_dropped_++ into OnDroppedFrame()?
AlexG
2016/01/15 20:48:11
Done.
| |
622 // Report dropped frame to quality_scaler_. | |
623 OnDroppedFrame(); | |
perkj_webrtc
2016/01/15 07:33:08
OnDropped frame now happens before quality_scaler_
pbos-webrtc
2016/01/15 10:54:04
I think this is fine from glancing at it. OnEncode
AlexG
2016/01/15 20:48:10
Acknowledged.
| |
624 return WEBRTC_VIDEO_CODEC_OK; | |
625 } | |
626 } | |
627 | |
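
The relocated check above drops an input frame when the codec is visibly behind: either more than MAX_ENCODER_Q_SIZE frames sit in the input queue, or the newest input timestamp runs more than MAX_LATENCY_MS ahead of the newest output. It now also advances current_timestamp_us_ on the drop path so the synthetic timeline stays evenly spaced. A self-contained sketch of the heuristic (ShouldDropFrame is a hypothetical standalone form of the inline check):

```cpp
#include <cstdint>

constexpr int kMaxEncoderQSize = 2;  // mirrors MAX_ENCODER_Q_SIZE
constexpr int kMaxLatencyMs = 70;    // mirrors MAX_LATENCY_MS

// Hypothetical standalone form of the inline drop check above.
bool ShouldDropFrame(int frames_in_queue,
                     int64_t last_input_timestamp_ms,
                     int64_t last_output_timestamp_ms) {
  // Nothing queued yet, or no input timestamp recorded: keep the frame.
  if (frames_in_queue == 0 || last_input_timestamp_ms < 0)
    return false;
  // How far the newest input runs ahead of the newest output.
  const int64_t encoder_latency_ms =
      last_input_timestamp_ms - last_output_timestamp_ms;
  return frames_in_queue > kMaxEncoderQSize ||
         encoder_latency_ms > kMaxLatencyMs;
}
```
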
581 VideoFrame input_frame = frame; | 628 VideoFrame input_frame = frame; |
582 if (scale_) { | 629 if (scale_) { |
583 // Check framerate before spatial resolution change. | 630 // Check framerate before spatial resolution change. |
584 quality_scaler_.OnEncodeFrame(frame); | 631 quality_scaler_.OnEncodeFrame(frame); |
585 const webrtc::QualityScaler::Resolution scaled_resolution = | 632 const webrtc::QualityScaler::Resolution scaled_resolution = |
586 quality_scaler_.GetScaledResolution(); | 633 quality_scaler_.GetScaledResolution(); |
587 if (scaled_resolution.width != frame.width() || | 634 if (scaled_resolution.width != frame.width() || |
588 scaled_resolution.height != frame.height()) { | 635 scaled_resolution.height != frame.height()) { |
589 if (frame.native_handle() != nullptr) { | 636 if (frame.native_handle() != nullptr) { |
590 rtc::scoped_refptr<webrtc::VideoFrameBuffer> scaled_buffer( | 637 rtc::scoped_refptr<webrtc::VideoFrameBuffer> scaled_buffer( |
591 static_cast<AndroidTextureBuffer*>( | 638 static_cast<AndroidTextureBuffer*>( |
592 frame.video_frame_buffer().get())->ScaleAndRotate( | 639 frame.video_frame_buffer().get())->ScaleAndRotate( |
593 scaled_resolution.width, | 640 scaled_resolution.width, |
594 scaled_resolution.height, | 641 scaled_resolution.height, |
595 webrtc::kVideoRotation_0)); | 642 webrtc::kVideoRotation_0)); |
596 input_frame.set_video_frame_buffer(scaled_buffer); | 643 input_frame.set_video_frame_buffer(scaled_buffer); |
597 } else { | 644 } else { |
598 input_frame = quality_scaler_.GetScaledFrame(frame); | 645 input_frame = quality_scaler_.GetScaledFrame(frame); |
599 } | 646 } |
600 } | 647 } |
601 } | 648 } |
602 | 649 |
603 if (!MaybeReconfigureEncoderOnCodecThread(input_frame)) { | 650 if (!MaybeReconfigureEncoderOnCodecThread(input_frame)) { |
604 ALOGE << "Failed to reconfigure encoder."; | 651 ALOGE << "Failed to reconfigure encoder."; |
605 return WEBRTC_VIDEO_CODEC_ERROR; | 652 return WEBRTC_VIDEO_CODEC_ERROR; |
606 } | 653 } |
607 | 654 |
608 // Check if we accumulated too many frames in encoder input buffers | 655 // Save time when input frame is sent to the encoder input. |
609 // or the encoder latency exceeds 70 ms and drop frame if so. | 656 frame_rtc_times_ms_.push_back(GetCurrentTimeMs()); |
610 if (frames_in_queue_ > 0 && last_input_timestamp_ms_ >= 0) { | |
611 int encoder_latency_ms = last_input_timestamp_ms_ - | |
612 last_output_timestamp_ms_; | |
613 if (frames_in_queue_ > 2 || encoder_latency_ms > 70) { | |
614 ALOGD << "Drop frame - encoder is behind by " << encoder_latency_ms << | |
615 " ms. Q size: " << frames_in_queue_; | |
616 frames_dropped_++; | |
617 // Report dropped frame to quality_scaler_. | |
618 OnDroppedFrame(); | |
619 return WEBRTC_VIDEO_CODEC_OK; | |
620 } | |
621 } | |
622 | 657 |
623 const bool key_frame = frame_types->front() != webrtc::kVideoFrameDelta; | 658 const bool key_frame = frame_types->front() != webrtc::kVideoFrameDelta; |
624 bool encode_status = true; | 659 bool encode_status = true; |
625 if (!input_frame.native_handle()) { | 660 if (!input_frame.native_handle()) { |
626 int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_, | 661 int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_, |
627 j_dequeue_input_buffer_method_); | 662 j_dequeue_input_buffer_method_); |
628 CHECK_EXCEPTION(jni); | 663 CHECK_EXCEPTION(jni); |
629 if (j_input_buffer_index == -1) { | 664 if (j_input_buffer_index == -1) { |
630 // Video codec falls behind - no input buffer available. | 665 // Video codec falls behind - no input buffer available. |
631 ALOGW << "Encoder drop frame - no input buffers available"; | 666 ALOGW << "Encoder drop frame - no input buffers available"; |
667 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; | |
632 frames_dropped_++; | 668 frames_dropped_++; |
669 frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin()); | |
633 // Report dropped frame to quality_scaler_. | 670 // Report dropped frame to quality_scaler_. |
634 OnDroppedFrame(); | 671 OnDroppedFrame(); |
635 return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887. | 672 return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887. |
636 } | 673 } |
637 if (j_input_buffer_index == -2) { | 674 if (j_input_buffer_index == -2) { |
638 ResetCodecOnCodecThread(); | 675 ResetCodecOnCodecThread(); |
639 return WEBRTC_VIDEO_CODEC_ERROR; | 676 return WEBRTC_VIDEO_CODEC_ERROR; |
640 } | 677 } |
641 encode_status = EncodeByteBufferOnCodecThread(jni, key_frame, input_frame, | 678 encode_status = EncodeByteBufferOnCodecThread(jni, key_frame, input_frame, |
642 j_input_buffer_index); | 679 j_input_buffer_index); |
643 } else { | 680 } else { |
644 encode_status = EncodeTextureOnCodecThread(jni, key_frame, input_frame); | 681 encode_status = EncodeTextureOnCodecThread(jni, key_frame, input_frame); |
645 } | 682 } |
646 | 683 |
647 if (!encode_status) { | 684 if (!encode_status) { |
648 ALOGE << "Failed encode frame with timestamp: " << input_frame.timestamp(); | 685 ALOGE << "Failed encode frame with timestamp: " << input_frame.timestamp(); |
649 ResetCodecOnCodecThread(); | 686 ResetCodecOnCodecThread(); |
650 return WEBRTC_VIDEO_CODEC_ERROR; | 687 return WEBRTC_VIDEO_CODEC_ERROR; |
651 } | 688 } |
652 | 689 |
653 last_input_timestamp_ms_ = | 690 last_input_timestamp_ms_ = |
654 current_timestamp_us_ / rtc::kNumMicrosecsPerMillisec; | 691 current_timestamp_us_ / rtc::kNumMicrosecsPerMillisec; |
655 frames_in_queue_++; | 692 frames_in_queue_++; |
656 | 693 |
657 // Save input image timestamps for later output | 694 // Save input image timestamps for later output |
658 timestamps_.push_back(input_frame.timestamp()); | 695 timestamps_.push_back(input_frame.timestamp()); |
659 render_times_ms_.push_back(input_frame.render_time_ms()); | 696 render_times_ms_.push_back(input_frame.render_time_ms()); |
660 frame_rtc_times_ms_.push_back(GetCurrentTimeMs()); | |
661 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; | 697 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; |
662 | 698 |
663 if (!DeliverPendingOutputs(jni)) { | 699 if (!DeliverPendingOutputs(jni)) { |
664 ALOGE << "Failed deliver pending outputs."; | 700 ALOGE << "Failed deliver pending outputs."; |
665 ResetCodecOnCodecThread(); | 701 ResetCodecOnCodecThread(); |
666 return WEBRTC_VIDEO_CODEC_ERROR; | 702 return WEBRTC_VIDEO_CODEC_ERROR; |
667 } | 703 } |
668 return WEBRTC_VIDEO_CODEC_OK; | 704 return WEBRTC_VIDEO_CODEC_OK; |
669 } | 705 } |
670 | 706 |
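
Throughout EncodeOnCodecThread, three parallel vectors (timestamps_, render_times_ms_, frame_rtc_times_ms_) carry per-frame metadata that DeliverPendingOutputs later pops front-first in lockstep; note that this CL pushes frame_rtc_times_ms_ before the dequeue attempt and erases the entry again on the no-input-buffer drop path so the queues never skew. A sketch of that lockstep pattern (PendingFrameQueues is a hypothetical wrapper around the three members):

```cpp
#include <cstdint>
#include <vector>

// Hypothetical wrapper over the encoder's three parallel FIFO vectors
// (timestamps_, render_times_ms_, frame_rtc_times_ms_ in the CL).
struct PendingFrameQueues {
  std::vector<int32_t> timestamps;
  std::vector<int64_t> render_times_ms;
  std::vector<int64_t> frame_rtc_times_ms;

  // Push one entry per queue when a frame enters the codec.
  void PushFrame(int32_t timestamp, int64_t render_time_ms, int64_t now_ms) {
    timestamps.push_back(timestamp);
    render_times_ms.push_back(render_time_ms);
    frame_rtc_times_ms.push_back(now_ms);
  }

  // Pop the oldest entry from every queue together; if the vectors ever
  // differ in length, output frames get mismatched metadata.
  void PopOldest() {
    timestamps.erase(timestamps.begin());
    render_times_ms.erase(render_times_ms.begin());
    frame_rtc_times_ms.erase(frame_rtc_times_ms.begin());
  }
};
```
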
(...skipping 27 matching lines...) | |
698 | 734 |
699 return InitEncodeOnCodecThread(width_, height_, 0, 0 , is_texture_frame) == | 735 return InitEncodeOnCodecThread(width_, height_, 0, 0 , is_texture_frame) == |
700 WEBRTC_VIDEO_CODEC_OK; | 736 WEBRTC_VIDEO_CODEC_OK; |
701 } | 737 } |
702 | 738 |
703 bool MediaCodecVideoEncoder::EncodeByteBufferOnCodecThread(JNIEnv* jni, | 739 bool MediaCodecVideoEncoder::EncodeByteBufferOnCodecThread(JNIEnv* jni, |
704 bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index) { | 740 bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index) { |
705 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 741 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
706 RTC_CHECK(!use_surface_); | 742 RTC_CHECK(!use_surface_); |
707 | 743 |
708 ALOGV("Encoder frame in # %d. TS: %lld. Q: %d", | |
709 frames_received_ - 1, current_timestamp_us_ / 1000, frames_in_queue_); | |
710 | |
711 jobject j_input_buffer = input_buffers_[input_buffer_index]; | 744 jobject j_input_buffer = input_buffers_[input_buffer_index]; |
712 uint8_t* yuv_buffer = | 745 uint8_t* yuv_buffer = |
713 reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer)); | 746 reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer)); |
714 CHECK_EXCEPTION(jni); | 747 CHECK_EXCEPTION(jni); |
715 RTC_CHECK(yuv_buffer) << "Indirect buffer??"; | 748 RTC_CHECK(yuv_buffer) << "Indirect buffer??"; |
716 RTC_CHECK(!libyuv::ConvertFromI420( | 749 RTC_CHECK(!libyuv::ConvertFromI420( |
717 frame.buffer(webrtc::kYPlane), frame.stride(webrtc::kYPlane), | 750 frame.buffer(webrtc::kYPlane), frame.stride(webrtc::kYPlane), |
718 frame.buffer(webrtc::kUPlane), frame.stride(webrtc::kUPlane), | 751 frame.buffer(webrtc::kUPlane), frame.stride(webrtc::kUPlane), |
719 frame.buffer(webrtc::kVPlane), frame.stride(webrtc::kVPlane), | 752 frame.buffer(webrtc::kVPlane), frame.stride(webrtc::kVPlane), |
720 yuv_buffer, width_, width_, height_, encoder_fourcc_)) | 753 yuv_buffer, width_, width_, height_, encoder_fourcc_)) |
(...skipping 55 matching lines...) | |
776 rtc::MessageQueueManager::Clear(this); | 809 rtc::MessageQueueManager::Clear(this); |
777 inited_ = false; | 810 inited_ = false; |
778 use_surface_ = false; | 811 use_surface_ = false; |
779 ALOGD << "EncoderReleaseOnCodecThread done."; | 812 ALOGD << "EncoderReleaseOnCodecThread done."; |
780 return WEBRTC_VIDEO_CODEC_OK; | 813 return WEBRTC_VIDEO_CODEC_OK; |
781 } | 814 } |
782 | 815 |
783 int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate, | 816 int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate, |
784 uint32_t frame_rate) { | 817 uint32_t frame_rate) { |
785 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 818 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
819 frame_rate = (frame_rate < MAX_ALLOWED_VIDEO_FPS) ? | |
820 frame_rate : MAX_ALLOWED_VIDEO_FPS; | |
786 if (last_set_bitrate_kbps_ == new_bit_rate && | 821 if (last_set_bitrate_kbps_ == new_bit_rate && |
787 last_set_fps_ == frame_rate) { | 822 last_set_fps_ == frame_rate) { |
788 return WEBRTC_VIDEO_CODEC_OK; | 823 return WEBRTC_VIDEO_CODEC_OK; |
789 } | 824 } |
790 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 825 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
791 ScopedLocalRefFrame local_ref_frame(jni); | 826 ScopedLocalRefFrame local_ref_frame(jni); |
792 if (new_bit_rate > 0) { | 827 if (new_bit_rate > 0) { |
793 last_set_bitrate_kbps_ = new_bit_rate; | 828 last_set_bitrate_kbps_ = new_bit_rate; |
794 } | 829 } |
795 if (frame_rate > 0) { | 830 if (frame_rate > 0) { |
(...skipping 72 matching lines...) | |
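
The new clamp at the top of SetRatesOnCodecThread caps the requested frame rate at MAX_ALLOWED_VIDEO_FPS (60) before it is compared against, or stored into, last_set_fps_, so an oversized SetRates() request can no longer drive the codec past its limit. Equivalent behavior as a tiny helper (ClampFrameRate is hypothetical; std::min stands in for the CL's ternary):

```cpp
#include <algorithm>
#include <cstdint>

constexpr uint32_t kMaxAllowedVideoFps = 60;  // mirrors MAX_ALLOWED_VIDEO_FPS

// Hypothetical helper equivalent to the CL's ternary clamp.
uint32_t ClampFrameRate(uint32_t requested_fps) {
  return std::min(requested_fps, kMaxAllowedVideoFps);
}
```
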
868 frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin()); | 903 frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin()); |
869 frames_in_queue_--; | 904 frames_in_queue_--; |
870 } | 905 } |
871 | 906 |
872 // Extract payload. | 907 // Extract payload. |
873 size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer); | 908 size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer); |
874 uint8_t* payload = reinterpret_cast<uint8_t*>( | 909 uint8_t* payload = reinterpret_cast<uint8_t*>( |
875 jni->GetDirectBufferAddress(j_output_buffer)); | 910 jni->GetDirectBufferAddress(j_output_buffer)); |
876 CHECK_EXCEPTION(jni); | 911 CHECK_EXCEPTION(jni); |
877 | 912 |
878 ALOGV("Encoder frame out # %d. Key: %d. Size: %d. TS: %lld." | 913 if (frames_encoded_ < kMaxEncodedLogFrames) { |
879 " Latency: %lld. EncTime: %lld", | 914 ALOGD << "Encoder frame out # " << frames_encoded_ << ". Key: " << |
880 frames_encoded_, key_frame, payload_size, | 915 key_frame << ". Size: " << payload_size << ". TS: " << |
perkj_webrtc
2016/01/15 07:33:08
Do you want to change key_frame ? "true": "false"
AlexG
2016/01/15 20:48:11
I think it will be a bit longer in the log: "false".
| |
881 last_output_timestamp_ms_, | 916 (int)last_output_timestamp_ms_ << ". Latency: " << |
882 last_input_timestamp_ms_ - last_output_timestamp_ms_, | 917 (int)(last_input_timestamp_ms_ - last_output_timestamp_ms_) << |
883 frame_encoding_time_ms); | 918 ". EncTime: " << frame_encoding_time_ms; |
884 | |
885 // Calculate and print encoding statistics - every 3 seconds. | |
886 frames_encoded_++; | |
887 current_frames_++; | |
888 current_bytes_ += payload_size; | |
889 current_encoding_time_ms_ += frame_encoding_time_ms; | |
890 int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_; | |
891 if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs && | |
892 current_frames_ > 0) { | |
893 ALOGD << "Encoded frames: " << frames_encoded_ << ". Bitrate: " << | |
894 (current_bytes_ * 8 / statistic_time_ms) << | |
895 ", target: " << last_set_bitrate_kbps_ << " kbps, fps: " << | |
896 ((current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms) | |
897 << ", encTime: " << | |
898 (current_encoding_time_ms_ / current_frames_) << " for last " << | |
899 statistic_time_ms << " ms."; | |
900 start_time_ms_ = GetCurrentTimeMs(); | |
901 current_frames_ = 0; | |
902 current_bytes_ = 0; | |
903 current_encoding_time_ms_ = 0; | |
904 } | 919 } |
905 | 920 |
906 // Callback - return encoded frame. | 921 // Callback - return encoded frame. |
907 int32_t callback_status = 0; | 922 int32_t callback_status = 0; |
908 if (callback_) { | 923 if (callback_) { |
909 scoped_ptr<webrtc::EncodedImage> image( | 924 scoped_ptr<webrtc::EncodedImage> image( |
910 new webrtc::EncodedImage(payload, payload_size, payload_size)); | 925 new webrtc::EncodedImage(payload, payload_size, payload_size)); |
911 image->_encodedWidth = width_; | 926 image->_encodedWidth = width_; |
912 image->_encodedHeight = height_; | 927 image->_encodedHeight = height_; |
913 image->_timeStamp = output_timestamp_; | 928 image->_timeStamp = output_timestamp_; |
(...skipping 45 matching lines...) | |
959 webrtc::RTPFragmentationHeader header; | 974 webrtc::RTPFragmentationHeader header; |
960 memset(&header, 0, sizeof(header)); | 975 memset(&header, 0, sizeof(header)); |
961 if (codecType_ == kVideoCodecVP8 || codecType_ == kVideoCodecVP9) { | 976 if (codecType_ == kVideoCodecVP8 || codecType_ == kVideoCodecVP9) { |
962 header.VerifyAndAllocateFragmentationHeader(1); | 977 header.VerifyAndAllocateFragmentationHeader(1); |
963 header.fragmentationOffset[0] = 0; | 978 header.fragmentationOffset[0] = 0; |
964 header.fragmentationLength[0] = image->_length; | 979 header.fragmentationLength[0] = image->_length; |
965 header.fragmentationPlType[0] = 0; | 980 header.fragmentationPlType[0] = 0; |
966 header.fragmentationTimeDiff[0] = 0; | 981 header.fragmentationTimeDiff[0] = 0; |
967 if (codecType_ == kVideoCodecVP8 && scale_) { | 982 if (codecType_ == kVideoCodecVP8 && scale_) { |
968 int qp; | 983 int qp; |
969 if (webrtc::vp8::GetQp(payload, payload_size, &qp)) | 984 if (webrtc::vp8::GetQp(payload, payload_size, &qp)) { |
985 current_acc_qp_ += qp; | |
970 quality_scaler_.ReportQP(qp); | 986 quality_scaler_.ReportQP(qp); |
987 } | |
971 } | 988 } |
972 } else if (codecType_ == kVideoCodecH264) { | 989 } else if (codecType_ == kVideoCodecH264) { |
973 if (scale_) { | 990 if (scale_) { |
974 h264_bitstream_parser_.ParseBitstream(payload, payload_size); | 991 h264_bitstream_parser_.ParseBitstream(payload, payload_size); |
975 int qp; | 992 int qp; |
976 if (h264_bitstream_parser_.GetLastSliceQp(&qp)) | 993 if (h264_bitstream_parser_.GetLastSliceQp(&qp)) { |
994 current_acc_qp_ += qp; | |
977 quality_scaler_.ReportQP(qp); | 995 quality_scaler_.ReportQP(qp); |
996 } | |
978 } | 997 } |
979 // For H.264 search for start codes. | 998 // For H.264 search for start codes. |
980 int32_t scPositions[MAX_NALUS_PERFRAME + 1] = {}; | 999 int32_t scPositions[MAX_NALUS_PERFRAME + 1] = {}; |
981 int32_t scPositionsLength = 0; | 1000 int32_t scPositionsLength = 0; |
982 int32_t scPosition = 0; | 1001 int32_t scPosition = 0; |
983 while (scPositionsLength < MAX_NALUS_PERFRAME) { | 1002 while (scPositionsLength < MAX_NALUS_PERFRAME) { |
984 int32_t naluPosition = NextNaluPosition( | 1003 int32_t naluPosition = NextNaluPosition( |
985 payload + scPosition, payload_size - scPosition); | 1004 payload + scPosition, payload_size - scPosition); |
986 if (naluPosition < 0) { | 1005 if (naluPosition < 0) { |
987 break; | 1006 break; |
(...skipping 27 matching lines...) | |
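
For H.264 the loop above scans the payload for 4-byte Annex B start codes (00 00 00 01, hence H264_SC_LENGTH == 4) and records each NALU offset in the RTP fragmentation header. NextNaluPosition itself falls in the elided lines; the sketch below is a plausible standalone scanner written to the same contract (FindStartCode is hypothetical and may differ from the real implementation):

```cpp
#include <cstddef>
#include <cstdint>

// Plausible standalone scanner for 4-byte Annex B start codes
// (00 00 00 01); the real NextNaluPosition is elided by the diff and
// may differ. Returns the offset of the first start code, or -1.
int32_t FindStartCode(const uint8_t* buffer, size_t length) {
  if (length < 4)
    return -1;
  for (size_t i = 0; i + 4 <= length; ++i) {
    if (buffer[i] == 0x00 && buffer[i + 1] == 0x00 &&
        buffer[i + 2] == 0x00 && buffer[i + 3] == 0x01) {
      return static_cast<int32_t>(i);
    }
  }
  return -1;
}
```
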
1015 // Return output buffer back to the encoder. | 1034 // Return output buffer back to the encoder. |
1016 bool success = jni->CallBooleanMethod(*j_media_codec_video_encoder_, | 1035 bool success = jni->CallBooleanMethod(*j_media_codec_video_encoder_, |
1017 j_release_output_buffer_method_, | 1036 j_release_output_buffer_method_, |
1018 output_buffer_index); | 1037 output_buffer_index); |
1019 CHECK_EXCEPTION(jni); | 1038 CHECK_EXCEPTION(jni); |
1020 if (!success) { | 1039 if (!success) { |
1021 ResetCodecOnCodecThread(); | 1040 ResetCodecOnCodecThread(); |
1022 return false; | 1041 return false; |
1023 } | 1042 } |
1024 | 1043 |
1044 // Calculate and print encoding statistics - every 3 seconds. | |
1045 frames_encoded_++; | |
1046 current_frames_++; | |
1047 current_bytes_ += payload_size; | |
1048 current_encoding_time_ms_ += frame_encoding_time_ms; | |
1049 int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_; | |
1050 if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs && | |
1051 current_frames_ > 0) { | |
1052 ALOGD << "Encoded frames: " << frames_encoded_ << ". Bitrate: " << | |
1053 (current_bytes_ * 8 / statistic_time_ms) << | |
1054 ", target: " << last_set_bitrate_kbps_ << " kbps, fps: " << | |
1055 ((current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms) | |
1056 << ", encTime: " << | |
1057 (current_encoding_time_ms_ / current_frames_) << ". QP: " << | |
1058 (current_acc_qp_ / current_frames_) << " for last " << | |
1059 statistic_time_ms << " ms."; | |
1060 start_time_ms_ = GetCurrentTimeMs(); | |
1061 current_frames_ = 0; | |
1062 current_bytes_ = 0; | |
1063 current_acc_qp_ = 0; | |
1064 current_encoding_time_ms_ = 0; | |
1065 } | |
1066 | |
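
The statistics block, moved here so it only runs once an output buffer has been successfully returned to the codec, computes bitrate as bytes * 8 / ms (i.e. kilobits per second), derives fps with rounded rather than truncating integer division by adding statistic_time_ms / 2 before dividing, and averages the newly accumulated current_acc_qp_ over the interval. The arithmetic as a self-contained sketch (PrintIntervalStats is hypothetical):

```cpp
#include <cstdint>
#include <cstdio>

// Hypothetical standalone form of the interval statistics above.
void PrintIntervalStats(int frames, int bytes, int acc_qp,
                        int encoding_time_ms, int64_t interval_ms) {
  if (interval_ms <= 0 || frames == 0)
    return;
  // bits / milliseconds == kilobits / second.
  const int64_t bitrate_kbps = static_cast<int64_t>(bytes) * 8 / interval_ms;
  // Adding interval_ms / 2 before dividing rounds to nearest instead of
  // truncating, matching the CL's fps expression.
  const int64_t fps = (frames * 1000LL + interval_ms / 2) / interval_ms;
  std::printf("Encoded frames: %d. Bitrate: %lld kbps, fps: %lld, "
              "encTime: %d ms, QP: %d\n",
              frames, static_cast<long long>(bitrate_kbps),
              static_cast<long long>(fps), encoding_time_ms / frames,
              acc_qp / frames);
}
```
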
1025 if (callback_status > 0) { | 1067 if (callback_status > 0) { |
1026 drop_next_input_frame_ = true; | 1068 drop_next_input_frame_ = true; |
1027 // Theoretically could handle callback_status<0 here, but unclear what | 1069 // Theoretically could handle callback_status<0 here, but unclear what |
1028 // that would mean for us. | 1070 // that would mean for us. |
1029 } | 1071 } |
1030 } | 1072 } |
1031 | 1073 |
1032 return true; | 1074 return true; |
1033 } | 1075 } |
1034 | 1076 |
(...skipping 134 matching lines...) | |
1169 } | 1211 } |
1170 | 1212 |
1171 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( | 1213 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( |
1172 webrtc::VideoEncoder* encoder) { | 1214 webrtc::VideoEncoder* encoder) { |
1173 ALOGD << "Destroy video encoder."; | 1215 ALOGD << "Destroy video encoder."; |
1174 delete encoder; | 1216 delete encoder; |
1175 } | 1217 } |
1176 | 1218 |
1177 } // namespace webrtc_jni | 1219 } // namespace webrtc_jni |
1178 | 1220 |