OLD | NEW |
1 /* | 1 /* |
2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
(...skipping 127 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
138 // previously-current values are reused instead of the passed parameters | 138 // previously-current values are reused instead of the passed parameters |
139 // (makes it easier to reason about thread-safety). | 139 // (makes it easier to reason about thread-safety). |
140 int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps, | 140 int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps, |
141 bool use_surface); | 141 bool use_surface); |
142 // Reconfigure to match |frame| in width, height. Also reconfigures the | 142 // Reconfigure to match |frame| in width, height. Also reconfigures the |
143 // encoder if |frame| is a texture/byte buffer and the encoder is initialized | 143 // encoder if |frame| is a texture/byte buffer and the encoder is initialized |
144 // for byte buffer/texture. Returns false if reconfiguring fails. | 144 // for byte buffer/texture. Returns false if reconfiguring fails. |
145 bool MaybeReconfigureEncoderOnCodecThread(const webrtc::VideoFrame& frame); | 145 bool MaybeReconfigureEncoderOnCodecThread(const webrtc::VideoFrame& frame); |
146 int32_t EncodeOnCodecThread( | 146 int32_t EncodeOnCodecThread( |
147 const webrtc::VideoFrame& input_image, | 147 const webrtc::VideoFrame& input_image, |
148 const std::vector<webrtc::FrameType>* frame_types); | 148 const std::vector<webrtc::FrameType>* frame_types, |
| 149 const int64_t frame_input_time_ms); |
149 bool EncodeByteBufferOnCodecThread(JNIEnv* jni, | 150 bool EncodeByteBufferOnCodecThread(JNIEnv* jni, |
150 bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index); | 151 bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index); |
151 bool EncodeTextureOnCodecThread(JNIEnv* jni, | 152 bool EncodeTextureOnCodecThread(JNIEnv* jni, |
152 bool key_frame, const webrtc::VideoFrame& frame); | 153 bool key_frame, const webrtc::VideoFrame& frame); |
153 | 154 |
154 int32_t RegisterEncodeCompleteCallbackOnCodecThread( | 155 int32_t RegisterEncodeCompleteCallbackOnCodecThread( |
155 webrtc::EncodedImageCallback* callback); | 156 webrtc::EncodedImageCallback* callback); |
156 int32_t ReleaseOnCodecThread(); | 157 int32_t ReleaseOnCodecThread(); |
157 int32_t SetRatesOnCodecThread(uint32_t new_bit_rate, uint32_t frame_rate); | 158 int32_t SetRatesOnCodecThread(uint32_t new_bit_rate, uint32_t frame_rate); |
158 void OnDroppedFrameOnCodecThread(); | 159 void OnDroppedFrameOnCodecThread(); |
(...skipping 257 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
416 init_height, codec_settings->startBitrate, | 417 init_height, codec_settings->startBitrate, |
417 codec_settings->maxFramerate, false /* use_surface */)); | 418 codec_settings->maxFramerate, false /* use_surface */)); |
418 } | 419 } |
419 | 420 |
420 int32_t MediaCodecVideoEncoder::Encode( | 421 int32_t MediaCodecVideoEncoder::Encode( |
421 const webrtc::VideoFrame& frame, | 422 const webrtc::VideoFrame& frame, |
422 const webrtc::CodecSpecificInfo* /* codec_specific_info */, | 423 const webrtc::CodecSpecificInfo* /* codec_specific_info */, |
423 const std::vector<webrtc::FrameType>* frame_types) { | 424 const std::vector<webrtc::FrameType>* frame_types) { |
424 return codec_thread_->Invoke<int32_t>( | 425 return codec_thread_->Invoke<int32_t>( |
425 RTC_FROM_HERE, Bind(&MediaCodecVideoEncoder::EncodeOnCodecThread, this, | 426 RTC_FROM_HERE, Bind(&MediaCodecVideoEncoder::EncodeOnCodecThread, this, |
426 frame, frame_types)); | 427 frame, frame_types, rtc::TimeMillis())); |
427 } | 428 } |
428 | 429 |
429 int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallback( | 430 int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallback( |
430 webrtc::EncodedImageCallback* callback) { | 431 webrtc::EncodedImageCallback* callback) { |
431 return codec_thread_->Invoke<int32_t>( | 432 return codec_thread_->Invoke<int32_t>( |
432 RTC_FROM_HERE, | 433 RTC_FROM_HERE, |
433 Bind(&MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread, | 434 Bind(&MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread, |
434 this, callback)); | 435 this, callback)); |
435 } | 436 } |
436 | 437 |
(...skipping 154 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
591 RTC_CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity"; | 592 RTC_CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity"; |
592 } | 593 } |
593 } | 594 } |
594 | 595 |
595 inited_ = true; | 596 inited_ = true; |
596 return WEBRTC_VIDEO_CODEC_OK; | 597 return WEBRTC_VIDEO_CODEC_OK; |
597 } | 598 } |
598 | 599 |
599 int32_t MediaCodecVideoEncoder::EncodeOnCodecThread( | 600 int32_t MediaCodecVideoEncoder::EncodeOnCodecThread( |
600 const webrtc::VideoFrame& frame, | 601 const webrtc::VideoFrame& frame, |
601 const std::vector<webrtc::FrameType>* frame_types) { | 602 const std::vector<webrtc::FrameType>* frame_types, |
| 603 const int64_t frame_input_time_ms) { |
602 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 604 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
603 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 605 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
604 ScopedLocalRefFrame local_ref_frame(jni); | 606 ScopedLocalRefFrame local_ref_frame(jni); |
605 | 607 |
606 if (!inited_) { | 608 if (!inited_) { |
607 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 609 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
608 } | 610 } |
609 | 611 |
610 bool send_key_frame = false; | 612 bool send_key_frame = false; |
611 if (codec_mode_ == webrtc::kRealtimeVideo) { | 613 if (codec_mode_ == webrtc::kRealtimeVideo) { |
(...skipping 78 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
690 quality_scaler_.GetScaledBuffer(frame.video_frame_buffer())); | 692 quality_scaler_.GetScaledBuffer(frame.video_frame_buffer())); |
691 } | 693 } |
692 } | 694 } |
693 } | 695 } |
694 | 696 |
695 if (!MaybeReconfigureEncoderOnCodecThread(input_frame)) { | 697 if (!MaybeReconfigureEncoderOnCodecThread(input_frame)) { |
696 ALOGE << "Failed to reconfigure encoder."; | 698 ALOGE << "Failed to reconfigure encoder."; |
697 return WEBRTC_VIDEO_CODEC_ERROR; | 699 return WEBRTC_VIDEO_CODEC_ERROR; |
698 } | 700 } |
699 | 701 |
700 const int64_t time_before_calling_encode = rtc::TimeMillis(); | |
701 const bool key_frame = | 702 const bool key_frame = |
702 frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame; | 703 frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame; |
703 bool encode_status = true; | 704 bool encode_status = true; |
704 if (!input_frame.video_frame_buffer()->native_handle()) { | 705 if (!input_frame.video_frame_buffer()->native_handle()) { |
705 int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_, | 706 int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_, |
706 j_dequeue_input_buffer_method_); | 707 j_dequeue_input_buffer_method_); |
707 CHECK_EXCEPTION(jni); | 708 CHECK_EXCEPTION(jni); |
708 if (j_input_buffer_index == -1) { | 709 if (j_input_buffer_index == -1) { |
709 // Video codec falls behind - no input buffer available. | 710 // Video codec falls behind - no input buffer available. |
710 ALOGW << "Encoder drop frame - no input buffers available"; | 711 ALOGW << "Encoder drop frame - no input buffers available"; |
711 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; | 712 if (frames_received_ > 1) { |
712 frames_dropped_media_encoder_++; | 713 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; |
713 OnDroppedFrameOnCodecThread(); | 714 frames_dropped_media_encoder_++; |
| 715 OnDroppedFrameOnCodecThread(); |
| 716 } else { |
| 717 // Input buffers are not ready after codec initialization, HW is still |
 | 718 // allocating them - this is expected and should not result in drop |
| 719 // frame report. |
| 720 frames_received_ = 0; |
| 721 } |
714 return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887. | 722 return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887. |
715 } | 723 } else if (j_input_buffer_index == -2) { |
716 if (j_input_buffer_index == -2) { | |
717 ResetCodecOnCodecThread(); | 724 ResetCodecOnCodecThread(); |
718 return WEBRTC_VIDEO_CODEC_ERROR; | 725 return WEBRTC_VIDEO_CODEC_ERROR; |
719 } | 726 } |
720 encode_status = EncodeByteBufferOnCodecThread(jni, key_frame, input_frame, | 727 encode_status = EncodeByteBufferOnCodecThread(jni, key_frame, input_frame, |
721 j_input_buffer_index); | 728 j_input_buffer_index); |
722 } else { | 729 } else { |
723 encode_status = EncodeTextureOnCodecThread(jni, key_frame, input_frame); | 730 encode_status = EncodeTextureOnCodecThread(jni, key_frame, input_frame); |
724 } | 731 } |
725 | 732 |
726 if (!encode_status) { | 733 if (!encode_status) { |
727 ALOGE << "Failed encode frame with timestamp: " << input_frame.timestamp(); | 734 ALOGE << "Failed encode frame with timestamp: " << input_frame.timestamp(); |
728 ResetCodecOnCodecThread(); | 735 ResetCodecOnCodecThread(); |
729 return WEBRTC_VIDEO_CODEC_ERROR; | 736 return WEBRTC_VIDEO_CODEC_ERROR; |
730 } | 737 } |
731 | 738 |
732 // Save input image timestamps for later output. | 739 // Save input image timestamps for later output. |
733 input_frame_infos_.emplace_back( | 740 input_frame_infos_.emplace_back( |
734 time_before_calling_encode, input_frame.timestamp(), | 741 frame_input_time_ms, input_frame.timestamp(), |
735 input_frame.render_time_ms(), input_frame.rotation()); | 742 input_frame.render_time_ms(), input_frame.rotation()); |
736 | 743 |
737 last_input_timestamp_ms_ = | 744 last_input_timestamp_ms_ = |
738 current_timestamp_us_ / rtc::kNumMicrosecsPerMillisec; | 745 current_timestamp_us_ / rtc::kNumMicrosecsPerMillisec; |
739 | 746 |
740 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; | 747 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; |
741 | 748 |
742 codec_thread_->Clear(this); | 749 codec_thread_->Clear(this); |
743 codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollMs, this); | 750 codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollMs, this); |
744 | 751 |
(...skipping 192 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
937 ResetCodecOnCodecThread(); | 944 ResetCodecOnCodecThread(); |
938 return false; | 945 return false; |
939 } | 946 } |
940 | 947 |
941 // Get key and config frame flags. | 948 // Get key and config frame flags. |
942 jobject j_output_buffer = | 949 jobject j_output_buffer = |
943 GetOutputBufferInfoBuffer(jni, j_output_buffer_info); | 950 GetOutputBufferInfoBuffer(jni, j_output_buffer_info); |
944 bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info); | 951 bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info); |
945 | 952 |
946 // Get frame timestamps from a queue - for non config frames only. | 953 // Get frame timestamps from a queue - for non config frames only. |
| 954 int64_t encoding_start_time_ms = 0; |
947 int64_t frame_encoding_time_ms = 0; | 955 int64_t frame_encoding_time_ms = 0; |
948 last_output_timestamp_ms_ = | 956 last_output_timestamp_ms_ = |
949 GetOutputBufferInfoPresentationTimestampUs(jni, j_output_buffer_info) / | 957 GetOutputBufferInfoPresentationTimestampUs(jni, j_output_buffer_info) / |
950 rtc::kNumMicrosecsPerMillisec; | 958 rtc::kNumMicrosecsPerMillisec; |
951 if (!input_frame_infos_.empty()) { | 959 if (!input_frame_infos_.empty()) { |
952 const InputFrameInfo& frame_info = input_frame_infos_.front(); | 960 const InputFrameInfo& frame_info = input_frame_infos_.front(); |
953 output_timestamp_ = frame_info.frame_timestamp; | 961 output_timestamp_ = frame_info.frame_timestamp; |
954 output_render_time_ms_ = frame_info.frame_render_time_ms; | 962 output_render_time_ms_ = frame_info.frame_render_time_ms; |
955 output_rotation_ = frame_info.rotation; | 963 output_rotation_ = frame_info.rotation; |
956 frame_encoding_time_ms = | 964 encoding_start_time_ms = frame_info.encode_start_time; |
957 rtc::TimeMillis() - frame_info.encode_start_time; | |
958 input_frame_infos_.pop_front(); | 965 input_frame_infos_.pop_front(); |
959 } | 966 } |
960 | 967 |
961 // Extract payload. | 968 // Extract payload. |
962 size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer); | 969 size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer); |
963 uint8_t* payload = reinterpret_cast<uint8_t*>( | 970 uint8_t* payload = reinterpret_cast<uint8_t*>( |
964 jni->GetDirectBufferAddress(j_output_buffer)); | 971 jni->GetDirectBufferAddress(j_output_buffer)); |
965 CHECK_EXCEPTION(jni); | 972 CHECK_EXCEPTION(jni); |
966 | 973 |
967 if (frames_encoded_ < kMaxEncodedLogFrames) { | |
968 int current_latency = | |
969 (int)(last_input_timestamp_ms_ - last_output_timestamp_ms_); | |
970 ALOGD << "Encoder frame out # " << frames_encoded_ << | |
971 ". Key: " << key_frame << | |
972 ". Size: " << payload_size << | |
973 ". TS: " << (int)last_output_timestamp_ms_ << | |
974 ". Latency: " << current_latency << | |
975 ". EncTime: " << frame_encoding_time_ms; | |
976 } | |
977 | |
978 // Callback - return encoded frame. | 974 // Callback - return encoded frame. |
979 int32_t callback_status = 0; | 975 int32_t callback_status = 0; |
980 if (callback_) { | 976 if (callback_) { |
981 std::unique_ptr<webrtc::EncodedImage> image( | 977 std::unique_ptr<webrtc::EncodedImage> image( |
982 new webrtc::EncodedImage(payload, payload_size, payload_size)); | 978 new webrtc::EncodedImage(payload, payload_size, payload_size)); |
983 image->_encodedWidth = width_; | 979 image->_encodedWidth = width_; |
984 image->_encodedHeight = height_; | 980 image->_encodedHeight = height_; |
985 image->_timeStamp = output_timestamp_; | 981 image->_timeStamp = output_timestamp_; |
986 image->capture_time_ms_ = output_render_time_ms_; | 982 image->capture_time_ms_ = output_render_time_ms_; |
987 image->rotation_ = output_rotation_; | 983 image->rotation_ = output_rotation_; |
(...skipping 105 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1093 // Return output buffer back to the encoder. | 1089 // Return output buffer back to the encoder. |
1094 bool success = jni->CallBooleanMethod(*j_media_codec_video_encoder_, | 1090 bool success = jni->CallBooleanMethod(*j_media_codec_video_encoder_, |
1095 j_release_output_buffer_method_, | 1091 j_release_output_buffer_method_, |
1096 output_buffer_index); | 1092 output_buffer_index); |
1097 CHECK_EXCEPTION(jni); | 1093 CHECK_EXCEPTION(jni); |
1098 if (!success) { | 1094 if (!success) { |
1099 ResetCodecOnCodecThread(); | 1095 ResetCodecOnCodecThread(); |
1100 return false; | 1096 return false; |
1101 } | 1097 } |
1102 | 1098 |
| 1099 // Print per frame statistics. |
| 1100 if (encoding_start_time_ms > 0) { |
| 1101 frame_encoding_time_ms = rtc::TimeMillis() - encoding_start_time_ms; |
| 1102 } |
| 1103 if (frames_encoded_ < kMaxEncodedLogFrames) { |
| 1104 int current_latency = |
| 1105 (int)(last_input_timestamp_ms_ - last_output_timestamp_ms_); |
| 1106 ALOGD << "Encoder frame out # " << frames_encoded_ << |
| 1107 ". Key: " << key_frame << |
| 1108 ". Size: " << payload_size << |
| 1109 ". TS: " << (int)last_output_timestamp_ms_ << |
| 1110 ". Latency: " << current_latency << |
| 1111 ". EncTime: " << frame_encoding_time_ms; |
| 1112 } |
| 1113 |
1103 // Calculate and print encoding statistics - every 3 seconds. | 1114 // Calculate and print encoding statistics - every 3 seconds. |
1104 frames_encoded_++; | 1115 frames_encoded_++; |
1105 current_frames_++; | 1116 current_frames_++; |
1106 current_bytes_ += payload_size; | 1117 current_bytes_ += payload_size; |
1107 current_encoding_time_ms_ += frame_encoding_time_ms; | 1118 current_encoding_time_ms_ += frame_encoding_time_ms; |
1108 LogStatistics(false); | 1119 LogStatistics(false); |
1109 | 1120 |
1110 if (callback_status > 0) { | 1121 if (callback_status > 0) { |
1111 drop_next_input_frame_ = true; | 1122 drop_next_input_frame_ = true; |
1112 // Theoretically could handle callback_status<0 here, but unclear what | 1123 // Theoretically could handle callback_status<0 here, but unclear what |
(...skipping 163 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1276 return supported_codecs_; | 1287 return supported_codecs_; |
1277 } | 1288 } |
1278 | 1289 |
1279 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( | 1290 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( |
1280 webrtc::VideoEncoder* encoder) { | 1291 webrtc::VideoEncoder* encoder) { |
1281 ALOGD << "Destroy video encoder."; | 1292 ALOGD << "Destroy video encoder."; |
1282 delete encoder; | 1293 delete encoder; |
1283 } | 1294 } |
1284 | 1295 |
1285 } // namespace webrtc_jni | 1296 } // namespace webrtc_jni |
OLD | NEW |