Chromium Code Reviews

Side by Side Diff: webrtc/api/android/jni/androidmediaencoder_jni.cc

Issue 2398963003: Move usage of QualityScaler to ViEEncoder. (Closed)
Patch Set: fix android build Created 4 years, 1 month ago
1 /* 1 /*
2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
3 * 3 *
4 * Use of this source code is governed by a BSD-style license 4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source 5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found 6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may 7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree. 8 * be found in the AUTHORS file in the root of the source tree.
9 */ 9 */
10 10
(...skipping 17 matching lines...)
28 #include "webrtc/base/thread.h" 28 #include "webrtc/base/thread.h"
29 #include "webrtc/base/thread_checker.h" 29 #include "webrtc/base/thread_checker.h"
30 #include "webrtc/base/timeutils.h" 30 #include "webrtc/base/timeutils.h"
31 #include "webrtc/common_types.h" 31 #include "webrtc/common_types.h"
32 #include "webrtc/common_video/h264/h264_bitstream_parser.h" 32 #include "webrtc/common_video/h264/h264_bitstream_parser.h"
33 #include "webrtc/modules/video_coding/include/video_codec_interface.h" 33 #include "webrtc/modules/video_coding/include/video_codec_interface.h"
34 #include "webrtc/modules/video_coding/utility/quality_scaler.h" 34 #include "webrtc/modules/video_coding/utility/quality_scaler.h"
35 #include "webrtc/modules/video_coding/utility/vp8_header_parser.h" 35 #include "webrtc/modules/video_coding/utility/vp8_header_parser.h"
36 #include "webrtc/system_wrappers/include/field_trial.h" 36 #include "webrtc/system_wrappers/include/field_trial.h"
37 #include "webrtc/system_wrappers/include/logcat_trace_context.h" 37 #include "webrtc/system_wrappers/include/logcat_trace_context.h"
38 #include "webrtc/video_encoder.h"
38 39
39 using rtc::Bind; 40 using rtc::Bind;
40 using rtc::Thread; 41 using rtc::Thread;
41 using rtc::ThreadManager; 42 using rtc::ThreadManager;
42 43
43 using webrtc::CodecSpecificInfo; 44 using webrtc::CodecSpecificInfo;
44 using webrtc::EncodedImage; 45 using webrtc::EncodedImage;
45 using webrtc::VideoFrame; 46 using webrtc::VideoFrame;
46 using webrtc::RTPFragmentationHeader; 47 using webrtc::RTPFragmentationHeader;
47 using webrtc::VideoCodec; 48 using webrtc::VideoCodec;
(...skipping 110 matching lines...)
158 const int64_t frame_input_time_ms); 159 const int64_t frame_input_time_ms);
159 bool EncodeByteBufferOnCodecThread(JNIEnv* jni, 160 bool EncodeByteBufferOnCodecThread(JNIEnv* jni,
160 bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index); 161 bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index);
161 bool EncodeTextureOnCodecThread(JNIEnv* jni, 162 bool EncodeTextureOnCodecThread(JNIEnv* jni,
162 bool key_frame, const webrtc::VideoFrame& frame); 163 bool key_frame, const webrtc::VideoFrame& frame);
163 164
164 int32_t RegisterEncodeCompleteCallbackOnCodecThread( 165 int32_t RegisterEncodeCompleteCallbackOnCodecThread(
165 webrtc::EncodedImageCallback* callback); 166 webrtc::EncodedImageCallback* callback);
166 int32_t ReleaseOnCodecThread(); 167 int32_t ReleaseOnCodecThread();
167 int32_t SetRatesOnCodecThread(uint32_t new_bit_rate, uint32_t frame_rate); 168 int32_t SetRatesOnCodecThread(uint32_t new_bit_rate, uint32_t frame_rate);
168 void OnDroppedFrameOnCodecThread();
169 169
170 // Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members. 170 // Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members.
171 int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info); 171 int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info);
172 jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info); 172 jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info);
173 bool GetOutputBufferInfoIsKeyFrame(JNIEnv* jni, jobject j_output_buffer_info); 173 bool GetOutputBufferInfoIsKeyFrame(JNIEnv* jni, jobject j_output_buffer_info);
174 jlong GetOutputBufferInfoPresentationTimestampUs( 174 jlong GetOutputBufferInfoPresentationTimestampUs(
175 JNIEnv* jni, jobject j_output_buffer_info); 175 JNIEnv* jni, jobject j_output_buffer_info);
176 176
177 // Deliver any outputs pending in the MediaCodec to our |callback_| and return 177 // Deliver any outputs pending in the MediaCodec to our |callback_| and return
178 // true on success. 178 // true on success.
179 bool DeliverPendingOutputs(JNIEnv* jni); 179 bool DeliverPendingOutputs(JNIEnv* jni);
180 180
181 // Search for H.264 start codes. 181 // Search for H.264 start codes.
182 int32_t NextNaluPosition(uint8_t *buffer, size_t buffer_size); 182 int32_t NextNaluPosition(uint8_t *buffer, size_t buffer_size);
183 183
184 VideoEncoder::ScalingSettings GetScalingSettings() const override;
185
184 // Displays encoder statistics. 186 // Displays encoder statistics.
185 void LogStatistics(bool force_log); 187 void LogStatistics(bool force_log);
186 188
187 // Type of video codec. 189 // Type of video codec.
188 VideoCodecType codecType_; 190 VideoCodecType codecType_;
189 191
190 // Valid all the time since RegisterEncodeCompleteCallback() Invoke()s to 192 // Valid all the time since RegisterEncodeCompleteCallback() Invoke()s to
191 // |codec_thread_| synchronously. 193 // |codec_thread_| synchronously.
192 webrtc::EncodedImageCallback* callback_; 194 webrtc::EncodedImageCallback* callback_;
193 195
(...skipping 65 matching lines...)
259 // |input_frame_infos_|. 261 // |input_frame_infos_|.
260 int64_t output_render_time_ms_; // Last output frame render time from 262 int64_t output_render_time_ms_; // Last output frame render time from
261 // |input_frame_infos_|. 263 // |input_frame_infos_|.
262 webrtc::VideoRotation output_rotation_; // Last output frame rotation from 264 webrtc::VideoRotation output_rotation_; // Last output frame rotation from
263 // |input_frame_infos_|. 265 // |input_frame_infos_|.
264 // Frame size in bytes fed to MediaCodec. 266 // Frame size in bytes fed to MediaCodec.
265 int yuv_size_; 267 int yuv_size_;
266 // True only between a callback_->OnEncodedImage() call returning a positive 268
267 // value and the next Encode() call, which is then ignored. 269
268 bool drop_next_input_frame_; 270 bool drop_next_input_frame_;
271 bool scale_;
269 // Global references; must be deleted in Release(). 272 // Global references; must be deleted in Release().
270 std::vector<jobject> input_buffers_; 273 std::vector<jobject> input_buffers_;
271 QualityScaler quality_scaler_;
272 // Dynamic resolution change, off by default.
273 bool scale_;
274
275 // H264 bitstream parser, used to extract QP from encoded bitstreams.
276 webrtc::H264BitstreamParser h264_bitstream_parser_; 274 webrtc::H264BitstreamParser h264_bitstream_parser_;
277 275
278 // VP9 variables to populate codec specific structure. 276 // VP9 variables to populate codec specific structure.
279 webrtc::GofInfoVP9 gof_; // Contains each frame's temporal information for 277 webrtc::GofInfoVP9 gof_; // Contains each frame's temporal information for
280 // non-flexible VP9 mode. 278 // non-flexible VP9 mode.
281 uint8_t tl0_pic_idx_; 279 uint8_t tl0_pic_idx_;
282 size_t gof_idx_; 280 size_t gof_idx_;
283 281
284 // EGL context - owned by factory, should not be allocated/destroyed 282 // EGL context - owned by factory, should not be allocated/destroyed
285 // by MediaCodecVideoEncoder. 283 // by MediaCodecVideoEncoder.
(...skipping 119 matching lines...)
405 scale_ = false; 403 scale_ = false;
406 if (codecType_ == kVideoCodecVP8) { 404 if (codecType_ == kVideoCodecVP8) {
407 scale_ = codec_settings->codecSpecific.VP8.automaticResizeOn; 405 scale_ = codec_settings->codecSpecific.VP8.automaticResizeOn;
408 } else if (codecType_ != kVideoCodecVP9) { 406 } else if (codecType_ != kVideoCodecVP9) {
409 scale_ = true; 407 scale_ = true;
410 } 408 }
411 409
412 ALOGD << "InitEncode request: " << init_width << " x " << init_height; 410 ALOGD << "InitEncode request: " << init_width << " x " << init_height;
413 ALOGD << "Encoder automatic resize " << (scale_ ? "enabled" : "disabled"); 411 ALOGD << "Encoder automatic resize " << (scale_ ? "enabled" : "disabled");
414 412
415 if (scale_) {
416 if (codecType_ == kVideoCodecVP8 || codecType_ == kVideoCodecH264) {
417 quality_scaler_.Init(codecType_, codec_settings->startBitrate,
418 codec_settings->width, codec_settings->height,
419 codec_settings->maxFramerate);
420 } else {
421 // When adding codec support to additional hardware codecs, also configure
422 // their QP thresholds for scaling.
423 RTC_NOTREACHED() << "Unsupported codec without configured QP thresholds.";
424 scale_ = false;
425 }
426 QualityScaler::Resolution res = quality_scaler_.GetScaledResolution();
427 init_width = res.width;
428 init_height = res.height;
429 ALOGD << "Scaled resolution: " << init_width << " x " << init_height;
430 }
431
432 return codec_thread_->Invoke<int32_t>( 413 return codec_thread_->Invoke<int32_t>(
433 RTC_FROM_HERE, 414 RTC_FROM_HERE,
434 Bind(&MediaCodecVideoEncoder::InitEncodeOnCodecThread, this, init_width, 415 Bind(&MediaCodecVideoEncoder::InitEncodeOnCodecThread, this, init_width,
435 init_height, codec_settings->startBitrate, 416 init_height, codec_settings->startBitrate,
436 codec_settings->maxFramerate, 417 codec_settings->maxFramerate,
437 codec_settings->expect_encode_from_texture)); 418 codec_settings->expect_encode_from_texture));
438 } 419 }
439 420
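For context on the block removed above: the encoder no longer computes a scaled resolution inside InitEncode(); per this issue, resolution adaptation is driven by whatever now owns the QualityScaler (ViEEncoder), and the encoder is simply initialized at the resolution it is handed. A minimal sketch of that division of labor, with hypothetical names and an illustrative threshold, not the actual ViEEncoder interface:

    // Hypothetical sketch: the QualityScaler's owner reacts to QP feedback and
    // asks the video source for smaller frames; the hardware encoder wrapper
    // no longer rescales its own input.
    class ScalerOwner {
     public:
      void OnQpSample(int qp) {
        // The real decision logic lives in QualityScaler; this only shows the
        // direction of the dependency after the move.
        if (qp > kHighQpThreshold)
          RequestSourceResolution(width_ / 2, height_ / 2);
      }

     private:
      void RequestSourceResolution(int width, int height) {
        // Would go through the video source / sink-wants mechanism rather than
        // by cropping and scaling inside the encoder wrapper.
      }
      static constexpr int kHighQpThreshold = 37;  // Illustrative value only.
      int width_ = 1280;
      int height_ = 720;
    };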
440 int32_t MediaCodecVideoEncoder::Encode( 421 int32_t MediaCodecVideoEncoder::Encode(
441 const webrtc::VideoFrame& frame, 422 const webrtc::VideoFrame& frame,
(...skipping 259 matching lines...)
701 << ". TS: " << (int)(current_timestamp_us_ / 1000) 682 << ". TS: " << (int)(current_timestamp_us_ / 1000)
702 << ". Q: " << input_frame_infos_.size() << ". Fps: " << last_set_fps_ 683 << ". Q: " << input_frame_infos_.size() << ". Fps: " << last_set_fps_
703 << ". Kbps: " << last_set_bitrate_kbps_; 684 << ". Kbps: " << last_set_bitrate_kbps_;
704 } 685 }
705 686
706 if (drop_next_input_frame_) { 687 if (drop_next_input_frame_) {
707 ALOGW << "Encoder drop frame - failed callback."; 688 ALOGW << "Encoder drop frame - failed callback.";
708 drop_next_input_frame_ = false; 689 drop_next_input_frame_ = false;
709 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; 690 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
710 frames_dropped_media_encoder_++; 691 frames_dropped_media_encoder_++;
711 OnDroppedFrameOnCodecThread();
712 return WEBRTC_VIDEO_CODEC_OK; 692 return WEBRTC_VIDEO_CODEC_OK;
713 } 693 }
714 694
715 RTC_CHECK(frame_types->size() == 1) << "Unexpected stream count"; 695 RTC_CHECK(frame_types->size() == 1) << "Unexpected stream count";
716 696
717 // Check if we accumulated too many frames in encoder input buffers and drop 697 // Check if we accumulated too many frames in encoder input buffers and drop
718 // frame if so. 698 // frame if so.
719 if (input_frame_infos_.size() > MAX_ENCODER_Q_SIZE) { 699 if (input_frame_infos_.size() > MAX_ENCODER_Q_SIZE) {
720 ALOGD << "Already " << input_frame_infos_.size() 700 ALOGD << "Already " << input_frame_infos_.size()
721 << " frames in the queue, dropping" 701 << " frames in the queue, dropping"
722 << ". TS: " << (int)(current_timestamp_us_ / 1000) 702 << ". TS: " << (int)(current_timestamp_us_ / 1000)
723 << ". Fps: " << last_set_fps_ 703 << ". Fps: " << last_set_fps_
724 << ". Consecutive drops: " << consecutive_full_queue_frame_drops_; 704 << ". Consecutive drops: " << consecutive_full_queue_frame_drops_;
725 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; 705 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
726 consecutive_full_queue_frame_drops_++; 706 consecutive_full_queue_frame_drops_++;
727 if (consecutive_full_queue_frame_drops_ >= 707 if (consecutive_full_queue_frame_drops_ >=
728 ENCODER_STALL_FRAMEDROP_THRESHOLD) { 708 ENCODER_STALL_FRAMEDROP_THRESHOLD) {
729 ALOGE << "Encoder got stuck."; 709 ALOGE << "Encoder got stuck.";
730 return ProcessHWErrorOnEncodeOnCodecThread(); 710 return ProcessHWErrorOnEncodeOnCodecThread();
731 } 711 }
732 frames_dropped_media_encoder_++; 712 frames_dropped_media_encoder_++;
733 OnDroppedFrameOnCodecThread();
734 return WEBRTC_VIDEO_CODEC_OK; 713 return WEBRTC_VIDEO_CODEC_OK;
735 } 714 }
736 consecutive_full_queue_frame_drops_ = 0; 715 consecutive_full_queue_frame_drops_ = 0;
737 716
738 rtc::scoped_refptr<webrtc::VideoFrameBuffer> input_buffer( 717 rtc::scoped_refptr<webrtc::VideoFrameBuffer> input_buffer(
739 frame.video_frame_buffer()); 718 frame.video_frame_buffer());
740 if (scale_) {
741 // Check framerate before spatial resolution change.
742 quality_scaler_.OnEncodeFrame(frame.width(), frame.height());
743 const webrtc::QualityScaler::Resolution scaled_resolution =
744 quality_scaler_.GetScaledResolution();
745 if (scaled_resolution.width != frame.width() ||
746 scaled_resolution.height != frame.height()) {
747 if (input_buffer->native_handle() != nullptr) {
748 input_buffer = static_cast<AndroidTextureBuffer*>(input_buffer.get())
749 ->CropScaleAndRotate(frame.width(), frame.height(),
750 0, 0,
751 scaled_resolution.width,
752 scaled_resolution.height,
753 webrtc::kVideoRotation_0);
754 } else {
755 input_buffer = quality_scaler_.GetScaledBuffer(input_buffer);
756 }
757 }
758 }
759 719
760 VideoFrame input_frame(input_buffer, frame.timestamp(), 720 VideoFrame input_frame(input_buffer, frame.timestamp(),
761 frame.render_time_ms(), frame.rotation()); 721 frame.render_time_ms(), frame.rotation());
762 722
763 if (!MaybeReconfigureEncoderOnCodecThread(input_frame)) { 723 if (!MaybeReconfigureEncoderOnCodecThread(input_frame)) {
764 ALOGE << "Failed to reconfigure encoder."; 724 ALOGE << "Failed to reconfigure encoder.";
765 return WEBRTC_VIDEO_CODEC_ERROR; 725 return WEBRTC_VIDEO_CODEC_ERROR;
766 } 726 }
767 727
768 const bool key_frame = 728 const bool key_frame =
769 frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame; 729 frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame;
770 bool encode_status = true; 730 bool encode_status = true;
771 if (!input_frame.video_frame_buffer()->native_handle()) { 731 if (!input_frame.video_frame_buffer()->native_handle()) {
772 int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_, 732 int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_,
773 j_dequeue_input_buffer_method_); 733 j_dequeue_input_buffer_method_);
774 if (CheckException(jni)) { 734 if (CheckException(jni)) {
775 ALOGE << "Exception in dequeu input buffer."; 735 ALOGE << "Exception in dequeu input buffer.";
776 return ProcessHWErrorOnEncodeOnCodecThread(); 736 return ProcessHWErrorOnEncodeOnCodecThread();
777 } 737 }
778 if (j_input_buffer_index == -1) { 738 if (j_input_buffer_index == -1) {
779 // Video codec falls behind - no input buffer available. 739 // Video codec falls behind - no input buffer available.
780 ALOGW << "Encoder drop frame - no input buffers available"; 740 ALOGW << "Encoder drop frame - no input buffers available";
781 if (frames_received_ > 1) { 741 if (frames_received_ > 1) {
782 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; 742 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
783 frames_dropped_media_encoder_++; 743 frames_dropped_media_encoder_++;
784 OnDroppedFrameOnCodecThread();
785 } else { 744 } else {
786 // Input buffers are not ready after codec initialization, HW is still 745 // Input buffers are not ready after codec initialization, HW is still
787 // allocating thme - this is expected and should not result in drop 746 // allocating thme - this is expected and should not result in drop
788 // frame report. 747 // frame report.
789 frames_received_ = 0; 748 frames_received_ = 0;
790 } 749 }
791 return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887. 750 return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887.
792 } else if (j_input_buffer_index == -2) { 751 } else if (j_input_buffer_index == -2) {
793 return ProcessHWErrorOnEncodeOnCodecThread(); 752 return ProcessHWErrorOnEncodeOnCodecThread();
794 } 753 }
(...skipping 160 matching lines...)
955 uint32_t frame_rate) { 914 uint32_t frame_rate) {
956 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); 915 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
957 if (sw_fallback_required_) 916 if (sw_fallback_required_)
958 return WEBRTC_VIDEO_CODEC_OK; 917 return WEBRTC_VIDEO_CODEC_OK;
959 frame_rate = (frame_rate < MAX_ALLOWED_VIDEO_FPS) ? 918 frame_rate = (frame_rate < MAX_ALLOWED_VIDEO_FPS) ?
960 frame_rate : MAX_ALLOWED_VIDEO_FPS; 919 frame_rate : MAX_ALLOWED_VIDEO_FPS;
961 if (last_set_bitrate_kbps_ == new_bit_rate && 920 if (last_set_bitrate_kbps_ == new_bit_rate &&
962 last_set_fps_ == frame_rate) { 921 last_set_fps_ == frame_rate) {
963 return WEBRTC_VIDEO_CODEC_OK; 922 return WEBRTC_VIDEO_CODEC_OK;
964 } 923 }
965 if (scale_) {
966 quality_scaler_.ReportFramerate(frame_rate);
967 }
968 JNIEnv* jni = AttachCurrentThreadIfNeeded(); 924 JNIEnv* jni = AttachCurrentThreadIfNeeded();
969 ScopedLocalRefFrame local_ref_frame(jni); 925 ScopedLocalRefFrame local_ref_frame(jni);
970 if (new_bit_rate > 0) { 926 if (new_bit_rate > 0) {
971 last_set_bitrate_kbps_ = new_bit_rate; 927 last_set_bitrate_kbps_ = new_bit_rate;
972 } 928 }
973 if (frame_rate > 0) { 929 if (frame_rate > 0) {
974 last_set_fps_ = frame_rate; 930 last_set_fps_ = frame_rate;
975 } 931 }
976 bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_, 932 bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
977 j_set_rates_method_, 933 j_set_rates_method_,
(...skipping 91 matching lines...)
1069 std::unique_ptr<webrtc::EncodedImage> image( 1025 std::unique_ptr<webrtc::EncodedImage> image(
1070 new webrtc::EncodedImage(payload, payload_size, payload_size)); 1026 new webrtc::EncodedImage(payload, payload_size, payload_size));
1071 image->_encodedWidth = width_; 1027 image->_encodedWidth = width_;
1072 image->_encodedHeight = height_; 1028 image->_encodedHeight = height_;
1073 image->_timeStamp = output_timestamp_; 1029 image->_timeStamp = output_timestamp_;
1074 image->capture_time_ms_ = output_render_time_ms_; 1030 image->capture_time_ms_ = output_render_time_ms_;
1075 image->rotation_ = output_rotation_; 1031 image->rotation_ = output_rotation_;
1076 image->_frameType = 1032 image->_frameType =
1077 (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta); 1033 (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta);
1078 image->_completeFrame = true; 1034 image->_completeFrame = true;
1079 image->adapt_reason_.quality_resolution_downscales =
1080 scale_ ? quality_scaler_.downscale_shift() : -1;
1081
1082 webrtc::CodecSpecificInfo info; 1035 webrtc::CodecSpecificInfo info;
1083 memset(&info, 0, sizeof(info)); 1036 memset(&info, 0, sizeof(info));
1084 info.codecType = codecType_; 1037 info.codecType = codecType_;
1085 if (codecType_ == kVideoCodecVP8) { 1038 if (codecType_ == kVideoCodecVP8) {
1086 info.codecSpecific.VP8.pictureId = picture_id_; 1039 info.codecSpecific.VP8.pictureId = picture_id_;
1087 info.codecSpecific.VP8.nonReference = false; 1040 info.codecSpecific.VP8.nonReference = false;
1088 info.codecSpecific.VP8.simulcastIdx = 0; 1041 info.codecSpecific.VP8.simulcastIdx = 0;
1089 info.codecSpecific.VP8.temporalIdx = webrtc::kNoTemporalIdx; 1042 info.codecSpecific.VP8.temporalIdx = webrtc::kNoTemporalIdx;
1090 info.codecSpecific.VP8.layerSync = false; 1043 info.codecSpecific.VP8.layerSync = false;
1091 info.codecSpecific.VP8.tl0PicIdx = webrtc::kNoTl0PicIdx; 1044 info.codecSpecific.VP8.tl0PicIdx = webrtc::kNoTl0PicIdx;
(...skipping 26 matching lines...)
1118 1071
1119 // Generate a header describing a single fragment. 1072 // Generate a header describing a single fragment.
1120 webrtc::RTPFragmentationHeader header; 1073 webrtc::RTPFragmentationHeader header;
1121 memset(&header, 0, sizeof(header)); 1074 memset(&header, 0, sizeof(header));
1122 if (codecType_ == kVideoCodecVP8 || codecType_ == kVideoCodecVP9) { 1075 if (codecType_ == kVideoCodecVP8 || codecType_ == kVideoCodecVP9) {
1123 header.VerifyAndAllocateFragmentationHeader(1); 1076 header.VerifyAndAllocateFragmentationHeader(1);
1124 header.fragmentationOffset[0] = 0; 1077 header.fragmentationOffset[0] = 0;
1125 header.fragmentationLength[0] = image->_length; 1078 header.fragmentationLength[0] = image->_length;
1126 header.fragmentationPlType[0] = 0; 1079 header.fragmentationPlType[0] = 0;
1127 header.fragmentationTimeDiff[0] = 0; 1080 header.fragmentationTimeDiff[0] = 0;
1128 if (codecType_ == kVideoCodecVP8 && scale_) { 1081 if (codecType_ == kVideoCodecVP8) {
1129 int qp; 1082 int qp;
1130 if (webrtc::vp8::GetQp(payload, payload_size, &qp)) { 1083 if (webrtc::vp8::GetQp(payload, payload_size, &qp)) {
1131 current_acc_qp_ += qp; 1084 current_acc_qp_ += qp;
1132 quality_scaler_.ReportQP(qp);
1133 image->qp_ = qp; 1085 image->qp_ = qp;
1134 } 1086 }
1135 } 1087 }
1136 } else if (codecType_ == kVideoCodecH264) { 1088 } else if (codecType_ == kVideoCodecH264) {
1137 if (scale_) { 1089 h264_bitstream_parser_.ParseBitstream(payload, payload_size);
1138 h264_bitstream_parser_.ParseBitstream(payload, payload_size); 1090 int qp;
1139 int qp; 1091 if (h264_bitstream_parser_.GetLastSliceQp(&qp)) {
1140 if (h264_bitstream_parser_.GetLastSliceQp(&qp)) { 1092 current_acc_qp_ += qp;
1141 current_acc_qp_ += qp; 1093 image->qp_ = qp;
1142 quality_scaler_.ReportQP(qp);
1143 }
1144 } 1094 }
1145 // For H.264 search for start codes. 1095 // For H.264 search for start codes.
1146 int32_t scPositions[MAX_NALUS_PERFRAME + 1] = {}; 1096 int32_t scPositions[MAX_NALUS_PERFRAME + 1] = {};
1147 int32_t scPositionsLength = 0; 1097 int32_t scPositionsLength = 0;
1148 int32_t scPosition = 0; 1098 int32_t scPosition = 0;
1149 while (scPositionsLength < MAX_NALUS_PERFRAME) { 1099 while (scPositionsLength < MAX_NALUS_PERFRAME) {
1150 int32_t naluPosition = NextNaluPosition( 1100 int32_t naluPosition = NextNaluPosition(
1151 payload + scPosition, payload_size - scPosition); 1101 payload + scPosition, payload_size - scPosition);
1152 if (naluPosition < 0) { 1102 if (naluPosition < 0) {
1153 break; 1103 break;
(...skipping 80 matching lines...)
1234 ". QP: " << (current_acc_qp_ / current_frames_divider) << 1184 ". QP: " << (current_acc_qp_ / current_frames_divider) <<
1235 " for last " << statistic_time_ms << " ms."; 1185 " for last " << statistic_time_ms << " ms.";
1236 stat_start_time_ms_ = rtc::TimeMillis(); 1186 stat_start_time_ms_ = rtc::TimeMillis();
1237 current_frames_ = 0; 1187 current_frames_ = 0;
1238 current_bytes_ = 0; 1188 current_bytes_ = 0;
1239 current_acc_qp_ = 0; 1189 current_acc_qp_ = 0;
1240 current_encoding_time_ms_ = 0; 1190 current_encoding_time_ms_ = 0;
1241 } 1191 }
1242 } 1192 }
1243 1193
1194 webrtc::VideoEncoder::ScalingSettings
1195 MediaCodecVideoEncoder::GetScalingSettings() const {
1196 return VideoEncoder::ScalingSettings(scale_);
1197 }
1198
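GetScalingSettings() is how the encoder now advertises whether the caller should run QP-based quality scaling on its behalf, instead of the encoder scaling its own input. A minimal sketch of how a caller might consult it; the exact ScalingSettings fields (an enabled flag) are assumed here, not taken from this patch:

    // Sketch only: the caller checks the encoder's preference and, if scaling
    // is enabled, sets up a QualityScaler fed by the reported QP values.
    void MaybeEnableQualityScaling(const webrtc::VideoEncoder& encoder) {
      const webrtc::VideoEncoder::ScalingSettings settings =
          encoder.GetScalingSettings();
      if (settings.enabled) {
        // Create a QualityScaler driven by EncodedImage::qp_ and dropped-frame
        // reports; adapt resolution through the video source when it fires.
      }
    }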
1244 int32_t MediaCodecVideoEncoder::NextNaluPosition( 1199 int32_t MediaCodecVideoEncoder::NextNaluPosition(
1245 uint8_t *buffer, size_t buffer_size) { 1200 uint8_t *buffer, size_t buffer_size) {
1246 if (buffer_size < H264_SC_LENGTH) { 1201 if (buffer_size < H264_SC_LENGTH) {
1247 return -1; 1202 return -1;
1248 } 1203 }
1249 uint8_t *head = buffer; 1204 uint8_t *head = buffer;
1250 // Set end buffer pointer to 4 bytes before actual buffer end so we can 1205 // Set end buffer pointer to 4 bytes before actual buffer end so we can
1251 // access head[1], head[2] and head[3] in a loop without buffer overrun. 1206 // access head[1], head[2] and head[3] in a loop without buffer overrun.
1252 uint8_t *end = buffer + buffer_size - H264_SC_LENGTH; 1207 uint8_t *end = buffer + buffer_size - H264_SC_LENGTH;
1253 1208
(...skipping 13 matching lines...)
1267 if (head[3] != 0x01) { // got 000000xx 1222 if (head[3] != 0x01) { // got 000000xx
1268 head++; // xx != 1, continue searching. 1223 head++; // xx != 1, continue searching.
1269 continue; 1224 continue;
1270 } 1225 }
1271 return (int32_t)(head - buffer); 1226 return (int32_t)(head - buffer);
1272 } 1227 }
1273 return -1; 1228 return -1;
1274 } 1229 }
1275 1230
1276 void MediaCodecVideoEncoder::OnDroppedFrame() { 1231 void MediaCodecVideoEncoder::OnDroppedFrame() {
1277 // Methods running on the codec thread should call OnDroppedFrameOnCodecThread
1278 // directly.
1279 RTC_DCHECK(!codec_thread_checker_.CalledOnValidThread()); 1232 RTC_DCHECK(!codec_thread_checker_.CalledOnValidThread());
1280 codec_thread_->Invoke<void>(
1281 RTC_FROM_HERE,
1282 Bind(&MediaCodecVideoEncoder::OnDroppedFrameOnCodecThread, this));
1283 }
1284
1285 void MediaCodecVideoEncoder::OnDroppedFrameOnCodecThread() {
1286 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
1287 // Report dropped frame to quality_scaler_.
1288 if (scale_)
1289 quality_scaler_.ReportDroppedFrame();
1290 } 1233 }
1291 1234
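With the QualityScaler gone from this class, the codec-thread hop for drop reporting is no longer needed; QP and dropped-frame feedback is expected to be collected wherever the scaler now lives. A hypothetical sketch of that feedback path (names are illustrative, not an actual WebRTC interface):

    // Illustrative only: per-frame feedback the scaler's owner would gather.
    // QP now travels on the encoded image itself (image->qp_, set above).
    class QualityFeedback {
     public:
      void OnEncodedImage(const webrtc::EncodedImage& image) {
        if (image.qp_ >= 0) {
          qp_sum_ += image.qp_;
          ++qp_samples_;
        }
      }
      void OnFrameDropped() { ++dropped_frames_; }

     private:
      int qp_sum_ = 0;
      int qp_samples_ = 0;
      int dropped_frames_ = 0;
    };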
1292 const char* MediaCodecVideoEncoder::ImplementationName() const { 1235 const char* MediaCodecVideoEncoder::ImplementationName() const {
1293 return "MediaCodec"; 1236 return "MediaCodec";
1294 } 1237 }
1295 1238
1296 MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() 1239 MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory()
1297 : egl_context_(nullptr) { 1240 : egl_context_(nullptr) {
1298 JNIEnv* jni = AttachCurrentThreadIfNeeded(); 1241 JNIEnv* jni = AttachCurrentThreadIfNeeded();
1299 ScopedLocalRefFrame local_ref_frame(jni); 1242 ScopedLocalRefFrame local_ref_frame(jni);
(...skipping 70 matching lines...)
1370 return supported_codecs_; 1313 return supported_codecs_;
1371 } 1314 }
1372 1315
1373 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( 1316 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder(
1374 webrtc::VideoEncoder* encoder) { 1317 webrtc::VideoEncoder* encoder) {
1375 ALOGD << "Destroy video encoder."; 1318 ALOGD << "Destroy video encoder.";
1376 delete encoder; 1319 delete encoder;
1377 } 1320 }
1378 1321
1379 } // namespace webrtc_jni 1322 } // namespace webrtc_jni
