Chromium Code Reviews

Side by Side Diff: webrtc/api/android/jni/androidmediaencoder_jni.cc

Issue 2398963003: Move usage of QualityScaler to ViEEncoder. (Closed)
Patch Set: implement getQPThresholds for various wrappers (created 4 years, 2 months ago)
1 /* 1 /*
2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
3 * 3 *
4 * Use of this source code is governed by a BSD-style license 4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source 5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found 6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may 7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree. 8 * be found in the AUTHORS file in the root of the source tree.
9 */ 9 */
10 10
(...skipping 13 matching lines...)
24 #include "webrtc/api/android/jni/native_handle_impl.h" 24 #include "webrtc/api/android/jni/native_handle_impl.h"
25 #include "webrtc/base/bind.h" 25 #include "webrtc/base/bind.h"
26 #include "webrtc/base/checks.h" 26 #include "webrtc/base/checks.h"
27 #include "webrtc/base/logging.h" 27 #include "webrtc/base/logging.h"
28 #include "webrtc/base/thread.h" 28 #include "webrtc/base/thread.h"
29 #include "webrtc/base/thread_checker.h" 29 #include "webrtc/base/thread_checker.h"
30 #include "webrtc/base/timeutils.h" 30 #include "webrtc/base/timeutils.h"
31 #include "webrtc/common_types.h" 31 #include "webrtc/common_types.h"
32 #include "webrtc/modules/video_coding/include/video_codec_interface.h" 32 #include "webrtc/modules/video_coding/include/video_codec_interface.h"
33 #include "webrtc/modules/video_coding/utility/h264_bitstream_parser.h" 33 #include "webrtc/modules/video_coding/utility/h264_bitstream_parser.h"
34 #include "webrtc/modules/video_coding/utility/vp8_header_parser.h"
34 #include "webrtc/modules/video_coding/utility/quality_scaler.h" 35 #include "webrtc/modules/video_coding/utility/quality_scaler.h"
35 #include "webrtc/modules/video_coding/utility/vp8_header_parser.h"
36 #include "webrtc/system_wrappers/include/field_trial.h" 36 #include "webrtc/system_wrappers/include/field_trial.h"
37 #include "webrtc/system_wrappers/include/logcat_trace_context.h" 37 #include "webrtc/system_wrappers/include/logcat_trace_context.h"
38 38
39 using rtc::Bind; 39 using rtc::Bind;
40 using rtc::Thread; 40 using rtc::Thread;
41 using rtc::ThreadManager; 41 using rtc::ThreadManager;
42 42
43 using webrtc::CodecSpecificInfo; 43 using webrtc::CodecSpecificInfo;
44 using webrtc::EncodedImage; 44 using webrtc::EncodedImage;
45 using webrtc::VideoFrame; 45 using webrtc::VideoFrame;
(...skipping 103 matching lines...)
149 const int64_t frame_input_time_ms); 149 const int64_t frame_input_time_ms);
150 bool EncodeByteBufferOnCodecThread(JNIEnv* jni, 150 bool EncodeByteBufferOnCodecThread(JNIEnv* jni,
151 bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index); 151 bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index);
152 bool EncodeTextureOnCodecThread(JNIEnv* jni, 152 bool EncodeTextureOnCodecThread(JNIEnv* jni,
153 bool key_frame, const webrtc::VideoFrame& frame); 153 bool key_frame, const webrtc::VideoFrame& frame);
154 154
155 int32_t RegisterEncodeCompleteCallbackOnCodecThread( 155 int32_t RegisterEncodeCompleteCallbackOnCodecThread(
156 webrtc::EncodedImageCallback* callback); 156 webrtc::EncodedImageCallback* callback);
157 int32_t ReleaseOnCodecThread(); 157 int32_t ReleaseOnCodecThread();
158 int32_t SetRatesOnCodecThread(uint32_t new_bit_rate, uint32_t frame_rate); 158 int32_t SetRatesOnCodecThread(uint32_t new_bit_rate, uint32_t frame_rate);
159 void OnDroppedFrameOnCodecThread();
160 159
161 // Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members. 160 // Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members.
162 int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info); 161 int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info);
163 jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info); 162 jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info);
164 bool GetOutputBufferInfoIsKeyFrame(JNIEnv* jni, jobject j_output_buffer_info); 163 bool GetOutputBufferInfoIsKeyFrame(JNIEnv* jni, jobject j_output_buffer_info);
165 jlong GetOutputBufferInfoPresentationTimestampUs( 164 jlong GetOutputBufferInfoPresentationTimestampUs(
166 JNIEnv* jni, jobject j_output_buffer_info); 165 JNIEnv* jni, jobject j_output_buffer_info);
167 166
168 // Deliver any outputs pending in the MediaCodec to our |callback_| and return 167 // Deliver any outputs pending in the MediaCodec to our |callback_| and return
169 // true on success. 168 // true on success.
170 bool DeliverPendingOutputs(JNIEnv* jni); 169 bool DeliverPendingOutputs(JNIEnv* jni);
171 170
172 // Search for H.264 start codes. 171 // Search for H.264 start codes.
173 int32_t NextNaluPosition(uint8_t *buffer, size_t buffer_size); 172 int32_t NextNaluPosition(uint8_t *buffer, size_t buffer_size);
174 173
174 QualityScaler::Settings GetQPThresholds() const override;
175
175 // Displays encoder statistics. 176 // Displays encoder statistics.
176 void LogStatistics(bool force_log); 177 void LogStatistics(bool force_log);
177 178
178 // Type of video codec. 179 // Type of video codec.
179 VideoCodecType codecType_; 180 VideoCodecType codecType_;
180 181
181 // Valid all the time since RegisterEncodeCompleteCallback() Invoke()s to 182 // Valid all the time since RegisterEncodeCompleteCallback() Invoke()s to
182 // |codec_thread_| synchronously. 183 // |codec_thread_| synchronously.
183 webrtc::EncodedImageCallback* callback_; 184 webrtc::EncodedImageCallback* callback_;
184 185
(...skipping 67 matching lines...)
252 // |input_frame_infos_|. 253 // |input_frame_infos_|.
253 webrtc::VideoRotation output_rotation_; // Last output frame rotation from 254 webrtc::VideoRotation output_rotation_; // Last output frame rotation from
254 // |input_frame_infos_|. 255 // |input_frame_infos_|.
255 // Frame size in bytes fed to MediaCodec. 256 // Frame size in bytes fed to MediaCodec.
256 int yuv_size_; 257 int yuv_size_;
257 // True only between a callback_->Encoded() call returning a positive value 258
258 // and the next Encode() call being ignored. 259 // and the next Encode() call being ignored.
259 bool drop_next_input_frame_; 260 bool drop_next_input_frame_;
260 // Global references; must be deleted in Release(). 261 // Global references; must be deleted in Release().
261 std::vector<jobject> input_buffers_; 262 std::vector<jobject> input_buffers_;
262 QualityScaler quality_scaler_;
263 // Dynamic resolution change, off by default.
264 bool scale_;
265
266 // H264 bitstream parser, used to extract QP from encoded bitstreams.
267 webrtc::H264BitstreamParser h264_bitstream_parser_; 263 webrtc::H264BitstreamParser h264_bitstream_parser_;
268 264
269 // VP9 variables to populate codec specific structure. 265 // VP9 variables to populate codec specific structure.
270 webrtc::GofInfoVP9 gof_; // Contains each frame's temporal information for 266 webrtc::GofInfoVP9 gof_; // Contains each frame's temporal information for
271 // non-flexible VP9 mode. 267 // non-flexible VP9 mode.
272 uint8_t tl0_pic_idx_; 268 uint8_t tl0_pic_idx_;
273 size_t gof_idx_; 269 size_t gof_idx_;
274 270
275 // EGL context - owned by factory, should not be allocated/destroyed 271 // EGL context - owned by factory, should not be allocated/destroyed
276 // by MediaCodecVideoEncoder. 272 // by MediaCodecVideoEncoder.
(...skipping 98 matching lines...)
375 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; 371 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
376 } 372 }
377 // Factory should guard against other codecs being used with us. 373 // Factory should guard against other codecs being used with us.
378 RTC_CHECK(codec_settings->codecType == codecType_) 374 RTC_CHECK(codec_settings->codecType == codecType_)
379 << "Unsupported codec " << codec_settings->codecType << " for " 375 << "Unsupported codec " << codec_settings->codecType << " for "
380 << codecType_; 376 << codecType_;
381 377
382 codec_mode_ = codec_settings->mode; 378 codec_mode_ = codec_settings->mode;
383 int init_width = codec_settings->width; 379 int init_width = codec_settings->width;
384 int init_height = codec_settings->height; 380 int init_height = codec_settings->height;
385 // Scaling is disabled for VP9, but optionally enabled for VP8.
386 // TODO(pbos): Extract automaticResizeOn out of VP8 settings.
387 scale_ = false;
388 if (codecType_ == kVideoCodecVP8) {
389 scale_ = codec_settings->codecSpecific.VP8.automaticResizeOn;
magjed_webrtc 2016/10/19 13:00:12 I don't see that you have preserved this logic. If
kthelgason 2016/10/19 13:07:27 You're right. This is one of the items mentioned a
magjed_webrtc 2016/10/19 13:55:07 Sorry, I didn't see that comment in the descriptio
390 } else if (codecType_ != kVideoCodecVP9) {
391 scale_ = true;
392 }
393
394 ALOGD << "InitEncode request: " << init_width << " x " << init_height; 381 ALOGD << "InitEncode request: " << init_width << " x " << init_height;
395 ALOGD << "Encoder automatic resize " << (scale_ ? "enabled" : "disabled");
396
397 if (scale_) {
398 if (codecType_ == kVideoCodecVP8 || codecType_ == kVideoCodecH264) {
399 quality_scaler_.Init(codecType_, codec_settings->startBitrate,
400 codec_settings->width, codec_settings->height,
401 codec_settings->maxFramerate);
402 } else {
403 // When adding codec support to additional hardware codecs, also configure
404 // their QP thresholds for scaling.
405 RTC_NOTREACHED() << "Unsupported codec without configured QP thresholds.";
406 scale_ = false;
407 }
408 QualityScaler::Resolution res = quality_scaler_.GetScaledResolution();
409 init_width = res.width;
410 init_height = res.height;
411 ALOGD << "Scaled resolution: " << init_width << " x " << init_height;
412 }
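The block removed above is where InitEncode() previously made the per-codec scaling decision (VP8 only when automaticResizeOn was set, H.264 always, VP9 never) and then initialized quality_scaler_ locally; the exchange between magjed_webrtc and kthelgason asks whether that per-codec decision is preserved now that the QualityScaler moves to ViEEncoder. As a minimal sketch only, a codec-aware version of the new GetQPThresholds() override could look like the code below; the patch set as uploaded instead returns Settings(true) unconditionally (see the new definition further down), and codec_settings, and hence VP8.automaticResizeOn, is not reachable from this const, argument-less hook, which is the gap being flagged.

    // Hypothetical sketch, not part of this patch set: mirror the per-codec
    // decision removed from InitEncode() above. VP9 was never scaled by this
    // wrapper; VP8 and H.264 were. VP8.automaticResizeOn cannot be consulted
    // here because codec_settings is not in scope.
    QualityScaler::Settings MediaCodecVideoEncoder::GetQPThresholds() const {
      const bool enable_scaling =
          codecType_ == kVideoCodecVP8 || codecType_ == kVideoCodecH264;
      return QualityScaler::Settings(enable_scaling);
    }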
413 382
414 return codec_thread_->Invoke<int32_t>( 383 return codec_thread_->Invoke<int32_t>(
415 RTC_FROM_HERE, 384 RTC_FROM_HERE,
416 Bind(&MediaCodecVideoEncoder::InitEncodeOnCodecThread, this, init_width, 385 Bind(&MediaCodecVideoEncoder::InitEncodeOnCodecThread, this, init_width,
417 init_height, codec_settings->startBitrate, 386 init_height, codec_settings->startBitrate,
418 codec_settings->maxFramerate, 387 codec_settings->maxFramerate,
419 codec_settings->expect_encode_from_texture)); 388 codec_settings->expect_encode_from_texture));
420 } 389 }
421 390
422 int32_t MediaCodecVideoEncoder::Encode( 391 int32_t MediaCodecVideoEncoder::Encode(
(...skipping 214 matching lines...)
637 << ". TS: " << (int)(current_timestamp_us_ / 1000) 606 << ". TS: " << (int)(current_timestamp_us_ / 1000)
638 << ". Q: " << input_frame_infos_.size() << ". Fps: " << last_set_fps_ 607 << ". Q: " << input_frame_infos_.size() << ". Fps: " << last_set_fps_
639 << ". Kbps: " << last_set_bitrate_kbps_; 608 << ". Kbps: " << last_set_bitrate_kbps_;
640 } 609 }
641 610
642 if (drop_next_input_frame_) { 611 if (drop_next_input_frame_) {
643 ALOGW << "Encoder drop frame - failed callback."; 612 ALOGW << "Encoder drop frame - failed callback.";
644 drop_next_input_frame_ = false; 613 drop_next_input_frame_ = false;
645 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; 614 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
646 frames_dropped_media_encoder_++; 615 frames_dropped_media_encoder_++;
647 OnDroppedFrameOnCodecThread();
648 return WEBRTC_VIDEO_CODEC_OK; 616 return WEBRTC_VIDEO_CODEC_OK;
649 } 617 }
650 618
651 RTC_CHECK(frame_types->size() == 1) << "Unexpected stream count"; 619 RTC_CHECK(frame_types->size() == 1) << "Unexpected stream count";
652 620
653 // Check if we accumulated too many frames in encoder input buffers and drop 621 // Check if we accumulated too many frames in encoder input buffers and drop
654 // frame if so. 622 // frame if so.
655 if (input_frame_infos_.size() > MAX_ENCODER_Q_SIZE) { 623 if (input_frame_infos_.size() > MAX_ENCODER_Q_SIZE) {
656 ALOGD << "Already " << input_frame_infos_.size() 624 ALOGD << "Already " << input_frame_infos_.size()
657 << " frames in the queue, dropping" 625 << " frames in the queue, dropping"
658 << ". TS: " << (int)(current_timestamp_us_ / 1000) 626 << ". TS: " << (int)(current_timestamp_us_ / 1000)
659 << ". Fps: " << last_set_fps_ 627 << ". Fps: " << last_set_fps_
660 << ". Consecutive drops: " << consecutive_full_queue_frame_drops_; 628 << ". Consecutive drops: " << consecutive_full_queue_frame_drops_;
661 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; 629 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
662 consecutive_full_queue_frame_drops_++; 630 consecutive_full_queue_frame_drops_++;
663 if (consecutive_full_queue_frame_drops_ >= 631 if (consecutive_full_queue_frame_drops_ >=
664 ENCODER_STALL_FRAMEDROP_THRESHOLD) { 632 ENCODER_STALL_FRAMEDROP_THRESHOLD) {
665 ALOGE << "Encoder got stuck. Reset."; 633 ALOGE << "Encoder got stuck. Reset.";
666 ResetCodecOnCodecThread(); 634 ResetCodecOnCodecThread();
667 return WEBRTC_VIDEO_CODEC_ERROR; 635 return WEBRTC_VIDEO_CODEC_ERROR;
668 } 636 }
669 frames_dropped_media_encoder_++; 637 frames_dropped_media_encoder_++;
670 OnDroppedFrameOnCodecThread();
671 return WEBRTC_VIDEO_CODEC_OK; 638 return WEBRTC_VIDEO_CODEC_OK;
672 } 639 }
673 consecutive_full_queue_frame_drops_ = 0; 640 consecutive_full_queue_frame_drops_ = 0;
674 641
675 rtc::scoped_refptr<webrtc::VideoFrameBuffer> input_buffer( 642 rtc::scoped_refptr<webrtc::VideoFrameBuffer> input_buffer(
676 frame.video_frame_buffer()); 643 frame.video_frame_buffer());
677 if (scale_) {
678 // Check framerate before spatial resolution change.
679 quality_scaler_.OnEncodeFrame(frame.width(), frame.height());
680 const webrtc::QualityScaler::Resolution scaled_resolution =
681 quality_scaler_.GetScaledResolution();
682 if (scaled_resolution.width != frame.width() ||
683 scaled_resolution.height != frame.height()) {
684 if (input_buffer->native_handle() != nullptr) {
685 input_buffer = static_cast<AndroidTextureBuffer*>(input_buffer.get())
686 ->CropScaleAndRotate(frame.width(), frame.height(),
687 0, 0,
688 scaled_resolution.width,
689 scaled_resolution.height,
690 webrtc::kVideoRotation_0);
691 } else {
692 input_buffer = quality_scaler_.GetScaledBuffer(input_buffer);
693 }
694 }
695 }
696 644
697 VideoFrame input_frame(input_buffer, frame.timestamp(), 645 VideoFrame input_frame(input_buffer, frame.timestamp(),
698 frame.render_time_ms(), frame.rotation()); 646 frame.render_time_ms(), frame.rotation());
699 647
700 if (!MaybeReconfigureEncoderOnCodecThread(input_frame)) { 648 if (!MaybeReconfigureEncoderOnCodecThread(input_frame)) {
701 ALOGE << "Failed to reconfigure encoder."; 649 ALOGE << "Failed to reconfigure encoder.";
702 return WEBRTC_VIDEO_CODEC_ERROR; 650 return WEBRTC_VIDEO_CODEC_ERROR;
703 } 651 }
704 652
705 const bool key_frame = 653 const bool key_frame =
706 frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame; 654 frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame;
707 bool encode_status = true; 655 bool encode_status = true;
708 if (!input_frame.video_frame_buffer()->native_handle()) { 656 if (!input_frame.video_frame_buffer()->native_handle()) {
709 int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_, 657 int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_,
710 j_dequeue_input_buffer_method_); 658 j_dequeue_input_buffer_method_);
711 CHECK_EXCEPTION(jni); 659 CHECK_EXCEPTION(jni);
712 if (j_input_buffer_index == -1) { 660 if (j_input_buffer_index == -1) {
713 // Video codec falls behind - no input buffer available. 661 // Video codec falls behind - no input buffer available.
714 ALOGW << "Encoder drop frame - no input buffers available"; 662 ALOGW << "Encoder drop frame - no input buffers available";
715 if (frames_received_ > 1) { 663 if (frames_received_ > 1) {
716 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; 664 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
717 frames_dropped_media_encoder_++; 665 frames_dropped_media_encoder_++;
718 OnDroppedFrameOnCodecThread();
719 } else { 666 } else {
720 // Input buffers are not ready after codec initialization, HW is still 667 // Input buffers are not ready after codec initialization, HW is still
721 // allocating them - this is expected and should not result in drop 668
722 // frame report. 669 // frame report.
723 frames_received_ = 0; 670 frames_received_ = 0;
724 } 671 }
725 return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887. 672 return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887.
726 } else if (j_input_buffer_index == -2) { 673 } else if (j_input_buffer_index == -2) {
727 ResetCodecOnCodecThread(); 674 ResetCodecOnCodecThread();
728 return WEBRTC_VIDEO_CODEC_ERROR; 675 return WEBRTC_VIDEO_CODEC_ERROR;
(...skipping 146 matching lines...)
875 822
876 int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate, 823 int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate,
877 uint32_t frame_rate) { 824 uint32_t frame_rate) {
878 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); 825 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
879 frame_rate = (frame_rate < MAX_ALLOWED_VIDEO_FPS) ? 826 frame_rate = (frame_rate < MAX_ALLOWED_VIDEO_FPS) ?
880 frame_rate : MAX_ALLOWED_VIDEO_FPS; 827 frame_rate : MAX_ALLOWED_VIDEO_FPS;
881 if (last_set_bitrate_kbps_ == new_bit_rate && 828 if (last_set_bitrate_kbps_ == new_bit_rate &&
882 last_set_fps_ == frame_rate) { 829 last_set_fps_ == frame_rate) {
883 return WEBRTC_VIDEO_CODEC_OK; 830 return WEBRTC_VIDEO_CODEC_OK;
884 } 831 }
885 if (scale_) {
886 quality_scaler_.ReportFramerate(frame_rate);
887 }
888 JNIEnv* jni = AttachCurrentThreadIfNeeded(); 832 JNIEnv* jni = AttachCurrentThreadIfNeeded();
889 ScopedLocalRefFrame local_ref_frame(jni); 833 ScopedLocalRefFrame local_ref_frame(jni);
890 if (new_bit_rate > 0) { 834 if (new_bit_rate > 0) {
891 last_set_bitrate_kbps_ = new_bit_rate; 835 last_set_bitrate_kbps_ = new_bit_rate;
892 } 836 }
893 if (frame_rate > 0) { 837 if (frame_rate > 0) {
894 last_set_fps_ = frame_rate; 838 last_set_fps_ = frame_rate;
895 } 839 }
896 bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_, 840 bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
897 j_set_rates_method_, 841 j_set_rates_method_,
(...skipping 82 matching lines...)
980 std::unique_ptr<webrtc::EncodedImage> image( 924 std::unique_ptr<webrtc::EncodedImage> image(
981 new webrtc::EncodedImage(payload, payload_size, payload_size)); 925 new webrtc::EncodedImage(payload, payload_size, payload_size));
982 image->_encodedWidth = width_; 926 image->_encodedWidth = width_;
983 image->_encodedHeight = height_; 927 image->_encodedHeight = height_;
984 image->_timeStamp = output_timestamp_; 928 image->_timeStamp = output_timestamp_;
985 image->capture_time_ms_ = output_render_time_ms_; 929 image->capture_time_ms_ = output_render_time_ms_;
986 image->rotation_ = output_rotation_; 930 image->rotation_ = output_rotation_;
987 image->_frameType = 931 image->_frameType =
988 (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta); 932 (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta);
989 image->_completeFrame = true; 933 image->_completeFrame = true;
990 image->adapt_reason_.quality_resolution_downscales =
991 scale_ ? quality_scaler_.downscale_shift() : -1;
992
993 webrtc::CodecSpecificInfo info; 934 webrtc::CodecSpecificInfo info;
994 memset(&info, 0, sizeof(info)); 935 memset(&info, 0, sizeof(info));
995 info.codecType = codecType_; 936 info.codecType = codecType_;
996 if (codecType_ == kVideoCodecVP8) { 937 if (codecType_ == kVideoCodecVP8) {
997 info.codecSpecific.VP8.pictureId = picture_id_; 938 info.codecSpecific.VP8.pictureId = picture_id_;
998 info.codecSpecific.VP8.nonReference = false; 939 info.codecSpecific.VP8.nonReference = false;
999 info.codecSpecific.VP8.simulcastIdx = 0; 940 info.codecSpecific.VP8.simulcastIdx = 0;
1000 info.codecSpecific.VP8.temporalIdx = webrtc::kNoTemporalIdx; 941 info.codecSpecific.VP8.temporalIdx = webrtc::kNoTemporalIdx;
1001 info.codecSpecific.VP8.layerSync = false; 942 info.codecSpecific.VP8.layerSync = false;
1002 info.codecSpecific.VP8.tl0PicIdx = webrtc::kNoTl0PicIdx; 943 info.codecSpecific.VP8.tl0PicIdx = webrtc::kNoTl0PicIdx;
(...skipping 26 matching lines...)
1029 970
1030 // Generate a header describing a single fragment. 971 // Generate a header describing a single fragment.
1031 webrtc::RTPFragmentationHeader header; 972 webrtc::RTPFragmentationHeader header;
1032 memset(&header, 0, sizeof(header)); 973 memset(&header, 0, sizeof(header));
1033 if (codecType_ == kVideoCodecVP8 || codecType_ == kVideoCodecVP9) { 974 if (codecType_ == kVideoCodecVP8 || codecType_ == kVideoCodecVP9) {
1034 header.VerifyAndAllocateFragmentationHeader(1); 975 header.VerifyAndAllocateFragmentationHeader(1);
1035 header.fragmentationOffset[0] = 0; 976 header.fragmentationOffset[0] = 0;
1036 header.fragmentationLength[0] = image->_length; 977 header.fragmentationLength[0] = image->_length;
1037 header.fragmentationPlType[0] = 0; 978 header.fragmentationPlType[0] = 0;
1038 header.fragmentationTimeDiff[0] = 0; 979 header.fragmentationTimeDiff[0] = 0;
1039 if (codecType_ == kVideoCodecVP8 && scale_) { 980 if (codecType_ == kVideoCodecVP8) {
1040 int qp; 981 int qp;
1041 if (webrtc::vp8::GetQp(payload, payload_size, &qp)) { 982 if (webrtc::vp8::GetQp(payload, payload_size, &qp)) {
1042 current_acc_qp_ += qp; 983 current_acc_qp_ += qp;
1043 quality_scaler_.ReportQP(qp);
1044 image->qp_ = qp; 984 image->qp_ = qp;
1045 } 985 }
1046 } 986 }
1047 } else if (codecType_ == kVideoCodecH264) { 987 } else if (codecType_ == kVideoCodecH264) {
1048 if (scale_) { 988 h264_bitstream_parser_.ParseBitstream(payload, payload_size);
1049 h264_bitstream_parser_.ParseBitstream(payload, payload_size); 989 int qp;
1050 int qp; 990 if (h264_bitstream_parser_.GetLastSliceQp(&qp)) {
1051 if (h264_bitstream_parser_.GetLastSliceQp(&qp)) { 991 current_acc_qp_ += qp;
1052 current_acc_qp_ += qp; 992 image->qp_ = qp;
1053 quality_scaler_.ReportQP(qp);
1054 }
1055 } 993 }
1056 // For H.264 search for start codes. 994 // For H.264 search for start codes.
1057 int32_t scPositions[MAX_NALUS_PERFRAME + 1] = {}; 995 int32_t scPositions[MAX_NALUS_PERFRAME + 1] = {};
1058 int32_t scPositionsLength = 0; 996 int32_t scPositionsLength = 0;
1059 int32_t scPosition = 0; 997 int32_t scPosition = 0;
1060 while (scPositionsLength < MAX_NALUS_PERFRAME) { 998 while (scPositionsLength < MAX_NALUS_PERFRAME) {
1061 int32_t naluPosition = NextNaluPosition( 999 int32_t naluPosition = NextNaluPosition(
1062 payload + scPosition, payload_size - scPosition); 1000 payload + scPosition, payload_size - scPosition);
1063 if (naluPosition < 0) { 1001 if (naluPosition < 0) {
1064 break; 1002 break;
(...skipping 83 matching lines...)
1148 ". QP: " << (current_acc_qp_ / current_frames_divider) << 1086 ". QP: " << (current_acc_qp_ / current_frames_divider) <<
1149 " for last " << statistic_time_ms << " ms."; 1087 " for last " << statistic_time_ms << " ms.";
1150 stat_start_time_ms_ = rtc::TimeMillis(); 1088 stat_start_time_ms_ = rtc::TimeMillis();
1151 current_frames_ = 0; 1089 current_frames_ = 0;
1152 current_bytes_ = 0; 1090 current_bytes_ = 0;
1153 current_acc_qp_ = 0; 1091 current_acc_qp_ = 0;
1154 current_encoding_time_ms_ = 0; 1092 current_encoding_time_ms_ = 0;
1155 } 1093 }
1156 } 1094 }
1157 1095
1096 QualityScaler::Settings MediaCodecVideoEncoder::GetQPThresholds() const {
1097 return QualityScaler::Settings(true);
1098 }
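For orientation, a rough sketch of the consuming side this CL targets: after the move, the QualityScaler is expected to live in ViEEncoder rather than in this wrapper, and the hook above is how the wrapper advertises whether it wants scaling. The snippet below is purely illustrative; encoder_, enable_scaling, and the surrounding control flow are assumptions, and only GetQPThresholds() and QualityScaler::Settings come from this patch.

    // Hypothetical consumer-side sketch, not from this CL: the new owner of
    // the QualityScaler asks the encoder for its settings before creating one.
    const QualityScaler::Settings settings = encoder_->GetQPThresholds();
    if (settings.enable_scaling) {  // field name is an assumption
      // ... construct and drive a QualityScaler outside the encoder ...
    }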
1099
1158 int32_t MediaCodecVideoEncoder::NextNaluPosition( 1100 int32_t MediaCodecVideoEncoder::NextNaluPosition(
1159 uint8_t *buffer, size_t buffer_size) { 1101 uint8_t *buffer, size_t buffer_size) {
1160 if (buffer_size < H264_SC_LENGTH) { 1102 if (buffer_size < H264_SC_LENGTH) {
1161 return -1; 1103 return -1;
1162 } 1104 }
1163 uint8_t *head = buffer; 1105 uint8_t *head = buffer;
1164 // Set end buffer pointer to 4 bytes before actual buffer end so we can 1106 // Set end buffer pointer to 4 bytes before actual buffer end so we can
1165 // access head[1], head[2] and head[3] in a loop without buffer overrun. 1107 // access head[1], head[2] and head[3] in a loop without buffer overrun.
1166 uint8_t *end = buffer + buffer_size - H264_SC_LENGTH; 1108 uint8_t *end = buffer + buffer_size - H264_SC_LENGTH;
1167 1109
(...skipping 13 matching lines...)
1181 if (head[3] != 0x01) { // got 000000xx 1123 if (head[3] != 0x01) { // got 000000xx
1182 head++; // xx != 1, continue searching. 1124 head++; // xx != 1, continue searching.
1183 continue; 1125 continue;
1184 } 1126 }
1185 return (int32_t)(head - buffer); 1127 return (int32_t)(head - buffer);
1186 } 1128 }
1187 return -1; 1129 return -1;
1188 } 1130 }
1189 1131
1190 void MediaCodecVideoEncoder::OnDroppedFrame() { 1132 void MediaCodecVideoEncoder::OnDroppedFrame() {
1191 // Methods running on the codec thread should call OnDroppedFrameOnCodecThread
1192 // directly.
1193 RTC_DCHECK(!codec_thread_checker_.CalledOnValidThread()); 1133 RTC_DCHECK(!codec_thread_checker_.CalledOnValidThread());
1194 codec_thread_->Invoke<void>(
1195 RTC_FROM_HERE,
1196 Bind(&MediaCodecVideoEncoder::OnDroppedFrameOnCodecThread, this));
1197 }
1198
1199 void MediaCodecVideoEncoder::OnDroppedFrameOnCodecThread() {
1200 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
1201 // Report dropped frame to quality_scaler_.
1202 if (scale_)
1203 quality_scaler_.ReportDroppedFrame();
1204 } 1134 }
1205 1135
1206 const char* MediaCodecVideoEncoder::ImplementationName() const { 1136 const char* MediaCodecVideoEncoder::ImplementationName() const {
1207 return "MediaCodec"; 1137 return "MediaCodec";
1208 } 1138 }
1209 1139
1210 MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() 1140 MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory()
1211 : egl_context_(nullptr) { 1141 : egl_context_(nullptr) {
1212 JNIEnv* jni = AttachCurrentThreadIfNeeded(); 1142 JNIEnv* jni = AttachCurrentThreadIfNeeded();
1213 ScopedLocalRefFrame local_ref_frame(jni); 1143 ScopedLocalRefFrame local_ref_frame(jni);
(...skipping 76 matching lines...)
1290 return supported_codecs_; 1220 return supported_codecs_;
1291 } 1221 }
1292 1222
1293 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( 1223 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder(
1294 webrtc::VideoEncoder* encoder) { 1224 webrtc::VideoEncoder* encoder) {
1295 ALOGD << "Destroy video encoder."; 1225 ALOGD << "Destroy video encoder.";
1296 delete encoder; 1226 delete encoder;
1297 } 1227 }
1298 1228
1299 } // namespace webrtc_jni 1229 } // namespace webrtc_jni