Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(237)

Side by Side Diff: webrtc/api/java/jni/androidmediaencoder_jni.cc

Issue 1889463002: Remove field trial for scaling down MediaCodec. (Closed) Base URL: https://chromium.googlesource.com/external/webrtc.git@master
Patch Set: Created 4 years, 8 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « no previous file | webrtc/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 /* 1 /*
2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
3 * 3 *
4 * Use of this source code is governed by a BSD-style license 4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source 5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found 6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may 7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree. 8 * be found in the AUTHORS file in the root of the source tree.
9 */ 9 */
10 10
(...skipping 224 matching lines...) Expand 10 before | Expand all | Expand 10 after
235 int64_t output_render_time_ms_; // Last output frame render time from 235 int64_t output_render_time_ms_; // Last output frame render time from
236 // render_times_ms_ queue. 236 // render_times_ms_ queue.
237 // Frame size in bytes fed to MediaCodec. 237 // Frame size in bytes fed to MediaCodec.
238 int yuv_size_; 238 int yuv_size_;
239 // True only between a callback_->Encoded() call returning a positive value 239 // True only between a callback_->Encoded() call returning a positive value
240 // and the next Encode() call being ignored. 240 // and the next Encode() call being ignored.
241 bool drop_next_input_frame_; 241 bool drop_next_input_frame_;
242 // Global references; must be deleted in Release(). 242 // Global references; must be deleted in Release().
243 std::vector<jobject> input_buffers_; 243 std::vector<jobject> input_buffers_;
244 QualityScaler quality_scaler_; 244 QualityScaler quality_scaler_;
245 // Dynamic resolution change, off by default.
246 bool scale_;
247 245
248 // H264 bitstream parser, used to extract QP from encoded bitstreams. 246 // H264 bitstream parser, used to extract QP from encoded bitstreams.
249 webrtc::H264BitstreamParser h264_bitstream_parser_; 247 webrtc::H264BitstreamParser h264_bitstream_parser_;
250 248
251 // VP9 variables to populate codec specific structure. 249 // VP9 variables to populate codec specific structure.
252 webrtc::GofInfoVP9 gof_; // Contains each frame's temporal information for 250 webrtc::GofInfoVP9 gof_; // Contains each frame's temporal information for
253 // non-flexible VP9 mode. 251 // non-flexible VP9 mode.
254 uint8_t tl0_pic_idx_; 252 uint8_t tl0_pic_idx_;
255 size_t gof_idx_; 253 size_t gof_idx_;
256 254
(...skipping 101 matching lines...) Expand 10 before | Expand all | Expand 10 after
358 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; 356 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
359 } 357 }
360 // Factory should guard against other codecs being used with us. 358 // Factory should guard against other codecs being used with us.
361 RTC_CHECK(codec_settings->codecType == codecType_) 359 RTC_CHECK(codec_settings->codecType == codecType_)
362 << "Unsupported codec " << codec_settings->codecType << " for " 360 << "Unsupported codec " << codec_settings->codecType << " for "
363 << codecType_; 361 << codecType_;
364 362
365 codec_mode_ = codec_settings->mode; 363 codec_mode_ = codec_settings->mode;
366 int init_width = codec_settings->width; 364 int init_width = codec_settings->width;
367 int init_height = codec_settings->height; 365 int init_height = codec_settings->height;
368 scale_ = (codecType_ != kVideoCodecVP9) && (webrtc::field_trial::FindFullName( 366 ALOGD << "InitEncode request: " << init_width << " x " << init_height;
369 "WebRTC-MediaCodecVideoEncoder-AutomaticResize") == "Enabled");
370 367
371 ALOGD << "InitEncode request: " << init_width << " x " << init_height; 368 if (codecType_ == kVideoCodecVP8) {
372 ALOGD << "Encoder automatic resize " << (scale_ ? "enabled" : "disabled"); 369 // QP is obtained from VP8-bitstream for HW, so the QP corresponds to the
373 370 // (internal) range: [0, 127]. And we cannot change QP_max in HW, so it is
374 if (scale_) { 371 // always = 127. Note that in SW, QP is that of the user-level range [0,
375 if (codecType_ == kVideoCodecVP8) { 372 // 63].
376 // QP is obtained from VP8-bitstream for HW, so the QP corresponds to the 373 const int kLowQpThreshold = 29;
377 // (internal) range: [0, 127]. And we cannot change QP_max in HW, so it is 374 const int kBadQpThreshold = 90;
378 // always = 127. Note that in SW, QP is that of the user-level range [0, 375 quality_scaler_.Init(kLowQpThreshold, kBadQpThreshold, false,
379 // 63]. 376 codec_settings->startBitrate, codec_settings->width,
380 const int kLowQpThreshold = 29; 377 codec_settings->height, codec_settings->maxFramerate);
381 const int kBadQpThreshold = 90; 378 } else if (codecType_ == kVideoCodecH264) {
382 quality_scaler_.Init(kLowQpThreshold, kBadQpThreshold, false, 379 // H264 QP is in the range [0, 51].
383 codec_settings->startBitrate, codec_settings->width, 380 const int kLowQpThreshold = 21;
384 codec_settings->height, 381 const int kBadQpThreshold = 33;
385 codec_settings->maxFramerate); 382 quality_scaler_.Init(kLowQpThreshold, kBadQpThreshold, false,
386 } else if (codecType_ == kVideoCodecH264) { 383 codec_settings->startBitrate, codec_settings->width,
387 // H264 QP is in the range [0, 51]. 384 codec_settings->height, codec_settings->maxFramerate);
388 const int kLowQpThreshold = 21; 385 } else {
389 const int kBadQpThreshold = 33; 386 // When adding codec support to additional hardware codecs, also configure
390 quality_scaler_.Init(kLowQpThreshold, kBadQpThreshold, false, 387 // their QP thresholds for scaling.
391 codec_settings->startBitrate, codec_settings->width, 388 RTC_NOTREACHED() << "Unsupported codec without configured QP thresholds.";
392 codec_settings->height,
393 codec_settings->maxFramerate);
394 } else {
395 // When adding codec support to additional hardware codecs, also configure
396 // their QP thresholds for scaling.
397 RTC_NOTREACHED() << "Unsupported codec without configured QP thresholds.";
398 scale_ = false;
399 }
400 quality_scaler_.SetMinResolution(kMinDimension, kMinDimension);
401 QualityScaler::Resolution res = quality_scaler_.GetScaledResolution();
402 init_width = std::max(res.width, kMinDimension);
403 init_height = std::max(res.height, kMinDimension);
404 ALOGD << "Scaled resolution: " << init_width << " x " << init_height;
405 } 389 }
390 quality_scaler_.SetMinResolution(kMinDimension, kMinDimension);
391 QualityScaler::Resolution res = quality_scaler_.GetScaledResolution();
392 init_width = std::max(res.width, kMinDimension);
393 init_height = std::max(res.height, kMinDimension);
394 ALOGD << "Scaled resolution: " << init_width << " x " << init_height;
406 395
407 return codec_thread_->Invoke<int32_t>( 396 return codec_thread_->Invoke<int32_t>(
408 Bind(&MediaCodecVideoEncoder::InitEncodeOnCodecThread, 397 Bind(&MediaCodecVideoEncoder::InitEncodeOnCodecThread,
409 this, 398 this,
410 init_width, 399 init_width,
411 init_height, 400 init_height,
412 codec_settings->startBitrate, 401 codec_settings->startBitrate,
413 codec_settings->maxFramerate, 402 codec_settings->maxFramerate,
414 false /* use_surface */)); 403 false /* use_surface */));
415 } 404 }
(...skipping 239 matching lines...) Expand 10 before | Expand all | Expand 10 after
655 ResetCodecOnCodecThread(); 644 ResetCodecOnCodecThread();
656 return WEBRTC_VIDEO_CODEC_ERROR; 645 return WEBRTC_VIDEO_CODEC_ERROR;
657 } 646 }
658 frames_dropped_media_encoder_++; 647 frames_dropped_media_encoder_++;
659 OnDroppedFrame(); 648 OnDroppedFrame();
660 return WEBRTC_VIDEO_CODEC_OK; 649 return WEBRTC_VIDEO_CODEC_OK;
661 } 650 }
662 consecutive_full_queue_frame_drops_ = 0; 651 consecutive_full_queue_frame_drops_ = 0;
663 652
664 VideoFrame input_frame = frame; 653 VideoFrame input_frame = frame;
665 if (scale_) { 654 // Check framerate before spatial resolution change.
666 // Check framerate before spatial resolution change. 655 quality_scaler_.OnEncodeFrame(frame);
667 quality_scaler_.OnEncodeFrame(frame); 656 const webrtc::QualityScaler::Resolution scaled_resolution =
AlexG 2016/04/13 16:46:12 How this will work for VP9 case? We do not report
668 const webrtc::QualityScaler::Resolution scaled_resolution = 657 quality_scaler_.GetScaledResolution();
669 quality_scaler_.GetScaledResolution(); 658 if (scaled_resolution.width != frame.width() ||
670 if (scaled_resolution.width != frame.width() || 659 scaled_resolution.height != frame.height()) {
671 scaled_resolution.height != frame.height()) { 660 if (frame.native_handle() != nullptr) {
672 if (frame.native_handle() != nullptr) { 661 rtc::scoped_refptr<webrtc::VideoFrameBuffer> scaled_buffer(
673 rtc::scoped_refptr<webrtc::VideoFrameBuffer> scaled_buffer( 662 static_cast<AndroidTextureBuffer*>(frame.video_frame_buffer().get())
674 static_cast<AndroidTextureBuffer*>( 663 ->ScaleAndRotate(scaled_resolution.width,
675 frame.video_frame_buffer().get())->ScaleAndRotate( 664 scaled_resolution.height,
676 scaled_resolution.width, 665 webrtc::kVideoRotation_0));
677 scaled_resolution.height, 666 input_frame.set_video_frame_buffer(scaled_buffer);
678 webrtc::kVideoRotation_0)); 667 } else {
679 input_frame.set_video_frame_buffer(scaled_buffer); 668 input_frame = quality_scaler_.GetScaledFrame(frame);
680 } else {
681 input_frame = quality_scaler_.GetScaledFrame(frame);
682 }
683 } 669 }
684 } 670 }
685 671
686 if (!MaybeReconfigureEncoderOnCodecThread(input_frame)) { 672 if (!MaybeReconfigureEncoderOnCodecThread(input_frame)) {
687 ALOGE << "Failed to reconfigure encoder."; 673 ALOGE << "Failed to reconfigure encoder.";
688 return WEBRTC_VIDEO_CODEC_ERROR; 674 return WEBRTC_VIDEO_CODEC_ERROR;
689 } 675 }
690 676
691 // Save time when input frame is sent to the encoder input. 677 // Save time when input frame is sent to the encoder input.
692 frame_rtc_times_ms_.push_back(GetCurrentTimeMs()); 678 frame_rtc_times_ms_.push_back(GetCurrentTimeMs());
(...skipping 160 matching lines...) Expand 10 before | Expand all | Expand 10 after
853 839
854 int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate, 840 int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate,
855 uint32_t frame_rate) { 841 uint32_t frame_rate) {
856 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); 842 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
857 frame_rate = (frame_rate < MAX_ALLOWED_VIDEO_FPS) ? 843 frame_rate = (frame_rate < MAX_ALLOWED_VIDEO_FPS) ?
858 frame_rate : MAX_ALLOWED_VIDEO_FPS; 844 frame_rate : MAX_ALLOWED_VIDEO_FPS;
859 if (last_set_bitrate_kbps_ == new_bit_rate && 845 if (last_set_bitrate_kbps_ == new_bit_rate &&
860 last_set_fps_ == frame_rate) { 846 last_set_fps_ == frame_rate) {
861 return WEBRTC_VIDEO_CODEC_OK; 847 return WEBRTC_VIDEO_CODEC_OK;
862 } 848 }
863 if (scale_) { 849 quality_scaler_.ReportFramerate(frame_rate);
864 quality_scaler_.ReportFramerate(frame_rate);
865 }
866 JNIEnv* jni = AttachCurrentThreadIfNeeded(); 850 JNIEnv* jni = AttachCurrentThreadIfNeeded();
867 ScopedLocalRefFrame local_ref_frame(jni); 851 ScopedLocalRefFrame local_ref_frame(jni);
868 if (new_bit_rate > 0) { 852 if (new_bit_rate > 0) {
869 last_set_bitrate_kbps_ = new_bit_rate; 853 last_set_bitrate_kbps_ = new_bit_rate;
870 } 854 }
871 if (frame_rate > 0) { 855 if (frame_rate > 0) {
872 last_set_fps_ = frame_rate; 856 last_set_fps_ = frame_rate;
873 } 857 }
874 bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_, 858 bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
875 j_set_rates_method_, 859 j_set_rates_method_,
(...skipping 92 matching lines...) Expand 10 before | Expand all | Expand 10 after
968 scoped_ptr<webrtc::EncodedImage> image( 952 scoped_ptr<webrtc::EncodedImage> image(
969 new webrtc::EncodedImage(payload, payload_size, payload_size)); 953 new webrtc::EncodedImage(payload, payload_size, payload_size));
970 image->_encodedWidth = width_; 954 image->_encodedWidth = width_;
971 image->_encodedHeight = height_; 955 image->_encodedHeight = height_;
972 image->_timeStamp = output_timestamp_; 956 image->_timeStamp = output_timestamp_;
973 image->capture_time_ms_ = output_render_time_ms_; 957 image->capture_time_ms_ = output_render_time_ms_;
974 image->_frameType = 958 image->_frameType =
975 (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta); 959 (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta);
976 image->_completeFrame = true; 960 image->_completeFrame = true;
977 image->adapt_reason_.quality_resolution_downscales = 961 image->adapt_reason_.quality_resolution_downscales =
978 scale_ ? quality_scaler_.downscale_shift() : -1; 962 quality_scaler_.downscale_shift();
979 963
980 webrtc::CodecSpecificInfo info; 964 webrtc::CodecSpecificInfo info;
981 memset(&info, 0, sizeof(info)); 965 memset(&info, 0, sizeof(info));
982 info.codecType = codecType_; 966 info.codecType = codecType_;
983 if (codecType_ == kVideoCodecVP8) { 967 if (codecType_ == kVideoCodecVP8) {
984 info.codecSpecific.VP8.pictureId = picture_id_; 968 info.codecSpecific.VP8.pictureId = picture_id_;
985 info.codecSpecific.VP8.nonReference = false; 969 info.codecSpecific.VP8.nonReference = false;
986 info.codecSpecific.VP8.simulcastIdx = 0; 970 info.codecSpecific.VP8.simulcastIdx = 0;
987 info.codecSpecific.VP8.temporalIdx = webrtc::kNoTemporalIdx; 971 info.codecSpecific.VP8.temporalIdx = webrtc::kNoTemporalIdx;
988 info.codecSpecific.VP8.layerSync = false; 972 info.codecSpecific.VP8.layerSync = false;
(...skipping 27 matching lines...) Expand all
1016 1000
1017 // Generate a header describing a single fragment. 1001 // Generate a header describing a single fragment.
1018 webrtc::RTPFragmentationHeader header; 1002 webrtc::RTPFragmentationHeader header;
1019 memset(&header, 0, sizeof(header)); 1003 memset(&header, 0, sizeof(header));
1020 if (codecType_ == kVideoCodecVP8 || codecType_ == kVideoCodecVP9) { 1004 if (codecType_ == kVideoCodecVP8 || codecType_ == kVideoCodecVP9) {
1021 header.VerifyAndAllocateFragmentationHeader(1); 1005 header.VerifyAndAllocateFragmentationHeader(1);
1022 header.fragmentationOffset[0] = 0; 1006 header.fragmentationOffset[0] = 0;
1023 header.fragmentationLength[0] = image->_length; 1007 header.fragmentationLength[0] = image->_length;
1024 header.fragmentationPlType[0] = 0; 1008 header.fragmentationPlType[0] = 0;
1025 header.fragmentationTimeDiff[0] = 0; 1009 header.fragmentationTimeDiff[0] = 0;
1026 if (codecType_ == kVideoCodecVP8 && scale_) { 1010 if (codecType_ == kVideoCodecVP8) {
1027 int qp; 1011 int qp;
1028 if (webrtc::vp8::GetQp(payload, payload_size, &qp)) { 1012 if (webrtc::vp8::GetQp(payload, payload_size, &qp)) {
1029 current_acc_qp_ += qp; 1013 current_acc_qp_ += qp;
1030 quality_scaler_.ReportQP(qp); 1014 quality_scaler_.ReportQP(qp);
1031 } 1015 }
1032 } 1016 }
1033 } else if (codecType_ == kVideoCodecH264) { 1017 } else if (codecType_ == kVideoCodecH264) {
1034 if (scale_) { 1018 h264_bitstream_parser_.ParseBitstream(payload, payload_size);
1035 h264_bitstream_parser_.ParseBitstream(payload, payload_size); 1019 int qp;
1036 int qp; 1020 if (h264_bitstream_parser_.GetLastSliceQp(&qp)) {
1037 if (h264_bitstream_parser_.GetLastSliceQp(&qp)) { 1021 current_acc_qp_ += qp;
1038 current_acc_qp_ += qp; 1022 quality_scaler_.ReportQP(qp);
1039 quality_scaler_.ReportQP(qp);
1040 }
1041 } 1023 }
1042 // For H.264 search for start codes. 1024 // For H.264 search for start codes.
1043 int32_t scPositions[MAX_NALUS_PERFRAME + 1] = {}; 1025 int32_t scPositions[MAX_NALUS_PERFRAME + 1] = {};
1044 int32_t scPositionsLength = 0; 1026 int32_t scPositionsLength = 0;
1045 int32_t scPosition = 0; 1027 int32_t scPosition = 0;
1046 while (scPositionsLength < MAX_NALUS_PERFRAME) { 1028 while (scPositionsLength < MAX_NALUS_PERFRAME) {
1047 int32_t naluPosition = NextNaluPosition( 1029 int32_t naluPosition = NextNaluPosition(
1048 payload + scPosition, payload_size - scPosition); 1030 payload + scPosition, payload_size - scPosition);
1049 if (naluPosition < 0) { 1031 if (naluPosition < 0) {
1050 break; 1032 break;
(...skipping 99 matching lines...) Expand 10 before | Expand all | Expand 10 after
1150 head++; // xx != 1, continue searching. 1132 head++; // xx != 1, continue searching.
1151 continue; 1133 continue;
1152 } 1134 }
1153 return (int32_t)(head - buffer); 1135 return (int32_t)(head - buffer);
1154 } 1136 }
1155 return -1; 1137 return -1;
1156 } 1138 }
1157 1139
1158 void MediaCodecVideoEncoder::OnDroppedFrame() { 1140 void MediaCodecVideoEncoder::OnDroppedFrame() {
1159 // Report dropped frame to quality_scaler_. 1141 // Report dropped frame to quality_scaler_.
1160 if (scale_) 1142 quality_scaler_.ReportDroppedFrame();
1161 quality_scaler_.ReportDroppedFrame();
1162 } 1143 }
1163 1144
1164 int MediaCodecVideoEncoder::GetTargetFramerate() { 1145 int MediaCodecVideoEncoder::GetTargetFramerate() {
1165 return scale_ ? quality_scaler_.GetTargetFramerate() : -1; 1146 return quality_scaler_.GetTargetFramerate();
1166 } 1147 }
1167 1148
1168 const char* MediaCodecVideoEncoder::ImplementationName() const { 1149 const char* MediaCodecVideoEncoder::ImplementationName() const {
1169 return "MediaCodec"; 1150 return "MediaCodec";
1170 } 1151 }
1171 1152
1172 MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() 1153 MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory()
1173 : egl_context_(nullptr) { 1154 : egl_context_(nullptr) {
1174 JNIEnv* jni = AttachCurrentThreadIfNeeded(); 1155 JNIEnv* jni = AttachCurrentThreadIfNeeded();
1175 ScopedLocalRefFrame local_ref_frame(jni); 1156 ScopedLocalRefFrame local_ref_frame(jni);
(...skipping 77 matching lines...) Expand 10 before | Expand all | Expand 10 after
1253 } 1234 }
1254 1235
1255 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( 1236 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder(
1256 webrtc::VideoEncoder* encoder) { 1237 webrtc::VideoEncoder* encoder) {
1257 ALOGD << "Destroy video encoder."; 1238 ALOGD << "Destroy video encoder.";
1258 delete encoder; 1239 delete encoder;
1259 } 1240 }
1260 1241
1261 } // namespace webrtc_jni 1242 } // namespace webrtc_jni
1262 1243
OLDNEW
« no previous file with comments | « no previous file | webrtc/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698