Chromium Code Reviews

Index: webrtc/api/java/jni/androidmediaencoder_jni.cc
diff --git a/webrtc/api/java/jni/androidmediaencoder_jni.cc b/webrtc/api/java/jni/androidmediaencoder_jni.cc
index d83c4314c6c558c0937ffcc344d1d605e8197cf6..81014b779c823725be0ebee895b3f244e11baf49 100644
--- a/webrtc/api/java/jni/androidmediaencoder_jni.cc
+++ b/webrtc/api/java/jni/androidmediaencoder_jni.cc
@@ -242,8 +242,6 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
   // Global references; must be deleted in Release().
   std::vector<jobject> input_buffers_;
   QualityScaler quality_scaler_;
-  // Dynamic resolution change, off by default.
-  bool scale_;
   // H264 bitstream parser, used to extract QP from encoded bitstreams.
   webrtc::H264BitstreamParser h264_bitstream_parser_;
@@ -365,44 +363,35 @@ int32_t MediaCodecVideoEncoder::InitEncode(
   codec_mode_ = codec_settings->mode;
   int init_width = codec_settings->width;
   int init_height = codec_settings->height;
-  scale_ = (codecType_ != kVideoCodecVP9) && (webrtc::field_trial::FindFullName(
-      "WebRTC-MediaCodecVideoEncoder-AutomaticResize") == "Enabled");
-
   ALOGD << "InitEncode request: " << init_width << " x " << init_height;
-  ALOGD << "Encoder automatic resize " << (scale_ ? "enabled" : "disabled");
-
-  if (scale_) {
-    if (codecType_ == kVideoCodecVP8) {
-      // QP is obtained from VP8-bitstream for HW, so the QP corresponds to the
-      // (internal) range: [0, 127]. And we cannot change QP_max in HW, so it is
-      // always = 127. Note that in SW, QP is that of the user-level range [0,
-      // 63].
-      const int kLowQpThreshold = 29;
-      const int kBadQpThreshold = 90;
-      quality_scaler_.Init(kLowQpThreshold, kBadQpThreshold, false,
-                           codec_settings->startBitrate, codec_settings->width,
-                           codec_settings->height,
-                           codec_settings->maxFramerate);
-    } else if (codecType_ == kVideoCodecH264) {
-      // H264 QP is in the range [0, 51].
-      const int kLowQpThreshold = 21;
-      const int kBadQpThreshold = 33;
-      quality_scaler_.Init(kLowQpThreshold, kBadQpThreshold, false,
-                           codec_settings->startBitrate, codec_settings->width,
-                           codec_settings->height,
-                           codec_settings->maxFramerate);
-    } else {
-      // When adding codec support to additional hardware codecs, also configure
-      // their QP thresholds for scaling.
-      RTC_NOTREACHED() << "Unsupported codec without configured QP thresholds.";
-      scale_ = false;
-    }
-    quality_scaler_.SetMinResolution(kMinDimension, kMinDimension);
-    QualityScaler::Resolution res = quality_scaler_.GetScaledResolution();
-    init_width = std::max(res.width, kMinDimension);
-    init_height = std::max(res.height, kMinDimension);
-    ALOGD << "Scaled resolution: " << init_width << " x " << init_height;
+
+  if (codecType_ == kVideoCodecVP8) {
+    // QP is obtained from VP8-bitstream for HW, so the QP corresponds to the
+    // (internal) range: [0, 127]. And we cannot change QP_max in HW, so it is
+    // always = 127. Note that in SW, QP is that of the user-level range [0,
+    // 63].
+    const int kLowQpThreshold = 29;
+    const int kBadQpThreshold = 90;
+    quality_scaler_.Init(kLowQpThreshold, kBadQpThreshold, false,
+                         codec_settings->startBitrate, codec_settings->width,
+                         codec_settings->height, codec_settings->maxFramerate);
+  } else if (codecType_ == kVideoCodecH264) {
+    // H264 QP is in the range [0, 51].
+    const int kLowQpThreshold = 21;
+    const int kBadQpThreshold = 33;
+    quality_scaler_.Init(kLowQpThreshold, kBadQpThreshold, false,
+                         codec_settings->startBitrate, codec_settings->width,
+                         codec_settings->height, codec_settings->maxFramerate);
+  } else {
+    // When adding codec support to additional hardware codecs, also configure
+    // their QP thresholds for scaling.
+    RTC_NOTREACHED() << "Unsupported codec without configured QP thresholds.";
   }
+  quality_scaler_.SetMinResolution(kMinDimension, kMinDimension);
+  QualityScaler::Resolution res = quality_scaler_.GetScaledResolution();
+  init_width = std::max(res.width, kMinDimension);
+  init_height = std::max(res.height, kMinDimension);
+  ALOGD << "Scaled resolution: " << init_width << " x " << init_height;
   return codec_thread_->Invoke<int32_t>(
       Bind(&MediaCodecVideoEncoder::InitEncodeOnCodecThread,
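
The two Init() calls above differ only in their QP thresholds: VP8 hardware reports QP on the bitstream's internal [0, 127] scale, while H.264 QP lives in [0, 51]. As a minimal standalone sketch (hypothetical names, not the actual QualityScaler internals), this is how such low/bad thresholds typically drive an up/down-scale decision:

    #include <iostream>

    enum class ScaleAction { kScaleUp, kNoChange, kScaleDown };

    // Classify an observed average QP against a codec's thresholds. Below
    // the low threshold the encoder has bits to spare (try a higher
    // resolution); above the bad threshold quality is suffering (downscale).
    ScaleAction ClassifyQp(int average_qp, int low_threshold,
                           int bad_threshold) {
      if (average_qp <= low_threshold)
        return ScaleAction::kScaleUp;
      if (average_qp >= bad_threshold)
        return ScaleAction::kScaleDown;
      return ScaleAction::kNoChange;
    }

    int main() {
      // VP8 HW thresholds from this patch: 29 / 90 on the [0, 127] range.
      std::cout << (ClassifyQp(100, 29, 90) == ScaleAction::kScaleDown)  // 1
                << "\n";
      // H.264 thresholds from this patch: 21 / 33 on the [0, 51] range.
      std::cout << (ClassifyQp(18, 21, 33) == ScaleAction::kScaleUp)     // 1
                << "\n";
      return 0;
    }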
@@ -662,24 +651,21 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
   consecutive_full_queue_frame_drops_ = 0;
   VideoFrame input_frame = frame;
-  if (scale_) {
-    // Check framerate before spatial resolution change.
-    quality_scaler_.OnEncodeFrame(frame);
-    const webrtc::QualityScaler::Resolution scaled_resolution =
-        quality_scaler_.GetScaledResolution();
-    if (scaled_resolution.width != frame.width() ||
-        scaled_resolution.height != frame.height()) {
-      if (frame.native_handle() != nullptr) {
-        rtc::scoped_refptr<webrtc::VideoFrameBuffer> scaled_buffer(
-            static_cast<AndroidTextureBuffer*>(
-                frame.video_frame_buffer().get())->ScaleAndRotate(
-                    scaled_resolution.width,
-                    scaled_resolution.height,
-                    webrtc::kVideoRotation_0));
-        input_frame.set_video_frame_buffer(scaled_buffer);
-      } else {
-        input_frame = quality_scaler_.GetScaledFrame(frame);
-      }
+  // Check framerate before spatial resolution change.
+  quality_scaler_.OnEncodeFrame(frame);
+  const webrtc::QualityScaler::Resolution scaled_resolution =
+      quality_scaler_.GetScaledResolution();

AlexG, 2016/04/13 16:46:12:
How will this work for the VP9 case? We do not report

+  if (scaled_resolution.width != frame.width() ||
+      scaled_resolution.height != frame.height()) {
+    if (frame.native_handle() != nullptr) {
+      rtc::scoped_refptr<webrtc::VideoFrameBuffer> scaled_buffer(
+          static_cast<AndroidTextureBuffer*>(frame.video_frame_buffer().get())
+              ->ScaleAndRotate(scaled_resolution.width,
+                               scaled_resolution.height,
+                               webrtc::kVideoRotation_0));
+      input_frame.set_video_frame_buffer(scaled_buffer);
+    } else {
+      input_frame = quality_scaler_.GetScaledFrame(frame);
     }
   }
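
A note on GetScaledResolution() above: the scaler's current downscale shift is, to my understanding, applied as a power-of-two reduction of the input dimensions, which is what lets the width/height comparison detect a pending resolution change. A hedged sketch with a hypothetical helper (the real QualityScaler tracks the shift internally, and kMinDimension's actual value is in the source):

    #include <algorithm>
    #include <cstdio>

    struct Resolution { int width; int height; };

    // Each downscale step halves both dimensions (so a shift of k divides
    // the area by 4^k), clamped to a minimum dimension.
    Resolution ScaledResolution(int width, int height, int downscale_shift,
                                int min_dimension) {
      return {std::max(width >> downscale_shift, min_dimension),
              std::max(height >> downscale_shift, min_dimension)};
    }

    int main() {
      // 176 is an illustrative minimum, not necessarily kMinDimension.
      const Resolution r = ScaledResolution(1280, 720, 1, 176);
      std::printf("%dx%d\n", r.width, r.height);  // prints 640x360
      return 0;
    }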
@@ -860,9 +846,7 @@ int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate,
       last_set_fps_ == frame_rate) {
     return WEBRTC_VIDEO_CODEC_OK;
   }
-  if (scale_) {
-    quality_scaler_.ReportFramerate(frame_rate);
-  }
+  quality_scaler_.ReportFramerate(frame_rate);
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
   ScopedLocalRefFrame local_ref_frame(jni);
   if (new_bit_rate > 0) {
@@ -975,7 +959,7 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
           (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta);
       image->_completeFrame = true;
       image->adapt_reason_.quality_resolution_downscales =
-          scale_ ? quality_scaler_.downscale_shift() : -1;
+          quality_scaler_.downscale_shift();
       webrtc::CodecSpecificInfo info;
       memset(&info, 0, sizeof(info));
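
Semantic note on the change above: with scale_ gone, quality_resolution_downscales always carries the scaler's shift (0 when no downscale is active) instead of the old -1 "scaling disabled" sentinel. A hypothetical consumer-side check under that assumption, with a standalone mimic of the field:

    // Standalone mimic of the relevant field (the real one lives on
    // webrtc::EncodedImage in this revision).
    struct AdaptReason { int quality_resolution_downscales = 0; };

    // With the -1 sentinel gone, "was this frame downscaled for quality?"
    // reduces to a positivity check on the shift.
    bool WasDownscaledForQuality(const AdaptReason& reason) {
      return reason.quality_resolution_downscales > 0;
    }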
@@ -1023,7 +1007,7 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
         header.fragmentationLength[0] = image->_length;
         header.fragmentationPlType[0] = 0;
         header.fragmentationTimeDiff[0] = 0;
-        if (codecType_ == kVideoCodecVP8 && scale_) {
+        if (codecType_ == kVideoCodecVP8) {
           int qp;
           if (webrtc::vp8::GetQp(payload, payload_size, &qp)) {
             current_acc_qp_ += qp;
@@ -1031,13 +1015,11 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
           }
         }
       } else if (codecType_ == kVideoCodecH264) {
-        if (scale_) {
-          h264_bitstream_parser_.ParseBitstream(payload, payload_size);
-          int qp;
-          if (h264_bitstream_parser_.GetLastSliceQp(&qp)) {
-            current_acc_qp_ += qp;
-            quality_scaler_.ReportQP(qp);
-          }
+        h264_bitstream_parser_.ParseBitstream(payload, payload_size);
+        int qp;
+        if (h264_bitstream_parser_.GetLastSliceQp(&qp)) {
+          current_acc_qp_ += qp;
+          quality_scaler_.ReportQP(qp);
         }
         // For H.264 search for start codes.
         int32_t scPositions[MAX_NALUS_PERFRAME + 1] = {};
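
Both branches above now run the same feedback loop unconditionally: extract the per-frame QP from the encoded bitstream, accumulate it (current_acc_qp_), and report it to the quality scaler. A minimal standalone sketch of the accumulation side, with hypothetical names:

    #include <cstdint>
    #include <iostream>

    // Accumulates per-frame QP so an average can be compared against the
    // codec's low/bad thresholds at the end of a sampling period.
    class QpTracker {
     public:
      void ReportQp(int qp) { sum_ += qp; ++frames_; }
      // Average QP over the period; -1 until at least one frame is reported.
      int AverageQp() const {
        return frames_ > 0 ? static_cast<int>(sum_ / frames_) : -1;
      }
      void Reset() { sum_ = 0; frames_ = 0; }

     private:
      int64_t sum_ = 0;
      int frames_ = 0;
    };

    int main() {
      QpTracker tracker;
      const int qps[] = {30, 34, 38};
      for (int qp : qps) tracker.ReportQp(qp);
      std::cout << tracker.AverageQp() << "\n";  // prints 34
      return 0;
    }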
@@ -1157,12 +1139,11 @@ int32_t MediaCodecVideoEncoder::NextNaluPosition(
 void MediaCodecVideoEncoder::OnDroppedFrame() {
   // Report dropped frame to quality_scaler_.
-  if (scale_)
-    quality_scaler_.ReportDroppedFrame();
+  quality_scaler_.ReportDroppedFrame();
 }

 int MediaCodecVideoEncoder::GetTargetFramerate() {
-  return scale_ ? quality_scaler_.GetTargetFramerate() : -1;
+  return quality_scaler_.GetTargetFramerate();
 }

 const char* MediaCodecVideoEncoder::ImplementationName() const {