Index: webrtc/api/android/jni/androidmediaencoder_jni.cc |
diff --git a/webrtc/api/android/jni/androidmediaencoder_jni.cc b/webrtc/api/android/jni/androidmediaencoder_jni.cc |
index e84b850287944bbc8f76d07c0c250bb3526c1ce0..38d3c882e06de9326332ab7b181fb82ad44d20d3 100644 |
--- a/webrtc/api/android/jni/androidmediaencoder_jni.cc |
+++ b/webrtc/api/android/jni/androidmediaencoder_jni.cc |
@@ -31,7 +31,6 @@ |
#include "webrtc/common_types.h" |
#include "webrtc/modules/video_coding/include/video_codec_interface.h" |
#include "webrtc/modules/video_coding/utility/h264_bitstream_parser.h" |
-#include "webrtc/modules/video_coding/utility/quality_scaler.h" |
#include "webrtc/modules/video_coding/utility/vp8_header_parser.h" |
#include "webrtc/system_wrappers/include/field_trial.h" |
#include "webrtc/system_wrappers/include/logcat_trace_context.h" |
@@ -49,7 +48,6 @@ using webrtc::VideoCodecType; |
using webrtc::kVideoCodecH264; |
using webrtc::kVideoCodecVP8; |
using webrtc::kVideoCodecVP9; |
-using webrtc::QualityScaler; |
namespace webrtc_jni { |
@@ -156,7 +154,6 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder, |
webrtc::EncodedImageCallback* callback); |
int32_t ReleaseOnCodecThread(); |
int32_t SetRatesOnCodecThread(uint32_t new_bit_rate, uint32_t frame_rate); |
- void OnDroppedFrameOnCodecThread(); |
// Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members. |
int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info); |
@@ -259,11 +256,6 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder, |
bool drop_next_input_frame_; |
// Global references; must be deleted in Release(). |
std::vector<jobject> input_buffers_; |
- QualityScaler quality_scaler_; |
- // Dynamic resolution change, off by default. |
- bool scale_; |
- |
- // H264 bitstream parser, used to extract QP from encoded bitstreams. |
webrtc::H264BitstreamParser h264_bitstream_parser_; |
// VP9 variables to populate codec specific structure. |
@@ -382,34 +374,7 @@ int32_t MediaCodecVideoEncoder::InitEncode( |
codec_mode_ = codec_settings->mode; |
int init_width = codec_settings->width; |
int init_height = codec_settings->height; |
- // Scaling is disabled for VP9, but optionally enabled for VP8. |
- // TODO(pbos): Extract automaticResizeOn out of VP8 settings. |
- scale_ = false; |
- if (codecType_ == kVideoCodecVP8) { |
- scale_ = codec_settings->codecSpecific.VP8.automaticResizeOn; |
- } else if (codecType_ != kVideoCodecVP9) { |
- scale_ = true; |
- } |
- |
ALOGD << "InitEncode request: " << init_width << " x " << init_height; |
- ALOGD << "Encoder automatic resize " << (scale_ ? "enabled" : "disabled"); |
- |
- if (scale_) { |
- if (codecType_ == kVideoCodecVP8 || codecType_ == kVideoCodecH264) { |
- quality_scaler_.Init(codecType_, codec_settings->startBitrate, |
- codec_settings->width, codec_settings->height, |
- codec_settings->maxFramerate); |
- } else { |
- // When adding codec support to additional hardware codecs, also configure |
- // their QP thresholds for scaling. |
- RTC_NOTREACHED() << "Unsupported codec without configured QP thresholds."; |
- scale_ = false; |
- } |
- QualityScaler::Resolution res = quality_scaler_.GetScaledResolution(); |
- init_width = res.width; |
- init_height = res.height; |
- ALOGD << "Scaled resolution: " << init_width << " x " << init_height; |
- } |
return codec_thread_->Invoke<int32_t>( |
RTC_FROM_HERE, |
@@ -644,7 +609,6 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread( |
drop_next_input_frame_ = false; |
current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; |
frames_dropped_media_encoder_++; |
- OnDroppedFrameOnCodecThread(); |
return WEBRTC_VIDEO_CODEC_OK; |
} |
@@ -667,32 +631,12 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread( |
return WEBRTC_VIDEO_CODEC_ERROR; |
} |
frames_dropped_media_encoder_++; |
- OnDroppedFrameOnCodecThread(); |
return WEBRTC_VIDEO_CODEC_OK; |
} |
consecutive_full_queue_frame_drops_ = 0; |
rtc::scoped_refptr<webrtc::VideoFrameBuffer> input_buffer( |
frame.video_frame_buffer()); |
- if (scale_) { |
- // Check framerate before spatial resolution change. |
- quality_scaler_.OnEncodeFrame(frame.width(), frame.height()); |
- const webrtc::QualityScaler::Resolution scaled_resolution = |
- quality_scaler_.GetScaledResolution(); |
- if (scaled_resolution.width != frame.width() || |
- scaled_resolution.height != frame.height()) { |
- if (input_buffer->native_handle() != nullptr) { |
- input_buffer = static_cast<AndroidTextureBuffer*>(input_buffer.get()) |
- ->CropScaleAndRotate(frame.width(), frame.height(), |
- 0, 0, |
- scaled_resolution.width, |
- scaled_resolution.height, |
- webrtc::kVideoRotation_0); |
- } else { |
- input_buffer = quality_scaler_.GetScaledBuffer(input_buffer); |
- } |
- } |
- } |
VideoFrame input_frame(input_buffer, frame.timestamp(), |
frame.render_time_ms(), frame.rotation()); |
@@ -715,7 +659,6 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread( |
if (frames_received_ > 1) { |
current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; |
frames_dropped_media_encoder_++; |
- OnDroppedFrameOnCodecThread(); |
} else { |
// Input buffers are not ready after codec initialization, HW is still |
// allocating thme - this is expected and should not result in drop |
@@ -882,9 +825,6 @@ int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate, |
last_set_fps_ == frame_rate) { |
return WEBRTC_VIDEO_CODEC_OK; |
} |
- if (scale_) { |
- quality_scaler_.ReportFramerate(frame_rate); |
- } |
JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
ScopedLocalRefFrame local_ref_frame(jni); |
if (new_bit_rate > 0) { |
@@ -987,9 +927,6 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) { |
image->_frameType = |
(key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta); |
image->_completeFrame = true; |
- image->adapt_reason_.quality_resolution_downscales = |
[Review thread — not part of the patch; interleaved by the review tool] |
  perkj_webrtc (2016/10/07 07:41:15): does this CL take stats into account? |
  kthelgason (2016/10/07 09:34:48): Nope, that's still to be addressed. In any case th[reply truncated in source] |
[End review thread — the removed statement beginning "image->adapt_reason_..." continues below] |
|
- scale_ ? quality_scaler_.downscale_shift() : -1; |
- |
webrtc::CodecSpecificInfo info; |
memset(&info, 0, sizeof(info)); |
info.codecType = codecType_; |
@@ -1036,22 +973,19 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) { |
header.fragmentationLength[0] = image->_length; |
header.fragmentationPlType[0] = 0; |
header.fragmentationTimeDiff[0] = 0; |
- if (codecType_ == kVideoCodecVP8 && scale_) { |
+ if (codecType_ == kVideoCodecVP8) { |
int qp; |
if (webrtc::vp8::GetQp(payload, payload_size, &qp)) { |
current_acc_qp_ += qp; |
- quality_scaler_.ReportQP(qp); |
image->qp_ = qp; |
} |
} |
} else if (codecType_ == kVideoCodecH264) { |
- if (scale_) { |
- h264_bitstream_parser_.ParseBitstream(payload, payload_size); |
- int qp; |
- if (h264_bitstream_parser_.GetLastSliceQp(&qp)) { |
- current_acc_qp_ += qp; |
- quality_scaler_.ReportQP(qp); |
- } |
+ h264_bitstream_parser_.ParseBitstream(payload, payload_size); |
+ int qp; |
+ if (h264_bitstream_parser_.GetLastSliceQp(&qp)) { |
+ current_acc_qp_ += qp; |
+ image->qp_ = qp; |
} |
// For H.264 search for start codes. |
int32_t scPositions[MAX_NALUS_PERFRAME + 1] = {}; |
@@ -1188,19 +1122,7 @@ int32_t MediaCodecVideoEncoder::NextNaluPosition( |
} |
void MediaCodecVideoEncoder::OnDroppedFrame() { |
- // Methods running on the codec thread should call OnDroppedFrameOnCodecThread |
- // directly. |
RTC_DCHECK(!codec_thread_checker_.CalledOnValidThread()); |
- codec_thread_->Invoke<void>( |
- RTC_FROM_HERE, |
- Bind(&MediaCodecVideoEncoder::OnDroppedFrameOnCodecThread, this)); |
-} |
- |
-void MediaCodecVideoEncoder::OnDroppedFrameOnCodecThread() { |
- RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
- // Report dropped frame to quality_scaler_. |
- if (scale_) |
- quality_scaler_.ReportDroppedFrame(); |
} |
const char* MediaCodecVideoEncoder::ImplementationName() const { |