Chromium Code Reviews

Unified diff: talk/app/webrtc/java/jni/androidmediaencoder_jni.cc

Issue 1365063002: Wire up QualityScaler for H.264 on Android. (Closed) Base URL: https://chromium.googlesource.com/external/webrtc.git@master
Patch Set: feedback Created 5 years, 3 months ago
 /*
  * libjingle
  * Copyright 2015 Google Inc.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions are met:
  *
  * 1. Redistributions of source code must retain the above copyright notice,
  *    this list of conditions and the following disclaimer.
  * 2. Redistributions in binary form must reproduce the above copyright notice,
(...skipping 15 matching lines...)
  *
  */

 #include "talk/app/webrtc/java/jni/androidmediaencoder_jni.h"
 #include "talk/app/webrtc/java/jni/classreferenceholder.h"
 #include "talk/app/webrtc/java/jni/androidmediacodeccommon.h"
 #include "webrtc/base/bind.h"
 #include "webrtc/base/checks.h"
 #include "webrtc/base/logging.h"
 #include "webrtc/base/thread.h"
+#include "webrtc/modules/rtp_rtcp/source/h264_bitstream_parser.h"
 #include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
 #include "webrtc/modules/video_coding/utility/include/quality_scaler.h"
 #include "webrtc/modules/video_coding/utility/include/vp8_header_parser.h"
 #include "webrtc/system_wrappers/interface/field_trial.h"
 #include "webrtc/system_wrappers/interface/logcat_trace_context.h"
 #include "third_party/libyuv/include/libyuv/convert.h"
 #include "third_party/libyuv/include/libyuv/convert_from.h"
 #include "third_party/libyuv/include/libyuv/video_common.h"

 using rtc::Bind;
(...skipping 146 matching lines...)
   int32_t output_timestamp_;  // Last output frame timestamp from timestamps_ Q.
   int64_t output_render_time_ms_;  // Last output frame render time from
                                    // render_times_ms_ queue.
   // Frame size in bytes fed to MediaCodec.
   int yuv_size_;
   // True only when between a callback_->Encoded() call return a positive value
   // and the next Encode() call being ignored.
   bool drop_next_input_frame_;
   // Global references; must be deleted in Release().
   std::vector<jobject> input_buffers_;
-  scoped_ptr<webrtc::QualityScaler> quality_scaler_;
+  webrtc::QualityScaler quality_scaler_;
   // Dynamic resolution change, off by default.
   bool scale_;
-  int updated_framerate_;
+
+  // H264 bitstream parser, used to extract QP from encoded bitstreams.
+  webrtc::H264BitstreamParser h264_bitstream_parser_;
 };

 MediaCodecVideoEncoder::~MediaCodecVideoEncoder() {
   // Call Release() to ensure no more callbacks to us after we are deleted.
   Release();
 }

 MediaCodecVideoEncoder::MediaCodecVideoEncoder(
     JNIEnv* jni, VideoCodecType codecType) :
   codecType_(codecType),
   callback_(NULL),
   inited_(false),
   picture_id_(0),
   codec_thread_(new Thread()),
-  quality_scaler_(new webrtc::QualityScaler()),
   j_media_codec_video_encoder_class_(
       jni,
       FindClass(jni, "org/webrtc/MediaCodecVideoEncoder")),
   j_media_codec_video_encoder_(
       jni,
       jni->NewObject(*j_media_codec_video_encoder_class_,
                      GetMethodID(jni,
                                  *j_media_codec_video_encoder_class_,
                                  "<init>",
                                  "()V"))) {
(...skipping 45 matching lines...)
   CHECK_EXCEPTION(jni) << "MediaCodecVideoEncoder ctor failed";
   AllowBlockingCalls();
 }

 int32_t MediaCodecVideoEncoder::InitEncode(
     const webrtc::VideoCodec* codec_settings,
     int32_t /* number_of_cores */,
     size_t /* max_payload_size */) {
   const int kMinWidth = 320;
   const int kMinHeight = 180;
-  // QP is obtained from VP8-bitstream for HW, so the QP corresponds to the
-  // (internal) range: [0, 127]. And we cannot change QP_max in HW, so it is
-  // always = 127. Note that in SW, QP is that of the user-level range [0, 63].
-  const int kMaxQP = 127;
   const int kLowQpThresholdDenominator = 3;
   if (codec_settings == NULL) {
     ALOGE("NULL VideoCodec instance");
     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
   }
   // Factory should guard against other codecs being used with us.
   RTC_CHECK(codec_settings->codecType == codecType_)
       << "Unsupported codec " << codec_settings->codecType << " for "
       << codecType_;

   ALOGD("InitEncode request");
-
   scale_ = webrtc::field_trial::FindFullName(
       "WebRTC-MediaCodecVideoEncoder-AutomaticResize") == "Enabled";
   ALOGD("Automatic resize: %s", scale_ ? "enabled" : "disabled");
-
-  if (scale_ && codecType_ == kVideoCodecVP8) {
-    quality_scaler_->Init(kMaxQP / kLowQpThresholdDenominator, true);
-    quality_scaler_->SetMinResolution(kMinWidth, kMinHeight);
-    quality_scaler_->ReportFramerate(codec_settings->maxFramerate);
-    updated_framerate_ = codec_settings->maxFramerate;
-  } else {
-    updated_framerate_ = -1;
+  if (scale_) {
+    if (codecType_ == kVideoCodecVP8) {
+      // QP is obtained from VP8-bitstream for HW, so the QP corresponds to the
+      // (internal) range: [0, 127]. And we cannot change QP_max in HW, so it is
+      // always = 127. Note that in SW, QP is that of the user-level range [0,
+      // 63].
+      const int kMaxQp = 127;
+      quality_scaler_.Init(kMaxQp / kLowQpThresholdDenominator, true);
+    } else if (codecType_ == kVideoCodecH264) {
+      // H264 QP is in the range [0, 51].
+      const int kMaxQp = 51;
+      quality_scaler_.Init(kMaxQp / kLowQpThresholdDenominator, true);
+    } else {
+      // When adding codec support to additional hardware codecs, also configure
+      // their QP thresholds for scaling.
+      RTC_NOTREACHED() << "Unsupported codec without configured QP thresholds.";
+    }
+    quality_scaler_.SetMinResolution(kMinWidth, kMinHeight);
+    quality_scaler_.ReportFramerate(codec_settings->maxFramerate);
   }
   return codec_thread_->Invoke<int32_t>(
       Bind(&MediaCodecVideoEncoder::InitEncodeOnCodecThread,
            this,
            codec_settings->width,
            codec_settings->height,
            codec_settings->startBitrate,
            codec_settings->maxFramerate));
 }

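Note: with the shared denominator, the value passed to quality_scaler_.Init() works out to 127 / 3 = 42 for VP8 and 51 / 3 = 17 for H.264. A small, self-contained sketch of that arithmetic (illustrative only, not part of the patch):

#include <iostream>

// Reproduces the integer math from InitEncode() above; values only, no WebRTC
// dependencies.
int main() {
  const int kLowQpThresholdDenominator = 3;
  const int kMaxQpVp8 = 127;  // VP8 internal QP range [0, 127].
  const int kMaxQpH264 = 51;  // H.264 QP range [0, 51].
  std::cout << "VP8 low-QP threshold:  "
            << kMaxQpVp8 / kLowQpThresholdDenominator << "\n"    // 42
            << "H264 low-QP threshold: "
            << kMaxQpH264 / kLowQpThresholdDenominator << "\n";  // 17
  return 0;
}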
(...skipping 19 matching lines...)
       Bind(&MediaCodecVideoEncoder::ReleaseOnCodecThread, this));
 }

 int32_t MediaCodecVideoEncoder::SetChannelParameters(uint32_t /* packet_loss */,
                                                      int64_t /* rtt */) {
   return WEBRTC_VIDEO_CODEC_OK;
 }

 int32_t MediaCodecVideoEncoder::SetRates(uint32_t new_bit_rate,
                                          uint32_t frame_rate) {
-  if (scale_ && codecType_ == kVideoCodecVP8) {
-    quality_scaler_->ReportFramerate(frame_rate);
-  }
+  if (scale_)
+    quality_scaler_.ReportFramerate(frame_rate);
+
   return codec_thread_->Invoke<int32_t>(
       Bind(&MediaCodecVideoEncoder::SetRatesOnCodecThread,
            this,
            new_bit_rate,
            frame_rate));
 }

 void MediaCodecVideoEncoder::OnMessage(rtc::Message* msg) {
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
   ScopedLocalRefFrame local_ref_frame(jni);
(...skipping 135 matching lines...)
   }

   if (drop_next_input_frame_) {
     ALOGV("Encoder drop frame - failed callback.");
     drop_next_input_frame_ = false;
     return WEBRTC_VIDEO_CODEC_OK;
   }

   RTC_CHECK(frame_types->size() == 1) << "Unexpected stream count";
   // Check framerate before spatial resolution change.
-  if (scale_ && codecType_ == kVideoCodecVP8) {
-    quality_scaler_->OnEncodeFrame(frame);
-    updated_framerate_ = quality_scaler_->GetTargetFramerate();
-  }
-  const VideoFrame& input_frame = (scale_ && codecType_ == kVideoCodecVP8) ?
-      quality_scaler_->GetScaledFrame(frame) : frame;
+  if (scale_)
+    quality_scaler_.OnEncodeFrame(frame);
+
+  const VideoFrame& input_frame =
+      scale_ ? quality_scaler_.GetScaledFrame(frame) : frame;

   if (input_frame.width() != width_ || input_frame.height() != height_) {
     ALOGD("Frame resolution change from %d x %d to %d x %d",
           width_, height_, input_frame.width(), input_frame.height());
     width_ = input_frame.width();
     height_ = input_frame.height();
     ResetCodec();
     return WEBRTC_VIDEO_CODEC_OK;
   }

(...skipping 190 matching lines...)
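With the codec check removed, Encode() now runs the scaler for any codec whenever scaling is enabled: it records the incoming frame and may substitute a downscaled one before handing it to MediaCodec. A small sketch of that per-frame selection, using only the QualityScaler calls shown above (the free function is illustrative, and the webrtc/video_frame.h include assumes this revision's header layout):

#include "webrtc/modules/video_coding/utility/include/quality_scaler.h"
#include "webrtc/video_frame.h"

// Illustrative helper, not part of the CL: mirrors how Encode() above picks
// the frame that is actually fed to the hardware encoder.
const webrtc::VideoFrame& PickEncoderInput(webrtc::QualityScaler* scaler,
                                           const webrtc::VideoFrame& frame,
                                           bool scale_enabled) {
  if (!scale_enabled)
    return frame;
  scaler->OnEncodeFrame(frame);          // Update the scaler's bookkeeping.
  return scaler->GetScaledFrame(frame);  // May be downscaled when quality has been poor.
}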
       jni->GetDirectBufferAddress(j_output_buffer));
   CHECK_EXCEPTION(jni);

   ALOGV("Encoder frame out # %d. Key: %d. Size: %d. TS: %lld."
         " Latency: %lld. EncTime: %lld",
         frames_encoded_, key_frame, payload_size,
         last_output_timestamp_ms_,
         last_input_timestamp_ms_ - last_output_timestamp_ms_,
         frame_encoding_time_ms);

-  if (payload_size && scale_ && codecType_ == kVideoCodecVP8)
-    quality_scaler_->ReportQP(webrtc::vp8::GetQP(payload));
-
   // Calculate and print encoding statistics - every 3 seconds.
   frames_encoded_++;
   current_frames_++;
   current_bytes_ += payload_size;
   current_encoding_time_ms_ += frame_encoding_time_ms;
   int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
   if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
       current_frames_ > 0) {
     ALOGD("Encoder bitrate: %d, target: %d kbps, fps: %d,"
           " encTime: %d for last %d ms",
(...skipping 35 matching lines...)

   // Generate a header describing a single fragment.
   webrtc::RTPFragmentationHeader header;
   memset(&header, 0, sizeof(header));
   if (codecType_ == kVideoCodecVP8) {
     header.VerifyAndAllocateFragmentationHeader(1);
     header.fragmentationOffset[0] = 0;
     header.fragmentationLength[0] = image->_length;
     header.fragmentationPlType[0] = 0;
     header.fragmentationTimeDiff[0] = 0;
+    if (scale_)
+      quality_scaler_.ReportQP(webrtc::vp8::GetQP(payload));
   } else if (codecType_ == kVideoCodecH264) {
+    if (scale_) {
+      h264_bitstream_parser_.ParseBitstream(payload, payload_size);
+      int qp;
+      if (h264_bitstream_parser_.GetLastSliceQp(&qp))
+        quality_scaler_.ReportQP(qp);
+    }
     // For H.264 search for start codes.
     int32_t scPositions[MAX_NALUS_PERFRAME + 1] = {};
     int32_t scPositionsLength = 0;
     int32_t scPosition = 0;
     while (scPositionsLength < MAX_NALUS_PERFRAME) {
       int32_t naluPosition = NextNaluPosition(
           payload + scPosition, payload_size - scPosition);
       if (naluPosition < 0) {
         break;
       }
(...skipping 69 matching lines...)
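This is the core of the CL: for H.264 the per-frame QP is pulled out of the encoded bitstream with the newly added H264BitstreamParser instead of vp8::GetQP(), and fed to the scaler. A condensed sketch of that reporting step, assuming exactly the parser and scaler calls used above (the helper function itself is illustrative):

#include <cstddef>
#include <cstdint>

#include "webrtc/modules/rtp_rtcp/source/h264_bitstream_parser.h"
#include "webrtc/modules/video_coding/utility/include/quality_scaler.h"

// Illustrative helper, not part of the CL: extracts the last slice QP from an
// encoded H.264 access unit and reports it to the quality scaler.
void ReportH264FrameQp(webrtc::H264BitstreamParser* parser,
                       webrtc::QualityScaler* scaler,
                       uint8_t* payload,
                       size_t payload_size) {
  parser->ParseBitstream(payload, payload_size);
  int qp;
  if (parser->GetLastSliceQp(&qp))  // False when no slice header was parsed.
    scaler->ReportQP(qp);
}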
     if (head[3] != 0x01) {  // got 000000xx
       head++;               // xx != 1, continue searching.
       continue;
     }
     return (int32_t)(head - buffer);
   }
   return -1;
 }

 void MediaCodecVideoEncoder::OnDroppedFrame() {
-  if (scale_ && codecType_ == kVideoCodecVP8)
-    quality_scaler_->ReportDroppedFrame();
+  if (scale_)
+    quality_scaler_.ReportDroppedFrame();
 }

 int MediaCodecVideoEncoder::GetTargetFramerate() {
-  return updated_framerate_;
+  return scale_ ? quality_scaler_.GetTargetFramerate() : -1;
 }

 MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() {
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
   ScopedLocalRefFrame local_ref_frame(jni);
   jclass j_encoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoEncoder");
   supported_codecs_.clear();

   bool is_vp8_hw_supported = jni->CallStaticBooleanMethod(
       j_encoder_class,
(...skipping 40 matching lines...)
 }

 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder(
     webrtc::VideoEncoder* encoder) {
   ALOGD("Destroy video encoder.");
   delete encoder;
 }

 }  // namespace webrtc_jni

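Taken together, the change gives H.264 the same resolution-adaptation loop VP8 already had: InitEncode() configures the scaler, Encode() substitutes the possibly downscaled frame, the output path reports QP (or a dropped frame), and GetTargetFramerate() exposes the scaler's framerate target. A minimal setup sketch using only the QualityScaler methods that appear in this diff (the function name and hard-coded values are illustrative):

#include "webrtc/modules/video_coding/utility/include/quality_scaler.h"

// Illustrative helper, not part of the CL: configures a QualityScaler the way
// InitEncode() above does for the H.264 case.
void ConfigureScalerForH264(webrtc::QualityScaler* scaler, int max_framerate) {
  const int kMaxQp = 51;                     // H.264 QP range [0, 51].
  const int kLowQpThresholdDenominator = 3;  // Shared with the VP8 path.
  scaler->Init(kMaxQp / kLowQpThresholdDenominator, true);
  scaler->SetMinResolution(320, 180);        // Same floor as kMinWidth x kMinHeight above.
  scaler->ReportFramerate(max_framerate);
}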