Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(27)

Side by Side Diff: webrtc/api/android/jni/androidmediaencoder_jni.cc

Issue 2398963003: Move usage of QualityScaler to ViEEncoder. (Closed)
Patch Set: prevent data race Created 4 years, 1 month ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 /* 1 /*
2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
3 * 3 *
4 * Use of this source code is governed by a BSD-style license 4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source 5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found 6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may 7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree. 8 * be found in the AUTHORS file in the root of the source tree.
9 */ 9 */
10 10
(...skipping 17 matching lines...) Expand all
28 #include "webrtc/base/thread.h" 28 #include "webrtc/base/thread.h"
29 #include "webrtc/base/thread_checker.h" 29 #include "webrtc/base/thread_checker.h"
30 #include "webrtc/base/timeutils.h" 30 #include "webrtc/base/timeutils.h"
31 #include "webrtc/common_types.h" 31 #include "webrtc/common_types.h"
32 #include "webrtc/common_video/h264/h264_bitstream_parser.h" 32 #include "webrtc/common_video/h264/h264_bitstream_parser.h"
33 #include "webrtc/modules/video_coding/include/video_codec_interface.h" 33 #include "webrtc/modules/video_coding/include/video_codec_interface.h"
34 #include "webrtc/modules/video_coding/utility/quality_scaler.h" 34 #include "webrtc/modules/video_coding/utility/quality_scaler.h"
35 #include "webrtc/modules/video_coding/utility/vp8_header_parser.h" 35 #include "webrtc/modules/video_coding/utility/vp8_header_parser.h"
36 #include "webrtc/system_wrappers/include/field_trial.h" 36 #include "webrtc/system_wrappers/include/field_trial.h"
37 #include "webrtc/system_wrappers/include/logcat_trace_context.h" 37 #include "webrtc/system_wrappers/include/logcat_trace_context.h"
38 #include "webrtc/video_encoder.h"
38 39
39 using rtc::Bind; 40 using rtc::Bind;
40 using rtc::Thread; 41 using rtc::Thread;
41 using rtc::ThreadManager; 42 using rtc::ThreadManager;
42 43
43 using webrtc::CodecSpecificInfo; 44 using webrtc::CodecSpecificInfo;
44 using webrtc::EncodedImage; 45 using webrtc::EncodedImage;
45 using webrtc::VideoFrame; 46 using webrtc::VideoFrame;
46 using webrtc::RTPFragmentationHeader; 47 using webrtc::RTPFragmentationHeader;
47 using webrtc::VideoCodec; 48 using webrtc::VideoCodec;
(...skipping 111 matching lines...) Expand 10 before | Expand all | Expand 10 after
159 const int64_t frame_input_time_ms); 160 const int64_t frame_input_time_ms);
160 bool EncodeByteBufferOnCodecThread(JNIEnv* jni, 161 bool EncodeByteBufferOnCodecThread(JNIEnv* jni,
161 bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index); 162 bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index);
162 bool EncodeTextureOnCodecThread(JNIEnv* jni, 163 bool EncodeTextureOnCodecThread(JNIEnv* jni,
163 bool key_frame, const webrtc::VideoFrame& frame); 164 bool key_frame, const webrtc::VideoFrame& frame);
164 165
165 int32_t RegisterEncodeCompleteCallbackOnCodecThread( 166 int32_t RegisterEncodeCompleteCallbackOnCodecThread(
166 webrtc::EncodedImageCallback* callback); 167 webrtc::EncodedImageCallback* callback);
167 int32_t ReleaseOnCodecThread(); 168 int32_t ReleaseOnCodecThread();
168 int32_t SetRatesOnCodecThread(uint32_t new_bit_rate, uint32_t frame_rate); 169 int32_t SetRatesOnCodecThread(uint32_t new_bit_rate, uint32_t frame_rate);
169 void OnDroppedFrameOnCodecThread();
170 170
171 // Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members. 171 // Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members.
172 int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info); 172 int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info);
173 jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info); 173 jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info);
174 bool GetOutputBufferInfoIsKeyFrame(JNIEnv* jni, jobject j_output_buffer_info); 174 bool GetOutputBufferInfoIsKeyFrame(JNIEnv* jni, jobject j_output_buffer_info);
175 jlong GetOutputBufferInfoPresentationTimestampUs( 175 jlong GetOutputBufferInfoPresentationTimestampUs(
176 JNIEnv* jni, jobject j_output_buffer_info); 176 JNIEnv* jni, jobject j_output_buffer_info);
177 177
178 // Deliver any outputs pending in the MediaCodec to our |callback_| and return 178 // Deliver any outputs pending in the MediaCodec to our |callback_| and return
179 // true on success. 179 // true on success.
180 bool DeliverPendingOutputs(JNIEnv* jni); 180 bool DeliverPendingOutputs(JNIEnv* jni);
181 181
182 // Search for H.264 start codes. 182 // Search for H.264 start codes.
183 int32_t NextNaluPosition(uint8_t *buffer, size_t buffer_size); 183 int32_t NextNaluPosition(uint8_t *buffer, size_t buffer_size);
184 184
185 VideoEncoder::ScalingSettings GetScalingSettings() const override;
186
185 // Displays encoder statistics. 187 // Displays encoder statistics.
186 void LogStatistics(bool force_log); 188 void LogStatistics(bool force_log);
187 189
188 // Type of video codec. 190 // Type of video codec.
189 VideoCodecType codecType_; 191 VideoCodecType codecType_;
190 192
191 // Valid all the time since RegisterEncodeCompleteCallback() Invoke()s to 193 // Valid all the time since RegisterEncodeCompleteCallback() Invoke()s to
192 // |codec_thread_| synchronously. 194 // |codec_thread_| synchronously.
193 webrtc::EncodedImageCallback* callback_; 195 webrtc::EncodedImageCallback* callback_;
194 196
(...skipping 65 matching lines...) Expand 10 before | Expand all | Expand 10 after
260 // |input_frame_infos_|. 262 // |input_frame_infos_|.
261 int64_t output_render_time_ms_; // Last output frame render time from 263 int64_t output_render_time_ms_; // Last output frame render time from
262 // |input_frame_infos_|. 264 // |input_frame_infos_|.
263 webrtc::VideoRotation output_rotation_; // Last output frame rotation from 265 webrtc::VideoRotation output_rotation_; // Last output frame rotation from
264 // |input_frame_infos_|. 266 // |input_frame_infos_|.
265 // Frame size in bytes fed to MediaCodec. 267 // Frame size in bytes fed to MediaCodec.
266 int yuv_size_; 268 int yuv_size_;
267 // True only when between a callback_->OnEncodedImage() call returns a positive 269 // True only when between a callback_->OnEncodedImage() call returns a positive
268 // value and the next Encode() call being ignored. 270 // value and the next Encode() call being ignored.
269 bool drop_next_input_frame_; 271 bool drop_next_input_frame_;
272 bool scale_;
270 // Global references; must be deleted in Release(). 273 // Global references; must be deleted in Release().
271 std::vector<jobject> input_buffers_; 274 std::vector<jobject> input_buffers_;
272 QualityScaler quality_scaler_;
273 // Dynamic resolution change, off by default.
274 bool scale_;
275
276 // H264 bitstream parser, used to extract QP from encoded bitstreams.
277 webrtc::H264BitstreamParser h264_bitstream_parser_; 275 webrtc::H264BitstreamParser h264_bitstream_parser_;
278 276
279 // VP9 variables to populate codec specific structure. 277 // VP9 variables to populate codec specific structure.
280 webrtc::GofInfoVP9 gof_; // Contains each frame's temporal information for 278 webrtc::GofInfoVP9 gof_; // Contains each frame's temporal information for
281 // non-flexible VP9 mode. 279 // non-flexible VP9 mode.
282 uint8_t tl0_pic_idx_; 280 uint8_t tl0_pic_idx_;
283 size_t gof_idx_; 281 size_t gof_idx_;
284 282
285 // EGL context - owned by factory, should not be allocated/destroyed 283 // EGL context - owned by factory, should not be allocated/destroyed
286 // by MediaCodecVideoEncoder. 284 // by MediaCodecVideoEncoder.
(...skipping 119 matching lines...) Expand 10 before | Expand all | Expand 10 after
406 scale_ = false; 404 scale_ = false;
407 if (codecType_ == kVideoCodecVP8) { 405 if (codecType_ == kVideoCodecVP8) {
408 scale_ = codec_settings->VP8().automaticResizeOn; 406 scale_ = codec_settings->VP8().automaticResizeOn;
409 } else if (codecType_ != kVideoCodecVP9) { 407 } else if (codecType_ != kVideoCodecVP9) {
410 scale_ = true; 408 scale_ = true;
411 } 409 }
412 410
413 ALOGD << "InitEncode request: " << init_width << " x " << init_height; 411 ALOGD << "InitEncode request: " << init_width << " x " << init_height;
414 ALOGD << "Encoder automatic resize " << (scale_ ? "enabled" : "disabled"); 412 ALOGD << "Encoder automatic resize " << (scale_ ? "enabled" : "disabled");
415 413
416 if (scale_) {
417 if (codecType_ == kVideoCodecVP8 || codecType_ == kVideoCodecH264) {
418 quality_scaler_.Init(codecType_, codec_settings->startBitrate,
419 codec_settings->width, codec_settings->height,
420 codec_settings->maxFramerate);
421 } else {
422 // When adding codec support to additional hardware codecs, also configure
423 // their QP thresholds for scaling.
424 RTC_NOTREACHED() << "Unsupported codec without configured QP thresholds.";
425 scale_ = false;
426 }
427 QualityScaler::Resolution res = quality_scaler_.GetScaledResolution();
428 init_width = res.width;
429 init_height = res.height;
430 ALOGD << "Scaled resolution: " << init_width << " x " << init_height;
431 }
432
433 return codec_thread_->Invoke<int32_t>( 414 return codec_thread_->Invoke<int32_t>(
434 RTC_FROM_HERE, 415 RTC_FROM_HERE,
435 Bind(&MediaCodecVideoEncoder::InitEncodeOnCodecThread, this, init_width, 416 Bind(&MediaCodecVideoEncoder::InitEncodeOnCodecThread, this, init_width,
436 init_height, codec_settings->startBitrate, 417 init_height, codec_settings->startBitrate,
437 codec_settings->maxFramerate, 418 codec_settings->maxFramerate,
438 codec_settings->expect_encode_from_texture)); 419 codec_settings->expect_encode_from_texture));
439 } 420 }
440 421
441 int32_t MediaCodecVideoEncoder::Encode( 422 int32_t MediaCodecVideoEncoder::Encode(
442 const webrtc::VideoFrame& frame, 423 const webrtc::VideoFrame& frame,
(...skipping 260 matching lines...) Expand 10 before | Expand all | Expand 10 after
703 << ". TS: " << (int)(current_timestamp_us_ / 1000) 684 << ". TS: " << (int)(current_timestamp_us_ / 1000)
704 << ". Q: " << input_frame_infos_.size() << ". Fps: " << last_set_fps_ 685 << ". Q: " << input_frame_infos_.size() << ". Fps: " << last_set_fps_
705 << ". Kbps: " << last_set_bitrate_kbps_; 686 << ". Kbps: " << last_set_bitrate_kbps_;
706 } 687 }
707 688
708 if (drop_next_input_frame_) { 689 if (drop_next_input_frame_) {
709 ALOGW << "Encoder drop frame - failed callback."; 690 ALOGW << "Encoder drop frame - failed callback.";
710 drop_next_input_frame_ = false; 691 drop_next_input_frame_ = false;
711 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; 692 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
712 frames_dropped_media_encoder_++; 693 frames_dropped_media_encoder_++;
713 OnDroppedFrameOnCodecThread();
714 return WEBRTC_VIDEO_CODEC_OK; 694 return WEBRTC_VIDEO_CODEC_OK;
715 } 695 }
716 696
717 RTC_CHECK(frame_types->size() == 1) << "Unexpected stream count"; 697 RTC_CHECK(frame_types->size() == 1) << "Unexpected stream count";
718 698
719 // Check if we accumulated too many frames in encoder input buffers and drop 699 // Check if we accumulated too many frames in encoder input buffers and drop
720 // frame if so. 700 // frame if so.
721 if (input_frame_infos_.size() > MAX_ENCODER_Q_SIZE) { 701 if (input_frame_infos_.size() > MAX_ENCODER_Q_SIZE) {
722 ALOGD << "Already " << input_frame_infos_.size() 702 ALOGD << "Already " << input_frame_infos_.size()
723 << " frames in the queue, dropping" 703 << " frames in the queue, dropping"
724 << ". TS: " << (int)(current_timestamp_us_ / 1000) 704 << ". TS: " << (int)(current_timestamp_us_ / 1000)
725 << ". Fps: " << last_set_fps_ 705 << ". Fps: " << last_set_fps_
726 << ". Consecutive drops: " << consecutive_full_queue_frame_drops_; 706 << ". Consecutive drops: " << consecutive_full_queue_frame_drops_;
727 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; 707 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
728 consecutive_full_queue_frame_drops_++; 708 consecutive_full_queue_frame_drops_++;
729 if (consecutive_full_queue_frame_drops_ >= 709 if (consecutive_full_queue_frame_drops_ >=
730 ENCODER_STALL_FRAMEDROP_THRESHOLD) { 710 ENCODER_STALL_FRAMEDROP_THRESHOLD) {
731 ALOGE << "Encoder got stuck."; 711 ALOGE << "Encoder got stuck.";
732 return ProcessHWErrorOnEncodeOnCodecThread(); 712 return ProcessHWErrorOnEncodeOnCodecThread();
733 } 713 }
734 frames_dropped_media_encoder_++; 714 frames_dropped_media_encoder_++;
735 OnDroppedFrameOnCodecThread();
736 return WEBRTC_VIDEO_CODEC_OK; 715 return WEBRTC_VIDEO_CODEC_OK;
737 } 716 }
738 consecutive_full_queue_frame_drops_ = 0; 717 consecutive_full_queue_frame_drops_ = 0;
739 718
740 rtc::scoped_refptr<webrtc::VideoFrameBuffer> input_buffer( 719 rtc::scoped_refptr<webrtc::VideoFrameBuffer> input_buffer(
741 frame.video_frame_buffer()); 720 frame.video_frame_buffer());
742 if (scale_) {
743 // Check framerate before spatial resolution change.
744 quality_scaler_.OnEncodeFrame(frame.width(), frame.height());
745 const webrtc::QualityScaler::Resolution scaled_resolution =
746 quality_scaler_.GetScaledResolution();
747 if (scaled_resolution.width != frame.width() ||
748 scaled_resolution.height != frame.height()) {
749 if (input_buffer->native_handle() != nullptr) {
750 input_buffer = static_cast<AndroidTextureBuffer*>(input_buffer.get())
751 ->CropScaleAndRotate(frame.width(), frame.height(),
752 0, 0,
753 scaled_resolution.width,
754 scaled_resolution.height,
755 webrtc::kVideoRotation_0);
756 } else {
757 input_buffer = quality_scaler_.GetScaledBuffer(input_buffer);
758 }
759 }
760 }
761 721
762 VideoFrame input_frame(input_buffer, frame.timestamp(), 722 VideoFrame input_frame(input_buffer, frame.timestamp(),
763 frame.render_time_ms(), frame.rotation()); 723 frame.render_time_ms(), frame.rotation());
764 724
765 if (!MaybeReconfigureEncoderOnCodecThread(input_frame)) { 725 if (!MaybeReconfigureEncoderOnCodecThread(input_frame)) {
766 ALOGE << "Failed to reconfigure encoder."; 726 ALOGE << "Failed to reconfigure encoder.";
767 return WEBRTC_VIDEO_CODEC_ERROR; 727 return WEBRTC_VIDEO_CODEC_ERROR;
768 } 728 }
769 729
770 const bool key_frame = 730 const bool key_frame =
771 frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame; 731 frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame;
772 bool encode_status = true; 732 bool encode_status = true;
773 if (!input_frame.video_frame_buffer()->native_handle()) { 733 if (!input_frame.video_frame_buffer()->native_handle()) {
774 int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_, 734 int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_,
775 j_dequeue_input_buffer_method_); 735 j_dequeue_input_buffer_method_);
776 if (CheckException(jni)) { 736 if (CheckException(jni)) {
777 ALOGE << "Exception in dequeu input buffer."; 737 ALOGE << "Exception in dequeu input buffer.";
778 return ProcessHWErrorOnEncodeOnCodecThread(); 738 return ProcessHWErrorOnEncodeOnCodecThread();
779 } 739 }
780 if (j_input_buffer_index == -1) { 740 if (j_input_buffer_index == -1) {
781 // Video codec falls behind - no input buffer available. 741 // Video codec falls behind - no input buffer available.
782 ALOGW << "Encoder drop frame - no input buffers available"; 742 ALOGW << "Encoder drop frame - no input buffers available";
783 if (frames_received_ > 1) { 743 if (frames_received_ > 1) {
784 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; 744 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
785 frames_dropped_media_encoder_++; 745 frames_dropped_media_encoder_++;
786 OnDroppedFrameOnCodecThread();
787 } else { 746 } else {
788 // Input buffers are not ready after codec initialization, HW is still 747 // Input buffers are not ready after codec initialization, HW is still
789 // allocating them - this is expected and should not result in drop 748 // allocating them - this is expected and should not result in drop
790 // frame report. 749 // frame report.
791 frames_received_ = 0; 750 frames_received_ = 0;
792 } 751 }
793 return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887. 752 return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887.
794 } else if (j_input_buffer_index == -2) { 753 } else if (j_input_buffer_index == -2) {
795 return ProcessHWErrorOnEncodeOnCodecThread(); 754 return ProcessHWErrorOnEncodeOnCodecThread();
796 } 755 }
(...skipping 160 matching lines...) Expand 10 before | Expand all | Expand 10 after
957 uint32_t frame_rate) { 916 uint32_t frame_rate) {
958 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); 917 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
959 if (sw_fallback_required_) 918 if (sw_fallback_required_)
960 return WEBRTC_VIDEO_CODEC_OK; 919 return WEBRTC_VIDEO_CODEC_OK;
961 frame_rate = (frame_rate < MAX_ALLOWED_VIDEO_FPS) ? 920 frame_rate = (frame_rate < MAX_ALLOWED_VIDEO_FPS) ?
962 frame_rate : MAX_ALLOWED_VIDEO_FPS; 921 frame_rate : MAX_ALLOWED_VIDEO_FPS;
963 if (last_set_bitrate_kbps_ == new_bit_rate && 922 if (last_set_bitrate_kbps_ == new_bit_rate &&
964 last_set_fps_ == frame_rate) { 923 last_set_fps_ == frame_rate) {
965 return WEBRTC_VIDEO_CODEC_OK; 924 return WEBRTC_VIDEO_CODEC_OK;
966 } 925 }
967 if (scale_) {
968 quality_scaler_.ReportFramerate(frame_rate);
969 }
970 JNIEnv* jni = AttachCurrentThreadIfNeeded(); 926 JNIEnv* jni = AttachCurrentThreadIfNeeded();
971 ScopedLocalRefFrame local_ref_frame(jni); 927 ScopedLocalRefFrame local_ref_frame(jni);
972 if (new_bit_rate > 0) { 928 if (new_bit_rate > 0) {
973 last_set_bitrate_kbps_ = new_bit_rate; 929 last_set_bitrate_kbps_ = new_bit_rate;
974 } 930 }
975 if (frame_rate > 0) { 931 if (frame_rate > 0) {
976 last_set_fps_ = frame_rate; 932 last_set_fps_ = frame_rate;
977 } 933 }
978 bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_, 934 bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
979 j_set_rates_method_, 935 j_set_rates_method_,
(...skipping 91 matching lines...) Expand 10 before | Expand all | Expand 10 after
1071 std::unique_ptr<webrtc::EncodedImage> image( 1027 std::unique_ptr<webrtc::EncodedImage> image(
1072 new webrtc::EncodedImage(payload, payload_size, payload_size)); 1028 new webrtc::EncodedImage(payload, payload_size, payload_size));
1073 image->_encodedWidth = width_; 1029 image->_encodedWidth = width_;
1074 image->_encodedHeight = height_; 1030 image->_encodedHeight = height_;
1075 image->_timeStamp = output_timestamp_; 1031 image->_timeStamp = output_timestamp_;
1076 image->capture_time_ms_ = output_render_time_ms_; 1032 image->capture_time_ms_ = output_render_time_ms_;
1077 image->rotation_ = output_rotation_; 1033 image->rotation_ = output_rotation_;
1078 image->_frameType = 1034 image->_frameType =
1079 (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta); 1035 (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta);
1080 image->_completeFrame = true; 1036 image->_completeFrame = true;
1081 image->adapt_reason_.quality_resolution_downscales =
1082 scale_ ? quality_scaler_.downscale_shift() : -1;
1083
1084 webrtc::CodecSpecificInfo info; 1037 webrtc::CodecSpecificInfo info;
1085 memset(&info, 0, sizeof(info)); 1038 memset(&info, 0, sizeof(info));
1086 info.codecType = codecType_; 1039 info.codecType = codecType_;
1087 if (codecType_ == kVideoCodecVP8) { 1040 if (codecType_ == kVideoCodecVP8) {
1088 info.codecSpecific.VP8.pictureId = picture_id_; 1041 info.codecSpecific.VP8.pictureId = picture_id_;
1089 info.codecSpecific.VP8.nonReference = false; 1042 info.codecSpecific.VP8.nonReference = false;
1090 info.codecSpecific.VP8.simulcastIdx = 0; 1043 info.codecSpecific.VP8.simulcastIdx = 0;
1091 info.codecSpecific.VP8.temporalIdx = webrtc::kNoTemporalIdx; 1044 info.codecSpecific.VP8.temporalIdx = webrtc::kNoTemporalIdx;
1092 info.codecSpecific.VP8.layerSync = false; 1045 info.codecSpecific.VP8.layerSync = false;
1093 info.codecSpecific.VP8.tl0PicIdx = webrtc::kNoTl0PicIdx; 1046 info.codecSpecific.VP8.tl0PicIdx = webrtc::kNoTl0PicIdx;
(...skipping 26 matching lines...) Expand all
1120 1073
1121 // Generate a header describing a single fragment. 1074 // Generate a header describing a single fragment.
1122 webrtc::RTPFragmentationHeader header; 1075 webrtc::RTPFragmentationHeader header;
1123 memset(&header, 0, sizeof(header)); 1076 memset(&header, 0, sizeof(header));
1124 if (codecType_ == kVideoCodecVP8 || codecType_ == kVideoCodecVP9) { 1077 if (codecType_ == kVideoCodecVP8 || codecType_ == kVideoCodecVP9) {
1125 header.VerifyAndAllocateFragmentationHeader(1); 1078 header.VerifyAndAllocateFragmentationHeader(1);
1126 header.fragmentationOffset[0] = 0; 1079 header.fragmentationOffset[0] = 0;
1127 header.fragmentationLength[0] = image->_length; 1080 header.fragmentationLength[0] = image->_length;
1128 header.fragmentationPlType[0] = 0; 1081 header.fragmentationPlType[0] = 0;
1129 header.fragmentationTimeDiff[0] = 0; 1082 header.fragmentationTimeDiff[0] = 0;
1130 if (codecType_ == kVideoCodecVP8 && scale_) { 1083 if (codecType_ == kVideoCodecVP8) {
1131 int qp; 1084 int qp;
1132 if (webrtc::vp8::GetQp(payload, payload_size, &qp)) { 1085 if (webrtc::vp8::GetQp(payload, payload_size, &qp)) {
1133 current_acc_qp_ += qp; 1086 current_acc_qp_ += qp;
1134 quality_scaler_.ReportQP(qp);
1135 image->qp_ = qp; 1087 image->qp_ = qp;
1136 } 1088 }
1137 } 1089 }
1138 } else if (codecType_ == kVideoCodecH264) { 1090 } else if (codecType_ == kVideoCodecH264) {
1139 if (scale_) { 1091 h264_bitstream_parser_.ParseBitstream(payload, payload_size);
1140 h264_bitstream_parser_.ParseBitstream(payload, payload_size); 1092 int qp;
1141 int qp; 1093 if (h264_bitstream_parser_.GetLastSliceQp(&qp)) {
1142 if (h264_bitstream_parser_.GetLastSliceQp(&qp)) { 1094 current_acc_qp_ += qp;
1143 current_acc_qp_ += qp; 1095 image->qp_ = qp;
1144 quality_scaler_.ReportQP(qp);
1145 }
1146 } 1096 }
1147 // For H.264 search for start codes. 1097 // For H.264 search for start codes.
1148 int32_t scPositions[MAX_NALUS_PERFRAME + 1] = {}; 1098 int32_t scPositions[MAX_NALUS_PERFRAME + 1] = {};
1149 int32_t scPositionsLength = 0; 1099 int32_t scPositionsLength = 0;
1150 int32_t scPosition = 0; 1100 int32_t scPosition = 0;
1151 while (scPositionsLength < MAX_NALUS_PERFRAME) { 1101 while (scPositionsLength < MAX_NALUS_PERFRAME) {
1152 int32_t naluPosition = NextNaluPosition( 1102 int32_t naluPosition = NextNaluPosition(
1153 payload + scPosition, payload_size - scPosition); 1103 payload + scPosition, payload_size - scPosition);
1154 if (naluPosition < 0) { 1104 if (naluPosition < 0) {
1155 break; 1105 break;
(...skipping 80 matching lines...) Expand 10 before | Expand all | Expand 10 after
1236 ". QP: " << (current_acc_qp_ / current_frames_divider) << 1186 ". QP: " << (current_acc_qp_ / current_frames_divider) <<
1237 " for last " << statistic_time_ms << " ms."; 1187 " for last " << statistic_time_ms << " ms.";
1238 stat_start_time_ms_ = rtc::TimeMillis(); 1188 stat_start_time_ms_ = rtc::TimeMillis();
1239 current_frames_ = 0; 1189 current_frames_ = 0;
1240 current_bytes_ = 0; 1190 current_bytes_ = 0;
1241 current_acc_qp_ = 0; 1191 current_acc_qp_ = 0;
1242 current_encoding_time_ms_ = 0; 1192 current_encoding_time_ms_ = 0;
1243 } 1193 }
1244 } 1194 }
1245 1195
1196 webrtc::VideoEncoder::ScalingSettings
1197 MediaCodecVideoEncoder::GetScalingSettings() const {
1198 return VideoEncoder::ScalingSettings(scale_);
1199 }
1200
1246 int32_t MediaCodecVideoEncoder::NextNaluPosition( 1201 int32_t MediaCodecVideoEncoder::NextNaluPosition(
1247 uint8_t *buffer, size_t buffer_size) { 1202 uint8_t *buffer, size_t buffer_size) {
1248 if (buffer_size < H264_SC_LENGTH) { 1203 if (buffer_size < H264_SC_LENGTH) {
1249 return -1; 1204 return -1;
1250 } 1205 }
1251 uint8_t *head = buffer; 1206 uint8_t *head = buffer;
1252 // Set end buffer pointer to 4 bytes before actual buffer end so we can 1207 // Set end buffer pointer to 4 bytes before actual buffer end so we can
1253 // access head[1], head[2] and head[3] in a loop without buffer overrun. 1208 // access head[1], head[2] and head[3] in a loop without buffer overrun.
1254 uint8_t *end = buffer + buffer_size - H264_SC_LENGTH; 1209 uint8_t *end = buffer + buffer_size - H264_SC_LENGTH;
1255 1210
(...skipping 13 matching lines...) Expand all
1269 if (head[3] != 0x01) { // got 000000xx 1224 if (head[3] != 0x01) { // got 000000xx
1270 head++; // xx != 1, continue searching. 1225 head++; // xx != 1, continue searching.
1271 continue; 1226 continue;
1272 } 1227 }
1273 return (int32_t)(head - buffer); 1228 return (int32_t)(head - buffer);
1274 } 1229 }
1275 return -1; 1230 return -1;
1276 } 1231 }
1277 1232
1278 void MediaCodecVideoEncoder::OnDroppedFrame() { 1233 void MediaCodecVideoEncoder::OnDroppedFrame() {
1279 // Methods running on the codec thread should call OnDroppedFrameOnCodecThread
1280 // directly.
1281 RTC_DCHECK(!codec_thread_checker_.CalledOnValidThread()); 1234 RTC_DCHECK(!codec_thread_checker_.CalledOnValidThread());
1282 codec_thread_->Invoke<void>(
1283 RTC_FROM_HERE,
1284 Bind(&MediaCodecVideoEncoder::OnDroppedFrameOnCodecThread, this));
1285 }
1286
1287 void MediaCodecVideoEncoder::OnDroppedFrameOnCodecThread() {
1288 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
1289 // Report dropped frame to quality_scaler_.
1290 if (scale_)
1291 quality_scaler_.ReportDroppedFrame();
1292 } 1235 }
1293 1236
1294 const char* MediaCodecVideoEncoder::ImplementationName() const { 1237 const char* MediaCodecVideoEncoder::ImplementationName() const {
1295 return "MediaCodec"; 1238 return "MediaCodec";
1296 } 1239 }
1297 1240
1298 MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() 1241 MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory()
1299 : egl_context_(nullptr) { 1242 : egl_context_(nullptr) {
1300 JNIEnv* jni = AttachCurrentThreadIfNeeded(); 1243 JNIEnv* jni = AttachCurrentThreadIfNeeded();
1301 ScopedLocalRefFrame local_ref_frame(jni); 1244 ScopedLocalRefFrame local_ref_frame(jni);
(...skipping 70 matching lines...) Expand 10 before | Expand all | Expand 10 after
1372 return supported_codecs_; 1315 return supported_codecs_;
1373 } 1316 }
1374 1317
1375 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( 1318 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder(
1376 webrtc::VideoEncoder* encoder) { 1319 webrtc::VideoEncoder* encoder) {
1377 ALOGD << "Destroy video encoder."; 1320 ALOGD << "Destroy video encoder.";
1378 delete encoder; 1321 delete encoder;
1379 } 1322 }
1380 1323
1381 } // namespace webrtc_jni 1324 } // namespace webrtc_jni
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698