Chromium Code Reviews

Unified Diff: webrtc/api/android/jni/androidmediaencoder_jni.cc

Issue 2398963003: Move usage of QualityScaler to ViEEncoder. (Closed)
Patch Set: rebase Created 4 years ago
 /*
  * Copyright 2015 The WebRTC project authors. All Rights Reserved.
  *
  * Use of this source code is governed by a BSD-style license
  * that can be found in the LICENSE file in the root of the source
  * tree. An additional intellectual property rights grant can be found
  * in the file PATENTS. All contributing project authors may
  * be found in the AUTHORS file in the root of the source tree.
  */

(...skipping 19 matching lines...)
 #include "webrtc/base/timeutils.h"
 #include "webrtc/common_types.h"
 #include "webrtc/common_video/h264/h264_bitstream_parser.h"
 #include "webrtc/common_video/h264/profile_level_id.h"
 #include "webrtc/media/engine/internalencoderfactory.h"
 #include "webrtc/modules/video_coding/include/video_codec_interface.h"
 #include "webrtc/modules/video_coding/utility/quality_scaler.h"
 #include "webrtc/modules/video_coding/utility/vp8_header_parser.h"
 #include "webrtc/system_wrappers/include/field_trial.h"
 #include "webrtc/system_wrappers/include/logcat_trace_context.h"
+#include "webrtc/video_encoder.h"

 using rtc::Bind;
 using rtc::Thread;
 using rtc::ThreadManager;

 using webrtc::CodecSpecificInfo;
 using webrtc::EncodedImage;
 using webrtc::VideoFrame;
 using webrtc::RTPFragmentationHeader;
 using webrtc::VideoCodec;
(...skipping 63 matching lines...)
       webrtc::EncodedImageCallback* callback) override;
   int32_t Release() override;
   int32_t SetChannelParameters(uint32_t /* packet_loss */,
                                int64_t /* rtt */) override;
   int32_t SetRateAllocation(const webrtc::BitrateAllocation& rate_allocation,
                             uint32_t frame_rate) override;

   // rtc::MessageHandler implementation.
   void OnMessage(rtc::Message* msg) override;

-  void OnDroppedFrame() override;
-
   bool SupportsNativeHandle() const override { return egl_context_ != nullptr; }
   const char* ImplementationName() const override;

  private:
   // ResetCodecOnCodecThread() calls ReleaseOnCodecThread() and
   // InitEncodeOnCodecThread() in an attempt to restore the codec to an
   // operable state. Necessary after all manner of OMX-layer errors.
   // Returns true if the codec was reset successfully.
   bool ResetCodecOnCodecThread();

(...skipping 26 matching lines...)
       const int64_t frame_input_time_ms);
   bool EncodeByteBufferOnCodecThread(JNIEnv* jni,
       bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index);
   bool EncodeTextureOnCodecThread(JNIEnv* jni,
       bool key_frame, const webrtc::VideoFrame& frame);

   int32_t RegisterEncodeCompleteCallbackOnCodecThread(
       webrtc::EncodedImageCallback* callback);
   int32_t ReleaseOnCodecThread();
   int32_t SetRatesOnCodecThread(uint32_t new_bit_rate, uint32_t frame_rate);
-  void OnDroppedFrameOnCodecThread();

   // Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members.
   int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info);
   jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info);
   bool GetOutputBufferInfoIsKeyFrame(JNIEnv* jni, jobject j_output_buffer_info);
   jlong GetOutputBufferInfoPresentationTimestampUs(
       JNIEnv* jni, jobject j_output_buffer_info);

   // Deliver any outputs pending in the MediaCodec to our |callback_| and return
   // true on success.
   bool DeliverPendingOutputs(JNIEnv* jni);

   // Search for H.264 start codes.
   int32_t NextNaluPosition(uint8_t *buffer, size_t buffer_size);

+  VideoEncoder::ScalingSettings GetScalingSettings() const override;
+
   // Displays encoder statistics.
   void LogStatistics(bool force_log);

   // Type of video codec.
   const cricket::VideoCodec codec_;

   // Valid all the time since RegisterEncodeCompleteCallback() Invoke()s to
   // |codec_thread_| synchronously.
   webrtc::EncodedImageCallback* callback_;

(...skipping 65 matching lines...)
                                    // |input_frame_infos_|.
   int64_t output_render_time_ms_;  // Last output frame render time from
                                    // |input_frame_infos_|.
   webrtc::VideoRotation output_rotation_;  // Last output frame rotation from
                                            // |input_frame_infos_|.
   // Frame size in bytes fed to MediaCodec.
   int yuv_size_;
   // True only when between a callback_->OnEncodedImage() call return a positive
   // value and the next Encode() call being ignored.
   bool drop_next_input_frame_;
+  bool scale_;
   // Global references; must be deleted in Release().
   std::vector<jobject> input_buffers_;
-  QualityScaler quality_scaler_;
-  // Dynamic resolution change, off by default.
-  bool scale_;
-
-  // H264 bitstream parser, used to extract QP from encoded bitstreams.
   webrtc::H264BitstreamParser h264_bitstream_parser_;

   // VP9 variables to populate codec specific structure.
   webrtc::GofInfoVP9 gof_;  // Contains each frame's temporal information for
                             // non-flexible VP9 mode.
   uint8_t tl0_pic_idx_;
   size_t gof_idx_;

   // EGL context - owned by factory, should not be allocated/destroyed
   // by MediaCodecVideoEncoder.
(...skipping 121 matching lines...)
   scale_ = false;
   if (codec_type == kVideoCodecVP8) {
     scale_ = codec_settings->VP8().automaticResizeOn;
   } else if (codec_type != kVideoCodecVP9) {
     scale_ = true;
   }

   ALOGD << "InitEncode request: " << init_width << " x " << init_height;
   ALOGD << "Encoder automatic resize " << (scale_ ? "enabled" : "disabled");

-  if (scale_) {
-    if (codec_type == kVideoCodecVP8 || codec_type == kVideoCodecH264) {
-      quality_scaler_.Init(codec_type, codec_settings->startBitrate,
-                           codec_settings->width, codec_settings->height,
-                           codec_settings->maxFramerate);
-    } else {
-      // When adding codec support to additional hardware codecs, also configure
-      // their QP thresholds for scaling.
-      RTC_NOTREACHED() << "Unsupported codec without configured QP thresholds.";
-      scale_ = false;
-    }
-    QualityScaler::Resolution res = quality_scaler_.GetScaledResolution();
-    init_width = res.width;
-    init_height = res.height;
-    ALOGD << "Scaled resolution: " << init_width << " x " << init_height;
-  }
-
   return codec_thread_->Invoke<int32_t>(
       RTC_FROM_HERE,
       Bind(&MediaCodecVideoEncoder::InitEncodeOnCodecThread, this, init_width,
            init_height, codec_settings->startBitrate,
            codec_settings->maxFramerate,
            codec_settings->expect_encode_from_texture));
 }

 int32_t MediaCodecVideoEncoder::Encode(
     const webrtc::VideoFrame& frame,
(...skipping 261 matching lines...)
           << ". TS: " << (int)(current_timestamp_us_ / 1000)
           << ". Q: " << input_frame_infos_.size() << ". Fps: " << last_set_fps_
           << ". Kbps: " << last_set_bitrate_kbps_;
   }

   if (drop_next_input_frame_) {
     ALOGW << "Encoder drop frame - failed callback.";
     drop_next_input_frame_ = false;
     current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
     frames_dropped_media_encoder_++;
-    OnDroppedFrameOnCodecThread();
     return WEBRTC_VIDEO_CODEC_OK;
   }

   RTC_CHECK(frame_types->size() == 1) << "Unexpected stream count";

   // Check if we accumulated too many frames in encoder input buffers and drop
   // frame if so.
   if (input_frame_infos_.size() > MAX_ENCODER_Q_SIZE) {
     ALOGD << "Already " << input_frame_infos_.size()
           << " frames in the queue, dropping"
           << ". TS: " << (int)(current_timestamp_us_ / 1000)
           << ". Fps: " << last_set_fps_
           << ". Consecutive drops: " << consecutive_full_queue_frame_drops_;
     current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
     consecutive_full_queue_frame_drops_++;
     if (consecutive_full_queue_frame_drops_ >=
         ENCODER_STALL_FRAMEDROP_THRESHOLD) {
       ALOGE << "Encoder got stuck.";
       return ProcessHWErrorOnEncodeOnCodecThread();
     }
     frames_dropped_media_encoder_++;
-    OnDroppedFrameOnCodecThread();
     return WEBRTC_VIDEO_CODEC_OK;
   }
   consecutive_full_queue_frame_drops_ = 0;

   rtc::scoped_refptr<webrtc::VideoFrameBuffer> input_buffer(
       frame.video_frame_buffer());
-  if (scale_) {
-    // Check framerate before spatial resolution change.
-    quality_scaler_.OnEncodeFrame(frame.width(), frame.height());
-    const webrtc::QualityScaler::Resolution scaled_resolution =
-        quality_scaler_.GetScaledResolution();
-    if (scaled_resolution.width != frame.width() ||
-        scaled_resolution.height != frame.height()) {
-      if (input_buffer->native_handle() != nullptr) {
-        input_buffer = static_cast<AndroidTextureBuffer*>(input_buffer.get())
-                           ->CropScaleAndRotate(frame.width(), frame.height(),
-                                                0, 0,
-                                                scaled_resolution.width,
-                                                scaled_resolution.height,
-                                                webrtc::kVideoRotation_0);
-      } else {
-        input_buffer = quality_scaler_.GetScaledBuffer(input_buffer);
-      }
-    }
-  }

   VideoFrame input_frame(input_buffer, frame.timestamp(),
                          frame.render_time_ms(), frame.rotation());

   if (!MaybeReconfigureEncoderOnCodecThread(input_frame)) {
     ALOGE << "Failed to reconfigure encoder.";
     return WEBRTC_VIDEO_CODEC_ERROR;
   }

   const bool key_frame =
       frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame;
   bool encode_status = true;
   if (!input_frame.video_frame_buffer()->native_handle()) {
     int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_,
         j_dequeue_input_buffer_method_);
     if (CheckException(jni)) {
       ALOGE << "Exception in dequeue input buffer.";
       return ProcessHWErrorOnEncodeOnCodecThread();
     }
     if (j_input_buffer_index == -1) {
       // Video codec falls behind - no input buffer available.
       ALOGW << "Encoder drop frame - no input buffers available";
       if (frames_received_ > 1) {
         current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
         frames_dropped_media_encoder_++;
-        OnDroppedFrameOnCodecThread();
       } else {
         // Input buffers are not ready after codec initialization, HW is still
         // allocating them - this is expected and should not result in drop
         // frame report.
         frames_received_ = 0;
       }
       return WEBRTC_VIDEO_CODEC_OK;  // TODO(fischman): see webrtc bug 2887.
     } else if (j_input_buffer_index == -2) {
       return ProcessHWErrorOnEncodeOnCodecThread();
     }
(...skipping 160 matching lines...)
     uint32_t frame_rate) {
   RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
   if (sw_fallback_required_)
     return WEBRTC_VIDEO_CODEC_OK;
   frame_rate = (frame_rate < MAX_ALLOWED_VIDEO_FPS) ?
       frame_rate : MAX_ALLOWED_VIDEO_FPS;
   if (last_set_bitrate_kbps_ == new_bit_rate &&
       last_set_fps_ == frame_rate) {
     return WEBRTC_VIDEO_CODEC_OK;
   }
-  if (scale_) {
-    quality_scaler_.ReportFramerate(frame_rate);
-  }
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
   ScopedLocalRefFrame local_ref_frame(jni);
   if (new_bit_rate > 0) {
     last_set_bitrate_kbps_ = new_bit_rate;
   }
   if (frame_rate > 0) {
     last_set_fps_ = frame_rate;
   }
   bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
                                     j_set_rates_method_,
(...skipping 94 matching lines...)
   std::unique_ptr<webrtc::EncodedImage> image(
       new webrtc::EncodedImage(payload, payload_size, payload_size));
   image->_encodedWidth = width_;
   image->_encodedHeight = height_;
   image->_timeStamp = output_timestamp_;
   image->capture_time_ms_ = output_render_time_ms_;
   image->rotation_ = output_rotation_;
   image->_frameType =
       (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta);
   image->_completeFrame = true;
-  image->adapt_reason_.quality_resolution_downscales =
-      scale_ ? quality_scaler_.downscale_shift() : -1;
-
   webrtc::CodecSpecificInfo info;
   memset(&info, 0, sizeof(info));
   info.codecType = codec_type;
   if (codec_type == kVideoCodecVP8) {
     info.codecSpecific.VP8.pictureId = picture_id_;
     info.codecSpecific.VP8.nonReference = false;
     info.codecSpecific.VP8.simulcastIdx = 0;
     info.codecSpecific.VP8.temporalIdx = webrtc::kNoTemporalIdx;
     info.codecSpecific.VP8.layerSync = false;
     info.codecSpecific.VP8.tl0PicIdx = webrtc::kNoTl0PicIdx;
(...skipping 26 matching lines...)

   // Generate a header describing a single fragment.
   webrtc::RTPFragmentationHeader header;
   memset(&header, 0, sizeof(header));
   if (codec_type == kVideoCodecVP8 || codec_type == kVideoCodecVP9) {
     header.VerifyAndAllocateFragmentationHeader(1);
     header.fragmentationOffset[0] = 0;
     header.fragmentationLength[0] = image->_length;
     header.fragmentationPlType[0] = 0;
     header.fragmentationTimeDiff[0] = 0;
-    if (codec_type == kVideoCodecVP8 && scale_) {
+    if (codec_type == kVideoCodecVP8) {
       int qp;
       if (webrtc::vp8::GetQp(payload, payload_size, &qp)) {
         current_acc_qp_ += qp;
-        quality_scaler_.ReportQP(qp);
         image->qp_ = qp;
       }
     }
   } else if (codec_type == kVideoCodecH264) {
-    if (scale_) {
-      h264_bitstream_parser_.ParseBitstream(payload, payload_size);
-      int qp;
-      if (h264_bitstream_parser_.GetLastSliceQp(&qp)) {
-        current_acc_qp_ += qp;
-        quality_scaler_.ReportQP(qp);
-        image->qp_ = qp;
-      }
-    }
+    h264_bitstream_parser_.ParseBitstream(payload, payload_size);
+    int qp;
+    if (h264_bitstream_parser_.GetLastSliceQp(&qp)) {
+      current_acc_qp_ += qp;
+      image->qp_ = qp;
+    }
     // For H.264 search for start codes.
     int32_t scPositions[MAX_NALUS_PERFRAME + 1] = {};
     int32_t scPositionsLength = 0;
     int32_t scPosition = 0;
     while (scPositionsLength < MAX_NALUS_PERFRAME) {
       int32_t naluPosition = NextNaluPosition(
           payload + scPosition, payload_size - scPosition);
       if (naluPosition < 0) {
         break;
(...skipping 80 matching lines...)
         ". QP: " << (current_acc_qp_ / current_frames_divider) <<
         " for last " << statistic_time_ms << " ms.";
     stat_start_time_ms_ = rtc::TimeMillis();
     current_frames_ = 0;
     current_bytes_ = 0;
     current_acc_qp_ = 0;
     current_encoding_time_ms_ = 0;
   }
 }

+webrtc::VideoEncoder::ScalingSettings
+MediaCodecVideoEncoder::GetScalingSettings() const {
+  return VideoEncoder::ScalingSettings(scale_);
+}
+
 int32_t MediaCodecVideoEncoder::NextNaluPosition(
     uint8_t *buffer, size_t buffer_size) {
   if (buffer_size < H264_SC_LENGTH) {
     return -1;
   }
   uint8_t *head = buffer;
   // Set end buffer pointer to 4 bytes before actual buffer end so we can
   // access head[1], head[2] and head[3] in a loop without buffer overrun.
   uint8_t *end = buffer + buffer_size - H264_SC_LENGTH;

(...skipping 12 matching lines...)
     }
     if (head[3] != 0x01) {  // got 000000xx
       head++;  // xx != 1, continue searching.
       continue;
     }
     return (int32_t)(head - buffer);
   }
   return -1;
 }

-void MediaCodecVideoEncoder::OnDroppedFrame() {
-  // Methods running on the codec thread should call OnDroppedFrameOnCodecThread
-  // directly.
-  RTC_DCHECK(!codec_thread_checker_.CalledOnValidThread());
-  codec_thread_->Invoke<void>(
-      RTC_FROM_HERE,
-      Bind(&MediaCodecVideoEncoder::OnDroppedFrameOnCodecThread, this));
-}
-
-void MediaCodecVideoEncoder::OnDroppedFrameOnCodecThread() {
-  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
-  // Report dropped frame to quality_scaler_.
-  if (scale_)
-    quality_scaler_.ReportDroppedFrame();
-}
-
 const char* MediaCodecVideoEncoder::ImplementationName() const {
   return "MediaCodec";
 }

 MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory()
     : egl_context_(nullptr) {
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
   ScopedLocalRefFrame local_ref_frame(jni);
   jclass j_encoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoEncoder");
   supported_codecs_.clear();
(...skipping 81 matching lines...)
   return supported_codecs_;
 }

 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder(
     webrtc::VideoEncoder* encoder) {
   ALOGD << "Destroy video encoder.";
   delete encoder;
 }

 }  // namespace webrtc_jni
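
After this change the JNI encoder no longer owns a QualityScaler; it only advertises whether quality scaling should be enabled, via the new GetScalingSettings() override, while QP tracking and resolution decisions move to the encoder's owner (ViEEncoder in this CL). The sketch below illustrates that division of labor. It is a standalone, hypothetical illustration: the types VideoEncoderLike, OwnerSideScaler, and EncoderOwner are stand-ins invented for this example, not WebRTC code and not part of this patch.

// Hypothetical illustration of the post-CL split (stand-in types, not WebRTC
// code): the encoder only reports whether quality scaling is wanted; the
// owner tracks QP / dropped frames and decides on downscaling.
#include <memory>

struct ScalingSettings {
  explicit ScalingSettings(bool on) : enabled(on) {}
  const bool enabled;
};

class VideoEncoderLike {
 public:
  virtual ~VideoEncoderLike() = default;
  // Mirrors the GetScalingSettings() override added in this patch.
  virtual ScalingSettings GetScalingSettings() const = 0;
};

// Hypothetical owner-side scaler, analogous in spirit to the QualityScaler
// usage removed from the JNI encoder above.
class OwnerSideScaler {
 public:
  void ReportQp(int /* qp */) { /* accumulate QP, maybe request a downscale */ }
  void ReportDroppedFrame() { /* count drops, maybe request a downscale */ }
};

class EncoderOwner {
 public:
  explicit EncoderOwner(const VideoEncoderLike& encoder) {
    // Create a scaler only if the encoder asks for quality scaling.
    if (encoder.GetScalingSettings().enabled)
      scaler_.reset(new OwnerSideScaler());
  }

  void OnEncodedFrame(int qp) {
    if (scaler_)
      scaler_->ReportQp(qp);
  }

  void OnFrameDropped() {
    if (scaler_)
      scaler_->ReportDroppedFrame();
  }

 private:
  std::unique_ptr<OwnerSideScaler> scaler_;  // Null when scaling is disabled.
};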