OLD | NEW |
1 /* | 1 /* |
2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
(...skipping 106 matching lines...)
117 webrtc::EncodedImageCallback* callback) override; | 117 webrtc::EncodedImageCallback* callback) override; |
118 int32_t Release() override; | 118 int32_t Release() override; |
119 int32_t SetChannelParameters(uint32_t /* packet_loss */, | 119 int32_t SetChannelParameters(uint32_t /* packet_loss */, |
120 int64_t /* rtt */) override; | 120 int64_t /* rtt */) override; |
121 int32_t SetRateAllocation(const webrtc::BitrateAllocation& rate_allocation, | 121 int32_t SetRateAllocation(const webrtc::BitrateAllocation& rate_allocation, |
122 uint32_t frame_rate) override; | 122 uint32_t frame_rate) override; |
123 | 123 |
124 bool SupportsNativeHandle() const override { return egl_context_ != nullptr; } | 124 bool SupportsNativeHandle() const override { return egl_context_ != nullptr; } |
125 const char* ImplementationName() const override; | 125 const char* ImplementationName() const override; |
126 | 126 |
| 127 // Fills the input buffer with data from the buffers passed as parameters. |
| 128 bool FillInputBuffer(JNIEnv* jni, |
| 129 int input_buffer_index, |
| 130 uint8_t const* buffer_y, |
| 131 int stride_y, |
| 132 uint8_t const* buffer_u, |
| 133 int stride_u, |
| 134 uint8_t const* buffer_v, |
| 135 int stride_v); |
| 136 |
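For reference, the plane/stride parameters of the new FillInputBuffer follow the standard I420 layout: a full-resolution Y plane and 2x2-subsampled U and V planes. A self-contained sketch (not part of this patch) of the size a tightly packed caller-side frame occupies:

#include <cstddef>

// Bytes in a tightly packed I420 frame, i.e. stride_y == width and
// stride_u == stride_v == (width + 1) / 2.
size_t I420FrameSize(int width, int height) {
  const size_t luma = static_cast<size_t>(width) * height;
  const size_t chroma =
      static_cast<size_t>((width + 1) / 2) * ((height + 1) / 2);
  return luma + 2 * chroma;  // e.g. 640x480 -> 460800 bytes
}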
127 private: | 137 private: |
128 class EncodeTask : public rtc::QueuedTask { | 138 class EncodeTask : public rtc::QueuedTask { |
129 public: | 139 public: |
130 explicit EncodeTask(rtc::WeakPtr<MediaCodecVideoEncoder> encoder); | 140 explicit EncodeTask(rtc::WeakPtr<MediaCodecVideoEncoder> encoder); |
131 bool Run() override; | 141 bool Run() override; |
132 | 142 |
133 private: | 143 private: |
134 rtc::WeakPtr<MediaCodecVideoEncoder> encoder_; | 144 rtc::WeakPtr<MediaCodecVideoEncoder> encoder_; |
135 }; | 145 }; |
136 | 146 |
(...skipping 16 matching lines...)
153 // previously-current values are reused instead of the passed parameters | 163 // previously-current values are reused instead of the passed parameters |
154 // (makes it easier to reason about thread-safety). | 164 // (makes it easier to reason about thread-safety). |
155 int32_t InitEncodeInternal(int width, | 165 int32_t InitEncodeInternal(int width, |
156 int height, | 166 int height, |
157 int kbps, | 167 int kbps, |
158 int fps, | 168 int fps, |
159 bool use_surface); | 169 bool use_surface); |
160 // Reconfigure to match |frame| in width and height. Also reconfigures the | 170 // Reconfigure to match |frame| in width and height. Also reconfigures the |
161 // encoder if |frame| is a texture/byte buffer and the encoder is initialized | 171 // encoder if |frame| is a texture/byte buffer and the encoder is initialized |
162 // for byte buffer/texture. Returns false if reconfiguring fails. | 172 // for byte buffer/texture. Returns false if reconfiguring fails. |
163 bool MaybeReconfigureEncoder(const webrtc::VideoFrame& frame); | 173 bool MaybeReconfigureEncoder(JNIEnv* jni, const webrtc::VideoFrame& frame); |
| 174 |
| 175 // Returns true if the frame is a texture frame and we should use |
| 176 // surface-based encoding. |
| 177 bool IsTextureFrame(JNIEnv* jni, const webrtc::VideoFrame& frame); |
| 178 |
164 bool EncodeByteBuffer(JNIEnv* jni, | 179 bool EncodeByteBuffer(JNIEnv* jni, |
165 bool key_frame, | 180 bool key_frame, |
166 const webrtc::VideoFrame& frame, | 181 const webrtc::VideoFrame& frame, |
167 int input_buffer_index); | 182 int input_buffer_index); |
168 bool EncodeTexture(JNIEnv* jni, | 183 bool EncodeTexture(JNIEnv* jni, |
169 bool key_frame, | 184 bool key_frame, |
170 const webrtc::VideoFrame& frame); | 185 const webrtc::VideoFrame& frame); |
| 186 // Encodes a new-style org.webrtc.VideoFrame. Might be an I420 or a texture |
| 187 // frame. |
| 188 bool EncodeJavaFrame(JNIEnv* jni, |
| 189 bool key_frame, |
| 190 jobject frame, |
| 191 int input_buffer_index); |
171 | 192 |
172 // Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members. | 193 // Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members. |
173 int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info); | 194 int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info); |
174 jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info); | 195 jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info); |
175 bool GetOutputBufferInfoIsKeyFrame(JNIEnv* jni, jobject j_output_buffer_info); | 196 bool GetOutputBufferInfoIsKeyFrame(JNIEnv* jni, jobject j_output_buffer_info); |
176 jlong GetOutputBufferInfoPresentationTimestampUs( | 197 jlong GetOutputBufferInfoPresentationTimestampUs( |
177 JNIEnv* jni, jobject j_output_buffer_info); | 198 JNIEnv* jni, jobject j_output_buffer_info); |
178 | 199 |
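The bodies of these accessors live in a skipped region; a plausible sketch of the pattern, assuming the GetIntField helper from this project's jni_helpers.h:

// Hedged sketch only - the real definition is in the skipped region.
int MediaCodecVideoEncoder::GetOutputBufferInfoIndex(
    JNIEnv* jni, jobject j_output_buffer_info) {
  return GetIntField(jni, j_output_buffer_info, j_info_index_field_);
}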
179 // Deliver any outputs pending in the MediaCodec to our |callback_| and return | 200 // Deliver any outputs pending in the MediaCodec to our |callback_| and return |
180 // true on success. | 201 // true on success. |
(...skipping 22 matching lines...)
203 // State that is constant for the lifetime of this object once the ctor | 224 // State that is constant for the lifetime of this object once the ctor |
204 // returns. | 225 // returns. |
205 rtc::SequencedTaskChecker encoder_queue_checker_; | 226 rtc::SequencedTaskChecker encoder_queue_checker_; |
206 ScopedGlobalRef<jclass> j_media_codec_video_encoder_class_; | 227 ScopedGlobalRef<jclass> j_media_codec_video_encoder_class_; |
207 ScopedGlobalRef<jobject> j_media_codec_video_encoder_; | 228 ScopedGlobalRef<jobject> j_media_codec_video_encoder_; |
208 jmethodID j_init_encode_method_; | 229 jmethodID j_init_encode_method_; |
209 jmethodID j_get_input_buffers_method_; | 230 jmethodID j_get_input_buffers_method_; |
210 jmethodID j_dequeue_input_buffer_method_; | 231 jmethodID j_dequeue_input_buffer_method_; |
211 jmethodID j_encode_buffer_method_; | 232 jmethodID j_encode_buffer_method_; |
212 jmethodID j_encode_texture_method_; | 233 jmethodID j_encode_texture_method_; |
| 234 jmethodID j_encode_frame_method_; |
213 jmethodID j_release_method_; | 235 jmethodID j_release_method_; |
214 jmethodID j_set_rates_method_; | 236 jmethodID j_set_rates_method_; |
215 jmethodID j_dequeue_output_buffer_method_; | 237 jmethodID j_dequeue_output_buffer_method_; |
216 jmethodID j_release_output_buffer_method_; | 238 jmethodID j_release_output_buffer_method_; |
217 jfieldID j_color_format_field_; | 239 jfieldID j_color_format_field_; |
218 jfieldID j_info_index_field_; | 240 jfieldID j_info_index_field_; |
219 jfieldID j_info_buffer_field_; | 241 jfieldID j_info_buffer_field_; |
220 jfieldID j_info_is_key_frame_field_; | 242 jfieldID j_info_is_key_frame_field_; |
221 jfieldID j_info_presentation_timestamp_us_field_; | 243 jfieldID j_info_presentation_timestamp_us_field_; |
222 | 244 |
| 245 const JavaVideoFrameFactory video_frame_factory_; |
| 246 ScopedGlobalRef<jclass> j_video_frame_texture_buffer_class_; |
| 247 |
223 // State that is valid only between InitEncode() and the next Release(). | 248 // State that is valid only between InitEncode() and the next Release(). |
224 int width_; // Frame width in pixels. | 249 int width_; // Frame width in pixels. |
225 int height_; // Frame height in pixels. | 250 int height_; // Frame height in pixels. |
226 bool inited_; | 251 bool inited_; |
227 bool use_surface_; | 252 bool use_surface_; |
228 enum libyuv::FourCC encoder_fourcc_; // Encoder color space format. | 253 enum libyuv::FourCC encoder_fourcc_; // Encoder color space format. |
229 int last_set_bitrate_kbps_; // Last-requested bitrate in kbps. | 254 int last_set_bitrate_kbps_; // Last-requested bitrate in kbps. |
230 int last_set_fps_; // Last-requested frame rate. | 255 int last_set_fps_; // Last-requested frame rate. |
231 int64_t current_timestamp_us_; // Current frame timestamps in us. | 256 int64_t current_timestamp_us_; // Current frame timestamps in us. |
232 int frames_received_; // Number of frames received by encoder. | 257 int frames_received_; // Number of frames received by encoder. |
(...skipping 89 matching lines...)
322 j_media_codec_video_encoder_class_( | 347 j_media_codec_video_encoder_class_( |
323 jni, | 348 jni, |
324 FindClass(jni, "org/webrtc/MediaCodecVideoEncoder")), | 349 FindClass(jni, "org/webrtc/MediaCodecVideoEncoder")), |
325 j_media_codec_video_encoder_( | 350 j_media_codec_video_encoder_( |
326 jni, | 351 jni, |
327 jni->NewObject(*j_media_codec_video_encoder_class_, | 352 jni->NewObject(*j_media_codec_video_encoder_class_, |
328 GetMethodID(jni, | 353 GetMethodID(jni, |
329 *j_media_codec_video_encoder_class_, | 354 *j_media_codec_video_encoder_class_, |
330 "<init>", | 355 "<init>", |
331 "()V"))), | 356 "()V"))), |
| 357 video_frame_factory_(jni), |
| 358 j_video_frame_texture_buffer_class_( |
| 359 jni, |
| 360 FindClass(jni, "org/webrtc/VideoFrame$TextureBuffer")), |
332 inited_(false), | 361 inited_(false), |
333 use_surface_(false), | 362 use_surface_(false), |
334 egl_context_(egl_context), | 363 egl_context_(egl_context), |
335 sw_fallback_required_(false) { | 364 sw_fallback_required_(false) { |
336 encoder_queue_checker_.Detach(); | 365 encoder_queue_checker_.Detach(); |
337 | 366 |
338 jclass j_output_buffer_info_class = | 367 jclass j_output_buffer_info_class = |
339 FindClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo"); | 368 FindClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo"); |
340 j_init_encode_method_ = | 369 j_init_encode_method_ = |
341 GetMethodID(jni, *j_media_codec_video_encoder_class_, "initEncode", | 370 GetMethodID(jni, *j_media_codec_video_encoder_class_, "initEncode", |
342 "(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;" | 371 "(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;" |
343 "IIIIILorg/webrtc/EglBase14$Context;)Z"); | 372 "IIIIILorg/webrtc/EglBase14$Context;)Z"); |
344 j_get_input_buffers_method_ = GetMethodID( | 373 j_get_input_buffers_method_ = GetMethodID( |
345 jni, | 374 jni, |
346 *j_media_codec_video_encoder_class_, | 375 *j_media_codec_video_encoder_class_, |
347 "getInputBuffers", | 376 "getInputBuffers", |
348 "()[Ljava/nio/ByteBuffer;"); | 377 "()[Ljava/nio/ByteBuffer;"); |
349 j_dequeue_input_buffer_method_ = GetMethodID( | 378 j_dequeue_input_buffer_method_ = GetMethodID( |
350 jni, *j_media_codec_video_encoder_class_, "dequeueInputBuffer", "()I"); | 379 jni, *j_media_codec_video_encoder_class_, "dequeueInputBuffer", "()I"); |
351 j_encode_buffer_method_ = GetMethodID( | 380 j_encode_buffer_method_ = GetMethodID( |
352 jni, *j_media_codec_video_encoder_class_, "encodeBuffer", "(ZIIJ)Z"); | 381 jni, *j_media_codec_video_encoder_class_, "encodeBuffer", "(ZIIJ)Z"); |
353 j_encode_texture_method_ = GetMethodID( | 382 j_encode_texture_method_ = GetMethodID( |
354 jni, *j_media_codec_video_encoder_class_, "encodeTexture", | 383 jni, *j_media_codec_video_encoder_class_, "encodeTexture", |
355 "(ZI[FJ)Z"); | 384 "(ZI[FJ)Z"); |
| 385 j_encode_frame_method_ = |
| 386 GetMethodID(jni, *j_media_codec_video_encoder_class_, "encodeFrame", |
| 387 "(JZLorg/webrtc/VideoFrame;I)Z"); |
356 j_release_method_ = | 388 j_release_method_ = |
357 GetMethodID(jni, *j_media_codec_video_encoder_class_, "release", "()V"); | 389 GetMethodID(jni, *j_media_codec_video_encoder_class_, "release", "()V"); |
358 j_set_rates_method_ = GetMethodID( | 390 j_set_rates_method_ = GetMethodID( |
359 jni, *j_media_codec_video_encoder_class_, "setRates", "(II)Z"); | 391 jni, *j_media_codec_video_encoder_class_, "setRates", "(II)Z"); |
360 j_dequeue_output_buffer_method_ = GetMethodID( | 392 j_dequeue_output_buffer_method_ = GetMethodID( |
361 jni, | 393 jni, |
362 *j_media_codec_video_encoder_class_, | 394 *j_media_codec_video_encoder_class_, |
363 "dequeueOutputBuffer", | 395 "dequeueOutputBuffer", |
364 "()Lorg/webrtc/MediaCodecVideoEncoder$OutputBufferInfo;"); | 396 "()Lorg/webrtc/MediaCodecVideoEncoder$OutputBufferInfo;"); |
365 j_release_output_buffer_method_ = GetMethodID( | 397 j_release_output_buffer_method_ = GetMethodID( |
(...skipping 363 matching lines...)
729 return WEBRTC_VIDEO_CODEC_OK; | 761 return WEBRTC_VIDEO_CODEC_OK; |
730 } | 762 } |
731 consecutive_full_queue_frame_drops_ = 0; | 763 consecutive_full_queue_frame_drops_ = 0; |
732 | 764 |
733 rtc::scoped_refptr<webrtc::VideoFrameBuffer> input_buffer( | 765 rtc::scoped_refptr<webrtc::VideoFrameBuffer> input_buffer( |
734 frame.video_frame_buffer()); | 766 frame.video_frame_buffer()); |
735 | 767 |
736 VideoFrame input_frame(input_buffer, frame.timestamp(), | 768 VideoFrame input_frame(input_buffer, frame.timestamp(), |
737 frame.render_time_ms(), frame.rotation()); | 769 frame.render_time_ms(), frame.rotation()); |
738 | 770 |
739 if (!MaybeReconfigureEncoder(input_frame)) { | 771 if (!MaybeReconfigureEncoder(jni, input_frame)) { |
740 ALOGE << "Failed to reconfigure encoder."; | 772 ALOGE << "Failed to reconfigure encoder."; |
741 return WEBRTC_VIDEO_CODEC_ERROR; | 773 return WEBRTC_VIDEO_CODEC_ERROR; |
742 } | 774 } |
743 | 775 |
744 const bool key_frame = | 776 const bool key_frame = |
745 frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame; | 777 frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame; |
746 bool encode_status = true; | 778 bool encode_status = true; |
747 if (input_frame.video_frame_buffer()->type() != | 779 |
748 webrtc::VideoFrameBuffer::Type::kNative) { | 780 int j_input_buffer_index = -1; |
749 int j_input_buffer_index = jni->CallIntMethod( | 781 if (!use_surface_) { |
750 *j_media_codec_video_encoder_, j_dequeue_input_buffer_method_); | 782 j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_, |
| 783 j_dequeue_input_buffer_method_); |
751 if (CheckException(jni)) { | 784 if (CheckException(jni)) { |
752 ALOGE << "Exception in dequeu input buffer."; | 785 ALOGE << "Exception in dequeu input buffer."; |
753 return ProcessHWErrorOnEncode(); | 786 return ProcessHWErrorOnEncode(); |
754 } | 787 } |
755 if (j_input_buffer_index == -1) { | 788 if (j_input_buffer_index == -1) { |
756 // Video codec falls behind - no input buffer available. | 789 // Video codec falls behind - no input buffer available. |
757 ALOGW << "Encoder drop frame - no input buffers available"; | 790 ALOGW << "Encoder drop frame - no input buffers available"; |
758 if (frames_received_ > 1) { | 791 if (frames_received_ > 1) { |
759 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; | 792 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; |
760 frames_dropped_media_encoder_++; | 793 frames_dropped_media_encoder_++; |
761 } else { | 794 } else { |
762 // Input buffers are not ready after codec initialization, HW is still | 795 // Input buffers are not ready after codec initialization, HW is still |
763 // allocating thme - this is expected and should not result in drop | 796 // allocating thme - this is expected and should not result in drop |
764 // frame report. | 797 // frame report. |
765 frames_received_ = 0; | 798 frames_received_ = 0; |
766 } | 799 } |
767 return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887. | 800 return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887. |
768 } else if (j_input_buffer_index == -2) { | 801 } else if (j_input_buffer_index == -2) { |
769 return ProcessHWErrorOnEncode(); | 802 return ProcessHWErrorOnEncode(); |
770 } | 803 } |
| 804 } |
| 805 |
| 806 if (input_frame.video_frame_buffer()->type() != |
| 807 webrtc::VideoFrameBuffer::Type::kNative) { |
771 encode_status = | 808 encode_status = |
772 EncodeByteBuffer(jni, key_frame, input_frame, j_input_buffer_index); | 809 EncodeByteBuffer(jni, key_frame, input_frame, j_input_buffer_index); |
773 } else { | 810 } else { |
774 encode_status = EncodeTexture(jni, key_frame, input_frame); | 811 AndroidVideoFrameBuffer* android_buffer = |
| 812 static_cast<AndroidVideoFrameBuffer*>( |
| 813 input_frame.video_frame_buffer().get()); |
| 814 switch (android_buffer->android_type()) { |
| 815 case AndroidVideoFrameBuffer::AndroidType::kTextureBuffer: |
| 816 encode_status = EncodeTexture(jni, key_frame, input_frame); |
| 817 break; |
| 818 case AndroidVideoFrameBuffer::AndroidType::kJavaBuffer: |
| 819 encode_status = EncodeJavaFrame( |
| 820 jni, key_frame, video_frame_factory_.ToJavaFrame(jni, input_frame), |
| 821 j_input_buffer_index); |
| 822 break; |
| 823 default: |
| 824 RTC_NOTREACHED(); |
| 825 return WEBRTC_VIDEO_CODEC_ERROR; |
| 826 } |
775 } | 827 } |
776 | 828 |
777 if (!encode_status) { | 829 if (!encode_status) { |
778 ALOGE << "Failed encode frame with timestamp: " << input_frame.timestamp(); | 830 ALOGE << "Failed encode frame with timestamp: " << input_frame.timestamp(); |
779 return ProcessHWErrorOnEncode(); | 831 return ProcessHWErrorOnEncode(); |
780 } | 832 } |
781 | 833 |
782 // Save input image timestamps for later output. | 834 // Save input image timestamps for later output. |
783 input_frame_infos_.emplace_back(frame_input_time_ms, input_frame.timestamp(), | 835 input_frame_infos_.emplace_back(frame_input_time_ms, input_frame.timestamp(), |
784 input_frame.render_time_ms(), | 836 input_frame.render_time_ms(), |
(...skipping 10 matching lines...) Expand all Loading... |
795 kMediaCodecPollMs); | 847 kMediaCodecPollMs); |
796 } | 848 } |
797 | 849 |
798 if (!DeliverPendingOutputs(jni)) { | 850 if (!DeliverPendingOutputs(jni)) { |
799 return ProcessHWErrorOnEncode(); | 851 return ProcessHWErrorOnEncode(); |
800 } | 852 } |
801 return WEBRTC_VIDEO_CODEC_OK; | 853 return WEBRTC_VIDEO_CODEC_OK; |
802 } | 854 } |
803 | 855 |
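Note the drop-frame branch above still advances current_timestamp_us_ by one frame interval, so later frames keep real-time pacing. A self-contained sketch of that rule, with the constant assumed to mirror rtc::kNumMicrosecsPerSec:

#include <cstdint>

constexpr int64_t kNumMicrosecsPerSec = 1000000;

// At fps == 30, each (dropped) frame advances the presentation clock by
// 1000000 / 30 = 33333 us.
int64_t AdvanceOneFrame(int64_t timestamp_us, int fps) {
  return timestamp_us + kNumMicrosecsPerSec / fps;
}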
804 bool MediaCodecVideoEncoder::MaybeReconfigureEncoder( | 856 bool MediaCodecVideoEncoder::MaybeReconfigureEncoder( |
| 857 JNIEnv* jni, |
805 const webrtc::VideoFrame& frame) { | 858 const webrtc::VideoFrame& frame) { |
806 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_); | 859 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_); |
807 | 860 |
808 const bool reconfigure_due_to_format = frame.is_texture() != use_surface_; | 861 bool is_texture = IsTextureFrame(jni, frame); |
| 862 const bool reconfigure_due_to_format = is_texture != use_surface_; |
809 const bool reconfigure_due_to_size = | 863 const bool reconfigure_due_to_size = |
810 frame.width() != width_ || frame.height() != height_; | 864 frame.width() != width_ || frame.height() != height_; |
811 | 865 |
812 if (reconfigure_due_to_format) { | 866 if (reconfigure_due_to_format) { |
813 ALOGD << "Reconfigure encoder due to format change. " | 867 ALOGD << "Reconfigure encoder due to format change. " |
814 << (use_surface_ ? | 868 << (use_surface_ ? |
815 "Reconfiguring to encode from byte buffer." : | 869 "Reconfiguring to encode from byte buffer." : |
816 "Reconfiguring to encode from texture."); | 870 "Reconfiguring to encode from texture."); |
817 LogStatistics(true); | 871 LogStatistics(true); |
818 } | 872 } |
819 if (reconfigure_due_to_size) { | 873 if (reconfigure_due_to_size) { |
820 ALOGW << "Reconfigure encoder due to frame resolution change from " | 874 ALOGW << "Reconfigure encoder due to frame resolution change from " |
821 << width_ << " x " << height_ << " to " << frame.width() << " x " | 875 << width_ << " x " << height_ << " to " << frame.width() << " x " |
822 << frame.height(); | 876 << frame.height(); |
823 LogStatistics(true); | 877 LogStatistics(true); |
824 width_ = frame.width(); | 878 width_ = frame.width(); |
825 height_ = frame.height(); | 879 height_ = frame.height(); |
826 } | 880 } |
827 | 881 |
828 if (!reconfigure_due_to_format && !reconfigure_due_to_size) | 882 if (!reconfigure_due_to_format && !reconfigure_due_to_size) |
829 return true; | 883 return true; |
830 | 884 |
831 Release(); | 885 Release(); |
832 | 886 |
833 return InitEncodeInternal(width_, height_, 0, 0, frame.is_texture()) == | 887 return InitEncodeInternal(width_, height_, 0, 0, is_texture) == |
834 WEBRTC_VIDEO_CODEC_OK; | 888 WEBRTC_VIDEO_CODEC_OK; |
835 } | 889 } |
836 | 890 |
| 891 bool MediaCodecVideoEncoder::IsTextureFrame(JNIEnv* jni, |
| 892 const webrtc::VideoFrame& frame) { |
| 893 if (frame.video_frame_buffer()->type() != |
| 894 webrtc::VideoFrameBuffer::Type::kNative) { |
| 895 return false; |
| 896 } |
| 897 |
| 898 AndroidVideoFrameBuffer* android_buffer = |
| 899 static_cast<AndroidVideoFrameBuffer*>(frame.video_frame_buffer().get()); |
| 900 switch (android_buffer->android_type()) { |
| 901 case AndroidVideoFrameBuffer::AndroidType::kTextureBuffer: |
| 902 return true; |
| 903 case AndroidVideoFrameBuffer::AndroidType::kJavaBuffer: |
| 904 return jni->IsInstanceOf(static_cast<AndroidVideoBuffer*>(android_buffer) |
| 905 ->video_frame_buffer(), |
| 906 *j_video_frame_texture_buffer_class_); |
| 907 default: |
| 908 RTC_NOTREACHED(); |
| 909 return false; |
| 910 } |
| 911 } |
| 912 |
837 bool MediaCodecVideoEncoder::EncodeByteBuffer(JNIEnv* jni, | 913 bool MediaCodecVideoEncoder::EncodeByteBuffer(JNIEnv* jni, |
838 bool key_frame, | 914 bool key_frame, |
839 const webrtc::VideoFrame& frame, | 915 const webrtc::VideoFrame& frame, |
840 int input_buffer_index) { | 916 int input_buffer_index) { |
841 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_); | 917 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_); |
842 RTC_CHECK(!use_surface_); | 918 RTC_CHECK(!use_surface_); |
843 | 919 |
| 920 rtc::scoped_refptr<webrtc::I420BufferInterface> i420_buffer = |
| 921 frame.video_frame_buffer()->ToI420(); |
| 922 if (!FillInputBuffer(jni, input_buffer_index, i420_buffer->DataY(), |
| 923 i420_buffer->StrideY(), i420_buffer->DataU(), |
| 924 i420_buffer->StrideU(), i420_buffer->DataV(), |
| 925 i420_buffer->StrideV())) { |
| 926 return false; |
| 927 } |
| 928 bool encode_status = jni->CallBooleanMethod( |
| 929 *j_media_codec_video_encoder_, j_encode_buffer_method_, key_frame, |
| 930 input_buffer_index, yuv_size_, current_timestamp_us_); |
| 931 if (CheckException(jni)) { |
| 932 ALOGE << "Exception in encode buffer."; |
| 933 ProcessHWError(true /* reset_if_fallback_unavailable */); |
| 934 return false; |
| 935 } |
| 936 return encode_status; |
| 937 } |
| 938 |
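EncodeByteBuffer's copy ultimately runs through libyuv::ConvertFromI420 (see FillInputBuffer below). A standalone sketch of the same call pattern, assuming an NV12 destination, one of the formats encoder_fourcc_ can name:

#include <cstddef>
#include <cstdint>
#include <vector>

#include "libyuv/convert_from.h"   // libyuv::ConvertFromI420
#include "libyuv/video_common.h"   // libyuv::FOURCC_NV12

// Converts a tightly packed I420 frame into NV12; returns true on success
// (ConvertFromI420 returns 0 when the conversion succeeds).
bool I420ToNV12(const uint8_t* y, int stride_y, const uint8_t* u, int stride_u,
                const uint8_t* v, int stride_v, int width, int height,
                std::vector<uint8_t>* out) {
  out->resize(static_cast<size_t>(width) * height * 3 / 2);
  return libyuv::ConvertFromI420(y, stride_y, u, stride_u, v, stride_v,
                                 out->data(), width, width, height,
                                 libyuv::FOURCC_NV12) == 0;
}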
| 939 bool MediaCodecVideoEncoder::FillInputBuffer(JNIEnv* jni, |
| 940 int input_buffer_index, |
| 941 uint8_t const* buffer_y, |
| 942 int stride_y, |
| 943 uint8_t const* buffer_u, |
| 944 int stride_u, |
| 945 uint8_t const* buffer_v, |
| 946 int stride_v) { |
844 jobject j_input_buffer = input_buffers_[input_buffer_index]; | 947 jobject j_input_buffer = input_buffers_[input_buffer_index]; |
845 uint8_t* yuv_buffer = | 948 uint8_t* yuv_buffer = |
846 reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer)); | 949 reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer)); |
847 if (CheckException(jni)) { | 950 if (CheckException(jni)) { |
848 ALOGE << "Exception in get direct buffer address."; | 951 ALOGE << "Exception in get direct buffer address."; |
849 ProcessHWError(true /* reset_if_fallback_unavailable */); | 952 ProcessHWError(true /* reset_if_fallback_unavailable */); |
850 return false; | 953 return false; |
851 } | 954 } |
852 RTC_CHECK(yuv_buffer) << "Indirect buffer??"; | 955 RTC_CHECK(yuv_buffer) << "Indirect buffer??"; |
853 rtc::scoped_refptr<webrtc::I420BufferInterface> i420_buffer = | 956 |
854 frame.video_frame_buffer()->ToI420(); | 957 RTC_CHECK(!libyuv::ConvertFromI420(buffer_y, stride_y, buffer_u, stride_u, |
855 RTC_CHECK(!libyuv::ConvertFromI420( | 958 buffer_v, stride_v, yuv_buffer, width_, |
856 i420_buffer->DataY(), i420_buffer->StrideY(), i420_buffer->DataU(), | 959 width_, height_, encoder_fourcc_)) |
857 i420_buffer->StrideU(), i420_buffer->DataV(), i420_buffer->StrideV(), | |
858 yuv_buffer, width_, width_, height_, encoder_fourcc_)) | |
859 << "ConvertFromI420 failed"; | 960 << "ConvertFromI420 failed"; |
860 | 961 return true; |
861 bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_, | |
862 j_encode_buffer_method_, | |
863 key_frame, | |
864 input_buffer_index, | |
865 yuv_size_, | |
866 current_timestamp_us_); | |
867 if (CheckException(jni)) { | |
868 ALOGE << "Exception in encode buffer."; | |
869 ProcessHWError(true /* reset_if_fallback_unavailable */); | |
870 return false; | |
871 } | |
872 return encode_status; | |
873 } | 962 } |
874 | 963 |
875 bool MediaCodecVideoEncoder::EncodeTexture(JNIEnv* jni, | 964 bool MediaCodecVideoEncoder::EncodeTexture(JNIEnv* jni, |
876 bool key_frame, | 965 bool key_frame, |
877 const webrtc::VideoFrame& frame) { | 966 const webrtc::VideoFrame& frame) { |
878 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_); | 967 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_); |
879 RTC_CHECK(use_surface_); | 968 RTC_CHECK(use_surface_); |
880 NativeHandleImpl handle = | 969 NativeHandleImpl handle = |
881 static_cast<AndroidTextureBuffer*>(frame.video_frame_buffer().get()) | 970 static_cast<AndroidTextureBuffer*>(frame.video_frame_buffer().get()) |
882 ->native_handle_impl(); | 971 ->native_handle_impl(); |
883 | 972 |
884 jfloatArray sampling_matrix = handle.sampling_matrix.ToJava(jni); | 973 jfloatArray sampling_matrix = handle.sampling_matrix.ToJava(jni); |
885 bool encode_status = jni->CallBooleanMethod( | 974 bool encode_status = jni->CallBooleanMethod( |
886 *j_media_codec_video_encoder_, j_encode_texture_method_, key_frame, | 975 *j_media_codec_video_encoder_, j_encode_texture_method_, key_frame, |
887 handle.oes_texture_id, sampling_matrix, current_timestamp_us_); | 976 handle.oes_texture_id, sampling_matrix, current_timestamp_us_); |
888 if (CheckException(jni)) { | 977 if (CheckException(jni)) { |
889 ALOGE << "Exception in encode texture."; | 978 ALOGE << "Exception in encode texture."; |
890 ProcessHWError(true /* reset_if_fallback_unavailable */); | 979 ProcessHWError(true /* reset_if_fallback_unavailable */); |
891 return false; | 980 return false; |
892 } | 981 } |
893 return encode_status; | 982 return encode_status; |
894 } | 983 } |
895 | 984 |
| 985 bool MediaCodecVideoEncoder::EncodeJavaFrame(JNIEnv* jni, |
| 986 bool key_frame, |
| 987 jobject frame, |
| 988 int input_buffer_index) { |
| 989 bool encode_status = jni->CallBooleanMethod( |
| 990 *j_media_codec_video_encoder_, j_encode_frame_method_, |
| 991 jlongFromPointer(this), key_frame, frame, input_buffer_index); |
| 992 if (CheckException(jni)) { |
| 993 ALOGE << "Exception in encode frame."; |
| 994 ProcessHWError(true /* reset_if_fallback_unavailable */); |
| 995 return false; |
| 996 } |
| 997 return encode_status; |
| 998 } |
| 999 |
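EncodeJavaFrame hands the encoder to Java as a jlong via jlongFromPointer, and the JOW(..., nativeFillBuffer) entry point below casts it back. A minimal round-trip sketch of that handoff, shown with plain casts for illustration:

#include <cstdint>

struct Encoder {};  // stand-in for MediaCodecVideoEncoder

int64_t ToJavaHandle(Encoder* encoder) {
  // Essentially what jlongFromPointer does: widen the address to 64 bits.
  return static_cast<int64_t>(reinterpret_cast<intptr_t>(encoder));
}

Encoder* FromJavaHandle(int64_t handle) {
  // The inverse cast performed when Java calls back into native code.
  return reinterpret_cast<Encoder*>(static_cast<intptr_t>(handle));
}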
896 int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallback( | 1000 int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallback( |
897 webrtc::EncodedImageCallback* callback) { | 1001 webrtc::EncodedImageCallback* callback) { |
898 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_); | 1002 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_); |
899 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 1003 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
900 ScopedLocalRefFrame local_ref_frame(jni); | 1004 ScopedLocalRefFrame local_ref_frame(jni); |
901 callback_ = callback; | 1005 callback_ = callback; |
902 return WEBRTC_VIDEO_CODEC_OK; | 1006 return WEBRTC_VIDEO_CODEC_OK; |
903 } | 1007 } |
904 | 1008 |
905 int32_t MediaCodecVideoEncoder::Release() { | 1009 int32_t MediaCodecVideoEncoder::Release() { |
(...skipping 460 matching lines...)
1366 return supported_codecs_; | 1470 return supported_codecs_; |
1367 } | 1471 } |
1368 } | 1472 } |
1369 | 1473 |
1370 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( | 1474 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( |
1371 webrtc::VideoEncoder* encoder) { | 1475 webrtc::VideoEncoder* encoder) { |
1372 ALOGD << "Destroy video encoder."; | 1476 ALOGD << "Destroy video encoder."; |
1373 delete encoder; | 1477 delete encoder; |
1374 } | 1478 } |
1375 | 1479 |
| 1480 JOW(void, MediaCodecVideoEncoder_nativeFillBuffer) |
| 1481 (JNIEnv* jni, |
| 1482 jlong native_encoder, |
| 1483 jint input_buffer, |
| 1484 jobject j_buffer_y, |
| 1485 jint stride_y, |
| 1486 jobject j_buffer_u, |
| 1487 jint stride_u, |
| 1488 jobject j_buffer_v, |
| 1489 jint stride_v) { |
| 1490 uint8_t* buffer_y = |
| 1491 static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_buffer_y)); |
| 1492 uint8_t* buffer_u = |
| 1493 static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_buffer_u)); |
| 1494 uint8_t* buffer_v = |
| 1495 static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_buffer_v)); |
| 1496 |
| 1497 reinterpret_cast<MediaCodecVideoEncoder*>(native_encoder) |
| 1498 ->FillInputBuffer(jni, input_buffer, buffer_y, stride_y, buffer_u, |
| 1499 stride_u, buffer_v, stride_v); |
| 1500 } |
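One hedged note on the new JOW body: JNI's GetDirectBufferAddress returns nullptr for non-direct ByteBuffers, and the pointers are forwarded without a guard. A defensive variant (a sketch, not what the patch does) would bail out first:

// Sketch only - not in the patch. Guard before forwarding the pointers.
if (buffer_y == nullptr || buffer_u == nullptr || buffer_v == nullptr) {
  ALOGE << "nativeFillBuffer called with a non-direct buffer.";
  return;
}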
| 1501 |
1376 } // namespace webrtc_jni | 1502 } // namespace webrtc_jni |