OLD | NEW |
---|---|
1 /* | 1 /* |
2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
11 // NOTICE: androidmediaencoder_jni.h must be included before | 11 // NOTICE: androidmediaencoder_jni.h must be included before |
12 // androidmediacodeccommon.h to avoid build errors. | 12 // androidmediacodeccommon.h to avoid build errors. |
13 #include "webrtc/sdk/android/src/jni/androidmediaencoder_jni.h" | 13 #include "webrtc/sdk/android/src/jni/androidmediaencoder_jni.h" |
14 | 14 |
15 #include <algorithm> | 15 #include <algorithm> |
16 #include <memory> | 16 #include <memory> |
17 #include <list> | 17 #include <list> |
18 | 18 |
19 #include "third_party/libyuv/include/libyuv/convert.h" | 19 #include "third_party/libyuv/include/libyuv/convert.h" |
20 #include "third_party/libyuv/include/libyuv/convert_from.h" | 20 #include "third_party/libyuv/include/libyuv/convert_from.h" |
21 #include "third_party/libyuv/include/libyuv/video_common.h" | 21 #include "third_party/libyuv/include/libyuv/video_common.h" |
22 #include "webrtc/sdk/android/src/jni/androidmediacodeccommon.h" | 22 #include "webrtc/sdk/android/src/jni/androidmediacodeccommon.h" |
23 #include "webrtc/sdk/android/src/jni/classreferenceholder.h" | 23 #include "webrtc/sdk/android/src/jni/classreferenceholder.h" |
24 #include "webrtc/sdk/android/src/jni/native_handle_impl.h" | 24 #include "webrtc/sdk/android/src/jni/native_handle_impl.h" |
25 #include "webrtc/base/bind.h" | 25 #include "webrtc/base/bind.h" |
26 #include "webrtc/base/checks.h" | 26 #include "webrtc/base/checks.h" |
27 #include "webrtc/base/logging.h" | 27 #include "webrtc/base/logging.h" |
28 #include "webrtc/base/task_queue.h" | |
28 #include "webrtc/base/thread.h" | 29 #include "webrtc/base/thread.h" |
29 #include "webrtc/base/thread_checker.h" | 30 #include "webrtc/base/thread_checker.h" |
30 #include "webrtc/base/timeutils.h" | 31 #include "webrtc/base/timeutils.h" |
32 #include "webrtc/base/weak_ptr.h" | |
31 #include "webrtc/common_types.h" | 33 #include "webrtc/common_types.h" |
32 #include "webrtc/common_video/h264/h264_bitstream_parser.h" | 34 #include "webrtc/common_video/h264/h264_bitstream_parser.h" |
33 #include "webrtc/common_video/h264/h264_common.h" | 35 #include "webrtc/common_video/h264/h264_common.h" |
34 #include "webrtc/common_video/h264/profile_level_id.h" | 36 #include "webrtc/common_video/h264/profile_level_id.h" |
35 #include "webrtc/media/engine/internalencoderfactory.h" | 37 #include "webrtc/media/engine/internalencoderfactory.h" |
36 #include "webrtc/modules/video_coding/include/video_codec_interface.h" | 38 #include "webrtc/modules/video_coding/include/video_codec_interface.h" |
37 #include "webrtc/modules/video_coding/utility/quality_scaler.h" | 39 #include "webrtc/modules/video_coding/utility/quality_scaler.h" |
38 #include "webrtc/modules/video_coding/utility/vp8_header_parser.h" | 40 #include "webrtc/modules/video_coding/utility/vp8_header_parser.h" |
39 #include "webrtc/system_wrappers/include/field_trial.h" | 41 #include "webrtc/system_wrappers/include/field_trial.h" |
40 #include "webrtc/system_wrappers/include/logcat_trace_context.h" | 42 #include "webrtc/system_wrappers/include/logcat_trace_context.h" |
(...skipping 42 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
83 // Maximum time limit between incoming frames before requesting a key frame. | 85 // Maximum time limit between incoming frames before requesting a key frame. |
84 const size_t kFrameDiffThresholdMs = 350; | 86 const size_t kFrameDiffThresholdMs = 350; |
85 const int kMinKeyFrameInterval = 6; | 87 const int kMinKeyFrameInterval = 6; |
86 const char kH264HighProfileFieldTrial[] = "WebRTC-H264HighProfile"; | 88 const char kH264HighProfileFieldTrial[] = "WebRTC-H264HighProfile"; |
87 } // namespace | 89 } // namespace |
88 | 90 |
89 // MediaCodecVideoEncoder is a webrtc::VideoEncoder implementation that uses | 91 // MediaCodecVideoEncoder is a webrtc::VideoEncoder implementation that uses |
90 // Android's MediaCodec SDK API behind the scenes to implement (hopefully) | 92 // Android's MediaCodec SDK API behind the scenes to implement (hopefully) |
91 // HW-backed video encode. This C++ class is implemented as a very thin shim, | 93 // HW-backed video encode. This C++ class is implemented as a very thin shim, |
92 // delegating all of the interesting work to org.webrtc.MediaCodecVideoEncoder. | 94 // delegating all of the interesting work to org.webrtc.MediaCodecVideoEncoder. |
93 // MediaCodecVideoEncoder is created, operated, and destroyed on a single | 95 // MediaCodecVideoEncoder must be created, operated, and destroyed on a single |
94 // thread, currently the libjingle Worker thread. | 96 // task queue, currently this is the encoder queue from ViE encoder. |
95 class MediaCodecVideoEncoder : public webrtc::VideoEncoder, | 97 class MediaCodecVideoEncoder : public webrtc::VideoEncoder { |
96 public rtc::MessageHandler { | |
97 public: | 98 public: |
98 virtual ~MediaCodecVideoEncoder(); | 99 virtual ~MediaCodecVideoEncoder(); |
99 MediaCodecVideoEncoder(JNIEnv* jni, | 100 MediaCodecVideoEncoder(JNIEnv* jni, |
100 const cricket::VideoCodec& codec, | 101 const cricket::VideoCodec& codec, |
101 jobject egl_context); | 102 jobject egl_context); |
102 | 103 |
103 // webrtc::VideoEncoder implementation. Everything trampolines to | 104 // webrtc::VideoEncoder implementation. |
104 // |codec_thread_| for execution. | |
105 int32_t InitEncode(const webrtc::VideoCodec* codec_settings, | 105 int32_t InitEncode(const webrtc::VideoCodec* codec_settings, |
106 int32_t /* number_of_cores */, | 106 int32_t /* number_of_cores */, |
107 size_t /* max_payload_size */) override; | 107 size_t /* max_payload_size */) override; |
108 int32_t Encode(const webrtc::VideoFrame& input_image, | 108 int32_t Encode(const webrtc::VideoFrame& input_image, |
109 const webrtc::CodecSpecificInfo* /* codec_specific_info */, | 109 const webrtc::CodecSpecificInfo* /* codec_specific_info */, |
110 const std::vector<webrtc::FrameType>* frame_types) override; | 110 const std::vector<webrtc::FrameType>* frame_types) override; |
111 int32_t RegisterEncodeCompleteCallback( | 111 int32_t RegisterEncodeCompleteCallback( |
112 webrtc::EncodedImageCallback* callback) override; | 112 webrtc::EncodedImageCallback* callback) override; |
113 int32_t Release() override; | 113 int32_t Release() override; |
114 int32_t SetChannelParameters(uint32_t /* packet_loss */, | 114 int32_t SetChannelParameters(uint32_t /* packet_loss */, |
115 int64_t /* rtt */) override; | 115 int64_t /* rtt */) override; |
116 int32_t SetRateAllocation(const webrtc::BitrateAllocation& rate_allocation, | 116 int32_t SetRateAllocation(const webrtc::BitrateAllocation& rate_allocation, |
117 uint32_t frame_rate) override; | 117 uint32_t frame_rate) override; |
118 | 118 |
119 // rtc::MessageHandler implementation. | |
120 void OnMessage(rtc::Message* msg) override; | |
121 | |
122 bool SupportsNativeHandle() const override { return egl_context_ != nullptr; } | 119 bool SupportsNativeHandle() const override { return egl_context_ != nullptr; } |
123 const char* ImplementationName() const override; | 120 const char* ImplementationName() const override; |
124 | 121 |
125 private: | 122 private: |
126 // ResetCodecOnCodecThread() calls ReleaseOnCodecThread() and | 123 class EncodeTask : public rtc::QueuedTask { |
127 // InitEncodeOnCodecThread() in an attempt to restore the codec to an | 124 public: |
128 // operable state. Necessary after all manner of OMX-layer errors. | 125 EncodeTask(rtc::WeakPtr<MediaCodecVideoEncoder> encoder); |
129 // Returns true if the codec was reset successfully. | 126 bool Run() override; |
130 bool ResetCodecOnCodecThread(); | 127 |
128 private: | |
129 rtc::WeakPtr<MediaCodecVideoEncoder> encoder_; | |
magjed_webrtc
2017/02/03 10:39:17
Why do we need a WeakPtr?
sakal
2017/02/03 11:57:13
Discussed offline. (MediaCodecVideoEncoder can get
| |
130 }; | |
131 | |
132 // ResetCodec() calls Release() and InitEncodeInternal() in an attempt to | |
133 // restore the codec to an operable state. Necessary after all manner of | |
134 // OMX-layer errors. Returns true if the codec was reset successfully. | |
135 bool ResetCodec(); | |
131 | 136 |
132 // Fallback to a software encoder if one is supported else try to reset the | 137 // Fallback to a software encoder if one is supported else try to reset the |
133 // encoder. Called with |reset_if_fallback_unavailable| equal to false from | 138 // encoder. Called with |reset_if_fallback_unavailable| equal to false from |
134 // init/release encoder so that we don't go into infinite recursion. | 139 // init/release encoder so that we don't go into infinite recursion. |
135 // Returns true if the codec was reset successfully. | 140 // Returns true if the codec was reset successfully. |
136 bool ProcessHWErrorOnCodecThread(bool reset_if_fallback_unavailable); | 141 bool ProcessHWError(bool reset_if_fallback_unavailable); |
137 | 142 |
138 // Calls ProcessHWErrorOnCodecThread(true). Returns | 143 // Calls ProcessHWError(true). Returns WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE if |
139 // WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE if sw_fallback_required_ was set or | 144 // sw_fallback_required_ was set or WEBRTC_VIDEO_CODEC_ERROR otherwise. |
140 // WEBRTC_VIDEO_CODEC_ERROR otherwise. | 145 int32_t ProcessHWErrorOnEncode(); |
141 int32_t ProcessHWErrorOnEncodeOnCodecThread(); | |
142 | 146 |
143 // Implementation of webrtc::VideoEncoder methods above, all running on the | |
144 // codec thread exclusively. | |
145 // | |
146 // If width==0 then this is assumed to be a re-initialization and the | 147 // If width==0 then this is assumed to be a re-initialization and the |
147 // previously-current values are reused instead of the passed parameters | 148 // previously-current values are reused instead of the passed parameters |
148 // (makes it easier to reason about thread-safety). | 149 // (makes it easier to reason about thread-safety). |
149 int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps, | 150 int32_t InitEncodeInternal(int width, |
150 bool use_surface); | 151 int height, |
152 int kbps, | |
153 int fps, | |
154 bool use_surface); | |
151 // Reconfigure to match |frame| in width, height. Also reconfigures the | 155 // Reconfigure to match |frame| in width, height. Also reconfigures the |
152 // encoder if |frame| is a texture/byte buffer and the encoder is initialized | 156 // encoder if |frame| is a texture/byte buffer and the encoder is initialized |
153 // for byte buffer/texture. Returns false if reconfiguring fails. | 157 // for byte buffer/texture. Returns false if reconfiguring fails. |
154 bool MaybeReconfigureEncoderOnCodecThread(const webrtc::VideoFrame& frame); | 158 bool MaybeReconfigureEncoder(const webrtc::VideoFrame& frame); |
155 int32_t EncodeOnCodecThread( | 159 bool EncodeByteBuffer(JNIEnv* jni, |
156 const webrtc::VideoFrame& input_image, | 160 bool key_frame, |
157 const std::vector<webrtc::FrameType>* frame_types, | 161 const webrtc::VideoFrame& frame, |
158 const int64_t frame_input_time_ms); | 162 int input_buffer_index); |
159 bool EncodeByteBufferOnCodecThread(JNIEnv* jni, | 163 bool EncodeTexture(JNIEnv* jni, |
160 bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index); | 164 bool key_frame, |
161 bool EncodeTextureOnCodecThread(JNIEnv* jni, | 165 const webrtc::VideoFrame& frame); |
162 bool key_frame, const webrtc::VideoFrame& frame); | |
163 | |
164 int32_t RegisterEncodeCompleteCallbackOnCodecThread( | |
165 webrtc::EncodedImageCallback* callback); | |
166 int32_t ReleaseOnCodecThread(); | |
167 int32_t SetRatesOnCodecThread(uint32_t new_bit_rate, uint32_t frame_rate); | |
168 | 166 |
169 // Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members. | 167 // Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members. |
170 int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info); | 168 int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info); |
171 jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info); | 169 jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info); |
172 bool GetOutputBufferInfoIsKeyFrame(JNIEnv* jni, jobject j_output_buffer_info); | 170 bool GetOutputBufferInfoIsKeyFrame(JNIEnv* jni, jobject j_output_buffer_info); |
173 jlong GetOutputBufferInfoPresentationTimestampUs( | 171 jlong GetOutputBufferInfoPresentationTimestampUs( |
174 JNIEnv* jni, jobject j_output_buffer_info); | 172 JNIEnv* jni, jobject j_output_buffer_info); |
175 | 173 |
176 // Deliver any outputs pending in the MediaCodec to our |callback_| and return | 174 // Deliver any outputs pending in the MediaCodec to our |callback_| and return |
177 // true on success. | 175 // true on success. |
178 bool DeliverPendingOutputs(JNIEnv* jni); | 176 bool DeliverPendingOutputs(JNIEnv* jni); |
179 | 177 |
180 VideoEncoder::ScalingSettings GetScalingSettings() const override; | 178 VideoEncoder::ScalingSettings GetScalingSettings() const override; |
181 | 179 |
182 // Displays encoder statistics. | 180 // Displays encoder statistics. |
183 void LogStatistics(bool force_log); | 181 void LogStatistics(bool force_log); |
184 | 182 |
185 // Type of video codec. | 183 // Type of video codec. |
186 const cricket::VideoCodec codec_; | 184 const cricket::VideoCodec codec_; |
187 | 185 |
188 // Valid all the time since RegisterEncodeCompleteCallback() Invoke()s to | |
189 // |codec_thread_| synchronously. | |
190 webrtc::EncodedImageCallback* callback_; | 186 webrtc::EncodedImageCallback* callback_; |
191 | 187 |
192 // State that is constant for the lifetime of this object once the ctor | 188 // State that is constant for the lifetime of this object once the ctor |
193 // returns. | 189 // returns. |
194 std::unique_ptr<Thread> | 190 rtc::ThreadChecker encoder_queue_checker_; |
magjed_webrtc
2017/02/03 10:39:17
Use SequencedTaskChecker instead. Also, update CL
sakal
2017/02/03 11:57:13
Done.
| |
195 codec_thread_; // Thread on which to operate MediaCodec. | |
196 rtc::ThreadChecker codec_thread_checker_; | |
197 ScopedGlobalRef<jclass> j_media_codec_video_encoder_class_; | 191 ScopedGlobalRef<jclass> j_media_codec_video_encoder_class_; |
198 ScopedGlobalRef<jobject> j_media_codec_video_encoder_; | 192 ScopedGlobalRef<jobject> j_media_codec_video_encoder_; |
199 jmethodID j_init_encode_method_; | 193 jmethodID j_init_encode_method_; |
200 jmethodID j_get_input_buffers_method_; | 194 jmethodID j_get_input_buffers_method_; |
201 jmethodID j_dequeue_input_buffer_method_; | 195 jmethodID j_dequeue_input_buffer_method_; |
202 jmethodID j_encode_buffer_method_; | 196 jmethodID j_encode_buffer_method_; |
203 jmethodID j_encode_texture_method_; | 197 jmethodID j_encode_texture_method_; |
204 jmethodID j_release_method_; | 198 jmethodID j_release_method_; |
205 jmethodID j_set_rates_method_; | 199 jmethodID j_set_rates_method_; |
206 jmethodID j_dequeue_output_buffer_method_; | 200 jmethodID j_dequeue_output_buffer_method_; |
207 jmethodID j_release_output_buffer_method_; | 201 jmethodID j_release_output_buffer_method_; |
208 jfieldID j_color_format_field_; | 202 jfieldID j_color_format_field_; |
209 jfieldID j_info_index_field_; | 203 jfieldID j_info_index_field_; |
210 jfieldID j_info_buffer_field_; | 204 jfieldID j_info_buffer_field_; |
211 jfieldID j_info_is_key_frame_field_; | 205 jfieldID j_info_is_key_frame_field_; |
212 jfieldID j_info_presentation_timestamp_us_field_; | 206 jfieldID j_info_presentation_timestamp_us_field_; |
213 | 207 |
214 // State that is valid only between InitEncode() and the next Release(). | 208 // State that is valid only between InitEncode() and the next Release(). |
215 // Touched only on codec_thread_ so no explicit synchronization necessary. | |
216 int width_; // Frame width in pixels. | 209 int width_; // Frame width in pixels. |
217 int height_; // Frame height in pixels. | 210 int height_; // Frame height in pixels. |
218 bool inited_; | 211 bool inited_; |
219 bool use_surface_; | 212 bool use_surface_; |
220 uint16_t picture_id_; | 213 uint16_t picture_id_; |
221 enum libyuv::FourCC encoder_fourcc_; // Encoder color space format. | 214 enum libyuv::FourCC encoder_fourcc_; // Encoder color space format. |
222 int last_set_bitrate_kbps_; // Last-requested bitrate in kbps. | 215 int last_set_bitrate_kbps_; // Last-requested bitrate in kbps. |
223 int last_set_fps_; // Last-requested frame rate. | 216 int last_set_fps_; // Last-requested frame rate. |
224 int64_t current_timestamp_us_; // Current frame timestamps in us. | 217 int64_t current_timestamp_us_; // Current frame timestamps in us. |
225 int frames_received_; // Number of frames received by encoder. | 218 int frames_received_; // Number of frames received by encoder. |
(...skipping 54 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
280 jobject egl_context_; | 273 jobject egl_context_; |
281 | 274 |
282 // Temporary fix for VP8. | 275 // Temporary fix for VP8. |
283 // Sends a key frame if frames are largely spaced apart (possibly | 276 // Sends a key frame if frames are largely spaced apart (possibly |
284 // corresponding to a large image change). | 277 // corresponding to a large image change). |
285 int64_t last_frame_received_ms_; | 278 int64_t last_frame_received_ms_; |
286 int frames_received_since_last_key_; | 279 int frames_received_since_last_key_; |
287 webrtc::VideoCodecMode codec_mode_; | 280 webrtc::VideoCodecMode codec_mode_; |
288 | 281 |
289 bool sw_fallback_required_; | 282 bool sw_fallback_required_; |
283 // Holds the task while the polling loop is paused. | |
284 std::unique_ptr<rtc::QueuedTask> encode_task_; | |
285 | |
286 // All other member variables should be before WeakPtrFactory. | |
287 rtc::WeakPtrFactory<MediaCodecVideoEncoder> weak_factory_; | |
290 }; | 288 }; |
291 | 289 |
292 MediaCodecVideoEncoder::~MediaCodecVideoEncoder() { | 290 MediaCodecVideoEncoder::~MediaCodecVideoEncoder() { |
291 RTC_DCHECK_RUN_ON(&encoder_queue_checker_); | |
293 // Call Release() to ensure no more callbacks to us after we are deleted. | 292 // Call Release() to ensure no more callbacks to us after we are deleted. |
294 Release(); | 293 Release(); |
295 } | 294 } |
296 | 295 |
297 MediaCodecVideoEncoder::MediaCodecVideoEncoder(JNIEnv* jni, | 296 MediaCodecVideoEncoder::MediaCodecVideoEncoder(JNIEnv* jni, |
298 const cricket::VideoCodec& codec, | 297 const cricket::VideoCodec& codec, |
299 jobject egl_context) | 298 jobject egl_context) |
300 : codec_(codec), | 299 : codec_(codec), |
301 callback_(NULL), | 300 callback_(NULL), |
302 codec_thread_(new Thread()), | |
303 j_media_codec_video_encoder_class_( | 301 j_media_codec_video_encoder_class_( |
304 jni, | 302 jni, |
305 FindClass(jni, "org/webrtc/MediaCodecVideoEncoder")), | 303 FindClass(jni, "org/webrtc/MediaCodecVideoEncoder")), |
306 j_media_codec_video_encoder_( | 304 j_media_codec_video_encoder_( |
307 jni, | 305 jni, |
308 jni->NewObject(*j_media_codec_video_encoder_class_, | 306 jni->NewObject(*j_media_codec_video_encoder_class_, |
309 GetMethodID(jni, | 307 GetMethodID(jni, |
310 *j_media_codec_video_encoder_class_, | 308 *j_media_codec_video_encoder_class_, |
311 "<init>", | 309 "<init>", |
312 "()V"))), | 310 "()V"))), |
313 inited_(false), | 311 inited_(false), |
314 use_surface_(false), | 312 use_surface_(false), |
315 picture_id_(0), | 313 picture_id_(0), |
316 egl_context_(egl_context), | 314 egl_context_(egl_context), |
317 sw_fallback_required_(false) { | 315 sw_fallback_required_(false), |
318 // It would be nice to avoid spinning up a new thread per MediaCodec, and | 316 weak_factory_(this) { |
319 // instead re-use e.g. the PeerConnectionFactory's |worker_thread_|, but bug | 317 encode_task_ = std::unique_ptr<rtc::QueuedTask>( |
320 // 2732 means that deadlocks abound. This class synchronously trampolines | 318 new EncodeTask(weak_factory_.GetWeakPtr())); |
321 // to |codec_thread_|, so if anything else can be coming to _us_ from | 319 |
322 // |codec_thread_|, or from any thread holding the |_sendCritSect| described | |
323 // in the bug, we have a problem. For now work around that with a dedicated | |
324 // thread. | |
325 codec_thread_->SetName("MediaCodecVideoEncoder", NULL); | |
326 RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoEncoder"; | |
327 codec_thread_checker_.DetachFromThread(); | |
328 jclass j_output_buffer_info_class = | 320 jclass j_output_buffer_info_class = |
329 FindClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo"); | 321 FindClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo"); |
330 j_init_encode_method_ = GetMethodID( | 322 j_init_encode_method_ = GetMethodID( |
331 jni, | 323 jni, |
332 *j_media_codec_video_encoder_class_, | 324 *j_media_codec_video_encoder_class_, |
333 "initEncode", | 325 "initEncode", |
334 "(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;" | 326 "(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;" |
335 "IIIILorg/webrtc/EglBase14$Context;)Z"); | 327 "IIIILorg/webrtc/EglBase14$Context;)Z"); |
336 j_get_input_buffers_method_ = GetMethodID( | 328 j_get_input_buffers_method_ = GetMethodID( |
337 jni, | 329 jni, |
(...skipping 24 matching lines...) Expand all Loading... | |
362 j_info_index_field_ = | 354 j_info_index_field_ = |
363 GetFieldID(jni, j_output_buffer_info_class, "index", "I"); | 355 GetFieldID(jni, j_output_buffer_info_class, "index", "I"); |
364 j_info_buffer_field_ = GetFieldID( | 356 j_info_buffer_field_ = GetFieldID( |
365 jni, j_output_buffer_info_class, "buffer", "Ljava/nio/ByteBuffer;"); | 357 jni, j_output_buffer_info_class, "buffer", "Ljava/nio/ByteBuffer;"); |
366 j_info_is_key_frame_field_ = | 358 j_info_is_key_frame_field_ = |
367 GetFieldID(jni, j_output_buffer_info_class, "isKeyFrame", "Z"); | 359 GetFieldID(jni, j_output_buffer_info_class, "isKeyFrame", "Z"); |
368 j_info_presentation_timestamp_us_field_ = GetFieldID( | 360 j_info_presentation_timestamp_us_field_ = GetFieldID( |
369 jni, j_output_buffer_info_class, "presentationTimestampUs", "J"); | 361 jni, j_output_buffer_info_class, "presentationTimestampUs", "J"); |
370 if (CheckException(jni)) { | 362 if (CheckException(jni)) { |
371 ALOGW << "MediaCodecVideoEncoder ctor failed."; | 363 ALOGW << "MediaCodecVideoEncoder ctor failed."; |
372 ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */); | 364 ProcessHWError(true /* reset_if_fallback_unavailable */); |
373 } | 365 } |
374 srand(time(NULL)); | 366 srand(time(NULL)); |
375 AllowBlockingCalls(); | |
376 } | 367 } |
377 | 368 |
378 int32_t MediaCodecVideoEncoder::InitEncode( | 369 int32_t MediaCodecVideoEncoder::InitEncode( |
379 const webrtc::VideoCodec* codec_settings, | 370 const webrtc::VideoCodec* codec_settings, |
380 int32_t /* number_of_cores */, | 371 int32_t /* number_of_cores */, |
381 size_t /* max_payload_size */) { | 372 size_t /* max_payload_size */) { |
373 RTC_DCHECK_RUN_ON(&encoder_queue_checker_); | |
382 if (codec_settings == NULL) { | 374 if (codec_settings == NULL) { |
383 ALOGE << "NULL VideoCodec instance"; | 375 ALOGE << "NULL VideoCodec instance"; |
384 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 376 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
385 } | 377 } |
386 // Factory should guard against other codecs being used with us. | 378 // Factory should guard against other codecs being used with us. |
387 const VideoCodecType codec_type = webrtc::PayloadNameToCodecType(codec_.name) | 379 const VideoCodecType codec_type = webrtc::PayloadNameToCodecType(codec_.name) |
388 .value_or(webrtc::kVideoCodecUnknown); | 380 .value_or(webrtc::kVideoCodecUnknown); |
389 RTC_CHECK(codec_settings->codecType == codec_type) | 381 RTC_CHECK(codec_settings->codecType == codec_type) |
390 << "Unsupported codec " << codec_settings->codecType << " for " | 382 << "Unsupported codec " << codec_settings->codecType << " for " |
391 << codec_type; | 383 << codec_type; |
392 if (sw_fallback_required_) { | 384 if (sw_fallback_required_) { |
393 return WEBRTC_VIDEO_CODEC_OK; | 385 return WEBRTC_VIDEO_CODEC_OK; |
394 } | 386 } |
395 codec_mode_ = codec_settings->mode; | 387 codec_mode_ = codec_settings->mode; |
396 int init_width = codec_settings->width; | 388 int init_width = codec_settings->width; |
397 int init_height = codec_settings->height; | 389 int init_height = codec_settings->height; |
398 // Scaling is disabled for VP9, but optionally enabled for VP8. | 390 // Scaling is disabled for VP9, but optionally enabled for VP8. |
399 // TODO(pbos): Extract automaticResizeOn out of VP8 settings. | 391 // TODO(pbos): Extract automaticResizeOn out of VP8 settings. |
400 scale_ = false; | 392 scale_ = false; |
401 if (codec_type == kVideoCodecVP8) { | 393 if (codec_type == kVideoCodecVP8) { |
402 scale_ = codec_settings->VP8().automaticResizeOn; | 394 scale_ = codec_settings->VP8().automaticResizeOn; |
403 } else if (codec_type != kVideoCodecVP9) { | 395 } else if (codec_type != kVideoCodecVP9) { |
404 scale_ = true; | 396 scale_ = true; |
405 } | 397 } |
406 | 398 |
407 ALOGD << "InitEncode request: " << init_width << " x " << init_height; | 399 ALOGD << "InitEncode request: " << init_width << " x " << init_height; |
408 ALOGD << "Encoder automatic resize " << (scale_ ? "enabled" : "disabled"); | 400 ALOGD << "Encoder automatic resize " << (scale_ ? "enabled" : "disabled"); |
409 | 401 |
410 return codec_thread_->Invoke<int32_t>( | 402 return InitEncodeInternal( |
411 RTC_FROM_HERE, | 403 init_width, init_height, codec_settings->startBitrate, |
412 Bind(&MediaCodecVideoEncoder::InitEncodeOnCodecThread, this, init_width, | 404 codec_settings->maxFramerate, codec_settings->expect_encode_from_texture); |
413 init_height, codec_settings->startBitrate, | |
414 codec_settings->maxFramerate, | |
415 codec_settings->expect_encode_from_texture)); | |
416 } | 405 } |
417 | 406 |
418 int32_t MediaCodecVideoEncoder::Encode( | 407 int32_t MediaCodecVideoEncoder::Encode( |
419 const webrtc::VideoFrame& frame, | 408 const webrtc::VideoFrame& frame, |
420 const webrtc::CodecSpecificInfo* /* codec_specific_info */, | 409 const webrtc::CodecSpecificInfo* /* codec_specific_info */, |
421 const std::vector<webrtc::FrameType>* frame_types) { | 410 const std::vector<webrtc::FrameType>* frame_types) { |
422 return codec_thread_->Invoke<int32_t>( | 411 RTC_DCHECK_RUN_ON(&encoder_queue_checker_); |
423 RTC_FROM_HERE, Bind(&MediaCodecVideoEncoder::EncodeOnCodecThread, this, | 412 if (sw_fallback_required_) |
magjed_webrtc
2017/02/03 10:39:17
Can you try to reorder the functions in order to m
sakal
2017/02/03 11:57:13
Done. Though, I often like to keep the order of de
| |
424 frame, frame_types, rtc::TimeMillis())); | 413 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; |
414 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | |
415 ScopedLocalRefFrame local_ref_frame(jni); | |
416 const int64_t frame_input_time_ms = rtc::TimeMillis(); | |
417 | |
418 if (!inited_) { | |
419 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | |
420 } | |
421 | |
422 bool send_key_frame = false; | |
423 if (codec_mode_ == webrtc::kRealtimeVideo) { | |
424 ++frames_received_since_last_key_; | |
425 int64_t now_ms = rtc::TimeMillis(); | |
426 if (last_frame_received_ms_ != -1 && | |
427 (now_ms - last_frame_received_ms_) > kFrameDiffThresholdMs) { | |
428 // Add limit to prevent triggering a key for every frame for very low | |
429 // framerates (e.g. if frame diff > kFrameDiffThresholdMs). | |
430 if (frames_received_since_last_key_ > kMinKeyFrameInterval) { | |
431 ALOGD << "Send key, frame diff: " << (now_ms - last_frame_received_ms_); | |
432 send_key_frame = true; | |
433 } | |
434 frames_received_since_last_key_ = 0; | |
435 } | |
436 last_frame_received_ms_ = now_ms; | |
437 } | |
438 | |
439 frames_received_++; | |
440 if (!DeliverPendingOutputs(jni)) { | |
441 if (!ProcessHWError(true /* reset_if_fallback_unavailable */)) { | |
442 return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE | |
443 : WEBRTC_VIDEO_CODEC_ERROR; | |
444 } | |
445 } | |
446 if (frames_encoded_ < kMaxEncodedLogFrames) { | |
447 ALOGD << "Encoder frame in # " << (frames_received_ - 1) | |
448 << ". TS: " << (int)(current_timestamp_us_ / 1000) | |
449 << ". Q: " << input_frame_infos_.size() << ". Fps: " << last_set_fps_ | |
450 << ". Kbps: " << last_set_bitrate_kbps_; | |
451 } | |
452 | |
453 if (drop_next_input_frame_) { | |
454 ALOGW << "Encoder drop frame - failed callback."; | |
455 drop_next_input_frame_ = false; | |
456 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; | |
457 frames_dropped_media_encoder_++; | |
458 return WEBRTC_VIDEO_CODEC_OK; | |
459 } | |
460 | |
461 RTC_CHECK(frame_types->size() == 1) << "Unexpected stream count"; | |
462 | |
463 // Check if we accumulated too many frames in encoder input buffers and drop | |
464 // frame if so. | |
465 if (input_frame_infos_.size() > MAX_ENCODER_Q_SIZE) { | |
466 ALOGD << "Already " << input_frame_infos_.size() | |
467 << " frames in the queue, dropping" | |
468 << ". TS: " << (int)(current_timestamp_us_ / 1000) | |
469 << ". Fps: " << last_set_fps_ | |
470 << ". Consecutive drops: " << consecutive_full_queue_frame_drops_; | |
471 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; | |
472 consecutive_full_queue_frame_drops_++; | |
473 if (consecutive_full_queue_frame_drops_ >= | |
474 ENCODER_STALL_FRAMEDROP_THRESHOLD) { | |
475 ALOGE << "Encoder got stuck."; | |
476 return ProcessHWErrorOnEncode(); | |
477 } | |
478 frames_dropped_media_encoder_++; | |
479 return WEBRTC_VIDEO_CODEC_OK; | |
480 } | |
481 consecutive_full_queue_frame_drops_ = 0; | |
482 | |
483 rtc::scoped_refptr<webrtc::VideoFrameBuffer> input_buffer( | |
484 frame.video_frame_buffer()); | |
485 | |
486 VideoFrame input_frame(input_buffer, frame.timestamp(), | |
487 frame.render_time_ms(), frame.rotation()); | |
488 | |
489 if (!MaybeReconfigureEncoder(input_frame)) { | |
490 ALOGE << "Failed to reconfigure encoder."; | |
491 return WEBRTC_VIDEO_CODEC_ERROR; | |
492 } | |
493 | |
494 const bool key_frame = | |
495 frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame; | |
496 bool encode_status = true; | |
497 if (!input_frame.video_frame_buffer()->native_handle()) { | |
498 int j_input_buffer_index = jni->CallIntMethod( | |
499 *j_media_codec_video_encoder_, j_dequeue_input_buffer_method_); | |
500 if (CheckException(jni)) { | |
 501 ALOGE << "Exception in dequeue input buffer."; |
502 return ProcessHWErrorOnEncode(); | |
503 } | |
504 if (j_input_buffer_index == -1) { | |
505 // Video codec falls behind - no input buffer available. | |
506 ALOGW << "Encoder drop frame - no input buffers available"; | |
507 if (frames_received_ > 1) { | |
508 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; | |
509 frames_dropped_media_encoder_++; | |
510 } else { | |
511 // Input buffers are not ready after codec initialization, HW is still | |
 512 // allocating them - this is expected and should not result in drop |
513 // frame report. | |
514 frames_received_ = 0; | |
515 } | |
516 return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887. | |
517 } else if (j_input_buffer_index == -2) { | |
518 return ProcessHWErrorOnEncode(); | |
519 } | |
520 encode_status = | |
521 EncodeByteBuffer(jni, key_frame, input_frame, j_input_buffer_index); | |
522 } else { | |
523 encode_status = EncodeTexture(jni, key_frame, input_frame); | |
524 } | |
525 | |
526 if (!encode_status) { | |
527 ALOGE << "Failed encode frame with timestamp: " << input_frame.timestamp(); | |
528 return ProcessHWErrorOnEncode(); | |
529 } | |
530 | |
531 // Save input image timestamps for later output. | |
532 input_frame_infos_.emplace_back(frame_input_time_ms, input_frame.timestamp(), | |
533 input_frame.render_time_ms(), | |
534 input_frame.rotation()); | |
535 | |
536 last_input_timestamp_ms_ = | |
537 current_timestamp_us_ / rtc::kNumMicrosecsPerMillisec; | |
538 | |
539 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; | |
540 | |
541 // Start the polling loop if it is not started. | |
542 if (encode_task_) { | |
543 rtc::TaskQueue::Current()->PostDelayedTask(std::move(encode_task_), | |
544 kMediaCodecPollMs); | |
545 } | |
546 | |
547 if (!DeliverPendingOutputs(jni)) { | |
548 return ProcessHWErrorOnEncode(); | |
549 } | |
550 return WEBRTC_VIDEO_CODEC_OK; | |
425 } | 551 } |
426 | 552 |
427 int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallback( | 553 int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallback( |
428 webrtc::EncodedImageCallback* callback) { | 554 webrtc::EncodedImageCallback* callback) { |
429 return codec_thread_->Invoke<int32_t>( | 555 RTC_DCHECK_RUN_ON(&encoder_queue_checker_); |
430 RTC_FROM_HERE, | 556 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
431 Bind(&MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread, | 557 ScopedLocalRefFrame local_ref_frame(jni); |
432 this, callback)); | 558 callback_ = callback; |
559 return WEBRTC_VIDEO_CODEC_OK; | |
433 } | 560 } |
434 | 561 |
435 int32_t MediaCodecVideoEncoder::Release() { | 562 int32_t MediaCodecVideoEncoder::Release() { |
436 ALOGD << "EncoderRelease request"; | 563 RTC_DCHECK_RUN_ON(&encoder_queue_checker_); |
437 return codec_thread_->Invoke<int32_t>( | 564 if (!inited_) { |
438 RTC_FROM_HERE, Bind(&MediaCodecVideoEncoder::ReleaseOnCodecThread, this)); | 565 return WEBRTC_VIDEO_CODEC_OK; |
566 } | |
567 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | |
568 ALOGD << "EncoderRelease: Frames received: " << frames_received_ | |
569 << ". Encoded: " << frames_encoded_ | |
570 << ". Dropped: " << frames_dropped_media_encoder_; | |
571 ScopedLocalRefFrame local_ref_frame(jni); | |
572 for (size_t i = 0; i < input_buffers_.size(); ++i) | |
573 jni->DeleteGlobalRef(input_buffers_[i]); | |
574 input_buffers_.clear(); | |
575 jni->CallVoidMethod(*j_media_codec_video_encoder_, j_release_method_); | |
576 if (CheckException(jni)) { | |
577 ALOGE << "Exception in release."; | |
578 ProcessHWError(false /* reset_if_fallback_unavailable */); | |
579 return WEBRTC_VIDEO_CODEC_ERROR; | |
580 } | |
581 inited_ = false; | |
582 use_surface_ = false; | |
583 ALOGD << "EncoderRelease done."; | |
584 return WEBRTC_VIDEO_CODEC_OK; | |
439 } | 585 } |
440 | 586 |
441 int32_t MediaCodecVideoEncoder::SetChannelParameters(uint32_t /* packet_loss */, | 587 int32_t MediaCodecVideoEncoder::SetChannelParameters(uint32_t /* packet_loss */, |
442 int64_t /* rtt */) { | 588 int64_t /* rtt */) { |
443 return WEBRTC_VIDEO_CODEC_OK; | 589 return WEBRTC_VIDEO_CODEC_OK; |
444 } | 590 } |
445 | 591 |
446 int32_t MediaCodecVideoEncoder::SetRateAllocation( | 592 int32_t MediaCodecVideoEncoder::SetRateAllocation( |
447 const webrtc::BitrateAllocation& rate_allocation, | 593 const webrtc::BitrateAllocation& rate_allocation, |
448 uint32_t frame_rate) { | 594 uint32_t frame_rate) { |
449 return codec_thread_->Invoke<int32_t>( | 595 RTC_DCHECK_RUN_ON(&encoder_queue_checker_); |
450 RTC_FROM_HERE, Bind(&MediaCodecVideoEncoder::SetRatesOnCodecThread, this, | 596 const uint32_t new_bit_rate = rate_allocation.get_sum_kbps(); |
451 rate_allocation.get_sum_kbps(), frame_rate)); | 597 if (sw_fallback_required_) |
598 return WEBRTC_VIDEO_CODEC_OK; | |
599 frame_rate = | |
600 (frame_rate < MAX_ALLOWED_VIDEO_FPS) ? frame_rate : MAX_ALLOWED_VIDEO_FPS; | |
601 if (last_set_bitrate_kbps_ == new_bit_rate && last_set_fps_ == frame_rate) { | |
602 return WEBRTC_VIDEO_CODEC_OK; | |
603 } | |
604 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | |
605 ScopedLocalRefFrame local_ref_frame(jni); | |
606 if (new_bit_rate > 0) { | |
607 last_set_bitrate_kbps_ = new_bit_rate; | |
608 } | |
609 if (frame_rate > 0) { | |
610 last_set_fps_ = frame_rate; | |
611 } | |
612 bool ret = | |
613 jni->CallBooleanMethod(*j_media_codec_video_encoder_, j_set_rates_method_, | |
614 last_set_bitrate_kbps_, last_set_fps_); | |
615 if (CheckException(jni) || !ret) { | |
616 ProcessHWError(true /* reset_if_fallback_unavailable */); | |
617 return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_OK | |
618 : WEBRTC_VIDEO_CODEC_ERROR; | |
619 } | |
620 return WEBRTC_VIDEO_CODEC_OK; | |
452 } | 621 } |
453 | 622 |
454 void MediaCodecVideoEncoder::OnMessage(rtc::Message* msg) { | 623 bool MediaCodecVideoEncoder::ResetCodec() { |
455 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 624 RTC_DCHECK_RUN_ON(&encoder_queue_checker_); |
456 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 625 ALOGE << "Reset"; |
457 ScopedLocalRefFrame local_ref_frame(jni); | 626 if (Release() != WEBRTC_VIDEO_CODEC_OK) { |
458 | |
459 // We only ever send one message to |this| directly (not through a Bind()'d | |
460 // functor), so expect no ID/data. | |
461 RTC_CHECK(!msg->message_id) << "Unexpected message!"; | |
462 RTC_CHECK(!msg->pdata) << "Unexpected message!"; | |
463 if (!inited_) { | |
464 return; | |
465 } | |
466 | |
467 // It would be nice to recover from a failure here if one happened, but it's | |
468 // unclear how to signal such a failure to the app, so instead we stay silent | |
469 // about it and let the next app-called API method reveal the borkedness. | |
470 DeliverPendingOutputs(jni); | |
471 | |
472 // If there aren't more frames to deliver, we can start polling at lower rate. | |
473 if (input_frame_infos_.empty()) { | |
474 codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollNoFramesMs, this); | |
475 } else { | |
476 codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollMs, this); | |
477 } | |
478 | |
479 // Call log statistics here so it's called even if no frames are being | |
480 // delivered. | |
481 LogStatistics(false); | |
482 } | |
483 | |
484 bool MediaCodecVideoEncoder::ResetCodecOnCodecThread() { | |
485 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | |
486 ALOGE << "ResetOnCodecThread"; | |
487 if (ReleaseOnCodecThread() != WEBRTC_VIDEO_CODEC_OK) { | |
488 ALOGE << "Releasing codec failed during reset."; | 627 ALOGE << "Releasing codec failed during reset."; |
489 return false; | 628 return false; |
490 } | 629 } |
491 if (InitEncodeOnCodecThread(width_, height_, 0, 0, false) != | 630 if (InitEncodeInternal(width_, height_, 0, 0, false) != |
492 WEBRTC_VIDEO_CODEC_OK) { | 631 WEBRTC_VIDEO_CODEC_OK) { |
493 ALOGE << "Initializing encoder failed during reset."; | 632 ALOGE << "Initializing encoder failed during reset."; |
494 return false; | 633 return false; |
495 } | 634 } |
496 return true; | 635 return true; |
497 } | 636 } |
498 | 637 |
499 bool MediaCodecVideoEncoder::ProcessHWErrorOnCodecThread( | 638 MediaCodecVideoEncoder::EncodeTask::EncodeTask( |
639 rtc::WeakPtr<MediaCodecVideoEncoder> encoder) | |
640 : encoder_(encoder) {} | |
641 | |
642 bool MediaCodecVideoEncoder::EncodeTask::Run() { | |
643 if (!encoder_) { | |
644 // Encoder was destroyed. | |
645 return true; | |
646 } | |
647 | |
648 RTC_DCHECK(encoder_->encoder_queue_checker_.CalledOnValidThread()); | |
649 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | |
650 ScopedLocalRefFrame local_ref_frame(jni); | |
651 | |
652 if (!encoder_->inited_) { | |
653 encoder_->encode_task_ = std::unique_ptr<rtc::QueuedTask>(this); | |
654 return false; | |
655 } | |
656 | |
657 // It would be nice to recover from a failure here if one happened, but it's | |
658 // unclear how to signal such a failure to the app, so instead we stay silent | |
659 // about it and let the next app-called API method reveal the borkedness. | |
660 encoder_->DeliverPendingOutputs(jni); | |
661 | |
662 // Call log statistics here so it's called even if no frames are being | |
663 // delivered. | |
664 encoder_->LogStatistics(false); | |
665 | |
666 // If there aren't more frames to deliver, we can start polling at lower rate. | |
667 if (encoder_->input_frame_infos_.empty()) { | |
668 rtc::TaskQueue::Current()->PostDelayedTask( | |
669 std::unique_ptr<rtc::QueuedTask>(this), kMediaCodecPollNoFramesMs); | |
670 } else { | |
671 rtc::TaskQueue::Current()->PostDelayedTask( | |
672 std::unique_ptr<rtc::QueuedTask>(this), kMediaCodecPollMs); | |
673 } | |
674 | |
675 return false; | |
676 } | |
677 | |
678 bool MediaCodecVideoEncoder::ProcessHWError( | |
500 bool reset_if_fallback_unavailable) { | 679 bool reset_if_fallback_unavailable) { |
501 ALOGE << "ProcessHWErrorOnCodecThread"; | 680 ALOGE << "ProcessHWError"; |
502 if (FindMatchingCodec(cricket::InternalEncoderFactory().supported_codecs(), | 681 if (FindMatchingCodec(cricket::InternalEncoderFactory().supported_codecs(), |
503 codec_)) { | 682 codec_)) { |
504 ALOGE << "Fallback to SW encoder."; | 683 ALOGE << "Fallback to SW encoder."; |
505 sw_fallback_required_ = true; | 684 sw_fallback_required_ = true; |
506 return false; | 685 return false; |
507 } else if (reset_if_fallback_unavailable) { | 686 } else if (reset_if_fallback_unavailable) { |
508 ALOGE << "Reset encoder."; | 687 ALOGE << "Reset encoder."; |
509 return ResetCodecOnCodecThread(); | 688 return ResetCodec(); |
510 } | 689 } |
511 return false; | 690 return false; |
512 } | 691 } |
513 | 692 |
514 int32_t MediaCodecVideoEncoder::ProcessHWErrorOnEncodeOnCodecThread() { | 693 int32_t MediaCodecVideoEncoder::ProcessHWErrorOnEncode() { |
515 ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */); | 694 ProcessHWError(true /* reset_if_fallback_unavailable */); |
516 return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE | 695 return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE |
517 : WEBRTC_VIDEO_CODEC_ERROR; | 696 : WEBRTC_VIDEO_CODEC_ERROR; |
518 } | 697 } |
519 | 698 |
520 int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread( | 699 int32_t MediaCodecVideoEncoder::InitEncodeInternal(int width, |
521 int width, int height, int kbps, int fps, bool use_surface) { | 700 int height, |
522 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 701 int kbps, |
702 int fps, | |
703 bool use_surface) { | |
704 RTC_DCHECK_RUN_ON(&encoder_queue_checker_); | |
523 if (sw_fallback_required_) { | 705 if (sw_fallback_required_) { |
524 return WEBRTC_VIDEO_CODEC_OK; | 706 return WEBRTC_VIDEO_CODEC_OK; |
525 } | 707 } |
526 RTC_CHECK(!use_surface || egl_context_ != nullptr) << "EGL context not set."; | 708 RTC_CHECK(!use_surface || egl_context_ != nullptr) << "EGL context not set."; |
527 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 709 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
528 ScopedLocalRefFrame local_ref_frame(jni); | 710 ScopedLocalRefFrame local_ref_frame(jni); |
529 | 711 |
530 const VideoCodecType codec_type = webrtc::PayloadNameToCodecType(codec_.name) | 712 const VideoCodecType codec_type = webrtc::PayloadNameToCodecType(codec_.name) |
531 .value_or(webrtc::kVideoCodecUnknown); | 713 .value_or(webrtc::kVideoCodecUnknown); |
532 ALOGD << "InitEncodeOnCodecThread Type: " << (int)codec_type << ", " << width | 714 ALOGD << "InitEncodeInternal Type: " << (int)codec_type << ", " << width |
533 << " x " << height << ". Bitrate: " << kbps << " kbps. Fps: " << fps; | 715 << " x " << height << ". Bitrate: " << kbps << " kbps. Fps: " << fps; |
534 if (kbps == 0) { | 716 if (kbps == 0) { |
535 kbps = last_set_bitrate_kbps_; | 717 kbps = last_set_bitrate_kbps_; |
536 } | 718 } |
537 if (fps == 0) { | 719 if (fps == 0) { |
538 fps = MAX_VIDEO_FPS; | 720 fps = MAX_VIDEO_FPS; |
539 } | 721 } |
540 | 722 |
541 width_ = width; | 723 width_ = width; |
542 height_ = height; | 724 height_ = height; |
(...skipping 26 matching lines...) Expand all Loading... | |
569 | 751 |
570 // We enforce no extra stride/padding in the format creation step. | 752 // We enforce no extra stride/padding in the format creation step. |
571 jobject j_video_codec_enum = JavaEnumFromIndexAndClassName( | 753 jobject j_video_codec_enum = JavaEnumFromIndexAndClassName( |
572 jni, "MediaCodecVideoEncoder$VideoCodecType", codec_type); | 754 jni, "MediaCodecVideoEncoder$VideoCodecType", codec_type); |
573 const bool encode_status = jni->CallBooleanMethod( | 755 const bool encode_status = jni->CallBooleanMethod( |
574 *j_media_codec_video_encoder_, j_init_encode_method_, | 756 *j_media_codec_video_encoder_, j_init_encode_method_, |
575 j_video_codec_enum, width, height, kbps, fps, | 757 j_video_codec_enum, width, height, kbps, fps, |
576 (use_surface ? egl_context_ : nullptr)); | 758 (use_surface ? egl_context_ : nullptr)); |
577 if (!encode_status) { | 759 if (!encode_status) { |
578 ALOGE << "Failed to configure encoder."; | 760 ALOGE << "Failed to configure encoder."; |
579 ProcessHWErrorOnCodecThread(false /* reset_if_fallback_unavailable */); | 761 ProcessHWError(false /* reset_if_fallback_unavailable */); |
580 return WEBRTC_VIDEO_CODEC_ERROR; | 762 return WEBRTC_VIDEO_CODEC_ERROR; |
581 } | 763 } |
582 if (CheckException(jni)) { | 764 if (CheckException(jni)) { |
583 ALOGE << "Exception in init encode."; | 765 ALOGE << "Exception in init encode."; |
584 ProcessHWErrorOnCodecThread(false /* reset_if_fallback_unavailable */); | 766 ProcessHWError(false /* reset_if_fallback_unavailable */); |
585 return WEBRTC_VIDEO_CODEC_ERROR; | 767 return WEBRTC_VIDEO_CODEC_ERROR; |
586 } | 768 } |
587 | 769 |
588 if (!use_surface) { | 770 if (!use_surface) { |
589 jobjectArray input_buffers = reinterpret_cast<jobjectArray>( | 771 jobjectArray input_buffers = reinterpret_cast<jobjectArray>( |
590 jni->CallObjectMethod(*j_media_codec_video_encoder_, | 772 jni->CallObjectMethod(*j_media_codec_video_encoder_, |
591 j_get_input_buffers_method_)); | 773 j_get_input_buffers_method_)); |
592 if (CheckException(jni)) { | 774 if (CheckException(jni)) { |
593 ALOGE << "Exception in get input buffers."; | 775 ALOGE << "Exception in get input buffers."; |
594 ProcessHWErrorOnCodecThread(false /* reset_if_fallback_unavailable */); | 776 ProcessHWError(false /* reset_if_fallback_unavailable */); |
595 return WEBRTC_VIDEO_CODEC_ERROR; | 777 return WEBRTC_VIDEO_CODEC_ERROR; |
596 } | 778 } |
597 | 779 |
598 if (IsNull(jni, input_buffers)) { | 780 if (IsNull(jni, input_buffers)) { |
599 ProcessHWErrorOnCodecThread(false /* reset_if_fallback_unavailable */); | 781 ProcessHWError(false /* reset_if_fallback_unavailable */); |
600 return WEBRTC_VIDEO_CODEC_ERROR; | 782 return WEBRTC_VIDEO_CODEC_ERROR; |
601 } | 783 } |
602 | 784 |
603 switch (GetIntField(jni, *j_media_codec_video_encoder_, | 785 switch (GetIntField(jni, *j_media_codec_video_encoder_, |
604 j_color_format_field_)) { | 786 j_color_format_field_)) { |
605 case COLOR_FormatYUV420Planar: | 787 case COLOR_FormatYUV420Planar: |
606 encoder_fourcc_ = libyuv::FOURCC_YU12; | 788 encoder_fourcc_ = libyuv::FOURCC_YU12; |
607 break; | 789 break; |
608 case COLOR_FormatYUV420SemiPlanar: | 790 case COLOR_FormatYUV420SemiPlanar: |
609 case COLOR_QCOM_FormatYUV420SemiPlanar: | 791 case COLOR_QCOM_FormatYUV420SemiPlanar: |
610 case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m: | 792 case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m: |
611 encoder_fourcc_ = libyuv::FOURCC_NV12; | 793 encoder_fourcc_ = libyuv::FOURCC_NV12; |
612 break; | 794 break; |
613 default: | 795 default: |
614 LOG(LS_ERROR) << "Wrong color format."; | 796 LOG(LS_ERROR) << "Wrong color format."; |
615 ProcessHWErrorOnCodecThread(false /* reset_if_fallback_unavailable */); | 797 ProcessHWError(false /* reset_if_fallback_unavailable */); |
616 return WEBRTC_VIDEO_CODEC_ERROR; | 798 return WEBRTC_VIDEO_CODEC_ERROR; |
617 } | 799 } |
618 size_t num_input_buffers = jni->GetArrayLength(input_buffers); | 800 size_t num_input_buffers = jni->GetArrayLength(input_buffers); |
619 RTC_CHECK(input_buffers_.empty()) | 801 RTC_CHECK(input_buffers_.empty()) |
620 << "Unexpected double InitEncode without Release"; | 802 << "Unexpected double InitEncode without Release"; |
621 input_buffers_.resize(num_input_buffers); | 803 input_buffers_.resize(num_input_buffers); |
622 for (size_t i = 0; i < num_input_buffers; ++i) { | 804 for (size_t i = 0; i < num_input_buffers; ++i) { |
623 input_buffers_[i] = | 805 input_buffers_[i] = |
624 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i)); | 806 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i)); |
625 int64_t yuv_buffer_capacity = | 807 int64_t yuv_buffer_capacity = |
626 jni->GetDirectBufferCapacity(input_buffers_[i]); | 808 jni->GetDirectBufferCapacity(input_buffers_[i]); |
627 if (CheckException(jni)) { | 809 if (CheckException(jni)) { |
628 ALOGE << "Exception in get direct buffer capacity."; | 810 ALOGE << "Exception in get direct buffer capacity."; |
629 ProcessHWErrorOnCodecThread(false /* reset_if_fallback_unavailable */); | 811 ProcessHWError(false /* reset_if_fallback_unavailable */); |
630 return WEBRTC_VIDEO_CODEC_ERROR; | 812 return WEBRTC_VIDEO_CODEC_ERROR; |
631 } | 813 } |
632 RTC_CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity"; | 814 RTC_CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity"; |
633 } | 815 } |
634 } | 816 } |
635 | 817 |
636 inited_ = true; | 818 inited_ = true; |
637 return WEBRTC_VIDEO_CODEC_OK; | 819 return WEBRTC_VIDEO_CODEC_OK; |
638 } | 820 } |
639 | 821 |
640 int32_t MediaCodecVideoEncoder::EncodeOnCodecThread( | 822 bool MediaCodecVideoEncoder::MaybeReconfigureEncoder( |
641 const webrtc::VideoFrame& frame, | |
642 const std::vector<webrtc::FrameType>* frame_types, | |
643 const int64_t frame_input_time_ms) { | |
644 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | |
645 if (sw_fallback_required_) | |
646 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; | |
647 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | |
648 ScopedLocalRefFrame local_ref_frame(jni); | |
649 | |
650 if (!inited_) { | |
651 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | |
652 } | |
653 | |
654 bool send_key_frame = false; | |
655 if (codec_mode_ == webrtc::kRealtimeVideo) { | |
656 ++frames_received_since_last_key_; | |
657 int64_t now_ms = rtc::TimeMillis(); | |
658 if (last_frame_received_ms_ != -1 && | |
659 (now_ms - last_frame_received_ms_) > kFrameDiffThresholdMs) { | |
660 // Add limit to prevent triggering a key for every frame for very low | |
661 // framerates (e.g. if frame diff > kFrameDiffThresholdMs). | |
662 if (frames_received_since_last_key_ > kMinKeyFrameInterval) { | |
663 ALOGD << "Send key, frame diff: " << (now_ms - last_frame_received_ms_); | |
664 send_key_frame = true; | |
665 } | |
666 frames_received_since_last_key_ = 0; | |
667 } | |
668 last_frame_received_ms_ = now_ms; | |
669 } | |
670 | |
671 frames_received_++; | |
672 if (!DeliverPendingOutputs(jni)) { | |
673 if (!ProcessHWErrorOnCodecThread( | |
674 true /* reset_if_fallback_unavailable */)) { | |
675 return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE | |
676 : WEBRTC_VIDEO_CODEC_ERROR; | |
677 } | |
678 } | |
679 if (frames_encoded_ < kMaxEncodedLogFrames) { | |
680 ALOGD << "Encoder frame in # " << (frames_received_ - 1) | |
681 << ". TS: " << (int)(current_timestamp_us_ / 1000) | |
682 << ". Q: " << input_frame_infos_.size() << ". Fps: " << last_set_fps_ | |
683 << ". Kbps: " << last_set_bitrate_kbps_; | |
684 } | |
685 | |
686 if (drop_next_input_frame_) { | |
687 ALOGW << "Encoder drop frame - failed callback."; | |
688 drop_next_input_frame_ = false; | |
689 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; | |
690 frames_dropped_media_encoder_++; | |
691 return WEBRTC_VIDEO_CODEC_OK; | |
692 } | |
693 | |
694 RTC_CHECK(frame_types->size() == 1) << "Unexpected stream count"; | |
695 | |
696 // Check if we accumulated too many frames in encoder input buffers and drop | |
697 // frame if so. | |
698 if (input_frame_infos_.size() > MAX_ENCODER_Q_SIZE) { | |
699 ALOGD << "Already " << input_frame_infos_.size() | |
700 << " frames in the queue, dropping" | |
701 << ". TS: " << (int)(current_timestamp_us_ / 1000) | |
702 << ". Fps: " << last_set_fps_ | |
703 << ". Consecutive drops: " << consecutive_full_queue_frame_drops_; | |
704 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; | |
705 consecutive_full_queue_frame_drops_++; | |
706 if (consecutive_full_queue_frame_drops_ >= | |
707 ENCODER_STALL_FRAMEDROP_THRESHOLD) { | |
708 ALOGE << "Encoder got stuck."; | |
709 return ProcessHWErrorOnEncodeOnCodecThread(); | |
710 } | |
711 frames_dropped_media_encoder_++; | |
712 return WEBRTC_VIDEO_CODEC_OK; | |
713 } | |
714 consecutive_full_queue_frame_drops_ = 0; | |
715 | |
716 rtc::scoped_refptr<webrtc::VideoFrameBuffer> input_buffer( | |
717 frame.video_frame_buffer()); | |
718 | |
719 VideoFrame input_frame(input_buffer, frame.timestamp(), | |
720 frame.render_time_ms(), frame.rotation()); | |
721 | |
722 if (!MaybeReconfigureEncoderOnCodecThread(input_frame)) { | |
723 ALOGE << "Failed to reconfigure encoder."; | |
724 return WEBRTC_VIDEO_CODEC_ERROR; | |
725 } | |
726 | |
727 const bool key_frame = | |
728 frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame; | |
729 bool encode_status = true; | |
730 if (!input_frame.video_frame_buffer()->native_handle()) { | |
731 int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_, | |
732 j_dequeue_input_buffer_method_); | |
733 if (CheckException(jni)) { | |
734 ALOGE << "Exception in dequeu input buffer."; | |
735 return ProcessHWErrorOnEncodeOnCodecThread(); | |
736 } | |
737 if (j_input_buffer_index == -1) { | |
738 // Video codec falls behind - no input buffer available. | |
739 ALOGW << "Encoder drop frame - no input buffers available"; | |
740 if (frames_received_ > 1) { | |
741 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; | |
742 frames_dropped_media_encoder_++; | |
743 } else { | |
744 // Input buffers are not ready after codec initialization, HW is still | |
745 // allocating thme - this is expected and should not result in drop | |
746 // frame report. | |
747 frames_received_ = 0; | |
748 } | |
749 return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887. | |
750 } else if (j_input_buffer_index == -2) { | |
751 return ProcessHWErrorOnEncodeOnCodecThread(); | |
752 } | |
753 encode_status = EncodeByteBufferOnCodecThread(jni, key_frame, input_frame, | |
754 j_input_buffer_index); | |
755 } else { | |
756 encode_status = EncodeTextureOnCodecThread(jni, key_frame, input_frame); | |
757 } | |
758 | |
759 if (!encode_status) { | |
760 ALOGE << "Failed encode frame with timestamp: " << input_frame.timestamp(); | |
761 return ProcessHWErrorOnEncodeOnCodecThread(); | |
762 } | |
763 | |
764 // Save input image timestamps for later output. | |
765 input_frame_infos_.emplace_back( | |
766 frame_input_time_ms, input_frame.timestamp(), | |
767 input_frame.render_time_ms(), input_frame.rotation()); | |
768 | |
769 last_input_timestamp_ms_ = | |
770 current_timestamp_us_ / rtc::kNumMicrosecsPerMillisec; | |
771 | |
772 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; | |
773 | |
774 codec_thread_->Clear(this); | |
775 codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollMs, this); | |
776 | |
777 if (!DeliverPendingOutputs(jni)) { | |
778 return ProcessHWErrorOnEncodeOnCodecThread(); | |
779 } | |
780 return WEBRTC_VIDEO_CODEC_OK; | |
781 } | |
782 | |
783 bool MediaCodecVideoEncoder::MaybeReconfigureEncoderOnCodecThread( | |
784 const webrtc::VideoFrame& frame) { | 823 const webrtc::VideoFrame& frame) { |
785 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 824 RTC_DCHECK_RUN_ON(&encoder_queue_checker_); |
786 | 825 |
787 const bool is_texture_frame = | 826 const bool is_texture_frame = |
788 frame.video_frame_buffer()->native_handle() != nullptr; | 827 frame.video_frame_buffer()->native_handle() != nullptr; |
789 const bool reconfigure_due_to_format = is_texture_frame != use_surface_; | 828 const bool reconfigure_due_to_format = is_texture_frame != use_surface_; |
790 const bool reconfigure_due_to_size = | 829 const bool reconfigure_due_to_size = |
791 frame.width() != width_ || frame.height() != height_; | 830 frame.width() != width_ || frame.height() != height_; |
792 | 831 |
793 if (reconfigure_due_to_format) { | 832 if (reconfigure_due_to_format) { |
794 ALOGD << "Reconfigure encoder due to format change. " | 833 ALOGD << "Reconfigure encoder due to format change. " |
795 << (use_surface_ ? | 834 << (use_surface_ ? |
796 "Reconfiguring to encode from byte buffer." : | 835 "Reconfiguring to encode from byte buffer." : |
797 "Reconfiguring to encode from texture."); | 836 "Reconfiguring to encode from texture."); |
798 LogStatistics(true); | 837 LogStatistics(true); |
799 } | 838 } |
800 if (reconfigure_due_to_size) { | 839 if (reconfigure_due_to_size) { |
801 ALOGW << "Reconfigure encoder due to frame resolution change from " | 840 ALOGW << "Reconfigure encoder due to frame resolution change from " |
802 << width_ << " x " << height_ << " to " << frame.width() << " x " | 841 << width_ << " x " << height_ << " to " << frame.width() << " x " |
803 << frame.height(); | 842 << frame.height(); |
804 LogStatistics(true); | 843 LogStatistics(true); |
805 width_ = frame.width(); | 844 width_ = frame.width(); |
806 height_ = frame.height(); | 845 height_ = frame.height(); |
807 } | 846 } |
808 | 847 |
809 if (!reconfigure_due_to_format && !reconfigure_due_to_size) | 848 if (!reconfigure_due_to_format && !reconfigure_due_to_size) |
810 return true; | 849 return true; |
811 | 850 |
812 ReleaseOnCodecThread(); | 851 Release(); |
813 | 852 |
814 return InitEncodeOnCodecThread(width_, height_, 0, 0 , is_texture_frame) == | 853 return InitEncodeInternal(width_, height_, 0, 0, is_texture_frame) == |
815 WEBRTC_VIDEO_CODEC_OK; | 854 WEBRTC_VIDEO_CODEC_OK; |
816 } | 855 } |
817 | 856 |
818 bool MediaCodecVideoEncoder::EncodeByteBufferOnCodecThread(JNIEnv* jni, | 857 bool MediaCodecVideoEncoder::EncodeByteBuffer(JNIEnv* jni, |
819 bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index) { | 858 bool key_frame, |
820 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 859 const webrtc::VideoFrame& frame, |
860 int input_buffer_index) { | |
861 RTC_DCHECK_RUN_ON(&encoder_queue_checker_); | |
821 RTC_CHECK(!use_surface_); | 862 RTC_CHECK(!use_surface_); |
822 | 863 |
823 jobject j_input_buffer = input_buffers_[input_buffer_index]; | 864 jobject j_input_buffer = input_buffers_[input_buffer_index]; |
824 uint8_t* yuv_buffer = | 865 uint8_t* yuv_buffer = |
825 reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer)); | 866 reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer)); |
826 if (CheckException(jni)) { | 867 if (CheckException(jni)) { |
827 ALOGE << "Exception in get direct buffer address."; | 868 ALOGE << "Exception in get direct buffer address."; |
828 ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */); | 869 ProcessHWError(true /* reset_if_fallback_unavailable */); |
829 return false; | 870 return false; |
830 } | 871 } |
831 RTC_CHECK(yuv_buffer) << "Indirect buffer??"; | 872 RTC_CHECK(yuv_buffer) << "Indirect buffer??"; |
832 RTC_CHECK(!libyuv::ConvertFromI420( | 873 RTC_CHECK(!libyuv::ConvertFromI420( |
833 frame.video_frame_buffer()->DataY(), | 874 frame.video_frame_buffer()->DataY(), |
834 frame.video_frame_buffer()->StrideY(), | 875 frame.video_frame_buffer()->StrideY(), |
835 frame.video_frame_buffer()->DataU(), | 876 frame.video_frame_buffer()->DataU(), |
836 frame.video_frame_buffer()->StrideU(), | 877 frame.video_frame_buffer()->StrideU(), |
837 frame.video_frame_buffer()->DataV(), | 878 frame.video_frame_buffer()->DataV(), |
838 frame.video_frame_buffer()->StrideV(), | 879 frame.video_frame_buffer()->StrideV(), |
839 yuv_buffer, width_, width_, height_, encoder_fourcc_)) | 880 yuv_buffer, width_, width_, height_, encoder_fourcc_)) |
840 << "ConvertFromI420 failed"; | 881 << "ConvertFromI420 failed"; |
841 | 882 |
842 bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_, | 883 bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_, |
843 j_encode_buffer_method_, | 884 j_encode_buffer_method_, |
844 key_frame, | 885 key_frame, |
845 input_buffer_index, | 886 input_buffer_index, |
846 yuv_size_, | 887 yuv_size_, |
847 current_timestamp_us_); | 888 current_timestamp_us_); |
848 if (CheckException(jni)) { | 889 if (CheckException(jni)) { |
849 ALOGE << "Exception in encode buffer."; | 890 ALOGE << "Exception in encode buffer."; |
850 ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */); | 891 ProcessHWError(true /* reset_if_fallback_unavailable */); |
851 return false; | 892 return false; |
852 } | 893 } |
853 return encode_status; | 894 return encode_status; |
854 } | 895 } |
855 | 896 |
856 bool MediaCodecVideoEncoder::EncodeTextureOnCodecThread(JNIEnv* jni, | 897 bool MediaCodecVideoEncoder::EncodeTexture(JNIEnv* jni, |
857 bool key_frame, const webrtc::VideoFrame& frame) { | 898 bool key_frame, |
858 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 899 const webrtc::VideoFrame& frame) { |
900 RTC_DCHECK_RUN_ON(&encoder_queue_checker_); | |
859 RTC_CHECK(use_surface_); | 901 RTC_CHECK(use_surface_); |
860 NativeHandleImpl* handle = static_cast<NativeHandleImpl*>( | 902 NativeHandleImpl* handle = static_cast<NativeHandleImpl*>( |
861 frame.video_frame_buffer()->native_handle()); | 903 frame.video_frame_buffer()->native_handle()); |
862 jfloatArray sampling_matrix = handle->sampling_matrix.ToJava(jni); | 904 jfloatArray sampling_matrix = handle->sampling_matrix.ToJava(jni); |
863 bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_, | 905 bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_, |
864 j_encode_texture_method_, | 906 j_encode_texture_method_, |
865 key_frame, | 907 key_frame, |
866 handle->oes_texture_id, | 908 handle->oes_texture_id, |
867 sampling_matrix, | 909 sampling_matrix, |
868 current_timestamp_us_); | 910 current_timestamp_us_); |
869 if (CheckException(jni)) { | 911 if (CheckException(jni)) { |
870 ALOGE << "Exception in encode texture."; | 912 ALOGE << "Exception in encode texture."; |
871 ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */); | 913 ProcessHWError(true /* reset_if_fallback_unavailable */); |
872 return false; | 914 return false; |
873 } | 915 } |
874 return encode_status; | 916 return encode_status; |
875 } | 917 } |
876 | 918 |
877 int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread( | |
878 webrtc::EncodedImageCallback* callback) { | |
879 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | |
880 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | |
881 ScopedLocalRefFrame local_ref_frame(jni); | |
882 callback_ = callback; | |
883 return WEBRTC_VIDEO_CODEC_OK; | |
884 } | |
885 | |
886 int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() { | |
887 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | |
888 if (!inited_) { | |
889 return WEBRTC_VIDEO_CODEC_OK; | |
890 } | |
891 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | |
892 ALOGD << "EncoderReleaseOnCodecThread: Frames received: " << | |
893 frames_received_ << ". Encoded: " << frames_encoded_ << | |
894 ". Dropped: " << frames_dropped_media_encoder_; | |
895 ScopedLocalRefFrame local_ref_frame(jni); | |
896 for (size_t i = 0; i < input_buffers_.size(); ++i) | |
897 jni->DeleteGlobalRef(input_buffers_[i]); | |
898 input_buffers_.clear(); | |
899 jni->CallVoidMethod(*j_media_codec_video_encoder_, j_release_method_); | |
900 if (CheckException(jni)) { | |
901 ALOGE << "Exception in release."; | |
902 ProcessHWErrorOnCodecThread(false /* reset_if_fallback_unavailable */); | |
903 return WEBRTC_VIDEO_CODEC_ERROR; | |
904 } | |
905 rtc::MessageQueueManager::Clear(this); | |
906 inited_ = false; | |
907 use_surface_ = false; | |
908 ALOGD << "EncoderReleaseOnCodecThread done."; | |
909 return WEBRTC_VIDEO_CODEC_OK; | |
910 } | |
911 | |
912 int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate, | |
913 uint32_t frame_rate) { | |
914 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | |
915 if (sw_fallback_required_) | |
916 return WEBRTC_VIDEO_CODEC_OK; | |
917 frame_rate = (frame_rate < MAX_ALLOWED_VIDEO_FPS) ? | |
918 frame_rate : MAX_ALLOWED_VIDEO_FPS; | |
919 if (last_set_bitrate_kbps_ == new_bit_rate && | |
920 last_set_fps_ == frame_rate) { | |
921 return WEBRTC_VIDEO_CODEC_OK; | |
922 } | |
923 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | |
924 ScopedLocalRefFrame local_ref_frame(jni); | |
925 if (new_bit_rate > 0) { | |
926 last_set_bitrate_kbps_ = new_bit_rate; | |
927 } | |
928 if (frame_rate > 0) { | |
929 last_set_fps_ = frame_rate; | |
930 } | |
931 bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_, | |
932 j_set_rates_method_, | |
933 last_set_bitrate_kbps_, | |
934 last_set_fps_); | |
935 if (CheckException(jni) || !ret) { | |
936 ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */); | |
937 return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_OK | |
938 : WEBRTC_VIDEO_CODEC_ERROR; | |
939 } | |
940 return WEBRTC_VIDEO_CODEC_OK; | |
941 } | |
942 | |
943 int MediaCodecVideoEncoder::GetOutputBufferInfoIndex( | 919 int MediaCodecVideoEncoder::GetOutputBufferInfoIndex( |
944 JNIEnv* jni, | 920 JNIEnv* jni, |
945 jobject j_output_buffer_info) { | 921 jobject j_output_buffer_info) { |
946 return GetIntField(jni, j_output_buffer_info, j_info_index_field_); | 922 return GetIntField(jni, j_output_buffer_info, j_info_index_field_); |
947 } | 923 } |
948 | 924 |
949 jobject MediaCodecVideoEncoder::GetOutputBufferInfoBuffer( | 925 jobject MediaCodecVideoEncoder::GetOutputBufferInfoBuffer( |
950 JNIEnv* jni, | 926 JNIEnv* jni, |
951 jobject j_output_buffer_info) { | 927 jobject j_output_buffer_info) { |
952 return GetObjectField(jni, j_output_buffer_info, j_info_buffer_field_); | 928 return GetObjectField(jni, j_output_buffer_info, j_info_buffer_field_); |
953 } | 929 } |
954 | 930 |
955 bool MediaCodecVideoEncoder::GetOutputBufferInfoIsKeyFrame( | 931 bool MediaCodecVideoEncoder::GetOutputBufferInfoIsKeyFrame( |
956 JNIEnv* jni, | 932 JNIEnv* jni, |
957 jobject j_output_buffer_info) { | 933 jobject j_output_buffer_info) { |
958 return GetBooleanField(jni, j_output_buffer_info, j_info_is_key_frame_field_); | 934 return GetBooleanField(jni, j_output_buffer_info, j_info_is_key_frame_field_); |
959 } | 935 } |
960 | 936 |
961 jlong MediaCodecVideoEncoder::GetOutputBufferInfoPresentationTimestampUs( | 937 jlong MediaCodecVideoEncoder::GetOutputBufferInfoPresentationTimestampUs( |
962 JNIEnv* jni, | 938 JNIEnv* jni, |
963 jobject j_output_buffer_info) { | 939 jobject j_output_buffer_info) { |
964 return GetLongField( | 940 return GetLongField( |
965 jni, j_output_buffer_info, j_info_presentation_timestamp_us_field_); | 941 jni, j_output_buffer_info, j_info_presentation_timestamp_us_field_); |
966 } | 942 } |
967 | 943 |
968 bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) { | 944 bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) { |
969 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 945 RTC_DCHECK_RUN_ON(&encoder_queue_checker_); |
970 | 946 |
971 while (true) { | 947 while (true) { |
972 jobject j_output_buffer_info = jni->CallObjectMethod( | 948 jobject j_output_buffer_info = jni->CallObjectMethod( |
973 *j_media_codec_video_encoder_, j_dequeue_output_buffer_method_); | 949 *j_media_codec_video_encoder_, j_dequeue_output_buffer_method_); |
974 if (CheckException(jni)) { | 950 if (CheckException(jni)) { |
975 ALOGE << "Exception in set dequeue output buffer."; | 951 ALOGE << "Exception in set dequeue output buffer."; |
976 ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */); | 952 ProcessHWError(true /* reset_if_fallback_unavailable */); |
977 return WEBRTC_VIDEO_CODEC_ERROR; | 953 return WEBRTC_VIDEO_CODEC_ERROR; |
978 } | 954 } |
979 if (IsNull(jni, j_output_buffer_info)) { | 955 if (IsNull(jni, j_output_buffer_info)) { |
980 break; | 956 break; |
981 } | 957 } |
982 | 958 |
983 int output_buffer_index = | 959 int output_buffer_index = |
984 GetOutputBufferInfoIndex(jni, j_output_buffer_info); | 960 GetOutputBufferInfoIndex(jni, j_output_buffer_info); |
985 if (output_buffer_index == -1) { | 961 if (output_buffer_index == -1) { |
986 ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */); | 962 ProcessHWError(true /* reset_if_fallback_unavailable */); |
987 return false; | 963 return false; |
988 } | 964 } |
989 | 965 |
990 // Get key and config frame flags. | 966 // Get key and config frame flags. |
991 jobject j_output_buffer = | 967 jobject j_output_buffer = |
992 GetOutputBufferInfoBuffer(jni, j_output_buffer_info); | 968 GetOutputBufferInfoBuffer(jni, j_output_buffer_info); |
993 bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info); | 969 bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info); |
994 | 970 |
995 // Get frame timestamps from a queue - for non config frames only. | 971 // Get frame timestamps from a queue - for non config frames only. |
996 int64_t encoding_start_time_ms = 0; | 972 int64_t encoding_start_time_ms = 0; |
997 int64_t frame_encoding_time_ms = 0; | 973 int64_t frame_encoding_time_ms = 0; |
998 last_output_timestamp_ms_ = | 974 last_output_timestamp_ms_ = |
999 GetOutputBufferInfoPresentationTimestampUs(jni, j_output_buffer_info) / | 975 GetOutputBufferInfoPresentationTimestampUs(jni, j_output_buffer_info) / |
1000 rtc::kNumMicrosecsPerMillisec; | 976 rtc::kNumMicrosecsPerMillisec; |
1001 if (!input_frame_infos_.empty()) { | 977 if (!input_frame_infos_.empty()) { |
1002 const InputFrameInfo& frame_info = input_frame_infos_.front(); | 978 const InputFrameInfo& frame_info = input_frame_infos_.front(); |
1003 output_timestamp_ = frame_info.frame_timestamp; | 979 output_timestamp_ = frame_info.frame_timestamp; |
1004 output_render_time_ms_ = frame_info.frame_render_time_ms; | 980 output_render_time_ms_ = frame_info.frame_render_time_ms; |
1005 output_rotation_ = frame_info.rotation; | 981 output_rotation_ = frame_info.rotation; |
1006 encoding_start_time_ms = frame_info.encode_start_time; | 982 encoding_start_time_ms = frame_info.encode_start_time; |
1007 input_frame_infos_.pop_front(); | 983 input_frame_infos_.pop_front(); |
1008 } | 984 } |
1009 | 985 |
1010 // Extract payload. | 986 // Extract payload. |
1011 size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer); | 987 size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer); |
1012 uint8_t* payload = reinterpret_cast<uint8_t*>( | 988 uint8_t* payload = reinterpret_cast<uint8_t*>( |
1013 jni->GetDirectBufferAddress(j_output_buffer)); | 989 jni->GetDirectBufferAddress(j_output_buffer)); |
1014 if (CheckException(jni)) { | 990 if (CheckException(jni)) { |
1015 ALOGE << "Exception in get direct buffer address."; | 991 ALOGE << "Exception in get direct buffer address."; |
1016 ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */); | 992 ProcessHWError(true /* reset_if_fallback_unavailable */); |
1017 return WEBRTC_VIDEO_CODEC_ERROR; | 993 return WEBRTC_VIDEO_CODEC_ERROR; |
1018 } | 994 } |
1019 | 995 |
1020 // Callback - return encoded frame. | 996 // Callback - return encoded frame. |
1021 const VideoCodecType codec_type = | 997 const VideoCodecType codec_type = |
1022 webrtc::PayloadNameToCodecType(codec_.name) | 998 webrtc::PayloadNameToCodecType(codec_.name) |
1023 .value_or(webrtc::kVideoCodecUnknown); | 999 .value_or(webrtc::kVideoCodecUnknown); |
1024 webrtc::EncodedImageCallback::Result callback_result( | 1000 webrtc::EncodedImageCallback::Result callback_result( |
1025 webrtc::EncodedImageCallback::Result::OK); | 1001 webrtc::EncodedImageCallback::Result::OK); |
1026 if (callback_) { | 1002 if (callback_) { |
(...skipping 68 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1095 image->qp_ = qp; | 1071 image->qp_ = qp; |
1096 } | 1072 } |
1097 // For H.264 search for start codes. | 1073 // For H.264 search for start codes. |
1098 const std::vector<webrtc::H264::NaluIndex> nalu_idxs = | 1074 const std::vector<webrtc::H264::NaluIndex> nalu_idxs = |
1099 webrtc::H264::FindNaluIndices(payload, payload_size); | 1075 webrtc::H264::FindNaluIndices(payload, payload_size); |
1100 if (nalu_idxs.empty()) { | 1076 if (nalu_idxs.empty()) { |
1101 ALOGE << "Start code is not found!"; | 1077 ALOGE << "Start code is not found!"; |
1102 ALOGE << "Data:" << image->_buffer[0] << " " << image->_buffer[1] | 1078 ALOGE << "Data:" << image->_buffer[0] << " " << image->_buffer[1] |
1103 << " " << image->_buffer[2] << " " << image->_buffer[3] | 1079 << " " << image->_buffer[2] << " " << image->_buffer[3] |
1104 << " " << image->_buffer[4] << " " << image->_buffer[5]; | 1080 << " " << image->_buffer[4] << " " << image->_buffer[5]; |
1105 ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */); | 1081 ProcessHWError(true /* reset_if_fallback_unavailable */); |
1106 return false; | 1082 return false; |
1107 } | 1083 } |
1108 header.VerifyAndAllocateFragmentationHeader(nalu_idxs.size()); | 1084 header.VerifyAndAllocateFragmentationHeader(nalu_idxs.size()); |
1109 for (size_t i = 0; i < nalu_idxs.size(); i++) { | 1085 for (size_t i = 0; i < nalu_idxs.size(); i++) { |
1110 header.fragmentationOffset[i] = nalu_idxs[i].payload_start_offset; | 1086 header.fragmentationOffset[i] = nalu_idxs[i].payload_start_offset; |
1111 header.fragmentationLength[i] = nalu_idxs[i].payload_size; | 1087 header.fragmentationLength[i] = nalu_idxs[i].payload_size; |
1112 header.fragmentationPlType[i] = 0; | 1088 header.fragmentationPlType[i] = 0; |
1113 header.fragmentationTimeDiff[i] = 0; | 1089 header.fragmentationTimeDiff[i] = 0; |
1114 } | 1090 } |
1115 } | 1091 } |
1116 | 1092 |
1117 callback_result = callback_->OnEncodedImage(*image, &info, &header); | 1093 callback_result = callback_->OnEncodedImage(*image, &info, &header); |
1118 } | 1094 } |
1119 | 1095 |
1120 // Return output buffer back to the encoder. | 1096 // Return output buffer back to the encoder. |
1121 bool success = jni->CallBooleanMethod(*j_media_codec_video_encoder_, | 1097 bool success = jni->CallBooleanMethod(*j_media_codec_video_encoder_, |
1122 j_release_output_buffer_method_, | 1098 j_release_output_buffer_method_, |
1123 output_buffer_index); | 1099 output_buffer_index); |
1124 if (CheckException(jni) || !success) { | 1100 if (CheckException(jni) || !success) { |
1125 ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */); | 1101 ProcessHWError(true /* reset_if_fallback_unavailable */); |
1126 return false; | 1102 return false; |
1127 } | 1103 } |
1128 | 1104 |
1129 // Print per frame statistics. | 1105 // Print per frame statistics. |
1130 if (encoding_start_time_ms > 0) { | 1106 if (encoding_start_time_ms > 0) { |
1131 frame_encoding_time_ms = rtc::TimeMillis() - encoding_start_time_ms; | 1107 frame_encoding_time_ms = rtc::TimeMillis() - encoding_start_time_ms; |
1132 } | 1108 } |
1133 if (frames_encoded_ < kMaxEncodedLogFrames) { | 1109 if (frames_encoded_ < kMaxEncodedLogFrames) { |
1134 int current_latency = | 1110 int current_latency = |
1135 (int)(last_input_timestamp_ms_ - last_output_timestamp_ms_); | 1111 (int)(last_input_timestamp_ms_ - last_output_timestamp_ms_); |
(...skipping 169 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1305 return supported_codecs_; | 1281 return supported_codecs_; |
1306 } | 1282 } |
1307 | 1283 |
1308 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( | 1284 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( |
1309 webrtc::VideoEncoder* encoder) { | 1285 webrtc::VideoEncoder* encoder) { |
1310 ALOGD << "Destroy video encoder."; | 1286 ALOGD << "Destroy video encoder."; |
1311 delete encoder; | 1287 delete encoder; |
1312 } | 1288 } |
1313 | 1289 |
1314 } // namespace webrtc_jni | 1290 } // namespace webrtc_jni |
OLD | NEW |