OLD | NEW |
---|---|
1 /* | 1 /* |
2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
11 // NOTICE: androidmediaencoder_jni.h must be included before | 11 // NOTICE: androidmediaencoder_jni.h must be included before |
12 // androidmediacodeccommon.h to avoid build errors. | 12 // androidmediacodeccommon.h to avoid build errors. |
13 #include "webrtc/sdk/android/src/jni/androidmediaencoder_jni.h" | 13 #include "webrtc/sdk/android/src/jni/androidmediaencoder_jni.h" |
14 | 14 |
15 #include <algorithm> | 15 #include <algorithm> |
16 #include <memory> | 16 #include <memory> |
17 #include <list> | 17 #include <list> |
18 | 18 |
19 #include "third_party/libyuv/include/libyuv/convert.h" | 19 #include "third_party/libyuv/include/libyuv/convert.h" |
20 #include "third_party/libyuv/include/libyuv/convert_from.h" | 20 #include "third_party/libyuv/include/libyuv/convert_from.h" |
21 #include "third_party/libyuv/include/libyuv/video_common.h" | 21 #include "third_party/libyuv/include/libyuv/video_common.h" |
22 #include "webrtc/sdk/android/src/jni/androidmediacodeccommon.h" | 22 #include "webrtc/sdk/android/src/jni/androidmediacodeccommon.h" |
23 #include "webrtc/sdk/android/src/jni/classreferenceholder.h" | 23 #include "webrtc/sdk/android/src/jni/classreferenceholder.h" |
24 #include "webrtc/sdk/android/src/jni/native_handle_impl.h" | 24 #include "webrtc/sdk/android/src/jni/native_handle_impl.h" |
25 #include "webrtc/base/bind.h" | 25 #include "webrtc/base/bind.h" |
26 #include "webrtc/base/checks.h" | 26 #include "webrtc/base/checks.h" |
27 #include "webrtc/base/logging.h" | 27 #include "webrtc/base/logging.h" |
28 #include "webrtc/base/sequenced_task_checker.h" | |
29 #include "webrtc/base/task_queue.h" | |
28 #include "webrtc/base/thread.h" | 30 #include "webrtc/base/thread.h" |
29 #include "webrtc/base/thread_checker.h" | |
30 #include "webrtc/base/timeutils.h" | 31 #include "webrtc/base/timeutils.h" |
32 #include "webrtc/base/weak_ptr.h" | |
31 #include "webrtc/common_types.h" | 33 #include "webrtc/common_types.h" |
32 #include "webrtc/common_video/h264/h264_bitstream_parser.h" | 34 #include "webrtc/common_video/h264/h264_bitstream_parser.h" |
33 #include "webrtc/common_video/h264/h264_common.h" | 35 #include "webrtc/common_video/h264/h264_common.h" |
34 #include "webrtc/common_video/h264/profile_level_id.h" | 36 #include "webrtc/common_video/h264/profile_level_id.h" |
35 #include "webrtc/media/engine/internalencoderfactory.h" | 37 #include "webrtc/media/engine/internalencoderfactory.h" |
36 #include "webrtc/modules/video_coding/include/video_codec_interface.h" | 38 #include "webrtc/modules/video_coding/include/video_codec_interface.h" |
37 #include "webrtc/modules/video_coding/utility/quality_scaler.h" | 39 #include "webrtc/modules/video_coding/utility/quality_scaler.h" |
38 #include "webrtc/modules/video_coding/utility/vp8_header_parser.h" | 40 #include "webrtc/modules/video_coding/utility/vp8_header_parser.h" |
39 #include "webrtc/system_wrappers/include/field_trial.h" | 41 #include "webrtc/system_wrappers/include/field_trial.h" |
40 #include "webrtc/system_wrappers/include/logcat_trace_context.h" | 42 #include "webrtc/system_wrappers/include/logcat_trace_context.h" |
(...skipping 42 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
83 // Maximum time limit between incoming frames before requesting a key frame. | 85 // Maximum time limit between incoming frames before requesting a key frame. |
84 const size_t kFrameDiffThresholdMs = 350; | 86 const size_t kFrameDiffThresholdMs = 350; |
85 const int kMinKeyFrameInterval = 6; | 87 const int kMinKeyFrameInterval = 6; |
86 const char kH264HighProfileFieldTrial[] = "WebRTC-H264HighProfile"; | 88 const char kH264HighProfileFieldTrial[] = "WebRTC-H264HighProfile"; |
87 } // namespace | 89 } // namespace |
88 | 90 |
89 // MediaCodecVideoEncoder is a webrtc::VideoEncoder implementation that uses | 91 // MediaCodecVideoEncoder is a webrtc::VideoEncoder implementation that uses |
90 // Android's MediaCodec SDK API behind the scenes to implement (hopefully) | 92 // Android's MediaCodec SDK API behind the scenes to implement (hopefully) |
91 // HW-backed video encode. This C++ class is implemented as a very thin shim, | 93 // HW-backed video encode. This C++ class is implemented as a very thin shim, |
92 // delegating all of the interesting work to org.webrtc.MediaCodecVideoEncoder. | 94 // delegating all of the interesting work to org.webrtc.MediaCodecVideoEncoder. |
93 // MediaCodecVideoEncoder is created, operated, and destroyed on a single | 95 // MediaCodecVideoEncoder must be operated on a single task queue, currently |
94 // thread, currently the libjingle Worker thread. | 96 // this is the encoder queue from ViE encoder. |
95 class MediaCodecVideoEncoder : public webrtc::VideoEncoder, | 97 class MediaCodecVideoEncoder : public webrtc::VideoEncoder { |
96 public rtc::MessageHandler { | |
97 public: | 98 public: |
98 virtual ~MediaCodecVideoEncoder(); | 99 virtual ~MediaCodecVideoEncoder(); |
99 MediaCodecVideoEncoder(JNIEnv* jni, | 100 MediaCodecVideoEncoder(JNIEnv* jni, |
100 const cricket::VideoCodec& codec, | 101 const cricket::VideoCodec& codec, |
101 jobject egl_context); | 102 jobject egl_context); |
102 | 103 |
103 // webrtc::VideoEncoder implementation. Everything trampolines to | 104 // webrtc::VideoEncoder implementation. |
104 // |codec_thread_| for execution. | |
105 int32_t InitEncode(const webrtc::VideoCodec* codec_settings, | 105 int32_t InitEncode(const webrtc::VideoCodec* codec_settings, |
106 int32_t /* number_of_cores */, | 106 int32_t /* number_of_cores */, |
107 size_t /* max_payload_size */) override; | 107 size_t /* max_payload_size */) override; |
108 int32_t Encode(const webrtc::VideoFrame& input_image, | 108 int32_t Encode(const webrtc::VideoFrame& input_image, |
109 const webrtc::CodecSpecificInfo* /* codec_specific_info */, | 109 const webrtc::CodecSpecificInfo* /* codec_specific_info */, |
110 const std::vector<webrtc::FrameType>* frame_types) override; | 110 const std::vector<webrtc::FrameType>* frame_types) override; |
111 int32_t RegisterEncodeCompleteCallback( | 111 int32_t RegisterEncodeCompleteCallback( |
112 webrtc::EncodedImageCallback* callback) override; | 112 webrtc::EncodedImageCallback* callback) override; |
113 int32_t Release() override; | 113 int32_t Release() override; |
114 int32_t SetChannelParameters(uint32_t /* packet_loss */, | 114 int32_t SetChannelParameters(uint32_t /* packet_loss */, |
115 int64_t /* rtt */) override; | 115 int64_t /* rtt */) override; |
116 int32_t SetRateAllocation(const webrtc::BitrateAllocation& rate_allocation, | 116 int32_t SetRateAllocation(const webrtc::BitrateAllocation& rate_allocation, |
117 uint32_t frame_rate) override; | 117 uint32_t frame_rate) override; |
118 | 118 |
119 // rtc::MessageHandler implementation. | |
120 void OnMessage(rtc::Message* msg) override; | |
121 | |
122 bool SupportsNativeHandle() const override { return egl_context_ != nullptr; } | 119 bool SupportsNativeHandle() const override { return egl_context_ != nullptr; } |
123 const char* ImplementationName() const override; | 120 const char* ImplementationName() const override; |
124 | 121 |
125 private: | 122 private: |
126 // ResetCodecOnCodecThread() calls ReleaseOnCodecThread() and | 123 class EncodeTask : public rtc::QueuedTask { |
127 // InitEncodeOnCodecThread() in an attempt to restore the codec to an | 124 public: |
128 // operable state. Necessary after all manner of OMX-layer errors. | 125 EncodeTask(rtc::WeakPtr<MediaCodecVideoEncoder> encoder); |
129 // Returns true if the codec was reset successfully. | 126 bool Run() override; |
130 bool ResetCodecOnCodecThread(); | 127 |
128 private: | |
129 rtc::WeakPtr<MediaCodecVideoEncoder> encoder_; | |
130 }; | |
131 | |
132 // ResetCodec() calls Release() and InitEncodeInternal() in an attempt to | |
133 // restore the codec to an operable state. Necessary after all manner of | |
134 // OMX-layer errors. Returns true if the codec was reset successfully. | |
135 bool ResetCodec(); | |
131 | 136 |
132 // Fallback to a software encoder if one is supported else try to reset the | 137 // Fallback to a software encoder if one is supported else try to reset the |
133 // encoder. Called with |reset_if_fallback_unavailable| equal to false from | 138 // encoder. Called with |reset_if_fallback_unavailable| equal to false from |
134 // init/release encoder so that we don't go into infinite recursion. | 139 // init/release encoder so that we don't go into infinite recursion. |
135 // Returns true if the codec was reset successfully. | 140 // Returns true if the codec was reset successfully. |
136 bool ProcessHWErrorOnCodecThread(bool reset_if_fallback_unavailable); | 141 bool ProcessHWError(bool reset_if_fallback_unavailable); |
137 | 142 |
138 // Calls ProcessHWErrorOnCodecThread(true). Returns | 143 // Calls ProcessHWError(true). Returns WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE if |
139 // WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE if sw_fallback_required_ was set or | 144 // sw_fallback_required_ was set or WEBRTC_VIDEO_CODEC_ERROR otherwise. |
140 // WEBRTC_VIDEO_CODEC_ERROR otherwise. | 145 int32_t ProcessHWErrorOnEncode(); |
141 int32_t ProcessHWErrorOnEncodeOnCodecThread(); | |
142 | 146 |
143 // Implementation of webrtc::VideoEncoder methods above, all running on the | |
144 // codec thread exclusively. | |
145 // | |
146 // If width==0 then this is assumed to be a re-initialization and the | 147 // If width==0 then this is assumed to be a re-initialization and the |
147 // previously-current values are reused instead of the passed parameters | 148 // previously-current values are reused instead of the passed parameters |
148 // (makes it easier to reason about thread-safety). | 149 // (makes it easier to reason about thread-safety). |
149 int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps, | 150 int32_t InitEncodeInternal(int width, |
150 bool use_surface); | 151 int height, |
152 int kbps, | |
153 int fps, | |
154 bool use_surface); | |
151 // Reconfigure to match |frame| in width, height. Also reconfigures the | 155 // Reconfigure to match |frame| in width, height. Also reconfigures the |
152 // encoder if |frame| is a texture/byte buffer and the encoder is initialized | 156 // encoder if |frame| is a texture/byte buffer and the encoder is initialized |
153 // for byte buffer/texture. Returns false if reconfiguring fails. | 157 // for byte buffer/texture. Returns false if reconfiguring fails. |
154 bool MaybeReconfigureEncoderOnCodecThread(const webrtc::VideoFrame& frame); | 158 bool MaybeReconfigureEncoder(const webrtc::VideoFrame& frame); |
155 int32_t EncodeOnCodecThread( | 159 bool EncodeByteBuffer(JNIEnv* jni, |
156 const webrtc::VideoFrame& input_image, | 160 bool key_frame, |
157 const std::vector<webrtc::FrameType>* frame_types, | 161 const webrtc::VideoFrame& frame, |
158 const int64_t frame_input_time_ms); | 162 int input_buffer_index); |
159 bool EncodeByteBufferOnCodecThread(JNIEnv* jni, | 163 bool EncodeTexture(JNIEnv* jni, |
160 bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index); | 164 bool key_frame, |
161 bool EncodeTextureOnCodecThread(JNIEnv* jni, | 165 const webrtc::VideoFrame& frame); |
162 bool key_frame, const webrtc::VideoFrame& frame); | |
163 | |
164 int32_t RegisterEncodeCompleteCallbackOnCodecThread( | |
165 webrtc::EncodedImageCallback* callback); | |
166 int32_t ReleaseOnCodecThread(); | |
167 int32_t SetRatesOnCodecThread(uint32_t new_bit_rate, uint32_t frame_rate); | |
168 | 166 |
169 // Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members. | 167 // Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members. |
170 int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info); | 168 int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info); |
171 jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info); | 169 jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info); |
172 bool GetOutputBufferInfoIsKeyFrame(JNIEnv* jni, jobject j_output_buffer_info); | 170 bool GetOutputBufferInfoIsKeyFrame(JNIEnv* jni, jobject j_output_buffer_info); |
173 jlong GetOutputBufferInfoPresentationTimestampUs( | 171 jlong GetOutputBufferInfoPresentationTimestampUs( |
174 JNIEnv* jni, jobject j_output_buffer_info); | 172 JNIEnv* jni, jobject j_output_buffer_info); |
175 | 173 |
176 // Deliver any outputs pending in the MediaCodec to our |callback_| and return | 174 // Deliver any outputs pending in the MediaCodec to our |callback_| and return |
177 // true on success. | 175 // true on success. |
178 bool DeliverPendingOutputs(JNIEnv* jni); | 176 bool DeliverPendingOutputs(JNIEnv* jni); |
179 | 177 |
180 VideoEncoder::ScalingSettings GetScalingSettings() const override; | 178 VideoEncoder::ScalingSettings GetScalingSettings() const override; |
181 | 179 |
182 // Displays encoder statistics. | 180 // Displays encoder statistics. |
183 void LogStatistics(bool force_log); | 181 void LogStatistics(bool force_log); |
184 | 182 |
183 #if RTC_DCHECK_IS_ON | |
184 // Mutex for protecting inited_. It is only used for correctness checking on | |
185 // debug build. It is used for checking that encoder has been released in the | |
186 // destructor. Because this might happen on a different thread, we need a | |
187 // mutex. | |
188 rtc::CriticalSection inited_crit_; | |
189 #endif | |
190 | |
185 // Type of video codec. | 191 // Type of video codec. |
186 const cricket::VideoCodec codec_; | 192 const cricket::VideoCodec codec_; |
187 | 193 |
188 // Valid all the time since RegisterEncodeCompleteCallback() Invoke()s to | |
189 // |codec_thread_| synchronously. | |
190 webrtc::EncodedImageCallback* callback_; | 194 webrtc::EncodedImageCallback* callback_; |
191 | 195 |
192 // State that is constant for the lifetime of this object once the ctor | 196 // State that is constant for the lifetime of this object once the ctor |
193 // returns. | 197 // returns. |
194 std::unique_ptr<Thread> | 198 rtc::SequencedTaskChecker encoder_queue_checker_; |
195 codec_thread_; // Thread on which to operate MediaCodec. | |
196 rtc::ThreadChecker codec_thread_checker_; | |
197 ScopedGlobalRef<jclass> j_media_codec_video_encoder_class_; | 199 ScopedGlobalRef<jclass> j_media_codec_video_encoder_class_; |
198 ScopedGlobalRef<jobject> j_media_codec_video_encoder_; | 200 ScopedGlobalRef<jobject> j_media_codec_video_encoder_; |
199 jmethodID j_init_encode_method_; | 201 jmethodID j_init_encode_method_; |
200 jmethodID j_get_input_buffers_method_; | 202 jmethodID j_get_input_buffers_method_; |
201 jmethodID j_dequeue_input_buffer_method_; | 203 jmethodID j_dequeue_input_buffer_method_; |
202 jmethodID j_encode_buffer_method_; | 204 jmethodID j_encode_buffer_method_; |
203 jmethodID j_encode_texture_method_; | 205 jmethodID j_encode_texture_method_; |
204 jmethodID j_release_method_; | 206 jmethodID j_release_method_; |
205 jmethodID j_set_rates_method_; | 207 jmethodID j_set_rates_method_; |
206 jmethodID j_dequeue_output_buffer_method_; | 208 jmethodID j_dequeue_output_buffer_method_; |
207 jmethodID j_release_output_buffer_method_; | 209 jmethodID j_release_output_buffer_method_; |
208 jfieldID j_color_format_field_; | 210 jfieldID j_color_format_field_; |
209 jfieldID j_info_index_field_; | 211 jfieldID j_info_index_field_; |
210 jfieldID j_info_buffer_field_; | 212 jfieldID j_info_buffer_field_; |
211 jfieldID j_info_is_key_frame_field_; | 213 jfieldID j_info_is_key_frame_field_; |
212 jfieldID j_info_presentation_timestamp_us_field_; | 214 jfieldID j_info_presentation_timestamp_us_field_; |
213 | 215 |
214 // State that is valid only between InitEncode() and the next Release(). | 216 // State that is valid only between InitEncode() and the next Release(). |
215 // Touched only on codec_thread_ so no explicit synchronization necessary. | |
216 int width_; // Frame width in pixels. | 217 int width_; // Frame width in pixels. |
217 int height_; // Frame height in pixels. | 218 int height_; // Frame height in pixels. |
218 bool inited_; | 219 bool inited_; |
219 bool use_surface_; | 220 bool use_surface_; |
220 uint16_t picture_id_; | 221 uint16_t picture_id_; |
221 enum libyuv::FourCC encoder_fourcc_; // Encoder color space format. | 222 enum libyuv::FourCC encoder_fourcc_; // Encoder color space format. |
222 int last_set_bitrate_kbps_; // Last-requested bitrate in kbps. | 223 int last_set_bitrate_kbps_; // Last-requested bitrate in kbps. |
223 int last_set_fps_; // Last-requested frame rate. | 224 int last_set_fps_; // Last-requested frame rate. |
224 int64_t current_timestamp_us_; // Current frame timestamps in us. | 225 int64_t current_timestamp_us_; // Current frame timestamps in us. |
225 int frames_received_; // Number of frames received by encoder. | 226 int frames_received_; // Number of frames received by encoder. |
226 int frames_encoded_; // Number of frames encoded by encoder. | 227 int frames_encoded_; // Number of frames encoded by encoder. |
227 int frames_dropped_media_encoder_; // Number of frames dropped by encoder. | 228 int frames_dropped_media_encoder_; // Number of frames dropped by encoder. |
228 // Number of dropped frames caused by full queue. | 229 // Number of dropped frames caused by full queue. |
229 int consecutive_full_queue_frame_drops_; | 230 int consecutive_full_queue_frame_drops_; |
230 int64_t stat_start_time_ms_; // Start time for statistics. | 231 int64_t stat_start_time_ms_; // Start time for statistics. |
231 int current_frames_; // Number of frames in the current statistics interval. | 232 int current_frames_; // Number of frames in the current statistics interval. |
232 int current_bytes_; // Encoded bytes in the current statistics interval. | 233 int current_bytes_; // Encoded bytes in the current statistics interval. |
233 int current_acc_qp_; // Accumulated QP in the current statistics interval. | 234 int current_acc_qp_; // Accumulated QP in the current statistics interval. |
234 int current_encoding_time_ms_; // Overall encoding time in the current second | 235 int current_encoding_time_ms_; // Overall encoding time in the current second |
235 int64_t last_input_timestamp_ms_; // Timestamp of last received yuv frame. | 236 int64_t last_input_timestamp_ms_; // Timestamp of last received yuv frame. |
236 int64_t last_output_timestamp_ms_; // Timestamp of last encoded frame. | 237 int64_t last_output_timestamp_ms_; // Timestamp of last encoded frame. |
238 // Holds the task while the polling loop is paused. | |
239 std::unique_ptr<rtc::QueuedTask> encode_task_; | |
237 | 240 |
238 struct InputFrameInfo { | 241 struct InputFrameInfo { |
239 InputFrameInfo(int64_t encode_start_time, | 242 InputFrameInfo(int64_t encode_start_time, |
240 int32_t frame_timestamp, | 243 int32_t frame_timestamp, |
241 int64_t frame_render_time_ms, | 244 int64_t frame_render_time_ms, |
242 webrtc::VideoRotation rotation) | 245 webrtc::VideoRotation rotation) |
243 : encode_start_time(encode_start_time), | 246 : encode_start_time(encode_start_time), |
244 frame_timestamp(frame_timestamp), | 247 frame_timestamp(frame_timestamp), |
245 frame_render_time_ms(frame_render_time_ms), | 248 frame_render_time_ms(frame_render_time_ms), |
246 rotation(rotation) {} | 249 rotation(rotation) {} |
(...skipping 33 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
280 jobject egl_context_; | 283 jobject egl_context_; |
281 | 284 |
282 // Temporary fix for VP8. | 285 // Temporary fix for VP8. |
283 // Sends a key frame if frames are largely spaced apart (possibly | 286 // Sends a key frame if frames are largely spaced apart (possibly |
284 // corresponding to a large image change). | 287 // corresponding to a large image change). |
285 int64_t last_frame_received_ms_; | 288 int64_t last_frame_received_ms_; |
286 int frames_received_since_last_key_; | 289 int frames_received_since_last_key_; |
287 webrtc::VideoCodecMode codec_mode_; | 290 webrtc::VideoCodecMode codec_mode_; |
288 | 291 |
289 bool sw_fallback_required_; | 292 bool sw_fallback_required_; |
293 | |
294 // All other member variables should be before WeakPtrFactory. Valid only from | |
295 // InitEncode to Release. | |
296 std::unique_ptr<rtc::WeakPtrFactory<MediaCodecVideoEncoder>> weak_factory_; | |
290 }; | 297 }; |
291 | 298 |
292 MediaCodecVideoEncoder::~MediaCodecVideoEncoder() { | 299 MediaCodecVideoEncoder::~MediaCodecVideoEncoder() { |
293 // Call Release() to ensure no more callbacks to us after we are deleted. | 300 #if RTC_DCHECK_IS_ON |
stefan-webrtc
2017/02/21 12:29:56
I haven't seen this in many other places. Why is i
sakal
2017/02/21 12:53:50
A lock is not normally needed. However, it is needed
stefan-webrtc
2017/02/21 13:13:08
ack
| |
294 Release(); | 301 rtc::CritScope lock(&inited_crit_); |
302 RTC_DCHECK(!inited_); | |
303 #endif | |
295 } | 304 } |
296 | 305 |
297 MediaCodecVideoEncoder::MediaCodecVideoEncoder(JNIEnv* jni, | 306 MediaCodecVideoEncoder::MediaCodecVideoEncoder(JNIEnv* jni, |
298 const cricket::VideoCodec& codec, | 307 const cricket::VideoCodec& codec, |
299 jobject egl_context) | 308 jobject egl_context) |
300 : codec_(codec), | 309 : codec_(codec), |
301 callback_(NULL), | 310 callback_(NULL), |
302 codec_thread_(new Thread()), | |
303 j_media_codec_video_encoder_class_( | 311 j_media_codec_video_encoder_class_( |
304 jni, | 312 jni, |
305 FindClass(jni, "org/webrtc/MediaCodecVideoEncoder")), | 313 FindClass(jni, "org/webrtc/MediaCodecVideoEncoder")), |
306 j_media_codec_video_encoder_( | 314 j_media_codec_video_encoder_( |
307 jni, | 315 jni, |
308 jni->NewObject(*j_media_codec_video_encoder_class_, | 316 jni->NewObject(*j_media_codec_video_encoder_class_, |
309 GetMethodID(jni, | 317 GetMethodID(jni, |
310 *j_media_codec_video_encoder_class_, | 318 *j_media_codec_video_encoder_class_, |
311 "<init>", | 319 "<init>", |
312 "()V"))), | 320 "()V"))), |
313 inited_(false), | 321 inited_(false), |
314 use_surface_(false), | 322 use_surface_(false), |
315 picture_id_(0), | 323 picture_id_(0), |
316 egl_context_(egl_context), | 324 egl_context_(egl_context), |
317 sw_fallback_required_(false) { | 325 sw_fallback_required_(false) { |
318 // It would be nice to avoid spinning up a new thread per MediaCodec, and | 326 encoder_queue_checker_.Detach(); |
319 // instead re-use e.g. the PeerConnectionFactory's |worker_thread_|, but bug | 327 |
320 // 2732 means that deadlocks abound. This class synchronously trampolines | |
321 // to |codec_thread_|, so if anything else can be coming to _us_ from | |
322 // |codec_thread_|, or from any thread holding the |_sendCritSect| described | |
323 // in the bug, we have a problem. For now work around that with a dedicated | |
324 // thread. | |
325 codec_thread_->SetName("MediaCodecVideoEncoder", NULL); | |
326 RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoEncoder"; | |
327 codec_thread_checker_.DetachFromThread(); | |
328 jclass j_output_buffer_info_class = | 328 jclass j_output_buffer_info_class = |
329 FindClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo"); | 329 FindClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo"); |
330 j_init_encode_method_ = GetMethodID( | 330 j_init_encode_method_ = GetMethodID( |
331 jni, | 331 jni, |
332 *j_media_codec_video_encoder_class_, | 332 *j_media_codec_video_encoder_class_, |
333 "initEncode", | 333 "initEncode", |
334 "(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;" | 334 "(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;" |
335 "IIIILorg/webrtc/EglBase14$Context;)Z"); | 335 "IIIILorg/webrtc/EglBase14$Context;)Z"); |
336 j_get_input_buffers_method_ = GetMethodID( | 336 j_get_input_buffers_method_ = GetMethodID( |
337 jni, | 337 jni, |
(...skipping 24 matching lines...) Expand all Loading... | |
362 j_info_index_field_ = | 362 j_info_index_field_ = |
363 GetFieldID(jni, j_output_buffer_info_class, "index", "I"); | 363 GetFieldID(jni, j_output_buffer_info_class, "index", "I"); |
364 j_info_buffer_field_ = GetFieldID( | 364 j_info_buffer_field_ = GetFieldID( |
365 jni, j_output_buffer_info_class, "buffer", "Ljava/nio/ByteBuffer;"); | 365 jni, j_output_buffer_info_class, "buffer", "Ljava/nio/ByteBuffer;"); |
366 j_info_is_key_frame_field_ = | 366 j_info_is_key_frame_field_ = |
367 GetFieldID(jni, j_output_buffer_info_class, "isKeyFrame", "Z"); | 367 GetFieldID(jni, j_output_buffer_info_class, "isKeyFrame", "Z"); |
368 j_info_presentation_timestamp_us_field_ = GetFieldID( | 368 j_info_presentation_timestamp_us_field_ = GetFieldID( |
369 jni, j_output_buffer_info_class, "presentationTimestampUs", "J"); | 369 jni, j_output_buffer_info_class, "presentationTimestampUs", "J"); |
370 if (CheckException(jni)) { | 370 if (CheckException(jni)) { |
371 ALOGW << "MediaCodecVideoEncoder ctor failed."; | 371 ALOGW << "MediaCodecVideoEncoder ctor failed."; |
372 ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */); | 372 ProcessHWError(true /* reset_if_fallback_unavailable */); |
373 } | 373 } |
374 srand(time(NULL)); | 374 srand(time(NULL)); |
375 AllowBlockingCalls(); | |
376 } | 375 } |
377 | 376 |
378 int32_t MediaCodecVideoEncoder::InitEncode( | 377 int32_t MediaCodecVideoEncoder::InitEncode( |
379 const webrtc::VideoCodec* codec_settings, | 378 const webrtc::VideoCodec* codec_settings, |
380 int32_t /* number_of_cores */, | 379 int32_t /* number_of_cores */, |
381 size_t /* max_payload_size */) { | 380 size_t /* max_payload_size */) { |
381 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_); | |
382 if (codec_settings == NULL) { | 382 if (codec_settings == NULL) { |
383 ALOGE << "NULL VideoCodec instance"; | 383 ALOGE << "NULL VideoCodec instance"; |
384 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 384 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
385 } | 385 } |
386 // Factory should guard against other codecs being used with us. | 386 // Factory should guard against other codecs being used with us. |
387 const VideoCodecType codec_type = webrtc::PayloadNameToCodecType(codec_.name) | 387 const VideoCodecType codec_type = webrtc::PayloadNameToCodecType(codec_.name) |
388 .value_or(webrtc::kVideoCodecUnknown); | 388 .value_or(webrtc::kVideoCodecUnknown); |
389 RTC_CHECK(codec_settings->codecType == codec_type) | 389 RTC_CHECK(codec_settings->codecType == codec_type) |
390 << "Unsupported codec " << codec_settings->codecType << " for " | 390 << "Unsupported codec " << codec_settings->codecType << " for " |
391 << codec_type; | 391 << codec_type; |
392 if (sw_fallback_required_) { | 392 if (sw_fallback_required_) { |
393 return WEBRTC_VIDEO_CODEC_OK; | 393 return WEBRTC_VIDEO_CODEC_OK; |
394 } | 394 } |
395 codec_mode_ = codec_settings->mode; | 395 codec_mode_ = codec_settings->mode; |
396 int init_width = codec_settings->width; | 396 int init_width = codec_settings->width; |
397 int init_height = codec_settings->height; | 397 int init_height = codec_settings->height; |
398 // Scaling is disabled for VP9, but optionally enabled for VP8. | 398 // Scaling is disabled for VP9, but optionally enabled for VP8. |
399 // TODO(pbos): Extract automaticResizeOn out of VP8 settings. | 399 // TODO(pbos): Extract automaticResizeOn out of VP8 settings. |
400 scale_ = false; | 400 scale_ = false; |
401 if (codec_type == kVideoCodecVP8) { | 401 if (codec_type == kVideoCodecVP8) { |
402 scale_ = codec_settings->VP8().automaticResizeOn; | 402 scale_ = codec_settings->VP8().automaticResizeOn; |
403 } else if (codec_type != kVideoCodecVP9) { | 403 } else if (codec_type != kVideoCodecVP9) { |
404 scale_ = true; | 404 scale_ = true; |
405 } | 405 } |
406 | 406 |
407 ALOGD << "InitEncode request: " << init_width << " x " << init_height; | 407 ALOGD << "InitEncode request: " << init_width << " x " << init_height; |
408 ALOGD << "Encoder automatic resize " << (scale_ ? "enabled" : "disabled"); | 408 ALOGD << "Encoder automatic resize " << (scale_ ? "enabled" : "disabled"); |
409 | 409 |
410 return codec_thread_->Invoke<int32_t>( | 410 return InitEncodeInternal( |
411 RTC_FROM_HERE, | 411 init_width, init_height, codec_settings->startBitrate, |
412 Bind(&MediaCodecVideoEncoder::InitEncodeOnCodecThread, this, init_width, | 412 codec_settings->maxFramerate, codec_settings->expect_encode_from_texture); |
413 init_height, codec_settings->startBitrate, | |
414 codec_settings->maxFramerate, | |
415 codec_settings->expect_encode_from_texture)); | |
416 } | |
417 | |
418 int32_t MediaCodecVideoEncoder::Encode( | |
419 const webrtc::VideoFrame& frame, | |
420 const webrtc::CodecSpecificInfo* /* codec_specific_info */, | |
421 const std::vector<webrtc::FrameType>* frame_types) { | |
422 return codec_thread_->Invoke<int32_t>( | |
423 RTC_FROM_HERE, Bind(&MediaCodecVideoEncoder::EncodeOnCodecThread, this, | |
424 frame, frame_types, rtc::TimeMillis())); | |
425 } | |
426 | |
427 int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallback( | |
428 webrtc::EncodedImageCallback* callback) { | |
429 return codec_thread_->Invoke<int32_t>( | |
430 RTC_FROM_HERE, | |
431 Bind(&MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread, | |
432 this, callback)); | |
433 } | |
434 | |
435 int32_t MediaCodecVideoEncoder::Release() { | |
436 ALOGD << "EncoderRelease request"; | |
437 return codec_thread_->Invoke<int32_t>( | |
438 RTC_FROM_HERE, Bind(&MediaCodecVideoEncoder::ReleaseOnCodecThread, this)); | |
439 } | 413 } |
440 | 414 |
441 int32_t MediaCodecVideoEncoder::SetChannelParameters(uint32_t /* packet_loss */, | 415 int32_t MediaCodecVideoEncoder::SetChannelParameters(uint32_t /* packet_loss */, |
442 int64_t /* rtt */) { | 416 int64_t /* rtt */) { |
443 return WEBRTC_VIDEO_CODEC_OK; | 417 return WEBRTC_VIDEO_CODEC_OK; |
444 } | 418 } |
445 | 419 |
446 int32_t MediaCodecVideoEncoder::SetRateAllocation( | 420 bool MediaCodecVideoEncoder::ResetCodec() { |
447 const webrtc::BitrateAllocation& rate_allocation, | 421 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_); |
448 uint32_t frame_rate) { | 422 ALOGE << "Reset"; |
449 return codec_thread_->Invoke<int32_t>( | 423 if (Release() != WEBRTC_VIDEO_CODEC_OK) { |
450 RTC_FROM_HERE, Bind(&MediaCodecVideoEncoder::SetRatesOnCodecThread, this, | |
451 rate_allocation.get_sum_kbps(), frame_rate)); | |
452 } | |
453 | |
454 void MediaCodecVideoEncoder::OnMessage(rtc::Message* msg) { | |
455 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | |
456 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | |
457 ScopedLocalRefFrame local_ref_frame(jni); | |
458 | |
459 // We only ever send one message to |this| directly (not through a Bind()'d | |
460 // functor), so expect no ID/data. | |
461 RTC_CHECK(!msg->message_id) << "Unexpected message!"; | |
462 RTC_CHECK(!msg->pdata) << "Unexpected message!"; | |
463 if (!inited_) { | |
464 return; | |
465 } | |
466 | |
467 // It would be nice to recover from a failure here if one happened, but it's | |
468 // unclear how to signal such a failure to the app, so instead we stay silent | |
469 // about it and let the next app-called API method reveal the borkedness. | |
470 DeliverPendingOutputs(jni); | |
471 | |
472 // If there aren't more frames to deliver, we can start polling at lower rate. | |
473 if (input_frame_infos_.empty()) { | |
474 codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollNoFramesMs, this); | |
475 } else { | |
476 codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollMs, this); | |
477 } | |
478 | |
479 // Call log statistics here so it's called even if no frames are being | |
480 // delivered. | |
481 LogStatistics(false); | |
482 } | |
483 | |
484 bool MediaCodecVideoEncoder::ResetCodecOnCodecThread() { | |
485 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | |
486 ALOGE << "ResetOnCodecThread"; | |
487 if (ReleaseOnCodecThread() != WEBRTC_VIDEO_CODEC_OK) { | |
488 ALOGE << "Releasing codec failed during reset."; | 424 ALOGE << "Releasing codec failed during reset."; |
489 return false; | 425 return false; |
490 } | 426 } |
491 if (InitEncodeOnCodecThread(width_, height_, 0, 0, false) != | 427 if (InitEncodeInternal(width_, height_, 0, 0, false) != |
492 WEBRTC_VIDEO_CODEC_OK) { | 428 WEBRTC_VIDEO_CODEC_OK) { |
493 ALOGE << "Initializing encoder failed during reset."; | 429 ALOGE << "Initializing encoder failed during reset."; |
494 return false; | 430 return false; |
495 } | 431 } |
496 return true; | 432 return true; |
497 } | 433 } |
498 | 434 |
499 bool MediaCodecVideoEncoder::ProcessHWErrorOnCodecThread( | 435 MediaCodecVideoEncoder::EncodeTask::EncodeTask( |
436 rtc::WeakPtr<MediaCodecVideoEncoder> encoder) | |
437 : encoder_(encoder) {} | |
438 | |
439 bool MediaCodecVideoEncoder::EncodeTask::Run() { | |
440 if (!encoder_) { | |
441 // Encoder was destroyed. | |
442 return true; | |
443 } | |
444 | |
445 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_->encoder_queue_checker_); | |
446 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | |
447 ScopedLocalRefFrame local_ref_frame(jni); | |
448 | |
449 if (!encoder_->inited_) { | |
450 encoder_->encode_task_ = std::unique_ptr<rtc::QueuedTask>(this); | |
451 return false; | |
452 } | |
453 | |
454 // It would be nice to recover from a failure here if one happened, but it's | |
455 // unclear how to signal such a failure to the app, so instead we stay silent | |
456 // about it and let the next app-called API method reveal the borkedness. | |
457 encoder_->DeliverPendingOutputs(jni); | |
458 | |
459 // Call log statistics here so it's called even if no frames are being | |
460 // delivered. | |
461 encoder_->LogStatistics(false); | |
462 | |
463 // If there aren't more frames to deliver, we can start polling at lower rate. | |
464 if (encoder_->input_frame_infos_.empty()) { | |
465 rtc::TaskQueue::Current()->PostDelayedTask( | |
466 std::unique_ptr<rtc::QueuedTask>(this), kMediaCodecPollNoFramesMs); | |
467 } else { | |
468 rtc::TaskQueue::Current()->PostDelayedTask( | |
469 std::unique_ptr<rtc::QueuedTask>(this), kMediaCodecPollMs); | |
470 } | |
471 | |
472 return false; | |
473 } | |
474 | |
475 bool MediaCodecVideoEncoder::ProcessHWError( | |
500 bool reset_if_fallback_unavailable) { | 476 bool reset_if_fallback_unavailable) { |
501 ALOGE << "ProcessHWErrorOnCodecThread"; | 477 ALOGE << "ProcessHWError"; |
502 if (FindMatchingCodec(cricket::InternalEncoderFactory().supported_codecs(), | 478 if (FindMatchingCodec(cricket::InternalEncoderFactory().supported_codecs(), |
503 codec_)) { | 479 codec_)) { |
504 ALOGE << "Fallback to SW encoder."; | 480 ALOGE << "Fallback to SW encoder."; |
505 sw_fallback_required_ = true; | 481 sw_fallback_required_ = true; |
506 return false; | 482 return false; |
507 } else if (reset_if_fallback_unavailable) { | 483 } else if (reset_if_fallback_unavailable) { |
508 ALOGE << "Reset encoder."; | 484 ALOGE << "Reset encoder."; |
509 return ResetCodecOnCodecThread(); | 485 return ResetCodec(); |
510 } | 486 } |
511 return false; | 487 return false; |
512 } | 488 } |
513 | 489 |
514 int32_t MediaCodecVideoEncoder::ProcessHWErrorOnEncodeOnCodecThread() { | 490 int32_t MediaCodecVideoEncoder::ProcessHWErrorOnEncode() { |
515 ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */); | 491 ProcessHWError(true /* reset_if_fallback_unavailable */); |
516 return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE | 492 return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE |
517 : WEBRTC_VIDEO_CODEC_ERROR; | 493 : WEBRTC_VIDEO_CODEC_ERROR; |
518 } | 494 } |
519 | 495 |
520 int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread( | 496 int32_t MediaCodecVideoEncoder::InitEncodeInternal(int width, |
521 int width, int height, int kbps, int fps, bool use_surface) { | 497 int height, |
522 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 498 int kbps, |
499 int fps, | |
500 bool use_surface) { | |
501 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_); | |
523 if (sw_fallback_required_) { | 502 if (sw_fallback_required_) { |
524 return WEBRTC_VIDEO_CODEC_OK; | 503 return WEBRTC_VIDEO_CODEC_OK; |
525 } | 504 } |
526 RTC_CHECK(!use_surface || egl_context_ != nullptr) << "EGL context not set."; | 505 RTC_CHECK(!use_surface || egl_context_ != nullptr) << "EGL context not set."; |
527 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 506 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
528 ScopedLocalRefFrame local_ref_frame(jni); | 507 ScopedLocalRefFrame local_ref_frame(jni); |
529 | 508 |
530 const VideoCodecType codec_type = webrtc::PayloadNameToCodecType(codec_.name) | 509 const VideoCodecType codec_type = webrtc::PayloadNameToCodecType(codec_.name) |
531 .value_or(webrtc::kVideoCodecUnknown); | 510 .value_or(webrtc::kVideoCodecUnknown); |
532 ALOGD << "InitEncodeOnCodecThread Type: " << (int)codec_type << ", " << width | 511 ALOGD << "InitEncodeInternal Type: " << (int)codec_type << ", " << width |
533 << " x " << height << ". Bitrate: " << kbps << " kbps. Fps: " << fps; | 512 << " x " << height << ". Bitrate: " << kbps << " kbps. Fps: " << fps; |
534 if (kbps == 0) { | 513 if (kbps == 0) { |
535 kbps = last_set_bitrate_kbps_; | 514 kbps = last_set_bitrate_kbps_; |
536 } | 515 } |
537 if (fps == 0) { | 516 if (fps == 0) { |
538 fps = MAX_VIDEO_FPS; | 517 fps = MAX_VIDEO_FPS; |
539 } | 518 } |
540 | 519 |
541 width_ = width; | 520 width_ = width; |
542 height_ = height; | 521 height_ = height; |
(...skipping 16 matching lines...) Expand all Loading... | |
559 output_render_time_ms_ = 0; | 538 output_render_time_ms_ = 0; |
560 input_frame_infos_.clear(); | 539 input_frame_infos_.clear(); |
561 drop_next_input_frame_ = false; | 540 drop_next_input_frame_ = false; |
562 use_surface_ = use_surface; | 541 use_surface_ = use_surface; |
563 picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF; | 542 picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF; |
564 gof_.SetGofInfoVP9(webrtc::TemporalStructureMode::kTemporalStructureMode1); | 543 gof_.SetGofInfoVP9(webrtc::TemporalStructureMode::kTemporalStructureMode1); |
565 tl0_pic_idx_ = static_cast<uint8_t>(rand()); | 544 tl0_pic_idx_ = static_cast<uint8_t>(rand()); |
566 gof_idx_ = 0; | 545 gof_idx_ = 0; |
567 last_frame_received_ms_ = -1; | 546 last_frame_received_ms_ = -1; |
568 frames_received_since_last_key_ = kMinKeyFrameInterval; | 547 frames_received_since_last_key_ = kMinKeyFrameInterval; |
548 weak_factory_.reset(new rtc::WeakPtrFactory<MediaCodecVideoEncoder>(this)); | |
549 encode_task_.reset(new EncodeTask(weak_factory_->GetWeakPtr())); | |
569 | 550 |
570 // We enforce no extra stride/padding in the format creation step. | 551 // We enforce no extra stride/padding in the format creation step. |
571 jobject j_video_codec_enum = JavaEnumFromIndexAndClassName( | 552 jobject j_video_codec_enum = JavaEnumFromIndexAndClassName( |
572 jni, "MediaCodecVideoEncoder$VideoCodecType", codec_type); | 553 jni, "MediaCodecVideoEncoder$VideoCodecType", codec_type); |
573 const bool encode_status = jni->CallBooleanMethod( | 554 const bool encode_status = jni->CallBooleanMethod( |
574 *j_media_codec_video_encoder_, j_init_encode_method_, | 555 *j_media_codec_video_encoder_, j_init_encode_method_, |
575 j_video_codec_enum, width, height, kbps, fps, | 556 j_video_codec_enum, width, height, kbps, fps, |
576 (use_surface ? egl_context_ : nullptr)); | 557 (use_surface ? egl_context_ : nullptr)); |
577 if (!encode_status) { | 558 if (!encode_status) { |
578 ALOGE << "Failed to configure encoder."; | 559 ALOGE << "Failed to configure encoder."; |
579 ProcessHWErrorOnCodecThread(false /* reset_if_fallback_unavailable */); | 560 ProcessHWError(false /* reset_if_fallback_unavailable */); |
580 return WEBRTC_VIDEO_CODEC_ERROR; | 561 return WEBRTC_VIDEO_CODEC_ERROR; |
581 } | 562 } |
582 if (CheckException(jni)) { | 563 if (CheckException(jni)) { |
583 ALOGE << "Exception in init encode."; | 564 ALOGE << "Exception in init encode."; |
584 ProcessHWErrorOnCodecThread(false /* reset_if_fallback_unavailable */); | 565 ProcessHWError(false /* reset_if_fallback_unavailable */); |
585 return WEBRTC_VIDEO_CODEC_ERROR; | 566 return WEBRTC_VIDEO_CODEC_ERROR; |
586 } | 567 } |
587 | 568 |
588 if (!use_surface) { | 569 if (!use_surface) { |
589 jobjectArray input_buffers = reinterpret_cast<jobjectArray>( | 570 jobjectArray input_buffers = reinterpret_cast<jobjectArray>( |
590 jni->CallObjectMethod(*j_media_codec_video_encoder_, | 571 jni->CallObjectMethod(*j_media_codec_video_encoder_, |
591 j_get_input_buffers_method_)); | 572 j_get_input_buffers_method_)); |
592 if (CheckException(jni)) { | 573 if (CheckException(jni)) { |
593 ALOGE << "Exception in get input buffers."; | 574 ALOGE << "Exception in get input buffers."; |
594 ProcessHWErrorOnCodecThread(false /* reset_if_fallback_unavailable */); | 575 ProcessHWError(false /* reset_if_fallback_unavailable */); |
595 return WEBRTC_VIDEO_CODEC_ERROR; | 576 return WEBRTC_VIDEO_CODEC_ERROR; |
596 } | 577 } |
597 | 578 |
598 if (IsNull(jni, input_buffers)) { | 579 if (IsNull(jni, input_buffers)) { |
599 ProcessHWErrorOnCodecThread(false /* reset_if_fallback_unavailable */); | 580 ProcessHWError(false /* reset_if_fallback_unavailable */); |
600 return WEBRTC_VIDEO_CODEC_ERROR; | 581 return WEBRTC_VIDEO_CODEC_ERROR; |
601 } | 582 } |
602 | 583 |
603 switch (GetIntField(jni, *j_media_codec_video_encoder_, | 584 switch (GetIntField(jni, *j_media_codec_video_encoder_, |
604 j_color_format_field_)) { | 585 j_color_format_field_)) { |
605 case COLOR_FormatYUV420Planar: | 586 case COLOR_FormatYUV420Planar: |
606 encoder_fourcc_ = libyuv::FOURCC_YU12; | 587 encoder_fourcc_ = libyuv::FOURCC_YU12; |
607 break; | 588 break; |
608 case COLOR_FormatYUV420SemiPlanar: | 589 case COLOR_FormatYUV420SemiPlanar: |
609 case COLOR_QCOM_FormatYUV420SemiPlanar: | 590 case COLOR_QCOM_FormatYUV420SemiPlanar: |
610 case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m: | 591 case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m: |
611 encoder_fourcc_ = libyuv::FOURCC_NV12; | 592 encoder_fourcc_ = libyuv::FOURCC_NV12; |
612 break; | 593 break; |
613 default: | 594 default: |
614 LOG(LS_ERROR) << "Wrong color format."; | 595 LOG(LS_ERROR) << "Wrong color format."; |
615 ProcessHWErrorOnCodecThread(false /* reset_if_fallback_unavailable */); | 596 ProcessHWError(false /* reset_if_fallback_unavailable */); |
616 return WEBRTC_VIDEO_CODEC_ERROR; | 597 return WEBRTC_VIDEO_CODEC_ERROR; |
617 } | 598 } |
618 size_t num_input_buffers = jni->GetArrayLength(input_buffers); | 599 size_t num_input_buffers = jni->GetArrayLength(input_buffers); |
619 RTC_CHECK(input_buffers_.empty()) | 600 RTC_CHECK(input_buffers_.empty()) |
620 << "Unexpected double InitEncode without Release"; | 601 << "Unexpected double InitEncode without Release"; |
621 input_buffers_.resize(num_input_buffers); | 602 input_buffers_.resize(num_input_buffers); |
622 for (size_t i = 0; i < num_input_buffers; ++i) { | 603 for (size_t i = 0; i < num_input_buffers; ++i) { |
623 input_buffers_[i] = | 604 input_buffers_[i] = |
624 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i)); | 605 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i)); |
625 int64_t yuv_buffer_capacity = | 606 int64_t yuv_buffer_capacity = |
626 jni->GetDirectBufferCapacity(input_buffers_[i]); | 607 jni->GetDirectBufferCapacity(input_buffers_[i]); |
627 if (CheckException(jni)) { | 608 if (CheckException(jni)) { |
628 ALOGE << "Exception in get direct buffer capacity."; | 609 ALOGE << "Exception in get direct buffer capacity."; |
629 ProcessHWErrorOnCodecThread(false /* reset_if_fallback_unavailable */); | 610 ProcessHWError(false /* reset_if_fallback_unavailable */); |
630 return WEBRTC_VIDEO_CODEC_ERROR; | 611 return WEBRTC_VIDEO_CODEC_ERROR; |
631 } | 612 } |
632 RTC_CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity"; | 613 RTC_CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity"; |
633 } | 614 } |
634 } | 615 } |
635 | 616 |
636 inited_ = true; | 617 { |
618 #if RTC_DCHECK_IS_ON | |
619 rtc::CritScope lock(&inited_crit_); | |
620 #endif | |
621 inited_ = true; | |
622 } | |
637 return WEBRTC_VIDEO_CODEC_OK; | 623 return WEBRTC_VIDEO_CODEC_OK; |
638 } | 624 } |
639 | 625 |
640 int32_t MediaCodecVideoEncoder::EncodeOnCodecThread( | 626 int32_t MediaCodecVideoEncoder::Encode( |
641 const webrtc::VideoFrame& frame, | 627 const webrtc::VideoFrame& frame, |
642 const std::vector<webrtc::FrameType>* frame_types, | 628 const webrtc::CodecSpecificInfo* /* codec_specific_info */, |
643 const int64_t frame_input_time_ms) { | 629 const std::vector<webrtc::FrameType>* frame_types) { |
644 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 630 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_); |
645 if (sw_fallback_required_) | 631 if (sw_fallback_required_) |
646 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; | 632 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; |
647 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 633 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
648 ScopedLocalRefFrame local_ref_frame(jni); | 634 ScopedLocalRefFrame local_ref_frame(jni); |
635 const int64_t frame_input_time_ms = rtc::TimeMillis(); | |
649 | 636 |
650 if (!inited_) { | 637 if (!inited_) { |
651 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 638 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
652 } | 639 } |
653 | 640 |
654 bool send_key_frame = false; | 641 bool send_key_frame = false; |
655 if (codec_mode_ == webrtc::kRealtimeVideo) { | 642 if (codec_mode_ == webrtc::kRealtimeVideo) { |
656 ++frames_received_since_last_key_; | 643 ++frames_received_since_last_key_; |
657 int64_t now_ms = rtc::TimeMillis(); | 644 int64_t now_ms = rtc::TimeMillis(); |
658 if (last_frame_received_ms_ != -1 && | 645 if (last_frame_received_ms_ != -1 && |
659 (now_ms - last_frame_received_ms_) > kFrameDiffThresholdMs) { | 646 (now_ms - last_frame_received_ms_) > kFrameDiffThresholdMs) { |
660 // Add limit to prevent triggering a key for every frame for very low | 647 // Add limit to prevent triggering a key for every frame for very low |
661 // framerates (e.g. if frame diff > kFrameDiffThresholdMs). | 648 // framerates (e.g. if frame diff > kFrameDiffThresholdMs). |
662 if (frames_received_since_last_key_ > kMinKeyFrameInterval) { | 649 if (frames_received_since_last_key_ > kMinKeyFrameInterval) { |
663 ALOGD << "Send key, frame diff: " << (now_ms - last_frame_received_ms_); | 650 ALOGD << "Send key, frame diff: " << (now_ms - last_frame_received_ms_); |
664 send_key_frame = true; | 651 send_key_frame = true; |
665 } | 652 } |
666 frames_received_since_last_key_ = 0; | 653 frames_received_since_last_key_ = 0; |
667 } | 654 } |
668 last_frame_received_ms_ = now_ms; | 655 last_frame_received_ms_ = now_ms; |
669 } | 656 } |
670 | 657 |
671 frames_received_++; | 658 frames_received_++; |
672 if (!DeliverPendingOutputs(jni)) { | 659 if (!DeliverPendingOutputs(jni)) { |
673 if (!ProcessHWErrorOnCodecThread( | 660 if (!ProcessHWError(true /* reset_if_fallback_unavailable */)) { |
674 true /* reset_if_fallback_unavailable */)) { | |
675 return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE | 661 return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE |
676 : WEBRTC_VIDEO_CODEC_ERROR; | 662 : WEBRTC_VIDEO_CODEC_ERROR; |
677 } | 663 } |
678 } | 664 } |
679 if (frames_encoded_ < kMaxEncodedLogFrames) { | 665 if (frames_encoded_ < kMaxEncodedLogFrames) { |
680 ALOGD << "Encoder frame in # " << (frames_received_ - 1) | 666 ALOGD << "Encoder frame in # " << (frames_received_ - 1) |
681 << ". TS: " << (int)(current_timestamp_us_ / 1000) | 667 << ". TS: " << (int)(current_timestamp_us_ / 1000) |
682 << ". Q: " << input_frame_infos_.size() << ". Fps: " << last_set_fps_ | 668 << ". Q: " << input_frame_infos_.size() << ". Fps: " << last_set_fps_ |
683 << ". Kbps: " << last_set_bitrate_kbps_; | 669 << ". Kbps: " << last_set_bitrate_kbps_; |
684 } | 670 } |
(...skipping 14 matching lines...) Expand all Loading... | |
699 ALOGD << "Already " << input_frame_infos_.size() | 685 ALOGD << "Already " << input_frame_infos_.size() |
700 << " frames in the queue, dropping" | 686 << " frames in the queue, dropping" |
701 << ". TS: " << (int)(current_timestamp_us_ / 1000) | 687 << ". TS: " << (int)(current_timestamp_us_ / 1000) |
702 << ". Fps: " << last_set_fps_ | 688 << ". Fps: " << last_set_fps_ |
703 << ". Consecutive drops: " << consecutive_full_queue_frame_drops_; | 689 << ". Consecutive drops: " << consecutive_full_queue_frame_drops_; |
704 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; | 690 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; |
705 consecutive_full_queue_frame_drops_++; | 691 consecutive_full_queue_frame_drops_++; |
706 if (consecutive_full_queue_frame_drops_ >= | 692 if (consecutive_full_queue_frame_drops_ >= |
707 ENCODER_STALL_FRAMEDROP_THRESHOLD) { | 693 ENCODER_STALL_FRAMEDROP_THRESHOLD) { |
708 ALOGE << "Encoder got stuck."; | 694 ALOGE << "Encoder got stuck."; |
709 return ProcessHWErrorOnEncodeOnCodecThread(); | 695 return ProcessHWErrorOnEncode(); |
710 } | 696 } |
711 frames_dropped_media_encoder_++; | 697 frames_dropped_media_encoder_++; |
712 return WEBRTC_VIDEO_CODEC_OK; | 698 return WEBRTC_VIDEO_CODEC_OK; |
713 } | 699 } |
714 consecutive_full_queue_frame_drops_ = 0; | 700 consecutive_full_queue_frame_drops_ = 0; |
715 | 701 |
716 rtc::scoped_refptr<webrtc::VideoFrameBuffer> input_buffer( | 702 rtc::scoped_refptr<webrtc::VideoFrameBuffer> input_buffer( |
717 frame.video_frame_buffer()); | 703 frame.video_frame_buffer()); |
718 | 704 |
719 VideoFrame input_frame(input_buffer, frame.timestamp(), | 705 VideoFrame input_frame(input_buffer, frame.timestamp(), |
720 frame.render_time_ms(), frame.rotation()); | 706 frame.render_time_ms(), frame.rotation()); |
721 | 707 |
722 if (!MaybeReconfigureEncoderOnCodecThread(input_frame)) { | 708 if (!MaybeReconfigureEncoder(input_frame)) { |
723 ALOGE << "Failed to reconfigure encoder."; | 709 ALOGE << "Failed to reconfigure encoder."; |
724 return WEBRTC_VIDEO_CODEC_ERROR; | 710 return WEBRTC_VIDEO_CODEC_ERROR; |
725 } | 711 } |
726 | 712 |
727 const bool key_frame = | 713 const bool key_frame = |
728 frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame; | 714 frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame; |
729 bool encode_status = true; | 715 bool encode_status = true; |
730 if (!input_frame.video_frame_buffer()->native_handle()) { | 716 if (!input_frame.video_frame_buffer()->native_handle()) { |
731 int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_, | 717 int j_input_buffer_index = jni->CallIntMethod( |
732 j_dequeue_input_buffer_method_); | 718 *j_media_codec_video_encoder_, j_dequeue_input_buffer_method_); |
733 if (CheckException(jni)) { | 719 if (CheckException(jni)) { |
734 ALOGE << "Exception in dequeu input buffer."; | 720 ALOGE << "Exception in dequeu input buffer."; |
735 return ProcessHWErrorOnEncodeOnCodecThread(); | 721 return ProcessHWErrorOnEncode(); |
736 } | 722 } |
737 if (j_input_buffer_index == -1) { | 723 if (j_input_buffer_index == -1) { |
738 // Video codec falls behind - no input buffer available. | 724 // Video codec falls behind - no input buffer available. |
739 ALOGW << "Encoder drop frame - no input buffers available"; | 725 ALOGW << "Encoder drop frame - no input buffers available"; |
740 if (frames_received_ > 1) { | 726 if (frames_received_ > 1) { |
741 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; | 727 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; |
742 frames_dropped_media_encoder_++; | 728 frames_dropped_media_encoder_++; |
743 } else { | 729 } else { |
744 // Input buffers are not ready after codec initialization, HW is still | 730 // Input buffers are not ready after codec initialization, HW is still |
745 // allocating thme - this is expected and should not result in drop | 731 // allocating thme - this is expected and should not result in drop |
746 // frame report. | 732 // frame report. |
747 frames_received_ = 0; | 733 frames_received_ = 0; |
748 } | 734 } |
749 return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887. | 735 return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887. |
750 } else if (j_input_buffer_index == -2) { | 736 } else if (j_input_buffer_index == -2) { |
751 return ProcessHWErrorOnEncodeOnCodecThread(); | 737 return ProcessHWErrorOnEncode(); |
752 } | 738 } |
753 encode_status = EncodeByteBufferOnCodecThread(jni, key_frame, input_frame, | 739 encode_status = |
754 j_input_buffer_index); | 740 EncodeByteBuffer(jni, key_frame, input_frame, j_input_buffer_index); |
755 } else { | 741 } else { |
756 encode_status = EncodeTextureOnCodecThread(jni, key_frame, input_frame); | 742 encode_status = EncodeTexture(jni, key_frame, input_frame); |
757 } | 743 } |
758 | 744 |
759 if (!encode_status) { | 745 if (!encode_status) { |
760 ALOGE << "Failed encode frame with timestamp: " << input_frame.timestamp(); | 746 ALOGE << "Failed encode frame with timestamp: " << input_frame.timestamp(); |
761 return ProcessHWErrorOnEncodeOnCodecThread(); | 747 return ProcessHWErrorOnEncode(); |
762 } | 748 } |
763 | 749 |
764 // Save input image timestamps for later output. | 750 // Save input image timestamps for later output. |
765 input_frame_infos_.emplace_back( | 751 input_frame_infos_.emplace_back(frame_input_time_ms, input_frame.timestamp(), |
766 frame_input_time_ms, input_frame.timestamp(), | 752 input_frame.render_time_ms(), |
767 input_frame.render_time_ms(), input_frame.rotation()); | 753 input_frame.rotation()); |
768 | 754 |
769 last_input_timestamp_ms_ = | 755 last_input_timestamp_ms_ = |
770 current_timestamp_us_ / rtc::kNumMicrosecsPerMillisec; | 756 current_timestamp_us_ / rtc::kNumMicrosecsPerMillisec; |
771 | 757 |
772 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; | 758 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; |
773 | 759 |
774 codec_thread_->Clear(this); | 760 // Start the polling loop if it is not started. |
775 codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollMs, this); | 761 if (encode_task_) { |
762 rtc::TaskQueue::Current()->PostDelayedTask(std::move(encode_task_), | |
763 kMediaCodecPollMs); | |
764 } | |
776 | 765 |
777 if (!DeliverPendingOutputs(jni)) { | 766 if (!DeliverPendingOutputs(jni)) { |
778 return ProcessHWErrorOnEncodeOnCodecThread(); | 767 return ProcessHWErrorOnEncode(); |
779 } | 768 } |
780 return WEBRTC_VIDEO_CODEC_OK; | 769 return WEBRTC_VIDEO_CODEC_OK; |
781 } | 770 } |
782 | 771 |
783 bool MediaCodecVideoEncoder::MaybeReconfigureEncoderOnCodecThread( | 772 bool MediaCodecVideoEncoder::MaybeReconfigureEncoder( |
784 const webrtc::VideoFrame& frame) { | 773 const webrtc::VideoFrame& frame) { |
785 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 774 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_); |
786 | 775 |
787 const bool is_texture_frame = | 776 const bool is_texture_frame = |
788 frame.video_frame_buffer()->native_handle() != nullptr; | 777 frame.video_frame_buffer()->native_handle() != nullptr; |
789 const bool reconfigure_due_to_format = is_texture_frame != use_surface_; | 778 const bool reconfigure_due_to_format = is_texture_frame != use_surface_; |
790 const bool reconfigure_due_to_size = | 779 const bool reconfigure_due_to_size = |
791 frame.width() != width_ || frame.height() != height_; | 780 frame.width() != width_ || frame.height() != height_; |
792 | 781 |
793 if (reconfigure_due_to_format) { | 782 if (reconfigure_due_to_format) { |
794 ALOGD << "Reconfigure encoder due to format change. " | 783 ALOGD << "Reconfigure encoder due to format change. " |
795 << (use_surface_ ? | 784 << (use_surface_ ? |
796 "Reconfiguring to encode from byte buffer." : | 785 "Reconfiguring to encode from byte buffer." : |
797 "Reconfiguring to encode from texture."); | 786 "Reconfiguring to encode from texture."); |
798 LogStatistics(true); | 787 LogStatistics(true); |
799 } | 788 } |
800 if (reconfigure_due_to_size) { | 789 if (reconfigure_due_to_size) { |
801 ALOGW << "Reconfigure encoder due to frame resolution change from " | 790 ALOGW << "Reconfigure encoder due to frame resolution change from " |
802 << width_ << " x " << height_ << " to " << frame.width() << " x " | 791 << width_ << " x " << height_ << " to " << frame.width() << " x " |
803 << frame.height(); | 792 << frame.height(); |
804 LogStatistics(true); | 793 LogStatistics(true); |
805 width_ = frame.width(); | 794 width_ = frame.width(); |
806 height_ = frame.height(); | 795 height_ = frame.height(); |
807 } | 796 } |
808 | 797 |
809 if (!reconfigure_due_to_format && !reconfigure_due_to_size) | 798 if (!reconfigure_due_to_format && !reconfigure_due_to_size) |
810 return true; | 799 return true; |
811 | 800 |
812 ReleaseOnCodecThread(); | 801 Release(); |
813 | 802 |
814 return InitEncodeOnCodecThread(width_, height_, 0, 0 , is_texture_frame) == | 803 return InitEncodeInternal(width_, height_, 0, 0, is_texture_frame) == |
815 WEBRTC_VIDEO_CODEC_OK; | 804 WEBRTC_VIDEO_CODEC_OK; |
816 } | 805 } |
817 | 806 |
818 bool MediaCodecVideoEncoder::EncodeByteBufferOnCodecThread(JNIEnv* jni, | 807 bool MediaCodecVideoEncoder::EncodeByteBuffer(JNIEnv* jni, |
819 bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index) { | 808 bool key_frame, |
820 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 809 const webrtc::VideoFrame& frame, |
810 int input_buffer_index) { | |
811 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_); | |
821 RTC_CHECK(!use_surface_); | 812 RTC_CHECK(!use_surface_); |
822 | 813 |
823 jobject j_input_buffer = input_buffers_[input_buffer_index]; | 814 jobject j_input_buffer = input_buffers_[input_buffer_index]; |
824 uint8_t* yuv_buffer = | 815 uint8_t* yuv_buffer = |
825 reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer)); | 816 reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer)); |
826 if (CheckException(jni)) { | 817 if (CheckException(jni)) { |
827 ALOGE << "Exception in get direct buffer address."; | 818 ALOGE << "Exception in get direct buffer address."; |
828 ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */); | 819 ProcessHWError(true /* reset_if_fallback_unavailable */); |
829 return false; | 820 return false; |
830 } | 821 } |
831 RTC_CHECK(yuv_buffer) << "Indirect buffer??"; | 822 RTC_CHECK(yuv_buffer) << "Indirect buffer??"; |
832 RTC_CHECK(!libyuv::ConvertFromI420( | 823 RTC_CHECK(!libyuv::ConvertFromI420( |
833 frame.video_frame_buffer()->DataY(), | 824 frame.video_frame_buffer()->DataY(), |
834 frame.video_frame_buffer()->StrideY(), | 825 frame.video_frame_buffer()->StrideY(), |
835 frame.video_frame_buffer()->DataU(), | 826 frame.video_frame_buffer()->DataU(), |
836 frame.video_frame_buffer()->StrideU(), | 827 frame.video_frame_buffer()->StrideU(), |
837 frame.video_frame_buffer()->DataV(), | 828 frame.video_frame_buffer()->DataV(), |
838 frame.video_frame_buffer()->StrideV(), | 829 frame.video_frame_buffer()->StrideV(), |
839 yuv_buffer, width_, width_, height_, encoder_fourcc_)) | 830 yuv_buffer, width_, width_, height_, encoder_fourcc_)) |
840 << "ConvertFromI420 failed"; | 831 << "ConvertFromI420 failed"; |
841 | 832 |
842 bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_, | 833 bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_, |
843 j_encode_buffer_method_, | 834 j_encode_buffer_method_, |
844 key_frame, | 835 key_frame, |
845 input_buffer_index, | 836 input_buffer_index, |
846 yuv_size_, | 837 yuv_size_, |
847 current_timestamp_us_); | 838 current_timestamp_us_); |
848 if (CheckException(jni)) { | 839 if (CheckException(jni)) { |
849 ALOGE << "Exception in encode buffer."; | 840 ALOGE << "Exception in encode buffer."; |
850 ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */); | 841 ProcessHWError(true /* reset_if_fallback_unavailable */); |
851 return false; | 842 return false; |
852 } | 843 } |
853 return encode_status; | 844 return encode_status; |
854 } | 845 } |
855 | 846 |
856 bool MediaCodecVideoEncoder::EncodeTextureOnCodecThread(JNIEnv* jni, | 847 bool MediaCodecVideoEncoder::EncodeTexture(JNIEnv* jni, |
857 bool key_frame, const webrtc::VideoFrame& frame) { | 848 bool key_frame, |
858 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 849 const webrtc::VideoFrame& frame) { |
850 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_); | |
859 RTC_CHECK(use_surface_); | 851 RTC_CHECK(use_surface_); |
860 NativeHandleImpl* handle = static_cast<NativeHandleImpl*>( | 852 NativeHandleImpl* handle = static_cast<NativeHandleImpl*>( |
861 frame.video_frame_buffer()->native_handle()); | 853 frame.video_frame_buffer()->native_handle()); |
862 jfloatArray sampling_matrix = handle->sampling_matrix.ToJava(jni); | 854 jfloatArray sampling_matrix = handle->sampling_matrix.ToJava(jni); |
863 bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_, | 855 bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_, |
864 j_encode_texture_method_, | 856 j_encode_texture_method_, |
865 key_frame, | 857 key_frame, |
866 handle->oes_texture_id, | 858 handle->oes_texture_id, |
867 sampling_matrix, | 859 sampling_matrix, |
868 current_timestamp_us_); | 860 current_timestamp_us_); |
869 if (CheckException(jni)) { | 861 if (CheckException(jni)) { |
870 ALOGE << "Exception in encode texture."; | 862 ALOGE << "Exception in encode texture."; |
871 ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */); | 863 ProcessHWError(true /* reset_if_fallback_unavailable */); |
872 return false; | 864 return false; |
873 } | 865 } |
874 return encode_status; | 866 return encode_status; |
875 } | 867 } |
876 | 868 |
877 int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread( | 869 int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallback( |
878 webrtc::EncodedImageCallback* callback) { | 870 webrtc::EncodedImageCallback* callback) { |
879 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 871 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_); |
880 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 872 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
881 ScopedLocalRefFrame local_ref_frame(jni); | 873 ScopedLocalRefFrame local_ref_frame(jni); |
882 callback_ = callback; | 874 callback_ = callback; |
883 return WEBRTC_VIDEO_CODEC_OK; | 875 return WEBRTC_VIDEO_CODEC_OK; |
884 } | 876 } |
885 | 877 |
886 int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() { | 878 int32_t MediaCodecVideoEncoder::Release() { |
887 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 879 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_); |
888 if (!inited_) { | 880 if (!inited_) { |
889 return WEBRTC_VIDEO_CODEC_OK; | 881 return WEBRTC_VIDEO_CODEC_OK; |
890 } | 882 } |
891 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 883 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
892 ALOGD << "EncoderReleaseOnCodecThread: Frames received: " << | 884 ALOGD << "EncoderRelease: Frames received: " << frames_received_ |
893 frames_received_ << ". Encoded: " << frames_encoded_ << | 885 << ". Encoded: " << frames_encoded_ |
894 ". Dropped: " << frames_dropped_media_encoder_; | 886 << ". Dropped: " << frames_dropped_media_encoder_; |
895 ScopedLocalRefFrame local_ref_frame(jni); | 887 ScopedLocalRefFrame local_ref_frame(jni); |
896 for (size_t i = 0; i < input_buffers_.size(); ++i) | 888 for (size_t i = 0; i < input_buffers_.size(); ++i) |
897 jni->DeleteGlobalRef(input_buffers_[i]); | 889 jni->DeleteGlobalRef(input_buffers_[i]); |
898 input_buffers_.clear(); | 890 input_buffers_.clear(); |
899 jni->CallVoidMethod(*j_media_codec_video_encoder_, j_release_method_); | 891 jni->CallVoidMethod(*j_media_codec_video_encoder_, j_release_method_); |
900 if (CheckException(jni)) { | 892 if (CheckException(jni)) { |
901 ALOGE << "Exception in release."; | 893 ALOGE << "Exception in release."; |
902 ProcessHWErrorOnCodecThread(false /* reset_if_fallback_unavailable */); | 894 ProcessHWError(false /* reset_if_fallback_unavailable */); |
903 return WEBRTC_VIDEO_CODEC_ERROR; | 895 return WEBRTC_VIDEO_CODEC_ERROR; |
904 } | 896 } |
905 rtc::MessageQueueManager::Clear(this); | 897 { |
906 inited_ = false; | 898 #if RTC_DCHECK_IS_ON |
899 rtc::CritScope lock(&inited_crit_); | |
900 #endif | |
901 inited_ = false; | |
902 } | |
907 use_surface_ = false; | 903 use_surface_ = false; |
908 ALOGD << "EncoderReleaseOnCodecThread done."; | 904 encode_task_.reset(nullptr); |
905 weak_factory_.reset(nullptr); | |
906 ALOGD << "EncoderRelease done."; | |
909 return WEBRTC_VIDEO_CODEC_OK; | 907 return WEBRTC_VIDEO_CODEC_OK; |
910 } | 908 } |
911 | 909 |
912 int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate, | 910 int32_t MediaCodecVideoEncoder::SetRateAllocation( |
913 uint32_t frame_rate) { | 911 const webrtc::BitrateAllocation& rate_allocation, |
914 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 912 uint32_t frame_rate) { |
913 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_); | |
914 const uint32_t new_bit_rate = rate_allocation.get_sum_kbps(); | |
915 if (sw_fallback_required_) | 915 if (sw_fallback_required_) |
916 return WEBRTC_VIDEO_CODEC_OK; | 916 return WEBRTC_VIDEO_CODEC_OK; |
917 frame_rate = (frame_rate < MAX_ALLOWED_VIDEO_FPS) ? | 917 frame_rate = |
918 frame_rate : MAX_ALLOWED_VIDEO_FPS; | 918 (frame_rate < MAX_ALLOWED_VIDEO_FPS) ? frame_rate : MAX_ALLOWED_VIDEO_FPS; |
919 if (last_set_bitrate_kbps_ == new_bit_rate && | 919 if (last_set_bitrate_kbps_ == new_bit_rate && last_set_fps_ == frame_rate) { |
920 last_set_fps_ == frame_rate) { | |
921 return WEBRTC_VIDEO_CODEC_OK; | 920 return WEBRTC_VIDEO_CODEC_OK; |
922 } | 921 } |
923 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 922 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
924 ScopedLocalRefFrame local_ref_frame(jni); | 923 ScopedLocalRefFrame local_ref_frame(jni); |
925 if (new_bit_rate > 0) { | 924 if (new_bit_rate > 0) { |
926 last_set_bitrate_kbps_ = new_bit_rate; | 925 last_set_bitrate_kbps_ = new_bit_rate; |
927 } | 926 } |
928 if (frame_rate > 0) { | 927 if (frame_rate > 0) { |
929 last_set_fps_ = frame_rate; | 928 last_set_fps_ = frame_rate; |
930 } | 929 } |
931 bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_, | 930 bool ret = |
932 j_set_rates_method_, | 931 jni->CallBooleanMethod(*j_media_codec_video_encoder_, j_set_rates_method_, |
933 last_set_bitrate_kbps_, | 932 last_set_bitrate_kbps_, last_set_fps_); |
934 last_set_fps_); | |
935 if (CheckException(jni) || !ret) { | 933 if (CheckException(jni) || !ret) { |
936 ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */); | 934 ProcessHWError(true /* reset_if_fallback_unavailable */); |
937 return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_OK | 935 return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_OK |
938 : WEBRTC_VIDEO_CODEC_ERROR; | 936 : WEBRTC_VIDEO_CODEC_ERROR; |
939 } | 937 } |
940 return WEBRTC_VIDEO_CODEC_OK; | 938 return WEBRTC_VIDEO_CODEC_OK; |
941 } | 939 } |
942 | 940 |
943 int MediaCodecVideoEncoder::GetOutputBufferInfoIndex( | 941 int MediaCodecVideoEncoder::GetOutputBufferInfoIndex( |
944 JNIEnv* jni, | 942 JNIEnv* jni, |
945 jobject j_output_buffer_info) { | 943 jobject j_output_buffer_info) { |
946 return GetIntField(jni, j_output_buffer_info, j_info_index_field_); | 944 return GetIntField(jni, j_output_buffer_info, j_info_index_field_); |
(...skipping 12 matching lines...) Expand all Loading... | |
959 } | 957 } |
960 | 958 |
961 jlong MediaCodecVideoEncoder::GetOutputBufferInfoPresentationTimestampUs( | 959 jlong MediaCodecVideoEncoder::GetOutputBufferInfoPresentationTimestampUs( |
962 JNIEnv* jni, | 960 JNIEnv* jni, |
963 jobject j_output_buffer_info) { | 961 jobject j_output_buffer_info) { |
964 return GetLongField( | 962 return GetLongField( |
965 jni, j_output_buffer_info, j_info_presentation_timestamp_us_field_); | 963 jni, j_output_buffer_info, j_info_presentation_timestamp_us_field_); |
966 } | 964 } |
967 | 965 |
968 bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) { | 966 bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) { |
969 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 967 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_); |
970 | 968 |
971 while (true) { | 969 while (true) { |
972 jobject j_output_buffer_info = jni->CallObjectMethod( | 970 jobject j_output_buffer_info = jni->CallObjectMethod( |
973 *j_media_codec_video_encoder_, j_dequeue_output_buffer_method_); | 971 *j_media_codec_video_encoder_, j_dequeue_output_buffer_method_); |
974 if (CheckException(jni)) { | 972 if (CheckException(jni)) { |
975 ALOGE << "Exception in set dequeue output buffer."; | 973 ALOGE << "Exception in set dequeue output buffer."; |
976 ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */); | 974 ProcessHWError(true /* reset_if_fallback_unavailable */); |
977 return WEBRTC_VIDEO_CODEC_ERROR; | 975 return WEBRTC_VIDEO_CODEC_ERROR; |
978 } | 976 } |
979 if (IsNull(jni, j_output_buffer_info)) { | 977 if (IsNull(jni, j_output_buffer_info)) { |
980 break; | 978 break; |
981 } | 979 } |
982 | 980 |
983 int output_buffer_index = | 981 int output_buffer_index = |
984 GetOutputBufferInfoIndex(jni, j_output_buffer_info); | 982 GetOutputBufferInfoIndex(jni, j_output_buffer_info); |
985 if (output_buffer_index == -1) { | 983 if (output_buffer_index == -1) { |
986 ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */); | 984 ProcessHWError(true /* reset_if_fallback_unavailable */); |
987 return false; | 985 return false; |
988 } | 986 } |
989 | 987 |
990 // Get key and config frame flags. | 988 // Get key and config frame flags. |
991 jobject j_output_buffer = | 989 jobject j_output_buffer = |
992 GetOutputBufferInfoBuffer(jni, j_output_buffer_info); | 990 GetOutputBufferInfoBuffer(jni, j_output_buffer_info); |
993 bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info); | 991 bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info); |
994 | 992 |
995 // Get frame timestamps from a queue - for non config frames only. | 993 // Get frame timestamps from a queue - for non config frames only. |
996 int64_t encoding_start_time_ms = 0; | 994 int64_t encoding_start_time_ms = 0; |
997 int64_t frame_encoding_time_ms = 0; | 995 int64_t frame_encoding_time_ms = 0; |
998 last_output_timestamp_ms_ = | 996 last_output_timestamp_ms_ = |
999 GetOutputBufferInfoPresentationTimestampUs(jni, j_output_buffer_info) / | 997 GetOutputBufferInfoPresentationTimestampUs(jni, j_output_buffer_info) / |
1000 rtc::kNumMicrosecsPerMillisec; | 998 rtc::kNumMicrosecsPerMillisec; |
1001 if (!input_frame_infos_.empty()) { | 999 if (!input_frame_infos_.empty()) { |
1002 const InputFrameInfo& frame_info = input_frame_infos_.front(); | 1000 const InputFrameInfo& frame_info = input_frame_infos_.front(); |
1003 output_timestamp_ = frame_info.frame_timestamp; | 1001 output_timestamp_ = frame_info.frame_timestamp; |
1004 output_render_time_ms_ = frame_info.frame_render_time_ms; | 1002 output_render_time_ms_ = frame_info.frame_render_time_ms; |
1005 output_rotation_ = frame_info.rotation; | 1003 output_rotation_ = frame_info.rotation; |
1006 encoding_start_time_ms = frame_info.encode_start_time; | 1004 encoding_start_time_ms = frame_info.encode_start_time; |
1007 input_frame_infos_.pop_front(); | 1005 input_frame_infos_.pop_front(); |
1008 } | 1006 } |
1009 | 1007 |
1010 // Extract payload. | 1008 // Extract payload. |
1011 size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer); | 1009 size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer); |
1012 uint8_t* payload = reinterpret_cast<uint8_t*>( | 1010 uint8_t* payload = reinterpret_cast<uint8_t*>( |
1013 jni->GetDirectBufferAddress(j_output_buffer)); | 1011 jni->GetDirectBufferAddress(j_output_buffer)); |
1014 if (CheckException(jni)) { | 1012 if (CheckException(jni)) { |
1015 ALOGE << "Exception in get direct buffer address."; | 1013 ALOGE << "Exception in get direct buffer address."; |
1016 ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */); | 1014 ProcessHWError(true /* reset_if_fallback_unavailable */); |
1017 return WEBRTC_VIDEO_CODEC_ERROR; | 1015 return WEBRTC_VIDEO_CODEC_ERROR; |
1018 } | 1016 } |
1019 | 1017 |
1020 // Callback - return encoded frame. | 1018 // Callback - return encoded frame. |
1021 const VideoCodecType codec_type = | 1019 const VideoCodecType codec_type = |
1022 webrtc::PayloadNameToCodecType(codec_.name) | 1020 webrtc::PayloadNameToCodecType(codec_.name) |
1023 .value_or(webrtc::kVideoCodecUnknown); | 1021 .value_or(webrtc::kVideoCodecUnknown); |
1024 webrtc::EncodedImageCallback::Result callback_result( | 1022 webrtc::EncodedImageCallback::Result callback_result( |
1025 webrtc::EncodedImageCallback::Result::OK); | 1023 webrtc::EncodedImageCallback::Result::OK); |
1026 if (callback_) { | 1024 if (callback_) { |
(...skipping 68 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1095 image->qp_ = qp; | 1093 image->qp_ = qp; |
1096 } | 1094 } |
1097 // For H.264 search for start codes. | 1095 // For H.264 search for start codes. |
1098 const std::vector<webrtc::H264::NaluIndex> nalu_idxs = | 1096 const std::vector<webrtc::H264::NaluIndex> nalu_idxs = |
1099 webrtc::H264::FindNaluIndices(payload, payload_size); | 1097 webrtc::H264::FindNaluIndices(payload, payload_size); |
1100 if (nalu_idxs.empty()) { | 1098 if (nalu_idxs.empty()) { |
1101 ALOGE << "Start code is not found!"; | 1099 ALOGE << "Start code is not found!"; |
1102 ALOGE << "Data:" << image->_buffer[0] << " " << image->_buffer[1] | 1100 ALOGE << "Data:" << image->_buffer[0] << " " << image->_buffer[1] |
1103 << " " << image->_buffer[2] << " " << image->_buffer[3] | 1101 << " " << image->_buffer[2] << " " << image->_buffer[3] |
1104 << " " << image->_buffer[4] << " " << image->_buffer[5]; | 1102 << " " << image->_buffer[4] << " " << image->_buffer[5]; |
1105 ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */); | 1103 ProcessHWError(true /* reset_if_fallback_unavailable */); |
1106 return false; | 1104 return false; |
1107 } | 1105 } |
1108 header.VerifyAndAllocateFragmentationHeader(nalu_idxs.size()); | 1106 header.VerifyAndAllocateFragmentationHeader(nalu_idxs.size()); |
1109 for (size_t i = 0; i < nalu_idxs.size(); i++) { | 1107 for (size_t i = 0; i < nalu_idxs.size(); i++) { |
1110 header.fragmentationOffset[i] = nalu_idxs[i].payload_start_offset; | 1108 header.fragmentationOffset[i] = nalu_idxs[i].payload_start_offset; |
1111 header.fragmentationLength[i] = nalu_idxs[i].payload_size; | 1109 header.fragmentationLength[i] = nalu_idxs[i].payload_size; |
1112 header.fragmentationPlType[i] = 0; | 1110 header.fragmentationPlType[i] = 0; |
1113 header.fragmentationTimeDiff[i] = 0; | 1111 header.fragmentationTimeDiff[i] = 0; |
1114 } | 1112 } |
1115 } | 1113 } |
1116 | 1114 |
1117 callback_result = callback_->OnEncodedImage(*image, &info, &header); | 1115 callback_result = callback_->OnEncodedImage(*image, &info, &header); |
1118 } | 1116 } |
1119 | 1117 |
1120 // Return output buffer back to the encoder. | 1118 // Return output buffer back to the encoder. |
1121 bool success = jni->CallBooleanMethod(*j_media_codec_video_encoder_, | 1119 bool success = jni->CallBooleanMethod(*j_media_codec_video_encoder_, |
1122 j_release_output_buffer_method_, | 1120 j_release_output_buffer_method_, |
1123 output_buffer_index); | 1121 output_buffer_index); |
1124 if (CheckException(jni) || !success) { | 1122 if (CheckException(jni) || !success) { |
1125 ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */); | 1123 ProcessHWError(true /* reset_if_fallback_unavailable */); |
1126 return false; | 1124 return false; |
1127 } | 1125 } |
1128 | 1126 |
1129 // Print per frame statistics. | 1127 // Print per frame statistics. |
1130 if (encoding_start_time_ms > 0) { | 1128 if (encoding_start_time_ms > 0) { |
1131 frame_encoding_time_ms = rtc::TimeMillis() - encoding_start_time_ms; | 1129 frame_encoding_time_ms = rtc::TimeMillis() - encoding_start_time_ms; |
1132 } | 1130 } |
1133 if (frames_encoded_ < kMaxEncodedLogFrames) { | 1131 if (frames_encoded_ < kMaxEncodedLogFrames) { |
1134 int current_latency = | 1132 int current_latency = |
1135 (int)(last_input_timestamp_ms_ - last_output_timestamp_ms_); | 1133 (int)(last_input_timestamp_ms_ - last_output_timestamp_ms_); |
(...skipping 169 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1305 return supported_codecs_; | 1303 return supported_codecs_; |
1306 } | 1304 } |
1307 | 1305 |
1308 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( | 1306 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( |
1309 webrtc::VideoEncoder* encoder) { | 1307 webrtc::VideoEncoder* encoder) { |
1310 ALOGD << "Destroy video encoder."; | 1308 ALOGD << "Destroy video encoder."; |
1311 delete encoder; | 1309 delete encoder; |
1312 } | 1310 } |
1313 | 1311 |
1314 } // namespace webrtc_jni | 1312 } // namespace webrtc_jni |
OLD | NEW |