/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

// NOTICE: androidmediaencoder_jni.h must be included before
// androidmediacodeccommon.h to avoid build errors.
#include "webrtc/api/java/jni/androidmediaencoder_jni.h"

#include <algorithm>
#include <list>
#include <memory>

#include "third_party/libyuv/include/libyuv/convert.h"
#include "third_party/libyuv/include/libyuv/convert_from.h"
#include "third_party/libyuv/include/libyuv/video_common.h"
#include "webrtc/api/java/jni/androidmediacodeccommon.h"
#include "webrtc/api/java/jni/classreferenceholder.h"
#include "webrtc/api/java/jni/native_handle_impl.h"
#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/thread.h"
#include "webrtc/base/thread_checker.h"
#include "webrtc/base/timeutils.h"
#include "webrtc/common_types.h"
#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/modules/video_coding/utility/h264_bitstream_parser.h"
#include "webrtc/modules/video_coding/utility/quality_scaler.h"
#include "webrtc/modules/video_coding/utility/vp8_header_parser.h"
#include "webrtc/system_wrappers/include/field_trial.h"
#include "webrtc/system_wrappers/include/logcat_trace_context.h"

using rtc::Bind;
using rtc::Thread;
using rtc::ThreadManager;

using webrtc::CodecSpecificInfo;
using webrtc::EncodedImage;
using webrtc::VideoFrame;
using webrtc::RTPFragmentationHeader;
using webrtc::VideoCodec;
using webrtc::VideoCodecType;
using webrtc::kVideoCodecH264;
using webrtc::kVideoCodecVP8;
using webrtc::kVideoCodecVP9;
using webrtc::QualityScaler;

namespace webrtc_jni {

// H.264 start code length.
#define H264_SC_LENGTH 4
// Maximum allowed NALUs in one output frame.
#define MAX_NALUS_PERFRAME 32
// Maximum supported HW video encoder resolution.
#define MAX_VIDEO_WIDTH 1280
#define MAX_VIDEO_HEIGHT 1280
// Maximum supported HW video encoder fps.
#define MAX_VIDEO_FPS 30
// Maximum allowed fps value in SetRates() call.
#define MAX_ALLOWED_VIDEO_FPS 60
// Maximum allowed frames in encoder input queue.
#define MAX_ENCODER_Q_SIZE 2
// Maximum number of dropped frames caused by a full encoder queue - exceeding
// this threshold means the encoder probably got stuck and needs to be reset.
#define ENCODER_STALL_FRAMEDROP_THRESHOLD 60

// Logging macros.
#define TAG_ENCODER "MediaCodecVideoEncoder"
#ifdef TRACK_BUFFER_TIMING
#define ALOGV(...) \
  __android_log_print(ANDROID_LOG_VERBOSE, TAG_ENCODER, __VA_ARGS__)
#else
#define ALOGV(...)
#endif
#define ALOGD LOG_TAG(rtc::LS_INFO, TAG_ENCODER)
#define ALOGW LOG_TAG(rtc::LS_WARNING, TAG_ENCODER)
#define ALOGE LOG_TAG(rtc::LS_ERROR, TAG_ENCODER)

namespace {
// Maximum time limit between incoming frames before requesting a key frame.
const size_t kFrameDiffThresholdMs = 1100;
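// Minimum number of frames that must be received between the key frames forced
// by the low-framerate heuristic in EncodeOnCodecThread(), so that a key frame
// is not requested for every frame at very low framerates.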
const int kMinKeyFrameInterval = 2;
}  // namespace

// MediaCodecVideoEncoder is a webrtc::VideoEncoder implementation that uses
// Android's MediaCodec SDK API behind the scenes to implement (hopefully)
// HW-backed video encode. This C++ class is implemented as a very thin shim,
// delegating all of the interesting work to org.webrtc.MediaCodecVideoEncoder.
// MediaCodecVideoEncoder is created, operated, and destroyed on a single
// thread, currently the libjingle Worker thread.
class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
                               public rtc::MessageHandler {
 public:
  virtual ~MediaCodecVideoEncoder();
  MediaCodecVideoEncoder(JNIEnv* jni,
                         VideoCodecType codecType,
                         jobject egl_context);

  // webrtc::VideoEncoder implementation. Everything trampolines to
  // |codec_thread_| for execution.
  int32_t InitEncode(const webrtc::VideoCodec* codec_settings,
                     int32_t /* number_of_cores */,
                     size_t /* max_payload_size */) override;
  int32_t Encode(const webrtc::VideoFrame& input_image,
                 const webrtc::CodecSpecificInfo* /* codec_specific_info */,
                 const std::vector<webrtc::FrameType>* frame_types) override;
  int32_t RegisterEncodeCompleteCallback(
      webrtc::EncodedImageCallback* callback) override;
  int32_t Release() override;
  int32_t SetChannelParameters(uint32_t /* packet_loss */,
                               int64_t /* rtt */) override;
  int32_t SetRates(uint32_t new_bit_rate, uint32_t frame_rate) override;

  // rtc::MessageHandler implementation.
  void OnMessage(rtc::Message* msg) override;

  void OnDroppedFrame() override;

  bool SupportsNativeHandle() const override { return egl_context_ != nullptr; }
  const char* ImplementationName() const override;

 private:
  // ResetCodecOnCodecThread() calls ReleaseOnCodecThread() and
  // InitEncodeOnCodecThread() in an attempt to restore the codec to an
  // operable state. Necessary after all manner of OMX-layer errors.
  bool ResetCodecOnCodecThread();

  // Implementation of webrtc::VideoEncoder methods above, all running on the
  // codec thread exclusively.
  //
  // If width==0 then this is assumed to be a re-initialization and the
  // previously-current values are reused instead of the passed parameters
  // (makes it easier to reason about thread-safety).
  int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps,
                                  bool use_surface);
  // Reconfigure to match |frame| in width, height. Also reconfigures the
  // encoder if |frame| is a texture/byte buffer and the encoder is initialized
  // for byte buffer/texture. Returns false if reconfiguring fails.
  bool MaybeReconfigureEncoderOnCodecThread(const webrtc::VideoFrame& frame);
  int32_t EncodeOnCodecThread(
      const webrtc::VideoFrame& input_image,
      const std::vector<webrtc::FrameType>* frame_types,
      const int64_t frame_input_time_ms);
  bool EncodeByteBufferOnCodecThread(JNIEnv* jni,
      bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index);
  bool EncodeTextureOnCodecThread(JNIEnv* jni,
      bool key_frame, const webrtc::VideoFrame& frame);

  int32_t RegisterEncodeCompleteCallbackOnCodecThread(
      webrtc::EncodedImageCallback* callback);
  int32_t ReleaseOnCodecThread();
  int32_t SetRatesOnCodecThread(uint32_t new_bit_rate, uint32_t frame_rate);
  void OnDroppedFrameOnCodecThread();

  // Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members.
  int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info);
  jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info);
  bool GetOutputBufferInfoIsKeyFrame(JNIEnv* jni, jobject j_output_buffer_info);
  jlong GetOutputBufferInfoPresentationTimestampUs(
      JNIEnv* jni, jobject j_output_buffer_info);

  // Deliver any outputs pending in the MediaCodec to our |callback_| and return
  // true on success.
  bool DeliverPendingOutputs(JNIEnv* jni);

  // Search for H.264 start codes.
  int32_t NextNaluPosition(uint8_t* buffer, size_t buffer_size);

  // Displays encoder statistics.
  void LogStatistics(bool force_log);

  // Type of video codec.
  VideoCodecType codecType_;

  // Valid all the time since RegisterEncodeCompleteCallback() Invoke()s to
  // |codec_thread_| synchronously.
  webrtc::EncodedImageCallback* callback_;

  // State that is constant for the lifetime of this object once the ctor
  // returns.
  std::unique_ptr<Thread>
      codec_thread_;  // Thread on which to operate MediaCodec.
  rtc::ThreadChecker codec_thread_checker_;
  ScopedGlobalRef<jclass> j_media_codec_video_encoder_class_;
  ScopedGlobalRef<jobject> j_media_codec_video_encoder_;
  jmethodID j_init_encode_method_;
  jmethodID j_get_input_buffers_method_;
  jmethodID j_dequeue_input_buffer_method_;
  jmethodID j_encode_buffer_method_;
  jmethodID j_encode_texture_method_;
  jmethodID j_release_method_;
  jmethodID j_set_rates_method_;
  jmethodID j_dequeue_output_buffer_method_;
  jmethodID j_release_output_buffer_method_;
  jfieldID j_color_format_field_;
  jfieldID j_info_index_field_;
  jfieldID j_info_buffer_field_;
  jfieldID j_info_is_key_frame_field_;
  jfieldID j_info_presentation_timestamp_us_field_;

  // State that is valid only between InitEncode() and the next Release().
  // Touched only on codec_thread_ so no explicit synchronization necessary.
  int width_;   // Frame width in pixels.
  int height_;  // Frame height in pixels.
  bool inited_;
  bool use_surface_;
  uint16_t picture_id_;
  enum libyuv::FourCC encoder_fourcc_;  // Encoder color space format.
  int last_set_bitrate_kbps_;  // Last-requested bitrate in kbps.
  int last_set_fps_;  // Last-requested frame rate.
  int64_t current_timestamp_us_;  // Current frame timestamps in us.
  int frames_received_;  // Number of frames received by encoder.
  int frames_encoded_;  // Number of frames encoded by encoder.
  int frames_dropped_media_encoder_;  // Number of frames dropped by encoder.
  // Number of dropped frames caused by full queue.
  int consecutive_full_queue_frame_drops_;
  int64_t stat_start_time_ms_;  // Start time for statistics.
  int current_frames_;  // Number of frames in the current statistics interval.
  int current_bytes_;  // Encoded bytes in the current statistics interval.
  int current_acc_qp_;  // Accumulated QP in the current statistics interval.
  int current_encoding_time_ms_;  // Overall encoding time in the current
                                  // statistics interval.
  int64_t last_input_timestamp_ms_;   // Timestamp of last received yuv frame.
  int64_t last_output_timestamp_ms_;  // Timestamp of last encoded frame.

  struct InputFrameInfo {
    InputFrameInfo(int64_t encode_start_time,
                   int32_t frame_timestamp,
                   int64_t frame_render_time_ms,
                   webrtc::VideoRotation rotation)
        : encode_start_time(encode_start_time),
          frame_timestamp(frame_timestamp),
          frame_render_time_ms(frame_render_time_ms),
          rotation(rotation) {}
    // Time when video frame is sent to encoder input.
    const int64_t encode_start_time;

    // Input frame information.
    const int32_t frame_timestamp;
    const int64_t frame_render_time_ms;
    const webrtc::VideoRotation rotation;
  };
  std::list<InputFrameInfo> input_frame_infos_;
  int32_t output_timestamp_;       // Last output frame timestamp from
                                   // |input_frame_infos_|.
  int64_t output_render_time_ms_;  // Last output frame render time from
                                   // |input_frame_infos_|.
  webrtc::VideoRotation output_rotation_;  // Last output frame rotation from
                                           // |input_frame_infos_|.
  // Frame size in bytes fed to MediaCodec.
  int yuv_size_;
  // True only between a callback_->Encoded() call returning a positive value
  // and the next Encode() call, which is then ignored.
  bool drop_next_input_frame_;
  // Global references; must be deleted in Release().
  std::vector<jobject> input_buffers_;
  QualityScaler quality_scaler_;
  // Dynamic resolution change, off by default.
  bool scale_;

  // H264 bitstream parser, used to extract QP from encoded bitstreams.
  webrtc::H264BitstreamParser h264_bitstream_parser_;

  // VP9 variables to populate codec specific structure.
  webrtc::GofInfoVP9 gof_;  // Contains each frame's temporal information for
                            // non-flexible VP9 mode.
  uint8_t tl0_pic_idx_;
  size_t gof_idx_;

  // EGL context - owned by factory, should not be allocated/destroyed
  // by MediaCodecVideoEncoder.
  jobject egl_context_;

  // Temporary fix for VP8.
  // Sends a key frame if frames are widely spaced apart in time (possibly
  // corresponding to a large image change).
  int64_t last_frame_received_ms_;
  int frames_received_since_last_key_;
  webrtc::VideoCodecMode codec_mode_;
};

MediaCodecVideoEncoder::~MediaCodecVideoEncoder() {
  // Call Release() to ensure no more callbacks to us after we are deleted.
  Release();
}

MediaCodecVideoEncoder::MediaCodecVideoEncoder(
    JNIEnv* jni, VideoCodecType codecType, jobject egl_context) :
    codecType_(codecType),
    callback_(NULL),
    codec_thread_(new Thread()),
    j_media_codec_video_encoder_class_(
        jni,
        FindClass(jni, "org/webrtc/MediaCodecVideoEncoder")),
    j_media_codec_video_encoder_(
        jni,
        jni->NewObject(*j_media_codec_video_encoder_class_,
                       GetMethodID(jni,
                                   *j_media_codec_video_encoder_class_,
                                   "<init>",
                                   "()V"))),
    inited_(false),
    use_surface_(false),
    picture_id_(0),
    egl_context_(egl_context) {
  ScopedLocalRefFrame local_ref_frame(jni);
  // It would be nice to avoid spinning up a new thread per MediaCodec, and
  // instead re-use e.g. the PeerConnectionFactory's |worker_thread_|, but bug
  // 2732 means that deadlocks abound. This class synchronously trampolines
  // to |codec_thread_|, so if anything else can be coming to _us_ from
  // |codec_thread_|, or from any thread holding the |_sendCritSect| described
  // in the bug, we have a problem. For now work around that with a dedicated
  // thread.
  codec_thread_->SetName("MediaCodecVideoEncoder", NULL);
  RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoEncoder";
  codec_thread_checker_.DetachFromThread();
  jclass j_output_buffer_info_class =
      FindClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
  j_init_encode_method_ = GetMethodID(
      jni,
      *j_media_codec_video_encoder_class_,
      "initEncode",
      "(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;"
      "IIIILorg/webrtc/EglBase14$Context;)Z");
  j_get_input_buffers_method_ = GetMethodID(
      jni,
      *j_media_codec_video_encoder_class_,
      "getInputBuffers",
      "()[Ljava/nio/ByteBuffer;");
  j_dequeue_input_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_encoder_class_, "dequeueInputBuffer", "()I");
  j_encode_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_encoder_class_, "encodeBuffer", "(ZIIJ)Z");
  j_encode_texture_method_ = GetMethodID(
      jni, *j_media_codec_video_encoder_class_, "encodeTexture",
      "(ZI[FJ)Z");
  j_release_method_ =
      GetMethodID(jni, *j_media_codec_video_encoder_class_, "release", "()V");
  j_set_rates_method_ = GetMethodID(
      jni, *j_media_codec_video_encoder_class_, "setRates", "(II)Z");
  j_dequeue_output_buffer_method_ = GetMethodID(
      jni,
      *j_media_codec_video_encoder_class_,
      "dequeueOutputBuffer",
      "()Lorg/webrtc/MediaCodecVideoEncoder$OutputBufferInfo;");
  j_release_output_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_encoder_class_, "releaseOutputBuffer", "(I)Z");

  j_color_format_field_ =
      GetFieldID(jni, *j_media_codec_video_encoder_class_, "colorFormat", "I");
  j_info_index_field_ =
      GetFieldID(jni, j_output_buffer_info_class, "index", "I");
  j_info_buffer_field_ = GetFieldID(
      jni, j_output_buffer_info_class, "buffer", "Ljava/nio/ByteBuffer;");
  j_info_is_key_frame_field_ =
      GetFieldID(jni, j_output_buffer_info_class, "isKeyFrame", "Z");
  j_info_presentation_timestamp_us_field_ = GetFieldID(
      jni, j_output_buffer_info_class, "presentationTimestampUs", "J");
  CHECK_EXCEPTION(jni) << "MediaCodecVideoEncoder ctor failed";
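  // Seed rand(), which InitEncodeOnCodecThread() uses to pick random initial
  // values for |picture_id_| and |tl0_pic_idx_|.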
  srand(time(NULL));
  AllowBlockingCalls();
}

int32_t MediaCodecVideoEncoder::InitEncode(
    const webrtc::VideoCodec* codec_settings,
    int32_t /* number_of_cores */,
    size_t /* max_payload_size */) {
  if (codec_settings == NULL) {
    ALOGE << "NULL VideoCodec instance";
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  // Factory should guard against other codecs being used with us.
  RTC_CHECK(codec_settings->codecType == codecType_)
      << "Unsupported codec " << codec_settings->codecType << " for "
      << codecType_;

  codec_mode_ = codec_settings->mode;
  int init_width = codec_settings->width;
  int init_height = codec_settings->height;
  scale_ = codecType_ != kVideoCodecVP9;

  ALOGD << "InitEncode request: " << init_width << " x " << init_height;
  ALOGD << "Encoder automatic resize " << (scale_ ? "enabled" : "disabled");

  if (scale_) {
    if (codecType_ == kVideoCodecVP8) {
      quality_scaler_.Init(
          QualityScaler::kLowVp8QpThreshold, QualityScaler::kBadVp8QpThreshold,
          codec_settings->startBitrate, codec_settings->width,
          codec_settings->height, codec_settings->maxFramerate);
    } else if (codecType_ == kVideoCodecH264) {
      quality_scaler_.Init(QualityScaler::kLowH264QpThreshold,
                           QualityScaler::kBadH264QpThreshold,
                           codec_settings->startBitrate, codec_settings->width,
                           codec_settings->height,
                           codec_settings->maxFramerate);
    } else {
      // When adding support for additional hardware codecs, also configure
      // their QP thresholds for scaling here.
      RTC_NOTREACHED() << "Unsupported codec without configured QP thresholds.";
      scale_ = false;
    }
    QualityScaler::Resolution res = quality_scaler_.GetScaledResolution();
    init_width = res.width;
    init_height = res.height;
    ALOGD << "Scaled resolution: " << init_width << " x " << init_height;
  }

  return codec_thread_->Invoke<int32_t>(
      RTC_FROM_HERE,
      Bind(&MediaCodecVideoEncoder::InitEncodeOnCodecThread, this, init_width,
           init_height, codec_settings->startBitrate,
           codec_settings->maxFramerate,
           codec_settings->expect_encode_from_texture));
}

int32_t MediaCodecVideoEncoder::Encode(
    const webrtc::VideoFrame& frame,
    const webrtc::CodecSpecificInfo* /* codec_specific_info */,
    const std::vector<webrtc::FrameType>* frame_types) {
  return codec_thread_->Invoke<int32_t>(
      RTC_FROM_HERE, Bind(&MediaCodecVideoEncoder::EncodeOnCodecThread, this,
                          frame, frame_types, rtc::TimeMillis()));
}

int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallback(
    webrtc::EncodedImageCallback* callback) {
  return codec_thread_->Invoke<int32_t>(
      RTC_FROM_HERE,
      Bind(&MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread,
           this, callback));
}

int32_t MediaCodecVideoEncoder::Release() {
  ALOGD << "EncoderRelease request";
  return codec_thread_->Invoke<int32_t>(
      RTC_FROM_HERE, Bind(&MediaCodecVideoEncoder::ReleaseOnCodecThread, this));
}

int32_t MediaCodecVideoEncoder::SetChannelParameters(uint32_t /* packet_loss */,
                                                     int64_t /* rtt */) {
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t MediaCodecVideoEncoder::SetRates(uint32_t new_bit_rate,
                                         uint32_t frame_rate) {
  return codec_thread_->Invoke<int32_t>(
      RTC_FROM_HERE, Bind(&MediaCodecVideoEncoder::SetRatesOnCodecThread, this,
                          new_bit_rate, frame_rate));
}

void MediaCodecVideoEncoder::OnMessage(rtc::Message* msg) {
  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);

  // We only ever send one message to |this| directly (not through a Bind()'d
  // functor), so expect no ID/data.
  RTC_CHECK(!msg->message_id) << "Unexpected message!";
  RTC_CHECK(!msg->pdata) << "Unexpected message!";
  if (!inited_) {
    return;
  }

  // It would be nice to recover from a failure here if one happened, but it's
  // unclear how to signal such a failure to the app, so instead we stay silent
  // about it and let the next app-called API method reveal the borkedness.
  DeliverPendingOutputs(jni);

  // If there are no more frames to deliver, we can start polling at a lower
  // rate.
  if (input_frame_infos_.empty()) {
    codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollNoFramesMs, this);
  } else {
    codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollMs, this);
  }

  // Call log statistics here so it's called even if no frames are being
  // delivered.
  LogStatistics(false);
}

bool MediaCodecVideoEncoder::ResetCodecOnCodecThread() {
  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
  ALOGE << "ResetOnCodecThread";
  if (ReleaseOnCodecThread() != WEBRTC_VIDEO_CODEC_OK ||
      InitEncodeOnCodecThread(width_, height_, 0, 0, false) !=
          WEBRTC_VIDEO_CODEC_OK) {
    // TODO(fischman): wouldn't it be nice if there was a way to gracefully
    // degrade to a SW encoder at this point? There isn't one AFAICT :(
    // https://code.google.com/p/webrtc/issues/detail?id=2920
    return false;
  }
  return true;
}

int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
    int width, int height, int kbps, int fps, bool use_surface) {
  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
  RTC_CHECK(!use_surface || egl_context_ != nullptr) << "EGL context not set.";
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);

  ALOGD << "InitEncodeOnCodecThread Type: " << (int)codecType_ << ", " <<
      width << " x " << height << ". Bitrate: " << kbps <<
      " kbps. Fps: " << fps;
  if (kbps == 0) {
    kbps = last_set_bitrate_kbps_;
  }
  if (fps == 0) {
    fps = MAX_VIDEO_FPS;
  }

  width_ = width;
  height_ = height;
  last_set_bitrate_kbps_ = kbps;
  last_set_fps_ = (fps < MAX_VIDEO_FPS) ? fps : MAX_VIDEO_FPS;
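  // I420/NV12 input: a full-resolution Y plane plus two quarter-resolution
  // chroma planes, i.e. 1.5 bytes per pixel.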
  yuv_size_ = width_ * height_ * 3 / 2;
  frames_received_ = 0;
  frames_encoded_ = 0;
  frames_dropped_media_encoder_ = 0;
  consecutive_full_queue_frame_drops_ = 0;
  current_timestamp_us_ = 0;
  stat_start_time_ms_ = rtc::TimeMillis();
  current_frames_ = 0;
  current_bytes_ = 0;
  current_acc_qp_ = 0;
  current_encoding_time_ms_ = 0;
  last_input_timestamp_ms_ = -1;
  last_output_timestamp_ms_ = -1;
  output_timestamp_ = 0;
  output_render_time_ms_ = 0;
  input_frame_infos_.clear();
  drop_next_input_frame_ = false;
  use_surface_ = use_surface;
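  // VP8/VP9 picture IDs are 15-bit values in the RTP payload, so start at a
  // random value in [0, 0x7FFF] and wrap with the same mask in
  // DeliverPendingOutputs().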
  picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF;
  gof_.SetGofInfoVP9(webrtc::TemporalStructureMode::kTemporalStructureMode1);
  tl0_pic_idx_ = static_cast<uint8_t>(rand());
  gof_idx_ = 0;
  last_frame_received_ms_ = -1;
  frames_received_since_last_key_ = kMinKeyFrameInterval;

  // We enforce no extra stride/padding in the format creation step.
  jobject j_video_codec_enum = JavaEnumFromIndexAndClassName(
      jni, "MediaCodecVideoEncoder$VideoCodecType", codecType_);
  const bool encode_status = jni->CallBooleanMethod(
      *j_media_codec_video_encoder_, j_init_encode_method_,
      j_video_codec_enum, width, height, kbps, fps,
      (use_surface ? egl_context_ : nullptr));
  if (!encode_status) {
    ALOGE << "Failed to configure encoder.";
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  CHECK_EXCEPTION(jni);

  if (!use_surface) {
    jobjectArray input_buffers = reinterpret_cast<jobjectArray>(
        jni->CallObjectMethod(*j_media_codec_video_encoder_,
                              j_get_input_buffers_method_));
    CHECK_EXCEPTION(jni);
    if (IsNull(jni, input_buffers)) {
      return WEBRTC_VIDEO_CODEC_ERROR;
    }

    switch (GetIntField(jni, *j_media_codec_video_encoder_,
                        j_color_format_field_)) {
      case COLOR_FormatYUV420Planar:
        encoder_fourcc_ = libyuv::FOURCC_YU12;
        break;
      case COLOR_FormatYUV420SemiPlanar:
      case COLOR_QCOM_FormatYUV420SemiPlanar:
      case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m:
        encoder_fourcc_ = libyuv::FOURCC_NV12;
        break;
      default:
        LOG(LS_ERROR) << "Wrong color format.";
        return WEBRTC_VIDEO_CODEC_ERROR;
    }
    size_t num_input_buffers = jni->GetArrayLength(input_buffers);
    RTC_CHECK(input_buffers_.empty())
        << "Unexpected double InitEncode without Release";
    input_buffers_.resize(num_input_buffers);
    for (size_t i = 0; i < num_input_buffers; ++i) {
      input_buffers_[i] =
          jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
      int64_t yuv_buffer_capacity =
          jni->GetDirectBufferCapacity(input_buffers_[i]);
      CHECK_EXCEPTION(jni);
      RTC_CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity";
    }
  }

  inited_ = true;
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
    const webrtc::VideoFrame& frame,
    const std::vector<webrtc::FrameType>* frame_types,
    const int64_t frame_input_time_ms) {
  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);

  if (!inited_) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }

  bool send_key_frame = false;
  if (codec_mode_ == webrtc::kRealtimeVideo) {
    ++frames_received_since_last_key_;
    int64_t now_ms = rtc::TimeMillis();
    if (last_frame_received_ms_ != -1 &&
        (now_ms - last_frame_received_ms_) > kFrameDiffThresholdMs) {
      // Add a limit to prevent triggering a key frame for every frame at very
      // low framerates (e.g. if the frame diff is always > kFrameDiffThresholdMs).
      if (frames_received_since_last_key_ > kMinKeyFrameInterval) {
        ALOGD << "Send key, frame diff: " << (now_ms - last_frame_received_ms_);
        send_key_frame = true;
      }
      frames_received_since_last_key_ = 0;
    }
    last_frame_received_ms_ = now_ms;
  }

  frames_received_++;
  if (!DeliverPendingOutputs(jni)) {
    if (!ResetCodecOnCodecThread())
      return WEBRTC_VIDEO_CODEC_ERROR;
  }
  if (frames_encoded_ < kMaxEncodedLogFrames) {
    ALOGD << "Encoder frame in # " << (frames_received_ - 1)
          << ". TS: " << (int)(current_timestamp_us_ / 1000)
          << ". Q: " << input_frame_infos_.size() << ". Fps: " << last_set_fps_
          << ". Kbps: " << last_set_bitrate_kbps_;
  }

  if (drop_next_input_frame_) {
    ALOGW << "Encoder drop frame - failed callback.";
    drop_next_input_frame_ = false;
    current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
    frames_dropped_media_encoder_++;
    OnDroppedFrameOnCodecThread();
    return WEBRTC_VIDEO_CODEC_OK;
  }

  RTC_CHECK(frame_types->size() == 1) << "Unexpected stream count";

  // Check if we accumulated too many frames in encoder input buffers and drop
  // frame if so.
  if (input_frame_infos_.size() > MAX_ENCODER_Q_SIZE) {
    ALOGD << "Already " << input_frame_infos_.size()
          << " frames in the queue, dropping"
          << ". TS: " << (int)(current_timestamp_us_ / 1000)
          << ". Fps: " << last_set_fps_
          << ". Consecutive drops: " << consecutive_full_queue_frame_drops_;
    current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
    consecutive_full_queue_frame_drops_++;
    if (consecutive_full_queue_frame_drops_ >=
        ENCODER_STALL_FRAMEDROP_THRESHOLD) {
      ALOGE << "Encoder got stuck. Reset.";
      ResetCodecOnCodecThread();
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
    frames_dropped_media_encoder_++;
    OnDroppedFrameOnCodecThread();
    return WEBRTC_VIDEO_CODEC_OK;
  }
  consecutive_full_queue_frame_drops_ = 0;

  rtc::scoped_refptr<webrtc::VideoFrameBuffer> input_buffer(
      frame.video_frame_buffer());
  if (scale_) {
    // Check framerate before spatial resolution change.
    quality_scaler_.OnEncodeFrame(frame.width(), frame.height());
    const webrtc::QualityScaler::Resolution scaled_resolution =
        quality_scaler_.GetScaledResolution();
    if (scaled_resolution.width != frame.width() ||
        scaled_resolution.height != frame.height()) {
      if (input_buffer->native_handle() != nullptr) {
        input_buffer = static_cast<AndroidTextureBuffer*>(input_buffer.get())
                           ->CropScaleAndRotate(frame.width(), frame.height(),
                                                0, 0,
                                                scaled_resolution.width,
                                                scaled_resolution.height,
                                                webrtc::kVideoRotation_0);
      } else {
        input_buffer = quality_scaler_.GetScaledBuffer(input_buffer);
      }
    }
  }

  VideoFrame input_frame(input_buffer, frame.timestamp(),
                         frame.render_time_ms(), frame.rotation());

  if (!MaybeReconfigureEncoderOnCodecThread(input_frame)) {
    ALOGE << "Failed to reconfigure encoder.";
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

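  // Force a key frame if the caller asked for one or if the low-framerate
  // heuristic above fired.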
  const bool key_frame =
      frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame;
  bool encode_status = true;
  if (!input_frame.video_frame_buffer()->native_handle()) {
    int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_,
                                                  j_dequeue_input_buffer_method_);
    CHECK_EXCEPTION(jni);
    if (j_input_buffer_index == -1) {
      // Video codec falls behind - no input buffer available.
      ALOGW << "Encoder drop frame - no input buffers available";
      if (frames_received_ > 1) {
        current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
        frames_dropped_media_encoder_++;
        OnDroppedFrameOnCodecThread();
      } else {
        // Input buffers are not ready after codec initialization, the HW is
        // still allocating them - this is expected and should not result in a
        // dropped-frame report.
        frames_received_ = 0;
      }
      return WEBRTC_VIDEO_CODEC_OK;  // TODO(fischman): see webrtc bug 2887.
    } else if (j_input_buffer_index == -2) {
      ResetCodecOnCodecThread();
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
    encode_status = EncodeByteBufferOnCodecThread(jni, key_frame, input_frame,
                                                  j_input_buffer_index);
  } else {
    encode_status = EncodeTextureOnCodecThread(jni, key_frame, input_frame);
  }

  if (!encode_status) {
    ALOGE << "Failed encode frame with timestamp: " << input_frame.timestamp();
    ResetCodecOnCodecThread();
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  // Save input image timestamps for later output.
  input_frame_infos_.emplace_back(
      frame_input_time_ms, input_frame.timestamp(),
      input_frame.render_time_ms(), input_frame.rotation());

  last_input_timestamp_ms_ =
      current_timestamp_us_ / rtc::kNumMicrosecsPerMillisec;

  current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;

  codec_thread_->Clear(this);
  codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollMs, this);

  if (!DeliverPendingOutputs(jni)) {
    ALOGE << "Failed deliver pending outputs.";
    ResetCodecOnCodecThread();
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

bool MediaCodecVideoEncoder::MaybeReconfigureEncoderOnCodecThread(
    const webrtc::VideoFrame& frame) {
  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());

  const bool is_texture_frame =
      frame.video_frame_buffer()->native_handle() != nullptr;
  const bool reconfigure_due_to_format = is_texture_frame != use_surface_;
  const bool reconfigure_due_to_size =
      frame.width() != width_ || frame.height() != height_;

  if (reconfigure_due_to_format) {
    ALOGD << "Reconfigure encoder due to format change. "
          << (use_surface_ ?
              "Reconfiguring to encode from byte buffer." :
              "Reconfiguring to encode from texture.");
    LogStatistics(true);
  }
  if (reconfigure_due_to_size) {
    ALOGW << "Reconfigure encoder due to frame resolution change from "
          << width_ << " x " << height_ << " to " << frame.width() << " x "
          << frame.height();
    LogStatistics(true);
    width_ = frame.width();
    height_ = frame.height();
  }

  if (!reconfigure_due_to_format && !reconfigure_due_to_size)
    return true;

  ReleaseOnCodecThread();

  return InitEncodeOnCodecThread(width_, height_, 0, 0, is_texture_frame) ==
      WEBRTC_VIDEO_CODEC_OK;
}

bool MediaCodecVideoEncoder::EncodeByteBufferOnCodecThread(JNIEnv* jni,
    bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index) {
  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
  RTC_CHECK(!use_surface_);

  jobject j_input_buffer = input_buffers_[input_buffer_index];
  uint8_t* yuv_buffer =
      reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer));
  CHECK_EXCEPTION(jni);
  RTC_CHECK(yuv_buffer) << "Indirect buffer??";
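  // Convert the I420 input into the MediaCodec direct input buffer, using the
  // color format the codec reported (|encoder_fourcc_|). |width_| is used as
  // the destination stride since no extra stride/padding was requested when
  // the format was created.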
  RTC_CHECK(!libyuv::ConvertFromI420(
      frame.video_frame_buffer()->DataY(),
      frame.video_frame_buffer()->StrideY(),
      frame.video_frame_buffer()->DataU(),
      frame.video_frame_buffer()->StrideU(),
      frame.video_frame_buffer()->DataV(),
      frame.video_frame_buffer()->StrideV(),
      yuv_buffer, width_, width_, height_, encoder_fourcc_))
      << "ConvertFromI420 failed";

  bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
                                              j_encode_buffer_method_,
                                              key_frame,
                                              input_buffer_index,
                                              yuv_size_,
                                              current_timestamp_us_);
  CHECK_EXCEPTION(jni);
  return encode_status;
}

bool MediaCodecVideoEncoder::EncodeTextureOnCodecThread(JNIEnv* jni,
    bool key_frame, const webrtc::VideoFrame& frame) {
  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
  RTC_CHECK(use_surface_);
  NativeHandleImpl* handle = static_cast<NativeHandleImpl*>(
      frame.video_frame_buffer()->native_handle());
  jfloatArray sampling_matrix = handle->sampling_matrix.ToJava(jni);
  bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
                                              j_encode_texture_method_,
                                              key_frame,
                                              handle->oes_texture_id,
                                              sampling_matrix,
                                              current_timestamp_us_);
  CHECK_EXCEPTION(jni);
  return encode_status;
}

int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread(
    webrtc::EncodedImageCallback* callback) {
  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() {
  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
  if (!inited_) {
    return WEBRTC_VIDEO_CODEC_OK;
  }
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ALOGD << "EncoderReleaseOnCodecThread: Frames received: " <<
      frames_received_ << ". Encoded: " << frames_encoded_ <<
      ". Dropped: " << frames_dropped_media_encoder_;
  ScopedLocalRefFrame local_ref_frame(jni);
  for (size_t i = 0; i < input_buffers_.size(); ++i)
    jni->DeleteGlobalRef(input_buffers_[i]);
  input_buffers_.clear();
  jni->CallVoidMethod(*j_media_codec_video_encoder_, j_release_method_);
  CHECK_EXCEPTION(jni);
  rtc::MessageQueueManager::Clear(this);
  inited_ = false;
  use_surface_ = false;
  ALOGD << "EncoderReleaseOnCodecThread done.";
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate,
                                                      uint32_t frame_rate) {
  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
  frame_rate = (frame_rate < MAX_ALLOWED_VIDEO_FPS) ?
      frame_rate : MAX_ALLOWED_VIDEO_FPS;
  if (last_set_bitrate_kbps_ == new_bit_rate &&
      last_set_fps_ == frame_rate) {
    return WEBRTC_VIDEO_CODEC_OK;
  }
  if (scale_) {
    quality_scaler_.ReportFramerate(frame_rate);
  }
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  if (new_bit_rate > 0) {
    last_set_bitrate_kbps_ = new_bit_rate;
  }
  if (frame_rate > 0) {
    last_set_fps_ = frame_rate;
  }
  bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
                                    j_set_rates_method_,
                                    last_set_bitrate_kbps_,
                                    last_set_fps_);
  CHECK_EXCEPTION(jni);
  if (!ret) {
    ResetCodecOnCodecThread();
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

int MediaCodecVideoEncoder::GetOutputBufferInfoIndex(
    JNIEnv* jni,
    jobject j_output_buffer_info) {
  return GetIntField(jni, j_output_buffer_info, j_info_index_field_);
}

jobject MediaCodecVideoEncoder::GetOutputBufferInfoBuffer(
    JNIEnv* jni,
    jobject j_output_buffer_info) {
  return GetObjectField(jni, j_output_buffer_info, j_info_buffer_field_);
}

bool MediaCodecVideoEncoder::GetOutputBufferInfoIsKeyFrame(
    JNIEnv* jni,
    jobject j_output_buffer_info) {
  return GetBooleanField(jni, j_output_buffer_info, j_info_is_key_frame_field_);
}

jlong MediaCodecVideoEncoder::GetOutputBufferInfoPresentationTimestampUs(
    JNIEnv* jni,
    jobject j_output_buffer_info) {
  return GetLongField(
      jni, j_output_buffer_info, j_info_presentation_timestamp_us_field_);
}

bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());

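  // Drain the Java encoder: keep dequeuing completed output buffers until
  // none remain, delivering each encoded frame to |callback_|.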
  while (true) {
    jobject j_output_buffer_info = jni->CallObjectMethod(
        *j_media_codec_video_encoder_, j_dequeue_output_buffer_method_);
    CHECK_EXCEPTION(jni);
    if (IsNull(jni, j_output_buffer_info)) {
      break;
    }

    int output_buffer_index =
        GetOutputBufferInfoIndex(jni, j_output_buffer_info);
    if (output_buffer_index == -1) {
      ResetCodecOnCodecThread();
      return false;
    }

    // Get key and config frame flags.
    jobject j_output_buffer =
        GetOutputBufferInfoBuffer(jni, j_output_buffer_info);
    bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info);

    // Get frame timestamps from a queue - for non-config frames only.
    int64_t encoding_start_time_ms = 0;
    int64_t frame_encoding_time_ms = 0;
    last_output_timestamp_ms_ =
        GetOutputBufferInfoPresentationTimestampUs(jni, j_output_buffer_info) /
        rtc::kNumMicrosecsPerMillisec;
    if (!input_frame_infos_.empty()) {
      const InputFrameInfo& frame_info = input_frame_infos_.front();
      output_timestamp_ = frame_info.frame_timestamp;
      output_render_time_ms_ = frame_info.frame_render_time_ms;
      output_rotation_ = frame_info.rotation;
      encoding_start_time_ms = frame_info.encode_start_time;
      input_frame_infos_.pop_front();
    }

    // Extract payload.
    size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer);
    uint8_t* payload = reinterpret_cast<uint8_t*>(
        jni->GetDirectBufferAddress(j_output_buffer));
    CHECK_EXCEPTION(jni);

    // Callback - return encoded frame.
    int32_t callback_status = 0;
    if (callback_) {
      std::unique_ptr<webrtc::EncodedImage> image(
          new webrtc::EncodedImage(payload, payload_size, payload_size));
      image->_encodedWidth = width_;
      image->_encodedHeight = height_;
      image->_timeStamp = output_timestamp_;
      image->capture_time_ms_ = output_render_time_ms_;
      image->rotation_ = output_rotation_;
      image->_frameType =
          (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta);
      image->_completeFrame = true;
      image->adapt_reason_.quality_resolution_downscales =
          scale_ ? quality_scaler_.downscale_shift() : -1;

      webrtc::CodecSpecificInfo info;
      memset(&info, 0, sizeof(info));
      info.codecType = codecType_;
      if (codecType_ == kVideoCodecVP8) {
        info.codecSpecific.VP8.pictureId = picture_id_;
        info.codecSpecific.VP8.nonReference = false;
        info.codecSpecific.VP8.simulcastIdx = 0;
        info.codecSpecific.VP8.temporalIdx = webrtc::kNoTemporalIdx;
        info.codecSpecific.VP8.layerSync = false;
        info.codecSpecific.VP8.tl0PicIdx = webrtc::kNoTl0PicIdx;
        info.codecSpecific.VP8.keyIdx = webrtc::kNoKeyIdx;
      } else if (codecType_ == kVideoCodecVP9) {
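        // Restart the group-of-frames index on key frames, and send the
        // scalability structure (ss_data) only with key frames.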
        if (key_frame) {
          gof_idx_ = 0;
        }
        info.codecSpecific.VP9.picture_id = picture_id_;
        info.codecSpecific.VP9.inter_pic_predicted = !key_frame;
        info.codecSpecific.VP9.flexible_mode = false;
        info.codecSpecific.VP9.ss_data_available = key_frame;
        info.codecSpecific.VP9.tl0_pic_idx = tl0_pic_idx_++;
        info.codecSpecific.VP9.temporal_idx = webrtc::kNoTemporalIdx;
        info.codecSpecific.VP9.spatial_idx = webrtc::kNoSpatialIdx;
        info.codecSpecific.VP9.temporal_up_switch = true;
        info.codecSpecific.VP9.inter_layer_predicted = false;
        info.codecSpecific.VP9.gof_idx =
            static_cast<uint8_t>(gof_idx_++ % gof_.num_frames_in_gof);
        info.codecSpecific.VP9.num_spatial_layers = 1;
        info.codecSpecific.VP9.spatial_layer_resolution_present = false;
        if (info.codecSpecific.VP9.ss_data_available) {
          info.codecSpecific.VP9.spatial_layer_resolution_present = true;
          info.codecSpecific.VP9.width[0] = width_;
          info.codecSpecific.VP9.height[0] = height_;
          info.codecSpecific.VP9.gof.CopyGofInfoVP9(gof_);
        }
      }
      picture_id_ = (picture_id_ + 1) & 0x7FFF;

      // Generate a header describing a single fragment.
      webrtc::RTPFragmentationHeader header;
      memset(&header, 0, sizeof(header));
      if (codecType_ == kVideoCodecVP8 || codecType_ == kVideoCodecVP9) {
        header.VerifyAndAllocateFragmentationHeader(1);
        header.fragmentationOffset[0] = 0;
        header.fragmentationLength[0] = image->_length;
        header.fragmentationPlType[0] = 0;
        header.fragmentationTimeDiff[0] = 0;
        if (codecType_ == kVideoCodecVP8 && scale_) {
          int qp;
          if (webrtc::vp8::GetQp(payload, payload_size, &qp)) {
            current_acc_qp_ += qp;
            quality_scaler_.ReportQP(qp);
            image->qp_ = qp;
          }
        }
      } else if (codecType_ == kVideoCodecH264) {
        if (scale_) {
          h264_bitstream_parser_.ParseBitstream(payload, payload_size);
          int qp;
          if (h264_bitstream_parser_.GetLastSliceQp(&qp)) {
            current_acc_qp_ += qp;
            quality_scaler_.ReportQP(qp);
          }
        }
        // For H.264 search for start codes.
        int32_t scPositions[MAX_NALUS_PERFRAME + 1] = {};
        int32_t scPositionsLength = 0;
        int32_t scPosition = 0;
        while (scPositionsLength < MAX_NALUS_PERFRAME) {
          int32_t naluPosition = NextNaluPosition(
              payload + scPosition, payload_size - scPosition);
          if (naluPosition < 0) {
            break;
          }
          scPosition += naluPosition;
          scPositions[scPositionsLength++] = scPosition;
          scPosition += H264_SC_LENGTH;
        }
        if (scPositionsLength == 0) {
          ALOGE << "Start code is not found!";
          ALOGE << "Data:" << image->_buffer[0] << " " << image->_buffer[1]
                << " " << image->_buffer[2] << " " << image->_buffer[3]
                << " " << image->_buffer[4] << " " << image->_buffer[5];
          ResetCodecOnCodecThread();
          return false;
        }
        scPositions[scPositionsLength] = payload_size;
        header.VerifyAndAllocateFragmentationHeader(scPositionsLength);
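        // Each fragment begins just past its start code and runs up to the
        // next start code (or the end of the payload for the last NALU).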
        for (int32_t i = 0; i < scPositionsLength; i++) {
          header.fragmentationOffset[i] = scPositions[i] + H264_SC_LENGTH;
          header.fragmentationLength[i] =
              scPositions[i + 1] - header.fragmentationOffset[i];
          header.fragmentationPlType[i] = 0;
          header.fragmentationTimeDiff[i] = 0;
        }
      }

      callback_status = callback_->Encoded(*image, &info, &header);
    }

    // Return output buffer back to the encoder.
    bool success = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
                                          j_release_output_buffer_method_,
                                          output_buffer_index);
    CHECK_EXCEPTION(jni);
    if (!success) {
      ResetCodecOnCodecThread();
      return false;
    }

    // Print per frame statistics.
    if (encoding_start_time_ms > 0) {
      frame_encoding_time_ms = rtc::TimeMillis() - encoding_start_time_ms;
    }
    if (frames_encoded_ < kMaxEncodedLogFrames) {
      int current_latency =
          (int)(last_input_timestamp_ms_ - last_output_timestamp_ms_);
      ALOGD << "Encoder frame out # " << frames_encoded_ <<
          ". Key: " << key_frame <<
          ". Size: " << payload_size <<
          ". TS: " << (int)last_output_timestamp_ms_ <<
          ". Latency: " << current_latency <<
          ". EncTime: " << frame_encoding_time_ms;
    }

    // Calculate and print encoding statistics - every 3 seconds.
    frames_encoded_++;
    current_frames_++;
    current_bytes_ += payload_size;
    current_encoding_time_ms_ += frame_encoding_time_ms;
    LogStatistics(false);

    if (callback_status > 0) {
      drop_next_input_frame_ = true;
      // Theoretically could handle callback_status < 0 here, but unclear what
      // that would mean for us.
    }
  }
  return true;
}

void MediaCodecVideoEncoder::LogStatistics(bool force_log) {
  int statistic_time_ms = rtc::TimeMillis() - stat_start_time_ms_;
  if ((statistic_time_ms >= kMediaCodecStatisticsIntervalMs || force_log)
      && statistic_time_ms > 0) {
    // Prevent division by zero.
    int current_frames_divider = current_frames_ != 0 ? current_frames_ : 1;

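    // bytes * 8 / ms == kbits/s, so the bitrate is already in kbps; fps is
    // rounded to the nearest integer.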
    int current_bitrate = current_bytes_ * 8 / statistic_time_ms;
    int current_fps =
        (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms;
    ALOGD << "Encoded frames: " << frames_encoded_ <<
        ". Bitrate: " << current_bitrate <<
        ", target: " << last_set_bitrate_kbps_ << " kbps" <<
        ", fps: " << current_fps <<
        ", encTime: " << (current_encoding_time_ms_ / current_frames_divider) <<
        ". QP: " << (current_acc_qp_ / current_frames_divider) <<
        " for last " << statistic_time_ms << " ms.";
    stat_start_time_ms_ = rtc::TimeMillis();
    current_frames_ = 0;
    current_bytes_ = 0;
    current_acc_qp_ = 0;
    current_encoding_time_ms_ = 0;
  }
}

int32_t MediaCodecVideoEncoder::NextNaluPosition(
    uint8_t* buffer, size_t buffer_size) {
  if (buffer_size < H264_SC_LENGTH) {
    return -1;
  }
  uint8_t* head = buffer;
  // Set end buffer pointer to 4 bytes before actual buffer end so we can
  // access head[1], head[2] and head[3] in a loop without buffer overrun.
  uint8_t* end = buffer + buffer_size - H264_SC_LENGTH;

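  // Scan for the 4-byte Annex-B start code 00 00 00 01, advancing past any
  // byte that rules out a start code beginning at the current position.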
  while (head < end) {
    if (head[0]) {
      head++;
      continue;
    }
    if (head[1]) {  // got 00xx
      head += 2;
      continue;
    }
    if (head[2]) {  // got 0000xx
      head += 3;
      continue;
    }
    if (head[3] != 0x01) {  // got 000000xx
      head++;  // xx != 1, continue searching.
      continue;
    }
    return (int32_t)(head - buffer);
  }
  return -1;
}

void MediaCodecVideoEncoder::OnDroppedFrame() {
  // Methods running on the codec thread should call OnDroppedFrameOnCodecThread
  // directly.
  RTC_DCHECK(!codec_thread_checker_.CalledOnValidThread());
  codec_thread_->Invoke<void>(
      RTC_FROM_HERE,
      Bind(&MediaCodecVideoEncoder::OnDroppedFrameOnCodecThread, this));
}

void MediaCodecVideoEncoder::OnDroppedFrameOnCodecThread() {
  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
  // Report dropped frame to quality_scaler_.
  if (scale_)
    quality_scaler_.ReportDroppedFrame();
}

const char* MediaCodecVideoEncoder::ImplementationName() const {
  return "MediaCodec";
}

MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory()
    : egl_context_(nullptr) {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  jclass j_encoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoEncoder");
  supported_codecs_.clear();

  bool is_vp8_hw_supported = jni->CallStaticBooleanMethod(
      j_encoder_class,
      GetStaticMethodID(jni, j_encoder_class, "isVp8HwSupported", "()Z"));
  CHECK_EXCEPTION(jni);
  if (is_vp8_hw_supported) {
    ALOGD << "VP8 HW Encoder supported.";
    supported_codecs_.push_back(VideoCodec(kVideoCodecVP8, "VP8",
        MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS));
  }

  bool is_vp9_hw_supported = jni->CallStaticBooleanMethod(
      j_encoder_class,
      GetStaticMethodID(jni, j_encoder_class, "isVp9HwSupported", "()Z"));
  CHECK_EXCEPTION(jni);
  if (is_vp9_hw_supported) {
    ALOGD << "VP9 HW Encoder supported.";
    supported_codecs_.push_back(VideoCodec(kVideoCodecVP9, "VP9",
        MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS));
  }

  bool is_h264_hw_supported = jni->CallStaticBooleanMethod(
      j_encoder_class,
      GetStaticMethodID(jni, j_encoder_class, "isH264HwSupported", "()Z"));
  CHECK_EXCEPTION(jni);
  if (is_h264_hw_supported) {
    ALOGD << "H.264 HW Encoder supported.";
    supported_codecs_.push_back(VideoCodec(kVideoCodecH264, "H264",
        MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS));
  }
}

MediaCodecVideoEncoderFactory::~MediaCodecVideoEncoderFactory() {
  ALOGD << "MediaCodecVideoEncoderFactory dtor";
  if (egl_context_) {
    JNIEnv* jni = AttachCurrentThreadIfNeeded();
    jni->DeleteGlobalRef(egl_context_);
  }
}

void MediaCodecVideoEncoderFactory::SetEGLContext(
    JNIEnv* jni, jobject egl_context) {
  ALOGD << "MediaCodecVideoEncoderFactory::SetEGLContext";
  if (egl_context_) {
    jni->DeleteGlobalRef(egl_context_);
    egl_context_ = nullptr;
  }
  egl_context_ = jni->NewGlobalRef(egl_context);
  if (CheckException(jni)) {
    ALOGE << "error calling NewGlobalRef for EGL Context.";
  }
}

webrtc::VideoEncoder* MediaCodecVideoEncoderFactory::CreateVideoEncoder(
    VideoCodecType type) {
  if (supported_codecs_.empty()) {
    ALOGW << "No HW video encoder for type " << (int)type;
    return nullptr;
  }
  for (std::vector<VideoCodec>::const_iterator it = supported_codecs_.begin();
       it != supported_codecs_.end(); ++it) {
    if (it->type == type) {
      ALOGD << "Create HW video encoder for type " << (int)type <<
          " (" << it->name << ").";
      return new MediaCodecVideoEncoder(AttachCurrentThreadIfNeeded(), type,
                                        egl_context_);
    }
  }
  ALOGW << "Can not find HW video encoder for type " << (int)type;
  return nullptr;
}

const std::vector<MediaCodecVideoEncoderFactory::VideoCodec>&
MediaCodecVideoEncoderFactory::codecs() const {
  return supported_codecs_;
}

void MediaCodecVideoEncoderFactory::DestroyVideoEncoder(
    webrtc::VideoEncoder* encoder) {
  ALOGD << "Destroy video encoder.";
  delete encoder;
}

}  // namespace webrtc_jni