Chromium Code Reviews

Side by Side Diff: talk/app/webrtc/java/jni/androidmediaencoder_jni.cc

Issue 1610243002: Move talk/app/webrtc to webrtc/api (Closed) Base URL: https://chromium.googlesource.com/external/webrtc.git@master
Patch Set: Removed processing of api.gyp for Chromium builds Created 4 years, 10 months ago
1 /*
2 * libjingle
3 * Copyright 2015 Google Inc.
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions are met:
7 *
8 * 1. Redistributions of source code must retain the above copyright notice,
9 * this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright notice,
11 * this list of conditions and the following disclaimer in the documentation
12 * and/or other materials provided with the distribution.
13 * 3. The name of the author may not be used to endorse or promote products
14 * derived from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
19 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
20 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
21 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
22 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
23 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
24 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
25 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 *
27 */
28
29 #include "talk/app/webrtc/java/jni/androidmediaencoder_jni.h"
30 // NOTICE: androidmediaencoder_jni.h must be included before
31 // androidmediacodeccommon.h to avoid build errors.
32 #include "talk/app/webrtc/java/jni/androidmediacodeccommon.h"
33 #include "talk/app/webrtc/java/jni/classreferenceholder.h"
34 #include "talk/app/webrtc/java/jni/native_handle_impl.h"
35 #include "third_party/libyuv/include/libyuv/convert.h"
36 #include "third_party/libyuv/include/libyuv/convert_from.h"
37 #include "third_party/libyuv/include/libyuv/video_common.h"
38 #include "webrtc/base/bind.h"
39 #include "webrtc/base/checks.h"
40 #include "webrtc/base/logging.h"
41 #include "webrtc/base/thread.h"
42 #include "webrtc/base/thread_checker.h"
43 #include "webrtc/common_types.h"
44 #include "webrtc/modules/rtp_rtcp/source/h264_bitstream_parser.h"
45 #include "webrtc/modules/video_coding/include/video_codec_interface.h"
46 #include "webrtc/modules/video_coding/utility/quality_scaler.h"
47 #include "webrtc/modules/video_coding/utility/vp8_header_parser.h"
48 #include "webrtc/system_wrappers/include/field_trial.h"
49 #include "webrtc/system_wrappers/include/logcat_trace_context.h"
50
51 using rtc::Bind;
52 using rtc::Thread;
53 using rtc::ThreadManager;
54 using rtc::scoped_ptr;
55
56 using webrtc::CodecSpecificInfo;
57 using webrtc::EncodedImage;
58 using webrtc::VideoFrame;
59 using webrtc::RTPFragmentationHeader;
60 using webrtc::VideoCodec;
61 using webrtc::VideoCodecType;
62 using webrtc::kVideoCodecH264;
63 using webrtc::kVideoCodecVP8;
64 using webrtc::kVideoCodecVP9;
65
66 namespace webrtc_jni {
67
68 // H.264 start code length.
69 #define H264_SC_LENGTH 4
70 // Maximum allowed NALUs in one output frame.
71 #define MAX_NALUS_PERFRAME 32
72 // Maximum supported HW video encoder resolution.
73 #define MAX_VIDEO_WIDTH 1280
74 #define MAX_VIDEO_HEIGHT 1280
75 // Maximum supported HW video encoder fps.
76 #define MAX_VIDEO_FPS 30
77 // Maximum allowed fps value in SetRates() call.
78 #define MAX_ALLOWED_VIDEO_FPS 60
79 // Maximum allowed frames in encoder input queue.
80 #define MAX_ENCODER_Q_SIZE 2
81 // Maximum allowed latency in ms.
82 #define MAX_ENCODER_LATENCY_MS 70
83 // Maximum number of consecutive dropped frames caused by a full encoder
84 // queue - exceeding this threshold means the encoder is probably stuck and needs to be reset.
85 #define ENCODER_STALL_FRAMEDROP_THRESHOLD 60
86
87 // Logging macros.
88 #define TAG_ENCODER "MediaCodecVideoEncoder"
89 #ifdef TRACK_BUFFER_TIMING
90 #define ALOGV(...) \
91   __android_log_print(ANDROID_LOG_VERBOSE, TAG_ENCODER, __VA_ARGS__)
92 #else
93 #define ALOGV(...)
94 #endif
95 #define ALOGD LOG_TAG(rtc::LS_INFO, TAG_ENCODER)
96 #define ALOGW LOG_TAG(rtc::LS_WARNING, TAG_ENCODER)
97 #define ALOGE LOG_TAG(rtc::LS_ERROR, TAG_ENCODER)
98
99 namespace {
100 // Maximum time limit between incoming frames before requesting a key frame.
101 const size_t kFrameDiffThresholdMs = 1100;
102 const int kMinKeyFrameInterval = 2;
103 } // namespace
104
105 // MediaCodecVideoEncoder is a webrtc::VideoEncoder implementation that uses
106 // Android's MediaCodec SDK API behind the scenes to implement (hopefully)
107 // HW-backed video encode. This C++ class is implemented as a very thin shim,
108 // delegating all of the interesting work to org.webrtc.MediaCodecVideoEncoder.
109 // MediaCodecVideoEncoder is created, operated, and destroyed on a single
110 // thread, currently the libjingle Worker thread.
111 class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
112 public rtc::MessageHandler {
113 public:
114 virtual ~MediaCodecVideoEncoder();
115 MediaCodecVideoEncoder(JNIEnv* jni,
116 VideoCodecType codecType,
117 jobject egl_context);
118
119 // webrtc::VideoEncoder implementation. Everything trampolines to
120 // |codec_thread_| for execution (see the sketch after this class).
121 int32_t InitEncode(const webrtc::VideoCodec* codec_settings,
122 int32_t /* number_of_cores */,
123 size_t /* max_payload_size */) override;
124 int32_t Encode(const webrtc::VideoFrame& input_image,
125 const webrtc::CodecSpecificInfo* /* codec_specific_info */,
126 const std::vector<webrtc::FrameType>* frame_types) override;
127 int32_t RegisterEncodeCompleteCallback(
128 webrtc::EncodedImageCallback* callback) override;
129 int32_t Release() override;
130 int32_t SetChannelParameters(uint32_t /* packet_loss */,
131 int64_t /* rtt */) override;
132 int32_t SetRates(uint32_t new_bit_rate, uint32_t frame_rate) override;
133
134 // rtc::MessageHandler implementation.
135 void OnMessage(rtc::Message* msg) override;
136
137 void OnDroppedFrame() override;
138
139 int GetTargetFramerate() override;
140
141 bool SupportsNativeHandle() const override { return egl_context_ != nullptr; }
142 const char* ImplementationName() const override;
143
144 private:
145 // CHECK-fail if not running on |codec_thread_|.
146 void CheckOnCodecThread();
147
148 private:
149 // ResetCodecOnCodecThread() calls ReleaseOnCodecThread() and
150 // InitEncodeOnCodecThread() in an attempt to restore the codec to an
151 // operable state. Necessary after all manner of OMX-layer errors.
152 bool ResetCodecOnCodecThread();
153
154 // Implementation of webrtc::VideoEncoder methods above, all running on the
155 // codec thread exclusively.
156 //
157 // If width==0 then this is assumed to be a re-initialization and the
158 // previously-current values are reused instead of the passed parameters
159 // (makes it easier to reason about thread-safety).
160 int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps,
161 bool use_surface);
162 // Reconfigure to match |frame| in width, height. Also reconfigures the
163 // encoder if |frame| is a texture/byte buffer and the encoder is initialized
164 // for byte buffer/texture. Returns false if reconfiguring fails.
165 bool MaybeReconfigureEncoderOnCodecThread(const webrtc::VideoFrame& frame);
166 int32_t EncodeOnCodecThread(
167 const webrtc::VideoFrame& input_image,
168 const std::vector<webrtc::FrameType>* frame_types);
169 bool EncodeByteBufferOnCodecThread(JNIEnv* jni,
170 bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index);
171 bool EncodeTextureOnCodecThread(JNIEnv* jni,
172 bool key_frame, const webrtc::VideoFrame& frame);
173
174 int32_t RegisterEncodeCompleteCallbackOnCodecThread(
175 webrtc::EncodedImageCallback* callback);
176 int32_t ReleaseOnCodecThread();
177 int32_t SetRatesOnCodecThread(uint32_t new_bit_rate, uint32_t frame_rate);
178
179 // Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members.
180 int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info);
181 jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info);
182 bool GetOutputBufferInfoIsKeyFrame(JNIEnv* jni, jobject j_output_buffer_info);
183 jlong GetOutputBufferInfoPresentationTimestampUs(
184 JNIEnv* jni, jobject j_output_buffer_info);
185
186 // Deliver any outputs pending in the MediaCodec to our |callback_| and return
187 // true on success.
188 bool DeliverPendingOutputs(JNIEnv* jni);
189
190 // Search for H.264 start codes.
191 int32_t NextNaluPosition(uint8_t *buffer, size_t buffer_size);
192
193 // Displays encoder statistics.
194 void LogStatistics(bool force_log);
195
196 // Type of video codec.
197 VideoCodecType codecType_;
198
199 // Valid at all times, since RegisterEncodeCompleteCallback() Invoke()s
200 // synchronously to |codec_thread_|.
201 webrtc::EncodedImageCallback* callback_;
202
203 // State that is constant for the lifetime of this object once the ctor
204 // returns.
205 scoped_ptr<Thread> codec_thread_; // Thread on which to operate MediaCodec.
206 rtc::ThreadChecker codec_thread_checker_;
207 ScopedGlobalRef<jclass> j_media_codec_video_encoder_class_;
208 ScopedGlobalRef<jobject> j_media_codec_video_encoder_;
209 jmethodID j_init_encode_method_;
210 jmethodID j_get_input_buffers_method_;
211 jmethodID j_dequeue_input_buffer_method_;
212 jmethodID j_encode_buffer_method_;
213 jmethodID j_encode_texture_method_;
214 jmethodID j_release_method_;
215 jmethodID j_set_rates_method_;
216 jmethodID j_dequeue_output_buffer_method_;
217 jmethodID j_release_output_buffer_method_;
218 jfieldID j_color_format_field_;
219 jfieldID j_info_index_field_;
220 jfieldID j_info_buffer_field_;
221 jfieldID j_info_is_key_frame_field_;
222 jfieldID j_info_presentation_timestamp_us_field_;
223
224 // State that is valid only between InitEncode() and the next Release().
225 // Touched only on codec_thread_ so no explicit synchronization necessary.
226 int width_; // Frame width in pixels.
227 int height_; // Frame height in pixels.
228 bool inited_;
229 bool use_surface_;
230 uint16_t picture_id_;
231 enum libyuv::FourCC encoder_fourcc_; // Encoder color space format.
232 int last_set_bitrate_kbps_; // Last-requested bitrate in kbps.
233 int last_set_fps_; // Last-requested frame rate.
234 int64_t current_timestamp_us_; // Current frame timestamps in us.
235 int frames_received_; // Number of frames received by encoder.
236 int frames_encoded_; // Number of frames encoded by encoder.
237 int frames_dropped_media_encoder_; // Number of frames dropped by encoder.
238 // Number of dropped frames caused by full queue.
239 int consecutive_full_queue_frame_drops_;
240 int frames_in_queue_; // Number of frames in encoder queue.
241 int64_t stat_start_time_ms_; // Start time for statistics.
242 int current_frames_; // Number of frames in the current statistics interval.
243 int current_bytes_; // Encoded bytes in the current statistics interval.
244 int current_acc_qp_; // Accumulated QP in the current statistics interval.
245 int current_encoding_time_ms_; // Overall encoding time in the current second.
246 int64_t last_input_timestamp_ms_; // Timestamp of last received yuv frame.
247 int64_t last_output_timestamp_ms_; // Timestamp of last encoded frame.
248 std::vector<int32_t> timestamps_; // Video frames timestamp queue.
249 std::vector<int64_t> render_times_ms_; // Video frames render time queue.
250 std::vector<int64_t> frame_rtc_times_ms_; // Time when video frame is sent to
251 // encoder input.
252 int32_t output_timestamp_; // Last output frame timestamp from timestamps_ Q.
253 int64_t output_render_time_ms_; // Last output frame render time from
254 // render_times_ms_ queue.
255 // Frame size in bytes fed to MediaCodec.
256 int yuv_size_;
257 // True only between a callback_->Encoded() call returning a positive value
258 // and the next Encode() call, which is then ignored.
259 bool drop_next_input_frame_;
260 // Global references; must be deleted in Release().
261 std::vector<jobject> input_buffers_;
262 webrtc::QualityScaler quality_scaler_;
263 // Dynamic resolution change, off by default.
264 bool scale_;
265
266 // H264 bitstream parser, used to extract QP from encoded bitstreams.
267 webrtc::H264BitstreamParser h264_bitstream_parser_;
268
269 // VP9 variables to populate codec specific structure.
270 webrtc::GofInfoVP9 gof_; // Contains each frame's temporal information for
271 // non-flexible VP9 mode.
272 uint8_t tl0_pic_idx_;
273 size_t gof_idx_;
274
275 // EGL context - owned by factory, should not be allocated/destroyed
276 // by MediaCodecVideoEncoder.
277 jobject egl_context_;
278
279 // Temporary fix for VP8.
280 // Sends a key frame if frames are widely spaced apart (possibly
281 // corresponding to a large image change).
282 int64_t last_frame_received_ms_;
283 int frames_received_since_last_key_;
284 webrtc::VideoCodecMode codec_mode_;
285 };
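
For illustration, the trampoline pattern referenced in the class comment: every public method marshals onto |codec_thread_| and blocks until the corresponding *OnCodecThread() body returns, confining all MediaCodec state to one thread. A minimal sketch with a hypothetical method |Foo| (these names are not part of this file):

    int32_t MediaCodecVideoEncoder::Foo(int arg) {
      // Marshal onto the codec thread; Invoke() blocks until the body returns.
      return codec_thread_->Invoke<int32_t>(
          Bind(&MediaCodecVideoEncoder::FooOnCodecThread, this, arg));
    }

    int32_t MediaCodecVideoEncoder::FooOnCodecThread(int arg) {
      RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
      // MediaCodec/JNI state can be touched here without locks.
      return WEBRTC_VIDEO_CODEC_OK;
    }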
286
287 MediaCodecVideoEncoder::~MediaCodecVideoEncoder() {
288 // Call Release() to ensure no more callbacks to us after we are deleted.
289 Release();
290 }
291
292 MediaCodecVideoEncoder::MediaCodecVideoEncoder(
293 JNIEnv* jni, VideoCodecType codecType, jobject egl_context) :
294 codecType_(codecType),
295 callback_(NULL),
296 codec_thread_(new Thread()),
297 j_media_codec_video_encoder_class_(
298 jni,
299 FindClass(jni, "org/webrtc/MediaCodecVideoEncoder")),
300 j_media_codec_video_encoder_(
301 jni,
302 jni->NewObject(*j_media_codec_video_encoder_class_,
303 GetMethodID(jni,
304 *j_media_codec_video_encoder_class_,
305 "<init>",
306 "()V"))),
307 inited_(false),
308 use_surface_(false),
309 picture_id_(0),
310 egl_context_(egl_context) {
311 ScopedLocalRefFrame local_ref_frame(jni);
312 // It would be nice to avoid spinning up a new thread per MediaCodec, and
313 // instead re-use e.g. the PeerConnectionFactory's |worker_thread_|, but bug
314 // 2732 means that deadlocks abound. This class synchronously trampolines
315 // to |codec_thread_|, so if anything else can be coming to _us_ from
316 // |codec_thread_|, or from any thread holding the |_sendCritSect| described
317 // in the bug, we have a problem. For now work around that with a dedicated
318 // thread.
319 codec_thread_->SetName("MediaCodecVideoEncoder", NULL);
320 RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoEncoder";
321 codec_thread_checker_.DetachFromThread();
322 jclass j_output_buffer_info_class =
323 FindClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
324 j_init_encode_method_ = GetMethodID(
325 jni,
326 *j_media_codec_video_encoder_class_,
327 "initEncode",
328 "(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;"
329 "IIIILorg/webrtc/EglBase14$Context;)Z");
330 j_get_input_buffers_method_ = GetMethodID(
331 jni,
332 *j_media_codec_video_encoder_class_,
333 "getInputBuffers",
334 "()[Ljava/nio/ByteBuffer;");
335 j_dequeue_input_buffer_method_ = GetMethodID(
336 jni, *j_media_codec_video_encoder_class_, "dequeueInputBuffer", "()I");
337 j_encode_buffer_method_ = GetMethodID(
338 jni, *j_media_codec_video_encoder_class_, "encodeBuffer", "(ZIIJ)Z");
339 j_encode_texture_method_ = GetMethodID(
340 jni, *j_media_codec_video_encoder_class_, "encodeTexture",
341 "(ZI[FJ)Z");
342 j_release_method_ =
343 GetMethodID(jni, *j_media_codec_video_encoder_class_, "release", "()V");
344 j_set_rates_method_ = GetMethodID(
345 jni, *j_media_codec_video_encoder_class_, "setRates", "(II)Z");
346 j_dequeue_output_buffer_method_ = GetMethodID(
347 jni,
348 *j_media_codec_video_encoder_class_,
349 "dequeueOutputBuffer",
350 "()Lorg/webrtc/MediaCodecVideoEncoder$OutputBufferInfo;");
351 j_release_output_buffer_method_ = GetMethodID(
352 jni, *j_media_codec_video_encoder_class_, "releaseOutputBuffer", "(I)Z");
353
354 j_color_format_field_ =
355 GetFieldID(jni, *j_media_codec_video_encoder_class_, "colorFormat", "I");
356 j_info_index_field_ =
357 GetFieldID(jni, j_output_buffer_info_class, "index", "I");
358 j_info_buffer_field_ = GetFieldID(
359 jni, j_output_buffer_info_class, "buffer", "Ljava/nio/ByteBuffer;");
360 j_info_is_key_frame_field_ =
361 GetFieldID(jni, j_output_buffer_info_class, "isKeyFrame", "Z");
362 j_info_presentation_timestamp_us_field_ = GetFieldID(
363 jni, j_output_buffer_info_class, "presentationTimestampUs", "J");
364 CHECK_EXCEPTION(jni) << "MediaCodecVideoEncoder ctor failed";
365 srand(time(NULL));
366 AllowBlockingCalls();
367 }
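
For reference, the JNI descriptors cached by this constructor follow the standard JNI type-signature grammar. Decoding a few of the ones above:

    "(ZIIJ)Z"                   // boolean encodeBuffer(boolean, int, int, long)
    "(ZI[FJ)Z"                  // boolean encodeTexture(boolean, int, float[], long)
    "()[Ljava/nio/ByteBuffer;"  // ByteBuffer[] getInputBuffers()
    // Z = boolean, I = int, J = long, [F = float[],
    // L<class>; = object reference, and "()" means no arguments.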
368
369 int32_t MediaCodecVideoEncoder::InitEncode(
370 const webrtc::VideoCodec* codec_settings,
371 int32_t /* number_of_cores */,
372 size_t /* max_payload_size */) {
373 const int kMinWidth = 320;
374 const int kMinHeight = 180;
375 const int kLowQpThresholdDenominator = 3;
376 if (codec_settings == NULL) {
377 ALOGE << "NULL VideoCodec instance";
378 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
379 }
380 // Factory should guard against other codecs being used with us.
381 RTC_CHECK(codec_settings->codecType == codecType_)
382 << "Unsupported codec " << codec_settings->codecType << " for "
383 << codecType_;
384
385 ALOGD << "InitEncode request";
386 codec_mode_ = codec_settings->mode;
387 scale_ = (codecType_ != kVideoCodecVP9) && (webrtc::field_trial::FindFullName(
388 "WebRTC-MediaCodecVideoEncoder-AutomaticResize") == "Enabled");
389 ALOGD << "Encoder automatic resize " << (scale_ ? "enabled" : "disabled");
390 if (scale_) {
391 if (codecType_ == kVideoCodecVP8) {
392 // For HW, QP is parsed from the VP8 bitstream, so it corresponds to the
393 // internal range [0, 127]. QP_max cannot be changed in HW, so it is
394 // always 127. Note that in SW, QP uses the user-level range
395 // [0, 63].
396 const int kMaxQp = 127;
397 const int kBadQpThreshold = 95;
398 quality_scaler_.Init(
399 kMaxQp / kLowQpThresholdDenominator, kBadQpThreshold, false);
400 } else if (codecType_ == kVideoCodecH264) {
401 // H264 QP is in the range [0, 51].
402 const int kMaxQp = 51;
403 const int kBadQpThreshold = 40;
404 quality_scaler_.Init(
405 kMaxQp / kLowQpThresholdDenominator, kBadQpThreshold, false);
406 } else {
407 // When adding support for additional hardware codecs, also configure
408 // their QP thresholds for scaling.
409 RTC_NOTREACHED() << "Unsupported codec without configured QP thresholds.";
410 }
411 quality_scaler_.SetMinResolution(kMinWidth, kMinHeight);
412 quality_scaler_.ReportFramerate(codec_settings->maxFramerate);
413 }
414 return codec_thread_->Invoke<int32_t>(
415 Bind(&MediaCodecVideoEncoder::InitEncodeOnCodecThread,
416 this,
417 codec_settings->width,
418 codec_settings->height,
419 codec_settings->startBitrate,
420 codec_settings->maxFramerate,
421 false /* use_surface */));
422 }
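
A worked example of the quality-scaler configuration above. As configured here, sustained QP above the "bad" threshold drives resolution down, and sustained QP below the "low" threshold allows scaling back up:

    VP8  (HW QP range [0, 127]): low = 127 / 3 = 42, bad = 95
    H264 (QP range [0, 51]):     low =  51 / 3 = 17, bad = 40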
423
424 int32_t MediaCodecVideoEncoder::Encode(
425 const webrtc::VideoFrame& frame,
426 const webrtc::CodecSpecificInfo* /* codec_specific_info */,
427 const std::vector<webrtc::FrameType>* frame_types) {
428 return codec_thread_->Invoke<int32_t>(Bind(
429 &MediaCodecVideoEncoder::EncodeOnCodecThread, this, frame, frame_types));
430 }
431
432 int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallback(
433 webrtc::EncodedImageCallback* callback) {
434 return codec_thread_->Invoke<int32_t>(
435 Bind(&MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread,
436 this,
437 callback));
438 }
439
440 int32_t MediaCodecVideoEncoder::Release() {
441 ALOGD << "EncoderRelease request";
442 return codec_thread_->Invoke<int32_t>(
443 Bind(&MediaCodecVideoEncoder::ReleaseOnCodecThread, this));
444 }
445
446 int32_t MediaCodecVideoEncoder::SetChannelParameters(uint32_t /* packet_loss */,
447 int64_t /* rtt */) {
448 return WEBRTC_VIDEO_CODEC_OK;
449 }
450
451 int32_t MediaCodecVideoEncoder::SetRates(uint32_t new_bit_rate,
452 uint32_t frame_rate) {
453 return codec_thread_->Invoke<int32_t>(
454 Bind(&MediaCodecVideoEncoder::SetRatesOnCodecThread,
455 this,
456 new_bit_rate,
457 frame_rate));
458 }
459
460 void MediaCodecVideoEncoder::OnMessage(rtc::Message* msg) {
461 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
462 JNIEnv* jni = AttachCurrentThreadIfNeeded();
463 ScopedLocalRefFrame local_ref_frame(jni);
464
465 // We only ever send one message to |this| directly (not through a Bind()'d
466 // functor), so expect no ID/data.
467 RTC_CHECK(!msg->message_id) << "Unexpected message!";
468 RTC_CHECK(!msg->pdata) << "Unexpected message!";
469 if (!inited_) {
470 return;
471 }
472
473 // It would be nice to recover from a failure here if one happened, but it's
474 // unclear how to signal such a failure to the app, so instead we stay silent
475 // about it and let the next app-called API method reveal the borkedness.
476 DeliverPendingOutputs(jni);
477 codec_thread_->PostDelayed(kMediaCodecPollMs, this);
478 }
479
480 bool MediaCodecVideoEncoder::ResetCodecOnCodecThread() {
481 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
482 ALOGE << "ResetOnCodecThread";
483 if (ReleaseOnCodecThread() != WEBRTC_VIDEO_CODEC_OK ||
484 InitEncodeOnCodecThread(width_, height_, 0, 0, false) !=
485 WEBRTC_VIDEO_CODEC_OK) {
486 // TODO(fischman): wouldn't it be nice if there was a way to gracefully
487 // degrade to a SW encoder at this point? There isn't one AFAICT :(
488 // https://code.google.com/p/webrtc/issues/detail?id=2920
489 return false;
490 }
491 return true;
492 }
493
494 int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
495 int width, int height, int kbps, int fps, bool use_surface) {
496 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
497 RTC_CHECK(!use_surface || egl_context_ != nullptr) << "EGL context not set.";
498 JNIEnv* jni = AttachCurrentThreadIfNeeded();
499 ScopedLocalRefFrame local_ref_frame(jni);
500
501 ALOGD << "InitEncodeOnCodecThread Type: " << (int)codecType_ << ", " <<
502 width << " x " << height << ". Bitrate: " << kbps <<
503 " kbps. Fps: " << fps;
504 if (kbps == 0) {
505 kbps = last_set_bitrate_kbps_;
506 }
507 if (fps == 0) {
508 fps = MAX_VIDEO_FPS;
509 }
510
511 width_ = width;
512 height_ = height;
513 last_set_bitrate_kbps_ = kbps;
514 last_set_fps_ = (fps < MAX_VIDEO_FPS) ? fps : MAX_VIDEO_FPS;
515 yuv_size_ = width_ * height_ * 3 / 2;
516 frames_received_ = 0;
517 frames_encoded_ = 0;
518 frames_dropped_media_encoder_ = 0;
519 consecutive_full_queue_frame_drops_ = 0;
520 frames_in_queue_ = 0;
521 current_timestamp_us_ = 0;
522 stat_start_time_ms_ = GetCurrentTimeMs();
523 current_frames_ = 0;
524 current_bytes_ = 0;
525 current_acc_qp_ = 0;
526 current_encoding_time_ms_ = 0;
527 last_input_timestamp_ms_ = -1;
528 last_output_timestamp_ms_ = -1;
529 output_timestamp_ = 0;
530 output_render_time_ms_ = 0;
531 timestamps_.clear();
532 render_times_ms_.clear();
533 frame_rtc_times_ms_.clear();
534 drop_next_input_frame_ = false;
535 use_surface_ = use_surface;
536 picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF;
537 gof_.SetGofInfoVP9(webrtc::TemporalStructureMode::kTemporalStructureMode1);
538 tl0_pic_idx_ = static_cast<uint8_t>(rand());
539 gof_idx_ = 0;
540 last_frame_received_ms_ = -1;
541 frames_received_since_last_key_ = kMinKeyFrameInterval;
542
543 // We enforce no extra stride/padding in the format creation step.
544 jobject j_video_codec_enum = JavaEnumFromIndexAndClassName(
545 jni, "MediaCodecVideoEncoder$VideoCodecType", codecType_);
546 const bool encode_status = jni->CallBooleanMethod(
547 *j_media_codec_video_encoder_, j_init_encode_method_,
548 j_video_codec_enum, width, height, kbps, fps,
549 (use_surface ? egl_context_ : nullptr));
550 if (!encode_status) {
551 ALOGE << "Failed to configure encoder.";
552 return WEBRTC_VIDEO_CODEC_ERROR;
553 }
554 CHECK_EXCEPTION(jni);
555
556 if (!use_surface) {
557 jobjectArray input_buffers = reinterpret_cast<jobjectArray>(
558 jni->CallObjectMethod(*j_media_codec_video_encoder_,
559 j_get_input_buffers_method_));
560 CHECK_EXCEPTION(jni);
561 if (IsNull(jni, input_buffers)) {
562 return WEBRTC_VIDEO_CODEC_ERROR;
563 }
564
565 switch (GetIntField(jni, *j_media_codec_video_encoder_,
566 j_color_format_field_)) {
567 case COLOR_FormatYUV420Planar:
568 encoder_fourcc_ = libyuv::FOURCC_YU12;
569 break;
570 case COLOR_FormatYUV420SemiPlanar:
571 case COLOR_QCOM_FormatYUV420SemiPlanar:
572 case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m:
573 encoder_fourcc_ = libyuv::FOURCC_NV12;
574 break;
575 default:
576 LOG(LS_ERROR) << "Wrong color format.";
577 return WEBRTC_VIDEO_CODEC_ERROR;
578 }
579 size_t num_input_buffers = jni->GetArrayLength(input_buffers);
580 RTC_CHECK(input_buffers_.empty())
581 << "Unexpected double InitEncode without Release";
582 input_buffers_.resize(num_input_buffers);
583 for (size_t i = 0; i < num_input_buffers; ++i) {
584 input_buffers_[i] =
585 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
586 int64_t yuv_buffer_capacity =
587 jni->GetDirectBufferCapacity(input_buffers_[i]);
588 CHECK_EXCEPTION(jni);
589 RTC_CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity";
590 }
591 }
592
593 inited_ = true;
594 codec_thread_->PostDelayed(kMediaCodecPollMs, this);
595 return WEBRTC_VIDEO_CODEC_OK;
596 }
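
Spelling out two details of the setup above, assuming hypothetical 640x480 input:

    yuv_size_ = width * height * 3 / 2 = 640 * 480 * 3 / 2 = 460800 bytes
    FOURCC_YU12 (I420): planar Y (640x480), then U (320x240), then V (320x240)
    FOURCC_NV12:        planar Y (640x480), then interleaved UV pairs (320x240)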
597
598 int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
599 const webrtc::VideoFrame& frame,
600 const std::vector<webrtc::FrameType>* frame_types) {
601 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
602 JNIEnv* jni = AttachCurrentThreadIfNeeded();
603 ScopedLocalRefFrame local_ref_frame(jni);
604
605 if (!inited_) {
606 return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
607 }
608
609 bool send_key_frame = false;
610 if (codec_mode_ == webrtc::kRealtimeVideo) {
611 ++frames_received_since_last_key_;
612 int64_t now_ms = GetCurrentTimeMs();
613 if (last_frame_received_ms_ != -1 &&
614 (now_ms - last_frame_received_ms_) > kFrameDiffThresholdMs) {
615 // Limit this to avoid requesting a key frame for every frame at very
616 // low framerates, where every frame diff exceeds kFrameDiffThresholdMs.
617 if (frames_received_since_last_key_ > kMinKeyFrameInterval) {
618 ALOGD << "Send key, frame diff: " << (now_ms - last_frame_received_ms_);
619 send_key_frame = true;
620 }
621 frames_received_since_last_key_ = 0;
622 }
623 last_frame_received_ms_ = now_ms;
624 }
625
626 frames_received_++;
627 if (!DeliverPendingOutputs(jni)) {
628 if (!ResetCodecOnCodecThread())
629 return WEBRTC_VIDEO_CODEC_ERROR;
630 }
631 if (frames_encoded_ < kMaxEncodedLogFrames) {
632 ALOGD << "Encoder frame in # " << (frames_received_ - 1) <<
633 ". TS: " << (int)(current_timestamp_us_ / 1000) <<
634 ". Q: " << frames_in_queue_ <<
635 ". Fps: " << last_set_fps_ <<
636 ". Kbps: " << last_set_bitrate_kbps_;
637 }
638
639 if (drop_next_input_frame_) {
640 ALOGW << "Encoder drop frame - failed callback.";
641 drop_next_input_frame_ = false;
642 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
643 frames_dropped_media_encoder_++;
644 OnDroppedFrame();
645 return WEBRTC_VIDEO_CODEC_OK;
646 }
647
648 RTC_CHECK(frame_types->size() == 1) << "Unexpected stream count";
649
650 // Drop the frame if too many frames have accumulated in the encoder
651 // input buffers, or if encoder latency exceeds MAX_ENCODER_LATENCY_MS.
652 if (frames_in_queue_ > 0 && last_input_timestamp_ms_ >= 0) {
653 int encoder_latency_ms = last_input_timestamp_ms_ -
654 last_output_timestamp_ms_;
655 if (frames_in_queue_ > MAX_ENCODER_Q_SIZE ||
656 encoder_latency_ms > MAX_ENCODER_LATENCY_MS) {
657 ALOGD << "Drop frame - encoder is behind by " << encoder_latency_ms <<
658 " ms. Q size: " << frames_in_queue_ << ". TS: " <<
659 (int)(current_timestamp_us_ / 1000) << ". Fps: " << last_set_fps_ <<
660 ". Consecutive drops: " << consecutive_full_queue_frame_drops_ ;
661 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
662 consecutive_full_queue_frame_drops_++;
663 if (consecutive_full_queue_frame_drops_ >=
664 ENCODER_STALL_FRAMEDROP_THRESHOLD) {
665 ALOGE << "Encoder got stuck. Reset.";
666 ResetCodecOnCodecThread();
667 return WEBRTC_VIDEO_CODEC_ERROR;
668 }
669 frames_dropped_media_encoder_++;
670 OnDroppedFrame();
671 return WEBRTC_VIDEO_CODEC_OK;
672 }
673 }
674 consecutive_full_queue_frame_drops_ = 0;
675
676 VideoFrame input_frame = frame;
677 if (scale_) {
678 // Check framerate before spatial resolution change.
679 quality_scaler_.OnEncodeFrame(frame);
680 const webrtc::QualityScaler::Resolution scaled_resolution =
681 quality_scaler_.GetScaledResolution();
682 if (scaled_resolution.width != frame.width() ||
683 scaled_resolution.height != frame.height()) {
684 if (frame.native_handle() != nullptr) {
685 rtc::scoped_refptr<webrtc::VideoFrameBuffer> scaled_buffer(
686 static_cast<AndroidTextureBuffer*>(
687 frame.video_frame_buffer().get())->ScaleAndRotate(
688 scaled_resolution.width,
689 scaled_resolution.height,
690 webrtc::kVideoRotation_0));
691 input_frame.set_video_frame_buffer(scaled_buffer);
692 } else {
693 input_frame = quality_scaler_.GetScaledFrame(frame);
694 }
695 }
696 }
697
698 if (!MaybeReconfigureEncoderOnCodecThread(input_frame)) {
699 ALOGE << "Failed to reconfigure encoder.";
700 return WEBRTC_VIDEO_CODEC_ERROR;
701 }
702
703 // Save time when input frame is sent to the encoder input.
704 frame_rtc_times_ms_.push_back(GetCurrentTimeMs());
705
706 const bool key_frame =
707 frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame;
708 bool encode_status = true;
709 if (!input_frame.native_handle()) {
710 int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_,
711 j_dequeue_input_buffer_method_);
712 CHECK_EXCEPTION(jni);
713 if (j_input_buffer_index == -1) {
714 // Video codec falls behind - no input buffer available.
715 ALOGW << "Encoder drop frame - no input buffers available";
716 frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
717 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
718 frames_dropped_media_encoder_++;
719 OnDroppedFrame();
720 return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887.
721 }
722 if (j_input_buffer_index == -2) {
723 ResetCodecOnCodecThread();
724 return WEBRTC_VIDEO_CODEC_ERROR;
725 }
726 encode_status = EncodeByteBufferOnCodecThread(jni, key_frame, input_frame,
727 j_input_buffer_index);
728 } else {
729 encode_status = EncodeTextureOnCodecThread(jni, key_frame, input_frame);
730 }
731
732 if (!encode_status) {
733 ALOGE << "Failed to encode frame with timestamp: " << input_frame.timestamp();
734 ResetCodecOnCodecThread();
735 return WEBRTC_VIDEO_CODEC_ERROR;
736 }
737
738 last_input_timestamp_ms_ =
739 current_timestamp_us_ / rtc::kNumMicrosecsPerMillisec;
740 frames_in_queue_++;
741
742 // Save input image timestamps for later output
743 timestamps_.push_back(input_frame.timestamp());
744 render_times_ms_.push_back(input_frame.render_time_ms());
745 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
746
747 if (!DeliverPendingOutputs(jni)) {
748 ALOGE << "Failed to deliver pending outputs.";
749 ResetCodecOnCodecThread();
750 return WEBRTC_VIDEO_CODEC_ERROR;
751 }
752 return WEBRTC_VIDEO_CODEC_OK;
753 }
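
A worked example of the frame-drop guard above, using the constants defined at the top of the file:

    frames_in_queue_ = 3 > MAX_ENCODER_Q_SIZE (2)                  -> drop frame
    encoder_latency_ms = 1070 - 990 = 80 > MAX_ENCODER_LATENCY_MS (70)
                                                                   -> drop frame
    ENCODER_STALL_FRAMEDROP_THRESHOLD (60) consecutive such drops  -> reset codec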
754
755 bool MediaCodecVideoEncoder::MaybeReconfigureEncoderOnCodecThread(
756 const webrtc::VideoFrame& frame) {
757 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
758
759 const bool is_texture_frame = frame.native_handle() != nullptr;
760 const bool reconfigure_due_to_format = is_texture_frame != use_surface_;
761 const bool reconfigure_due_to_size =
762 frame.width() != width_ || frame.height() != height_;
763
764 if (reconfigure_due_to_format) {
765 ALOGD << "Reconfigure encoder due to format change. "
766 << (use_surface_ ?
767 "Reconfiguring to encode from byte buffer." :
768 "Reconfiguring to encode from texture.");
769 LogStatistics(true);
770 }
771 if (reconfigure_due_to_size) {
772 ALOGW << "Reconfigure encoder due to frame resolution change from "
773 << width_ << " x " << height_ << " to " << frame.width() << " x "
774 << frame.height();
775 LogStatistics(true);
776 width_ = frame.width();
777 height_ = frame.height();
778 }
779
780 if (!reconfigure_due_to_format && !reconfigure_due_to_size)
781 return true;
782
783 ReleaseOnCodecThread();
784
785 return InitEncodeOnCodecThread(width_, height_, 0, 0, is_texture_frame) ==
786 WEBRTC_VIDEO_CODEC_OK;
787 }
788
789 bool MediaCodecVideoEncoder::EncodeByteBufferOnCodecThread(JNIEnv* jni,
790 bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index) {
791 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
792 RTC_CHECK(!use_surface_);
793
794 jobject j_input_buffer = input_buffers_[input_buffer_index];
795 uint8_t* yuv_buffer =
796 reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer));
797 CHECK_EXCEPTION(jni);
798 RTC_CHECK(yuv_buffer) << "Indirect buffer??";
799 RTC_CHECK(!libyuv::ConvertFromI420(
800 frame.buffer(webrtc::kYPlane), frame.stride(webrtc::kYPlane),
801 frame.buffer(webrtc::kUPlane), frame.stride(webrtc::kUPlane),
802 frame.buffer(webrtc::kVPlane), frame.stride(webrtc::kVPlane),
803 yuv_buffer, width_, width_, height_, encoder_fourcc_))
804 << "ConvertFromI420 failed";
805
806 bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
807 j_encode_buffer_method_,
808 key_frame,
809 input_buffer_index,
810 yuv_size_,
811 current_timestamp_us_);
812 CHECK_EXCEPTION(jni);
813 return encode_status;
814 }
815
816 bool MediaCodecVideoEncoder::EncodeTextureOnCodecThread(JNIEnv* jni,
817 bool key_frame, const webrtc::VideoFrame& frame) {
818 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
819 RTC_CHECK(use_surface_);
820 NativeHandleImpl* handle =
821 static_cast<NativeHandleImpl*>(frame.native_handle());
822 jfloatArray sampling_matrix = jni->NewFloatArray(16);
823 jni->SetFloatArrayRegion(sampling_matrix, 0, 16, handle->sampling_matrix);
824
825 bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
826 j_encode_texture_method_,
827 key_frame,
828 handle->oes_texture_id,
829 sampling_matrix,
830 current_timestamp_us_);
831 CHECK_EXCEPTION(jni);
832 return encode_status;
833 }
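
The 16 floats copied into |sampling_matrix| form a 4x4 texture-coordinate transform in OpenGL column-major order (it presumably originates from SurfaceTexture.getTransformMatrix(), which is outside this file). The identity transform, for orientation:

    {1, 0, 0, 0,   0, 1, 0, 0,   0, 0, 1, 0,   0, 0, 0, 1}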
834
835 int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread(
836 webrtc::EncodedImageCallback* callback) {
837 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
838 JNIEnv* jni = AttachCurrentThreadIfNeeded();
839 ScopedLocalRefFrame local_ref_frame(jni);
840 callback_ = callback;
841 return WEBRTC_VIDEO_CODEC_OK;
842 }
843
844 int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() {
845 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
846 if (!inited_) {
847 return WEBRTC_VIDEO_CODEC_OK;
848 }
849 JNIEnv* jni = AttachCurrentThreadIfNeeded();
850 ALOGD << "EncoderReleaseOnCodecThread: Frames received: " <<
851 frames_received_ << ". Encoded: " << frames_encoded_ <<
852 ". Dropped: " << frames_dropped_media_encoder_;
853 ScopedLocalRefFrame local_ref_frame(jni);
854 for (size_t i = 0; i < input_buffers_.size(); ++i)
855 jni->DeleteGlobalRef(input_buffers_[i]);
856 input_buffers_.clear();
857 jni->CallVoidMethod(*j_media_codec_video_encoder_, j_release_method_);
858 CHECK_EXCEPTION(jni);
859 rtc::MessageQueueManager::Clear(this);
860 inited_ = false;
861 use_surface_ = false;
862 ALOGD << "EncoderReleaseOnCodecThread done.";
863 return WEBRTC_VIDEO_CODEC_OK;
864 }
865
866 int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate,
867 uint32_t frame_rate) {
868 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
869 frame_rate = (frame_rate < MAX_ALLOWED_VIDEO_FPS) ?
870 frame_rate : MAX_ALLOWED_VIDEO_FPS;
871 if (last_set_bitrate_kbps_ == new_bit_rate &&
872 last_set_fps_ == frame_rate) {
873 return WEBRTC_VIDEO_CODEC_OK;
874 }
875 if (scale_) {
876 quality_scaler_.ReportFramerate(frame_rate);
877 }
878 JNIEnv* jni = AttachCurrentThreadIfNeeded();
879 ScopedLocalRefFrame local_ref_frame(jni);
880 if (new_bit_rate > 0) {
881 last_set_bitrate_kbps_ = new_bit_rate;
882 }
883 if (frame_rate > 0) {
884 last_set_fps_ = frame_rate;
885 }
886 bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
887 j_set_rates_method_,
888 last_set_bitrate_kbps_,
889 last_set_fps_);
890 CHECK_EXCEPTION(jni);
891 if (!ret) {
892 ResetCodecOnCodecThread();
893 return WEBRTC_VIDEO_CODEC_ERROR;
894 }
895 return WEBRTC_VIDEO_CODEC_OK;
896 }
897
898 int MediaCodecVideoEncoder::GetOutputBufferInfoIndex(
899 JNIEnv* jni,
900 jobject j_output_buffer_info) {
901 return GetIntField(jni, j_output_buffer_info, j_info_index_field_);
902 }
903
904 jobject MediaCodecVideoEncoder::GetOutputBufferInfoBuffer(
905 JNIEnv* jni,
906 jobject j_output_buffer_info) {
907 return GetObjectField(jni, j_output_buffer_info, j_info_buffer_field_);
908 }
909
910 bool MediaCodecVideoEncoder::GetOutputBufferInfoIsKeyFrame(
911 JNIEnv* jni,
912 jobject j_output_buffer_info) {
913 return GetBooleanField(jni, j_output_buffer_info, j_info_is_key_frame_field_);
914 }
915
916 jlong MediaCodecVideoEncoder::GetOutputBufferInfoPresentationTimestampUs(
917 JNIEnv* jni,
918 jobject j_output_buffer_info) {
919 return GetLongField(
920 jni, j_output_buffer_info, j_info_presentation_timestamp_us_field_);
921 }
922
923 bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
924 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
925 while (true) {
926 jobject j_output_buffer_info = jni->CallObjectMethod(
927 *j_media_codec_video_encoder_, j_dequeue_output_buffer_method_);
928 CHECK_EXCEPTION(jni);
929 if (IsNull(jni, j_output_buffer_info)) {
930 break;
931 }
932
933 int output_buffer_index =
934 GetOutputBufferInfoIndex(jni, j_output_buffer_info);
935 if (output_buffer_index == -1) {
936 ResetCodecOnCodecThread();
937 return false;
938 }
939
940 // Get key and config frame flags.
941 jobject j_output_buffer =
942 GetOutputBufferInfoBuffer(jni, j_output_buffer_info);
943 bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info);
944
945 // Get frame timestamps from a queue - for non config frames only.
946 int64_t frame_encoding_time_ms = 0;
947 last_output_timestamp_ms_ =
948 GetOutputBufferInfoPresentationTimestampUs(jni, j_output_buffer_info) /
949 1000;
950 if (frames_in_queue_ > 0) {
951 output_timestamp_ = timestamps_.front();
952 timestamps_.erase(timestamps_.begin());
953 output_render_time_ms_ = render_times_ms_.front();
954 render_times_ms_.erase(render_times_ms_.begin());
955 frame_encoding_time_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front();
956 frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
957 frames_in_queue_--;
958 }
959
960 // Extract payload.
961 size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer);
962 uint8_t* payload = reinterpret_cast<uint8_t*>(
963 jni->GetDirectBufferAddress(j_output_buffer));
964 CHECK_EXCEPTION(jni);
965
966 if (frames_encoded_ < kMaxEncodedLogFrames) {
967 int current_latency =
968 (int)(last_input_timestamp_ms_ - last_output_timestamp_ms_);
969 ALOGD << "Encoder frame out # " << frames_encoded_ <<
970 ". Key: " << key_frame <<
971 ". Size: " << payload_size <<
972 ". TS: " << (int)last_output_timestamp_ms_ <<
973 ". Latency: " << current_latency <<
974 ". EncTime: " << frame_encoding_time_ms;
975 }
976
977 // Callback - return encoded frame.
978 int32_t callback_status = 0;
979 if (callback_) {
980 scoped_ptr<webrtc::EncodedImage> image(
981 new webrtc::EncodedImage(payload, payload_size, payload_size));
982 image->_encodedWidth = width_;
983 image->_encodedHeight = height_;
984 image->_timeStamp = output_timestamp_;
985 image->capture_time_ms_ = output_render_time_ms_;
986 image->_frameType =
987 (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta);
988 image->_completeFrame = true;
989 image->adapt_reason_.quality_resolution_downscales =
990 scale_ ? quality_scaler_.downscale_shift() : -1;
991
992 webrtc::CodecSpecificInfo info;
993 memset(&info, 0, sizeof(info));
994 info.codecType = codecType_;
995 if (codecType_ == kVideoCodecVP8) {
996 info.codecSpecific.VP8.pictureId = picture_id_;
997 info.codecSpecific.VP8.nonReference = false;
998 info.codecSpecific.VP8.simulcastIdx = 0;
999 info.codecSpecific.VP8.temporalIdx = webrtc::kNoTemporalIdx;
1000 info.codecSpecific.VP8.layerSync = false;
1001 info.codecSpecific.VP8.tl0PicIdx = webrtc::kNoTl0PicIdx;
1002 info.codecSpecific.VP8.keyIdx = webrtc::kNoKeyIdx;
1003 } else if (codecType_ == kVideoCodecVP9) {
1004 if (key_frame) {
1005 gof_idx_ = 0;
1006 }
1007 info.codecSpecific.VP9.picture_id = picture_id_;
1008 info.codecSpecific.VP9.inter_pic_predicted = !key_frame;
1009 info.codecSpecific.VP9.flexible_mode = false;
1010 info.codecSpecific.VP9.ss_data_available = key_frame;
1011 info.codecSpecific.VP9.tl0_pic_idx = tl0_pic_idx_++;
1012 info.codecSpecific.VP9.temporal_idx = webrtc::kNoTemporalIdx;
1013 info.codecSpecific.VP9.spatial_idx = webrtc::kNoSpatialIdx;
1014 info.codecSpecific.VP9.temporal_up_switch = true;
1015 info.codecSpecific.VP9.inter_layer_predicted = false;
1016 info.codecSpecific.VP9.gof_idx =
1017 static_cast<uint8_t>(gof_idx_++ % gof_.num_frames_in_gof);
1018 info.codecSpecific.VP9.num_spatial_layers = 1;
1019 info.codecSpecific.VP9.spatial_layer_resolution_present = false;
1020 if (info.codecSpecific.VP9.ss_data_available) {
1021 info.codecSpecific.VP9.spatial_layer_resolution_present = true;
1022 info.codecSpecific.VP9.width[0] = width_;
1023 info.codecSpecific.VP9.height[0] = height_;
1024 info.codecSpecific.VP9.gof.CopyGofInfoVP9(gof_);
1025 }
1026 }
1027 picture_id_ = (picture_id_ + 1) & 0x7FFF;
1028
1029 // Generate a header describing a single fragment.
1030 webrtc::RTPFragmentationHeader header;
1031 memset(&header, 0, sizeof(header));
1032 if (codecType_ == kVideoCodecVP8 || codecType_ == kVideoCodecVP9) {
1033 header.VerifyAndAllocateFragmentationHeader(1);
1034 header.fragmentationOffset[0] = 0;
1035 header.fragmentationLength[0] = image->_length;
1036 header.fragmentationPlType[0] = 0;
1037 header.fragmentationTimeDiff[0] = 0;
1038 if (codecType_ == kVideoCodecVP8 && scale_) {
1039 int qp;
1040 if (webrtc::vp8::GetQp(payload, payload_size, &qp)) {
1041 current_acc_qp_ += qp;
1042 quality_scaler_.ReportQP(qp);
1043 }
1044 }
1045 } else if (codecType_ == kVideoCodecH264) {
1046 if (scale_) {
1047 h264_bitstream_parser_.ParseBitstream(payload, payload_size);
1048 int qp;
1049 if (h264_bitstream_parser_.GetLastSliceQp(&qp)) {
1050 current_acc_qp_ += qp;
1051 quality_scaler_.ReportQP(qp);
1052 }
1053 }
1054 // For H.264 search for start codes.
1055 int32_t scPositions[MAX_NALUS_PERFRAME + 1] = {};
1056 int32_t scPositionsLength = 0;
1057 int32_t scPosition = 0;
1058 while (scPositionsLength < MAX_NALUS_PERFRAME) {
1059 int32_t naluPosition = NextNaluPosition(
1060 payload + scPosition, payload_size - scPosition);
1061 if (naluPosition < 0) {
1062 break;
1063 }
1064 scPosition += naluPosition;
1065 scPositions[scPositionsLength++] = scPosition;
1066 scPosition += H264_SC_LENGTH;
1067 }
1068 if (scPositionsLength == 0) {
1069 ALOGE << "Start code not found!";
1070 ALOGE << "Data:" << image->_buffer[0] << " " << image->_buffer[1]
1071 << " " << image->_buffer[2] << " " << image->_buffer[3]
1072 << " " << image->_buffer[4] << " " << image->_buffer[5];
1073 ResetCodecOnCodecThread();
1074 return false;
1075 }
1076 scPositions[scPositionsLength] = payload_size;
1077 header.VerifyAndAllocateFragmentationHeader(scPositionsLength);
1078 for (int32_t i = 0; i < scPositionsLength; i++) {
1079 header.fragmentationOffset[i] = scPositions[i] + H264_SC_LENGTH;
1080 header.fragmentationLength[i] =
1081 scPositions[i + 1] - header.fragmentationOffset[i];
1082 header.fragmentationPlType[i] = 0;
1083 header.fragmentationTimeDiff[i] = 0;
1084 }
1085 }
1086
1087 callback_status = callback_->Encoded(*image, &info, &header);
1088 }
1089
1090 // Return output buffer back to the encoder.
1091 bool success = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
1092 j_release_output_buffer_method_,
1093 output_buffer_index);
1094 CHECK_EXCEPTION(jni);
1095 if (!success) {
1096 ResetCodecOnCodecThread();
1097 return false;
1098 }
1099
1100 // Calculate and print encoding statistics - every 3 seconds.
1101 frames_encoded_++;
1102 current_frames_++;
1103 current_bytes_ += payload_size;
1104 current_encoding_time_ms_ += frame_encoding_time_ms;
1105 LogStatistics(false);
1106
1107 if (callback_status > 0) {
1108 drop_next_input_frame_ = true;
1109 // Theoretically could handle callback_status<0 here, but unclear what
1110 // that would mean for us.
1111 }
1112 }
1113 return true;
1114 }
1115
1116 void MediaCodecVideoEncoder::LogStatistics(bool force_log) {
1117 int statistic_time_ms = GetCurrentTimeMs() - stat_start_time_ms_;
1118 if ((statistic_time_ms >= kMediaCodecStatisticsIntervalMs || force_log) &&
1119 current_frames_ > 0 && statistic_time_ms > 0) {
1120 int current_bitrate = current_bytes_ * 8 / statistic_time_ms;
1121 int current_fps =
1122 (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms;
1123 ALOGD << "Encoded frames: " << frames_encoded_ <<
1124 ". Bitrate: " << current_bitrate <<
1125 ", target: " << last_set_bitrate_kbps_ << " kbps" <<
1126 ", fps: " << current_fps <<
1127 ", encTime: " << (current_encoding_time_ms_ / current_frames_) <<
1128 ". QP: " << (current_acc_qp_ / current_frames_) <<
1129 " for last " << statistic_time_ms << " ms.";
1130 stat_start_time_ms_ = GetCurrentTimeMs();
1131 current_frames_ = 0;
1132 current_bytes_ = 0;
1133 current_acc_qp_ = 0;
1134 current_encoding_time_ms_ = 0;
1135 }
1136 }
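
A units check on the statistics above: bytes * 8 gives bits, and bits per millisecond equals kilobits per second, so |current_bitrate| is already in kbps. A hypothetical 3-second interval:

    current_bytes_ = 375000, statistic_time_ms = 3000
        -> current_bitrate = 375000 * 8 / 3000 = 1000 kbps
    current_frames_ = 90
        -> current_fps = (90 * 1000 + 1500) / 3000 = 30 (rounded to nearest)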
1137
1138 int32_t MediaCodecVideoEncoder::NextNaluPosition(
1139 uint8_t *buffer, size_t buffer_size) {
1140 if (buffer_size < H264_SC_LENGTH) {
1141 return -1;
1142 }
1143 uint8_t *head = buffer;
1144 // Set end buffer pointer to 4 bytes before actual buffer end so we can
1145 // access head[1], head[2] and head[3] in a loop without buffer overrun.
1146 uint8_t *end = buffer + buffer_size - H264_SC_LENGTH;
1147
1148 while (head < end) {
1149 if (head[0]) {
1150 head++;
1151 continue;
1152 }
1153 if (head[1]) { // got 00xx
1154 head += 2;
1155 continue;
1156 }
1157 if (head[2]) { // got 0000xx
1158 head += 3;
1159 continue;
1160 }
1161 if (head[3] != 0x01) { // got 000000xx
1162 head++; // xx != 1, continue searching.
1163 continue;
1164 }
1165 return (int32_t)(head - buffer);
1166 }
1167 return -1;
1168 }
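
NextNaluPosition() assumes the H.264 Annex-B byte-stream format with 4-byte start codes (hence H264_SC_LENGTH == 4). An illustrative payload and the fragmentation that the loop in DeliverPendingOutputs() derives from it:

    payload:  00 00 00 01 <SPS> 00 00 00 01 <PPS> 00 00 00 01 <slice>
              ^offset 0         ^offset k         ^offset m
    scPositions = {0, k, m}, scPositionsLength = 3
    fragmentationOffset[i] = scPositions[i] + H264_SC_LENGTH
    fragmentationLength[i] = scPositions[i + 1] - fragmentationOffset[i]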
1169
1170 void MediaCodecVideoEncoder::OnDroppedFrame() {
1171 // Report dropped frame to quality_scaler_.
1172 if (scale_)
1173 quality_scaler_.ReportDroppedFrame();
1174 }
1175
1176 int MediaCodecVideoEncoder::GetTargetFramerate() {
1177 return scale_ ? quality_scaler_.GetTargetFramerate() : -1;
1178 }
1179
1180 const char* MediaCodecVideoEncoder::ImplementationName() const {
1181 return "MediaCodec";
1182 }
1183
1184 MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() {
1185 JNIEnv* jni = AttachCurrentThreadIfNeeded();
1186 ScopedLocalRefFrame local_ref_frame(jni);
1187 jclass j_encoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoEncoder");
1188 supported_codecs_.clear();
1189
1190 bool is_vp8_hw_supported = jni->CallStaticBooleanMethod(
1191 j_encoder_class,
1192 GetStaticMethodID(jni, j_encoder_class, "isVp8HwSupported", "()Z"));
1193 CHECK_EXCEPTION(jni);
1194 if (is_vp8_hw_supported) {
1195 ALOGD << "VP8 HW Encoder supported.";
1196 supported_codecs_.push_back(VideoCodec(kVideoCodecVP8, "VP8",
1197 MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS));
1198 }
1199
1200 bool is_vp9_hw_supported = jni->CallStaticBooleanMethod(
1201 j_encoder_class,
1202 GetStaticMethodID(jni, j_encoder_class, "isVp9HwSupported", "()Z"));
1203 CHECK_EXCEPTION(jni);
1204 if (is_vp9_hw_supported) {
1205 ALOGD << "VP9 HW Encoder supported.";
1206 supported_codecs_.push_back(VideoCodec(kVideoCodecVP9, "VP9",
1207 MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS));
1208 }
1209
1210 bool is_h264_hw_supported = jni->CallStaticBooleanMethod(
1211 j_encoder_class,
1212 GetStaticMethodID(jni, j_encoder_class, "isH264HwSupported", "()Z"));
1213 CHECK_EXCEPTION(jni);
1214 if (is_h264_hw_supported) {
1215 ALOGD << "H.264 HW Encoder supported.";
1216 supported_codecs_.push_back(VideoCodec(kVideoCodecH264, "H264",
1217 MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS));
1218 }
1219 }
1220
1221 MediaCodecVideoEncoderFactory::~MediaCodecVideoEncoderFactory() {
1222 ALOGD << "MediaCodecVideoEncoderFactory dtor";
1223 }
1224
1225 void MediaCodecVideoEncoderFactory::SetEGLContext(
1226 JNIEnv* jni, jobject render_egl_context) {
1227 ALOGD << "MediaCodecVideoEncoderFactory::SetEGLContext";
1228 if (!egl_base_.CreateEglBase(jni, render_egl_context)) {
1229 ALOGW << "Invalid EGL context - HW surface encoding is disabled.";
1230 }
1231 }
1232
1233 webrtc::VideoEncoder* MediaCodecVideoEncoderFactory::CreateVideoEncoder(
1234 VideoCodecType type) {
1235 if (supported_codecs_.empty()) {
1236 ALOGW << "No HW video encoder for type " << (int)type;
1237 return nullptr;
1238 }
1239 for (std::vector<VideoCodec>::const_iterator it = supported_codecs_.begin();
1240 it != supported_codecs_.end(); ++it) {
1241 if (it->type == type) {
1242 ALOGD << "Create HW video encoder for type " << (int)type <<
1243 " (" << it->name << ").";
1244 return new MediaCodecVideoEncoder(AttachCurrentThreadIfNeeded(), type,
1245 egl_base_.egl_base_context());
1246 }
1247 }
1248 ALOGW << "Cannot find HW video encoder for type " << (int)type;
1249 return nullptr;
1250 }
1251
1252 const std::vector<MediaCodecVideoEncoderFactory::VideoCodec>&
1253 MediaCodecVideoEncoderFactory::codecs() const {
1254 return supported_codecs_;
1255 }
1256
1257 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder(
1258 webrtc::VideoEncoder* encoder) {
1259 ALOGD << "Destroy video encoder.";
1260 delete encoder;
1261 }
1262
1263 } // namespace webrtc_jni
1264