/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

// NOTICE: androidmediaencoder_jni.h must be included before
// androidmediacodeccommon.h to avoid build errors.
#include "webrtc/api/android/jni/androidmediaencoder_jni.h"

#include <algorithm>
#include <list>
#include <memory>

#include "third_party/libyuv/include/libyuv/convert.h"
#include "third_party/libyuv/include/libyuv/convert_from.h"
#include "third_party/libyuv/include/libyuv/video_common.h"
#include "webrtc/api/android/jni/androidmediacodeccommon.h"
#include "webrtc/api/android/jni/classreferenceholder.h"
#include "webrtc/api/android/jni/native_handle_impl.h"
#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/thread.h"
#include "webrtc/base/thread_checker.h"
#include "webrtc/base/timeutils.h"
#include "webrtc/common_types.h"
#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/modules/video_coding/utility/h264_bitstream_parser.h"
#include "webrtc/modules/video_coding/utility/quality_scaler.h"
#include "webrtc/modules/video_coding/utility/vp8_header_parser.h"
#include "webrtc/system_wrappers/include/field_trial.h"
#include "webrtc/system_wrappers/include/logcat_trace_context.h"

using rtc::Bind;
using rtc::Thread;
using rtc::ThreadManager;

using webrtc::CodecSpecificInfo;
using webrtc::EncodedImage;
using webrtc::VideoFrame;
using webrtc::RTPFragmentationHeader;
using webrtc::VideoCodec;
using webrtc::VideoCodecType;
using webrtc::kVideoCodecH264;
using webrtc::kVideoCodecVP8;
using webrtc::kVideoCodecVP9;
using webrtc::QualityScaler;

namespace webrtc_jni {

// H.264 start code length.
#define H264_SC_LENGTH 4
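// (The 4-byte Annex B start code is 0x00 0x00 0x00 0x01.)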
// Maximum allowed NALUs in one output frame.
#define MAX_NALUS_PERFRAME 32
// Maximum supported HW video encoder resolution.
#define MAX_VIDEO_WIDTH 1280
#define MAX_VIDEO_HEIGHT 1280
// Maximum supported HW video encoder fps.
#define MAX_VIDEO_FPS 30
// Maximum allowed fps value in SetRates() call.
#define MAX_ALLOWED_VIDEO_FPS 60
// Maximum allowed frames in encoder input queue.
#define MAX_ENCODER_Q_SIZE 2
// Maximum number of dropped frames caused by a full encoder queue; exceeding
// this threshold means the encoder has probably stalled and needs to be reset.
#define ENCODER_STALL_FRAMEDROP_THRESHOLD 60

// Logging macros.
#define TAG_ENCODER "MediaCodecVideoEncoder"
#ifdef TRACK_BUFFER_TIMING
#define ALOGV(...) \
  __android_log_print(ANDROID_LOG_VERBOSE, TAG_ENCODER, __VA_ARGS__)
#else
#define ALOGV(...)
#endif
#define ALOGD LOG_TAG(rtc::LS_INFO, TAG_ENCODER)
#define ALOGW LOG_TAG(rtc::LS_WARNING, TAG_ENCODER)
#define ALOGE LOG_TAG(rtc::LS_ERROR, TAG_ENCODER)

namespace {
// Maximum time between incoming frames before we request a key frame.
const size_t kFrameDiffThresholdMs = 1100;
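// Minimum number of frames that must elapse between key frames requested by
// the low-framerate heuristic in EncodeOnCodecThread().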
const int kMinKeyFrameInterval = 2;
}  // namespace

// MediaCodecVideoEncoder is a webrtc::VideoEncoder implementation that uses
// Android's MediaCodec SDK API behind the scenes to implement (hopefully)
// HW-backed video encode. This C++ class is implemented as a very thin shim,
// delegating all of the interesting work to org.webrtc.MediaCodecVideoEncoder.
// MediaCodecVideoEncoder is created, operated, and destroyed on a single
// thread, currently the libjingle Worker thread.
class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
                               public rtc::MessageHandler {
 public:
  virtual ~MediaCodecVideoEncoder();
  MediaCodecVideoEncoder(JNIEnv* jni,
                         VideoCodecType codecType,
                         jobject egl_context);

  // webrtc::VideoEncoder implementation. Everything trampolines to
  // |codec_thread_| for execution.
  int32_t InitEncode(const webrtc::VideoCodec* codec_settings,
                     int32_t /* number_of_cores */,
                     size_t /* max_payload_size */) override;
  int32_t Encode(const webrtc::VideoFrame& input_image,
                 const webrtc::CodecSpecificInfo* /* codec_specific_info */,
                 const std::vector<webrtc::FrameType>* frame_types) override;
  int32_t RegisterEncodeCompleteCallback(
      webrtc::EncodedImageCallback* callback) override;
  int32_t Release() override;
  int32_t SetChannelParameters(uint32_t /* packet_loss */,
                               int64_t /* rtt */) override;
  int32_t SetRates(uint32_t new_bit_rate, uint32_t frame_rate) override;

  // rtc::MessageHandler implementation.
  void OnMessage(rtc::Message* msg) override;

  void OnDroppedFrame() override;

  bool SupportsNativeHandle() const override { return egl_context_ != nullptr; }
  const char* ImplementationName() const override;

 private:
  // ResetCodecOnCodecThread() calls ReleaseOnCodecThread() and
  // InitEncodeOnCodecThread() in an attempt to restore the codec to an
  // operable state. Necessary after all manner of OMX-layer errors.
  bool ResetCodecOnCodecThread();

  // Implementation of webrtc::VideoEncoder methods above, all running on the
  // codec thread exclusively.
  //
  // If width==0 then this is assumed to be a re-initialization and the
  // previously-current values are reused instead of the passed parameters
  // (makes it easier to reason about thread-safety).
  int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps,
                                  bool use_surface);
  // Reconfigure to match |frame| in width, height. Also reconfigures the
  // encoder if |frame| is a texture/byte buffer and the encoder is initialized
  // for byte buffer/texture. Returns false if reconfiguring fails.
  bool MaybeReconfigureEncoderOnCodecThread(const webrtc::VideoFrame& frame);
  int32_t EncodeOnCodecThread(
      const webrtc::VideoFrame& input_image,
      const std::vector<webrtc::FrameType>* frame_types,
      const int64_t frame_input_time_ms);
  bool EncodeByteBufferOnCodecThread(JNIEnv* jni,
      bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index);
  bool EncodeTextureOnCodecThread(JNIEnv* jni,
      bool key_frame, const webrtc::VideoFrame& frame);

  int32_t RegisterEncodeCompleteCallbackOnCodecThread(
      webrtc::EncodedImageCallback* callback);
  int32_t ReleaseOnCodecThread();
  int32_t SetRatesOnCodecThread(uint32_t new_bit_rate, uint32_t frame_rate);
  void OnDroppedFrameOnCodecThread();

  // Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members.
  int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info);
  jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info);
  bool GetOutputBufferInfoIsKeyFrame(JNIEnv* jni, jobject j_output_buffer_info);
  jlong GetOutputBufferInfoPresentationTimestampUs(
      JNIEnv* jni, jobject j_output_buffer_info);

  // Deliver any outputs pending in the MediaCodec to our |callback_| and
  // return true on success.
  bool DeliverPendingOutputs(JNIEnv* jni);

  // Search for H.264 start codes.
  int32_t NextNaluPosition(uint8_t *buffer, size_t buffer_size);

  // Displays encoder statistics.
  void LogStatistics(bool force_log);

  // Type of video codec.
  VideoCodecType codecType_;

  // Valid all the time since RegisterEncodeCompleteCallback() Invoke()s to
  // |codec_thread_| synchronously.
  webrtc::EncodedImageCallback* callback_;

  // State that is constant for the lifetime of this object once the ctor
  // returns.
  std::unique_ptr<Thread>
      codec_thread_;  // Thread on which to operate MediaCodec.
  rtc::ThreadChecker codec_thread_checker_;
  ScopedGlobalRef<jclass> j_media_codec_video_encoder_class_;
  ScopedGlobalRef<jobject> j_media_codec_video_encoder_;
  jmethodID j_init_encode_method_;
  jmethodID j_get_input_buffers_method_;
  jmethodID j_dequeue_input_buffer_method_;
  jmethodID j_encode_buffer_method_;
  jmethodID j_encode_texture_method_;
  jmethodID j_release_method_;
  jmethodID j_set_rates_method_;
  jmethodID j_dequeue_output_buffer_method_;
  jmethodID j_release_output_buffer_method_;
  jfieldID j_color_format_field_;
  jfieldID j_info_index_field_;
  jfieldID j_info_buffer_field_;
  jfieldID j_info_is_key_frame_field_;
  jfieldID j_info_presentation_timestamp_us_field_;

  // State that is valid only between InitEncode() and the next Release().
  // Touched only on codec_thread_ so no explicit synchronization necessary.
  int width_;   // Frame width in pixels.
  int height_;  // Frame height in pixels.
  bool inited_;
  bool use_surface_;
  uint16_t picture_id_;
  enum libyuv::FourCC encoder_fourcc_;  // Encoder color space format.
  int last_set_bitrate_kbps_;  // Last-requested bitrate in kbps.
  int last_set_fps_;  // Last-requested frame rate.
  int64_t current_timestamp_us_;  // Current frame timestamp in us.
  int frames_received_;  // Number of frames received by encoder.
  int frames_encoded_;  // Number of frames encoded by encoder.
  int frames_dropped_media_encoder_;  // Number of frames dropped by encoder.
  // Number of consecutive dropped frames caused by a full queue.
  int consecutive_full_queue_frame_drops_;
  int64_t stat_start_time_ms_;  // Start time for statistics.
  int current_frames_;  // Number of frames in the current statistics interval.
  int current_bytes_;  // Encoded bytes in the current statistics interval.
  int current_acc_qp_;  // Accumulated QP in the current statistics interval.
  int current_encoding_time_ms_;  // Encoding time in the current interval.
  int64_t last_input_timestamp_ms_;  // Timestamp of last received yuv frame.
  int64_t last_output_timestamp_ms_;  // Timestamp of last encoded frame.

  struct InputFrameInfo {
    InputFrameInfo(int64_t encode_start_time,
                   int32_t frame_timestamp,
                   int64_t frame_render_time_ms,
                   webrtc::VideoRotation rotation)
        : encode_start_time(encode_start_time),
          frame_timestamp(frame_timestamp),
          frame_render_time_ms(frame_render_time_ms),
          rotation(rotation) {}
    // Time when the video frame was sent to the encoder input.
    const int64_t encode_start_time;

    // Input frame information.
    const int32_t frame_timestamp;
    const int64_t frame_render_time_ms;
    const webrtc::VideoRotation rotation;
  };
  std::list<InputFrameInfo> input_frame_infos_;
  int32_t output_timestamp_;       // Last output frame timestamp from
                                   // |input_frame_infos_|.
  int64_t output_render_time_ms_;  // Last output frame render time from
                                   // |input_frame_infos_|.
  webrtc::VideoRotation output_rotation_;  // Last output frame rotation from
                                           // |input_frame_infos_|.
  // Frame size in bytes fed to MediaCodec.
  int yuv_size_;
  // True from when a callback_->Encoded() call returns a positive value until
  // the next Encode() call, which is then dropped.
  bool drop_next_input_frame_;
  // Global references; must be deleted in Release().
  std::vector<jobject> input_buffers_;
  QualityScaler quality_scaler_;
  // Dynamic resolution change, off by default.
  bool scale_;

  // H264 bitstream parser, used to extract QP from encoded bitstreams.
  webrtc::H264BitstreamParser h264_bitstream_parser_;

  // VP9 variables to populate codec specific structure.
  webrtc::GofInfoVP9 gof_;  // Contains each frame's temporal information for
                            // non-flexible VP9 mode.
  uint8_t tl0_pic_idx_;
  size_t gof_idx_;

  // EGL context - owned by factory, should not be allocated/destroyed
  // by MediaCodecVideoEncoder.
  jobject egl_context_;

  // Temporary fix for VP8.
  // Sends a key frame if frames are largely spaced apart (possibly
  // corresponding to a large image change).
  int64_t last_frame_received_ms_;
  int frames_received_since_last_key_;
  webrtc::VideoCodecMode codec_mode_;
};

MediaCodecVideoEncoder::~MediaCodecVideoEncoder() {
  // Call Release() to ensure no more callbacks to us after we are deleted.
  Release();
}

MediaCodecVideoEncoder::MediaCodecVideoEncoder(
    JNIEnv* jni, VideoCodecType codecType, jobject egl_context) :
    codecType_(codecType),
    callback_(NULL),
    codec_thread_(new Thread()),
    j_media_codec_video_encoder_class_(
        jni,
        FindClass(jni, "org/webrtc/MediaCodecVideoEncoder")),
    j_media_codec_video_encoder_(
        jni,
        jni->NewObject(*j_media_codec_video_encoder_class_,
                       GetMethodID(jni,
                                   *j_media_codec_video_encoder_class_,
                                   "<init>",
                                   "()V"))),
    inited_(false),
    use_surface_(false),
    picture_id_(0),
    egl_context_(egl_context) {
  ScopedLocalRefFrame local_ref_frame(jni);
  // It would be nice to avoid spinning up a new thread per MediaCodec, and
  // instead re-use e.g. the PeerConnectionFactory's |worker_thread_|, but bug
  // 2732 means that deadlocks abound. This class synchronously trampolines
  // to |codec_thread_|, so if anything else can be coming to _us_ from
  // |codec_thread_|, or from any thread holding the |_sendCritSect| described
  // in the bug, we have a problem. For now work around that with a dedicated
  // thread.
  codec_thread_->SetName("MediaCodecVideoEncoder", NULL);
  RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoEncoder";
  codec_thread_checker_.DetachFromThread();
  jclass j_output_buffer_info_class =
      FindClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
  j_init_encode_method_ = GetMethodID(
      jni,
      *j_media_codec_video_encoder_class_,
      "initEncode",
      "(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;"
      "IIIILorg/webrtc/EglBase14$Context;)Z");
  j_get_input_buffers_method_ = GetMethodID(
      jni,
      *j_media_codec_video_encoder_class_,
      "getInputBuffers",
      "()[Ljava/nio/ByteBuffer;");
  j_dequeue_input_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_encoder_class_, "dequeueInputBuffer", "()I");
  j_encode_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_encoder_class_, "encodeBuffer", "(ZIIJ)Z");
  j_encode_texture_method_ = GetMethodID(
      jni, *j_media_codec_video_encoder_class_, "encodeTexture",
      "(ZI[FJ)Z");
  j_release_method_ =
      GetMethodID(jni, *j_media_codec_video_encoder_class_, "release", "()V");
  j_set_rates_method_ = GetMethodID(
      jni, *j_media_codec_video_encoder_class_, "setRates", "(II)Z");
  j_dequeue_output_buffer_method_ = GetMethodID(
      jni,
      *j_media_codec_video_encoder_class_,
      "dequeueOutputBuffer",
      "()Lorg/webrtc/MediaCodecVideoEncoder$OutputBufferInfo;");
  j_release_output_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_encoder_class_, "releaseOutputBuffer", "(I)Z");

  j_color_format_field_ =
      GetFieldID(jni, *j_media_codec_video_encoder_class_, "colorFormat", "I");
  j_info_index_field_ =
      GetFieldID(jni, j_output_buffer_info_class, "index", "I");
  j_info_buffer_field_ = GetFieldID(
      jni, j_output_buffer_info_class, "buffer", "Ljava/nio/ByteBuffer;");
  j_info_is_key_frame_field_ =
      GetFieldID(jni, j_output_buffer_info_class, "isKeyFrame", "Z");
  j_info_presentation_timestamp_us_field_ = GetFieldID(
      jni, j_output_buffer_info_class, "presentationTimestampUs", "J");
  CHECK_EXCEPTION(jni) << "MediaCodecVideoEncoder ctor failed";
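  // rand() seeds the initial |picture_id_| and |tl0_pic_idx_| chosen in
  // InitEncodeOnCodecThread(); AllowBlockingCalls() marks the calling thread
  // as allowed to make the synchronous Invoke() calls used throughout this
  // class.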
  srand(time(NULL));
  AllowBlockingCalls();
}

int32_t MediaCodecVideoEncoder::InitEncode(
    const webrtc::VideoCodec* codec_settings,
    int32_t /* number_of_cores */,
    size_t /* max_payload_size */) {
  if (codec_settings == NULL) {
    ALOGE << "NULL VideoCodec instance";
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  // Factory should guard against other codecs being used with us.
  RTC_CHECK(codec_settings->codecType == codecType_)
      << "Unsupported codec " << codec_settings->codecType << " for "
      << codecType_;

  codec_mode_ = codec_settings->mode;
  int init_width = codec_settings->width;
  int init_height = codec_settings->height;
  // Scaling is disabled for VP9, optional for VP8 (automaticResizeOn), and
  // enabled for all other codec types (currently H.264).
  // TODO(pbos): Extract automaticResizeOn out of VP8 settings.
  scale_ = false;
  if (codecType_ == kVideoCodecVP8) {
    scale_ = codec_settings->codecSpecific.VP8.automaticResizeOn;
  } else if (codecType_ != kVideoCodecVP9) {
    scale_ = true;
  }

  ALOGD << "InitEncode request: " << init_width << " x " << init_height;
  ALOGD << "Encoder automatic resize " << (scale_ ? "enabled" : "disabled");

  if (scale_) {
    if (codecType_ == kVideoCodecVP8) {
      quality_scaler_.Init(
          QualityScaler::kLowVp8QpThreshold, QualityScaler::kBadVp8QpThreshold,
          codec_settings->startBitrate, codec_settings->width,
          codec_settings->height, codec_settings->maxFramerate);
    } else if (codecType_ == kVideoCodecH264) {
      quality_scaler_.Init(QualityScaler::kLowH264QpThreshold,
                           QualityScaler::kBadH264QpThreshold,
                           codec_settings->startBitrate, codec_settings->width,
                           codec_settings->height,
                           codec_settings->maxFramerate);
    } else {
      // When adding support for new hardware codecs, also configure their QP
      // thresholds for scaling.
      RTC_NOTREACHED() << "Unsupported codec without configured QP thresholds.";
      scale_ = false;
    }
    QualityScaler::Resolution res = quality_scaler_.GetScaledResolution();
    init_width = res.width;
    init_height = res.height;
    ALOGD << "Scaled resolution: " << init_width << " x " << init_height;
  }

  return codec_thread_->Invoke<int32_t>(
      RTC_FROM_HERE,
      Bind(&MediaCodecVideoEncoder::InitEncodeOnCodecThread, this, init_width,
           init_height, codec_settings->startBitrate,
           codec_settings->maxFramerate,
           codec_settings->expect_encode_from_texture));
}

int32_t MediaCodecVideoEncoder::Encode(
    const webrtc::VideoFrame& frame,
    const webrtc::CodecSpecificInfo* /* codec_specific_info */,
    const std::vector<webrtc::FrameType>* frame_types) {
  return codec_thread_->Invoke<int32_t>(
      RTC_FROM_HERE, Bind(&MediaCodecVideoEncoder::EncodeOnCodecThread, this,
                          frame, frame_types, rtc::TimeMillis()));
}

int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallback(
    webrtc::EncodedImageCallback* callback) {
  return codec_thread_->Invoke<int32_t>(
      RTC_FROM_HERE,
      Bind(&MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread,
           this, callback));
}

int32_t MediaCodecVideoEncoder::Release() {
  ALOGD << "EncoderRelease request";
  return codec_thread_->Invoke<int32_t>(
      RTC_FROM_HERE, Bind(&MediaCodecVideoEncoder::ReleaseOnCodecThread, this));
}

int32_t MediaCodecVideoEncoder::SetChannelParameters(uint32_t /* packet_loss */,
                                                     int64_t /* rtt */) {
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t MediaCodecVideoEncoder::SetRates(uint32_t new_bit_rate,
                                         uint32_t frame_rate) {
  return codec_thread_->Invoke<int32_t>(
      RTC_FROM_HERE, Bind(&MediaCodecVideoEncoder::SetRatesOnCodecThread, this,
                          new_bit_rate, frame_rate));
}

void MediaCodecVideoEncoder::OnMessage(rtc::Message* msg) {
  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);

  // We only ever send one message to |this| directly (not through a Bind()'d
  // functor), so expect no ID/data.
  RTC_CHECK(!msg->message_id) << "Unexpected message!";
  RTC_CHECK(!msg->pdata) << "Unexpected message!";
  if (!inited_) {
    return;
  }

  // It would be nice to recover from a failure here if one happened, but it's
  // unclear how to signal such a failure to the app, so instead we stay silent
  // about it and let the next app-called API method reveal the borkedness.
  DeliverPendingOutputs(jni);

  // If there are no more frames to deliver, we can poll at a lower rate.
  if (input_frame_infos_.empty()) {
    codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollNoFramesMs, this);
  } else {
    codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollMs, this);
  }

  // Log statistics here so they are reported even if no frames are being
  // delivered.
  LogStatistics(false);
}

bool MediaCodecVideoEncoder::ResetCodecOnCodecThread() {
  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
  ALOGE << "ResetOnCodecThread";
  if (ReleaseOnCodecThread() != WEBRTC_VIDEO_CODEC_OK ||
      InitEncodeOnCodecThread(width_, height_, 0, 0, false) !=
          WEBRTC_VIDEO_CODEC_OK) {
    // TODO(fischman): wouldn't it be nice if there was a way to gracefully
    // degrade to a SW encoder at this point? There isn't one AFAICT :(
    // https://code.google.com/p/webrtc/issues/detail?id=2920
    return false;
  }
  return true;
}

int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
    int width, int height, int kbps, int fps, bool use_surface) {
  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
  RTC_CHECK(!use_surface || egl_context_ != nullptr) << "EGL context not set.";
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);

  ALOGD << "InitEncodeOnCodecThread Type: " << (int)codecType_ << ", " <<
      width << " x " << height << ". Bitrate: " << kbps <<
      " kbps. Fps: " << fps;
  if (kbps == 0) {
    kbps = last_set_bitrate_kbps_;
  }
  if (fps == 0) {
    fps = MAX_VIDEO_FPS;
  }

  width_ = width;
  height_ = height;
  last_set_bitrate_kbps_ = kbps;
  last_set_fps_ = (fps < MAX_VIDEO_FPS) ? fps : MAX_VIDEO_FPS;
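  // Both I420 and NV12 carry 12 bits per pixel: a full-resolution Y plane
  // followed by 2x2-subsampled chroma, hence width * height * 3 / 2 below.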
  yuv_size_ = width_ * height_ * 3 / 2;
  frames_received_ = 0;
  frames_encoded_ = 0;
  frames_dropped_media_encoder_ = 0;
  consecutive_full_queue_frame_drops_ = 0;
  current_timestamp_us_ = 0;
  stat_start_time_ms_ = rtc::TimeMillis();
  current_frames_ = 0;
  current_bytes_ = 0;
  current_acc_qp_ = 0;
  current_encoding_time_ms_ = 0;
  last_input_timestamp_ms_ = -1;
  last_output_timestamp_ms_ = -1;
  output_timestamp_ = 0;
  output_render_time_ms_ = 0;
  input_frame_infos_.clear();
  drop_next_input_frame_ = false;
  use_surface_ = use_surface;
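  // VP8/VP9 RTP picture IDs are 15 bits wide, hence the 0x7FFF mask; starting
  // from a random value makes it unlikely that a receiver confuses pictures
  // across encoder restarts.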
  picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF;
  gof_.SetGofInfoVP9(webrtc::TemporalStructureMode::kTemporalStructureMode1);
  tl0_pic_idx_ = static_cast<uint8_t>(rand());
  gof_idx_ = 0;
  last_frame_received_ms_ = -1;
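  // Start at the threshold so that the first over-threshold frame gap after
  // initialization may immediately trigger a key frame.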
  frames_received_since_last_key_ = kMinKeyFrameInterval;

  // We enforce no extra stride/padding in the format creation step.
  jobject j_video_codec_enum = JavaEnumFromIndexAndClassName(
      jni, "MediaCodecVideoEncoder$VideoCodecType", codecType_);
  const bool encode_status = jni->CallBooleanMethod(
      *j_media_codec_video_encoder_, j_init_encode_method_,
      j_video_codec_enum, width, height, kbps, fps,
      (use_surface ? egl_context_ : nullptr));
  if (!encode_status) {
    ALOGE << "Failed to configure encoder.";
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  CHECK_EXCEPTION(jni);

  if (!use_surface) {
    jobjectArray input_buffers = reinterpret_cast<jobjectArray>(
        jni->CallObjectMethod(*j_media_codec_video_encoder_,
            j_get_input_buffers_method_));
    CHECK_EXCEPTION(jni);
    if (IsNull(jni, input_buffers)) {
      return WEBRTC_VIDEO_CODEC_ERROR;
    }

    switch (GetIntField(jni, *j_media_codec_video_encoder_,
        j_color_format_field_)) {
      case COLOR_FormatYUV420Planar:
        encoder_fourcc_ = libyuv::FOURCC_YU12;
        break;
      case COLOR_FormatYUV420SemiPlanar:
      case COLOR_QCOM_FormatYUV420SemiPlanar:
      case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m:
        encoder_fourcc_ = libyuv::FOURCC_NV12;
        break;
      default:
        LOG(LS_ERROR) << "Wrong color format.";
        return WEBRTC_VIDEO_CODEC_ERROR;
    }
    size_t num_input_buffers = jni->GetArrayLength(input_buffers);
    RTC_CHECK(input_buffers_.empty())
        << "Unexpected double InitEncode without Release";
    input_buffers_.resize(num_input_buffers);
    for (size_t i = 0; i < num_input_buffers; ++i) {
      input_buffers_[i] =
          jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
      int64_t yuv_buffer_capacity =
          jni->GetDirectBufferCapacity(input_buffers_[i]);
      CHECK_EXCEPTION(jni);
      RTC_CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity";
    }
  }

  inited_ = true;
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
    const webrtc::VideoFrame& frame,
    const std::vector<webrtc::FrameType>* frame_types,
    const int64_t frame_input_time_ms) {
  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);

  if (!inited_) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }

  bool send_key_frame = false;
  if (codec_mode_ == webrtc::kRealtimeVideo) {
    ++frames_received_since_last_key_;
    int64_t now_ms = rtc::TimeMillis();
    if (last_frame_received_ms_ != -1 &&
        (now_ms - last_frame_received_ms_) > kFrameDiffThresholdMs) {
      // Limit how often this triggers, so that a key frame is not requested
      // for every frame at very low framerates (where every frame diff may
      // exceed kFrameDiffThresholdMs).
      if (frames_received_since_last_key_ > kMinKeyFrameInterval) {
        ALOGD << "Send key, frame diff: " << (now_ms - last_frame_received_ms_);
        send_key_frame = true;
      }
      frames_received_since_last_key_ = 0;
    }
    last_frame_received_ms_ = now_ms;
  }

  frames_received_++;
  if (!DeliverPendingOutputs(jni)) {
    if (!ResetCodecOnCodecThread())
      return WEBRTC_VIDEO_CODEC_ERROR;
  }
  if (frames_encoded_ < kMaxEncodedLogFrames) {
    ALOGD << "Encoder frame in # " << (frames_received_ - 1)
          << ". TS: " << (int)(current_timestamp_us_ / 1000)
          << ". Q: " << input_frame_infos_.size() << ". Fps: " << last_set_fps_
          << ". Kbps: " << last_set_bitrate_kbps_;
  }

  if (drop_next_input_frame_) {
    ALOGW << "Encoder drop frame - failed callback.";
    drop_next_input_frame_ = false;
    current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
    frames_dropped_media_encoder_++;
    OnDroppedFrameOnCodecThread();
    return WEBRTC_VIDEO_CODEC_OK;
  }

  RTC_CHECK(frame_types->size() == 1) << "Unexpected stream count";

  // Check if we accumulated too many frames in encoder input buffers and drop
  // the frame if so.
  if (input_frame_infos_.size() > MAX_ENCODER_Q_SIZE) {
    ALOGD << "Already " << input_frame_infos_.size()
          << " frames in the queue, dropping"
          << ". TS: " << (int)(current_timestamp_us_ / 1000)
          << ". Fps: " << last_set_fps_
          << ". Consecutive drops: " << consecutive_full_queue_frame_drops_;
    current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
    consecutive_full_queue_frame_drops_++;
    if (consecutive_full_queue_frame_drops_ >=
        ENCODER_STALL_FRAMEDROP_THRESHOLD) {
      ALOGE << "Encoder got stuck. Reset.";
      ResetCodecOnCodecThread();
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
    frames_dropped_media_encoder_++;
    OnDroppedFrameOnCodecThread();
    return WEBRTC_VIDEO_CODEC_OK;
  }
  consecutive_full_queue_frame_drops_ = 0;

  rtc::scoped_refptr<webrtc::VideoFrameBuffer> input_buffer(
      frame.video_frame_buffer());
  if (scale_) {
    // Check framerate before spatial resolution change.
    quality_scaler_.OnEncodeFrame(frame.width(), frame.height());
    const webrtc::QualityScaler::Resolution scaled_resolution =
        quality_scaler_.GetScaledResolution();
    if (scaled_resolution.width != frame.width() ||
        scaled_resolution.height != frame.height()) {
      if (input_buffer->native_handle() != nullptr) {
        input_buffer = static_cast<AndroidTextureBuffer*>(input_buffer.get())
                           ->CropScaleAndRotate(frame.width(), frame.height(),
                                                0, 0,
                                                scaled_resolution.width,
                                                scaled_resolution.height,
                                                webrtc::kVideoRotation_0);
      } else {
        input_buffer = quality_scaler_.GetScaledBuffer(input_buffer);
      }
    }
  }

  VideoFrame input_frame(input_buffer, frame.timestamp(),
                         frame.render_time_ms(), frame.rotation());

  if (!MaybeReconfigureEncoderOnCodecThread(input_frame)) {
    ALOGE << "Failed to reconfigure encoder.";
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  const bool key_frame =
      frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame;
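  // Note: |frame_types| holds exactly one entry (checked above; no simulcast
  // here), so this either honors the caller's key frame request or the
  // low-framerate heuristic above.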
  bool encode_status = true;
  if (!input_frame.video_frame_buffer()->native_handle()) {
    int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_,
        j_dequeue_input_buffer_method_);
    CHECK_EXCEPTION(jni);
    if (j_input_buffer_index == -1) {
      // Video codec falls behind - no input buffer available.
      ALOGW << "Encoder drop frame - no input buffers available";
      if (frames_received_ > 1) {
        current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
        frames_dropped_media_encoder_++;
        OnDroppedFrameOnCodecThread();
      } else {
        // Input buffers are not ready after codec initialization, HW is still
        // allocating them - this is expected and should not be reported as a
        // frame drop.
        frames_received_ = 0;
      }
      return WEBRTC_VIDEO_CODEC_OK;  // TODO(fischman): see webrtc bug 2887.
    } else if (j_input_buffer_index == -2) {
      ResetCodecOnCodecThread();
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
    encode_status = EncodeByteBufferOnCodecThread(jni, key_frame, input_frame,
        j_input_buffer_index);
  } else {
    encode_status = EncodeTextureOnCodecThread(jni, key_frame, input_frame);
  }

  if (!encode_status) {
    ALOGE << "Failed to encode frame with timestamp: "
          << input_frame.timestamp();
    ResetCodecOnCodecThread();
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  // Save input image timestamps for later output.
  input_frame_infos_.emplace_back(
      frame_input_time_ms, input_frame.timestamp(),
      input_frame.render_time_ms(), input_frame.rotation());

  last_input_timestamp_ms_ =
      current_timestamp_us_ / rtc::kNumMicrosecsPerMillisec;

  current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;

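  // Restart the output polling timer: drop any poll message already queued
  // for |this| and schedule a fresh poll at the higher frame-pending rate.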
  codec_thread_->Clear(this);
  codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollMs, this);

  if (!DeliverPendingOutputs(jni)) {
    ALOGE << "Failed to deliver pending outputs.";
    ResetCodecOnCodecThread();
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

bool MediaCodecVideoEncoder::MaybeReconfigureEncoderOnCodecThread(
    const webrtc::VideoFrame& frame) {
  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());

  const bool is_texture_frame =
      frame.video_frame_buffer()->native_handle() != nullptr;
  const bool reconfigure_due_to_format = is_texture_frame != use_surface_;
  const bool reconfigure_due_to_size =
      frame.width() != width_ || frame.height() != height_;

  if (reconfigure_due_to_format) {
    ALOGD << "Reconfigure encoder due to format change. "
          << (use_surface_ ?
              "Reconfiguring to encode from byte buffer." :
              "Reconfiguring to encode from texture.");
    LogStatistics(true);
  }
  if (reconfigure_due_to_size) {
    ALOGW << "Reconfigure encoder due to frame resolution change from "
          << width_ << " x " << height_ << " to " << frame.width() << " x "
          << frame.height();
    LogStatistics(true);
    width_ = frame.width();
    height_ = frame.height();
  }

  if (!reconfigure_due_to_format && !reconfigure_due_to_size)
    return true;

  ReleaseOnCodecThread();

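  // Passing 0 for bitrate and fps makes InitEncodeOnCodecThread() reuse the
  // last-set values.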
  return InitEncodeOnCodecThread(width_, height_, 0, 0, is_texture_frame) ==
      WEBRTC_VIDEO_CODEC_OK;
}

bool MediaCodecVideoEncoder::EncodeByteBufferOnCodecThread(JNIEnv* jni,
    bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index) {
  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
  RTC_CHECK(!use_surface_);

  jobject j_input_buffer = input_buffers_[input_buffer_index];
  uint8_t* yuv_buffer =
      reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer));
  CHECK_EXCEPTION(jni);
  RTC_CHECK(yuv_buffer) << "Indirect buffer??";
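  // Repack the I420 input into the layout the codec requested at init time
  // (YU12 or NV12, see |encoder_fourcc_|); the destination buffer is tightly
  // packed, so its stride equals |width_|.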
  RTC_CHECK(!libyuv::ConvertFromI420(
      frame.video_frame_buffer()->DataY(),
      frame.video_frame_buffer()->StrideY(),
      frame.video_frame_buffer()->DataU(),
      frame.video_frame_buffer()->StrideU(),
      frame.video_frame_buffer()->DataV(),
      frame.video_frame_buffer()->StrideV(),
      yuv_buffer, width_, width_, height_, encoder_fourcc_))
      << "ConvertFromI420 failed";

  bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
                                              j_encode_buffer_method_,
                                              key_frame,
                                              input_buffer_index,
                                              yuv_size_,
                                              current_timestamp_us_);
  CHECK_EXCEPTION(jni);
  return encode_status;
}

bool MediaCodecVideoEncoder::EncodeTextureOnCodecThread(JNIEnv* jni,
    bool key_frame, const webrtc::VideoFrame& frame) {
  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
  RTC_CHECK(use_surface_);
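  // The native handle carries an OES texture id plus the texture-coordinate
  // transform matrix that the Java side applies when rendering the texture
  // into the encoder's input surface.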
  NativeHandleImpl* handle = static_cast<NativeHandleImpl*>(
      frame.video_frame_buffer()->native_handle());
  jfloatArray sampling_matrix = handle->sampling_matrix.ToJava(jni);
  bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
                                              j_encode_texture_method_,
                                              key_frame,
                                              handle->oes_texture_id,
                                              sampling_matrix,
                                              current_timestamp_us_);
  CHECK_EXCEPTION(jni);
  return encode_status;
}

int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread(
    webrtc::EncodedImageCallback* callback) {
  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() {
  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
  if (!inited_) {
    return WEBRTC_VIDEO_CODEC_OK;
  }
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ALOGD << "EncoderReleaseOnCodecThread: Frames received: " <<
      frames_received_ << ". Encoded: " << frames_encoded_ <<
      ". Dropped: " << frames_dropped_media_encoder_;
  ScopedLocalRefFrame local_ref_frame(jni);
  for (size_t i = 0; i < input_buffers_.size(); ++i)
    jni->DeleteGlobalRef(input_buffers_[i]);
  input_buffers_.clear();
  jni->CallVoidMethod(*j_media_codec_video_encoder_, j_release_method_);
  CHECK_EXCEPTION(jni);
  rtc::MessageQueueManager::Clear(this);
  inited_ = false;
  use_surface_ = false;
  ALOGD << "EncoderReleaseOnCodecThread done.";
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate,
                                                      uint32_t frame_rate) {
  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
  frame_rate = (frame_rate < MAX_ALLOWED_VIDEO_FPS) ?
      frame_rate : MAX_ALLOWED_VIDEO_FPS;
  if (last_set_bitrate_kbps_ == new_bit_rate &&
      last_set_fps_ == frame_rate) {
    return WEBRTC_VIDEO_CODEC_OK;
  }
  if (scale_) {
    quality_scaler_.ReportFramerate(frame_rate);
  }
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  if (new_bit_rate > 0) {
    last_set_bitrate_kbps_ = new_bit_rate;
  }
  if (frame_rate > 0) {
    last_set_fps_ = frame_rate;
  }
  bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
                                    j_set_rates_method_,
                                    last_set_bitrate_kbps_,
                                    last_set_fps_);
  CHECK_EXCEPTION(jni);
  if (!ret) {
    ResetCodecOnCodecThread();
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

int MediaCodecVideoEncoder::GetOutputBufferInfoIndex(
    JNIEnv* jni,
    jobject j_output_buffer_info) {
  return GetIntField(jni, j_output_buffer_info, j_info_index_field_);
}

jobject MediaCodecVideoEncoder::GetOutputBufferInfoBuffer(
    JNIEnv* jni,
    jobject j_output_buffer_info) {
  return GetObjectField(jni, j_output_buffer_info, j_info_buffer_field_);
}

bool MediaCodecVideoEncoder::GetOutputBufferInfoIsKeyFrame(
    JNIEnv* jni,
    jobject j_output_buffer_info) {
  return GetBooleanField(jni, j_output_buffer_info, j_info_is_key_frame_field_);
}

jlong MediaCodecVideoEncoder::GetOutputBufferInfoPresentationTimestampUs(
    JNIEnv* jni,
    jobject j_output_buffer_info) {
  return GetLongField(
      jni, j_output_buffer_info, j_info_presentation_timestamp_us_field_);
}

bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());

  while (true) {
    jobject j_output_buffer_info = jni->CallObjectMethod(
        *j_media_codec_video_encoder_, j_dequeue_output_buffer_method_);
    CHECK_EXCEPTION(jni);
    if (IsNull(jni, j_output_buffer_info)) {
      break;
    }

    int output_buffer_index =
        GetOutputBufferInfoIndex(jni, j_output_buffer_info);
    if (output_buffer_index == -1) {
      ResetCodecOnCodecThread();
      return false;
    }

    // Get key and config frame flags.
    jobject j_output_buffer =
        GetOutputBufferInfoBuffer(jni, j_output_buffer_info);
    bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info);

    // Get frame timestamps from a queue - for non config frames only.
    int64_t encoding_start_time_ms = 0;
    int64_t frame_encoding_time_ms = 0;
    last_output_timestamp_ms_ =
        GetOutputBufferInfoPresentationTimestampUs(jni, j_output_buffer_info) /
        rtc::kNumMicrosecsPerMillisec;
    if (!input_frame_infos_.empty()) {
      const InputFrameInfo& frame_info = input_frame_infos_.front();
      output_timestamp_ = frame_info.frame_timestamp;
      output_render_time_ms_ = frame_info.frame_render_time_ms;
      output_rotation_ = frame_info.rotation;
      encoding_start_time_ms = frame_info.encode_start_time;
      input_frame_infos_.pop_front();
    }

    // Extract payload.
    size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer);
    uint8_t* payload = reinterpret_cast<uint8_t*>(
        jni->GetDirectBufferAddress(j_output_buffer));
    CHECK_EXCEPTION(jni);

    // Callback - return encoded frame.
    int32_t callback_status = 0;
    if (callback_) {
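      // The EncodedImage wraps |payload| in place (no copy); this is safe
      // because callback_->Encoded() is synchronous and the MediaCodec output
      // buffer is only released back to the codec after it returns.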
      std::unique_ptr<webrtc::EncodedImage> image(
          new webrtc::EncodedImage(payload, payload_size, payload_size));
      image->_encodedWidth = width_;
      image->_encodedHeight = height_;
      image->_timeStamp = output_timestamp_;
      image->capture_time_ms_ = output_render_time_ms_;
      image->rotation_ = output_rotation_;
      image->_frameType =
          (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta);
      image->_completeFrame = true;
      image->adapt_reason_.quality_resolution_downscales =
          scale_ ? quality_scaler_.downscale_shift() : -1;

      webrtc::CodecSpecificInfo info;
      memset(&info, 0, sizeof(info));
      info.codecType = codecType_;
      if (codecType_ == kVideoCodecVP8) {
        info.codecSpecific.VP8.pictureId = picture_id_;
        info.codecSpecific.VP8.nonReference = false;
        info.codecSpecific.VP8.simulcastIdx = 0;
        info.codecSpecific.VP8.temporalIdx = webrtc::kNoTemporalIdx;
        info.codecSpecific.VP8.layerSync = false;
        info.codecSpecific.VP8.tl0PicIdx = webrtc::kNoTl0PicIdx;
        info.codecSpecific.VP8.keyIdx = webrtc::kNoKeyIdx;
      } else if (codecType_ == kVideoCodecVP9) {
        if (key_frame) {
          gof_idx_ = 0;
        }
        info.codecSpecific.VP9.picture_id = picture_id_;
        info.codecSpecific.VP9.inter_pic_predicted = key_frame ? false : true;
        info.codecSpecific.VP9.flexible_mode = false;
        info.codecSpecific.VP9.ss_data_available = key_frame ? true : false;
        info.codecSpecific.VP9.tl0_pic_idx = tl0_pic_idx_++;
        info.codecSpecific.VP9.temporal_idx = webrtc::kNoTemporalIdx;
        info.codecSpecific.VP9.spatial_idx = webrtc::kNoSpatialIdx;
        info.codecSpecific.VP9.temporal_up_switch = true;
        info.codecSpecific.VP9.inter_layer_predicted = false;
        info.codecSpecific.VP9.gof_idx =
            static_cast<uint8_t>(gof_idx_++ % gof_.num_frames_in_gof);
        info.codecSpecific.VP9.num_spatial_layers = 1;
        info.codecSpecific.VP9.spatial_layer_resolution_present = false;
        if (info.codecSpecific.VP9.ss_data_available) {
          info.codecSpecific.VP9.spatial_layer_resolution_present = true;
          info.codecSpecific.VP9.width[0] = width_;
          info.codecSpecific.VP9.height[0] = height_;
          info.codecSpecific.VP9.gof.CopyGofInfoVP9(gof_);
        }
      }
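      // |picture_id_| advances with each encoded frame and wraps at 15 bits,
      // the field width the VP8/VP9 RTP payload formats can carry.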
      picture_id_ = (picture_id_ + 1) & 0x7FFF;

      // Generate a header describing a single fragment.
      webrtc::RTPFragmentationHeader header;
      memset(&header, 0, sizeof(header));
      if (codecType_ == kVideoCodecVP8 || codecType_ == kVideoCodecVP9) {
        header.VerifyAndAllocateFragmentationHeader(1);
        header.fragmentationOffset[0] = 0;
        header.fragmentationLength[0] = image->_length;
        header.fragmentationPlType[0] = 0;
        header.fragmentationTimeDiff[0] = 0;
        if (codecType_ == kVideoCodecVP8 && scale_) {
          int qp;
          if (webrtc::vp8::GetQp(payload, payload_size, &qp)) {
            current_acc_qp_ += qp;
            quality_scaler_.ReportQP(qp);
            image->qp_ = qp;
          }
        }
      } else if (codecType_ == kVideoCodecH264) {
        if (scale_) {
          h264_bitstream_parser_.ParseBitstream(payload, payload_size);
          int qp;
          if (h264_bitstream_parser_.GetLastSliceQp(&qp)) {
            current_acc_qp_ += qp;
            quality_scaler_.ReportQP(qp);
          }
        }
        // For H.264 search for start codes.
        int32_t scPositions[MAX_NALUS_PERFRAME + 1] = {};
        int32_t scPositionsLength = 0;
        int32_t scPosition = 0;
        while (scPositionsLength < MAX_NALUS_PERFRAME) {
          int32_t naluPosition = NextNaluPosition(
              payload + scPosition, payload_size - scPosition);
          if (naluPosition < 0) {
            break;
          }
          scPosition += naluPosition;
          scPositions[scPositionsLength++] = scPosition;
          scPosition += H264_SC_LENGTH;
        }
        if (scPositionsLength == 0) {
          ALOGE << "Start code not found!";
          ALOGE << "Data:" << image->_buffer[0] << " " << image->_buffer[1]
              << " " << image->_buffer[2] << " " << image->_buffer[3]
              << " " << image->_buffer[4] << " " << image->_buffer[5];
          ResetCodecOnCodecThread();
          return false;
        }
        scPositions[scPositionsLength] = payload_size;
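        // Each fragment spans one NAL unit: offsets point just past the
        // 4-byte start codes found above, and the payload_size sentinel
        // stored above terminates the final fragment.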
        header.VerifyAndAllocateFragmentationHeader(scPositionsLength);
        for (size_t i = 0; i < scPositionsLength; i++) {
          header.fragmentationOffset[i] = scPositions[i] + H264_SC_LENGTH;
          header.fragmentationLength[i] =
              scPositions[i + 1] - header.fragmentationOffset[i];
          header.fragmentationPlType[i] = 0;
          header.fragmentationTimeDiff[i] = 0;
        }
      }

      callback_status = callback_->Encoded(*image, &info, &header);
    }

    // Return output buffer back to the encoder.
    bool success = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
                                          j_release_output_buffer_method_,
                                          output_buffer_index);
    CHECK_EXCEPTION(jni);
    if (!success) {
      ResetCodecOnCodecThread();
      return false;
    }

    // Log per-frame statistics.
    if (encoding_start_time_ms > 0) {
      frame_encoding_time_ms = rtc::TimeMillis() - encoding_start_time_ms;
    }
    if (frames_encoded_ < kMaxEncodedLogFrames) {
      int current_latency =
          (int)(last_input_timestamp_ms_ - last_output_timestamp_ms_);
      ALOGD << "Encoder frame out # " << frames_encoded_ <<
          ". Key: " << key_frame <<
          ". Size: " << payload_size <<
          ". TS: " << (int)last_output_timestamp_ms_ <<
          ". Latency: " << current_latency <<
          ". EncTime: " << frame_encoding_time_ms;
    }

    // Calculate and log encoding statistics - every 3 seconds.
    frames_encoded_++;
    current_frames_++;
    current_bytes_ += payload_size;
    current_encoding_time_ms_ += frame_encoding_time_ms;
    LogStatistics(false);

    if (callback_status > 0) {
      drop_next_input_frame_ = true;
      // Theoretically could handle callback_status<0 here, but unclear what
      // that would mean for us.
    }
  }
  return true;
}

void MediaCodecVideoEncoder::LogStatistics(bool force_log) {
  int statistic_time_ms = rtc::TimeMillis() - stat_start_time_ms_;
  if ((statistic_time_ms >= kMediaCodecStatisticsIntervalMs || force_log)
      && statistic_time_ms > 0) {
    // Prevent division by zero.
    int current_frames_divider = current_frames_ != 0 ? current_frames_ : 1;

    int current_bitrate = current_bytes_ * 8 / statistic_time_ms;
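    // Adding statistic_time_ms / 2 before dividing rounds the fps to the
    // nearest integer instead of truncating.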
    int current_fps =
        (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms;
    ALOGD << "Encoded frames: " << frames_encoded_ <<
        ". Bitrate: " << current_bitrate <<
        ", target: " << last_set_bitrate_kbps_ << " kbps" <<
        ", fps: " << current_fps <<
        ", encTime: " << (current_encoding_time_ms_ / current_frames_divider) <<
        ". QP: " << (current_acc_qp_ / current_frames_divider) <<
        " for last " << statistic_time_ms << " ms.";
    stat_start_time_ms_ = rtc::TimeMillis();
    current_frames_ = 0;
    current_bytes_ = 0;
    current_acc_qp_ = 0;
    current_encoding_time_ms_ = 0;
  }
}

int32_t MediaCodecVideoEncoder::NextNaluPosition(
    uint8_t *buffer, size_t buffer_size) {
  if (buffer_size < H264_SC_LENGTH) {
    return -1;
  }
  uint8_t *head = buffer;
  // Set end buffer pointer to 4 bytes before actual buffer end so we can
  // access head[1], head[2] and head[3] in a loop without buffer overrun.
  uint8_t *end = buffer + buffer_size - H264_SC_LENGTH;

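  // Scan for the 00 00 00 01 pattern, skipping as many positions as the byte
  // just examined rules out: a nonzero byte at head[k] (k = 0, 1, 2) means no
  // start code can begin at head, ..., head + k.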
  while (head < end) {
    if (head[0]) {
      head++;
      continue;
    }
    if (head[1]) {  // got 00xx
      head += 2;
      continue;
    }
    if (head[2]) {  // got 0000xx
      head += 3;
      continue;
    }
    if (head[3] != 0x01) {  // got 000000xx
      head++;  // xx != 1, continue searching.
      continue;
    }
    return (int32_t)(head - buffer);
  }
  return -1;
}

void MediaCodecVideoEncoder::OnDroppedFrame() {
  // Methods running on the codec thread should call
  // OnDroppedFrameOnCodecThread directly.
  RTC_DCHECK(!codec_thread_checker_.CalledOnValidThread());
  codec_thread_->Invoke<void>(
      RTC_FROM_HERE,
      Bind(&MediaCodecVideoEncoder::OnDroppedFrameOnCodecThread, this));
}

void MediaCodecVideoEncoder::OnDroppedFrameOnCodecThread() {
  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
  // Report dropped frame to quality_scaler_.
  if (scale_)
    quality_scaler_.ReportDroppedFrame();
}

const char* MediaCodecVideoEncoder::ImplementationName() const {
  return "MediaCodec";
}

MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory()
    : egl_context_(nullptr) {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  jclass j_encoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoEncoder");
  supported_codecs_.clear();

  bool is_vp8_hw_supported = jni->CallStaticBooleanMethod(
      j_encoder_class,
      GetStaticMethodID(jni, j_encoder_class, "isVp8HwSupported", "()Z"));
  CHECK_EXCEPTION(jni);
  if (is_vp8_hw_supported) {
    ALOGD << "VP8 HW Encoder supported.";
    supported_codecs_.push_back(VideoCodec(kVideoCodecVP8, "VP8",
        MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS));
  }

  bool is_vp9_hw_supported = jni->CallStaticBooleanMethod(
      j_encoder_class,
      GetStaticMethodID(jni, j_encoder_class, "isVp9HwSupported", "()Z"));
  CHECK_EXCEPTION(jni);
  if (is_vp9_hw_supported) {
    ALOGD << "VP9 HW Encoder supported.";
    supported_codecs_.push_back(VideoCodec(kVideoCodecVP9, "VP9",
        MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS));
  }

  bool is_h264_hw_supported = jni->CallStaticBooleanMethod(
      j_encoder_class,
      GetStaticMethodID(jni, j_encoder_class, "isH264HwSupported", "()Z"));
  CHECK_EXCEPTION(jni);
  if (is_h264_hw_supported) {
    ALOGD << "H.264 HW Encoder supported.";
    supported_codecs_.push_back(VideoCodec(kVideoCodecH264, "H264",
        MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS));
  }
}

MediaCodecVideoEncoderFactory::~MediaCodecVideoEncoderFactory() {
  ALOGD << "MediaCodecVideoEncoderFactory dtor";
  if (egl_context_) {
    JNIEnv* jni = AttachCurrentThreadIfNeeded();
    jni->DeleteGlobalRef(egl_context_);
  }
}

void MediaCodecVideoEncoderFactory::SetEGLContext(
    JNIEnv* jni, jobject egl_context) {
  ALOGD << "MediaCodecVideoEncoderFactory::SetEGLContext";
  if (egl_context_) {
    jni->DeleteGlobalRef(egl_context_);
    egl_context_ = nullptr;
  }
  egl_context_ = jni->NewGlobalRef(egl_context);
  if (CheckException(jni)) {
    ALOGE << "Error calling NewGlobalRef for EGL Context.";
  }
}

webrtc::VideoEncoder* MediaCodecVideoEncoderFactory::CreateVideoEncoder(
    VideoCodecType type) {
  if (supported_codecs_.empty()) {
    ALOGW << "No HW video encoder for type " << (int)type;
    return nullptr;
  }
  for (std::vector<VideoCodec>::const_iterator it = supported_codecs_.begin();
       it != supported_codecs_.end(); ++it) {
    if (it->type == type) {
      ALOGD << "Create HW video encoder for type " << (int)type <<
          " (" << it->name << ").";
      return new MediaCodecVideoEncoder(AttachCurrentThreadIfNeeded(), type,
                                        egl_context_);
    }
  }
  ALOGW << "Cannot find HW video encoder for type " << (int)type;
  return nullptr;
}

const std::vector<MediaCodecVideoEncoderFactory::VideoCodec>&
MediaCodecVideoEncoderFactory::codecs() const {
  return supported_codecs_;
}

void MediaCodecVideoEncoderFactory::DestroyVideoEncoder(
    webrtc::VideoEncoder* encoder) {
  ALOGD << "Destroy video encoder.";
  delete encoder;
}

}  // namespace webrtc_jni