| OLD | NEW |
| 1 /* | 1 /* |
| 2 * libjingle | 2 * libjingle |
| 3 * Copyright 2015 Google Inc. | 3 * Copyright 2015 Google Inc. |
| 4 * | 4 * |
| 5 * Redistribution and use in source and binary forms, with or without | 5 * Redistribution and use in source and binary forms, with or without |
| 6 * modification, are permitted provided that the following conditions are met: | 6 * modification, are permitted provided that the following conditions are met: |
| 7 * | 7 * |
| 8 * 1. Redistributions of source code must retain the above copyright notice, | 8 * 1. Redistributions of source code must retain the above copyright notice, |
| 9 * this list of conditions and the following disclaimer. | 9 * this list of conditions and the following disclaimer. |
| 10 * 2. Redistributions in binary form must reproduce the above copyright notice, | 10 * 2. Redistributions in binary form must reproduce the above copyright notice, |
| (...skipping 15 matching lines...) |
| 26 * | 26 * |
| 27 */ | 27 */ |
| 28 | 28 |
| 29 #include "talk/app/webrtc/java/jni/androidmediaencoder_jni.h" | 29 #include "talk/app/webrtc/java/jni/androidmediaencoder_jni.h" |
| 30 #include "talk/app/webrtc/java/jni/classreferenceholder.h" | 30 #include "talk/app/webrtc/java/jni/classreferenceholder.h" |
| 31 #include "talk/app/webrtc/java/jni/androidmediacodeccommon.h" | 31 #include "talk/app/webrtc/java/jni/androidmediacodeccommon.h" |
| 32 #include "webrtc/base/bind.h" | 32 #include "webrtc/base/bind.h" |
| 33 #include "webrtc/base/checks.h" | 33 #include "webrtc/base/checks.h" |
| 34 #include "webrtc/base/logging.h" | 34 #include "webrtc/base/logging.h" |
| 35 #include "webrtc/base/thread.h" | 35 #include "webrtc/base/thread.h" |
| 36 #include "webrtc/base/thread_checker.h" | |
| 37 #include "webrtc/modules/rtp_rtcp/source/h264_bitstream_parser.h" | 36 #include "webrtc/modules/rtp_rtcp/source/h264_bitstream_parser.h" |
| 38 #include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h" | 37 #include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h" |
| 39 #include "webrtc/modules/video_coding/utility/include/quality_scaler.h" | 38 #include "webrtc/modules/video_coding/utility/include/quality_scaler.h" |
| 40 #include "webrtc/modules/video_coding/utility/include/vp8_header_parser.h" | 39 #include "webrtc/modules/video_coding/utility/include/vp8_header_parser.h" |
| 41 #include "webrtc/system_wrappers/interface/field_trial.h" | 40 #include "webrtc/system_wrappers/interface/field_trial.h" |
| 42 #include "webrtc/system_wrappers/interface/logcat_trace_context.h" | 41 #include "webrtc/system_wrappers/interface/logcat_trace_context.h" |
| 43 #include "third_party/libyuv/include/libyuv/convert.h" | 42 #include "third_party/libyuv/include/libyuv/convert.h" |
| 44 #include "third_party/libyuv/include/libyuv/convert_from.h" | 43 #include "third_party/libyuv/include/libyuv/convert_from.h" |
| 45 #include "third_party/libyuv/include/libyuv/video_common.h" | 44 #include "third_party/libyuv/include/libyuv/video_common.h" |
| 46 | 45 |
| (...skipping 26 matching lines...) |
| 73 // MediaCodecVideoEncoder is a webrtc::VideoEncoder implementation that uses | 72 // MediaCodecVideoEncoder is a webrtc::VideoEncoder implementation that uses |
| 74 // Android's MediaCodec SDK API behind the scenes to implement (hopefully) | 73 // Android's MediaCodec SDK API behind the scenes to implement (hopefully) |
| 75 // HW-backed video encode. This C++ class is implemented as a very thin shim, | 74 // HW-backed video encode. This C++ class is implemented as a very thin shim, |
| 76 // delegating all of the interesting work to org.webrtc.MediaCodecVideoEncoder. | 75 // delegating all of the interesting work to org.webrtc.MediaCodecVideoEncoder. |
| 77 // MediaCodecVideoEncoder is created, operated, and destroyed on a single | 76 // MediaCodecVideoEncoder is created, operated, and destroyed on a single |
| 78 // thread, currently the libjingle Worker thread. | 77 // thread, currently the libjingle Worker thread. |
| 79 class MediaCodecVideoEncoder : public webrtc::VideoEncoder, | 78 class MediaCodecVideoEncoder : public webrtc::VideoEncoder, |
| 80 public rtc::MessageHandler { | 79 public rtc::MessageHandler { |
| 81 public: | 80 public: |
| 82 virtual ~MediaCodecVideoEncoder(); | 81 virtual ~MediaCodecVideoEncoder(); |
| 83 MediaCodecVideoEncoder(JNIEnv* jni, | 82 explicit MediaCodecVideoEncoder(JNIEnv* jni, VideoCodecType codecType); |
| 84 VideoCodecType codecType); | |
| 85 | 83 |
| 86 // webrtc::VideoEncoder implementation. Everything trampolines to | 84 // webrtc::VideoEncoder implementation. Everything trampolines to |
| 87 // |codec_thread_| for execution. | 85 // |codec_thread_| for execution. |
| 88 int32_t InitEncode(const webrtc::VideoCodec* codec_settings, | 86 int32_t InitEncode(const webrtc::VideoCodec* codec_settings, |
| 89 int32_t /* number_of_cores */, | 87 int32_t /* number_of_cores */, |
| 90 size_t /* max_payload_size */) override; | 88 size_t /* max_payload_size */) override; |
| 91 int32_t Encode( | 89 int32_t Encode( |
| 92 const webrtc::VideoFrame& input_image, | 90 const webrtc::VideoFrame& input_image, |
| 93 const webrtc::CodecSpecificInfo* /* codec_specific_info */, | 91 const webrtc::CodecSpecificInfo* /* codec_specific_info */, |
| 94 const std::vector<webrtc::VideoFrameType>* frame_types) override; | 92 const std::vector<webrtc::VideoFrameType>* frame_types) override; |
| 95 int32_t RegisterEncodeCompleteCallback( | 93 int32_t RegisterEncodeCompleteCallback( |
| 96 webrtc::EncodedImageCallback* callback) override; | 94 webrtc::EncodedImageCallback* callback) override; |
| 97 int32_t Release() override; | 95 int32_t Release() override; |
| 98 int32_t SetChannelParameters(uint32_t /* packet_loss */, | 96 int32_t SetChannelParameters(uint32_t /* packet_loss */, |
| 99 int64_t /* rtt */) override; | 97 int64_t /* rtt */) override; |
| 100 int32_t SetRates(uint32_t new_bit_rate, uint32_t frame_rate) override; | 98 int32_t SetRates(uint32_t new_bit_rate, uint32_t frame_rate) override; |
| 101 | 99 |
| 102 // rtc::MessageHandler implementation. | 100 // rtc::MessageHandler implementation. |
| 103 void OnMessage(rtc::Message* msg) override; | 101 void OnMessage(rtc::Message* msg) override; |
| 104 | 102 |
| 105 void OnDroppedFrame() override; | 103 void OnDroppedFrame() override; |
| 106 | 104 |
| 107 int GetTargetFramerate() override; | 105 int GetTargetFramerate() override; |
| 108 | 106 |
| 109 private: | 107 private: |
| 110 // ResetCodecOnCodecThread() calls ReleaseOnCodecThread() and | 108 // CHECK-fail if not running on |codec_thread_|. |
| 111 // InitEncodeOnCodecThread() in an attempt to restore the codec to an | 109 void CheckOnCodecThread(); |
| 110 |
| 111 // Release() and InitEncode() in an attempt to restore the codec to an |
| 112 // operable state. Necessary after all manner of OMX-layer errors. | 112 // operable state. Necessary after all manner of OMX-layer errors. |
| 113 void ResetCodecOnCodecThread(); | 113 void ResetCodec(); |
| 114 | 114 |
| 115 // Implementation of webrtc::VideoEncoder methods above, all running on the | 115 // Implementation of webrtc::VideoEncoder methods above, all running on the |
| 116 // codec thread exclusively. | 116 // codec thread exclusively. |
| 117 // | 117 // |
| 118 // If width==0 then this is assumed to be a re-initialization and the | 118 // If width==0 then this is assumed to be a re-initialization and the |
| 119 // previously-current values are reused instead of the passed parameters | 119 // previously-current values are reused instead of the passed parameters |
| 120 // (makes it easier to reason about thread-safety). | 120 // (makes it easier to reason about thread-safety). |
| 121 int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps); | 121 int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps); |
| 122 // Reconfigure to match |frame| in width, height. Returns false if | |
| 123 // reconfiguring fails. | |
| 124 bool MaybeReconfigureEncoderOnCodecThread(const webrtc::VideoFrame& frame); | |
| 125 int32_t EncodeOnCodecThread( | 122 int32_t EncodeOnCodecThread( |
| 126 const webrtc::VideoFrame& input_image, | 123 const webrtc::VideoFrame& input_image, |
| 127 const std::vector<webrtc::VideoFrameType>* frame_types); | 124 const std::vector<webrtc::VideoFrameType>* frame_types); |
| 128 bool EncodeByteBufferOnCodecThread(JNIEnv* jni, | |
| 129 bool key_frame, const webrtc::VideoFrame& frame); | |
| 130 | |
| 131 int32_t RegisterEncodeCompleteCallbackOnCodecThread( | 125 int32_t RegisterEncodeCompleteCallbackOnCodecThread( |
| 132 webrtc::EncodedImageCallback* callback); | 126 webrtc::EncodedImageCallback* callback); |
| 133 int32_t ReleaseOnCodecThread(); | 127 int32_t ReleaseOnCodecThread(); |
| 134 int32_t SetRatesOnCodecThread(uint32_t new_bit_rate, uint32_t frame_rate); | 128 int32_t SetRatesOnCodecThread(uint32_t new_bit_rate, uint32_t frame_rate); |
| 135 | 129 |
| 136 // Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members. | 130 // Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members. |
| 137 int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info); | 131 int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info); |
| 138 jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info); | 132 jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info); |
| 139 bool GetOutputBufferInfoIsKeyFrame(JNIEnv* jni, jobject j_output_buffer_info); | 133 bool GetOutputBufferInfoIsKeyFrame(JNIEnv* jni, jobject j_output_buffer_info); |
| 140 jlong GetOutputBufferInfoPresentationTimestampUs( | 134 jlong GetOutputBufferInfoPresentationTimestampUs( |
| 141 JNIEnv* jni, jobject j_output_buffer_info); | 135 JNIEnv* jni, jobject j_output_buffer_info); |
| 142 | 136 |
| 143 // Deliver any outputs pending in the MediaCodec to our |callback_| and return | 137 // Deliver any outputs pending in the MediaCodec to our |callback_| and return |
| 144 // true on success. | 138 // true on success. |
| 145 bool DeliverPendingOutputs(JNIEnv* jni); | 139 bool DeliverPendingOutputs(JNIEnv* jni); |
| 146 | 140 |
| 147 // Search for H.264 start codes. | 141 // Search for H.264 start codes. |
| 148 int32_t NextNaluPosition(uint8_t *buffer, size_t buffer_size); | 142 int32_t NextNaluPosition(uint8_t *buffer, size_t buffer_size); |
| 149 | 143 |
| 150 // Type of video codec. | 144 // Type of video codec. |
| 151 VideoCodecType codecType_; | 145 VideoCodecType codecType_; |
| 152 | 146 |
| 153 // Valid all the time since RegisterEncodeCompleteCallback() Invoke()s to | 147 // Valid all the time since RegisterEncodeCompleteCallback() Invoke()s to |
| 154 // |codec_thread_| synchronously. | 148 // |codec_thread_| synchronously. |
| 155 webrtc::EncodedImageCallback* callback_; | 149 webrtc::EncodedImageCallback* callback_; |
| 156 | 150 |
| 157 // State that is constant for the lifetime of this object once the ctor | 151 // State that is constant for the lifetime of this object once the ctor |
| 158 // returns. | 152 // returns. |
| 159 scoped_ptr<Thread> codec_thread_; // Thread on which to operate MediaCodec. | 153 scoped_ptr<Thread> codec_thread_; // Thread on which to operate MediaCodec. |
| 160 rtc::ThreadChecker codec_thread_checker_; | |
| 161 ScopedGlobalRef<jclass> j_media_codec_video_encoder_class_; | 154 ScopedGlobalRef<jclass> j_media_codec_video_encoder_class_; |
| 162 ScopedGlobalRef<jobject> j_media_codec_video_encoder_; | 155 ScopedGlobalRef<jobject> j_media_codec_video_encoder_; |
| 163 jmethodID j_init_encode_method_; | 156 jmethodID j_init_encode_method_; |
| 164 jmethodID j_get_input_buffers_method_; | |
| 165 jmethodID j_dequeue_input_buffer_method_; | 157 jmethodID j_dequeue_input_buffer_method_; |
| 166 jmethodID j_encode_buffer_method_; | 158 jmethodID j_encode_method_; |
| 167 jmethodID j_release_method_; | 159 jmethodID j_release_method_; |
| 168 jmethodID j_set_rates_method_; | 160 jmethodID j_set_rates_method_; |
| 169 jmethodID j_dequeue_output_buffer_method_; | 161 jmethodID j_dequeue_output_buffer_method_; |
| 170 jmethodID j_release_output_buffer_method_; | 162 jmethodID j_release_output_buffer_method_; |
| 171 jfieldID j_color_format_field_; | 163 jfieldID j_color_format_field_; |
| 172 jfieldID j_info_index_field_; | 164 jfieldID j_info_index_field_; |
| 173 jfieldID j_info_buffer_field_; | 165 jfieldID j_info_buffer_field_; |
| 174 jfieldID j_info_is_key_frame_field_; | 166 jfieldID j_info_is_key_frame_field_; |
| 175 jfieldID j_info_presentation_timestamp_us_field_; | 167 jfieldID j_info_presentation_timestamp_us_field_; |
| 176 | 168 |
| (...skipping 64 matching lines...) |
| 241 ScopedLocalRefFrame local_ref_frame(jni); | 233 ScopedLocalRefFrame local_ref_frame(jni); |
| 242 // It would be nice to avoid spinning up a new thread per MediaCodec, and | 234 // It would be nice to avoid spinning up a new thread per MediaCodec, and |
| 243 // instead re-use e.g. the PeerConnectionFactory's |worker_thread_|, but bug | 235 // instead re-use e.g. the PeerConnectionFactory's |worker_thread_|, but bug |
| 244 // 2732 means that deadlocks abound. This class synchronously trampolines | 236 // 2732 means that deadlocks abound. This class synchronously trampolines |
| 245 // to |codec_thread_|, so if anything else can be coming to _us_ from | 237 // to |codec_thread_|, so if anything else can be coming to _us_ from |
| 246 // |codec_thread_|, or from any thread holding the |_sendCritSect| described | 238 // |codec_thread_|, or from any thread holding the |_sendCritSect| described |
| 247 // in the bug, we have a problem. For now work around that with a dedicated | 239 // in the bug, we have a problem. For now work around that with a dedicated |
| 248 // thread. | 240 // thread. |
| 249 codec_thread_->SetName("MediaCodecVideoEncoder", NULL); | 241 codec_thread_->SetName("MediaCodecVideoEncoder", NULL); |
| 250 RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoEncoder"; | 242 RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoEncoder"; |
| 251 codec_thread_checker_.DetachFromThread(); | 243 |
| 252 jclass j_output_buffer_info_class = | 244 jclass j_output_buffer_info_class = |
| 253 FindClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo"); | 245 FindClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo"); |
| 254 j_init_encode_method_ = GetMethodID( | 246 j_init_encode_method_ = GetMethodID( |
| 255 jni, | 247 jni, |
| 256 *j_media_codec_video_encoder_class_, | 248 *j_media_codec_video_encoder_class_, |
| 257 "initEncode", | 249 "initEncode", |
| 258 "(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;IIII)Z"); | 250 "(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;IIII)" |
| 259 j_get_input_buffers_method_ = GetMethodID( | 251 "[Ljava/nio/ByteBuffer;"); |
| 260 jni, | |
| 261 *j_media_codec_video_encoder_class_, | |
| 262 "getInputBuffers", | |
| 263 "()[Ljava/nio/ByteBuffer;"); | |
| 264 j_dequeue_input_buffer_method_ = GetMethodID( | 252 j_dequeue_input_buffer_method_ = GetMethodID( |
| 265 jni, *j_media_codec_video_encoder_class_, "dequeueInputBuffer", "()I"); | 253 jni, *j_media_codec_video_encoder_class_, "dequeueInputBuffer", "()I"); |
| 266 j_encode_buffer_method_ = GetMethodID( | 254 j_encode_method_ = GetMethodID( |
| 267 jni, *j_media_codec_video_encoder_class_, "encodeBuffer", "(ZIIJ)Z"); | 255 jni, *j_media_codec_video_encoder_class_, "encode", "(ZIIJ)Z"); |
| 268 j_release_method_ = | 256 j_release_method_ = |
| 269 GetMethodID(jni, *j_media_codec_video_encoder_class_, "release", "()V"); | 257 GetMethodID(jni, *j_media_codec_video_encoder_class_, "release", "()V"); |
| 270 j_set_rates_method_ = GetMethodID( | 258 j_set_rates_method_ = GetMethodID( |
| 271 jni, *j_media_codec_video_encoder_class_, "setRates", "(II)Z"); | 259 jni, *j_media_codec_video_encoder_class_, "setRates", "(II)Z"); |
| 272 j_dequeue_output_buffer_method_ = GetMethodID( | 260 j_dequeue_output_buffer_method_ = GetMethodID( |
| 273 jni, | 261 jni, |
| 274 *j_media_codec_video_encoder_class_, | 262 *j_media_codec_video_encoder_class_, |
| 275 "dequeueOutputBuffer", | 263 "dequeueOutputBuffer", |
| 276 "()Lorg/webrtc/MediaCodecVideoEncoder$OutputBufferInfo;"); | 264 "()Lorg/webrtc/MediaCodecVideoEncoder$OutputBufferInfo;"); |
| 277 j_release_output_buffer_method_ = GetMethodID( | 265 j_release_output_buffer_method_ = GetMethodID( |
| (...skipping 102 matching lines...) |
| 380 quality_scaler_.ReportFramerate(frame_rate); | 368 quality_scaler_.ReportFramerate(frame_rate); |
| 381 | 369 |
| 382 return codec_thread_->Invoke<int32_t>( | 370 return codec_thread_->Invoke<int32_t>( |
| 383 Bind(&MediaCodecVideoEncoder::SetRatesOnCodecThread, | 371 Bind(&MediaCodecVideoEncoder::SetRatesOnCodecThread, |
| 384 this, | 372 this, |
| 385 new_bit_rate, | 373 new_bit_rate, |
| 386 frame_rate)); | 374 frame_rate)); |
| 387 } | 375 } |
| 388 | 376 |
| 389 void MediaCodecVideoEncoder::OnMessage(rtc::Message* msg) { | 377 void MediaCodecVideoEncoder::OnMessage(rtc::Message* msg) { |
| 390 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | |
| 391 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 378 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 392 ScopedLocalRefFrame local_ref_frame(jni); | 379 ScopedLocalRefFrame local_ref_frame(jni); |
| 393 | 380 |
| 394 // We only ever send one message to |this| directly (not through a Bind()'d | 381 // We only ever send one message to |this| directly (not through a Bind()'d |
| 395 // functor), so expect no ID/data. | 382 // functor), so expect no ID/data. |
| 396 RTC_CHECK(!msg->message_id) << "Unexpected message!"; | 383 RTC_CHECK(!msg->message_id) << "Unexpected message!"; |
| 397 RTC_CHECK(!msg->pdata) << "Unexpected message!"; | 384 RTC_CHECK(!msg->pdata) << "Unexpected message!"; |
| 385 CheckOnCodecThread(); |
| 398 if (!inited_) { | 386 if (!inited_) { |
| 399 return; | 387 return; |
| 400 } | 388 } |
| 401 | 389 |
| 402 // It would be nice to recover from a failure here if one happened, but it's | 390 // It would be nice to recover from a failure here if one happened, but it's |
| 403 // unclear how to signal such a failure to the app, so instead we stay silent | 391 // unclear how to signal such a failure to the app, so instead we stay silent |
| 404 // about it and let the next app-called API method reveal the borkedness. | 392 // about it and let the next app-called API method reveal the borkedness. |
| 405 DeliverPendingOutputs(jni); | 393 DeliverPendingOutputs(jni); |
| 406 codec_thread_->PostDelayed(kMediaCodecPollMs, this); | 394 codec_thread_->PostDelayed(kMediaCodecPollMs, this); |
| 407 } | 395 } |
| 408 | 396 |
| 409 void MediaCodecVideoEncoder::ResetCodecOnCodecThread() { | 397 void MediaCodecVideoEncoder::CheckOnCodecThread() { |
| 410 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 398 RTC_CHECK(codec_thread_ == ThreadManager::Instance()->CurrentThread()) |
| 411 ALOGE << "ResetOnCodecThread"; | 399 << "Running on wrong thread!"; |
| 412 if (ReleaseOnCodecThread() != WEBRTC_VIDEO_CODEC_OK || | 400 } |
| 413 InitEncodeOnCodecThread(width_, height_, 0, 0) | 401 |
| 414 != WEBRTC_VIDEO_CODEC_OK) { | 402 void MediaCodecVideoEncoder::ResetCodec() { |
| 403 ALOGE << "ResetCodec"; |
| 404 if (Release() != WEBRTC_VIDEO_CODEC_OK || |
| 405 codec_thread_->Invoke<int32_t>(Bind( |
| 406 &MediaCodecVideoEncoder::InitEncodeOnCodecThread, this, |
| 407 width_, height_, 0, 0)) != WEBRTC_VIDEO_CODEC_OK) { |
| 415 // TODO(fischman): wouldn't it be nice if there was a way to gracefully | 408 // TODO(fischman): wouldn't it be nice if there was a way to gracefully |
| 416 // degrade to a SW encoder at this point? There isn't one AFAICT :( | 409 // degrade to a SW encoder at this point? There isn't one AFAICT :( |
| 417 // https://code.google.com/p/webrtc/issues/detail?id=2920 | 410 // https://code.google.com/p/webrtc/issues/detail?id=2920 |
| 418 } | 411 } |
| 419 } | 412 } |
| 420 | 413 |
| 421 int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread( | 414 int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread( |
| 422 int width, int height, int kbps, int fps) { | 415 int width, int height, int kbps, int fps) { |
| 423 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 416 CheckOnCodecThread(); |
| 424 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 417 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 425 ScopedLocalRefFrame local_ref_frame(jni); | 418 ScopedLocalRefFrame local_ref_frame(jni); |
| 426 | 419 |
| 427 ALOGD << "InitEncodeOnCodecThread Type: " << (int)codecType_ << ", " << | 420 ALOGD << "InitEncodeOnCodecThread Type: " << (int)codecType_ << ", " << |
| 428 width << " x " << height << ". Bitrate: " << kbps << | 421 width << " x " << height << ". Bitrate: " << kbps << |
| 429 " kbps. Fps: " << fps; | 422 " kbps. Fps: " << fps; |
| 430 if (kbps == 0) { | 423 if (kbps == 0) { |
| 431 kbps = last_set_bitrate_kbps_; | 424 kbps = last_set_bitrate_kbps_; |
| 432 } | 425 } |
| 433 if (fps == 0) { | 426 if (fps == 0) { |
| (...skipping 16 matching lines...) |
| 450 current_encoding_time_ms_ = 0; | 443 current_encoding_time_ms_ = 0; |
| 451 last_input_timestamp_ms_ = -1; | 444 last_input_timestamp_ms_ = -1; |
| 452 last_output_timestamp_ms_ = -1; | 445 last_output_timestamp_ms_ = -1; |
| 453 output_timestamp_ = 0; | 446 output_timestamp_ = 0; |
| 454 output_render_time_ms_ = 0; | 447 output_render_time_ms_ = 0; |
| 455 timestamps_.clear(); | 448 timestamps_.clear(); |
| 456 render_times_ms_.clear(); | 449 render_times_ms_.clear(); |
| 457 frame_rtc_times_ms_.clear(); | 450 frame_rtc_times_ms_.clear(); |
| 458 drop_next_input_frame_ = false; | 451 drop_next_input_frame_ = false; |
| 459 picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF; | 452 picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF; |
| 460 | |
| 461 // We enforce no extra stride/padding in the format creation step. | 453 // We enforce no extra stride/padding in the format creation step. |
| 462 jobject j_video_codec_enum = JavaEnumFromIndex( | 454 jobject j_video_codec_enum = JavaEnumFromIndex( |
| 463 jni, "MediaCodecVideoEncoder$VideoCodecType", codecType_); | 455 jni, "MediaCodecVideoEncoder$VideoCodecType", codecType_); |
| 464 const bool encode_status = jni->CallBooleanMethod( | |
| 465 *j_media_codec_video_encoder_, j_init_encode_method_, | |
| 466 j_video_codec_enum, width, height, kbps, fps); | |
| 467 if (!encode_status) { | |
| 468 ALOGE << "Failed to configure encoder."; | |
| 469 return WEBRTC_VIDEO_CODEC_ERROR; | |
| 470 } | |
| 471 CHECK_EXCEPTION(jni); | |
| 472 | |
| 473 jobjectArray input_buffers = reinterpret_cast<jobjectArray>( | 456 jobjectArray input_buffers = reinterpret_cast<jobjectArray>( |
| 474 jni->CallObjectMethod(*j_media_codec_video_encoder_, | 457 jni->CallObjectMethod(*j_media_codec_video_encoder_, |
| 475 j_get_input_buffers_method_)); | 458 j_init_encode_method_, |
| 459 j_video_codec_enum, |
| 460 width_, |
| 461 height_, |
| 462 kbps, |
| 463 fps)); |
| 476 CHECK_EXCEPTION(jni); | 464 CHECK_EXCEPTION(jni); |
| 477 if (IsNull(jni, input_buffers)) { | 465 if (IsNull(jni, input_buffers)) { |
| 478 return WEBRTC_VIDEO_CODEC_ERROR; | 466 return WEBRTC_VIDEO_CODEC_ERROR; |
| 479 } | 467 } |
| 480 | 468 |
| 469 inited_ = true; |
| 481 switch (GetIntField(jni, *j_media_codec_video_encoder_, | 470 switch (GetIntField(jni, *j_media_codec_video_encoder_, |
| 482 j_color_format_field_)) { | 471 j_color_format_field_)) { |
| 483 case COLOR_FormatYUV420Planar: | 472 case COLOR_FormatYUV420Planar: |
| 484 encoder_fourcc_ = libyuv::FOURCC_YU12; | 473 encoder_fourcc_ = libyuv::FOURCC_YU12; |
| 485 break; | 474 break; |
| 486 case COLOR_FormatYUV420SemiPlanar: | 475 case COLOR_FormatYUV420SemiPlanar: |
| 487 case COLOR_QCOM_FormatYUV420SemiPlanar: | 476 case COLOR_QCOM_FormatYUV420SemiPlanar: |
| 488 case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m: | 477 case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m: |
| 489 encoder_fourcc_ = libyuv::FOURCC_NV12; | 478 encoder_fourcc_ = libyuv::FOURCC_NV12; |
| 490 break; | 479 break; |
| 491 default: | 480 default: |
| 492 LOG(LS_ERROR) << "Wrong color format."; | 481 LOG(LS_ERROR) << "Wrong color format."; |
| 493 return WEBRTC_VIDEO_CODEC_ERROR; | 482 return WEBRTC_VIDEO_CODEC_ERROR; |
| 494 } | 483 } |
| 495 size_t num_input_buffers = jni->GetArrayLength(input_buffers); | 484 size_t num_input_buffers = jni->GetArrayLength(input_buffers); |
| 496 RTC_CHECK(input_buffers_.empty()) | 485 RTC_CHECK(input_buffers_.empty()) |
| 497 << "Unexpected double InitEncode without Release"; | 486 << "Unexpected double InitEncode without Release"; |
| 498 input_buffers_.resize(num_input_buffers); | 487 input_buffers_.resize(num_input_buffers); |
| 499 for (size_t i = 0; i < num_input_buffers; ++i) { | 488 for (size_t i = 0; i < num_input_buffers; ++i) { |
| 500 input_buffers_[i] = | 489 input_buffers_[i] = |
| 501 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i)); | 490 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i)); |
| 502 int64_t yuv_buffer_capacity = | 491 int64_t yuv_buffer_capacity = |
| 503 jni->GetDirectBufferCapacity(input_buffers_[i]); | 492 jni->GetDirectBufferCapacity(input_buffers_[i]); |
| 504 CHECK_EXCEPTION(jni); | 493 CHECK_EXCEPTION(jni); |
| 505 RTC_CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity"; | 494 RTC_CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity"; |
| 506 } | 495 } |
| 507 CHECK_EXCEPTION(jni); | 496 CHECK_EXCEPTION(jni); |
| 508 | 497 |
| 509 | |
| 510 inited_ = true; | |
| 511 codec_thread_->PostDelayed(kMediaCodecPollMs, this); | 498 codec_thread_->PostDelayed(kMediaCodecPollMs, this); |
| 512 return WEBRTC_VIDEO_CODEC_OK; | 499 return WEBRTC_VIDEO_CODEC_OK; |
| 513 } | 500 } |
| 514 | 501 |
| 515 int32_t MediaCodecVideoEncoder::EncodeOnCodecThread( | 502 int32_t MediaCodecVideoEncoder::EncodeOnCodecThread( |
| 516 const webrtc::VideoFrame& frame, | 503 const webrtc::VideoFrame& frame, |
| 517 const std::vector<webrtc::VideoFrameType>* frame_types) { | 504 const std::vector<webrtc::VideoFrameType>* frame_types) { |
| 518 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 505 CheckOnCodecThread(); |
| 519 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 506 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 520 ScopedLocalRefFrame local_ref_frame(jni); | 507 ScopedLocalRefFrame local_ref_frame(jni); |
| 521 | 508 |
| 522 if (!inited_) { | 509 if (!inited_) { |
| 523 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 510 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
| 524 } | 511 } |
| 525 | |
| 526 frames_received_++; | 512 frames_received_++; |
| 527 if (!DeliverPendingOutputs(jni)) { | 513 if (!DeliverPendingOutputs(jni)) { |
| 528 ResetCodecOnCodecThread(); | 514 ResetCodec(); |
| 529 // Continue as if everything's fine. | 515 // Continue as if everything's fine. |
| 530 } | 516 } |
| 531 | 517 |
| 532 if (drop_next_input_frame_) { | 518 if (drop_next_input_frame_) { |
| 533 ALOGD << "Encoder drop frame - failed callback."; | 519 ALOGV("Encoder drop frame - failed callback."); |
| 534 drop_next_input_frame_ = false; | 520 drop_next_input_frame_ = false; |
| 535 return WEBRTC_VIDEO_CODEC_OK; | 521 return WEBRTC_VIDEO_CODEC_OK; |
| 536 } | 522 } |
| 537 | 523 |
| 538 RTC_CHECK(frame_types->size() == 1) << "Unexpected stream count"; | 524 RTC_CHECK(frame_types->size() == 1) << "Unexpected stream count"; |
| 539 // Check framerate before spatial resolution change. | 525 // Check framerate before spatial resolution change. |
| 540 if (scale_) | 526 if (scale_) |
| 541 quality_scaler_.OnEncodeFrame(frame); | 527 quality_scaler_.OnEncodeFrame(frame); |
| 542 | 528 |
| 543 const VideoFrame& input_frame = | 529 const VideoFrame& input_frame = |
| 544 scale_ ? quality_scaler_.GetScaledFrame(frame) : frame; | 530 scale_ ? quality_scaler_.GetScaledFrame(frame) : frame; |
| 545 | 531 |
| 546 if (!MaybeReconfigureEncoderOnCodecThread(input_frame)) { | 532 if (input_frame.width() != width_ || input_frame.height() != height_) { |
| 547 ALOGE << "Failed to reconfigure encoder."; | 533 ALOGD << "Frame resolution change from " << width_ << " x " << height_ << |
| 548 return WEBRTC_VIDEO_CODEC_ERROR; | 534 " to " << input_frame.width() << " x " << input_frame.height(); |
| 535 width_ = input_frame.width(); |
| 536 height_ = input_frame.height(); |
| 537 ResetCodec(); |
| 538 return WEBRTC_VIDEO_CODEC_OK; |
| 549 } | 539 } |
| 550 | 540 |
| 551 // Check if we accumulated too many frames in encoder input buffers | 541 // Check if we accumulated too many frames in encoder input buffers |
| 552 // or the encoder latency exceeds 70 ms and drop frame if so. | 542 // or the encoder latency exceeds 70 ms and drop frame if so. |
| 553 if (frames_in_queue_ > 0 && last_input_timestamp_ms_ >= 0) { | 543 if (frames_in_queue_ > 0 && last_input_timestamp_ms_ >= 0) { |
| 554 int encoder_latency_ms = last_input_timestamp_ms_ - | 544 int encoder_latency_ms = last_input_timestamp_ms_ - |
| 555 last_output_timestamp_ms_; | 545 last_output_timestamp_ms_; |
| 556 if (frames_in_queue_ > 2 || encoder_latency_ms > 70) { | 546 if (frames_in_queue_ > 2 || encoder_latency_ms > 70) { |
| 557 ALOGD << "Drop frame - encoder is behind by " << encoder_latency_ms << | 547 ALOGD << "Drop frame - encoder is behind by " << encoder_latency_ms << |
| 558 " ms. Q size: " << frames_in_queue_; | 548 " ms. Q size: " << frames_in_queue_; |
| 559 frames_dropped_++; | 549 frames_dropped_++; |
| 560 // Report dropped frame to quality_scaler_. | 550 // Report dropped frame to quality_scaler_. |
| 561 OnDroppedFrame(); | 551 OnDroppedFrame(); |
| 562 return WEBRTC_VIDEO_CODEC_OK; | 552 return WEBRTC_VIDEO_CODEC_OK; |
| 563 } | 553 } |
| 564 } | 554 } |
| 565 | 555 |
| 566 last_input_timestamp_ms_ = | |
| 567 current_timestamp_us_ / rtc::kNumMicrosecsPerMillisec; | |
| 568 frames_in_queue_++; | |
| 569 | |
| 570 // Save input image timestamps for later output | |
| 571 timestamps_.push_back(input_frame.timestamp()); | |
| 572 render_times_ms_.push_back(input_frame.render_time_ms()); | |
| 573 frame_rtc_times_ms_.push_back(GetCurrentTimeMs()); | |
| 574 | |
| 575 const bool key_frame = frame_types->front() != webrtc::kDeltaFrame; | |
| 576 const bool encode_status = | |
| 577 EncodeByteBufferOnCodecThread(jni, key_frame, input_frame); | |
| 578 | |
| 579 current_timestamp_us_ += 1000000 / last_set_fps_; | |
| 580 | |
| 581 if (!encode_status || !DeliverPendingOutputs(jni)) { | |
| 582 ALOGE << "Failed deliver pending outputs."; | |
| 583 ResetCodecOnCodecThread(); | |
| 584 return WEBRTC_VIDEO_CODEC_ERROR; | |
| 585 } | |
| 586 return WEBRTC_VIDEO_CODEC_OK; | |
| 587 } | |
| 588 | |
| 589 bool MediaCodecVideoEncoder::MaybeReconfigureEncoderOnCodecThread( | |
| 590 const webrtc::VideoFrame& frame) { | |
| 591 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | |
| 592 | |
| 593 const bool reconfigure_due_to_size = | |
| 594 frame.width() != width_ || frame.height() != height_; | |
| 595 | |
| 596 if (reconfigure_due_to_size) { | |
| 597 ALOGD << "Reconfigure encoder due to frame resolution change from " | |
| 598 << width_ << " x " << height_ << " to " << frame.width() << " x " | |
| 599 << frame.height(); | |
| 600 width_ = frame.width(); | |
| 601 height_ = frame.height(); | |
| 602 } | |
| 603 | |
| 604 if (!reconfigure_due_to_size) | |
| 605 return true; | |
| 606 | |
| 607 ReleaseOnCodecThread(); | |
| 608 | |
| 609 return InitEncodeOnCodecThread(width_, height_, 0, 0) == | |
| 610 WEBRTC_VIDEO_CODEC_OK; | |
| 611 } | |
| 612 | |
| 613 bool MediaCodecVideoEncoder::EncodeByteBufferOnCodecThread(JNIEnv* jni, | |
| 614 bool key_frame, const webrtc::VideoFrame& frame) { | |
| 615 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | |
| 616 int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_, | 556 int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_, |
| 617 j_dequeue_input_buffer_method_); | 557 j_dequeue_input_buffer_method_); |
| 618 CHECK_EXCEPTION(jni); | 558 CHECK_EXCEPTION(jni); |
| 619 if (j_input_buffer_index == -1) { | 559 if (j_input_buffer_index == -1) { |
| 620 // Video codec falls behind - no input buffer available. | 560 // Video codec falls behind - no input buffer available. |
| 621 ALOGD <<"Encoder drop frame - no input buffers available"; | 561 ALOGV("Encoder drop frame - no input buffers available"); |
| 622 frames_dropped_++; | 562 frames_dropped_++; |
| 623 // Report dropped frame to quality_scaler_. | 563 // Report dropped frame to quality_scaler_. |
| 624 OnDroppedFrame(); | 564 OnDroppedFrame(); |
| 625 return true; // TODO(fischman): see webrtc bug 2887. | 565 return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887. |
| 626 } | 566 } |
| 627 if (j_input_buffer_index == -2) { | 567 if (j_input_buffer_index == -2) { |
| 628 return false; | 568 ResetCodec(); |
| 569 return WEBRTC_VIDEO_CODEC_ERROR; |
| 629 } | 570 } |
| 630 | 571 |
| 631 ALOGV("Encoder frame in # %d. TS: %lld. Q: %d", | 572 ALOGV("Encoder frame in # %d. TS: %lld. Q: %d", |
| 632 frames_received_ - 1, current_timestamp_us_ / 1000, frames_in_queue_); | 573 frames_received_ - 1, current_timestamp_us_ / 1000, frames_in_queue_); |
| 633 | 574 |
| 634 jobject j_input_buffer = input_buffers_[j_input_buffer_index]; | 575 jobject j_input_buffer = input_buffers_[j_input_buffer_index]; |
| 635 uint8_t* yuv_buffer = | 576 uint8_t* yuv_buffer = |
| 636 reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer)); | 577 reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer)); |
| 637 CHECK_EXCEPTION(jni); | 578 CHECK_EXCEPTION(jni); |
| 638 RTC_CHECK(yuv_buffer) << "Indirect buffer??"; | 579 RTC_CHECK(yuv_buffer) << "Indirect buffer??"; |
| 639 RTC_CHECK(!libyuv::ConvertFromI420( | 580 RTC_CHECK(!libyuv::ConvertFromI420( |
| 640 frame.buffer(webrtc::kYPlane), frame.stride(webrtc::kYPlane), | 581 input_frame.buffer(webrtc::kYPlane), input_frame.stride(webrtc::kYPlane), |
| 641 frame.buffer(webrtc::kUPlane), frame.stride(webrtc::kUPlane), | 582 input_frame.buffer(webrtc::kUPlane), input_frame.stride(webrtc::kUPlane), |
| 642 frame.buffer(webrtc::kVPlane), frame.stride(webrtc::kVPlane), | 583 input_frame.buffer(webrtc::kVPlane), input_frame.stride(webrtc::kVPlane), |
| 643 yuv_buffer, width_, width_, height_, encoder_fourcc_)) | 584 yuv_buffer, width_, width_, height_, encoder_fourcc_)) |
| 644 << "ConvertFromI420 failed"; | 585 << "ConvertFromI420 failed"; |
| 586 last_input_timestamp_ms_ = current_timestamp_us_ / 1000; |
| 587 frames_in_queue_++; |
| 645 | 588 |
| 589 // Save input image timestamps for later output |
| 590 timestamps_.push_back(input_frame.timestamp()); |
| 591 render_times_ms_.push_back(input_frame.render_time_ms()); |
| 592 frame_rtc_times_ms_.push_back(GetCurrentTimeMs()); |
| 646 | 593 |
| 594 bool key_frame = frame_types->front() != webrtc::kDeltaFrame; |
| 647 bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_, | 595 bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_, |
| 648 j_encode_buffer_method_, | 596 j_encode_method_, |
| 649 key_frame, | 597 key_frame, |
| 650 j_input_buffer_index, | 598 j_input_buffer_index, |
| 651 yuv_size_, | 599 yuv_size_, |
| 652 current_timestamp_us_); | 600 current_timestamp_us_); |
| 653 CHECK_EXCEPTION(jni); | 601 CHECK_EXCEPTION(jni); |
| 654 return encode_status; | 602 current_timestamp_us_ += 1000000 / last_set_fps_; |
| 603 |
| 604 if (!encode_status || !DeliverPendingOutputs(jni)) { |
| 605 ResetCodec(); |
| 606 return WEBRTC_VIDEO_CODEC_ERROR; |
| 607 } |
| 608 |
| 609 return WEBRTC_VIDEO_CODEC_OK; |
| 655 } | 610 } |
| 656 | 611 |
| 657 int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread( | 612 int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread( |
| 658 webrtc::EncodedImageCallback* callback) { | 613 webrtc::EncodedImageCallback* callback) { |
| 659 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 614 CheckOnCodecThread(); |
| 660 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 615 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 661 ScopedLocalRefFrame local_ref_frame(jni); | 616 ScopedLocalRefFrame local_ref_frame(jni); |
| 662 callback_ = callback; | 617 callback_ = callback; |
| 663 return WEBRTC_VIDEO_CODEC_OK; | 618 return WEBRTC_VIDEO_CODEC_OK; |
| 664 } | 619 } |
| 665 | 620 |
| 666 int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() { | 621 int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() { |
| 667 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | |
| 668 if (!inited_) { | 622 if (!inited_) { |
| 669 return WEBRTC_VIDEO_CODEC_OK; | 623 return WEBRTC_VIDEO_CODEC_OK; |
| 670 } | 624 } |
| 625 CheckOnCodecThread(); |
| 671 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 626 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 672 ALOGD << "EncoderReleaseOnCodecThread: Frames received: " << | 627 ALOGD << "EncoderReleaseOnCodecThread: Frames received: " << |
| 673 frames_received_ << ". Encoded: " << frames_encoded_ << | 628 frames_received_ << ". Encoded: " << frames_encoded_ << |
| 674 ". Dropped: " << frames_dropped_; | 629 ". Dropped: " << frames_dropped_; |
| 675 ScopedLocalRefFrame local_ref_frame(jni); | 630 ScopedLocalRefFrame local_ref_frame(jni); |
| 676 for (size_t i = 0; i < input_buffers_.size(); ++i) | 631 for (size_t i = 0; i < input_buffers_.size(); ++i) |
| 677 jni->DeleteGlobalRef(input_buffers_[i]); | 632 jni->DeleteGlobalRef(input_buffers_[i]); |
| 678 input_buffers_.clear(); | 633 input_buffers_.clear(); |
| 679 jni->CallVoidMethod(*j_media_codec_video_encoder_, j_release_method_); | 634 jni->CallVoidMethod(*j_media_codec_video_encoder_, j_release_method_); |
| 680 CHECK_EXCEPTION(jni); | 635 CHECK_EXCEPTION(jni); |
| 681 rtc::MessageQueueManager::Clear(this); | 636 rtc::MessageQueueManager::Clear(this); |
| 682 inited_ = false; | 637 inited_ = false; |
| 683 ALOGD << "EncoderReleaseOnCodecThread done."; | 638 ALOGD << "EncoderReleaseOnCodecThread done."; |
| 684 return WEBRTC_VIDEO_CODEC_OK; | 639 return WEBRTC_VIDEO_CODEC_OK; |
| 685 } | 640 } |
| 686 | 641 |
| 687 int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate, | 642 int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate, |
| 688 uint32_t frame_rate) { | 643 uint32_t frame_rate) { |
| 689 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 644 CheckOnCodecThread(); |
| 690 if (last_set_bitrate_kbps_ == new_bit_rate && | 645 if (last_set_bitrate_kbps_ == new_bit_rate && |
| 691 last_set_fps_ == frame_rate) { | 646 last_set_fps_ == frame_rate) { |
| 692 return WEBRTC_VIDEO_CODEC_OK; | 647 return WEBRTC_VIDEO_CODEC_OK; |
| 693 } | 648 } |
| 694 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 649 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 695 ScopedLocalRefFrame local_ref_frame(jni); | 650 ScopedLocalRefFrame local_ref_frame(jni); |
| 696 if (new_bit_rate > 0) { | 651 if (new_bit_rate > 0) { |
| 697 last_set_bitrate_kbps_ = new_bit_rate; | 652 last_set_bitrate_kbps_ = new_bit_rate; |
| 698 } | 653 } |
| 699 if (frame_rate > 0) { | 654 if (frame_rate > 0) { |
| 700 last_set_fps_ = frame_rate; | 655 last_set_fps_ = frame_rate; |
| 701 } | 656 } |
| 702 bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_, | 657 bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_, |
| 703 j_set_rates_method_, | 658 j_set_rates_method_, |
| 704 last_set_bitrate_kbps_, | 659 last_set_bitrate_kbps_, |
| 705 last_set_fps_); | 660 last_set_fps_); |
| 706 CHECK_EXCEPTION(jni); | 661 CHECK_EXCEPTION(jni); |
| 707 if (!ret) { | 662 if (!ret) { |
| 708 ResetCodecOnCodecThread(); | 663 ResetCodec(); |
| 709 return WEBRTC_VIDEO_CODEC_ERROR; | 664 return WEBRTC_VIDEO_CODEC_ERROR; |
| 710 } | 665 } |
| 711 return WEBRTC_VIDEO_CODEC_OK; | 666 return WEBRTC_VIDEO_CODEC_OK; |
| 712 } | 667 } |
| 713 | 668 |
| 714 int MediaCodecVideoEncoder::GetOutputBufferInfoIndex( | 669 int MediaCodecVideoEncoder::GetOutputBufferInfoIndex( |
| 715 JNIEnv* jni, | 670 JNIEnv* jni, |
| 716 jobject j_output_buffer_info) { | 671 jobject j_output_buffer_info) { |
| 717 return GetIntField(jni, j_output_buffer_info, j_info_index_field_); | 672 return GetIntField(jni, j_output_buffer_info, j_info_index_field_); |
| 718 } | 673 } |
| (...skipping 11 matching lines...) |
| 730 } | 685 } |
| 731 | 686 |
| 732 jlong MediaCodecVideoEncoder::GetOutputBufferInfoPresentationTimestampUs( | 687 jlong MediaCodecVideoEncoder::GetOutputBufferInfoPresentationTimestampUs( |
| 733 JNIEnv* jni, | 688 JNIEnv* jni, |
| 734 jobject j_output_buffer_info) { | 689 jobject j_output_buffer_info) { |
| 735 return GetLongField( | 690 return GetLongField( |
| 736 jni, j_output_buffer_info, j_info_presentation_timestamp_us_field_); | 691 jni, j_output_buffer_info, j_info_presentation_timestamp_us_field_); |
| 737 } | 692 } |
| 738 | 693 |
| 739 bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) { | 694 bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) { |
| 740 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | |
| 741 while (true) { | 695 while (true) { |
| 742 jobject j_output_buffer_info = jni->CallObjectMethod( | 696 jobject j_output_buffer_info = jni->CallObjectMethod( |
| 743 *j_media_codec_video_encoder_, j_dequeue_output_buffer_method_); | 697 *j_media_codec_video_encoder_, j_dequeue_output_buffer_method_); |
| 744 CHECK_EXCEPTION(jni); | 698 CHECK_EXCEPTION(jni); |
| 745 if (IsNull(jni, j_output_buffer_info)) { | 699 if (IsNull(jni, j_output_buffer_info)) { |
| 746 break; | 700 break; |
| 747 } | 701 } |
| 748 | 702 |
| 749 int output_buffer_index = | 703 int output_buffer_index = |
| 750 GetOutputBufferInfoIndex(jni, j_output_buffer_info); | 704 GetOutputBufferInfoIndex(jni, j_output_buffer_info); |
| 751 if (output_buffer_index == -1) { | 705 if (output_buffer_index == -1) { |
| 752 ResetCodecOnCodecThread(); | 706 ResetCodec(); |
| 753 return false; | 707 return false; |
| 754 } | 708 } |
| 755 | 709 |
| 756 // Get key and config frame flags. | 710 // Get key and config frame flags. |
| 757 jobject j_output_buffer = | 711 jobject j_output_buffer = |
| 758 GetOutputBufferInfoBuffer(jni, j_output_buffer_info); | 712 GetOutputBufferInfoBuffer(jni, j_output_buffer_info); |
| 759 bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info); | 713 bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info); |
| 760 | 714 |
| 761 // Get frame timestamps from a queue - for non config frames only. | 715 // Get frame timestamps from a queue - for non config frames only. |
| 762 int64_t frame_encoding_time_ms = 0; | 716 int64_t frame_encoding_time_ms = 0; |
| (...skipping 100 matching lines...) |
| 863 } | 817 } |
| 864 scPosition += naluPosition; | 818 scPosition += naluPosition; |
| 865 scPositions[scPositionsLength++] = scPosition; | 819 scPositions[scPositionsLength++] = scPosition; |
| 866 scPosition += H264_SC_LENGTH; | 820 scPosition += H264_SC_LENGTH; |
| 867 } | 821 } |
| 868 if (scPositionsLength == 0) { | 822 if (scPositionsLength == 0) { |
| 869 ALOGE << "Start code is not found!"; | 823 ALOGE << "Start code is not found!"; |
| 870 ALOGE << "Data:" << image->_buffer[0] << " " << image->_buffer[1] | 824 ALOGE << "Data:" << image->_buffer[0] << " " << image->_buffer[1] |
| 871 << " " << image->_buffer[2] << " " << image->_buffer[3] | 825 << " " << image->_buffer[2] << " " << image->_buffer[3] |
| 872 << " " << image->_buffer[4] << " " << image->_buffer[5]; | 826 << " " << image->_buffer[4] << " " << image->_buffer[5]; |
| 873 ResetCodecOnCodecThread(); | 827 ResetCodec(); |
| 874 return false; | 828 return false; |
| 875 } | 829 } |
| 876 scPositions[scPositionsLength] = payload_size; | 830 scPositions[scPositionsLength] = payload_size; |
| 877 header.VerifyAndAllocateFragmentationHeader(scPositionsLength); | 831 header.VerifyAndAllocateFragmentationHeader(scPositionsLength); |
| 878 for (size_t i = 0; i < scPositionsLength; i++) { | 832 for (size_t i = 0; i < scPositionsLength; i++) { |
| 879 header.fragmentationOffset[i] = scPositions[i] + H264_SC_LENGTH; | 833 header.fragmentationOffset[i] = scPositions[i] + H264_SC_LENGTH; |
| 880 header.fragmentationLength[i] = | 834 header.fragmentationLength[i] = |
| 881 scPositions[i + 1] - header.fragmentationOffset[i]; | 835 scPositions[i + 1] - header.fragmentationOffset[i]; |
| 882 header.fragmentationPlType[i] = 0; | 836 header.fragmentationPlType[i] = 0; |
| 883 header.fragmentationTimeDiff[i] = 0; | 837 header.fragmentationTimeDiff[i] = 0; |
| 884 } | 838 } |
| 885 } | 839 } |
| 886 | 840 |
| 887 callback_status = callback_->Encoded(*image, &info, &header); | 841 callback_status = callback_->Encoded(*image, &info, &header); |
| 888 } | 842 } |
| 889 | 843 |
| 890 // Return output buffer back to the encoder. | 844 // Return output buffer back to the encoder. |
| 891 bool success = jni->CallBooleanMethod(*j_media_codec_video_encoder_, | 845 bool success = jni->CallBooleanMethod(*j_media_codec_video_encoder_, |
| 892 j_release_output_buffer_method_, | 846 j_release_output_buffer_method_, |
| 893 output_buffer_index); | 847 output_buffer_index); |
| 894 CHECK_EXCEPTION(jni); | 848 CHECK_EXCEPTION(jni); |
| 895 if (!success) { | 849 if (!success) { |
| 896 ResetCodecOnCodecThread(); | 850 ResetCodec(); |
| 897 return false; | 851 return false; |
| 898 } | 852 } |
| 899 | 853 |
| 900 if (callback_status > 0) { | 854 if (callback_status > 0) { |
| 901 drop_next_input_frame_ = true; | 855 drop_next_input_frame_ = true; |
| 902 // Theoretically could handle callback_status<0 here, but unclear what | 856 // Theoretically could handle callback_status<0 here, but unclear what |
| 903 // that would mean for us. | 857 // that would mean for us. |
| 904 } | 858 } |
| 905 } | 859 } |
| 906 | 860 |
| (...skipping 92 matching lines...) |
| 999 } | 953 } |
| 1000 | 954 |
| 1001 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( | 955 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( |
| 1002 webrtc::VideoEncoder* encoder) { | 956 webrtc::VideoEncoder* encoder) { |
| 1003 ALOGD << "Destroy video encoder."; | 957 ALOGD << "Destroy video encoder."; |
| 1004 delete encoder; | 958 delete encoder; |
| 1005 } | 959 } |
| 1006 | 960 |
| 1007 } // namespace webrtc_jni | 961 } // namespace webrtc_jni |
| 1008 | 962 |
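For reference, the threading pattern this file relies on — public webrtc::VideoEncoder entry points synchronously Invoke() their *OnCodecThread() counterparts on a dedicated |codec_thread_|, which CHECK-fails if entered from the wrong thread — can be sketched as below. This is a minimal illustration, not the actual WebRTC sources: the EncoderShim class and the exact include set are assumptions, while rtc::Bind, rtc::Thread::Invoke, rtc::ThreadManager and RTC_CHECK are used only as they already appear in the diff above.

#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/thread.h"

class EncoderShim {
 public:
  EncoderShim() : codec_thread_(new rtc::Thread()) {
    codec_thread_->SetName("EncoderShim", NULL);
    RTC_CHECK(codec_thread_->Start()) << "Failed to start codec thread";
  }

  // Public API: callable from any thread; trampolines to |codec_thread_|.
  int32_t Release() {
    return codec_thread_->Invoke<int32_t>(
        rtc::Bind(&EncoderShim::ReleaseOnCodecThread, this));
  }

 private:
  // CHECK-fail if not running on |codec_thread_|.
  void CheckOnCodecThread() {
    RTC_CHECK(codec_thread_.get() ==
              rtc::ThreadManager::Instance()->CurrentThread())
        << "Running on wrong thread!";
  }

  int32_t ReleaseOnCodecThread() {
    CheckOnCodecThread();
    // Release codec resources here; runs exclusively on |codec_thread_|.
    return 0;
  }

  rtc::scoped_ptr<rtc::Thread> codec_thread_;
};

The dedicated per-encoder thread (rather than reusing the PeerConnectionFactory worker thread) follows the rationale in the comment about bug 2732 above: synchronous Invoke() into a shared thread can deadlock when that thread may also be calling back into the encoder.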