Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 /* | 1 /* |
| 2 * libjingle | 2 * libjingle |
| 3 * Copyright 2015 Google Inc. | 3 * Copyright 2015 Google Inc. |
| 4 * | 4 * |
| 5 * Redistribution and use in source and binary forms, with or without | 5 * Redistribution and use in source and binary forms, with or without |
| 6 * modification, are permitted provided that the following conditions are met: | 6 * modification, are permitted provided that the following conditions are met: |
| 7 * | 7 * |
| 8 * 1. Redistributions of source code must retain the above copyright notice, | 8 * 1. Redistributions of source code must retain the above copyright notice, |
| 9 * this list of conditions and the following disclaimer. | 9 * this list of conditions and the following disclaimer. |
| 10 * 2. Redistributions in binary form must reproduce the above copyright notice, | 10 * 2. Redistributions in binary form must reproduce the above copyright notice, |
| (...skipping 15 matching lines...) | |
| 26 * | 26 * |
| 27 */ | 27 */ |
| 28 | 28 |
| 29 #include "talk/app/webrtc/java/jni/androidmediaencoder_jni.h" | 29 #include "talk/app/webrtc/java/jni/androidmediaencoder_jni.h" |
| 30 #include "talk/app/webrtc/java/jni/classreferenceholder.h" | 30 #include "talk/app/webrtc/java/jni/classreferenceholder.h" |
| 31 #include "talk/app/webrtc/java/jni/androidmediacodeccommon.h" | 31 #include "talk/app/webrtc/java/jni/androidmediacodeccommon.h" |
| 32 #include "webrtc/base/bind.h" | 32 #include "webrtc/base/bind.h" |
| 33 #include "webrtc/base/checks.h" | 33 #include "webrtc/base/checks.h" |
| 34 #include "webrtc/base/logging.h" | 34 #include "webrtc/base/logging.h" |
| 35 #include "webrtc/base/thread.h" | 35 #include "webrtc/base/thread.h" |
| 36 #include "webrtc/base/thread_checker.h" | |
| 36 #include "webrtc/modules/rtp_rtcp/source/h264_bitstream_parser.h" | 37 #include "webrtc/modules/rtp_rtcp/source/h264_bitstream_parser.h" |
| 37 #include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h" | 38 #include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h" |
| 38 #include "webrtc/modules/video_coding/utility/include/quality_scaler.h" | 39 #include "webrtc/modules/video_coding/utility/include/quality_scaler.h" |
| 39 #include "webrtc/modules/video_coding/utility/include/vp8_header_parser.h" | 40 #include "webrtc/modules/video_coding/utility/include/vp8_header_parser.h" |
| 40 #include "webrtc/system_wrappers/interface/field_trial.h" | 41 #include "webrtc/system_wrappers/interface/field_trial.h" |
| 41 #include "webrtc/system_wrappers/interface/logcat_trace_context.h" | 42 #include "webrtc/system_wrappers/interface/logcat_trace_context.h" |
| 42 #include "third_party/libyuv/include/libyuv/convert.h" | 43 #include "third_party/libyuv/include/libyuv/convert.h" |
| 43 #include "third_party/libyuv/include/libyuv/convert_from.h" | 44 #include "third_party/libyuv/include/libyuv/convert_from.h" |
| 44 #include "third_party/libyuv/include/libyuv/video_common.h" | 45 #include "third_party/libyuv/include/libyuv/video_common.h" |
| 45 | 46 |
| (...skipping 26 matching lines...) | |
| 72 // MediaCodecVideoEncoder is a webrtc::VideoEncoder implementation that uses | 73 // MediaCodecVideoEncoder is a webrtc::VideoEncoder implementation that uses |
| 73 // Android's MediaCodec SDK API behind the scenes to implement (hopefully) | 74 // Android's MediaCodec SDK API behind the scenes to implement (hopefully) |
| 74 // HW-backed video encode. This C++ class is implemented as a very thin shim, | 75 // HW-backed video encode. This C++ class is implemented as a very thin shim, |
| 75 // delegating all of the interesting work to org.webrtc.MediaCodecVideoEncoder. | 76 // delegating all of the interesting work to org.webrtc.MediaCodecVideoEncoder. |
| 76 // MediaCodecVideoEncoder is created, operated, and destroyed on a single | 77 // MediaCodecVideoEncoder is created, operated, and destroyed on a single |
| 77 // thread, currently the libjingle Worker thread. | 78 // thread, currently the libjingle Worker thread. |
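The class comment above pins down the threading contract: the encoder lives on one thread, and every public entry point trampolines synchronously onto |codec_thread_|. As a rough standalone illustration of that dispatch pattern — using std::thread and std::promise rather than the rtc::Thread/Bind machinery this file actually relies on, with an invented `SingleThreadInvoker` name — a synchronous Invoke onto a single worker can be sketched like this:

```cpp
// Standalone sketch only: mimics "public API blocks while the work runs on a
// single dedicated worker thread", the pattern MediaCodecVideoEncoder uses
// via codec_thread_->Invoke<int32_t>(Bind(...)).
#include <condition_variable>
#include <cstdint>
#include <functional>
#include <future>
#include <mutex>
#include <queue>
#include <thread>

class SingleThreadInvoker {
 public:
  SingleThreadInvoker() : worker_([this] { Run(); }) {}
  ~SingleThreadInvoker() {
    Post(nullptr);  // A null task is the stop signal.
    worker_.join();
  }

  // Runs |task| on the worker thread and blocks the caller until it finishes.
  int32_t Invoke(std::function<int32_t()> task) {
    std::promise<int32_t> result;
    Post([&] { result.set_value(task()); });
    return result.get_future().get();
  }

 private:
  void Post(std::function<void()> task) {
    {
      std::lock_guard<std::mutex> lock(mutex_);
      tasks_.push(std::move(task));
    }
    cv_.notify_one();
  }

  void Run() {
    for (;;) {
      std::function<void()> task;
      {
        std::unique_lock<std::mutex> lock(mutex_);
        cv_.wait(lock, [this] { return !tasks_.empty(); });
        task = std::move(tasks_.front());
        tasks_.pop();
      }
      if (!task)
        return;  // Stop signal from the destructor.
      task();    // All "codec" work happens on this one thread.
    }
  }

  std::mutex mutex_;
  std::condition_variable cv_;
  std::queue<std::function<void()>> tasks_;
  std::thread worker_;  // Declared last so it starts after the members it uses.
};
```

In the real class, InitEncode, Encode, SetRates and Release each forward to an *OnCodecThread counterpart in exactly this blocking fashion, which is why the rtc::ThreadChecker DCHECKs added in this change are sufficient inside those helpers.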
| 78 class MediaCodecVideoEncoder : public webrtc::VideoEncoder, | 79 class MediaCodecVideoEncoder : public webrtc::VideoEncoder, |
| 79 public rtc::MessageHandler { | 80 public rtc::MessageHandler { |
| 80 public: | 81 public: |
| 81 virtual ~MediaCodecVideoEncoder(); | 82 virtual ~MediaCodecVideoEncoder(); |
| 82 explicit MediaCodecVideoEncoder(JNIEnv* jni, VideoCodecType codecType); | 83 MediaCodecVideoEncoder(JNIEnv* jni, |
| 84 VideoCodecType codecType); | |
| 83 | 85 |
| 84 // webrtc::VideoEncoder implementation. Everything trampolines to | 86 // webrtc::VideoEncoder implementation. Everything trampolines to |
| 85 // |codec_thread_| for execution. | 87 // |codec_thread_| for execution. |
| 86 int32_t InitEncode(const webrtc::VideoCodec* codec_settings, | 88 int32_t InitEncode(const webrtc::VideoCodec* codec_settings, |
| 87 int32_t /* number_of_cores */, | 89 int32_t /* number_of_cores */, |
| 88 size_t /* max_payload_size */) override; | 90 size_t /* max_payload_size */) override; |
| 89 int32_t Encode(const webrtc::VideoFrame& input_image, | 91 int32_t Encode(const webrtc::VideoFrame& input_image, |
| 90 const webrtc::CodecSpecificInfo* /* codec_specific_info */, | 92 const webrtc::CodecSpecificInfo* /* codec_specific_info */, |
| 91 const std::vector<webrtc::FrameType>* frame_types) override; | 93 const std::vector<webrtc::FrameType>* frame_types) override; |
| 92 int32_t RegisterEncodeCompleteCallback( | 94 int32_t RegisterEncodeCompleteCallback( |
| 93 webrtc::EncodedImageCallback* callback) override; | 95 webrtc::EncodedImageCallback* callback) override; |
| 94 int32_t Release() override; | 96 int32_t Release() override; |
| 95 int32_t SetChannelParameters(uint32_t /* packet_loss */, | 97 int32_t SetChannelParameters(uint32_t /* packet_loss */, |
| 96 int64_t /* rtt */) override; | 98 int64_t /* rtt */) override; |
| 97 int32_t SetRates(uint32_t new_bit_rate, uint32_t frame_rate) override; | 99 int32_t SetRates(uint32_t new_bit_rate, uint32_t frame_rate) override; |
| 98 | 100 |
| 99 // rtc::MessageHandler implementation. | 101 // rtc::MessageHandler implementation. |
| 100 void OnMessage(rtc::Message* msg) override; | 102 void OnMessage(rtc::Message* msg) override; |
| 101 | 103 |
| 102 void OnDroppedFrame() override; | 104 void OnDroppedFrame() override; |
| 103 | 105 |
| 104 int GetTargetFramerate() override; | 106 int GetTargetFramerate() override; |
| 105 | 107 |
| 106 private: | 108 private: |
| 107 // CHECK-fail if not running on |codec_thread_|. | 109 // ResetCodecOnCodecThread() calls ReleaseOnCodecThread() and |
| 108 void CheckOnCodecThread(); | 110 // InitEncodeOnCodecThread() in an attempt to restore the codec to an |
| 109 | |
| 110 // Release() and InitEncode() in an attempt to restore the codec to an | |
| 111 // operable state. Necessary after all manner of OMX-layer errors. | 111 // operable state. Necessary after all manner of OMX-layer errors. |
| 112 void ResetCodec(); | 112 bool ResetCodecOnCodecThread(); |
| 113 | 113 |
| 114 // Implementation of webrtc::VideoEncoder methods above, all running on the | 114 // Implementation of webrtc::VideoEncoder methods above, all running on the |
| 115 // codec thread exclusively. | 115 // codec thread exclusively. |
| 116 // | 116 // |
| 117 // If width==0 then this is assumed to be a re-initialization and the | 117 // If width==0 then this is assumed to be a re-initialization and the |
| 118 // previously-current values are reused instead of the passed parameters | 118 // previously-current values are reused instead of the passed parameters |
| 119 // (makes it easier to reason about thread-safety). | 119 // (makes it easier to reason about thread-safety). |
| 120 int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps); | 120 int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps); |
| 121 // Reconfigure to match |frame| in width, height. Returns false if | |
| 122 // reconfiguring fails. | |
| 123 bool MaybeReconfigureEncoderOnCodecThread(const webrtc::VideoFrame& frame); | |
| 121 int32_t EncodeOnCodecThread( | 124 int32_t EncodeOnCodecThread( |
| 122 const webrtc::VideoFrame& input_image, | 125 const webrtc::VideoFrame& input_image, |
| 123 const std::vector<webrtc::FrameType>* frame_types); | 126 const std::vector<webrtc::FrameType>* frame_types); |
| 127 bool EncodeByteBufferOnCodecThread(JNIEnv* jni, | |
| 128 bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index); | |
| 129 | |
| 124 int32_t RegisterEncodeCompleteCallbackOnCodecThread( | 130 int32_t RegisterEncodeCompleteCallbackOnCodecThread( |
| 125 webrtc::EncodedImageCallback* callback); | 131 webrtc::EncodedImageCallback* callback); |
| 126 int32_t ReleaseOnCodecThread(); | 132 int32_t ReleaseOnCodecThread(); |
| 127 int32_t SetRatesOnCodecThread(uint32_t new_bit_rate, uint32_t frame_rate); | 133 int32_t SetRatesOnCodecThread(uint32_t new_bit_rate, uint32_t frame_rate); |
| 128 | 134 |
| 129 // Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members. | 135 // Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members. |
| 130 int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info); | 136 int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info); |
| 131 jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info); | 137 jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info); |
| 132 bool GetOutputBufferInfoIsKeyFrame(JNIEnv* jni, jobject j_output_buffer_info); | 138 bool GetOutputBufferInfoIsKeyFrame(JNIEnv* jni, jobject j_output_buffer_info); |
| 133 jlong GetOutputBufferInfoPresentationTimestampUs( | 139 jlong GetOutputBufferInfoPresentationTimestampUs( |
| 134 JNIEnv* jni, jobject j_output_buffer_info); | 140 JNIEnv* jni, jobject j_output_buffer_info); |
| 135 | 141 |
| 136 // Deliver any outputs pending in the MediaCodec to our |callback_| and return | 142 // Deliver any outputs pending in the MediaCodec to our |callback_| and return |
| 137 // true on success. | 143 // true on success. |
| 138 bool DeliverPendingOutputs(JNIEnv* jni); | 144 bool DeliverPendingOutputs(JNIEnv* jni); |
| 139 | 145 |
| 140 // Search for H.264 start codes. | 146 // Search for H.264 start codes. |
| 141 int32_t NextNaluPosition(uint8_t *buffer, size_t buffer_size); | 147 int32_t NextNaluPosition(uint8_t *buffer, size_t buffer_size); |
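NextNaluPosition()'s body falls outside the hunks shown here, so here is a hedged sketch of what an Annex-B start-code scan of this kind typically does (illustrative function name, 4-byte start code assumed):

```cpp
// Returns the byte offset of the next 0x00 0x00 0x00 0x01 start code in
// |buffer|, or -1 if none is found. Sketch only; the real NextNaluPosition()
// is defined outside this diff hunk and may differ in detail.
#include <cstddef>
#include <cstdint>

static int32_t FindNextAnnexBStartCode(const uint8_t* buffer,
                                       size_t buffer_size) {
  if (buffer_size < 4)
    return -1;
  for (size_t i = 0; i + 4 <= buffer_size; ++i) {
    if (buffer[i] == 0x00 && buffer[i + 1] == 0x00 &&
        buffer[i + 2] == 0x00 && buffer[i + 3] == 0x01) {
      return static_cast<int32_t>(i);
    }
  }
  return -1;
}
```

The H.264 path in DeliverPendingOutputs() further down uses start-code positions like these to build the RTP fragmentation header, one fragment per NAL unit.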
| 142 | 148 |
| 143 // Type of video codec. | 149 // Type of video codec. |
| 144 VideoCodecType codecType_; | 150 VideoCodecType codecType_; |
| 145 | 151 |
| 146 // Valid all the time since RegisterEncodeCompleteCallback() Invoke()s to | 152 // Valid all the time since RegisterEncodeCompleteCallback() Invoke()s to |
| 147 // |codec_thread_| synchronously. | 153 // |codec_thread_| synchronously. |
| 148 webrtc::EncodedImageCallback* callback_; | 154 webrtc::EncodedImageCallback* callback_; |
| 149 | 155 |
| 150 // State that is constant for the lifetime of this object once the ctor | 156 // State that is constant for the lifetime of this object once the ctor |
| 151 // returns. | 157 // returns. |
| 152 scoped_ptr<Thread> codec_thread_; // Thread on which to operate MediaCodec. | 158 scoped_ptr<Thread> codec_thread_; // Thread on which to operate MediaCodec. |
| 159 rtc::ThreadChecker codec_thread_checker_; | |
| 153 ScopedGlobalRef<jclass> j_media_codec_video_encoder_class_; | 160 ScopedGlobalRef<jclass> j_media_codec_video_encoder_class_; |
| 154 ScopedGlobalRef<jobject> j_media_codec_video_encoder_; | 161 ScopedGlobalRef<jobject> j_media_codec_video_encoder_; |
| 155 jmethodID j_init_encode_method_; | 162 jmethodID j_init_encode_method_; |
| 163 jmethodID j_get_input_buffers_method_; | |
| 156 jmethodID j_dequeue_input_buffer_method_; | 164 jmethodID j_dequeue_input_buffer_method_; |
| 157 jmethodID j_encode_method_; | 165 jmethodID j_encode_buffer_method_; |
| 158 jmethodID j_release_method_; | 166 jmethodID j_release_method_; |
| 159 jmethodID j_set_rates_method_; | 167 jmethodID j_set_rates_method_; |
| 160 jmethodID j_dequeue_output_buffer_method_; | 168 jmethodID j_dequeue_output_buffer_method_; |
| 161 jmethodID j_release_output_buffer_method_; | 169 jmethodID j_release_output_buffer_method_; |
| 162 jfieldID j_color_format_field_; | 170 jfieldID j_color_format_field_; |
| 163 jfieldID j_info_index_field_; | 171 jfieldID j_info_index_field_; |
| 164 jfieldID j_info_buffer_field_; | 172 jfieldID j_info_buffer_field_; |
| 165 jfieldID j_info_is_key_frame_field_; | 173 jfieldID j_info_is_key_frame_field_; |
| 166 jfieldID j_info_presentation_timestamp_us_field_; | 174 jfieldID j_info_presentation_timestamp_us_field_; |
| 167 | 175 |
| (...skipping 64 matching lines...) | |
| 232 ScopedLocalRefFrame local_ref_frame(jni); | 240 ScopedLocalRefFrame local_ref_frame(jni); |
| 233 // It would be nice to avoid spinning up a new thread per MediaCodec, and | 241 // It would be nice to avoid spinning up a new thread per MediaCodec, and |
| 234 // instead re-use e.g. the PeerConnectionFactory's |worker_thread_|, but bug | 242 // instead re-use e.g. the PeerConnectionFactory's |worker_thread_|, but bug |
| 235 // 2732 means that deadlocks abound. This class synchronously trampolines | 243 // 2732 means that deadlocks abound. This class synchronously trampolines |
| 236 // to |codec_thread_|, so if anything else can be coming to _us_ from | 244 // to |codec_thread_|, so if anything else can be coming to _us_ from |
| 237 // |codec_thread_|, or from any thread holding the |_sendCritSect| described | 245 // |codec_thread_|, or from any thread holding the |_sendCritSect| described |
| 238 // in the bug, we have a problem. For now work around that with a dedicated | 246 // in the bug, we have a problem. For now work around that with a dedicated |
| 239 // thread. | 247 // thread. |
| 240 codec_thread_->SetName("MediaCodecVideoEncoder", NULL); | 248 codec_thread_->SetName("MediaCodecVideoEncoder", NULL); |
| 241 RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoEncoder"; | 249 RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoEncoder"; |
| 242 | 250 codec_thread_checker_.DetachFromThread(); |
| 243 jclass j_output_buffer_info_class = | 251 jclass j_output_buffer_info_class = |
| 244 FindClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo"); | 252 FindClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo"); |
| 245 j_init_encode_method_ = GetMethodID( | 253 j_init_encode_method_ = GetMethodID( |
| 246 jni, | 254 jni, |
| 247 *j_media_codec_video_encoder_class_, | 255 *j_media_codec_video_encoder_class_, |
| 248 "initEncode", | 256 "initEncode", |
| 249 "(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;IIII)" | 257 "(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;IIII)Z"); |
| 250 "[Ljava/nio/ByteBuffer;"); | 258 j_get_input_buffers_method_ = GetMethodID( |
| 259 jni, | |
| 260 *j_media_codec_video_encoder_class_, | |
| 261 "getInputBuffers", | |
| 262 "()[Ljava/nio/ByteBuffer;"); | |
| 251 j_dequeue_input_buffer_method_ = GetMethodID( | 263 j_dequeue_input_buffer_method_ = GetMethodID( |
| 252 jni, *j_media_codec_video_encoder_class_, "dequeueInputBuffer", "()I"); | 264 jni, *j_media_codec_video_encoder_class_, "dequeueInputBuffer", "()I"); |
| 253 j_encode_method_ = GetMethodID( | 265 j_encode_buffer_method_ = GetMethodID( |
| 254 jni, *j_media_codec_video_encoder_class_, "encode", "(ZIIJ)Z"); | 266 jni, *j_media_codec_video_encoder_class_, "encodeBuffer", "(ZIIJ)Z"); |
| 255 j_release_method_ = | 267 j_release_method_ = |
| 256 GetMethodID(jni, *j_media_codec_video_encoder_class_, "release", "()V"); | 268 GetMethodID(jni, *j_media_codec_video_encoder_class_, "release", "()V"); |
| 257 j_set_rates_method_ = GetMethodID( | 269 j_set_rates_method_ = GetMethodID( |
| 258 jni, *j_media_codec_video_encoder_class_, "setRates", "(II)Z"); | 270 jni, *j_media_codec_video_encoder_class_, "setRates", "(II)Z"); |
| 259 j_dequeue_output_buffer_method_ = GetMethodID( | 271 j_dequeue_output_buffer_method_ = GetMethodID( |
| 260 jni, | 272 jni, |
| 261 *j_media_codec_video_encoder_class_, | 273 *j_media_codec_video_encoder_class_, |
| 262 "dequeueOutputBuffer", | 274 "dequeueOutputBuffer", |
| 263 "()Lorg/webrtc/MediaCodecVideoEncoder$OutputBufferInfo;"); | 275 "()Lorg/webrtc/MediaCodecVideoEncoder$OutputBufferInfo;"); |
| 264 j_release_output_buffer_method_ = GetMethodID( | 276 j_release_output_buffer_method_ = GetMethodID( |
| (...skipping 102 matching lines...) | |
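The jmethodID lookups in the hunk above rely on JNI type-signature strings: `Z` is boolean, `I` is int, `J` is long, `L<class>;` is an object reference, and a leading `[` marks an array. So `"(ZIIJ)Z"` is `boolean f(boolean, int, int, long)` and `"()[Ljava/nio/ByteBuffer;"` takes no arguments and returns a `ByteBuffer[]`. A minimal raw-JNI equivalent of the helper calls above (the `LookupEncoderMethods` name is illustrative, error handling omitted):

```cpp
#include <jni.h>

// Sketch: resolve the same two Java methods the code above looks up through
// the GetMethodID helper; the signature strings encode parameter and return
// types.
static void LookupEncoderMethods(JNIEnv* env,
                                 jclass encoder_class,
                                 jmethodID* encode_buffer,
                                 jmethodID* get_input_buffers) {
  // boolean encodeBuffer(boolean, int, int, long)
  *encode_buffer = env->GetMethodID(encoder_class, "encodeBuffer", "(ZIIJ)Z");
  // java.nio.ByteBuffer[] getInputBuffers()
  *get_input_buffers = env->GetMethodID(encoder_class, "getInputBuffers",
                                        "()[Ljava/nio/ByteBuffer;");
}
```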
| 367 quality_scaler_.ReportFramerate(frame_rate); | 379 quality_scaler_.ReportFramerate(frame_rate); |
| 368 | 380 |
| 369 return codec_thread_->Invoke<int32_t>( | 381 return codec_thread_->Invoke<int32_t>( |
| 370 Bind(&MediaCodecVideoEncoder::SetRatesOnCodecThread, | 382 Bind(&MediaCodecVideoEncoder::SetRatesOnCodecThread, |
| 371 this, | 383 this, |
| 372 new_bit_rate, | 384 new_bit_rate, |
| 373 frame_rate)); | 385 frame_rate)); |
| 374 } | 386 } |
| 375 | 387 |
| 376 void MediaCodecVideoEncoder::OnMessage(rtc::Message* msg) { | 388 void MediaCodecVideoEncoder::OnMessage(rtc::Message* msg) { |
| 389 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | |
| 377 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 390 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 378 ScopedLocalRefFrame local_ref_frame(jni); | 391 ScopedLocalRefFrame local_ref_frame(jni); |
| 379 | 392 |
| 380 // We only ever send one message to |this| directly (not through a Bind()'d | 393 // We only ever send one message to |this| directly (not through a Bind()'d |
| 381 // functor), so expect no ID/data. | 394 // functor), so expect no ID/data. |
| 382 RTC_CHECK(!msg->message_id) << "Unexpected message!"; | 395 RTC_CHECK(!msg->message_id) << "Unexpected message!"; |
| 383 RTC_CHECK(!msg->pdata) << "Unexpected message!"; | 396 RTC_CHECK(!msg->pdata) << "Unexpected message!"; |
| 384 CheckOnCodecThread(); | |
| 385 if (!inited_) { | 397 if (!inited_) { |
| 386 return; | 398 return; |
| 387 } | 399 } |
| 388 | 400 |
| 389 // It would be nice to recover from a failure here if one happened, but it's | 401 // It would be nice to recover from a failure here if one happened, but it's |
| 390 // unclear how to signal such a failure to the app, so instead we stay silent | 402 // unclear how to signal such a failure to the app, so instead we stay silent |
| 391 // about it and let the next app-called API method reveal the borkedness. | 403 // about it and let the next app-called API method reveal the borkedness. |
| 392 DeliverPendingOutputs(jni); | 404 DeliverPendingOutputs(jni); |
| 393 codec_thread_->PostDelayed(kMediaCodecPollMs, this); | 405 codec_thread_->PostDelayed(kMediaCodecPollMs, this); |
| 394 } | 406 } |
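OnMessage() is the output-polling tick. Condensed from the surrounding hunks (names as in this file, no new code), the cycle looks like this:

```cpp
// Polling cycle, condensed from this file:
//
//   InitEncodeOnCodecThread():
//     codec_thread_->PostDelayed(kMediaCodecPollMs, this);   // arm first poll
//
//   OnMessage():                              // runs on |codec_thread_|
//     if (!inited_) return;                   // Release() has torn down
//     DeliverPendingOutputs(jni);             // drain finished output buffers
//     codec_thread_->PostDelayed(kMediaCodecPollMs, this);   // re-arm
//
//   ReleaseOnCodecThread():
//     rtc::MessageQueueManager::Clear(this);  // drop any queued poll message
//     inited_ = false;
```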
| 395 | 407 |
| 396 void MediaCodecVideoEncoder::CheckOnCodecThread() { | 408 bool MediaCodecVideoEncoder::ResetCodecOnCodecThread() { |
| 397 RTC_CHECK(codec_thread_ == ThreadManager::Instance()->CurrentThread()) | 409 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
| 398 << "Running on wrong thread!"; | 410 ALOGE << "ResetOnCodecThread"; |
| 399 } | 411 if (ReleaseOnCodecThread() != WEBRTC_VIDEO_CODEC_OK || |
| 400 | 412 InitEncodeOnCodecThread(width_, height_, 0, 0) |
| 401 void MediaCodecVideoEncoder::ResetCodec() { | 413 != WEBRTC_VIDEO_CODEC_OK) { |
| 402 ALOGE << "ResetCodec"; | |
| 403 if (Release() != WEBRTC_VIDEO_CODEC_OK || | |
| 404 codec_thread_->Invoke<int32_t>(Bind( | |
| 405 &MediaCodecVideoEncoder::InitEncodeOnCodecThread, this, | |
| 406 width_, height_, 0, 0)) != WEBRTC_VIDEO_CODEC_OK) { | |
| 407 // TODO(fischman): wouldn't it be nice if there was a way to gracefully | 414 // TODO(fischman): wouldn't it be nice if there was a way to gracefully |
| 408 // degrade to a SW encoder at this point? There isn't one AFAICT :( | 415 // degrade to a SW encoder at this point? There isn't one AFAICT :( |
| 409 // https://code.google.com/p/webrtc/issues/detail?id=2920 | 416 // https://code.google.com/p/webrtc/issues/detail?id=2920 |
| 417 return false; | |
| 410 } | 418 } |
| 419 return true; | |
| 411 } | 420 } |
| 412 | 421 |
| 413 int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread( | 422 int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread( |
| 414 int width, int height, int kbps, int fps) { | 423 int width, int height, int kbps, int fps) { |
| 415 CheckOnCodecThread(); | 424 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
| 416 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 425 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 417 ScopedLocalRefFrame local_ref_frame(jni); | 426 ScopedLocalRefFrame local_ref_frame(jni); |
| 418 | 427 |
| 419 ALOGD << "InitEncodeOnCodecThread Type: " << (int)codecType_ << ", " << | 428 ALOGD << "InitEncodeOnCodecThread Type: " << (int)codecType_ << ", " << |
| 420 width << " x " << height << ". Bitrate: " << kbps << | 429 width << " x " << height << ". Bitrate: " << kbps << |
| 421 " kbps. Fps: " << fps; | 430 " kbps. Fps: " << fps; |
| 422 if (kbps == 0) { | 431 if (kbps == 0) { |
| 423 kbps = last_set_bitrate_kbps_; | 432 kbps = last_set_bitrate_kbps_; |
| 424 } | 433 } |
| 425 if (fps == 0) { | 434 if (fps == 0) { |
| (...skipping 16 matching lines...) | |
| 442 current_encoding_time_ms_ = 0; | 451 current_encoding_time_ms_ = 0; |
| 443 last_input_timestamp_ms_ = -1; | 452 last_input_timestamp_ms_ = -1; |
| 444 last_output_timestamp_ms_ = -1; | 453 last_output_timestamp_ms_ = -1; |
| 445 output_timestamp_ = 0; | 454 output_timestamp_ = 0; |
| 446 output_render_time_ms_ = 0; | 455 output_render_time_ms_ = 0; |
| 447 timestamps_.clear(); | 456 timestamps_.clear(); |
| 448 render_times_ms_.clear(); | 457 render_times_ms_.clear(); |
| 449 frame_rtc_times_ms_.clear(); | 458 frame_rtc_times_ms_.clear(); |
| 450 drop_next_input_frame_ = false; | 459 drop_next_input_frame_ = false; |
| 451 picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF; | 460 picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF; |
| 461 | |
| 452 // We enforce no extra stride/padding in the format creation step. | 462 // We enforce no extra stride/padding in the format creation step. |
| 453 jobject j_video_codec_enum = JavaEnumFromIndex( | 463 jobject j_video_codec_enum = JavaEnumFromIndex( |
| 454 jni, "MediaCodecVideoEncoder$VideoCodecType", codecType_); | 464 jni, "MediaCodecVideoEncoder$VideoCodecType", codecType_); |
| 465 const bool encode_status = jni->CallBooleanMethod( | |
| 466 *j_media_codec_video_encoder_, j_init_encode_method_, | |
| 467 j_video_codec_enum, width, height, kbps, fps); | |
| 468 if (!encode_status) { | |
| 469 ALOGE << "Failed to configure encoder."; | |
| 470 return WEBRTC_VIDEO_CODEC_ERROR; | |
| 471 } | |
| 472 CHECK_EXCEPTION(jni); | |
| 473 | |
| 455 jobjectArray input_buffers = reinterpret_cast<jobjectArray>( | 474 jobjectArray input_buffers = reinterpret_cast<jobjectArray>( |
| 456 jni->CallObjectMethod(*j_media_codec_video_encoder_, | 475 jni->CallObjectMethod(*j_media_codec_video_encoder_, |
| 457 j_init_encode_method_, | 476 j_get_input_buffers_method_)); |
| 458 j_video_codec_enum, | |
| 459 width_, | |
| 460 height_, | |
| 461 kbps, | |
| 462 fps)); | |
| 463 CHECK_EXCEPTION(jni); | 477 CHECK_EXCEPTION(jni); |
| 464 if (IsNull(jni, input_buffers)) { | 478 if (IsNull(jni, input_buffers)) { |
| 465 return WEBRTC_VIDEO_CODEC_ERROR; | 479 return WEBRTC_VIDEO_CODEC_ERROR; |
| 466 } | 480 } |
| 467 | 481 |
| 468 inited_ = true; | |
| 469 switch (GetIntField(jni, *j_media_codec_video_encoder_, | 482 switch (GetIntField(jni, *j_media_codec_video_encoder_, |
| 470 j_color_format_field_)) { | 483 j_color_format_field_)) { |
| 471 case COLOR_FormatYUV420Planar: | 484 case COLOR_FormatYUV420Planar: |
| 472 encoder_fourcc_ = libyuv::FOURCC_YU12; | 485 encoder_fourcc_ = libyuv::FOURCC_YU12; |
| 473 break; | 486 break; |
| 474 case COLOR_FormatYUV420SemiPlanar: | 487 case COLOR_FormatYUV420SemiPlanar: |
| 475 case COLOR_QCOM_FormatYUV420SemiPlanar: | 488 case COLOR_QCOM_FormatYUV420SemiPlanar: |
| 476 case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m: | 489 case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m: |
| 477 encoder_fourcc_ = libyuv::FOURCC_NV12; | 490 encoder_fourcc_ = libyuv::FOURCC_NV12; |
| 478 break; | 491 break; |
| 479 default: | 492 default: |
| 480 LOG(LS_ERROR) << "Wrong color format."; | 493 LOG(LS_ERROR) << "Wrong color format."; |
| 481 return WEBRTC_VIDEO_CODEC_ERROR; | 494 return WEBRTC_VIDEO_CODEC_ERROR; |
| 482 } | 495 } |
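Both arms of the switch above describe 4:2:0 layouts — COLOR_FormatYUV420Planar is I420-style with separate U and V planes (FOURCC_YU12), while the SemiPlanar variants keep a full Y plane followed by interleaved UV (FOURCC_NV12). With the "no extra stride/padding" constraint noted earlier, the tightly packed input size is the same either way; a minimal sketch (the helper name is illustrative, and it assumes |yuv_size_|, set outside this hunk, is computed the same way):

```cpp
#include <cstddef>

// Tightly packed 4:2:0 frame: width*height luma bytes plus two quarter-size
// chroma planes, i.e. 3/2 * width * height, whether chroma is planar (I420)
// or interleaved (NV12).
static size_t Packed420BufferSize(int width, int height) {
  return static_cast<size_t>(width) * height * 3 / 2;
}
```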
| 483 size_t num_input_buffers = jni->GetArrayLength(input_buffers); | 496 size_t num_input_buffers = jni->GetArrayLength(input_buffers); |
| 484 RTC_CHECK(input_buffers_.empty()) | 497 RTC_CHECK(input_buffers_.empty()) |
| 485 << "Unexpected double InitEncode without Release"; | 498 << "Unexpected double InitEncode without Release"; |
| 486 input_buffers_.resize(num_input_buffers); | 499 input_buffers_.resize(num_input_buffers); |
| 487 for (size_t i = 0; i < num_input_buffers; ++i) { | 500 for (size_t i = 0; i < num_input_buffers; ++i) { |
| 488 input_buffers_[i] = | 501 input_buffers_[i] = |
| 489 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i)); | 502 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i)); |
| 490 int64_t yuv_buffer_capacity = | 503 int64_t yuv_buffer_capacity = |
| 491 jni->GetDirectBufferCapacity(input_buffers_[i]); | 504 jni->GetDirectBufferCapacity(input_buffers_[i]); |
| 492 CHECK_EXCEPTION(jni); | 505 CHECK_EXCEPTION(jni); |
| 493 RTC_CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity"; | 506 RTC_CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity"; |
| 494 } | 507 } |
| 495 CHECK_EXCEPTION(jni); | 508 CHECK_EXCEPTION(jni); |
| 496 | 509 |
| 510 | |
| 511 inited_ = true; | |
| 497 codec_thread_->PostDelayed(kMediaCodecPollMs, this); | 512 codec_thread_->PostDelayed(kMediaCodecPollMs, this); |
| 498 return WEBRTC_VIDEO_CODEC_OK; | 513 return WEBRTC_VIDEO_CODEC_OK; |
| 499 } | 514 } |
| 500 | 515 |
| 501 int32_t MediaCodecVideoEncoder::EncodeOnCodecThread( | 516 int32_t MediaCodecVideoEncoder::EncodeOnCodecThread( |
| 502 const webrtc::VideoFrame& frame, | 517 const webrtc::VideoFrame& frame, |
| 503 const std::vector<webrtc::FrameType>* frame_types) { | 518 const std::vector<webrtc::FrameType>* frame_types) { |
| 504 CheckOnCodecThread(); | 519 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
| 505 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 520 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 506 ScopedLocalRefFrame local_ref_frame(jni); | 521 ScopedLocalRefFrame local_ref_frame(jni); |
| 507 | 522 |
| 508 if (!inited_) { | 523 if (!inited_) { |
| 509 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 524 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
| 510 } | 525 } |
| 526 | |
| 511 frames_received_++; | 527 frames_received_++; |
| 512 if (!DeliverPendingOutputs(jni)) { | 528 if (!DeliverPendingOutputs(jni)) { |
| 513 ResetCodec(); | 529 if (!ResetCodecOnCodecThread()) |
| 514 // Continue as if everything's fine. | 530 return WEBRTC_VIDEO_CODEC_ERROR; |
| 515 } | 531 } |
| 516 | 532 |
| 517 if (drop_next_input_frame_) { | 533 if (drop_next_input_frame_) { |
| 518 ALOGV("Encoder drop frame - failed callback."); | 534 ALOGW << "Encoder drop frame - failed callback."; |
| 519 drop_next_input_frame_ = false; | 535 drop_next_input_frame_ = false; |
| 520 return WEBRTC_VIDEO_CODEC_OK; | 536 return WEBRTC_VIDEO_CODEC_OK; |
| 521 } | 537 } |
| 522 | 538 |
| 523 RTC_CHECK(frame_types->size() == 1) << "Unexpected stream count"; | 539 RTC_CHECK(frame_types->size() == 1) << "Unexpected stream count"; |
| 524 // Check framerate before spatial resolution change. | 540 // Check framerate before spatial resolution change. |
| 525 if (scale_) | 541 if (scale_) |
| 526 quality_scaler_.OnEncodeFrame(frame); | 542 quality_scaler_.OnEncodeFrame(frame); |
| 527 | 543 |
| 528 const VideoFrame& input_frame = | 544 const VideoFrame& input_frame = |
| 529 scale_ ? quality_scaler_.GetScaledFrame(frame) : frame; | 545 scale_ ? quality_scaler_.GetScaledFrame(frame) : frame; |
| 530 | 546 |
| 531 if (input_frame.width() != width_ || input_frame.height() != height_) { | 547 if (!MaybeReconfigureEncoderOnCodecThread(input_frame)) { |
| 532 ALOGD << "Frame resolution change from " << width_ << " x " << height_ << | 548 ALOGE << "Failed to reconfigure encoder."; |
| 533 " to " << input_frame.width() << " x " << input_frame.height(); | 549 return WEBRTC_VIDEO_CODEC_ERROR; |
| 534 width_ = input_frame.width(); | |
| 535 height_ = input_frame.height(); | |
| 536 ResetCodec(); | |
| 537 return WEBRTC_VIDEO_CODEC_OK; | |
| 538 } | 550 } |
| 539 | 551 |
| 540 // Check if we accumulated too many frames in encoder input buffers | 552 // Check if we accumulated too many frames in encoder input buffers |
| 541 // or the encoder latency exceeds 70 ms and drop frame if so. | 553 // or the encoder latency exceeds 70 ms and drop frame if so. |
| 542 if (frames_in_queue_ > 0 && last_input_timestamp_ms_ >= 0) { | 554 if (frames_in_queue_ > 0 && last_input_timestamp_ms_ >= 0) { |
| 543 int encoder_latency_ms = last_input_timestamp_ms_ - | 555 int encoder_latency_ms = last_input_timestamp_ms_ - |
| 544 last_output_timestamp_ms_; | 556 last_output_timestamp_ms_; |
| 545 if (frames_in_queue_ > 2 || encoder_latency_ms > 70) { | 557 if (frames_in_queue_ > 2 || encoder_latency_ms > 70) { |
| 546 ALOGD << "Drop frame - encoder is behind by " << encoder_latency_ms << | 558 ALOGD << "Drop frame - encoder is behind by " << encoder_latency_ms << |
| 547 " ms. Q size: " << frames_in_queue_; | 559 " ms. Q size: " << frames_in_queue_; |
| 548 frames_dropped_++; | 560 frames_dropped_++; |
| 549 // Report dropped frame to quality_scaler_. | 561 // Report dropped frame to quality_scaler_. |
| 550 OnDroppedFrame(); | 562 OnDroppedFrame(); |
| 551 return WEBRTC_VIDEO_CODEC_OK; | 563 return WEBRTC_VIDEO_CODEC_OK; |
| 552 } | 564 } |
| 553 } | 565 } |
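Restating the drop heuristic above as a small self-contained check, with the thresholds copied from the code (more than two frames queued, or more than 70 ms between the newest input handed to MediaCodec and the newest output that came back):

```cpp
#include <cstdint>

// Mirrors the condition in EncodeOnCodecThread(); illustrative helper name.
static bool ShouldDropIncomingFrame(int frames_in_queue,
                                    int64_t last_input_timestamp_ms,
                                    int64_t last_output_timestamp_ms) {
  if (frames_in_queue <= 0 || last_input_timestamp_ms < 0)
    return false;  // Nothing queued yet; encode normally.
  const int64_t encoder_latency_ms =
      last_input_timestamp_ms - last_output_timestamp_ms;
  return frames_in_queue > 2 || encoder_latency_ms > 70;
}

// Example: 2 frames queued, last input at 500 ms, last output at 410 ms
// -> latency 90 ms > 70 ms -> drop (and report it to the quality scaler).
```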
| 554 | 566 |
| 555 int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_, | 567 int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_, |
| 556 j_dequeue_input_buffer_method_); | 568 j_dequeue_input_buffer_method_); |
| 557 CHECK_EXCEPTION(jni); | 569 CHECK_EXCEPTION(jni); |
| 558 if (j_input_buffer_index == -1) { | 570 if (j_input_buffer_index == -1) { |
| 559 // Video codec falls behind - no input buffer available. | 571 // Video codec falls behind - no input buffer available. |
| 560 ALOGV("Encoder drop frame - no input buffers available"); | 572 ALOGW << "Encoder drop frame - no input buffers available"; |
| 561 frames_dropped_++; | 573 frames_dropped_++; |
| 562 // Report dropped frame to quality_scaler_. | 574 // Report dropped frame to quality_scaler_. |
| 563 OnDroppedFrame(); | 575 OnDroppedFrame(); |
| 564 return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887. | 576 return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887. |
| 565 } | 577 } |
| 566 if (j_input_buffer_index == -2) { | 578 if (j_input_buffer_index == -2) { |
| 567 ResetCodec(); | 579 ResetCodecOnCodecThread(); |
|
> magjed_webrtc (2015/10/22 23:24:17): Should we return WEBRTC_VIDEO_CODEC_ERROR even if …
> perkj_webrtc (2015/11/12 13:25:22): Yes, it will give the upper layer the chance to fa…
| 568 return WEBRTC_VIDEO_CODEC_ERROR; | 580 return WEBRTC_VIDEO_CODEC_ERROR; |
| 569 } | 581 } |
| 570 | 582 |
| 571 ALOGV("Encoder frame in # %d. TS: %lld. Q: %d", | 583 last_input_timestamp_ms_ = |
| 572 frames_received_ - 1, current_timestamp_us_ / 1000, frames_in_queue_); | 584 current_timestamp_us_ / rtc::kNumMicrosecsPerMillisec; |
| 573 | |
| 574 jobject j_input_buffer = input_buffers_[j_input_buffer_index]; | |
| 575 uint8_t* yuv_buffer = | |
| 576 reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer)); | |
| 577 CHECK_EXCEPTION(jni); | |
| 578 RTC_CHECK(yuv_buffer) << "Indirect buffer??"; | |
| 579 RTC_CHECK(!libyuv::ConvertFromI420( | |
| 580 input_frame.buffer(webrtc::kYPlane), input_frame.stride(webrtc::kYPlane), | |
| 581 input_frame.buffer(webrtc::kUPlane), input_frame.stride(webrtc::kUPlane), | |
| 582 input_frame.buffer(webrtc::kVPlane), input_frame.stride(webrtc::kVPlane), | |
| 583 yuv_buffer, width_, width_, height_, encoder_fourcc_)) | |
| 584 << "ConvertFromI420 failed"; | |
| 585 last_input_timestamp_ms_ = current_timestamp_us_ / 1000; | |
| 586 frames_in_queue_++; | 585 frames_in_queue_++; |
| 587 | 586 |
| 588 // Save input image timestamps for later output | 587 // Save input image timestamps for later output |
| 589 timestamps_.push_back(input_frame.timestamp()); | 588 timestamps_.push_back(input_frame.timestamp()); |
| 590 render_times_ms_.push_back(input_frame.render_time_ms()); | 589 render_times_ms_.push_back(input_frame.render_time_ms()); |
| 591 frame_rtc_times_ms_.push_back(GetCurrentTimeMs()); | 590 frame_rtc_times_ms_.push_back(GetCurrentTimeMs()); |
| 592 | 591 |
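These three parallel vectors pair every accepted input frame with the encoded output that DeliverPendingOutputs() later hands to |callback_|. The pop side sits in a skipped hunk, so the FIFO consumption below is an assumption, sketched with an invented `FrameTimingQueue` type:

```cpp
// Hedged sketch of the timestamp bookkeeping: push on input, pop in FIFO
// order on output to stamp the encoded image and measure encode time.
#include <cstdint>
#include <vector>

struct FrameTimingQueue {
  std::vector<uint32_t> timestamps;         // RTP timestamps of queued inputs.
  std::vector<int64_t> render_times_ms;     // Capture/render times.
  std::vector<int64_t> frame_rtc_times_ms;  // Wall clock when handed to codec.

  void OnInput(uint32_t timestamp, int64_t render_time_ms, int64_t now_ms) {
    timestamps.push_back(timestamp);
    render_times_ms.push_back(render_time_ms);
    frame_rtc_times_ms.push_back(now_ms);
  }

  // Returns encode latency in ms for the oldest queued frame and removes it.
  int64_t OnOutput(int64_t now_ms, uint32_t* timestamp,
                   int64_t* render_time_ms) {
    *timestamp = timestamps.front();
    *render_time_ms = render_times_ms.front();
    const int64_t encode_ms = now_ms - frame_rtc_times_ms.front();
    timestamps.erase(timestamps.begin());
    render_times_ms.erase(render_times_ms.begin());
    frame_rtc_times_ms.erase(frame_rtc_times_ms.begin());
    return encode_ms;
  }
};
```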
| 593 bool key_frame = frame_types->front() != webrtc::kDeltaFrame; | 592 const bool key_frame = frame_types->front() != webrtc::kDeltaFrame; |
| 593 const bool encode_status = | |
| 594 EncodeByteBufferOnCodecThread(jni, key_frame, input_frame, | |
| 595 j_input_buffer_index); | |
| 596 | |
| 597 current_timestamp_us_ += 1000000 / last_set_fps_; | |
|
> magjed_webrtc (2015/10/22 23:24:17): s/1000000/rtc::kNumMicrosecsPerSec/g
> perkj_webrtc (2015/11/12 13:25:22): Done.
| 598 | |
| 599 if (!encode_status || !DeliverPendingOutputs(jni)) { | |
| 600 ALOGE << "Failed deliver pending outputs."; | |
| 601 ResetCodecOnCodecThread(); | |
| 602 return WEBRTC_VIDEO_CODEC_ERROR; | |
| 603 } | |
| 604 return WEBRTC_VIDEO_CODEC_OK; | |
| 605 } | |
| 606 | |
| 607 bool MediaCodecVideoEncoder::MaybeReconfigureEncoderOnCodecThread( | |
| 608 const webrtc::VideoFrame& frame) { | |
| 609 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | |
| 610 | |
| 611 const bool reconfigure_due_to_size = | |
| 612 frame.width() != width_ || frame.height() != height_; | |
| 613 | |
| 614 if (reconfigure_due_to_size) { | |
| 615 ALOGD << "Reconfigure encoder due to frame resolution change from " | |
| 616 << width_ << " x " << height_ << " to " << frame.width() << " x " | |
| 617 << frame.height(); | |
| 618 width_ = frame.width(); | |
| 619 height_ = frame.height(); | |
| 620 } | |
| 621 | |
| 622 if (!reconfigure_due_to_size) | |
| 623 return true; | |
| 624 | |
| 625 ReleaseOnCodecThread(); | |
| 626 | |
| 627 return InitEncodeOnCodecThread(width_, height_, 0, 0) == | |
| 628 WEBRTC_VIDEO_CODEC_OK; | |
| 629 } | |
| 630 | |
| 631 bool MediaCodecVideoEncoder::EncodeByteBufferOnCodecThread(JNIEnv* jni, | |
| 632 bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index) { | |
| 633 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | |
| 634 | |
| 635 ALOGV("Encoder frame in # %d. TS: %lld. Q: %d", | |
| 636 frames_received_ - 1, current_timestamp_us_ / 1000, frames_in_queue_); | |
| 637 | |
| 638 jobject j_input_buffer = input_buffers_[input_buffer_index]; | |
| 639 uint8_t* yuv_buffer = | |
| 640 reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer)); | |
| 641 CHECK_EXCEPTION(jni); | |
| 642 RTC_CHECK(yuv_buffer) << "Indirect buffer??"; | |
| 643 RTC_CHECK(!libyuv::ConvertFromI420( | |
| 644 frame.buffer(webrtc::kYPlane), frame.stride(webrtc::kYPlane), | |
| 645 frame.buffer(webrtc::kUPlane), frame.stride(webrtc::kUPlane), | |
| 646 frame.buffer(webrtc::kVPlane), frame.stride(webrtc::kVPlane), | |
| 647 yuv_buffer, width_, width_, height_, encoder_fourcc_)) | |
| 648 << "ConvertFromI420 failed"; | |
| 649 | |
| 594 bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_, | 650 bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_, |
| 595 j_encode_method_, | 651 j_encode_buffer_method_, |
| 596 key_frame, | 652 key_frame, |
| 597 j_input_buffer_index, | 653 input_buffer_index, |
| 598 yuv_size_, | 654 yuv_size_, |
| 599 current_timestamp_us_); | 655 current_timestamp_us_); |
| 600 CHECK_EXCEPTION(jni); | 656 CHECK_EXCEPTION(jni); |
| 601 current_timestamp_us_ += 1000000 / last_set_fps_; | 657 return encode_status; |
| 602 | |
| 603 if (!encode_status || !DeliverPendingOutputs(jni)) { | |
| 604 ResetCodec(); | |
| 605 return WEBRTC_VIDEO_CODEC_ERROR; | |
| 606 } | |
| 607 | |
| 608 return WEBRTC_VIDEO_CODEC_OK; | |
| 609 } | 658 } |
| 610 | 659 |
| 611 int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread( | 660 int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread( |
| 612 webrtc::EncodedImageCallback* callback) { | 661 webrtc::EncodedImageCallback* callback) { |
| 613 CheckOnCodecThread(); | 662 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
| 614 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 663 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 615 ScopedLocalRefFrame local_ref_frame(jni); | 664 ScopedLocalRefFrame local_ref_frame(jni); |
| 616 callback_ = callback; | 665 callback_ = callback; |
| 617 return WEBRTC_VIDEO_CODEC_OK; | 666 return WEBRTC_VIDEO_CODEC_OK; |
| 618 } | 667 } |
| 619 | 668 |
| 620 int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() { | 669 int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() { |
| 670 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | |
| 621 if (!inited_) { | 671 if (!inited_) { |
| 622 return WEBRTC_VIDEO_CODEC_OK; | 672 return WEBRTC_VIDEO_CODEC_OK; |
| 623 } | 673 } |
| 624 CheckOnCodecThread(); | |
| 625 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 674 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 626 ALOGD << "EncoderReleaseOnCodecThread: Frames received: " << | 675 ALOGD << "EncoderReleaseOnCodecThread: Frames received: " << |
| 627 frames_received_ << ". Encoded: " << frames_encoded_ << | 676 frames_received_ << ". Encoded: " << frames_encoded_ << |
| 628 ". Dropped: " << frames_dropped_; | 677 ". Dropped: " << frames_dropped_; |
| 629 ScopedLocalRefFrame local_ref_frame(jni); | 678 ScopedLocalRefFrame local_ref_frame(jni); |
| 630 for (size_t i = 0; i < input_buffers_.size(); ++i) | 679 for (size_t i = 0; i < input_buffers_.size(); ++i) |
| 631 jni->DeleteGlobalRef(input_buffers_[i]); | 680 jni->DeleteGlobalRef(input_buffers_[i]); |
| 632 input_buffers_.clear(); | 681 input_buffers_.clear(); |
| 633 jni->CallVoidMethod(*j_media_codec_video_encoder_, j_release_method_); | 682 jni->CallVoidMethod(*j_media_codec_video_encoder_, j_release_method_); |
| 634 CHECK_EXCEPTION(jni); | 683 CHECK_EXCEPTION(jni); |
| 635 rtc::MessageQueueManager::Clear(this); | 684 rtc::MessageQueueManager::Clear(this); |
| 636 inited_ = false; | 685 inited_ = false; |
| 637 ALOGD << "EncoderReleaseOnCodecThread done."; | 686 ALOGD << "EncoderReleaseOnCodecThread done."; |
| 638 return WEBRTC_VIDEO_CODEC_OK; | 687 return WEBRTC_VIDEO_CODEC_OK; |
| 639 } | 688 } |
| 640 | 689 |
| 641 int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate, | 690 int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate, |
| 642 uint32_t frame_rate) { | 691 uint32_t frame_rate) { |
| 643 CheckOnCodecThread(); | 692 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
| 644 if (last_set_bitrate_kbps_ == new_bit_rate && | 693 if (last_set_bitrate_kbps_ == new_bit_rate && |
| 645 last_set_fps_ == frame_rate) { | 694 last_set_fps_ == frame_rate) { |
| 646 return WEBRTC_VIDEO_CODEC_OK; | 695 return WEBRTC_VIDEO_CODEC_OK; |
| 647 } | 696 } |
| 648 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 697 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 649 ScopedLocalRefFrame local_ref_frame(jni); | 698 ScopedLocalRefFrame local_ref_frame(jni); |
| 650 if (new_bit_rate > 0) { | 699 if (new_bit_rate > 0) { |
| 651 last_set_bitrate_kbps_ = new_bit_rate; | 700 last_set_bitrate_kbps_ = new_bit_rate; |
| 652 } | 701 } |
| 653 if (frame_rate > 0) { | 702 if (frame_rate > 0) { |
| 654 last_set_fps_ = frame_rate; | 703 last_set_fps_ = frame_rate; |
| 655 } | 704 } |
| 656 bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_, | 705 bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_, |
| 657 j_set_rates_method_, | 706 j_set_rates_method_, |
| 658 last_set_bitrate_kbps_, | 707 last_set_bitrate_kbps_, |
| 659 last_set_fps_); | 708 last_set_fps_); |
| 660 CHECK_EXCEPTION(jni); | 709 CHECK_EXCEPTION(jni); |
| 661 if (!ret) { | 710 if (!ret) { |
| 662 ResetCodec(); | 711 ResetCodecOnCodecThread(); |
| 663 return WEBRTC_VIDEO_CODEC_ERROR; | 712 return WEBRTC_VIDEO_CODEC_ERROR; |
| 664 } | 713 } |
| 665 return WEBRTC_VIDEO_CODEC_OK; | 714 return WEBRTC_VIDEO_CODEC_OK; |
| 666 } | 715 } |
| 667 | 716 |
| 668 int MediaCodecVideoEncoder::GetOutputBufferInfoIndex( | 717 int MediaCodecVideoEncoder::GetOutputBufferInfoIndex( |
| 669 JNIEnv* jni, | 718 JNIEnv* jni, |
| 670 jobject j_output_buffer_info) { | 719 jobject j_output_buffer_info) { |
| 671 return GetIntField(jni, j_output_buffer_info, j_info_index_field_); | 720 return GetIntField(jni, j_output_buffer_info, j_info_index_field_); |
| 672 } | 721 } |
| (...skipping 11 matching lines...) | |
| 684 } | 733 } |
| 685 | 734 |
| 686 jlong MediaCodecVideoEncoder::GetOutputBufferInfoPresentationTimestampUs( | 735 jlong MediaCodecVideoEncoder::GetOutputBufferInfoPresentationTimestampUs( |
| 687 JNIEnv* jni, | 736 JNIEnv* jni, |
| 688 jobject j_output_buffer_info) { | 737 jobject j_output_buffer_info) { |
| 689 return GetLongField( | 738 return GetLongField( |
| 690 jni, j_output_buffer_info, j_info_presentation_timestamp_us_field_); | 739 jni, j_output_buffer_info, j_info_presentation_timestamp_us_field_); |
| 691 } | 740 } |
| 692 | 741 |
| 693 bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) { | 742 bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) { |
| 743 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | |
| 694 while (true) { | 744 while (true) { |
| 695 jobject j_output_buffer_info = jni->CallObjectMethod( | 745 jobject j_output_buffer_info = jni->CallObjectMethod( |
| 696 *j_media_codec_video_encoder_, j_dequeue_output_buffer_method_); | 746 *j_media_codec_video_encoder_, j_dequeue_output_buffer_method_); |
| 697 CHECK_EXCEPTION(jni); | 747 CHECK_EXCEPTION(jni); |
| 698 if (IsNull(jni, j_output_buffer_info)) { | 748 if (IsNull(jni, j_output_buffer_info)) { |
| 699 break; | 749 break; |
| 700 } | 750 } |
| 701 | 751 |
| 702 int output_buffer_index = | 752 int output_buffer_index = |
| 703 GetOutputBufferInfoIndex(jni, j_output_buffer_info); | 753 GetOutputBufferInfoIndex(jni, j_output_buffer_info); |
| 704 if (output_buffer_index == -1) { | 754 if (output_buffer_index == -1) { |
| 705 ResetCodec(); | 755 ResetCodecOnCodecThread(); |
| 706 return false; | 756 return false; |
| 707 } | 757 } |
| 708 | 758 |
| 709 // Get key and config frame flags. | 759 // Get key and config frame flags. |
| 710 jobject j_output_buffer = | 760 jobject j_output_buffer = |
| 711 GetOutputBufferInfoBuffer(jni, j_output_buffer_info); | 761 GetOutputBufferInfoBuffer(jni, j_output_buffer_info); |
| 712 bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info); | 762 bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info); |
| 713 | 763 |
| 714 // Get frame timestamps from a queue - for non config frames only. | 764 // Get frame timestamps from a queue - for non config frames only. |
| 715 int64_t frame_encoding_time_ms = 0; | 765 int64_t frame_encoding_time_ms = 0; |
| (...skipping 103 matching lines...) | |
| 819 } | 869 } |
| 820 scPosition += naluPosition; | 870 scPosition += naluPosition; |
| 821 scPositions[scPositionsLength++] = scPosition; | 871 scPositions[scPositionsLength++] = scPosition; |
| 822 scPosition += H264_SC_LENGTH; | 872 scPosition += H264_SC_LENGTH; |
| 823 } | 873 } |
| 824 if (scPositionsLength == 0) { | 874 if (scPositionsLength == 0) { |
| 825 ALOGE << "Start code is not found!"; | 875 ALOGE << "Start code is not found!"; |
| 826 ALOGE << "Data:" << image->_buffer[0] << " " << image->_buffer[1] | 876 ALOGE << "Data:" << image->_buffer[0] << " " << image->_buffer[1] |
| 827 << " " << image->_buffer[2] << " " << image->_buffer[3] | 877 << " " << image->_buffer[2] << " " << image->_buffer[3] |
| 828 << " " << image->_buffer[4] << " " << image->_buffer[5]; | 878 << " " << image->_buffer[4] << " " << image->_buffer[5]; |
| 829 ResetCodec(); | 879 ResetCodecOnCodecThread(); |
| 830 return false; | 880 return false; |
| 831 } | 881 } |
| 832 scPositions[scPositionsLength] = payload_size; | 882 scPositions[scPositionsLength] = payload_size; |
| 833 header.VerifyAndAllocateFragmentationHeader(scPositionsLength); | 883 header.VerifyAndAllocateFragmentationHeader(scPositionsLength); |
| 834 for (size_t i = 0; i < scPositionsLength; i++) { | 884 for (size_t i = 0; i < scPositionsLength; i++) { |
| 835 header.fragmentationOffset[i] = scPositions[i] + H264_SC_LENGTH; | 885 header.fragmentationOffset[i] = scPositions[i] + H264_SC_LENGTH; |
| 836 header.fragmentationLength[i] = | 886 header.fragmentationLength[i] = |
| 837 scPositions[i + 1] - header.fragmentationOffset[i]; | 887 scPositions[i + 1] - header.fragmentationOffset[i]; |
| 838 header.fragmentationPlType[i] = 0; | 888 header.fragmentationPlType[i] = 0; |
| 839 header.fragmentationTimeDiff[i] = 0; | 889 header.fragmentationTimeDiff[i] = 0; |
| 840 } | 890 } |
| 841 } | 891 } |
| 842 | 892 |
| 843 callback_status = callback_->Encoded(*image, &info, &header); | 893 callback_status = callback_->Encoded(*image, &info, &header); |
| 844 } | 894 } |
| 845 | 895 |
| 846 // Return output buffer back to the encoder. | 896 // Return output buffer back to the encoder. |
| 847 bool success = jni->CallBooleanMethod(*j_media_codec_video_encoder_, | 897 bool success = jni->CallBooleanMethod(*j_media_codec_video_encoder_, |
| 848 j_release_output_buffer_method_, | 898 j_release_output_buffer_method_, |
| 849 output_buffer_index); | 899 output_buffer_index); |
| 850 CHECK_EXCEPTION(jni); | 900 CHECK_EXCEPTION(jni); |
| 851 if (!success) { | 901 if (!success) { |
| 852 ResetCodec(); | 902 ResetCodecOnCodecThread(); |
| 853 return false; | 903 return false; |
| 854 } | 904 } |
| 855 | 905 |
| 856 if (callback_status > 0) { | 906 if (callback_status > 0) { |
| 857 drop_next_input_frame_ = true; | 907 drop_next_input_frame_ = true; |
| 858 // Theoretically could handle callback_status<0 here, but unclear what | 908 // Theoretically could handle callback_status<0 here, but unclear what |
| 859 // that would mean for us. | 909 // that would mean for us. |
| 860 } | 910 } |
| 861 } | 911 } |
| 862 | 912 |
| (...skipping 92 matching lines...) | |
| 955 } | 1005 } |
| 956 | 1006 |
| 957 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( | 1007 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( |
| 958 webrtc::VideoEncoder* encoder) { | 1008 webrtc::VideoEncoder* encoder) { |
| 959 ALOGD << "Destroy video encoder."; | 1009 ALOGD << "Destroy video encoder."; |
| 960 delete encoder; | 1010 delete encoder; |
| 961 } | 1011 } |
| 962 | 1012 |
| 963 } // namespace webrtc_jni | 1013 } // namespace webrtc_jni |
| 964 | 1014 |