Index: webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc |
diff --git a/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc b/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc |
index f9ef0ac62c2b3f84cf0e3d9d5f6e8dd7d4097d19..c3e4ca2e31560af3f31080b9213cd48639c819bb 100644 |
--- a/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc |
+++ b/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc |
@@ -25,9 +25,11 @@ |
#include "webrtc/base/bind.h" |
#include "webrtc/base/checks.h" |
#include "webrtc/base/logging.h" |
+#include "webrtc/base/task_queue.h" |
#include "webrtc/base/thread.h" |
#include "webrtc/base/thread_checker.h" |
#include "webrtc/base/timeutils.h" |
+#include "webrtc/base/weak_ptr.h" |
#include "webrtc/common_types.h" |
#include "webrtc/common_video/h264/h264_bitstream_parser.h" |
#include "webrtc/common_video/h264/h264_common.h" |
@@ -90,18 +92,16 @@ const char kH264HighProfileFieldTrial[] = "WebRTC-H264HighProfile"; |
// Android's MediaCodec SDK API behind the scenes to implement (hopefully) |
// HW-backed video encode. This C++ class is implemented as a very thin shim, |
// delegating all of the interesting work to org.webrtc.MediaCodecVideoEncoder. |
-// MediaCodecVideoEncoder is created, operated, and destroyed on a single |
-// thread, currently the libjingle Worker thread. |
-class MediaCodecVideoEncoder : public webrtc::VideoEncoder, |
- public rtc::MessageHandler { |
+// MediaCodecVideoEncoder must be created, operated, and destroyed on a single |
+// task queue, currently the encoder queue of the ViE encoder. |
+class MediaCodecVideoEncoder : public webrtc::VideoEncoder { |
public: |
virtual ~MediaCodecVideoEncoder(); |
MediaCodecVideoEncoder(JNIEnv* jni, |
const cricket::VideoCodec& codec, |
jobject egl_context); |
- // webrtc::VideoEncoder implementation. Everything trampolines to |
- // |codec_thread_| for execution. |
+ // webrtc::VideoEncoder implementation. |
int32_t InitEncode(const webrtc::VideoCodec* codec_settings, |
int32_t /* number_of_cores */, |
size_t /* max_payload_size */) override; |
@@ -116,55 +116,53 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder, |
int32_t SetRateAllocation(const webrtc::BitrateAllocation& rate_allocation, |
uint32_t frame_rate) override; |
- // rtc::MessageHandler implementation. |
- void OnMessage(rtc::Message* msg) override; |
- |
bool SupportsNativeHandle() const override { return egl_context_ != nullptr; } |
const char* ImplementationName() const override; |
private: |
- // ResetCodecOnCodecThread() calls ReleaseOnCodecThread() and |
- // InitEncodeOnCodecThread() in an attempt to restore the codec to an |
- // operable state. Necessary after all manner of OMX-layer errors. |
- // Returns true if the codec was reset successfully. |
- bool ResetCodecOnCodecThread(); |
+ class EncodeTask : public rtc::QueuedTask { |
+ public: |
+ EncodeTask(rtc::WeakPtr<MediaCodecVideoEncoder> encoder); |
+ bool Run() override; |
+ |
+ private: |
+ rtc::WeakPtr<MediaCodecVideoEncoder> encoder_; |
magjed_webrtc 2017/02/03 10:39:17:
Why do we need a WeakPtr?
sakal 2017/02/03 11:57:13:
Discussed offline. (MediaCodecVideoEncoder can get
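The WeakPtr question above deserves a concrete picture. Below is a minimal sketch of the lifetime pattern behind EncodeTask; it is condensed and illustrative rather than the literal patch, and it leans on the rtc::QueuedTask ownership contract: returning true from Run() lets the task queue delete the task (used once the encoder is gone and the weak pointer reads null), while returning false means the task has already arranged its own ownership, e.g. by re-posting itself. The class name PollTask is hypothetical.

#include "webrtc/base/task_queue.h"
#include "webrtc/base/weak_ptr.h"

// Illustrative only: a polling task that survives encoder destruction safely.
class PollTask : public rtc::QueuedTask {
 public:
  explicit PollTask(rtc::WeakPtr<MediaCodecVideoEncoder> encoder)
      : encoder_(encoder) {}

  bool Run() override {
    if (!encoder_) {
      // The encoder was destroyed while this task was pending; return true so
      // the task queue deletes the task.
      return true;
    }
    // ... poll MediaCodec output buffers through |encoder_| ...
    // Keep the loop alive: hand ownership back to the queue and return false
    // so the queue does not delete the task now.
    rtc::TaskQueue::Current()->PostDelayedTask(
        std::unique_ptr<rtc::QueuedTask>(this), kMediaCodecPollMs);
    return false;
  }

 private:
  rtc::WeakPtr<MediaCodecVideoEncoder> encoder_;
};

Because the WeakPtr comes from a WeakPtrFactory owned by the encoder and is only checked on the encoder queue, a task that fires after ~MediaCodecVideoEncoder() sees a null pointer instead of dereferencing freed memory.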
+ }; |
+ |
+ // ResetCodec() calls Release() and InitEncodeInternal() in an attempt to |
+ // restore the codec to an operable state. Necessary after all manner of |
+ // OMX-layer errors. Returns true if the codec was reset successfully. |
+ bool ResetCodec(); |
// Fallback to a software encoder if one is supported else try to reset the |
// encoder. Called with |reset_if_fallback_unavailable| equal to false from |
// init/release encoder so that we don't go into infinite recursion. |
// Returns true if the codec was reset successfully. |
- bool ProcessHWErrorOnCodecThread(bool reset_if_fallback_unavailable); |
+ bool ProcessHWError(bool reset_if_fallback_unavailable); |
- // Calls ProcessHWErrorOnCodecThread(true). Returns |
- // WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE if sw_fallback_required_ was set or |
- // WEBRTC_VIDEO_CODEC_ERROR otherwise. |
- int32_t ProcessHWErrorOnEncodeOnCodecThread(); |
+ // Calls ProcessHWError(true). Returns WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE if |
+ // sw_fallback_required_ was set or WEBRTC_VIDEO_CODEC_ERROR otherwise. |
+ int32_t ProcessHWErrorOnEncode(); |
- // Implementation of webrtc::VideoEncoder methods above, all running on the |
- // codec thread exclusively. |
- // |
// If width==0 then this is assumed to be a re-initialization and the |
// previously-current values are reused instead of the passed parameters |
// (makes it easier to reason about thread-safety). |
- int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps, |
- bool use_surface); |
+ int32_t InitEncodeInternal(int width, |
+ int height, |
+ int kbps, |
+ int fps, |
+ bool use_surface); |
// Reconfigure to match |frame| in width, height. Also reconfigures the |
// encoder if |frame| is a texture/byte buffer and the encoder is initialized |
// for byte buffer/texture. Returns false if reconfiguring fails. |
- bool MaybeReconfigureEncoderOnCodecThread(const webrtc::VideoFrame& frame); |
- int32_t EncodeOnCodecThread( |
- const webrtc::VideoFrame& input_image, |
- const std::vector<webrtc::FrameType>* frame_types, |
- const int64_t frame_input_time_ms); |
- bool EncodeByteBufferOnCodecThread(JNIEnv* jni, |
- bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index); |
- bool EncodeTextureOnCodecThread(JNIEnv* jni, |
- bool key_frame, const webrtc::VideoFrame& frame); |
- |
- int32_t RegisterEncodeCompleteCallbackOnCodecThread( |
- webrtc::EncodedImageCallback* callback); |
- int32_t ReleaseOnCodecThread(); |
- int32_t SetRatesOnCodecThread(uint32_t new_bit_rate, uint32_t frame_rate); |
+ bool MaybeReconfigureEncoder(const webrtc::VideoFrame& frame); |
+ bool EncodeByteBuffer(JNIEnv* jni, |
+ bool key_frame, |
+ const webrtc::VideoFrame& frame, |
+ int input_buffer_index); |
+ bool EncodeTexture(JNIEnv* jni, |
+ bool key_frame, |
+ const webrtc::VideoFrame& frame); |
// Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members. |
int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info); |
@@ -185,15 +183,11 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder, |
// Type of video codec. |
const cricket::VideoCodec codec_; |
- // Valid all the time since RegisterEncodeCompleteCallback() Invoke()s to |
- // |codec_thread_| synchronously. |
webrtc::EncodedImageCallback* callback_; |
// State that is constant for the lifetime of this object once the ctor |
// returns. |
- std::unique_ptr<Thread> |
- codec_thread_; // Thread on which to operate MediaCodec. |
- rtc::ThreadChecker codec_thread_checker_; |
+ rtc::ThreadChecker encoder_queue_checker_; |
magjed_webrtc 2017/02/03 10:39:17:
Use SequencedTaskChecker instead. Also, update CL
sakal 2017/02/03 11:57:13:
Done.
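For reference, the suggestion above is to swap rtc::ThreadChecker for rtc::SequencedTaskChecker, which accepts calls from different OS threads as long as they are serialized on one task queue — exactly the guarantee the encoder queue gives. A rough sketch, assuming the checker API from webrtc/base/sequenced_task_checker.h; the class and method here are illustrative, only the member name matches the patch:

#include "webrtc/base/checks.h"
#include "webrtc/base/sequenced_task_checker.h"

// Illustrative only: state that must only be touched on one task queue.
class EncoderState {
 public:
  void Release() {
    // Passes as long as Release() runs on the owning task queue, even if that
    // queue is backed by different OS threads over time.
    RTC_DCHECK(encoder_queue_checker_.CalledSequentially());
    // ... release MediaCodec resources here ...
  }

 private:
  rtc::SequencedTaskChecker encoder_queue_checker_;
};

The RTC_DCHECK_RUN_ON(&encoder_queue_checker_) calls used throughout the patch are intended to express the same check in macro form.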
ScopedGlobalRef<jclass> j_media_codec_video_encoder_class_; |
ScopedGlobalRef<jobject> j_media_codec_video_encoder_; |
jmethodID j_init_encode_method_; |
@@ -212,7 +206,6 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder, |
jfieldID j_info_presentation_timestamp_us_field_; |
// State that is valid only between InitEncode() and the next Release(). |
- // Touched only on codec_thread_ so no explicit synchronization necessary. |
int width_; // Frame width in pixels. |
int height_; // Frame height in pixels. |
bool inited_; |
@@ -287,9 +280,15 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder, |
webrtc::VideoCodecMode codec_mode_; |
bool sw_fallback_required_; |
+ // Holds the task while the polling loop is paused. |
+ std::unique_ptr<rtc::QueuedTask> encode_task_; |
+ |
+ // |weak_factory_| must be the last member so that weak pointers are |
+ // invalidated before the other members are destroyed. |
+ rtc::WeakPtrFactory<MediaCodecVideoEncoder> weak_factory_; |
}; |
MediaCodecVideoEncoder::~MediaCodecVideoEncoder() { |
+ RTC_DCHECK_RUN_ON(&encoder_queue_checker_); |
// Call Release() to ensure no more callbacks to us after we are deleted. |
Release(); |
} |
@@ -299,7 +298,6 @@ MediaCodecVideoEncoder::MediaCodecVideoEncoder(JNIEnv* jni, |
jobject egl_context) |
: codec_(codec), |
callback_(NULL), |
- codec_thread_(new Thread()), |
j_media_codec_video_encoder_class_( |
jni, |
FindClass(jni, "org/webrtc/MediaCodecVideoEncoder")), |
@@ -314,17 +312,11 @@ MediaCodecVideoEncoder::MediaCodecVideoEncoder(JNIEnv* jni, |
use_surface_(false), |
picture_id_(0), |
egl_context_(egl_context), |
- sw_fallback_required_(false) { |
- // It would be nice to avoid spinning up a new thread per MediaCodec, and |
- // instead re-use e.g. the PeerConnectionFactory's |worker_thread_|, but bug |
- // 2732 means that deadlocks abound. This class synchronously trampolines |
- // to |codec_thread_|, so if anything else can be coming to _us_ from |
- // |codec_thread_|, or from any thread holding the |_sendCritSect| described |
- // in the bug, we have a problem. For now work around that with a dedicated |
- // thread. |
- codec_thread_->SetName("MediaCodecVideoEncoder", NULL); |
- RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoEncoder"; |
- codec_thread_checker_.DetachFromThread(); |
+ sw_fallback_required_(false), |
+ weak_factory_(this) { |
+ encode_task_ = std::unique_ptr<rtc::QueuedTask>( |
+ new EncodeTask(weak_factory_.GetWeakPtr())); |
+ |
jclass j_output_buffer_info_class = |
FindClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo"); |
j_init_encode_method_ = GetMethodID( |
@@ -369,16 +361,16 @@ MediaCodecVideoEncoder::MediaCodecVideoEncoder(JNIEnv* jni, |
jni, j_output_buffer_info_class, "presentationTimestampUs", "J"); |
if (CheckException(jni)) { |
ALOGW << "MediaCodecVideoEncoder ctor failed."; |
- ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */); |
+ ProcessHWError(true /* reset_if_fallback_unavailable */); |
} |
srand(time(NULL)); |
- AllowBlockingCalls(); |
} |
int32_t MediaCodecVideoEncoder::InitEncode( |
const webrtc::VideoCodec* codec_settings, |
int32_t /* number_of_cores */, |
size_t /* max_payload_size */) { |
+ RTC_DCHECK_RUN_ON(&encoder_queue_checker_); |
if (codec_settings == NULL) { |
ALOGE << "NULL VideoCodec instance"; |
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
@@ -407,35 +399,189 @@ int32_t MediaCodecVideoEncoder::InitEncode( |
ALOGD << "InitEncode request: " << init_width << " x " << init_height; |
ALOGD << "Encoder automatic resize " << (scale_ ? "enabled" : "disabled"); |
- return codec_thread_->Invoke<int32_t>( |
- RTC_FROM_HERE, |
- Bind(&MediaCodecVideoEncoder::InitEncodeOnCodecThread, this, init_width, |
- init_height, codec_settings->startBitrate, |
- codec_settings->maxFramerate, |
- codec_settings->expect_encode_from_texture)); |
+ return InitEncodeInternal( |
+ init_width, init_height, codec_settings->startBitrate, |
+ codec_settings->maxFramerate, codec_settings->expect_encode_from_texture); |
} |
int32_t MediaCodecVideoEncoder::Encode( |
const webrtc::VideoFrame& frame, |
const webrtc::CodecSpecificInfo* /* codec_specific_info */, |
const std::vector<webrtc::FrameType>* frame_types) { |
- return codec_thread_->Invoke<int32_t>( |
- RTC_FROM_HERE, Bind(&MediaCodecVideoEncoder::EncodeOnCodecThread, this, |
magjed_webrtc 2017/02/03 10:39:17:
Can you try to reorder the functions in order to m
sakal 2017/02/03 11:57:13:
Done. Though, I often like to keep the order of de
- frame, frame_types, rtc::TimeMillis())); |
+ RTC_DCHECK_RUN_ON(&encoder_queue_checker_); |
+ if (sw_fallback_required_) |
+ return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; |
+ JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
+ ScopedLocalRefFrame local_ref_frame(jni); |
+ const int64_t frame_input_time_ms = rtc::TimeMillis(); |
+ |
+ if (!inited_) { |
+ return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
+ } |
+ |
+ bool send_key_frame = false; |
+ if (codec_mode_ == webrtc::kRealtimeVideo) { |
+ ++frames_received_since_last_key_; |
+ int64_t now_ms = rtc::TimeMillis(); |
+ if (last_frame_received_ms_ != -1 && |
+ (now_ms - last_frame_received_ms_) > kFrameDiffThresholdMs) { |
+ // Add limit to prevent triggering a key for every frame for very low |
+ // framerates (e.g. if frame diff > kFrameDiffThresholdMs). |
+ if (frames_received_since_last_key_ > kMinKeyFrameInterval) { |
+ ALOGD << "Send key, frame diff: " << (now_ms - last_frame_received_ms_); |
+ send_key_frame = true; |
+ } |
+ frames_received_since_last_key_ = 0; |
+ } |
+ last_frame_received_ms_ = now_ms; |
+ } |
+ |
+ frames_received_++; |
+ if (!DeliverPendingOutputs(jni)) { |
+ if (!ProcessHWError(true /* reset_if_fallback_unavailable */)) { |
+ return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE |
+ : WEBRTC_VIDEO_CODEC_ERROR; |
+ } |
+ } |
+ if (frames_encoded_ < kMaxEncodedLogFrames) { |
+ ALOGD << "Encoder frame in # " << (frames_received_ - 1) |
+ << ". TS: " << (int)(current_timestamp_us_ / 1000) |
+ << ". Q: " << input_frame_infos_.size() << ". Fps: " << last_set_fps_ |
+ << ". Kbps: " << last_set_bitrate_kbps_; |
+ } |
+ |
+ if (drop_next_input_frame_) { |
+ ALOGW << "Encoder drop frame - failed callback."; |
+ drop_next_input_frame_ = false; |
+ current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; |
+ frames_dropped_media_encoder_++; |
+ return WEBRTC_VIDEO_CODEC_OK; |
+ } |
+ |
+ RTC_CHECK(frame_types->size() == 1) << "Unexpected stream count"; |
+ |
+ // Check if we accumulated too many frames in encoder input buffers and drop |
+ // frame if so. |
+ if (input_frame_infos_.size() > MAX_ENCODER_Q_SIZE) { |
+ ALOGD << "Already " << input_frame_infos_.size() |
+ << " frames in the queue, dropping" |
+ << ". TS: " << (int)(current_timestamp_us_ / 1000) |
+ << ". Fps: " << last_set_fps_ |
+ << ". Consecutive drops: " << consecutive_full_queue_frame_drops_; |
+ current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; |
+ consecutive_full_queue_frame_drops_++; |
+ if (consecutive_full_queue_frame_drops_ >= |
+ ENCODER_STALL_FRAMEDROP_THRESHOLD) { |
+ ALOGE << "Encoder got stuck."; |
+ return ProcessHWErrorOnEncode(); |
+ } |
+ frames_dropped_media_encoder_++; |
+ return WEBRTC_VIDEO_CODEC_OK; |
+ } |
+ consecutive_full_queue_frame_drops_ = 0; |
+ |
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> input_buffer( |
+ frame.video_frame_buffer()); |
+ |
+ VideoFrame input_frame(input_buffer, frame.timestamp(), |
+ frame.render_time_ms(), frame.rotation()); |
+ |
+ if (!MaybeReconfigureEncoder(input_frame)) { |
+ ALOGE << "Failed to reconfigure encoder."; |
+ return WEBRTC_VIDEO_CODEC_ERROR; |
+ } |
+ |
+ const bool key_frame = |
+ frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame; |
+ bool encode_status = true; |
+ if (!input_frame.video_frame_buffer()->native_handle()) { |
+ int j_input_buffer_index = jni->CallIntMethod( |
+ *j_media_codec_video_encoder_, j_dequeue_input_buffer_method_); |
+ if (CheckException(jni)) { |
+ ALOGE << "Exception in dequeu input buffer."; |
+ return ProcessHWErrorOnEncode(); |
+ } |
+ if (j_input_buffer_index == -1) { |
+ // Video codec falls behind - no input buffer available. |
+ ALOGW << "Encoder drop frame - no input buffers available"; |
+ if (frames_received_ > 1) { |
+ current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; |
+ frames_dropped_media_encoder_++; |
+ } else { |
+ // Input buffers are not ready after codec initialization, HW is still |
+ // allocating them - this is expected and should not result in a frame |
+ // drop report. |
+ frames_received_ = 0; |
+ } |
+ return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887. |
+ } else if (j_input_buffer_index == -2) { |
+ return ProcessHWErrorOnEncode(); |
+ } |
+ encode_status = |
+ EncodeByteBuffer(jni, key_frame, input_frame, j_input_buffer_index); |
+ } else { |
+ encode_status = EncodeTexture(jni, key_frame, input_frame); |
+ } |
+ |
+ if (!encode_status) { |
+ ALOGE << "Failed encode frame with timestamp: " << input_frame.timestamp(); |
+ return ProcessHWErrorOnEncode(); |
+ } |
+ |
+ // Save input image timestamps for later output. |
+ input_frame_infos_.emplace_back(frame_input_time_ms, input_frame.timestamp(), |
+ input_frame.render_time_ms(), |
+ input_frame.rotation()); |
+ |
+ last_input_timestamp_ms_ = |
+ current_timestamp_us_ / rtc::kNumMicrosecsPerMillisec; |
+ |
+ current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; |
+ |
+ // Start the polling loop if it is not already running. |
+ if (encode_task_) { |
+ rtc::TaskQueue::Current()->PostDelayedTask(std::move(encode_task_), |
+ kMediaCodecPollMs); |
+ } |
+ |
+ if (!DeliverPendingOutputs(jni)) { |
+ return ProcessHWErrorOnEncode(); |
+ } |
+ return WEBRTC_VIDEO_CODEC_OK; |
} |
int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallback( |
webrtc::EncodedImageCallback* callback) { |
- return codec_thread_->Invoke<int32_t>( |
- RTC_FROM_HERE, |
- Bind(&MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread, |
- this, callback)); |
+ RTC_DCHECK_RUN_ON(&encoder_queue_checker_); |
+ JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
+ ScopedLocalRefFrame local_ref_frame(jni); |
+ callback_ = callback; |
+ return WEBRTC_VIDEO_CODEC_OK; |
} |
int32_t MediaCodecVideoEncoder::Release() { |
- ALOGD << "EncoderRelease request"; |
- return codec_thread_->Invoke<int32_t>( |
- RTC_FROM_HERE, Bind(&MediaCodecVideoEncoder::ReleaseOnCodecThread, this)); |
+ RTC_DCHECK_RUN_ON(&encoder_queue_checker_); |
+ if (!inited_) { |
+ return WEBRTC_VIDEO_CODEC_OK; |
+ } |
+ JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
+ ALOGD << "EncoderRelease: Frames received: " << frames_received_ |
+ << ". Encoded: " << frames_encoded_ |
+ << ". Dropped: " << frames_dropped_media_encoder_; |
+ ScopedLocalRefFrame local_ref_frame(jni); |
+ for (size_t i = 0; i < input_buffers_.size(); ++i) |
+ jni->DeleteGlobalRef(input_buffers_[i]); |
+ input_buffers_.clear(); |
+ jni->CallVoidMethod(*j_media_codec_video_encoder_, j_release_method_); |
+ if (CheckException(jni)) { |
+ ALOGE << "Exception in release."; |
+ ProcessHWError(false /* reset_if_fallback_unavailable */); |
+ return WEBRTC_VIDEO_CODEC_ERROR; |
+ } |
+ inited_ = false; |
+ use_surface_ = false; |
+ ALOGD << "EncoderRelease done."; |
+ return WEBRTC_VIDEO_CODEC_OK; |
} |
int32_t MediaCodecVideoEncoder::SetChannelParameters(uint32_t /* packet_loss */, |
@@ -446,59 +592,92 @@ int32_t MediaCodecVideoEncoder::SetChannelParameters(uint32_t /* packet_loss */, |
int32_t MediaCodecVideoEncoder::SetRateAllocation( |
const webrtc::BitrateAllocation& rate_allocation, |
uint32_t frame_rate) { |
- return codec_thread_->Invoke<int32_t>( |
- RTC_FROM_HERE, Bind(&MediaCodecVideoEncoder::SetRatesOnCodecThread, this, |
- rate_allocation.get_sum_kbps(), frame_rate)); |
+ RTC_DCHECK_RUN_ON(&encoder_queue_checker_); |
+ const uint32_t new_bit_rate = rate_allocation.get_sum_kbps(); |
+ if (sw_fallback_required_) |
+ return WEBRTC_VIDEO_CODEC_OK; |
+ frame_rate = |
+ (frame_rate < MAX_ALLOWED_VIDEO_FPS) ? frame_rate : MAX_ALLOWED_VIDEO_FPS; |
+ if (last_set_bitrate_kbps_ == new_bit_rate && last_set_fps_ == frame_rate) { |
+ return WEBRTC_VIDEO_CODEC_OK; |
+ } |
+ JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
+ ScopedLocalRefFrame local_ref_frame(jni); |
+ if (new_bit_rate > 0) { |
+ last_set_bitrate_kbps_ = new_bit_rate; |
+ } |
+ if (frame_rate > 0) { |
+ last_set_fps_ = frame_rate; |
+ } |
+ bool ret = |
+ jni->CallBooleanMethod(*j_media_codec_video_encoder_, j_set_rates_method_, |
+ last_set_bitrate_kbps_, last_set_fps_); |
+ if (CheckException(jni) || !ret) { |
+ ProcessHWError(true /* reset_if_fallback_unavailable */); |
+ return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_OK |
+ : WEBRTC_VIDEO_CODEC_ERROR; |
+ } |
+ return WEBRTC_VIDEO_CODEC_OK; |
+} |
+ |
+bool MediaCodecVideoEncoder::ResetCodec() { |
+ RTC_DCHECK_RUN_ON(&encoder_queue_checker_); |
+ ALOGE << "Reset"; |
+ if (Release() != WEBRTC_VIDEO_CODEC_OK) { |
+ ALOGE << "Releasing codec failed during reset."; |
+ return false; |
+ } |
+ if (InitEncodeInternal(width_, height_, 0, 0, false) != |
+ WEBRTC_VIDEO_CODEC_OK) { |
+ ALOGE << "Initializing encoder failed during reset."; |
+ return false; |
+ } |
+ return true; |
} |
-void MediaCodecVideoEncoder::OnMessage(rtc::Message* msg) { |
- RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
+MediaCodecVideoEncoder::EncodeTask::EncodeTask( |
+ rtc::WeakPtr<MediaCodecVideoEncoder> encoder) |
+ : encoder_(encoder) {} |
+ |
+bool MediaCodecVideoEncoder::EncodeTask::Run() { |
+ if (!encoder_) { |
+ // Encoder was destroyed. |
+ return true; |
+ } |
+ |
+ RTC_DCHECK(encoder_->encoder_queue_checker_.CalledOnValidThread()); |
JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
ScopedLocalRefFrame local_ref_frame(jni); |
- // We only ever send one message to |this| directly (not through a Bind()'d |
- // functor), so expect no ID/data. |
- RTC_CHECK(!msg->message_id) << "Unexpected message!"; |
- RTC_CHECK(!msg->pdata) << "Unexpected message!"; |
- if (!inited_) { |
- return; |
+ if (!encoder_->inited_) { |
+ encoder_->encode_task_ = std::unique_ptr<rtc::QueuedTask>(this); |
+ return false; |
} |
// It would be nice to recover from a failure here if one happened, but it's |
// unclear how to signal such a failure to the app, so instead we stay silent |
// about it and let the next app-called API method reveal the borkedness. |
- DeliverPendingOutputs(jni); |
- |
- // If there aren't more frames to deliver, we can start polling at lower rate. |
- if (input_frame_infos_.empty()) { |
- codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollNoFramesMs, this); |
- } else { |
- codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollMs, this); |
- } |
+ encoder_->DeliverPendingOutputs(jni); |
// Call log statistics here so it's called even if no frames are being |
// delivered. |
- LogStatistics(false); |
-} |
+ encoder_->LogStatistics(false); |
-bool MediaCodecVideoEncoder::ResetCodecOnCodecThread() { |
- RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
- ALOGE << "ResetOnCodecThread"; |
- if (ReleaseOnCodecThread() != WEBRTC_VIDEO_CODEC_OK) { |
- ALOGE << "Releasing codec failed during reset."; |
- return false; |
- } |
- if (InitEncodeOnCodecThread(width_, height_, 0, 0, false) != |
- WEBRTC_VIDEO_CODEC_OK) { |
- ALOGE << "Initializing encoder failed during reset."; |
- return false; |
+ // If there are no more frames to deliver, we can poll at a lower rate. |
+ if (encoder_->input_frame_infos_.empty()) { |
+ rtc::TaskQueue::Current()->PostDelayedTask( |
+ std::unique_ptr<rtc::QueuedTask>(this), kMediaCodecPollNoFramesMs); |
+ } else { |
+ rtc::TaskQueue::Current()->PostDelayedTask( |
+ std::unique_ptr<rtc::QueuedTask>(this), kMediaCodecPollMs); |
} |
- return true; |
+ |
+ return false; |
} |
-bool MediaCodecVideoEncoder::ProcessHWErrorOnCodecThread( |
+bool MediaCodecVideoEncoder::ProcessHWError( |
bool reset_if_fallback_unavailable) { |
- ALOGE << "ProcessHWErrorOnCodecThread"; |
+ ALOGE << "ProcessHWError"; |
if (FindMatchingCodec(cricket::InternalEncoderFactory().supported_codecs(), |
codec_)) { |
ALOGE << "Fallback to SW encoder."; |
@@ -506,20 +685,23 @@ bool MediaCodecVideoEncoder::ProcessHWErrorOnCodecThread( |
return false; |
} else if (reset_if_fallback_unavailable) { |
ALOGE << "Reset encoder."; |
- return ResetCodecOnCodecThread(); |
+ return ResetCodec(); |
} |
return false; |
} |
-int32_t MediaCodecVideoEncoder::ProcessHWErrorOnEncodeOnCodecThread() { |
- ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */); |
+int32_t MediaCodecVideoEncoder::ProcessHWErrorOnEncode() { |
+ ProcessHWError(true /* reset_if_fallback_unavailable */); |
return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE |
: WEBRTC_VIDEO_CODEC_ERROR; |
} |
-int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread( |
- int width, int height, int kbps, int fps, bool use_surface) { |
- RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
+int32_t MediaCodecVideoEncoder::InitEncodeInternal(int width, |
+ int height, |
+ int kbps, |
+ int fps, |
+ bool use_surface) { |
+ RTC_DCHECK_RUN_ON(&encoder_queue_checker_); |
if (sw_fallback_required_) { |
return WEBRTC_VIDEO_CODEC_OK; |
} |
@@ -529,7 +711,7 @@ int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread( |
const VideoCodecType codec_type = webrtc::PayloadNameToCodecType(codec_.name) |
.value_or(webrtc::kVideoCodecUnknown); |
- ALOGD << "InitEncodeOnCodecThread Type: " << (int)codec_type << ", " << width |
+ ALOGD << "InitEncodeInternal Type: " << (int)codec_type << ", " << width |
<< " x " << height << ". Bitrate: " << kbps << " kbps. Fps: " << fps; |
if (kbps == 0) { |
kbps = last_set_bitrate_kbps_; |
@@ -576,12 +758,12 @@ int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread( |
(use_surface ? egl_context_ : nullptr)); |
if (!encode_status) { |
ALOGE << "Failed to configure encoder."; |
- ProcessHWErrorOnCodecThread(false /* reset_if_fallback_unavailable */); |
+ ProcessHWError(false /* reset_if_fallback_unavailable */); |
return WEBRTC_VIDEO_CODEC_ERROR; |
} |
if (CheckException(jni)) { |
ALOGE << "Exception in init encode."; |
- ProcessHWErrorOnCodecThread(false /* reset_if_fallback_unavailable */); |
+ ProcessHWError(false /* reset_if_fallback_unavailable */); |
return WEBRTC_VIDEO_CODEC_ERROR; |
} |
@@ -591,12 +773,12 @@ int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread( |
j_get_input_buffers_method_)); |
if (CheckException(jni)) { |
ALOGE << "Exception in get input buffers."; |
- ProcessHWErrorOnCodecThread(false /* reset_if_fallback_unavailable */); |
+ ProcessHWError(false /* reset_if_fallback_unavailable */); |
return WEBRTC_VIDEO_CODEC_ERROR; |
} |
if (IsNull(jni, input_buffers)) { |
- ProcessHWErrorOnCodecThread(false /* reset_if_fallback_unavailable */); |
+ ProcessHWError(false /* reset_if_fallback_unavailable */); |
return WEBRTC_VIDEO_CODEC_ERROR; |
} |
@@ -612,7 +794,7 @@ int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread( |
break; |
default: |
LOG(LS_ERROR) << "Wrong color format."; |
- ProcessHWErrorOnCodecThread(false /* reset_if_fallback_unavailable */); |
+ ProcessHWError(false /* reset_if_fallback_unavailable */); |
return WEBRTC_VIDEO_CODEC_ERROR; |
} |
size_t num_input_buffers = jni->GetArrayLength(input_buffers); |
@@ -626,7 +808,7 @@ int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread( |
jni->GetDirectBufferCapacity(input_buffers_[i]); |
if (CheckException(jni)) { |
ALOGE << "Exception in get direct buffer capacity."; |
- ProcessHWErrorOnCodecThread(false /* reset_if_fallback_unavailable */); |
+ ProcessHWError(false /* reset_if_fallback_unavailable */); |
return WEBRTC_VIDEO_CODEC_ERROR; |
} |
RTC_CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity"; |
@@ -637,152 +819,9 @@ int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread( |
return WEBRTC_VIDEO_CODEC_OK; |
} |
-int32_t MediaCodecVideoEncoder::EncodeOnCodecThread( |
- const webrtc::VideoFrame& frame, |
- const std::vector<webrtc::FrameType>* frame_types, |
- const int64_t frame_input_time_ms) { |
- RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
- if (sw_fallback_required_) |
- return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; |
- JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
- ScopedLocalRefFrame local_ref_frame(jni); |
- |
- if (!inited_) { |
- return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
- } |
- |
- bool send_key_frame = false; |
- if (codec_mode_ == webrtc::kRealtimeVideo) { |
- ++frames_received_since_last_key_; |
- int64_t now_ms = rtc::TimeMillis(); |
- if (last_frame_received_ms_ != -1 && |
- (now_ms - last_frame_received_ms_) > kFrameDiffThresholdMs) { |
- // Add limit to prevent triggering a key for every frame for very low |
- // framerates (e.g. if frame diff > kFrameDiffThresholdMs). |
- if (frames_received_since_last_key_ > kMinKeyFrameInterval) { |
- ALOGD << "Send key, frame diff: " << (now_ms - last_frame_received_ms_); |
- send_key_frame = true; |
- } |
- frames_received_since_last_key_ = 0; |
- } |
- last_frame_received_ms_ = now_ms; |
- } |
- |
- frames_received_++; |
- if (!DeliverPendingOutputs(jni)) { |
- if (!ProcessHWErrorOnCodecThread( |
- true /* reset_if_fallback_unavailable */)) { |
- return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE |
- : WEBRTC_VIDEO_CODEC_ERROR; |
- } |
- } |
- if (frames_encoded_ < kMaxEncodedLogFrames) { |
- ALOGD << "Encoder frame in # " << (frames_received_ - 1) |
- << ". TS: " << (int)(current_timestamp_us_ / 1000) |
- << ". Q: " << input_frame_infos_.size() << ". Fps: " << last_set_fps_ |
- << ". Kbps: " << last_set_bitrate_kbps_; |
- } |
- |
- if (drop_next_input_frame_) { |
- ALOGW << "Encoder drop frame - failed callback."; |
- drop_next_input_frame_ = false; |
- current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; |
- frames_dropped_media_encoder_++; |
- return WEBRTC_VIDEO_CODEC_OK; |
- } |
- |
- RTC_CHECK(frame_types->size() == 1) << "Unexpected stream count"; |
- |
- // Check if we accumulated too many frames in encoder input buffers and drop |
- // frame if so. |
- if (input_frame_infos_.size() > MAX_ENCODER_Q_SIZE) { |
- ALOGD << "Already " << input_frame_infos_.size() |
- << " frames in the queue, dropping" |
- << ". TS: " << (int)(current_timestamp_us_ / 1000) |
- << ". Fps: " << last_set_fps_ |
- << ". Consecutive drops: " << consecutive_full_queue_frame_drops_; |
- current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; |
- consecutive_full_queue_frame_drops_++; |
- if (consecutive_full_queue_frame_drops_ >= |
- ENCODER_STALL_FRAMEDROP_THRESHOLD) { |
- ALOGE << "Encoder got stuck."; |
- return ProcessHWErrorOnEncodeOnCodecThread(); |
- } |
- frames_dropped_media_encoder_++; |
- return WEBRTC_VIDEO_CODEC_OK; |
- } |
- consecutive_full_queue_frame_drops_ = 0; |
- |
- rtc::scoped_refptr<webrtc::VideoFrameBuffer> input_buffer( |
- frame.video_frame_buffer()); |
- |
- VideoFrame input_frame(input_buffer, frame.timestamp(), |
- frame.render_time_ms(), frame.rotation()); |
- |
- if (!MaybeReconfigureEncoderOnCodecThread(input_frame)) { |
- ALOGE << "Failed to reconfigure encoder."; |
- return WEBRTC_VIDEO_CODEC_ERROR; |
- } |
- |
- const bool key_frame = |
- frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame; |
- bool encode_status = true; |
- if (!input_frame.video_frame_buffer()->native_handle()) { |
- int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_, |
- j_dequeue_input_buffer_method_); |
- if (CheckException(jni)) { |
- ALOGE << "Exception in dequeu input buffer."; |
- return ProcessHWErrorOnEncodeOnCodecThread(); |
- } |
- if (j_input_buffer_index == -1) { |
- // Video codec falls behind - no input buffer available. |
- ALOGW << "Encoder drop frame - no input buffers available"; |
- if (frames_received_ > 1) { |
- current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; |
- frames_dropped_media_encoder_++; |
- } else { |
- // Input buffers are not ready after codec initialization, HW is still |
- // allocating thme - this is expected and should not result in drop |
- // frame report. |
- frames_received_ = 0; |
- } |
- return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887. |
- } else if (j_input_buffer_index == -2) { |
- return ProcessHWErrorOnEncodeOnCodecThread(); |
- } |
- encode_status = EncodeByteBufferOnCodecThread(jni, key_frame, input_frame, |
- j_input_buffer_index); |
- } else { |
- encode_status = EncodeTextureOnCodecThread(jni, key_frame, input_frame); |
- } |
- |
- if (!encode_status) { |
- ALOGE << "Failed encode frame with timestamp: " << input_frame.timestamp(); |
- return ProcessHWErrorOnEncodeOnCodecThread(); |
- } |
- |
- // Save input image timestamps for later output. |
- input_frame_infos_.emplace_back( |
- frame_input_time_ms, input_frame.timestamp(), |
- input_frame.render_time_ms(), input_frame.rotation()); |
- |
- last_input_timestamp_ms_ = |
- current_timestamp_us_ / rtc::kNumMicrosecsPerMillisec; |
- |
- current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; |
- |
- codec_thread_->Clear(this); |
- codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollMs, this); |
- |
- if (!DeliverPendingOutputs(jni)) { |
- return ProcessHWErrorOnEncodeOnCodecThread(); |
- } |
- return WEBRTC_VIDEO_CODEC_OK; |
-} |
- |
-bool MediaCodecVideoEncoder::MaybeReconfigureEncoderOnCodecThread( |
+bool MediaCodecVideoEncoder::MaybeReconfigureEncoder( |
const webrtc::VideoFrame& frame) { |
- RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
+ RTC_DCHECK_RUN_ON(&encoder_queue_checker_); |
const bool is_texture_frame = |
frame.video_frame_buffer()->native_handle() != nullptr; |
@@ -809,15 +848,17 @@ bool MediaCodecVideoEncoder::MaybeReconfigureEncoderOnCodecThread( |
if (!reconfigure_due_to_format && !reconfigure_due_to_size) |
return true; |
- ReleaseOnCodecThread(); |
+ Release(); |
- return InitEncodeOnCodecThread(width_, height_, 0, 0 , is_texture_frame) == |
- WEBRTC_VIDEO_CODEC_OK; |
+ return InitEncodeInternal(width_, height_, 0, 0, is_texture_frame) == |
+ WEBRTC_VIDEO_CODEC_OK; |
} |
-bool MediaCodecVideoEncoder::EncodeByteBufferOnCodecThread(JNIEnv* jni, |
- bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index) { |
- RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
+bool MediaCodecVideoEncoder::EncodeByteBuffer(JNIEnv* jni, |
+ bool key_frame, |
+ const webrtc::VideoFrame& frame, |
+ int input_buffer_index) { |
+ RTC_DCHECK_RUN_ON(&encoder_queue_checker_); |
RTC_CHECK(!use_surface_); |
jobject j_input_buffer = input_buffers_[input_buffer_index]; |
@@ -825,7 +866,7 @@ bool MediaCodecVideoEncoder::EncodeByteBufferOnCodecThread(JNIEnv* jni, |
reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer)); |
if (CheckException(jni)) { |
ALOGE << "Exception in get direct buffer address."; |
- ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */); |
+ ProcessHWError(true /* reset_if_fallback_unavailable */); |
return false; |
} |
RTC_CHECK(yuv_buffer) << "Indirect buffer??"; |
@@ -847,15 +888,16 @@ bool MediaCodecVideoEncoder::EncodeByteBufferOnCodecThread(JNIEnv* jni, |
current_timestamp_us_); |
if (CheckException(jni)) { |
ALOGE << "Exception in encode buffer."; |
- ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */); |
+ ProcessHWError(true /* reset_if_fallback_unavailable */); |
return false; |
} |
return encode_status; |
} |
-bool MediaCodecVideoEncoder::EncodeTextureOnCodecThread(JNIEnv* jni, |
- bool key_frame, const webrtc::VideoFrame& frame) { |
- RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
+bool MediaCodecVideoEncoder::EncodeTexture(JNIEnv* jni, |
+ bool key_frame, |
+ const webrtc::VideoFrame& frame) { |
+ RTC_DCHECK_RUN_ON(&encoder_queue_checker_); |
RTC_CHECK(use_surface_); |
NativeHandleImpl* handle = static_cast<NativeHandleImpl*>( |
frame.video_frame_buffer()->native_handle()); |
@@ -868,78 +910,12 @@ bool MediaCodecVideoEncoder::EncodeTextureOnCodecThread(JNIEnv* jni, |
current_timestamp_us_); |
if (CheckException(jni)) { |
ALOGE << "Exception in encode texture."; |
- ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */); |
+ ProcessHWError(true /* reset_if_fallback_unavailable */); |
return false; |
} |
return encode_status; |
} |
-int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread( |
- webrtc::EncodedImageCallback* callback) { |
- RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
- JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
- ScopedLocalRefFrame local_ref_frame(jni); |
- callback_ = callback; |
- return WEBRTC_VIDEO_CODEC_OK; |
-} |
- |
-int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() { |
- RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
- if (!inited_) { |
- return WEBRTC_VIDEO_CODEC_OK; |
- } |
- JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
- ALOGD << "EncoderReleaseOnCodecThread: Frames received: " << |
- frames_received_ << ". Encoded: " << frames_encoded_ << |
- ". Dropped: " << frames_dropped_media_encoder_; |
- ScopedLocalRefFrame local_ref_frame(jni); |
- for (size_t i = 0; i < input_buffers_.size(); ++i) |
- jni->DeleteGlobalRef(input_buffers_[i]); |
- input_buffers_.clear(); |
- jni->CallVoidMethod(*j_media_codec_video_encoder_, j_release_method_); |
- if (CheckException(jni)) { |
- ALOGE << "Exception in release."; |
- ProcessHWErrorOnCodecThread(false /* reset_if_fallback_unavailable */); |
- return WEBRTC_VIDEO_CODEC_ERROR; |
- } |
- rtc::MessageQueueManager::Clear(this); |
- inited_ = false; |
- use_surface_ = false; |
- ALOGD << "EncoderReleaseOnCodecThread done."; |
- return WEBRTC_VIDEO_CODEC_OK; |
-} |
- |
-int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate, |
- uint32_t frame_rate) { |
- RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
- if (sw_fallback_required_) |
- return WEBRTC_VIDEO_CODEC_OK; |
- frame_rate = (frame_rate < MAX_ALLOWED_VIDEO_FPS) ? |
- frame_rate : MAX_ALLOWED_VIDEO_FPS; |
- if (last_set_bitrate_kbps_ == new_bit_rate && |
- last_set_fps_ == frame_rate) { |
- return WEBRTC_VIDEO_CODEC_OK; |
- } |
- JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
- ScopedLocalRefFrame local_ref_frame(jni); |
- if (new_bit_rate > 0) { |
- last_set_bitrate_kbps_ = new_bit_rate; |
- } |
- if (frame_rate > 0) { |
- last_set_fps_ = frame_rate; |
- } |
- bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_, |
- j_set_rates_method_, |
- last_set_bitrate_kbps_, |
- last_set_fps_); |
- if (CheckException(jni) || !ret) { |
- ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */); |
- return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_OK |
- : WEBRTC_VIDEO_CODEC_ERROR; |
- } |
- return WEBRTC_VIDEO_CODEC_OK; |
-} |
- |
int MediaCodecVideoEncoder::GetOutputBufferInfoIndex( |
JNIEnv* jni, |
jobject j_output_buffer_info) { |
@@ -966,14 +942,14 @@ jlong MediaCodecVideoEncoder::GetOutputBufferInfoPresentationTimestampUs( |
} |
bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) { |
- RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
+ RTC_DCHECK_RUN_ON(&encoder_queue_checker_); |
while (true) { |
jobject j_output_buffer_info = jni->CallObjectMethod( |
*j_media_codec_video_encoder_, j_dequeue_output_buffer_method_); |
if (CheckException(jni)) { |
ALOGE << "Exception in set dequeue output buffer."; |
- ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */); |
+ ProcessHWError(true /* reset_if_fallback_unavailable */); |
return WEBRTC_VIDEO_CODEC_ERROR; |
} |
if (IsNull(jni, j_output_buffer_info)) { |
@@ -983,7 +959,7 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) { |
int output_buffer_index = |
GetOutputBufferInfoIndex(jni, j_output_buffer_info); |
if (output_buffer_index == -1) { |
- ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */); |
+ ProcessHWError(true /* reset_if_fallback_unavailable */); |
return false; |
} |
@@ -1013,7 +989,7 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) { |
jni->GetDirectBufferAddress(j_output_buffer)); |
if (CheckException(jni)) { |
ALOGE << "Exception in get direct buffer address."; |
- ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */); |
+ ProcessHWError(true /* reset_if_fallback_unavailable */); |
return WEBRTC_VIDEO_CODEC_ERROR; |
} |
@@ -1102,7 +1078,7 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) { |
ALOGE << "Data:" << image->_buffer[0] << " " << image->_buffer[1] |
<< " " << image->_buffer[2] << " " << image->_buffer[3] |
<< " " << image->_buffer[4] << " " << image->_buffer[5]; |
- ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */); |
+ ProcessHWError(true /* reset_if_fallback_unavailable */); |
return false; |
} |
header.VerifyAndAllocateFragmentationHeader(nalu_idxs.size()); |
@@ -1122,7 +1098,7 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) { |
j_release_output_buffer_method_, |
output_buffer_index); |
if (CheckException(jni) || !success) { |
- ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */); |
+ ProcessHWError(true /* reset_if_fallback_unavailable */); |
return false; |
} |
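One non-obvious part of the task-queue design in this file is how the polling loop pauses and resumes: while the encoder is released or not yet encoding, the task object is parked in |encode_task_|, and the next Encode() call moves it back onto the queue. A condensed sketch of that ownership hand-off, paraphrasing the Encode() and EncodeTask::Run() bodies above (illustrative, not a drop-in replacement):

int32_t MediaCodecVideoEncoder::Encode(/* ... */) {
  // ...
  if (encode_task_) {
    // The task was parked; hand ownership back to the current task queue to
    // restart the MediaCodec polling loop.
    rtc::TaskQueue::Current()->PostDelayedTask(std::move(encode_task_),
                                               kMediaCodecPollMs);
  }
  // ...
  return WEBRTC_VIDEO_CODEC_OK;
}

bool MediaCodecVideoEncoder::EncodeTask::Run() {
  if (!encoder_) {
    return true;  // Encoder destroyed; the task queue deletes the task.
  }
  if (!encoder_->inited_) {
    // Park the task in the encoder and pause polling until the next Encode().
    encoder_->encode_task_ = std::unique_ptr<rtc::QueuedTask>(this);
    return false;  // The task owns itself again; the queue must not delete it.
  }
  // ... DeliverPendingOutputs(), then re-post at kMediaCodecPollMs or
  // kMediaCodecPollNoFramesMs depending on whether input frames are queued ...
  return false;
}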