OLD | NEW |
---|---|
1 /* | 1 /* |
2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
(...skipping 111 matching lines...) | |
122 | 122 |
123 void OnDroppedFrame() override; | 123 void OnDroppedFrame() override; |
124 | 124 |
125 bool SupportsNativeHandle() const override { return egl_context_ != nullptr; } | 125 bool SupportsNativeHandle() const override { return egl_context_ != nullptr; } |
126 const char* ImplementationName() const override; | 126 const char* ImplementationName() const override; |
127 | 127 |
128 private: | 128 private: |
129 // ResetCodecOnCodecThread() calls ReleaseOnCodecThread() and | 129 // ResetCodecOnCodecThread() calls ReleaseOnCodecThread() and |
130 // InitEncodeOnCodecThread() in an attempt to restore the codec to an | 130 // InitEncodeOnCodecThread() in an attempt to restore the codec to an |
131 // operable state. Necessary after all manner of OMX-layer errors. | 131 // operable state. Necessary after all manner of OMX-layer errors. |
132 // Returns true if the codec was reset successfully. | |
132 bool ResetCodecOnCodecThread(); | 133 bool ResetCodecOnCodecThread(); |
133 | 134 |
135 // Fall back to a software encoder if one is supported, else try to reset the | |
136 // encoder. Called with tryResetIfFallbackUnavailable equal to false from | |
137 // init/release encoder so that we don't go into infinite recursion. | |
138 // Returns true if the codec was reset successfully. | |
139 bool ProcessHWErrorOnCodecThread(bool tryResetIfFallbackUnavailable); | |
stefan-webrtc
2016/10/06 09:27:47
No camel case variable names.
sakal
2016/10/06 12:10:02
Done.
| |
140 | |
141 // Calls ProcessHWErrorOnCodecThread(true). Returns | |
142 // WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE if sw_fallback_required_ was set or | |
143 // WEBRTC_VIDEO_CODEC_ERROR otherwise. | |
144 int32_t ProcessHWErrorOnEncodeOnCodecThread(); | |
145 | |
134 // Implementation of webrtc::VideoEncoder methods above, all running on the | 146 // Implementation of webrtc::VideoEncoder methods above, all running on the |
135 // codec thread exclusively. | 147 // codec thread exclusively. |
136 // | 148 // |
137 // If width==0 then this is assumed to be a re-initialization and the | 149 // If width==0 then this is assumed to be a re-initialization and the |
138 // previously-current values are reused instead of the passed parameters | 150 // previously-current values are reused instead of the passed parameters |
139 // (makes it easier to reason about thread-safety). | 151 // (makes it easier to reason about thread-safety). |
140 int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps, | 152 int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps, |
141 bool use_surface); | 153 bool use_surface); |
142 // Reconfigure to match |frame| in width, height. Also reconfigures the | 154 // Reconfigure to match |frame| in width, height. Also reconfigures the |
143 // encoder if |frame| is a texture/byte buffer and the encoder is initialized | 155 // encoder if |frame| is a texture/byte buffer and the encoder is initialized |
(...skipping 131 matching lines...) | |
275 // EGL context - owned by factory, should not be allocated/destroyed | 287 // EGL context - owned by factory, should not be allocated/destroyed |
276 // by MediaCodecVideoEncoder. | 288 // by MediaCodecVideoEncoder. |
277 jobject egl_context_; | 289 jobject egl_context_; |
278 | 290 |
279 // Temporary fix for VP8. | 291 // Temporary fix for VP8. |
280 // Sends a key frame if frames are largely spaced apart (possibly | 292 // Sends a key frame if frames are largely spaced apart (possibly |
281 // corresponding to a large image change). | 293 // corresponding to a large image change). |
282 int64_t last_frame_received_ms_; | 294 int64_t last_frame_received_ms_; |
283 int frames_received_since_last_key_; | 295 int frames_received_since_last_key_; |
284 webrtc::VideoCodecMode codec_mode_; | 296 webrtc::VideoCodecMode codec_mode_; |
297 | |
298 bool sw_fallback_required_; | |
285 }; | 299 }; |
286 | 300 |
287 MediaCodecVideoEncoder::~MediaCodecVideoEncoder() { | 301 MediaCodecVideoEncoder::~MediaCodecVideoEncoder() { |
288 // Call Release() to ensure no more callbacks to us after we are deleted. | 302 // Call Release() to ensure no more callbacks to us after we are deleted. |
289 Release(); | 303 Release(); |
290 } | 304 } |
291 | 305 |
292 MediaCodecVideoEncoder::MediaCodecVideoEncoder( | 306 MediaCodecVideoEncoder::MediaCodecVideoEncoder(JNIEnv* jni, |
293 JNIEnv* jni, VideoCodecType codecType, jobject egl_context) : | 307 VideoCodecType codecType, |
294 codecType_(codecType), | 308 jobject egl_context) |
295 callback_(NULL), | 309 : codecType_(codecType), |
296 codec_thread_(new Thread()), | 310 callback_(NULL), |
297 j_media_codec_video_encoder_class_( | 311 codec_thread_(new Thread()), |
298 jni, | 312 j_media_codec_video_encoder_class_( |
299 FindClass(jni, "org/webrtc/MediaCodecVideoEncoder")), | 313 jni, |
300 j_media_codec_video_encoder_( | 314 FindClass(jni, "org/webrtc/MediaCodecVideoEncoder")), |
301 jni, | 315 j_media_codec_video_encoder_( |
302 jni->NewObject(*j_media_codec_video_encoder_class_, | 316 jni, |
303 GetMethodID(jni, | 317 jni->NewObject(*j_media_codec_video_encoder_class_, |
304 *j_media_codec_video_encoder_class_, | 318 GetMethodID(jni, |
305 "<init>", | 319 *j_media_codec_video_encoder_class_, |
306 "()V"))), | 320 "<init>", |
307 inited_(false), | 321 "()V"))), |
308 use_surface_(false), | 322 inited_(false), |
309 picture_id_(0), | 323 use_surface_(false), |
310 egl_context_(egl_context) { | 324 picture_id_(0), |
325 egl_context_(egl_context), | |
326 sw_fallback_required_(false) { | |
311 ScopedLocalRefFrame local_ref_frame(jni); | 327 ScopedLocalRefFrame local_ref_frame(jni); |
312 // It would be nice to avoid spinning up a new thread per MediaCodec, and | 328 // It would be nice to avoid spinning up a new thread per MediaCodec, and |
313 // instead re-use e.g. the PeerConnectionFactory's |worker_thread_|, but bug | 329 // instead re-use e.g. the PeerConnectionFactory's |worker_thread_|, but bug |
314 // 2732 means that deadlocks abound. This class synchronously trampolines | 330 // 2732 means that deadlocks abound. This class synchronously trampolines |
315 // to |codec_thread_|, so if anything else can be coming to _us_ from | 331 // to |codec_thread_|, so if anything else can be coming to _us_ from |
316 // |codec_thread_|, or from any thread holding the |_sendCritSect| described | 332 // |codec_thread_|, or from any thread holding the |_sendCritSect| described |
317 // in the bug, we have a problem. For now work around that with a dedicated | 333 // in the bug, we have a problem. For now work around that with a dedicated |
318 // thread. | 334 // thread. |
319 codec_thread_->SetName("MediaCodecVideoEncoder", NULL); | 335 codec_thread_->SetName("MediaCodecVideoEncoder", NULL); |
320 RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoEncoder"; | 336 RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoEncoder"; |
(...skipping 33 matching lines...) | |
354 j_color_format_field_ = | 370 j_color_format_field_ = |
355 GetFieldID(jni, *j_media_codec_video_encoder_class_, "colorFormat", "I"); | 371 GetFieldID(jni, *j_media_codec_video_encoder_class_, "colorFormat", "I"); |
356 j_info_index_field_ = | 372 j_info_index_field_ = |
357 GetFieldID(jni, j_output_buffer_info_class, "index", "I"); | 373 GetFieldID(jni, j_output_buffer_info_class, "index", "I"); |
358 j_info_buffer_field_ = GetFieldID( | 374 j_info_buffer_field_ = GetFieldID( |
359 jni, j_output_buffer_info_class, "buffer", "Ljava/nio/ByteBuffer;"); | 375 jni, j_output_buffer_info_class, "buffer", "Ljava/nio/ByteBuffer;"); |
360 j_info_is_key_frame_field_ = | 376 j_info_is_key_frame_field_ = |
361 GetFieldID(jni, j_output_buffer_info_class, "isKeyFrame", "Z"); | 377 GetFieldID(jni, j_output_buffer_info_class, "isKeyFrame", "Z"); |
362 j_info_presentation_timestamp_us_field_ = GetFieldID( | 378 j_info_presentation_timestamp_us_field_ = GetFieldID( |
363 jni, j_output_buffer_info_class, "presentationTimestampUs", "J"); | 379 jni, j_output_buffer_info_class, "presentationTimestampUs", "J"); |
364 CHECK_EXCEPTION(jni) << "MediaCodecVideoEncoder ctor failed"; | 380 if (CheckException(jni)) { |
381 ALOGW << "MediaCodecVideoEncoder ctor failed."; | |
382 ProcessHWErrorOnCodecThread(true /* tryResetIfFallbackUnavailable */); | |
383 } | |
365 srand(time(NULL)); | 384 srand(time(NULL)); |
366 AllowBlockingCalls(); | 385 AllowBlockingCalls(); |
367 } | 386 } |
368 | 387 |
369 int32_t MediaCodecVideoEncoder::InitEncode( | 388 int32_t MediaCodecVideoEncoder::InitEncode( |
370 const webrtc::VideoCodec* codec_settings, | 389 const webrtc::VideoCodec* codec_settings, |
371 int32_t /* number_of_cores */, | 390 int32_t /* number_of_cores */, |
372 size_t /* max_payload_size */) { | 391 size_t /* max_payload_size */) { |
373 if (codec_settings == NULL) { | 392 if (codec_settings == NULL) { |
374 ALOGE << "NULL VideoCodec instance"; | 393 ALOGE << "NULL VideoCodec instance"; |
375 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 394 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
376 } | 395 } |
377 // Factory should guard against other codecs being used with us. | 396 // Factory should guard against other codecs being used with us. |
378 RTC_CHECK(codec_settings->codecType == codecType_) | 397 RTC_CHECK(codec_settings->codecType == codecType_) |
379 << "Unsupported codec " << codec_settings->codecType << " for " | 398 << "Unsupported codec " << codec_settings->codecType << " for " |
380 << codecType_; | 399 << codecType_; |
381 | 400 if (sw_fallback_required_) { |
401 return WEBRTC_VIDEO_CODEC_OK; | |
402 } | |
382 codec_mode_ = codec_settings->mode; | 403 codec_mode_ = codec_settings->mode; |
383 int init_width = codec_settings->width; | 404 int init_width = codec_settings->width; |
384 int init_height = codec_settings->height; | 405 int init_height = codec_settings->height; |
385 // Scaling is disabled for VP9, but optionally enabled for VP8. | 406 // Scaling is disabled for VP9, but optionally enabled for VP8. |
386 // TODO(pbos): Extract automaticResizeOn out of VP8 settings. | 407 // TODO(pbos): Extract automaticResizeOn out of VP8 settings. |
387 scale_ = false; | 408 scale_ = false; |
388 if (codecType_ == kVideoCodecVP8) { | 409 if (codecType_ == kVideoCodecVP8) { |
389 scale_ = codec_settings->codecSpecific.VP8.automaticResizeOn; | 410 scale_ = codec_settings->codecSpecific.VP8.automaticResizeOn; |
390 } else if (codecType_ != kVideoCodecVP9) { | 411 } else if (codecType_ != kVideoCodecVP9) { |
391 scale_ = true; | 412 scale_ = true; |
(...skipping 95 matching lines...) | |
487 } | 508 } |
488 | 509 |
489 // Call log statistics here so it's called even if no frames are being | 510 // Call log statistics here so it's called even if no frames are being |
490 // delivered. | 511 // delivered. |
491 LogStatistics(false); | 512 LogStatistics(false); |
492 } | 513 } |
493 | 514 |
494 bool MediaCodecVideoEncoder::ResetCodecOnCodecThread() { | 515 bool MediaCodecVideoEncoder::ResetCodecOnCodecThread() { |
495 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 516 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
496 ALOGE << "ResetOnCodecThread"; | 517 ALOGE << "ResetOnCodecThread"; |
497 if (ReleaseOnCodecThread() != WEBRTC_VIDEO_CODEC_OK || | 518 if (ReleaseOnCodecThread() != WEBRTC_VIDEO_CODEC_OK) { |
498 InitEncodeOnCodecThread(width_, height_, 0, 0, false) != | 519 ALOGE << "Releasing codec failed during reset."; |
499 WEBRTC_VIDEO_CODEC_OK) { | 520 return false; |
500 // TODO(fischman): wouldn't it be nice if there was a way to gracefully | 521 } |
501 // degrade to a SW encoder at this point? There isn't one AFAICT :( | 522 if (InitEncodeOnCodecThread(width_, height_, 0, 0, false) != |
502 // https://code.google.com/p/webrtc/issues/detail?id=2920 | 523 WEBRTC_VIDEO_CODEC_OK) { |
524 ALOGE << "Initializing encoder failed during reset."; | |
503 return false; | 525 return false; |
504 } | 526 } |
505 return true; | 527 return true; |
506 } | 528 } |
507 | 529 |
530 bool MediaCodecVideoEncoder::ProcessHWErrorOnCodecThread( | |
531 bool tryResetIfFallbackUnavailable) { | |
532 ALOGE << "ProcessHWErrorOnCodecThread"; | |
533 if (VideoEncoder::IsSupported(VideoEncoder::CodecToEncoderType(codecType_))) { | |
534 ALOGE << "Fallback to SW encoder."; | |
535 sw_fallback_required_ = true; | |
536 return false; | |
537 } else if (tryResetIfFallbackUnavailable) { | |
538 ALOGE << "Reset encoder."; | |
539 return ResetCodecOnCodecThread(); | |
540 } | |
541 return false; | |
542 } | |
543 | |
544 int32_t MediaCodecVideoEncoder::ProcessHWErrorOnEncodeOnCodecThread() { | |
545 ProcessHWErrorOnCodecThread(true /* tryResetIfFallbackUnavailable */); | |
546 return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE | |
547 : WEBRTC_VIDEO_CODEC_ERROR; | |
548 } | |
549 | |
508 int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread( | 550 int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread( |
509 int width, int height, int kbps, int fps, bool use_surface) { | 551 int width, int height, int kbps, int fps, bool use_surface) { |
510 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 552 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
553 if (sw_fallback_required_) { | |
554 return WEBRTC_VIDEO_CODEC_OK; | |
555 } | |
511 RTC_CHECK(!use_surface || egl_context_ != nullptr) << "EGL context not set."; | 556 RTC_CHECK(!use_surface || egl_context_ != nullptr) << "EGL context not set."; |
512 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 557 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
513 ScopedLocalRefFrame local_ref_frame(jni); | 558 ScopedLocalRefFrame local_ref_frame(jni); |
514 | 559 |
515 ALOGD << "InitEncodeOnCodecThread Type: " << (int)codecType_ << ", " << | 560 ALOGD << "InitEncodeOnCodecThread Type: " << (int)codecType_ << ", " << |
516 width << " x " << height << ". Bitrate: " << kbps << | 561 width << " x " << height << ". Bitrate: " << kbps << |
517 " kbps. Fps: " << fps; | 562 " kbps. Fps: " << fps; |
518 if (kbps == 0) { | 563 if (kbps == 0) { |
519 kbps = last_set_bitrate_kbps_; | 564 kbps = last_set_bitrate_kbps_; |
520 } | 565 } |
(...skipping 32 matching lines...) | |
553 | 598 |
554 // We enforce no extra stride/padding in the format creation step. | 599 // We enforce no extra stride/padding in the format creation step. |
555 jobject j_video_codec_enum = JavaEnumFromIndexAndClassName( | 600 jobject j_video_codec_enum = JavaEnumFromIndexAndClassName( |
556 jni, "MediaCodecVideoEncoder$VideoCodecType", codecType_); | 601 jni, "MediaCodecVideoEncoder$VideoCodecType", codecType_); |
557 const bool encode_status = jni->CallBooleanMethod( | 602 const bool encode_status = jni->CallBooleanMethod( |
558 *j_media_codec_video_encoder_, j_init_encode_method_, | 603 *j_media_codec_video_encoder_, j_init_encode_method_, |
559 j_video_codec_enum, width, height, kbps, fps, | 604 j_video_codec_enum, width, height, kbps, fps, |
560 (use_surface ? egl_context_ : nullptr)); | 605 (use_surface ? egl_context_ : nullptr)); |
561 if (!encode_status) { | 606 if (!encode_status) { |
562 ALOGE << "Failed to configure encoder."; | 607 ALOGE << "Failed to configure encoder."; |
608 ProcessHWErrorOnCodecThread(false /* tryResetIfFallbackUnavailable */); | |
563 return WEBRTC_VIDEO_CODEC_ERROR; | 609 return WEBRTC_VIDEO_CODEC_ERROR; |
564 } | 610 } |
565 CHECK_EXCEPTION(jni); | 611 if (CheckException(jni)) { |
612 ALOGE << "Exception in init encode."; | |
613 ProcessHWErrorOnCodecThread(false /* tryResetIfFallbackUnavailable */); | |
614 return WEBRTC_VIDEO_CODEC_ERROR; | |
615 } | |
566 | 616 |
567 if (!use_surface) { | 617 if (!use_surface) { |
568 jobjectArray input_buffers = reinterpret_cast<jobjectArray>( | 618 jobjectArray input_buffers = reinterpret_cast<jobjectArray>( |
569 jni->CallObjectMethod(*j_media_codec_video_encoder_, | 619 jni->CallObjectMethod(*j_media_codec_video_encoder_, |
570 j_get_input_buffers_method_)); | 620 j_get_input_buffers_method_)); |
571 CHECK_EXCEPTION(jni); | 621 if (CheckException(jni)) { |
572 if (IsNull(jni, input_buffers)) { | 622 ALOGE << "Exception in get input buffers."; |
623 ProcessHWErrorOnCodecThread(false /* tryResetIfFallbackUnavailable */); | |
573 return WEBRTC_VIDEO_CODEC_ERROR; | 624 return WEBRTC_VIDEO_CODEC_ERROR; |
574 } | 625 } |
575 | 626 |
627 if (IsNull(jni, input_buffers)) { | |
628 ProcessHWErrorOnCodecThread(false /* tryResetIfFallbackUnavailable */); | |
629 return WEBRTC_VIDEO_CODEC_ERROR; | |
630 } | |
631 | |
576 switch (GetIntField(jni, *j_media_codec_video_encoder_, | 632 switch (GetIntField(jni, *j_media_codec_video_encoder_, |
577 j_color_format_field_)) { | 633 j_color_format_field_)) { |
578 case COLOR_FormatYUV420Planar: | 634 case COLOR_FormatYUV420Planar: |
579 encoder_fourcc_ = libyuv::FOURCC_YU12; | 635 encoder_fourcc_ = libyuv::FOURCC_YU12; |
580 break; | 636 break; |
581 case COLOR_FormatYUV420SemiPlanar: | 637 case COLOR_FormatYUV420SemiPlanar: |
582 case COLOR_QCOM_FormatYUV420SemiPlanar: | 638 case COLOR_QCOM_FormatYUV420SemiPlanar: |
583 case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m: | 639 case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m: |
584 encoder_fourcc_ = libyuv::FOURCC_NV12; | 640 encoder_fourcc_ = libyuv::FOURCC_NV12; |
585 break; | 641 break; |
586 default: | 642 default: |
587 LOG(LS_ERROR) << "Wrong color format."; | 643 LOG(LS_ERROR) << "Wrong color format."; |
644 ProcessHWErrorOnCodecThread(false /* tryResetIfFallbackUnavailable */); | |
588 return WEBRTC_VIDEO_CODEC_ERROR; | 645 return WEBRTC_VIDEO_CODEC_ERROR; |
589 } | 646 } |
590 size_t num_input_buffers = jni->GetArrayLength(input_buffers); | 647 size_t num_input_buffers = jni->GetArrayLength(input_buffers); |
591 RTC_CHECK(input_buffers_.empty()) | 648 RTC_CHECK(input_buffers_.empty()) |
592 << "Unexpected double InitEncode without Release"; | 649 << "Unexpected double InitEncode without Release"; |
593 input_buffers_.resize(num_input_buffers); | 650 input_buffers_.resize(num_input_buffers); |
594 for (size_t i = 0; i < num_input_buffers; ++i) { | 651 for (size_t i = 0; i < num_input_buffers; ++i) { |
595 input_buffers_[i] = | 652 input_buffers_[i] = |
596 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i)); | 653 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i)); |
597 int64_t yuv_buffer_capacity = | 654 int64_t yuv_buffer_capacity = |
598 jni->GetDirectBufferCapacity(input_buffers_[i]); | 655 jni->GetDirectBufferCapacity(input_buffers_[i]); |
599 CHECK_EXCEPTION(jni); | 656 if (CheckException(jni)) { |
657 ALOGE << "Exception in get direct buffer capacity."; | |
658 ProcessHWErrorOnCodecThread(false /* tryResetIfFallbackUnavailable */); | |
659 return WEBRTC_VIDEO_CODEC_ERROR; | |
660 } | |
600 RTC_CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity"; | 661 RTC_CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity"; |
601 } | 662 } |
602 } | 663 } |
603 | 664 |
604 inited_ = true; | 665 inited_ = true; |
605 return WEBRTC_VIDEO_CODEC_OK; | 666 return WEBRTC_VIDEO_CODEC_OK; |
606 } | 667 } |
607 | 668 |
608 int32_t MediaCodecVideoEncoder::EncodeOnCodecThread( | 669 int32_t MediaCodecVideoEncoder::EncodeOnCodecThread( |
609 const webrtc::VideoFrame& frame, | 670 const webrtc::VideoFrame& frame, |
610 const std::vector<webrtc::FrameType>* frame_types, | 671 const std::vector<webrtc::FrameType>* frame_types, |
611 const int64_t frame_input_time_ms) { | 672 const int64_t frame_input_time_ms) { |
612 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 673 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
674 if (sw_fallback_required_) { | |
stefan-webrtc
2016/10/06 09:27:47
Optionally remove {}
sakal
2016/10/06 12:10:02
Done.
| |
675 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; | |
676 } | |
613 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 677 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
614 ScopedLocalRefFrame local_ref_frame(jni); | 678 ScopedLocalRefFrame local_ref_frame(jni); |
615 | 679 |
616 if (!inited_) { | 680 if (!inited_) { |
617 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 681 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
618 } | 682 } |
619 | 683 |
620 bool send_key_frame = false; | 684 bool send_key_frame = false; |
621 if (codec_mode_ == webrtc::kRealtimeVideo) { | 685 if (codec_mode_ == webrtc::kRealtimeVideo) { |
622 ++frames_received_since_last_key_; | 686 ++frames_received_since_last_key_; |
623 int64_t now_ms = rtc::TimeMillis(); | 687 int64_t now_ms = rtc::TimeMillis(); |
624 if (last_frame_received_ms_ != -1 && | 688 if (last_frame_received_ms_ != -1 && |
625 (now_ms - last_frame_received_ms_) > kFrameDiffThresholdMs) { | 689 (now_ms - last_frame_received_ms_) > kFrameDiffThresholdMs) { |
626 // Add limit to prevent triggering a key for every frame for very low | 690 // Add limit to prevent triggering a key for every frame for very low |
627 // framerates (e.g. if frame diff > kFrameDiffThresholdMs). | 691 // framerates (e.g. if frame diff > kFrameDiffThresholdMs). |
628 if (frames_received_since_last_key_ > kMinKeyFrameInterval) { | 692 if (frames_received_since_last_key_ > kMinKeyFrameInterval) { |
629 ALOGD << "Send key, frame diff: " << (now_ms - last_frame_received_ms_); | 693 ALOGD << "Send key, frame diff: " << (now_ms - last_frame_received_ms_); |
630 send_key_frame = true; | 694 send_key_frame = true; |
631 } | 695 } |
632 frames_received_since_last_key_ = 0; | 696 frames_received_since_last_key_ = 0; |
633 } | 697 } |
634 last_frame_received_ms_ = now_ms; | 698 last_frame_received_ms_ = now_ms; |
635 } | 699 } |
636 | 700 |
637 frames_received_++; | 701 frames_received_++; |
638 if (!DeliverPendingOutputs(jni)) { | 702 if (!DeliverPendingOutputs(jni)) { |
639 if (!ResetCodecOnCodecThread()) | 703 if (!ProcessHWErrorOnCodecThread(true /* tryResetIfFallbackUnavailable */)) |
stefan-webrtc
2016/10/06 09:27:47
{}
sakal
2016/10/06 12:10:02
Done.
| |
640 return WEBRTC_VIDEO_CODEC_ERROR; | 704 return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE |
705 : WEBRTC_VIDEO_CODEC_ERROR; | |
641 } | 706 } |
642 if (frames_encoded_ < kMaxEncodedLogFrames) { | 707 if (frames_encoded_ < kMaxEncodedLogFrames) { |
643 ALOGD << "Encoder frame in # " << (frames_received_ - 1) | 708 ALOGD << "Encoder frame in # " << (frames_received_ - 1) |
644 << ". TS: " << (int)(current_timestamp_us_ / 1000) | 709 << ". TS: " << (int)(current_timestamp_us_ / 1000) |
645 << ". Q: " << input_frame_infos_.size() << ". Fps: " << last_set_fps_ | 710 << ". Q: " << input_frame_infos_.size() << ". Fps: " << last_set_fps_ |
646 << ". Kbps: " << last_set_bitrate_kbps_; | 711 << ". Kbps: " << last_set_bitrate_kbps_; |
647 } | 712 } |
648 | 713 |
649 if (drop_next_input_frame_) { | 714 if (drop_next_input_frame_) { |
650 ALOGW << "Encoder drop frame - failed callback."; | 715 ALOGW << "Encoder drop frame - failed callback."; |
(...skipping 11 matching lines...) | |
662 if (input_frame_infos_.size() > MAX_ENCODER_Q_SIZE) { | 727 if (input_frame_infos_.size() > MAX_ENCODER_Q_SIZE) { |
663 ALOGD << "Already " << input_frame_infos_.size() | 728 ALOGD << "Already " << input_frame_infos_.size() |
664 << " frames in the queue, dropping" | 729 << " frames in the queue, dropping" |
665 << ". TS: " << (int)(current_timestamp_us_ / 1000) | 730 << ". TS: " << (int)(current_timestamp_us_ / 1000) |
666 << ". Fps: " << last_set_fps_ | 731 << ". Fps: " << last_set_fps_ |
667 << ". Consecutive drops: " << consecutive_full_queue_frame_drops_; | 732 << ". Consecutive drops: " << consecutive_full_queue_frame_drops_; |
668 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; | 733 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; |
669 consecutive_full_queue_frame_drops_++; | 734 consecutive_full_queue_frame_drops_++; |
670 if (consecutive_full_queue_frame_drops_ >= | 735 if (consecutive_full_queue_frame_drops_ >= |
671 ENCODER_STALL_FRAMEDROP_THRESHOLD) { | 736 ENCODER_STALL_FRAMEDROP_THRESHOLD) { |
672 ALOGE << "Encoder got stuck. Reset."; | 737 ALOGE << "Encoder got stuck."; |
673 ResetCodecOnCodecThread(); | 738 return ProcessHWErrorOnEncodeOnCodecThread(); |
674 return WEBRTC_VIDEO_CODEC_ERROR; | |
675 } | 739 } |
676 frames_dropped_media_encoder_++; | 740 frames_dropped_media_encoder_++; |
677 OnDroppedFrameOnCodecThread(); | 741 OnDroppedFrameOnCodecThread(); |
678 return WEBRTC_VIDEO_CODEC_OK; | 742 return WEBRTC_VIDEO_CODEC_OK; |
679 } | 743 } |
680 consecutive_full_queue_frame_drops_ = 0; | 744 consecutive_full_queue_frame_drops_ = 0; |
681 | 745 |
682 rtc::scoped_refptr<webrtc::VideoFrameBuffer> input_buffer( | 746 rtc::scoped_refptr<webrtc::VideoFrameBuffer> input_buffer( |
683 frame.video_frame_buffer()); | 747 frame.video_frame_buffer()); |
684 if (scale_) { | 748 if (scale_) { |
(...skipping 23 matching lines...) | |
708 ALOGE << "Failed to reconfigure encoder."; | 772 ALOGE << "Failed to reconfigure encoder."; |
709 return WEBRTC_VIDEO_CODEC_ERROR; | 773 return WEBRTC_VIDEO_CODEC_ERROR; |
710 } | 774 } |
711 | 775 |
712 const bool key_frame = | 776 const bool key_frame = |
713 frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame; | 777 frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame; |
714 bool encode_status = true; | 778 bool encode_status = true; |
715 if (!input_frame.video_frame_buffer()->native_handle()) { | 779 if (!input_frame.video_frame_buffer()->native_handle()) { |
716 int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_, | 780 int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_, |
717 j_dequeue_input_buffer_method_); | 781 j_dequeue_input_buffer_method_); |
718 CHECK_EXCEPTION(jni); | 782 if (CheckException(jni)) { |
783 ALOGE << "Exception in dequeu input buffer."; | |
784 return ProcessHWErrorOnEncodeOnCodecThread(); | |
785 } | |
719 if (j_input_buffer_index == -1) { | 786 if (j_input_buffer_index == -1) { |
720 // Video codec falls behind - no input buffer available. | 787 // Video codec falls behind - no input buffer available. |
721 ALOGW << "Encoder drop frame - no input buffers available"; | 788 ALOGW << "Encoder drop frame - no input buffers available"; |
722 if (frames_received_ > 1) { | 789 if (frames_received_ > 1) { |
723 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; | 790 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; |
724 frames_dropped_media_encoder_++; | 791 frames_dropped_media_encoder_++; |
725 OnDroppedFrameOnCodecThread(); | 792 OnDroppedFrameOnCodecThread(); |
726 } else { | 793 } else { |
727 // Input buffers are not ready after codec initialization, HW is still | 794 // Input buffers are not ready after codec initialization, HW is still |
728 // allocating them - this is expected and should not result in drop | 795 // allocating them - this is expected and should not result in drop |
729 // frame report. | 796 // frame report. |
730 frames_received_ = 0; | 797 frames_received_ = 0; |
731 } | 798 } |
732 return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887. | 799 return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887. |
733 } else if (j_input_buffer_index == -2) { | 800 } else if (j_input_buffer_index == -2) { |
734 ResetCodecOnCodecThread(); | 801 return ProcessHWErrorOnEncodeOnCodecThread(); |
735 return WEBRTC_VIDEO_CODEC_ERROR; | |
736 } | 802 } |
737 encode_status = EncodeByteBufferOnCodecThread(jni, key_frame, input_frame, | 803 encode_status = EncodeByteBufferOnCodecThread(jni, key_frame, input_frame, |
738 j_input_buffer_index); | 804 j_input_buffer_index); |
739 } else { | 805 } else { |
740 encode_status = EncodeTextureOnCodecThread(jni, key_frame, input_frame); | 806 encode_status = EncodeTextureOnCodecThread(jni, key_frame, input_frame); |
741 } | 807 } |
742 | 808 |
743 if (!encode_status) { | 809 if (!encode_status) { |
744 ALOGE << "Failed encode frame with timestamp: " << input_frame.timestamp(); | 810 ALOGE << "Failed encode frame with timestamp: " << input_frame.timestamp(); |
745 ResetCodecOnCodecThread(); | 811 return ProcessHWErrorOnEncodeOnCodecThread(); |
746 return WEBRTC_VIDEO_CODEC_ERROR; | |
747 } | 812 } |
748 | 813 |
749 // Save input image timestamps for later output. | 814 // Save input image timestamps for later output. |
750 input_frame_infos_.emplace_back( | 815 input_frame_infos_.emplace_back( |
751 frame_input_time_ms, input_frame.timestamp(), | 816 frame_input_time_ms, input_frame.timestamp(), |
752 input_frame.render_time_ms(), input_frame.rotation()); | 817 input_frame.render_time_ms(), input_frame.rotation()); |
753 | 818 |
754 last_input_timestamp_ms_ = | 819 last_input_timestamp_ms_ = |
755 current_timestamp_us_ / rtc::kNumMicrosecsPerMillisec; | 820 current_timestamp_us_ / rtc::kNumMicrosecsPerMillisec; |
756 | 821 |
757 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; | 822 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; |
758 | 823 |
759 codec_thread_->Clear(this); | 824 codec_thread_->Clear(this); |
760 codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollMs, this); | 825 codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollMs, this); |
761 | 826 |
762 if (!DeliverPendingOutputs(jni)) { | 827 if (!DeliverPendingOutputs(jni)) { |
763 ALOGE << "Failed deliver pending outputs."; | 828 return ProcessHWErrorOnEncodeOnCodecThread(); |
764 ResetCodecOnCodecThread(); | |
765 return WEBRTC_VIDEO_CODEC_ERROR; | |
766 } | 829 } |
767 return WEBRTC_VIDEO_CODEC_OK; | 830 return WEBRTC_VIDEO_CODEC_OK; |
768 } | 831 } |
769 | 832 |
770 bool MediaCodecVideoEncoder::MaybeReconfigureEncoderOnCodecThread( | 833 bool MediaCodecVideoEncoder::MaybeReconfigureEncoderOnCodecThread( |
771 const webrtc::VideoFrame& frame) { | 834 const webrtc::VideoFrame& frame) { |
772 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 835 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
773 | 836 |
774 const bool is_texture_frame = | 837 const bool is_texture_frame = |
775 frame.video_frame_buffer()->native_handle() != nullptr; | 838 frame.video_frame_buffer()->native_handle() != nullptr; |
(...skipping 27 matching lines...) | |
803 } | 866 } |
804 | 867 |
805 bool MediaCodecVideoEncoder::EncodeByteBufferOnCodecThread(JNIEnv* jni, | 868 bool MediaCodecVideoEncoder::EncodeByteBufferOnCodecThread(JNIEnv* jni, |
806 bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index) { | 869 bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index) { |
807 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 870 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
808 RTC_CHECK(!use_surface_); | 871 RTC_CHECK(!use_surface_); |
809 | 872 |
810 jobject j_input_buffer = input_buffers_[input_buffer_index]; | 873 jobject j_input_buffer = input_buffers_[input_buffer_index]; |
811 uint8_t* yuv_buffer = | 874 uint8_t* yuv_buffer = |
812 reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer)); | 875 reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer)); |
813 CHECK_EXCEPTION(jni); | 876 if (CheckException(jni)) { |
877 ALOGE << "Exception in get direct buffer address."; | |
878 ProcessHWErrorOnCodecThread(true /* tryResetIfFallbackUnavailable */); | |
879 return false; | |
880 } | |
814 RTC_CHECK(yuv_buffer) << "Indirect buffer??"; | 881 RTC_CHECK(yuv_buffer) << "Indirect buffer??"; |
815 RTC_CHECK(!libyuv::ConvertFromI420( | 882 RTC_CHECK(!libyuv::ConvertFromI420( |
816 frame.video_frame_buffer()->DataY(), | 883 frame.video_frame_buffer()->DataY(), |
817 frame.video_frame_buffer()->StrideY(), | 884 frame.video_frame_buffer()->StrideY(), |
818 frame.video_frame_buffer()->DataU(), | 885 frame.video_frame_buffer()->DataU(), |
819 frame.video_frame_buffer()->StrideU(), | 886 frame.video_frame_buffer()->StrideU(), |
820 frame.video_frame_buffer()->DataV(), | 887 frame.video_frame_buffer()->DataV(), |
821 frame.video_frame_buffer()->StrideV(), | 888 frame.video_frame_buffer()->StrideV(), |
822 yuv_buffer, width_, width_, height_, encoder_fourcc_)) | 889 yuv_buffer, width_, width_, height_, encoder_fourcc_)) |
823 << "ConvertFromI420 failed"; | 890 << "ConvertFromI420 failed"; |
824 | 891 |
825 bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_, | 892 bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_, |
826 j_encode_buffer_method_, | 893 j_encode_buffer_method_, |
827 key_frame, | 894 key_frame, |
828 input_buffer_index, | 895 input_buffer_index, |
829 yuv_size_, | 896 yuv_size_, |
830 current_timestamp_us_); | 897 current_timestamp_us_); |
831 CHECK_EXCEPTION(jni); | 898 if (CheckException(jni)) { |
899 ALOGE << "Exception in encode buffer."; | |
900 ProcessHWErrorOnCodecThread(true /* tryResetIfFallbackUnavailable */); | |
901 return false; | |
902 } | |
832 return encode_status; | 903 return encode_status; |
833 } | 904 } |
834 | 905 |
835 bool MediaCodecVideoEncoder::EncodeTextureOnCodecThread(JNIEnv* jni, | 906 bool MediaCodecVideoEncoder::EncodeTextureOnCodecThread(JNIEnv* jni, |
836 bool key_frame, const webrtc::VideoFrame& frame) { | 907 bool key_frame, const webrtc::VideoFrame& frame) { |
837 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 908 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
838 RTC_CHECK(use_surface_); | 909 RTC_CHECK(use_surface_); |
839 NativeHandleImpl* handle = static_cast<NativeHandleImpl*>( | 910 NativeHandleImpl* handle = static_cast<NativeHandleImpl*>( |
840 frame.video_frame_buffer()->native_handle()); | 911 frame.video_frame_buffer()->native_handle()); |
841 jfloatArray sampling_matrix = handle->sampling_matrix.ToJava(jni); | 912 jfloatArray sampling_matrix = handle->sampling_matrix.ToJava(jni); |
842 bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_, | 913 bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_, |
843 j_encode_texture_method_, | 914 j_encode_texture_method_, |
844 key_frame, | 915 key_frame, |
845 handle->oes_texture_id, | 916 handle->oes_texture_id, |
846 sampling_matrix, | 917 sampling_matrix, |
847 current_timestamp_us_); | 918 current_timestamp_us_); |
848 CHECK_EXCEPTION(jni); | 919 if (CheckException(jni)) { |
920 ALOGE << "Exception in encode texture."; | |
921 ProcessHWErrorOnCodecThread(true /* tryResetIfFallbackUnavailable */); | |
922 return false; | |
923 } | |
849 return encode_status; | 924 return encode_status; |
850 } | 925 } |
851 | 926 |
852 int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread( | 927 int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread( |
853 webrtc::EncodedImageCallback* callback) { | 928 webrtc::EncodedImageCallback* callback) { |
854 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 929 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
855 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 930 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
856 ScopedLocalRefFrame local_ref_frame(jni); | 931 ScopedLocalRefFrame local_ref_frame(jni); |
857 callback_ = callback; | 932 callback_ = callback; |
858 return WEBRTC_VIDEO_CODEC_OK; | 933 return WEBRTC_VIDEO_CODEC_OK; |
859 } | 934 } |
860 | 935 |
861 int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() { | 936 int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() { |
862 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 937 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
863 if (!inited_) { | 938 if (!inited_) { |
864 return WEBRTC_VIDEO_CODEC_OK; | 939 return WEBRTC_VIDEO_CODEC_OK; |
865 } | 940 } |
866 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 941 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
867 ALOGD << "EncoderReleaseOnCodecThread: Frames received: " << | 942 ALOGD << "EncoderReleaseOnCodecThread: Frames received: " << |
868 frames_received_ << ". Encoded: " << frames_encoded_ << | 943 frames_received_ << ". Encoded: " << frames_encoded_ << |
869 ". Dropped: " << frames_dropped_media_encoder_; | 944 ". Dropped: " << frames_dropped_media_encoder_; |
870 ScopedLocalRefFrame local_ref_frame(jni); | 945 ScopedLocalRefFrame local_ref_frame(jni); |
871 for (size_t i = 0; i < input_buffers_.size(); ++i) | 946 for (size_t i = 0; i < input_buffers_.size(); ++i) |
872 jni->DeleteGlobalRef(input_buffers_[i]); | 947 jni->DeleteGlobalRef(input_buffers_[i]); |
873 input_buffers_.clear(); | 948 input_buffers_.clear(); |
874 jni->CallVoidMethod(*j_media_codec_video_encoder_, j_release_method_); | 949 jni->CallVoidMethod(*j_media_codec_video_encoder_, j_release_method_); |
875 CHECK_EXCEPTION(jni); | 950 if (CheckException(jni)) { |
951 ALOGE << "Exception in release."; | |
952 ProcessHWErrorOnCodecThread(false /* tryResetIfFallbackUnavailable */); | |
953 return WEBRTC_VIDEO_CODEC_ERROR; | |
954 } | |
876 rtc::MessageQueueManager::Clear(this); | 955 rtc::MessageQueueManager::Clear(this); |
877 inited_ = false; | 956 inited_ = false; |
878 use_surface_ = false; | 957 use_surface_ = false; |
879 ALOGD << "EncoderReleaseOnCodecThread done."; | 958 ALOGD << "EncoderReleaseOnCodecThread done."; |
880 return WEBRTC_VIDEO_CODEC_OK; | 959 return WEBRTC_VIDEO_CODEC_OK; |
881 } | 960 } |
882 | 961 |
883 int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate, | 962 int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate, |
884 uint32_t frame_rate) { | 963 uint32_t frame_rate) { |
885 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 964 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
965 if (sw_fallback_required_) { | |
stefan-webrtc
2016/10/06 09:27:47
You may remove {}
sakal
2016/10/06 12:10:02
Done.
| |
966 return WEBRTC_VIDEO_CODEC_OK; | |
967 } | |
886 frame_rate = (frame_rate < MAX_ALLOWED_VIDEO_FPS) ? | 968 frame_rate = (frame_rate < MAX_ALLOWED_VIDEO_FPS) ? |
887 frame_rate : MAX_ALLOWED_VIDEO_FPS; | 969 frame_rate : MAX_ALLOWED_VIDEO_FPS; |
888 if (last_set_bitrate_kbps_ == new_bit_rate && | 970 if (last_set_bitrate_kbps_ == new_bit_rate && |
889 last_set_fps_ == frame_rate) { | 971 last_set_fps_ == frame_rate) { |
890 return WEBRTC_VIDEO_CODEC_OK; | 972 return WEBRTC_VIDEO_CODEC_OK; |
891 } | 973 } |
892 if (scale_) { | 974 if (scale_) { |
893 quality_scaler_.ReportFramerate(frame_rate); | 975 quality_scaler_.ReportFramerate(frame_rate); |
894 } | 976 } |
895 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 977 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
896 ScopedLocalRefFrame local_ref_frame(jni); | 978 ScopedLocalRefFrame local_ref_frame(jni); |
897 if (new_bit_rate > 0) { | 979 if (new_bit_rate > 0) { |
898 last_set_bitrate_kbps_ = new_bit_rate; | 980 last_set_bitrate_kbps_ = new_bit_rate; |
899 } | 981 } |
900 if (frame_rate > 0) { | 982 if (frame_rate > 0) { |
901 last_set_fps_ = frame_rate; | 983 last_set_fps_ = frame_rate; |
902 } | 984 } |
903 bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_, | 985 bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_, |
904 j_set_rates_method_, | 986 j_set_rates_method_, |
905 last_set_bitrate_kbps_, | 987 last_set_bitrate_kbps_, |
906 last_set_fps_); | 988 last_set_fps_); |
907 CHECK_EXCEPTION(jni); | 989 if (CheckException(jni) || !ret) { |
908 if (!ret) { | 990 ProcessHWErrorOnCodecThread(true /* tryResetIfFallbackUnavailable */); |
909 ResetCodecOnCodecThread(); | 991 return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_OK |
910 return WEBRTC_VIDEO_CODEC_ERROR; | 992 : WEBRTC_VIDEO_CODEC_ERROR; |
911 } | 993 } |
912 return WEBRTC_VIDEO_CODEC_OK; | 994 return WEBRTC_VIDEO_CODEC_OK; |
913 } | 995 } |
914 | 996 |
915 int MediaCodecVideoEncoder::GetOutputBufferInfoIndex( | 997 int MediaCodecVideoEncoder::GetOutputBufferInfoIndex( |
916 JNIEnv* jni, | 998 JNIEnv* jni, |
917 jobject j_output_buffer_info) { | 999 jobject j_output_buffer_info) { |
918 return GetIntField(jni, j_output_buffer_info, j_info_index_field_); | 1000 return GetIntField(jni, j_output_buffer_info, j_info_index_field_); |
919 } | 1001 } |
920 | 1002 |
(...skipping 15 matching lines...) | |
936 return GetLongField( | 1018 return GetLongField( |
937 jni, j_output_buffer_info, j_info_presentation_timestamp_us_field_); | 1019 jni, j_output_buffer_info, j_info_presentation_timestamp_us_field_); |
938 } | 1020 } |
939 | 1021 |
940 bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) { | 1022 bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) { |
941 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 1023 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
942 | 1024 |
943 while (true) { | 1025 while (true) { |
944 jobject j_output_buffer_info = jni->CallObjectMethod( | 1026 jobject j_output_buffer_info = jni->CallObjectMethod( |
945 *j_media_codec_video_encoder_, j_dequeue_output_buffer_method_); | 1027 *j_media_codec_video_encoder_, j_dequeue_output_buffer_method_); |
946 CHECK_EXCEPTION(jni); | 1028 if (CheckException(jni)) { |
1029 ALOGE << "Exception in set dequeue output buffer."; | |
1030 ProcessHWErrorOnCodecThread(true /* tryResetIfFallbackUnavailable */); | |
1031 return WEBRTC_VIDEO_CODEC_ERROR; | |
1032 } | |
947 if (IsNull(jni, j_output_buffer_info)) { | 1033 if (IsNull(jni, j_output_buffer_info)) { |
948 break; | 1034 break; |
949 } | 1035 } |
950 | 1036 |
951 int output_buffer_index = | 1037 int output_buffer_index = |
952 GetOutputBufferInfoIndex(jni, j_output_buffer_info); | 1038 GetOutputBufferInfoIndex(jni, j_output_buffer_info); |
953 if (output_buffer_index == -1) { | 1039 if (output_buffer_index == -1) { |
954 ResetCodecOnCodecThread(); | 1040 ProcessHWErrorOnCodecThread(true /* tryResetIfFallbackUnavailable */); |
955 return false; | 1041 return false; |
956 } | 1042 } |
957 | 1043 |
958 // Get key and config frame flags. | 1044 // Get key and config frame flags. |
959 jobject j_output_buffer = | 1045 jobject j_output_buffer = |
960 GetOutputBufferInfoBuffer(jni, j_output_buffer_info); | 1046 GetOutputBufferInfoBuffer(jni, j_output_buffer_info); |
961 bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info); | 1047 bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info); |
962 | 1048 |
963 // Get frame timestamps from a queue - for non config frames only. | 1049 // Get frame timestamps from a queue - for non config frames only. |
964 int64_t encoding_start_time_ms = 0; | 1050 int64_t encoding_start_time_ms = 0; |
965 int64_t frame_encoding_time_ms = 0; | 1051 int64_t frame_encoding_time_ms = 0; |
966 last_output_timestamp_ms_ = | 1052 last_output_timestamp_ms_ = |
967 GetOutputBufferInfoPresentationTimestampUs(jni, j_output_buffer_info) / | 1053 GetOutputBufferInfoPresentationTimestampUs(jni, j_output_buffer_info) / |
968 rtc::kNumMicrosecsPerMillisec; | 1054 rtc::kNumMicrosecsPerMillisec; |
969 if (!input_frame_infos_.empty()) { | 1055 if (!input_frame_infos_.empty()) { |
970 const InputFrameInfo& frame_info = input_frame_infos_.front(); | 1056 const InputFrameInfo& frame_info = input_frame_infos_.front(); |
971 output_timestamp_ = frame_info.frame_timestamp; | 1057 output_timestamp_ = frame_info.frame_timestamp; |
972 output_render_time_ms_ = frame_info.frame_render_time_ms; | 1058 output_render_time_ms_ = frame_info.frame_render_time_ms; |
973 output_rotation_ = frame_info.rotation; | 1059 output_rotation_ = frame_info.rotation; |
974 encoding_start_time_ms = frame_info.encode_start_time; | 1060 encoding_start_time_ms = frame_info.encode_start_time; |
975 input_frame_infos_.pop_front(); | 1061 input_frame_infos_.pop_front(); |
976 } | 1062 } |
977 | 1063 |
978 // Extract payload. | 1064 // Extract payload. |
979 size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer); | 1065 size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer); |
980 uint8_t* payload = reinterpret_cast<uint8_t*>( | 1066 uint8_t* payload = reinterpret_cast<uint8_t*>( |
981 jni->GetDirectBufferAddress(j_output_buffer)); | 1067 jni->GetDirectBufferAddress(j_output_buffer)); |
982 CHECK_EXCEPTION(jni); | 1068 if (CheckException(jni)) { |
1069 ALOGE << "Exception in get direct buffer address."; | |
1070 ProcessHWErrorOnCodecThread(true /* tryResetIfFallbackUnavailable */); | |
1071 return false; | |
1072 } | |
983 | 1073 |
984 // Callback - return encoded frame. | 1074 // Callback - return encoded frame. |
985 int32_t callback_status = 0; | 1075 int32_t callback_status = 0; |
986 if (callback_) { | 1076 if (callback_) { |
987 std::unique_ptr<webrtc::EncodedImage> image( | 1077 std::unique_ptr<webrtc::EncodedImage> image( |
988 new webrtc::EncodedImage(payload, payload_size, payload_size)); | 1078 new webrtc::EncodedImage(payload, payload_size, payload_size)); |
989 image->_encodedWidth = width_; | 1079 image->_encodedWidth = width_; |
990 image->_encodedHeight = height_; | 1080 image->_encodedHeight = height_; |
991 image->_timeStamp = output_timestamp_; | 1081 image->_timeStamp = output_timestamp_; |
992 image->capture_time_ms_ = output_render_time_ms_; | 1082 image->capture_time_ms_ = output_render_time_ms_; |
(...skipping 79 matching lines...) | |
1072 } | 1162 } |
1073 scPosition += naluPosition; | 1163 scPosition += naluPosition; |
1074 scPositions[scPositionsLength++] = scPosition; | 1164 scPositions[scPositionsLength++] = scPosition; |
1075 scPosition += H264_SC_LENGTH; | 1165 scPosition += H264_SC_LENGTH; |
1076 } | 1166 } |
1077 if (scPositionsLength == 0) { | 1167 if (scPositionsLength == 0) { |
1078 ALOGE << "Start code is not found!"; | 1168 ALOGE << "Start code is not found!"; |
1079 ALOGE << "Data:" << image->_buffer[0] << " " << image->_buffer[1] | 1169 ALOGE << "Data:" << image->_buffer[0] << " " << image->_buffer[1] |
1080 << " " << image->_buffer[2] << " " << image->_buffer[3] | 1170 << " " << image->_buffer[2] << " " << image->_buffer[3] |
1081 << " " << image->_buffer[4] << " " << image->_buffer[5]; | 1171 << " " << image->_buffer[4] << " " << image->_buffer[5]; |
1082 ResetCodecOnCodecThread(); | 1172 ProcessHWErrorOnCodecThread(true /* tryResetIfFallbackUnavailable */); |
1083 return false; | 1173 return false; |
1084 } | 1174 } |
1085 scPositions[scPositionsLength] = payload_size; | 1175 scPositions[scPositionsLength] = payload_size; |
1086 header.VerifyAndAllocateFragmentationHeader(scPositionsLength); | 1176 header.VerifyAndAllocateFragmentationHeader(scPositionsLength); |
1087 for (size_t i = 0; i < scPositionsLength; i++) { | 1177 for (size_t i = 0; i < scPositionsLength; i++) { |
1088 header.fragmentationOffset[i] = scPositions[i] + H264_SC_LENGTH; | 1178 header.fragmentationOffset[i] = scPositions[i] + H264_SC_LENGTH; |
1089 header.fragmentationLength[i] = | 1179 header.fragmentationLength[i] = |
1090 scPositions[i + 1] - header.fragmentationOffset[i]; | 1180 scPositions[i + 1] - header.fragmentationOffset[i]; |
1091 header.fragmentationPlType[i] = 0; | 1181 header.fragmentationPlType[i] = 0; |
1092 header.fragmentationTimeDiff[i] = 0; | 1182 header.fragmentationTimeDiff[i] = 0; |
1093 } | 1183 } |
1094 } | 1184 } |
1095 | 1185 |
1096 callback_status = callback_->Encoded(*image, &info, &header); | 1186 callback_status = callback_->Encoded(*image, &info, &header); |
1097 } | 1187 } |
1098 | 1188 |
1099 // Return output buffer back to the encoder. | 1189 // Return output buffer back to the encoder. |
1100 bool success = jni->CallBooleanMethod(*j_media_codec_video_encoder_, | 1190 bool success = jni->CallBooleanMethod(*j_media_codec_video_encoder_, |
1101 j_release_output_buffer_method_, | 1191 j_release_output_buffer_method_, |
1102 output_buffer_index); | 1192 output_buffer_index); |
1103 CHECK_EXCEPTION(jni); | 1193 if (CheckException(jni) || !success) { |
1104 if (!success) { | 1194 ProcessHWErrorOnCodecThread(true /* tryResetIfFallbackUnavailable */); |
1105 ResetCodecOnCodecThread(); | |
1106 return false; | 1195 return false; |
1107 } | 1196 } |
1108 | 1197 |
1109 // Print per frame statistics. | 1198 // Print per frame statistics. |
1110 if (encoding_start_time_ms > 0) { | 1199 if (encoding_start_time_ms > 0) { |
1111 frame_encoding_time_ms = rtc::TimeMillis() - encoding_start_time_ms; | 1200 frame_encoding_time_ms = rtc::TimeMillis() - encoding_start_time_ms; |
1112 } | 1201 } |
1113 if (frames_encoded_ < kMaxEncodedLogFrames) { | 1202 if (frames_encoded_ < kMaxEncodedLogFrames) { |
1114 int current_latency = | 1203 int current_latency = |
1115 (int)(last_input_timestamp_ms_ - last_output_timestamp_ms_); | 1204 (int)(last_input_timestamp_ms_ - last_output_timestamp_ms_); |
(...skipping 181 matching lines...) | |
1297 return supported_codecs_; | 1386 return supported_codecs_; |
1298 } | 1387 } |
1299 | 1388 |
1300 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( | 1389 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( |
1301 webrtc::VideoEncoder* encoder) { | 1390 webrtc::VideoEncoder* encoder) { |
1302 ALOGD << "Destroy video encoder."; | 1391 ALOGD << "Destroy video encoder."; |
1303 delete encoder; | 1392 delete encoder; |
1304 } | 1393 } |
1305 | 1394 |
1306 } // namespace webrtc_jni | 1395 } // namespace webrtc_jni |
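The error-handling policy introduced by this patch can be summarized as: on any OMX/JNI failure, prefer marking the encoder for software fallback when a software implementation of the codec exists (VideoEncoder::IsSupported), and only attempt a codec reset when no fallback is available; once sw_fallback_required_ is set, Encode() reports WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE so the layer above can swap encoders, while the other entry points simply return OK. The stand-alone sketch below mirrors that decision tree with simplified stand-in types; the names are illustrative, not WebRTC APIs, and only the control flow follows the patch.

    // Stand-alone sketch of the fallback policy in this patch. The types and
    // constants here are simplified stand-ins, not the real WebRTC interfaces.
    #include <iostream>

    enum Status { kOk, kError, kFallbackSoftware };

    struct FakeHwEncoder {
      bool sw_codec_supported = true;    // Stands in for VideoEncoder::IsSupported(...).
      bool sw_fallback_required = false;

      bool ResetCodec() { return true; } // Stands in for Release + InitEncode on the codec thread.

      // Mirrors ProcessHWErrorOnCodecThread(bool try_reset_if_fallback_unavailable).
      bool ProcessHwError(bool try_reset_if_fallback_unavailable) {
        if (sw_codec_supported) {
          sw_fallback_required = true;   // Future Encode() calls report fallback.
          return false;
        }
        return try_reset_if_fallback_unavailable ? ResetCodec() : false;
      }

      // Mirrors ProcessHWErrorOnEncodeOnCodecThread().
      Status ProcessHwErrorOnEncode() {
        ProcessHwError(/*try_reset_if_fallback_unavailable=*/true);
        return sw_fallback_required ? kFallbackSoftware : kError;
      }
    };

    int main() {
      FakeHwEncoder encoder;
      // A HW failure during Encode(): the caller is told to switch to software.
      std::cout << (encoder.ProcessHwErrorOnEncode() == kFallbackSoftware) << "\n";  // Prints 1.
      return 0;
    }

In the patch itself the same outcome is expressed with the real WEBRTC_VIDEO_CODEC_* return codes, and the false argument is passed from the init/release paths so that error handling does not recurse back into InitEncodeOnCodecThread.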