Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 /* | 1 /* |
| 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| (...skipping 111 matching lines...) | |
| 122 | 122 |
| 123 void OnDroppedFrame() override; | 123 void OnDroppedFrame() override; |
| 124 | 124 |
| 125 bool SupportsNativeHandle() const override { return egl_context_ != nullptr; } | 125 bool SupportsNativeHandle() const override { return egl_context_ != nullptr; } |
| 126 const char* ImplementationName() const override; | 126 const char* ImplementationName() const override; |
| 127 | 127 |
| 128 private: | 128 private: |
| 129 // ResetCodecOnCodecThread() calls ReleaseOnCodecThread() and | 129 // ResetCodecOnCodecThread() calls ReleaseOnCodecThread() and |
| 130 // InitEncodeOnCodecThread() in an attempt to restore the codec to an | 130 // InitEncodeOnCodecThread() in an attempt to restore the codec to an |
| 131 // operable state. Necessary after all manner of OMX-layer errors. | 131 // operable state. Necessary after all manner of OMX-layer errors. |
| 132 // Returns true if the codec was reset successfully. | |
| 132 bool ResetCodecOnCodecThread(); | 133 bool ResetCodecOnCodecThread(); |
| 133 | 134 |
| 135 // Fallback to a software encoder if one is supported else try to reset the | |
| 136 // encoder. Called with tryResetIfFallbackUnavailable equal to false from | |
| 137 // init/release encoder so that we don't go into infinite recursion. | |
| 138 // Returns true if the codec was reset successfully. | |
| 139 bool ProcessHWErrorOnCodecThread(bool tryResetIfFallbackUnavailable); | |
| 140 | |
| 134 // Implementation of webrtc::VideoEncoder methods above, all running on the | 141 // Implementation of webrtc::VideoEncoder methods above, all running on the |
| 135 // codec thread exclusively. | 142 // codec thread exclusively. |
| 136 // | 143 // |
| 137 // If width==0 then this is assumed to be a re-initialization and the | 144 // If width==0 then this is assumed to be a re-initialization and the |
| 138 // previously-current values are reused instead of the passed parameters | 145 // previously-current values are reused instead of the passed parameters |
| 139 // (makes it easier to reason about thread-safety). | 146 // (makes it easier to reason about thread-safety). |
| 140 int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps, | 147 int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps, |
| 141 bool use_surface); | 148 bool use_surface); |
| 142 // Reconfigure to match |frame| in width, height. Also reconfigures the | 149 // Reconfigure to match |frame| in width, height. Also reconfigures the |
| 143 // encoder if |frame| is a texture/byte buffer and the encoder is initialized | 150 // encoder if |frame| is a texture/byte buffer and the encoder is initialized |
| (...skipping 131 matching lines...) | |
| 275 // EGL context - owned by factory, should not be allocated/destroyed | 282 // EGL context - owned by factory, should not be allocated/destroyed |
| 276 // by MediaCodecVideoEncoder. | 283 // by MediaCodecVideoEncoder. |
| 277 jobject egl_context_; | 284 jobject egl_context_; |
| 278 | 285 |
| 279 // Temporary fix for VP8. | 286 // Temporary fix for VP8. |
| 280 // Sends a key frame if frames are largely spaced apart (possibly | 287 // Sends a key frame if frames are largely spaced apart (possibly |
| 281 // corresponding to a large image change). | 288 // corresponding to a large image change). |
| 282 int64_t last_frame_received_ms_; | 289 int64_t last_frame_received_ms_; |
| 283 int frames_received_since_last_key_; | 290 int frames_received_since_last_key_; |
| 284 webrtc::VideoCodecMode codec_mode_; | 291 webrtc::VideoCodecMode codec_mode_; |
| 292 | |
| 293 bool sw_fallback_required_; | |
| 285 }; | 294 }; |
| 286 | 295 |
| 287 MediaCodecVideoEncoder::~MediaCodecVideoEncoder() { | 296 MediaCodecVideoEncoder::~MediaCodecVideoEncoder() { |
| 288 // Call Release() to ensure no more callbacks to us after we are deleted. | 297 // Call Release() to ensure no more callbacks to us after we are deleted. |
| 289 Release(); | 298 Release(); |
| 290 } | 299 } |
| 291 | 300 |
| 292 MediaCodecVideoEncoder::MediaCodecVideoEncoder( | 301 MediaCodecVideoEncoder::MediaCodecVideoEncoder(JNIEnv* jni, |
| 293 JNIEnv* jni, VideoCodecType codecType, jobject egl_context) : | 302 VideoCodecType codecType, |
| 294 codecType_(codecType), | 303 jobject egl_context) |
| 295 callback_(NULL), | 304 : codecType_(codecType), |
| 296 codec_thread_(new Thread()), | 305 callback_(NULL), |
| 297 j_media_codec_video_encoder_class_( | 306 codec_thread_(new Thread()), |
| 298 jni, | 307 j_media_codec_video_encoder_class_( |
| 299 FindClass(jni, "org/webrtc/MediaCodecVideoEncoder")), | 308 jni, |
| 300 j_media_codec_video_encoder_( | 309 FindClass(jni, "org/webrtc/MediaCodecVideoEncoder")), |
| 301 jni, | 310 j_media_codec_video_encoder_( |
| 302 jni->NewObject(*j_media_codec_video_encoder_class_, | 311 jni, |
| 303 GetMethodID(jni, | 312 jni->NewObject(*j_media_codec_video_encoder_class_, |
| 304 *j_media_codec_video_encoder_class_, | 313 GetMethodID(jni, |
| 305 "<init>", | 314 *j_media_codec_video_encoder_class_, |
| 306 "()V"))), | 315 "<init>", |
| 307 inited_(false), | 316 "()V"))), |
| 308 use_surface_(false), | 317 inited_(false), |
| 309 picture_id_(0), | 318 use_surface_(false), |
| 310 egl_context_(egl_context) { | 319 picture_id_(0), |
| 320 egl_context_(egl_context), | |
| 321 sw_fallback_required_(false) { | |
| 311 ScopedLocalRefFrame local_ref_frame(jni); | 322 ScopedLocalRefFrame local_ref_frame(jni); |
| 312 // It would be nice to avoid spinning up a new thread per MediaCodec, and | 323 // It would be nice to avoid spinning up a new thread per MediaCodec, and |
| 313 // instead re-use e.g. the PeerConnectionFactory's |worker_thread_|, but bug | 324 // instead re-use e.g. the PeerConnectionFactory's |worker_thread_|, but bug |
| 314 // 2732 means that deadlocks abound. This class synchronously trampolines | 325 // 2732 means that deadlocks abound. This class synchronously trampolines |
| 315 // to |codec_thread_|, so if anything else can be coming to _us_ from | 326 // to |codec_thread_|, so if anything else can be coming to _us_ from |
| 316 // |codec_thread_|, or from any thread holding the |_sendCritSect| described | 327 // |codec_thread_|, or from any thread holding the |_sendCritSect| described |
| 317 // in the bug, we have a problem. For now work around that with a dedicated | 328 // in the bug, we have a problem. For now work around that with a dedicated |
| 318 // thread. | 329 // thread. |
| 319 codec_thread_->SetName("MediaCodecVideoEncoder", NULL); | 330 codec_thread_->SetName("MediaCodecVideoEncoder", NULL); |
| 320 RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoEncoder"; | 331 RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoEncoder"; |
| (...skipping 33 matching lines...) | |
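The "synchronously trampolines" comment above is the key to this file's threading model: every public `webrtc::VideoEncoder` entry point blocks and forwards to `codec_thread_`, where the `*OnCodecThread()` variants run. Those call sites fall in the lines skipped by the diff viewer, so the following is only a reconstruction of that idiom from the signatures visible in this diff, not code quoted from the CL.

```cpp
// Reconstruction of the synchronous trampoline described in the comment
// above; the actual call sites are in the lines elided by the diff viewer.
int32_t MediaCodecVideoEncoder::Encode(
    const webrtc::VideoFrame& frame,
    const webrtc::CodecSpecificInfo* /* codec_specific_info */,
    const std::vector<webrtc::FrameType>* frame_types) {
  // Block the calling thread until EncodeOnCodecThread() completes on
  // |codec_thread_| and propagate its WEBRTC_VIDEO_CODEC_* result.
  return codec_thread_->Invoke<int32_t>(
      RTC_FROM_HERE,
      rtc::Bind(&MediaCodecVideoEncoder::EncodeOnCodecThread, this, frame,
                frame_types, rtc::TimeMillis()));
}
```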
| 354 j_color_format_field_ = | 365 j_color_format_field_ = |
| 355 GetFieldID(jni, *j_media_codec_video_encoder_class_, "colorFormat", "I"); | 366 GetFieldID(jni, *j_media_codec_video_encoder_class_, "colorFormat", "I"); |
| 356 j_info_index_field_ = | 367 j_info_index_field_ = |
| 357 GetFieldID(jni, j_output_buffer_info_class, "index", "I"); | 368 GetFieldID(jni, j_output_buffer_info_class, "index", "I"); |
| 358 j_info_buffer_field_ = GetFieldID( | 369 j_info_buffer_field_ = GetFieldID( |
| 359 jni, j_output_buffer_info_class, "buffer", "Ljava/nio/ByteBuffer;"); | 370 jni, j_output_buffer_info_class, "buffer", "Ljava/nio/ByteBuffer;"); |
| 360 j_info_is_key_frame_field_ = | 371 j_info_is_key_frame_field_ = |
| 361 GetFieldID(jni, j_output_buffer_info_class, "isKeyFrame", "Z"); | 372 GetFieldID(jni, j_output_buffer_info_class, "isKeyFrame", "Z"); |
| 362 j_info_presentation_timestamp_us_field_ = GetFieldID( | 373 j_info_presentation_timestamp_us_field_ = GetFieldID( |
| 363 jni, j_output_buffer_info_class, "presentationTimestampUs", "J"); | 374 jni, j_output_buffer_info_class, "presentationTimestampUs", "J"); |
| 364 CHECK_EXCEPTION(jni) << "MediaCodecVideoEncoder ctor failed"; | 375 if (CheckException(jni)) { |
| 376 ALOGW << "MediaCodecVideoEncoder ctor failed."; | |
| 377 ProcessHWErrorOnCodecThread(true /* tryResetIfFallbackUnavailable */); | |
| 378 } | |
| 365 srand(time(NULL)); | 379 srand(time(NULL)); |
| 366 AllowBlockingCalls(); | 380 AllowBlockingCalls(); |
| 367 } | 381 } |
| 368 | 382 |
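The recurring edit in this CL replaces the fatal `CHECK_EXCEPTION(jni)` macro with a recoverable `if (CheckException(jni))` path that funnels into `ProcessHWErrorOnCodecThread()`. The helper itself is defined in the shared JNI utilities rather than in this diff; the sketch below only shows the standard JNI calls such a helper is assumed to wrap.

```cpp
// Assumed shape of the CheckException() helper relied on throughout this CL;
// the real implementation lives in the common JNI helpers and may differ.
static bool CheckException(JNIEnv* jni) {
  if (jni->ExceptionCheck()) {  // A Java exception is pending.
    jni->ExceptionDescribe();   // Log it (stack trace goes to logcat).
    jni->ExceptionClear();      // Clear it so later JNI calls stay valid.
    return true;                // Caller decides: reset codec or SW fallback.
  }
  return false;
}
```

The difference from `CHECK_EXCEPTION` is what makes the rest of the CL possible: instead of aborting, the encoder gets a chance to reset itself or set `sw_fallback_required_`.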
| 369 int32_t MediaCodecVideoEncoder::InitEncode( | 383 int32_t MediaCodecVideoEncoder::InitEncode( |
| 370 const webrtc::VideoCodec* codec_settings, | 384 const webrtc::VideoCodec* codec_settings, |
| 371 int32_t /* number_of_cores */, | 385 int32_t /* number_of_cores */, |
| 372 size_t /* max_payload_size */) { | 386 size_t /* max_payload_size */) { |
| 373 if (codec_settings == NULL) { | 387 if (codec_settings == NULL) { |
| 374 ALOGE << "NULL VideoCodec instance"; | 388 ALOGE << "NULL VideoCodec instance"; |
| 375 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 389 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 376 } | 390 } |
| 377 // Factory should guard against other codecs being used with us. | 391 // Factory should guard against other codecs being used with us. |
| 378 RTC_CHECK(codec_settings->codecType == codecType_) | 392 RTC_CHECK(codec_settings->codecType == codecType_) |
| 379 << "Unsupported codec " << codec_settings->codecType << " for " | 393 << "Unsupported codec " << codec_settings->codecType << " for " |
| 380 << codecType_; | 394 << codecType_; |
| 381 | 395 if (sw_fallback_required_) { |
| 396 return WEBRTC_VIDEO_CODEC_ERROR; | |
AlexG (2016/09/09 21:55:17): WEBRTC_VIDEO_CODEC_OK - similarly to InitDecode()
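A sketch of what the suggested change would look like (hypothetical, not part of this patch set): report success when software fallback has already been flagged, so the software-fallback wrapper takes over, mirroring the `InitDecode()` handling the reviewer cites.

```cpp
// Reviewer-suggested variant (hypothetical): report OK instead of ERROR so
// the software encoder can be picked up, as InitDecode() reportedly does.
if (sw_fallback_required_) {
  ALOGE << "SW fallback already requested - reporting OK from InitEncode.";
  return WEBRTC_VIDEO_CODEC_OK;
}
```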
| 397 } | |
| 382 codec_mode_ = codec_settings->mode; | 398 codec_mode_ = codec_settings->mode; |
| 383 int init_width = codec_settings->width; | 399 int init_width = codec_settings->width; |
| 384 int init_height = codec_settings->height; | 400 int init_height = codec_settings->height; |
| 385 // Scaling is disabled for VP9, but optionally enabled for VP8. | 401 // Scaling is disabled for VP9, but optionally enabled for VP8. |
| 386 // TODO(pbos): Extract automaticResizeOn out of VP8 settings. | 402 // TODO(pbos): Extract automaticResizeOn out of VP8 settings. |
| 387 scale_ = false; | 403 scale_ = false; |
| 388 if (codecType_ == kVideoCodecVP8) { | 404 if (codecType_ == kVideoCodecVP8) { |
| 389 scale_ = codec_settings->codecSpecific.VP8.automaticResizeOn; | 405 scale_ = codec_settings->codecSpecific.VP8.automaticResizeOn; |
| 390 } else if (codecType_ != kVideoCodecVP9) { | 406 } else if (codecType_ != kVideoCodecVP9) { |
| 391 scale_ = true; | 407 scale_ = true; |
| (...skipping 95 matching lines...) | |
| 487 } | 503 } |
| 488 | 504 |
| 489 // Call log statistics here so it's called even if no frames are being | 505 // Call log statistics here so it's called even if no frames are being |
| 490 // delivered. | 506 // delivered. |
| 491 LogStatistics(false); | 507 LogStatistics(false); |
| 492 } | 508 } |
| 493 | 509 |
| 494 bool MediaCodecVideoEncoder::ResetCodecOnCodecThread() { | 510 bool MediaCodecVideoEncoder::ResetCodecOnCodecThread() { |
| 495 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 511 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
| 496 ALOGE << "ResetOnCodecThread"; | 512 ALOGE << "ResetOnCodecThread"; |
| 497 if (ReleaseOnCodecThread() != WEBRTC_VIDEO_CODEC_OK || | 513 if (ReleaseOnCodecThread() != WEBRTC_VIDEO_CODEC_OK) { |
| 498 InitEncodeOnCodecThread(width_, height_, 0, 0, false) != | 514 ALOGE << "Releasing codec failed during reset."; |
| 499 WEBRTC_VIDEO_CODEC_OK) { | 515 return false; |
| 500 // TODO(fischman): wouldn't it be nice if there was a way to gracefully | 516 } |
| 501 // degrade to a SW encoder at this point? There isn't one AFAICT :( | 517 if (InitEncodeOnCodecThread(width_, height_, 0, 0, false) != |
| 502 // https://code.google.com/p/webrtc/issues/detail?id=2920 | 518 WEBRTC_VIDEO_CODEC_OK) { |
| 519 ALOGE << "Initializing encoder failed during reset."; | |
| 503 return false; | 520 return false; |
| 504 } | 521 } |
| 505 return true; | 522 return true; |
| 506 } | 523 } |
| 507 | 524 |
| 525 bool MediaCodecVideoEncoder::ProcessHWErrorOnCodecThread( | |
| 526 bool tryResetIfFallbackUnavailable) { | |
| 527 ALOGE << "ProcessHWErrorOnCodecThread"; | |
| 528 if (VideoEncoder::IsSupported(VideoEncoder::CodecToEncoderType(codecType_))) { | |
| 529 ALOGE << "Fallback to SW encoder."; | |
| 530 sw_fallback_required_ = true; | |
| 531 return false; | |
| 532 } else if (tryResetIfFallbackUnavailable) { | |
| 533 ALOGE << "Reset encoder."; | |
| 534 return ResetCodecOnCodecThread(); | |
| 535 } | |
| 536 return false; | |
AlexG (2016/09/09 21:55:17): nit: not necessary return here
| 537 } | |
| 538 | |
| 508 int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread( | 539 int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread( |
| 509 int width, int height, int kbps, int fps, bool use_surface) { | 540 int width, int height, int kbps, int fps, bool use_surface) { |
| 510 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 541 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
| 542 if (sw_fallback_required_) { | |
| 543 return WEBRTC_VIDEO_CODEC_ERROR; | |
AlexG (2016/09/09 21:55:17): WEBRTC_VIDEO_CODEC_OK?
| 544 } | |
| 511 RTC_CHECK(!use_surface || egl_context_ != nullptr) << "EGL context not set."; | 545 RTC_CHECK(!use_surface || egl_context_ != nullptr) << "EGL context not set."; |
| 512 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 546 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 513 ScopedLocalRefFrame local_ref_frame(jni); | 547 ScopedLocalRefFrame local_ref_frame(jni); |
| 514 | 548 |
| 515 ALOGD << "InitEncodeOnCodecThread Type: " << (int)codecType_ << ", " << | 549 ALOGD << "InitEncodeOnCodecThread Type: " << (int)codecType_ << ", " << |
| 516 width << " x " << height << ". Bitrate: " << kbps << | 550 width << " x " << height << ". Bitrate: " << kbps << |
| 517 " kbps. Fps: " << fps; | 551 " kbps. Fps: " << fps; |
| 518 if (kbps == 0) { | 552 if (kbps == 0) { |
| 519 kbps = last_set_bitrate_kbps_; | 553 kbps = last_set_bitrate_kbps_; |
| 520 } | 554 } |
| (...skipping 32 matching lines...) | |
| 553 | 587 |
| 554 // We enforce no extra stride/padding in the format creation step. | 588 // We enforce no extra stride/padding in the format creation step. |
| 555 jobject j_video_codec_enum = JavaEnumFromIndexAndClassName( | 589 jobject j_video_codec_enum = JavaEnumFromIndexAndClassName( |
| 556 jni, "MediaCodecVideoEncoder$VideoCodecType", codecType_); | 590 jni, "MediaCodecVideoEncoder$VideoCodecType", codecType_); |
| 557 const bool encode_status = jni->CallBooleanMethod( | 591 const bool encode_status = jni->CallBooleanMethod( |
| 558 *j_media_codec_video_encoder_, j_init_encode_method_, | 592 *j_media_codec_video_encoder_, j_init_encode_method_, |
| 559 j_video_codec_enum, width, height, kbps, fps, | 593 j_video_codec_enum, width, height, kbps, fps, |
| 560 (use_surface ? egl_context_ : nullptr)); | 594 (use_surface ? egl_context_ : nullptr)); |
| 561 if (!encode_status) { | 595 if (!encode_status) { |
| 562 ALOGE << "Failed to configure encoder."; | 596 ALOGE << "Failed to configure encoder."; |
| 597 ProcessHWErrorOnCodecThread(false /* tryResetIfFallbackUnavailable */); | |
| 563 return WEBRTC_VIDEO_CODEC_ERROR; | 598 return WEBRTC_VIDEO_CODEC_ERROR; |
| 564 } | 599 } |
| 565 CHECK_EXCEPTION(jni); | 600 if (CheckException(jni)) { |
| 601 ALOGE << "Exception in init encode."; | |
| 602 ProcessHWErrorOnCodecThread(false /* tryResetIfFallbackUnavailable */); | |
| 603 return WEBRTC_VIDEO_CODEC_ERROR; | |
| 604 } | |
| 566 | 605 |
| 567 if (!use_surface) { | 606 if (!use_surface) { |
| 568 jobjectArray input_buffers = reinterpret_cast<jobjectArray>( | 607 jobjectArray input_buffers = reinterpret_cast<jobjectArray>( |
| 569 jni->CallObjectMethod(*j_media_codec_video_encoder_, | 608 jni->CallObjectMethod(*j_media_codec_video_encoder_, |
| 570 j_get_input_buffers_method_)); | 609 j_get_input_buffers_method_)); |
| 571 CHECK_EXCEPTION(jni); | 610 if (CheckException(jni)) { |
| 572 if (IsNull(jni, input_buffers)) { | 611 ALOGE << "Exception in get input buffers."; |
| 612 ProcessHWErrorOnCodecThread(false /* tryResetIfFallbackUnavailable */); | |
| 573 return WEBRTC_VIDEO_CODEC_ERROR; | 613 return WEBRTC_VIDEO_CODEC_ERROR; |
| 574 } | 614 } |
| 575 | 615 |
| 616 if (IsNull(jni, input_buffers)) { | |
| 617 ProcessHWErrorOnCodecThread(false /* tryResetIfFallbackUnavailable */); | |
| 618 return WEBRTC_VIDEO_CODEC_ERROR; | |
| 619 } | |
| 620 | |
| 576 switch (GetIntField(jni, *j_media_codec_video_encoder_, | 621 switch (GetIntField(jni, *j_media_codec_video_encoder_, |
| 577 j_color_format_field_)) { | 622 j_color_format_field_)) { |
| 578 case COLOR_FormatYUV420Planar: | 623 case COLOR_FormatYUV420Planar: |
| 579 encoder_fourcc_ = libyuv::FOURCC_YU12; | 624 encoder_fourcc_ = libyuv::FOURCC_YU12; |
| 580 break; | 625 break; |
| 581 case COLOR_FormatYUV420SemiPlanar: | 626 case COLOR_FormatYUV420SemiPlanar: |
| 582 case COLOR_QCOM_FormatYUV420SemiPlanar: | 627 case COLOR_QCOM_FormatYUV420SemiPlanar: |
| 583 case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m: | 628 case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m: |
| 584 encoder_fourcc_ = libyuv::FOURCC_NV12; | 629 encoder_fourcc_ = libyuv::FOURCC_NV12; |
| 585 break; | 630 break; |
| 586 default: | 631 default: |
| 587 LOG(LS_ERROR) << "Wrong color format."; | 632 LOG(LS_ERROR) << "Wrong color format."; |
| 633 ProcessHWErrorOnCodecThread(false /* tryResetIfFallbackUnavailable */); | |
| 588 return WEBRTC_VIDEO_CODEC_ERROR; | 634 return WEBRTC_VIDEO_CODEC_ERROR; |
| 589 } | 635 } |
| 590 size_t num_input_buffers = jni->GetArrayLength(input_buffers); | 636 size_t num_input_buffers = jni->GetArrayLength(input_buffers); |
| 591 RTC_CHECK(input_buffers_.empty()) | 637 RTC_CHECK(input_buffers_.empty()) |
| 592 << "Unexpected double InitEncode without Release"; | 638 << "Unexpected double InitEncode without Release"; |
| 593 input_buffers_.resize(num_input_buffers); | 639 input_buffers_.resize(num_input_buffers); |
| 594 for (size_t i = 0; i < num_input_buffers; ++i) { | 640 for (size_t i = 0; i < num_input_buffers; ++i) { |
| 595 input_buffers_[i] = | 641 input_buffers_[i] = |
| 596 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i)); | 642 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i)); |
| 597 int64_t yuv_buffer_capacity = | 643 int64_t yuv_buffer_capacity = |
| 598 jni->GetDirectBufferCapacity(input_buffers_[i]); | 644 jni->GetDirectBufferCapacity(input_buffers_[i]); |
| 599 CHECK_EXCEPTION(jni); | 645 if (CheckException(jni)) { |
| 646 ALOGE << "Exception in get direct buffer capacity."; | |
| 647 ProcessHWErrorOnCodecThread(false /* tryResetIfFallbackUnavailable */); | |
| 648 return WEBRTC_VIDEO_CODEC_ERROR; | |
| 649 } | |
| 600 RTC_CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity"; | 650 RTC_CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity"; |
| 601 } | 651 } |
| 602 } | 652 } |
| 603 | 653 |
| 604 inited_ = true; | 654 inited_ = true; |
| 605 return WEBRTC_VIDEO_CODEC_OK; | 655 return WEBRTC_VIDEO_CODEC_OK; |
| 606 } | 656 } |
| 607 | 657 |
| 608 int32_t MediaCodecVideoEncoder::EncodeOnCodecThread( | 658 int32_t MediaCodecVideoEncoder::EncodeOnCodecThread( |
| 609 const webrtc::VideoFrame& frame, | 659 const webrtc::VideoFrame& frame, |
| 610 const std::vector<webrtc::FrameType>* frame_types, | 660 const std::vector<webrtc::FrameType>* frame_types, |
| 611 const int64_t frame_input_time_ms) { | 661 const int64_t frame_input_time_ms) { |
| 612 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 662 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
| 663 if (sw_fallback_required_) { | |
| 664 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; | |
| 665 } | |
| 613 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 666 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 614 ScopedLocalRefFrame local_ref_frame(jni); | 667 ScopedLocalRefFrame local_ref_frame(jni); |
| 615 | 668 |
| 616 if (!inited_) { | 669 if (!inited_) { |
| 617 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 670 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
| 618 } | 671 } |
| 619 | 672 |
| 620 bool send_key_frame = false; | 673 bool send_key_frame = false; |
| 621 if (codec_mode_ == webrtc::kRealtimeVideo) { | 674 if (codec_mode_ == webrtc::kRealtimeVideo) { |
| 622 ++frames_received_since_last_key_; | 675 ++frames_received_since_last_key_; |
| 623 int64_t now_ms = rtc::TimeMillis(); | 676 int64_t now_ms = rtc::TimeMillis(); |
| 624 if (last_frame_received_ms_ != -1 && | 677 if (last_frame_received_ms_ != -1 && |
| 625 (now_ms - last_frame_received_ms_) > kFrameDiffThresholdMs) { | 678 (now_ms - last_frame_received_ms_) > kFrameDiffThresholdMs) { |
| 626 // Add limit to prevent triggering a key for every frame for very low | 679 // Add limit to prevent triggering a key for every frame for very low |
| 627 // framerates (e.g. if frame diff > kFrameDiffThresholdMs). | 680 // framerates (e.g. if frame diff > kFrameDiffThresholdMs). |
| 628 if (frames_received_since_last_key_ > kMinKeyFrameInterval) { | 681 if (frames_received_since_last_key_ > kMinKeyFrameInterval) { |
| 629 ALOGD << "Send key, frame diff: " << (now_ms - last_frame_received_ms_); | 682 ALOGD << "Send key, frame diff: " << (now_ms - last_frame_received_ms_); |
| 630 send_key_frame = true; | 683 send_key_frame = true; |
| 631 } | 684 } |
| 632 frames_received_since_last_key_ = 0; | 685 frames_received_since_last_key_ = 0; |
| 633 } | 686 } |
| 634 last_frame_received_ms_ = now_ms; | 687 last_frame_received_ms_ = now_ms; |
| 635 } | 688 } |
| 636 | 689 |
| 637 frames_received_++; | 690 frames_received_++; |
| 638 if (!DeliverPendingOutputs(jni)) { | 691 if (!DeliverPendingOutputs(jni)) { |
| 639 if (!ResetCodecOnCodecThread()) | 692 if (!ProcessHWErrorOnCodecThread(true /* tryResetIfFallbackUnavailable */)) |
| 640 return WEBRTC_VIDEO_CODEC_ERROR; | 693 return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE |
| 694 : WEBRTC_VIDEO_CODEC_ERROR; | |
| 641 } | 695 } |
| 642 if (frames_encoded_ < kMaxEncodedLogFrames) { | 696 if (frames_encoded_ < kMaxEncodedLogFrames) { |
| 643 ALOGD << "Encoder frame in # " << (frames_received_ - 1) | 697 ALOGD << "Encoder frame in # " << (frames_received_ - 1) |
| 644 << ". TS: " << (int)(current_timestamp_us_ / 1000) | 698 << ". TS: " << (int)(current_timestamp_us_ / 1000) |
| 645 << ". Q: " << input_frame_infos_.size() << ". Fps: " << last_set_fps_ | 699 << ". Q: " << input_frame_infos_.size() << ". Fps: " << last_set_fps_ |
| 646 << ". Kbps: " << last_set_bitrate_kbps_; | 700 << ". Kbps: " << last_set_bitrate_kbps_; |
| 647 } | 701 } |
| 648 | 702 |
| 649 if (drop_next_input_frame_) { | 703 if (drop_next_input_frame_) { |
| 650 ALOGW << "Encoder drop frame - failed callback."; | 704 ALOGW << "Encoder drop frame - failed callback."; |
| (...skipping 11 matching lines...) | |
| 662 if (input_frame_infos_.size() > MAX_ENCODER_Q_SIZE) { | 716 if (input_frame_infos_.size() > MAX_ENCODER_Q_SIZE) { |
| 663 ALOGD << "Already " << input_frame_infos_.size() | 717 ALOGD << "Already " << input_frame_infos_.size() |
| 664 << " frames in the queue, dropping" | 718 << " frames in the queue, dropping" |
| 665 << ". TS: " << (int)(current_timestamp_us_ / 1000) | 719 << ". TS: " << (int)(current_timestamp_us_ / 1000) |
| 666 << ". Fps: " << last_set_fps_ | 720 << ". Fps: " << last_set_fps_ |
| 667 << ". Consecutive drops: " << consecutive_full_queue_frame_drops_; | 721 << ". Consecutive drops: " << consecutive_full_queue_frame_drops_; |
| 668 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; | 722 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; |
| 669 consecutive_full_queue_frame_drops_++; | 723 consecutive_full_queue_frame_drops_++; |
| 670 if (consecutive_full_queue_frame_drops_ >= | 724 if (consecutive_full_queue_frame_drops_ >= |
| 671 ENCODER_STALL_FRAMEDROP_THRESHOLD) { | 725 ENCODER_STALL_FRAMEDROP_THRESHOLD) { |
| 672 ALOGE << "Encoder got stuck. Reset."; | 726 ALOGE << "Encoder got stuck."; |
| 673 ResetCodecOnCodecThread(); | 727 ProcessHWErrorOnCodecThread(true /* tryResetIfFallbackUnavailable */); |
AlexG (2016/09/09 21:55:17): Create helper function for ProcessHWErrorOnCodecThread…
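One possible shape for the helper the comment asks for, folding the repeated "process the HW error, then pick fallback vs. error" sequence into a single call; the name and exact form are assumptions, not code from this CL.

```cpp
// Hypothetical helper per the review comment. Call sites like the one above
// would then reduce to: return ProcessHWErrorOnCodecThreadAndReturnCode();
int32_t MediaCodecVideoEncoder::ProcessHWErrorOnCodecThreadAndReturnCode() {
  ProcessHWErrorOnCodecThread(true /* tryResetIfFallbackUnavailable */);
  return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE
                               : WEBRTC_VIDEO_CODEC_ERROR;
}
```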
| 674 return WEBRTC_VIDEO_CODEC_ERROR; | 728 return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE |
| 729 : WEBRTC_VIDEO_CODEC_ERROR; | |
| 675 } | 730 } |
| 676 frames_dropped_media_encoder_++; | 731 frames_dropped_media_encoder_++; |
| 677 OnDroppedFrameOnCodecThread(); | 732 OnDroppedFrameOnCodecThread(); |
| 678 return WEBRTC_VIDEO_CODEC_OK; | 733 return WEBRTC_VIDEO_CODEC_OK; |
| 679 } | 734 } |
| 680 consecutive_full_queue_frame_drops_ = 0; | 735 consecutive_full_queue_frame_drops_ = 0; |
| 681 | 736 |
| 682 rtc::scoped_refptr<webrtc::VideoFrameBuffer> input_buffer( | 737 rtc::scoped_refptr<webrtc::VideoFrameBuffer> input_buffer( |
| 683 frame.video_frame_buffer()); | 738 frame.video_frame_buffer()); |
| 684 if (scale_) { | 739 if (scale_) { |
| (...skipping 23 matching lines...) | |
| 708 ALOGE << "Failed to reconfigure encoder."; | 763 ALOGE << "Failed to reconfigure encoder."; |
| 709 return WEBRTC_VIDEO_CODEC_ERROR; | 764 return WEBRTC_VIDEO_CODEC_ERROR; |
| 710 } | 765 } |
| 711 | 766 |
| 712 const bool key_frame = | 767 const bool key_frame = |
| 713 frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame; | 768 frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame; |
| 714 bool encode_status = true; | 769 bool encode_status = true; |
| 715 if (!input_frame.video_frame_buffer()->native_handle()) { | 770 if (!input_frame.video_frame_buffer()->native_handle()) { |
| 716 int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_, | 771 int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_, |
| 717 j_dequeue_input_buffer_method_); | 772 j_dequeue_input_buffer_method_); |
| 718 CHECK_EXCEPTION(jni); | 773 if (CheckException(jni)) { |
| 774 ALOGE << "Exception in dequeue input buffer."; | |
| 775 ProcessHWErrorOnCodecThread(true /* tryResetIfFallbackUnavailable */); | |
| 776 return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE | |
| 777 : WEBRTC_VIDEO_CODEC_ERROR; | |
| 778 } | |
| 719 if (j_input_buffer_index == -1) { | 779 if (j_input_buffer_index == -1) { |
| 720 // Video codec falls behind - no input buffer available. | 780 // Video codec falls behind - no input buffer available. |
| 721 ALOGW << "Encoder drop frame - no input buffers available"; | 781 ALOGW << "Encoder drop frame - no input buffers available"; |
| 722 if (frames_received_ > 1) { | 782 if (frames_received_ > 1) { |
| 723 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; | 783 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; |
| 724 frames_dropped_media_encoder_++; | 784 frames_dropped_media_encoder_++; |
| 725 OnDroppedFrameOnCodecThread(); | 785 OnDroppedFrameOnCodecThread(); |
| 726 } else { | 786 } else { |
| 727 // Input buffers are not ready after codec initialization, HW is still | 787 // Input buffers are not ready after codec initialization, HW is still |
| 728 // allocating them - this is expected and should not result in drop | 788 // allocating them - this is expected and should not result in drop |
| 729 // frame report. | 789 // frame report. |
| 730 frames_received_ = 0; | 790 frames_received_ = 0; |
| 731 } | 791 } |
| 732 return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887. | 792 return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887. |
| 733 } else if (j_input_buffer_index == -2) { | 793 } else if (j_input_buffer_index == -2) { |
| 734 ResetCodecOnCodecThread(); | 794 ProcessHWErrorOnCodecThread(true /* tryResetIfFallbackUnavailable */); |
| 735 return WEBRTC_VIDEO_CODEC_ERROR; | 795 return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE |
| 796 : WEBRTC_VIDEO_CODEC_ERROR; | |
| 736 } | 797 } |
| 737 encode_status = EncodeByteBufferOnCodecThread(jni, key_frame, input_frame, | 798 encode_status = EncodeByteBufferOnCodecThread(jni, key_frame, input_frame, |
| 738 j_input_buffer_index); | 799 j_input_buffer_index); |
| 739 } else { | 800 } else { |
| 740 encode_status = EncodeTextureOnCodecThread(jni, key_frame, input_frame); | 801 encode_status = EncodeTextureOnCodecThread(jni, key_frame, input_frame); |
| 741 } | 802 } |
| 742 | 803 |
| 743 if (!encode_status) { | 804 if (!encode_status) { |
| 744 ALOGE << "Failed encode frame with timestamp: " << input_frame.timestamp(); | 805 ALOGE << "Failed encode frame with timestamp: " << input_frame.timestamp(); |
| 745 ResetCodecOnCodecThread(); | 806 ProcessHWErrorOnCodecThread(true /* tryResetIfFallbackUnavailable */); |
| 746 return WEBRTC_VIDEO_CODEC_ERROR; | 807 return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE |
| 808 : WEBRTC_VIDEO_CODEC_ERROR; | |
| 747 } | 809 } |
| 748 | 810 |
| 749 // Save input image timestamps for later output. | 811 // Save input image timestamps for later output. |
| 750 input_frame_infos_.emplace_back( | 812 input_frame_infos_.emplace_back( |
| 751 frame_input_time_ms, input_frame.timestamp(), | 813 frame_input_time_ms, input_frame.timestamp(), |
| 752 input_frame.render_time_ms(), input_frame.rotation()); | 814 input_frame.render_time_ms(), input_frame.rotation()); |
| 753 | 815 |
| 754 last_input_timestamp_ms_ = | 816 last_input_timestamp_ms_ = |
| 755 current_timestamp_us_ / rtc::kNumMicrosecsPerMillisec; | 817 current_timestamp_us_ / rtc::kNumMicrosecsPerMillisec; |
| 756 | 818 |
| 757 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; | 819 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; |
| 758 | 820 |
| 759 codec_thread_->Clear(this); | 821 codec_thread_->Clear(this); |
| 760 codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollMs, this); | 822 codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollMs, this); |
| 761 | 823 |
| 762 if (!DeliverPendingOutputs(jni)) { | 824 if (!DeliverPendingOutputs(jni)) { |
| 763 ALOGE << "Failed deliver pending outputs."; | 825 ProcessHWErrorOnCodecThread(true /* tryResetIfFallbackUnavailable */); |
| 764 ResetCodecOnCodecThread(); | 826 return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE |
| 765 return WEBRTC_VIDEO_CODEC_ERROR; | 827 : WEBRTC_VIDEO_CODEC_ERROR; |
| 766 } | 828 } |
| 767 return WEBRTC_VIDEO_CODEC_OK; | 829 return WEBRTC_VIDEO_CODEC_OK; |
| 768 } | 830 } |
| 769 | 831 |
| 770 bool MediaCodecVideoEncoder::MaybeReconfigureEncoderOnCodecThread( | 832 bool MediaCodecVideoEncoder::MaybeReconfigureEncoderOnCodecThread( |
| 771 const webrtc::VideoFrame& frame) { | 833 const webrtc::VideoFrame& frame) { |
| 772 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 834 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
| 773 | 835 |
| 774 const bool is_texture_frame = | 836 const bool is_texture_frame = |
| 775 frame.video_frame_buffer()->native_handle() != nullptr; | 837 frame.video_frame_buffer()->native_handle() != nullptr; |
| (...skipping 27 matching lines...) | |
| 803 } | 865 } |
| 804 | 866 |
| 805 bool MediaCodecVideoEncoder::EncodeByteBufferOnCodecThread(JNIEnv* jni, | 867 bool MediaCodecVideoEncoder::EncodeByteBufferOnCodecThread(JNIEnv* jni, |
| 806 bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index) { | 868 bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index) { |
| 807 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 869 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
| 808 RTC_CHECK(!use_surface_); | 870 RTC_CHECK(!use_surface_); |
| 809 | 871 |
| 810 jobject j_input_buffer = input_buffers_[input_buffer_index]; | 872 jobject j_input_buffer = input_buffers_[input_buffer_index]; |
| 811 uint8_t* yuv_buffer = | 873 uint8_t* yuv_buffer = |
| 812 reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer)); | 874 reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer)); |
| 813 CHECK_EXCEPTION(jni); | 875 if (CheckException(jni)) { |
| 876 ALOGE << "Exception in get direct buffer address."; | |
| 877 ProcessHWErrorOnCodecThread(true /* tryResetIfFallbackUnavailable */); | |
| 878 return false; | |
| 879 } | |
| 814 RTC_CHECK(yuv_buffer) << "Indirect buffer??"; | 880 RTC_CHECK(yuv_buffer) << "Indirect buffer??"; |
| 815 RTC_CHECK(!libyuv::ConvertFromI420( | 881 RTC_CHECK(!libyuv::ConvertFromI420( |
| 816 frame.video_frame_buffer()->DataY(), | 882 frame.video_frame_buffer()->DataY(), |
| 817 frame.video_frame_buffer()->StrideY(), | 883 frame.video_frame_buffer()->StrideY(), |
| 818 frame.video_frame_buffer()->DataU(), | 884 frame.video_frame_buffer()->DataU(), |
| 819 frame.video_frame_buffer()->StrideU(), | 885 frame.video_frame_buffer()->StrideU(), |
| 820 frame.video_frame_buffer()->DataV(), | 886 frame.video_frame_buffer()->DataV(), |
| 821 frame.video_frame_buffer()->StrideV(), | 887 frame.video_frame_buffer()->StrideV(), |
| 822 yuv_buffer, width_, width_, height_, encoder_fourcc_)) | 888 yuv_buffer, width_, width_, height_, encoder_fourcc_)) |
| 823 << "ConvertFromI420 failed"; | 889 << "ConvertFromI420 failed"; |
| 824 | 890 |
| 825 bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_, | 891 bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_, |
| 826 j_encode_buffer_method_, | 892 j_encode_buffer_method_, |
| 827 key_frame, | 893 key_frame, |
| 828 input_buffer_index, | 894 input_buffer_index, |
| 829 yuv_size_, | 895 yuv_size_, |
| 830 current_timestamp_us_); | 896 current_timestamp_us_); |
| 831 CHECK_EXCEPTION(jni); | 897 if (CheckException(jni)) { |
| 898 ALOGE << "Exception in encode buffer."; | |
| 899 ProcessHWErrorOnCodecThread(true /* tryResetIfFallbackUnavailable */); | |
| 900 return false; | |
| 901 } | |
| 832 return encode_status; | 902 return encode_status; |
| 833 } | 903 } |
| 834 | 904 |
| 835 bool MediaCodecVideoEncoder::EncodeTextureOnCodecThread(JNIEnv* jni, | 905 bool MediaCodecVideoEncoder::EncodeTextureOnCodecThread(JNIEnv* jni, |
| 836 bool key_frame, const webrtc::VideoFrame& frame) { | 906 bool key_frame, const webrtc::VideoFrame& frame) { |
| 837 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 907 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
| 838 RTC_CHECK(use_surface_); | 908 RTC_CHECK(use_surface_); |
| 839 NativeHandleImpl* handle = static_cast<NativeHandleImpl*>( | 909 NativeHandleImpl* handle = static_cast<NativeHandleImpl*>( |
| 840 frame.video_frame_buffer()->native_handle()); | 910 frame.video_frame_buffer()->native_handle()); |
| 841 jfloatArray sampling_matrix = handle->sampling_matrix.ToJava(jni); | 911 jfloatArray sampling_matrix = handle->sampling_matrix.ToJava(jni); |
| 842 bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_, | 912 bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_, |
| 843 j_encode_texture_method_, | 913 j_encode_texture_method_, |
| 844 key_frame, | 914 key_frame, |
| 845 handle->oes_texture_id, | 915 handle->oes_texture_id, |
| 846 sampling_matrix, | 916 sampling_matrix, |
| 847 current_timestamp_us_); | 917 current_timestamp_us_); |
| 848 CHECK_EXCEPTION(jni); | 918 if (CheckException(jni)) { |
| 919 ALOGE << "Exception in encode texture."; | |
| 920 ProcessHWErrorOnCodecThread(true /* tryResetIfFallbackUnavailable */); | |
| 921 return false; | |
| 922 } | |
| 849 return encode_status; | 923 return encode_status; |
| 850 } | 924 } |
| 851 | 925 |
| 852 int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread( | 926 int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread( |
| 853 webrtc::EncodedImageCallback* callback) { | 927 webrtc::EncodedImageCallback* callback) { |
| 854 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 928 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
| 855 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 929 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 856 ScopedLocalRefFrame local_ref_frame(jni); | 930 ScopedLocalRefFrame local_ref_frame(jni); |
| 857 callback_ = callback; | 931 callback_ = callback; |
| 858 return WEBRTC_VIDEO_CODEC_OK; | 932 return WEBRTC_VIDEO_CODEC_OK; |
| 859 } | 933 } |
| 860 | 934 |
| 861 int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() { | 935 int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() { |
| 862 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 936 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
| 863 if (!inited_) { | 937 if (!inited_) { |
| 864 return WEBRTC_VIDEO_CODEC_OK; | 938 return WEBRTC_VIDEO_CODEC_OK; |
| 865 } | 939 } |
| 866 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 940 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 867 ALOGD << "EncoderReleaseOnCodecThread: Frames received: " << | 941 ALOGD << "EncoderReleaseOnCodecThread: Frames received: " << |
| 868 frames_received_ << ". Encoded: " << frames_encoded_ << | 942 frames_received_ << ". Encoded: " << frames_encoded_ << |
| 869 ". Dropped: " << frames_dropped_media_encoder_; | 943 ". Dropped: " << frames_dropped_media_encoder_; |
| 870 ScopedLocalRefFrame local_ref_frame(jni); | 944 ScopedLocalRefFrame local_ref_frame(jni); |
| 871 for (size_t i = 0; i < input_buffers_.size(); ++i) | 945 for (size_t i = 0; i < input_buffers_.size(); ++i) |
| 872 jni->DeleteGlobalRef(input_buffers_[i]); | 946 jni->DeleteGlobalRef(input_buffers_[i]); |
| 873 input_buffers_.clear(); | 947 input_buffers_.clear(); |
| 874 jni->CallVoidMethod(*j_media_codec_video_encoder_, j_release_method_); | 948 jni->CallVoidMethod(*j_media_codec_video_encoder_, j_release_method_); |
| 875 CHECK_EXCEPTION(jni); | 949 if (CheckException(jni)) { |
| 950 ALOGE << "Exception in release."; | |
| 951 ProcessHWErrorOnCodecThread(false /* tryResetIfFallbackUnavailable */); | |
| 952 return WEBRTC_VIDEO_CODEC_ERROR; | |
| 953 } | |
| 876 rtc::MessageQueueManager::Clear(this); | 954 rtc::MessageQueueManager::Clear(this); |
| 877 inited_ = false; | 955 inited_ = false; |
| 878 use_surface_ = false; | 956 use_surface_ = false; |
| 879 ALOGD << "EncoderReleaseOnCodecThread done."; | 957 ALOGD << "EncoderReleaseOnCodecThread done."; |
| 880 return WEBRTC_VIDEO_CODEC_OK; | 958 return WEBRTC_VIDEO_CODEC_OK; |
| 881 } | 959 } |
| 882 | 960 |
| 883 int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate, | 961 int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate, |
| 884 uint32_t frame_rate) { | 962 uint32_t frame_rate) { |
| 885 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 963 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
| 964 if (sw_fallback_required_) { | |
| 965 return WEBRTC_VIDEO_CODEC_OK; | |
| 966 } | |
| 886 frame_rate = (frame_rate < MAX_ALLOWED_VIDEO_FPS) ? | 967 frame_rate = (frame_rate < MAX_ALLOWED_VIDEO_FPS) ? |
| 887 frame_rate : MAX_ALLOWED_VIDEO_FPS; | 968 frame_rate : MAX_ALLOWED_VIDEO_FPS; |
| 888 if (last_set_bitrate_kbps_ == new_bit_rate && | 969 if (last_set_bitrate_kbps_ == new_bit_rate && |
| 889 last_set_fps_ == frame_rate) { | 970 last_set_fps_ == frame_rate) { |
| 890 return WEBRTC_VIDEO_CODEC_OK; | 971 return WEBRTC_VIDEO_CODEC_OK; |
| 891 } | 972 } |
| 892 if (scale_) { | 973 if (scale_) { |
| 893 quality_scaler_.ReportFramerate(frame_rate); | 974 quality_scaler_.ReportFramerate(frame_rate); |
| 894 } | 975 } |
| 895 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 976 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 896 ScopedLocalRefFrame local_ref_frame(jni); | 977 ScopedLocalRefFrame local_ref_frame(jni); |
| 897 if (new_bit_rate > 0) { | 978 if (new_bit_rate > 0) { |
| 898 last_set_bitrate_kbps_ = new_bit_rate; | 979 last_set_bitrate_kbps_ = new_bit_rate; |
| 899 } | 980 } |
| 900 if (frame_rate > 0) { | 981 if (frame_rate > 0) { |
| 901 last_set_fps_ = frame_rate; | 982 last_set_fps_ = frame_rate; |
| 902 } | 983 } |
| 903 bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_, | 984 bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_, |
| 904 j_set_rates_method_, | 985 j_set_rates_method_, |
| 905 last_set_bitrate_kbps_, | 986 last_set_bitrate_kbps_, |
| 906 last_set_fps_); | 987 last_set_fps_); |
| 907 CHECK_EXCEPTION(jni); | 988 if (CheckException(jni) || !ret) { |
| 908 if (!ret) { | 989 ProcessHWErrorOnCodecThread(true /* tryResetIfFallbackUnavailable */); |
| 909 ResetCodecOnCodecThread(); | 990 return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_OK |
| 910 return WEBRTC_VIDEO_CODEC_ERROR; | 991 : WEBRTC_VIDEO_CODEC_ERROR; |
| 911 } | 992 } |
| 912 return WEBRTC_VIDEO_CODEC_OK; | 993 return WEBRTC_VIDEO_CODEC_OK; |
| 913 } | 994 } |
| 914 | 995 |
| 915 int MediaCodecVideoEncoder::GetOutputBufferInfoIndex( | 996 int MediaCodecVideoEncoder::GetOutputBufferInfoIndex( |
| 916 JNIEnv* jni, | 997 JNIEnv* jni, |
| 917 jobject j_output_buffer_info) { | 998 jobject j_output_buffer_info) { |
| 918 return GetIntField(jni, j_output_buffer_info, j_info_index_field_); | 999 return GetIntField(jni, j_output_buffer_info, j_info_index_field_); |
| 919 } | 1000 } |
| 920 | 1001 |
| (...skipping 15 matching lines...) | |
| 936 return GetLongField( | 1017 return GetLongField( |
| 937 jni, j_output_buffer_info, j_info_presentation_timestamp_us_field_); | 1018 jni, j_output_buffer_info, j_info_presentation_timestamp_us_field_); |
| 938 } | 1019 } |
| 939 | 1020 |
| 940 bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) { | 1021 bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) { |
| 941 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 1022 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
| 942 | 1023 |
| 943 while (true) { | 1024 while (true) { |
| 944 jobject j_output_buffer_info = jni->CallObjectMethod( | 1025 jobject j_output_buffer_info = jni->CallObjectMethod( |
| 945 *j_media_codec_video_encoder_, j_dequeue_output_buffer_method_); | 1026 *j_media_codec_video_encoder_, j_dequeue_output_buffer_method_); |
| 946 CHECK_EXCEPTION(jni); | 1027 if (CheckException(jni)) { |
| 1028 ALOGE << "Exception in set dequeue output buffer."; | |
| 1029 ProcessHWErrorOnCodecThread(true /* tryResetIfFallbackUnavailable */); | |
| 1030 return WEBRTC_VIDEO_CODEC_ERROR; | |
| 1031 } | |
| 947 if (IsNull(jni, j_output_buffer_info)) { | 1032 if (IsNull(jni, j_output_buffer_info)) { |
| 948 break; | 1033 break; |
| 949 } | 1034 } |
| 950 | 1035 |
| 951 int output_buffer_index = | 1036 int output_buffer_index = |
| 952 GetOutputBufferInfoIndex(jni, j_output_buffer_info); | 1037 GetOutputBufferInfoIndex(jni, j_output_buffer_info); |
| 953 if (output_buffer_index == -1) { | 1038 if (output_buffer_index == -1) { |
| 954 ResetCodecOnCodecThread(); | 1039 ProcessHWErrorOnCodecThread(true /* tryResetIfFallbackUnavailable */); |
| 955 return false; | 1040 return false; |
| 956 } | 1041 } |
| 957 | 1042 |
| 958 // Get key and config frame flags. | 1043 // Get key and config frame flags. |
| 959 jobject j_output_buffer = | 1044 jobject j_output_buffer = |
| 960 GetOutputBufferInfoBuffer(jni, j_output_buffer_info); | 1045 GetOutputBufferInfoBuffer(jni, j_output_buffer_info); |
| 961 bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info); | 1046 bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info); |
| 962 | 1047 |
| 963 // Get frame timestamps from a queue - for non config frames only. | 1048 // Get frame timestamps from a queue - for non config frames only. |
| 964 int64_t encoding_start_time_ms = 0; | 1049 int64_t encoding_start_time_ms = 0; |
| 965 int64_t frame_encoding_time_ms = 0; | 1050 int64_t frame_encoding_time_ms = 0; |
| 966 last_output_timestamp_ms_ = | 1051 last_output_timestamp_ms_ = |
| 967 GetOutputBufferInfoPresentationTimestampUs(jni, j_output_buffer_info) / | 1052 GetOutputBufferInfoPresentationTimestampUs(jni, j_output_buffer_info) / |
| 968 rtc::kNumMicrosecsPerMillisec; | 1053 rtc::kNumMicrosecsPerMillisec; |
| 969 if (!input_frame_infos_.empty()) { | 1054 if (!input_frame_infos_.empty()) { |
| 970 const InputFrameInfo& frame_info = input_frame_infos_.front(); | 1055 const InputFrameInfo& frame_info = input_frame_infos_.front(); |
| 971 output_timestamp_ = frame_info.frame_timestamp; | 1056 output_timestamp_ = frame_info.frame_timestamp; |
| 972 output_render_time_ms_ = frame_info.frame_render_time_ms; | 1057 output_render_time_ms_ = frame_info.frame_render_time_ms; |
| 973 output_rotation_ = frame_info.rotation; | 1058 output_rotation_ = frame_info.rotation; |
| 974 encoding_start_time_ms = frame_info.encode_start_time; | 1059 encoding_start_time_ms = frame_info.encode_start_time; |
| 975 input_frame_infos_.pop_front(); | 1060 input_frame_infos_.pop_front(); |
| 976 } | 1061 } |
| 977 | 1062 |
| 978 // Extract payload. | 1063 // Extract payload. |
| 979 size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer); | 1064 size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer); |
| 980 uint8_t* payload = reinterpret_cast<uint8_t*>( | 1065 uint8_t* payload = reinterpret_cast<uint8_t*>( |
| 981 jni->GetDirectBufferAddress(j_output_buffer)); | 1066 jni->GetDirectBufferAddress(j_output_buffer)); |
| 982 CHECK_EXCEPTION(jni); | 1067 if (CheckException(jni)) { |
| 1068 ALOGE << "Exception in get direct buffer address."; | |
| 1069 ProcessHWErrorOnCodecThread(true /* tryResetIfFallbackUnavailable */); | |
| 1070 return WEBRTC_VIDEO_CODEC_ERROR; | |
| 1071 } | |
| 983 | 1072 |
| 984 // Callback - return encoded frame. | 1073 // Callback - return encoded frame. |
| 985 int32_t callback_status = 0; | 1074 int32_t callback_status = 0; |
| 986 if (callback_) { | 1075 if (callback_) { |
| 987 std::unique_ptr<webrtc::EncodedImage> image( | 1076 std::unique_ptr<webrtc::EncodedImage> image( |
| 988 new webrtc::EncodedImage(payload, payload_size, payload_size)); | 1077 new webrtc::EncodedImage(payload, payload_size, payload_size)); |
| 989 image->_encodedWidth = width_; | 1078 image->_encodedWidth = width_; |
| 990 image->_encodedHeight = height_; | 1079 image->_encodedHeight = height_; |
| 991 image->_timeStamp = output_timestamp_; | 1080 image->_timeStamp = output_timestamp_; |
| 992 image->capture_time_ms_ = output_render_time_ms_; | 1081 image->capture_time_ms_ = output_render_time_ms_; |
| (...skipping 79 matching lines...) | |
| 1072 } | 1161 } |
| 1073 scPosition += naluPosition; | 1162 scPosition += naluPosition; |
| 1074 scPositions[scPositionsLength++] = scPosition; | 1163 scPositions[scPositionsLength++] = scPosition; |
| 1075 scPosition += H264_SC_LENGTH; | 1164 scPosition += H264_SC_LENGTH; |
| 1076 } | 1165 } |
| 1077 if (scPositionsLength == 0) { | 1166 if (scPositionsLength == 0) { |
| 1078 ALOGE << "Start code is not found!"; | 1167 ALOGE << "Start code is not found!"; |
| 1079 ALOGE << "Data:" << image->_buffer[0] << " " << image->_buffer[1] | 1168 ALOGE << "Data:" << image->_buffer[0] << " " << image->_buffer[1] |
| 1080 << " " << image->_buffer[2] << " " << image->_buffer[3] | 1169 << " " << image->_buffer[2] << " " << image->_buffer[3] |
| 1081 << " " << image->_buffer[4] << " " << image->_buffer[5]; | 1170 << " " << image->_buffer[4] << " " << image->_buffer[5]; |
| 1082 ResetCodecOnCodecThread(); | 1171 ProcessHWErrorOnCodecThread(true /* tryResetIfFallbackUnavailable */); |
| 1083 return false; | 1172 return false; |
| 1084 } | 1173 } |
| 1085 scPositions[scPositionsLength] = payload_size; | 1174 scPositions[scPositionsLength] = payload_size; |
| 1086 header.VerifyAndAllocateFragmentationHeader(scPositionsLength); | 1175 header.VerifyAndAllocateFragmentationHeader(scPositionsLength); |
| 1087 for (size_t i = 0; i < scPositionsLength; i++) { | 1176 for (size_t i = 0; i < scPositionsLength; i++) { |
| 1088 header.fragmentationOffset[i] = scPositions[i] + H264_SC_LENGTH; | 1177 header.fragmentationOffset[i] = scPositions[i] + H264_SC_LENGTH; |
| 1089 header.fragmentationLength[i] = | 1178 header.fragmentationLength[i] = |
| 1090 scPositions[i + 1] - header.fragmentationOffset[i]; | 1179 scPositions[i + 1] - header.fragmentationOffset[i]; |
| 1091 header.fragmentationPlType[i] = 0; | 1180 header.fragmentationPlType[i] = 0; |
| 1092 header.fragmentationTimeDiff[i] = 0; | 1181 header.fragmentationTimeDiff[i] = 0; |
| 1093 } | 1182 } |
| 1094 } | 1183 } |
| 1095 | 1184 |
| 1096 callback_status = callback_->Encoded(*image, &info, &header); | 1185 callback_status = callback_->Encoded(*image, &info, &header); |
| 1097 } | 1186 } |
| 1098 | 1187 |
| 1099 // Return output buffer back to the encoder. | 1188 // Return output buffer back to the encoder. |
| 1100 bool success = jni->CallBooleanMethod(*j_media_codec_video_encoder_, | 1189 bool success = jni->CallBooleanMethod(*j_media_codec_video_encoder_, |
| 1101 j_release_output_buffer_method_, | 1190 j_release_output_buffer_method_, |
| 1102 output_buffer_index); | 1191 output_buffer_index); |
| 1103 CHECK_EXCEPTION(jni); | 1192 if (CheckException(jni) || !success) { |
| 1104 if (!success) { | 1193 ProcessHWErrorOnCodecThread(true /* tryResetIfFallbackUnavailable */); |
| 1105 ResetCodecOnCodecThread(); | |
| 1106 return false; | 1194 return false; |
| 1107 } | 1195 } |
| 1108 | 1196 |
| 1109 // Print per frame statistics. | 1197 // Print per frame statistics. |
| 1110 if (encoding_start_time_ms > 0) { | 1198 if (encoding_start_time_ms > 0) { |
| 1111 frame_encoding_time_ms = rtc::TimeMillis() - encoding_start_time_ms; | 1199 frame_encoding_time_ms = rtc::TimeMillis() - encoding_start_time_ms; |
| 1112 } | 1200 } |
| 1113 if (frames_encoded_ < kMaxEncodedLogFrames) { | 1201 if (frames_encoded_ < kMaxEncodedLogFrames) { |
| 1114 int current_latency = | 1202 int current_latency = |
| 1115 (int)(last_input_timestamp_ms_ - last_output_timestamp_ms_); | 1203 (int)(last_input_timestamp_ms_ - last_output_timestamp_ms_); |
| (...skipping 181 matching lines...) | |
| 1297 return supported_codecs_; | 1385 return supported_codecs_; |
| 1298 } | 1386 } |
| 1299 | 1387 |
| 1300 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( | 1388 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( |
| 1301 webrtc::VideoEncoder* encoder) { | 1389 webrtc::VideoEncoder* encoder) { |
| 1302 ALOGD << "Destroy video encoder."; | 1390 ALOGD << "Destroy video encoder."; |
| 1303 delete encoder; | 1391 delete encoder; |
| 1304 } | 1392 } |
| 1305 | 1393 |
| 1306 } // namespace webrtc_jni | 1394 } // namespace webrtc_jni |