Chromium Code Reviews
Unified Diff: talk/app/webrtc/java/jni/androidmediadecoder_jni.cc

Issue 1661203002: Add more logging and fix PTS overflow for HW decoder. (Closed)
Base URL: https://chromium.googlesource.com/external/webrtc@master
Patch Set: Minor fix. Created 4 years, 10 months ago.
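The PTS overflow this CL fixes comes from evaluating `frames_received_ * 1000000` in 32-bit `int` arithmetic: the product exceeds INT_MAX once roughly 2148 frames have been received (about 72 seconds at 30 fps), so the presentation timestamp handed to MediaCodec goes bogus. Below is a minimal standalone sketch of the broken and fixed arithmetic; the framerate and frame count are illustrative values, and the local names are not from the patch.

#include <cstdint>
#include <cstdio>

int main() {
  const int max_framerate = 30;      // Illustrative; the patch uses codec_.maxFramerate.
  const int frames_received = 2200;  // Past the ~2148-frame overflow point at 30 fps.

  // Old computation: the multiply is done in 32-bit int and overflows
  // (undefined behavior; in practice it typically wraps negative) before the divide.
  const int64_t pts_old_us = (frames_received * 1000000) / max_framerate;

  // Fixed computation, as in the patch: widen to int64_t before multiplying
  // so the intermediate product cannot overflow.
  const int64_t pts_new_us =
      static_cast<int64_t>(frames_received) * 1000000 / max_framerate;

  printf("old: %lld us, new: %lld us\n",
         static_cast<long long>(pts_old_us),
         static_cast<long long>(pts_new_us));
  return 0;
}

The logging half of the CL is simpler: EnableFrameLogOnWarning() bumps frames_decoded_logged_ so that the next kMaxWarningLogFrames output frames are logged whenever the decoder falls too far behind, an input-buffer dequeue fails, or a texture frame is dropped.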
 /*
  * libjingle
  * Copyright 2015 Google Inc.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions are met:
  *
  * 1. Redistributions of source code must retain the above copyright notice,
  *    this list of conditions and the following disclaimer.
  * 2. Redistributions in binary form must reproduce the above copyright notice,
(...skipping 61 matching lines...)
 #ifdef TRACK_BUFFER_TIMING
 #define ALOGV(...) \
   __android_log_print(ANDROID_LOG_VERBOSE, TAG_DECODER, __VA_ARGS__)
 #else
 #define ALOGV(...)
 #endif
 #define ALOGD LOG_TAG(rtc::LS_INFO, TAG_DECODER)
 #define ALOGW LOG_TAG(rtc::LS_WARNING, TAG_DECODER)
 #define ALOGE LOG_TAG(rtc::LS_ERROR, TAG_DECODER)

+enum { kMaxWarningLogFrames = 2 };
+
 class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
                                public rtc::MessageHandler {
  public:
   explicit MediaCodecVideoDecoder(
       JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context);
   virtual ~MediaCodecVideoDecoder();

   int32_t InitDecode(const VideoCodec* codecSettings, int32_t numberOfCores)
       override;

(...skipping 19 matching lines...)
   // CHECK-fail if not running on |codec_thread_|.
   void CheckOnCodecThread();

   int32_t InitDecodeOnCodecThread();
   int32_t ReleaseOnCodecThread();
   int32_t DecodeOnCodecThread(const EncodedImage& inputImage);
   // Deliver any outputs pending in the MediaCodec to our |callback_| and return
   // true on success.
   bool DeliverPendingOutputs(JNIEnv* jni, int dequeue_timeout_us);
   int32_t ProcessHWErrorOnCodecThread();
+  void EnableFrameLogOnWarning();

   // Type of video codec.
   VideoCodecType codecType_;

   // Render EGL context - owned by factory, should not be allocated/destroyed
   // by VideoDecoder.
   jobject render_egl_context_;

   bool key_frame_required_;
   bool inited_;
   bool sw_fallback_required_;
   bool use_surface_;
   VideoCodec codec_;
   webrtc::I420BufferPool decoded_frame_pool_;
   rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_;
   DecodedImageCallback* callback_;
   int frames_received_;  // Number of frames received by decoder.
   int frames_decoded_;  // Number of frames decoded by decoder.
+  // Number of decoded frames for which log information is displayed.
+  int frames_decoded_logged_;
   int64_t start_time_ms_;  // Start time for statistics.
   int current_frames_;  // Number of frames in the current statistics interval.
   int current_bytes_;  // Encoded bytes in the current statistics interval.
   int current_decoding_time_ms_;  // Overall decoding time in the current second
   uint32_t max_pending_frames_;  // Maximum number of pending input frames

   // State that is constant for the lifetime of this object once the ctor
   // returns.
   scoped_ptr<Thread> codec_thread_;  // Thread on which to operate MediaCodec.
   ScopedGlobalRef<jclass> j_media_codec_video_decoder_class_;
(...skipping 179 matching lines...)
   if (ret_val < 0) {
     ALOGE << "Release failure: " << ret_val << " - fallback to SW codec";
     sw_fallback_required_ = true;
     return WEBRTC_VIDEO_CODEC_ERROR;
   }

   // Always start with a complete key frame.
   key_frame_required_ = true;
   frames_received_ = 0;
   frames_decoded_ = 0;
+  frames_decoded_logged_ = kMaxDecodedLogFrames;

   jobject java_surface_texture_helper_ = nullptr;
   if (use_surface_) {
     java_surface_texture_helper_ = jni->CallStaticObjectMethod(
         FindClass(jni, "org/webrtc/SurfaceTextureHelper"),
         GetStaticMethodID(jni,
                           FindClass(jni, "org/webrtc/SurfaceTextureHelper"),
                           "create",
                           "(Lorg/webrtc/EglBase$Context;)"
                           "Lorg/webrtc/SurfaceTextureHelper;"),
(...skipping 86 matching lines...)
   }
   ALOGD << "DecoderReleaseOnCodecThread done";
   return WEBRTC_VIDEO_CODEC_OK;
 }

 void MediaCodecVideoDecoder::CheckOnCodecThread() {
   RTC_CHECK(codec_thread_ == ThreadManager::Instance()->CurrentThread())
       << "Running on wrong thread!";
 }

+void MediaCodecVideoDecoder::EnableFrameLogOnWarning() {
+  // Log next 2 output frames.
+  frames_decoded_logged_ = std::max(
+      frames_decoded_logged_, frames_decoded_ + kMaxWarningLogFrames);
+}
+
 int32_t MediaCodecVideoDecoder::ProcessHWErrorOnCodecThread() {
   CheckOnCodecThread();
   int ret_val = ReleaseOnCodecThread();
   if (ret_val < 0) {
     ALOGE << "ProcessHWError: Release failure";
   }
   if (codecType_ == kVideoCodecH264) {
     // For now there is no SW H.264 which can be used as fallback codec.
     // So try to restart hw codec for now.
     ret_val = InitDecodeOnCodecThread();
(...skipping 71 matching lines...)
 }

 int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
     const EncodedImage& inputImage) {
   CheckOnCodecThread();
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
   ScopedLocalRefFrame local_ref_frame(jni);

   // Try to drain the decoder and wait until output is not too
   // much behind the input.
+  if (frames_received_ > frames_decoded_ + max_pending_frames_) {
+    ALOGW << "Decoder is too far behind. Try to drain. Received: " <<
+        frames_received_ << ". Decoded: " << frames_decoded_;
+    EnableFrameLogOnWarning();
+  }
   const int64 drain_start = GetCurrentTimeMs();
   while ((frames_received_ > frames_decoded_ + max_pending_frames_) &&
          (GetCurrentTimeMs() - drain_start) < kMediaCodecTimeoutMs) {
-    ALOGV("Received: %d. Decoded: %d. Wait for output...",
-        frames_received_, frames_decoded_);
     if (!DeliverPendingOutputs(jni, kMediaCodecPollMs)) {
       ALOGE << "DeliverPendingOutputs error. Frames received: " <<
           frames_received_ << ". Frames decoded: " << frames_decoded_;
       return ProcessHWErrorOnCodecThread();
     }
   }
   if (frames_received_ > frames_decoded_ + max_pending_frames_) {
     ALOGE << "Output buffer dequeue timeout. Frames received: " <<
         frames_received_ << ". Frames decoded: " << frames_decoded_;
     return ProcessHWErrorOnCodecThread();
   }

   // Get input buffer.
-  int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_decoder_,
-                                                j_dequeue_input_buffer_method_);
+  int j_input_buffer_index = jni->CallIntMethod(
+      *j_media_codec_video_decoder_, j_dequeue_input_buffer_method_);
   if (CheckException(jni) || j_input_buffer_index < 0) {
-    ALOGE << "dequeueInputBuffer error";
-    return ProcessHWErrorOnCodecThread();
+    ALOGE << "dequeueInputBuffer error: " << j_input_buffer_index <<
+        ". Retry DeliverPendingOutputs.";
+    EnableFrameLogOnWarning();
+    // Try to drain the decoder.
+    if (!DeliverPendingOutputs(jni, kMediaCodecPollMs)) {
+      ALOGE << "DeliverPendingOutputs error. Frames received: " <<
+          frames_received_ << ". Frames decoded: " << frames_decoded_;
+      return ProcessHWErrorOnCodecThread();
+    }
+    // Try dequeue input buffer one last time.
+    j_input_buffer_index = jni->CallIntMethod(
+        *j_media_codec_video_decoder_, j_dequeue_input_buffer_method_);
+    if (CheckException(jni) || j_input_buffer_index < 0) {
+      ALOGE << "dequeueInputBuffer critical error: " << j_input_buffer_index;
+      return ProcessHWErrorOnCodecThread();
+    }
   }

   // Copy encoded data to Java ByteBuffer.
   jobject j_input_buffer = input_buffers_[j_input_buffer_index];
   uint8_t* buffer =
       reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer));
   RTC_CHECK(buffer) << "Indirect buffer??";
   int64_t buffer_capacity = jni->GetDirectBufferCapacity(j_input_buffer);
   if (CheckException(jni) || buffer_capacity < inputImage._length) {
     ALOGE << "Input frame size "<< inputImage._length <<
         " is bigger than buffer size " << buffer_capacity;
     return ProcessHWErrorOnCodecThread();
   }
-  jlong presentation_timestamp_us =
-      (frames_received_ * 1000000) / codec_.maxFramerate;
+  jlong presentation_timestamp_us = static_cast<jlong>(
+      static_cast<int64_t>(frames_received_) * 1000000 / codec_.maxFramerate);
   memcpy(buffer, inputImage._buffer, inputImage._length);

-  if (frames_decoded_ < kMaxDecodedLogFrames) {
+  if (frames_decoded_ < frames_decoded_logged_) {
     ALOGD << "Decoder frame in # " << frames_received_ <<
         ". Type: " << inputImage._frameType <<
         ". Buffer # " << j_input_buffer_index <<
-        ". TS: " << (int)(presentation_timestamp_us / 1000) <<
+        ". TS: " << presentation_timestamp_us / 1000 <<
         ". Size: " << inputImage._length;
   }

   // Save input image timestamps for later output.
   frames_received_++;
   current_bytes_ += inputImage._length;

   // Feed input to decoder.
   bool success = jni->CallBooleanMethod(
       *j_media_codec_video_decoder_,
(...skipping 49 matching lines...)
                                        j_slice_height_field_);

   rtc::scoped_refptr<webrtc::VideoFrameBuffer> frame_buffer;
   int64_t presentation_timestamps_ms = 0;
   int64_t output_timestamps_ms = 0;
   int64_t output_ntp_timestamps_ms = 0;
   int decode_time_ms = 0;
   int64_t frame_delayed_ms = 0;
   if (use_surface_) {
     // Extract data from Java DecodedTextureBuffer.
+    presentation_timestamps_ms = GetLongField(
+        jni, j_decoder_output_buffer,
+        j_texture_presentation_timestamp_ms_field_);
+    output_timestamps_ms = GetLongField(
+        jni, j_decoder_output_buffer, j_texture_timestamp_ms_field_);
+    output_ntp_timestamps_ms = GetLongField(
+        jni, j_decoder_output_buffer, j_texture_ntp_timestamp_ms_field_);
+    decode_time_ms = GetLongField(
+        jni, j_decoder_output_buffer, j_texture_decode_time_ms_field_);
+
     const int texture_id =
         GetIntField(jni, j_decoder_output_buffer, j_texture_id_field_);
     if (texture_id != 0) {  // |texture_id| == 0 represents a dropped frame.
       const jfloatArray j_transform_matrix =
           reinterpret_cast<jfloatArray>(GetObjectField(
               jni, j_decoder_output_buffer, j_transform_matrix_field_));
-      const int64_t timestamp_us = GetLongField(
-          jni, j_decoder_output_buffer, j_texture_timestamp_ms_field_);
-      presentation_timestamps_ms = GetLongField(
-          jni, j_decoder_output_buffer,
-          j_texture_presentation_timestamp_ms_field_);
-      output_timestamps_ms = GetLongField(
-          jni, j_decoder_output_buffer, j_texture_timestamp_ms_field_);
-      output_ntp_timestamps_ms = GetLongField(
-          jni, j_decoder_output_buffer, j_texture_ntp_timestamp_ms_field_);
-      decode_time_ms = GetLongField(
-          jni, j_decoder_output_buffer, j_texture_decode_time_ms_field_);
       frame_delayed_ms = GetLongField(
           jni, j_decoder_output_buffer, j_texture_frame_delay_ms_field_);

       // Create webrtc::VideoFrameBuffer with native texture handle.
       frame_buffer = surface_texture_helper_->CreateTextureFrame(
           width, height, NativeHandleImpl(jni, texture_id, j_transform_matrix));
+    } else {
+      EnableFrameLogOnWarning();
     }
   } else {
     // Extract data from Java ByteBuffer and create output yuv420 frame -
     // for non surface decoding only.
     const int output_buffer_index = GetIntField(
         jni, j_decoder_output_buffer, j_info_index_field_);
     const int output_buffer_offset = GetIntField(
         jni, j_decoder_output_buffer, j_info_offset_field_);
     const int output_buffer_size = GetIntField(
         jni, j_decoder_output_buffer, j_info_size_field_);
(...skipping 64 matching lines...)
                         output_buffer_index);
     if (CheckException(jni)) {
       ALOGE << "returnDecodedOutputBuffer error";
       return false;
     }
   }
   VideoFrame decoded_frame(frame_buffer, 0, 0, webrtc::kVideoRotation_0);
   decoded_frame.set_timestamp(output_timestamps_ms);
   decoded_frame.set_ntp_time_ms(output_ntp_timestamps_ms);

-  if (frames_decoded_ < kMaxDecodedLogFrames) {
+  if (frames_decoded_ < frames_decoded_logged_) {
     ALOGD << "Decoder frame out # " << frames_decoded_ <<
         ". " << width << " x " << height <<
         ". " << stride << " x " << slice_height <<
         ". Color: " << color_format <<
         ". TS: " << presentation_timestamps_ms <<
         ". DecTime: " << (int)decode_time_ms <<
         ". DelayTime: " << (int)frame_delayed_ms;
   }

   // Calculate and print decoding statistics - every 3 seconds.
(...skipping 131 matching lines...)
   ALOGD << "Destroy video decoder.";
   delete decoder;
 }

 const char* MediaCodecVideoDecoder::ImplementationName() const {
   return "MediaCodec";
 }

 }  // namespace webrtc_jni
