Chromium Code Reviews

Diff: talk/app/webrtc/java/jni/androidmediadecoder_jni.cc

Issue 1653523003: Extra logging for HW codec. (Closed) Base URL: https://chromium.googlesource.com/external/webrtc@master
Patch Set: Created 4 years, 10 months ago
 /*
  * libjingle
  * Copyright 2015 Google Inc.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions are met:
  *
  * 1. Redistributions of source code must retain the above copyright notice,
  *    this list of conditions and the following disclaimer.
  * 2. Redistributions in binary form must reproduce the above copyright notice,
(...skipping 149 matching lines...)
   jfieldID j_input_buffers_field_;
   jfieldID j_output_buffers_field_;
   jfieldID j_color_format_field_;
   jfieldID j_width_field_;
   jfieldID j_height_field_;
   jfieldID j_stride_field_;
   jfieldID j_slice_height_field_;
   // MediaCodecVideoDecoder.DecodedTextureBuffer fields.
   jfieldID j_texture_id_field_;
   jfieldID j_transform_matrix_field_;
+  jfieldID j_texture_presentation_timestamp_ms_field_;
   jfieldID j_texture_timestamp_ms_field_;
   jfieldID j_texture_ntp_timestamp_ms_field_;
   jfieldID j_texture_decode_time_ms_field_;
   jfieldID j_texture_frame_delay_ms_field_;
   // MediaCodecVideoDecoder.DecodedOutputBuffer fields.
   jfieldID j_info_index_field_;
   jfieldID j_info_offset_field_;
   jfieldID j_info_size_field_;
-  jfieldID j_info_timestamp_ms_field_;
-  jfieldID j_info_ntp_timestamp_ms_field_;
+  jfieldID j_presentation_timestamp_ms_field_;
+  jfieldID j_timestamp_ms_field_;
+  jfieldID j_ntp_timestamp_ms_field_;
   jfieldID j_byte_buffer_decode_time_ms_field_;
 
   // Global references; must be deleted in Release().
   std::vector<jobject> input_buffers_;
 };
 
 MediaCodecVideoDecoder::MediaCodecVideoDecoder(
     JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context) :
     codecType_(codecType),
     render_egl_context_(render_egl_context),
(...skipping 51 matching lines...)
       jni, *j_media_codec_video_decoder_class_, "stride", "I");
   j_slice_height_field_ = GetFieldID(
       jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I");
 
   jclass j_decoded_texture_buffer_class = FindClass(jni,
       "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer");
   j_texture_id_field_ = GetFieldID(
       jni, j_decoded_texture_buffer_class, "textureID", "I");
   j_transform_matrix_field_ = GetFieldID(
       jni, j_decoded_texture_buffer_class, "transformMatrix", "[F");
+  j_texture_presentation_timestamp_ms_field_ = GetFieldID(
+      jni, j_decoded_texture_buffer_class, "presentationTimeStampMs", "J");
   j_texture_timestamp_ms_field_ = GetFieldID(
       jni, j_decoded_texture_buffer_class, "timeStampMs", "J");
   j_texture_ntp_timestamp_ms_field_ = GetFieldID(
       jni, j_decoded_texture_buffer_class, "ntpTimeStampMs", "J");
   j_texture_decode_time_ms_field_ = GetFieldID(
       jni, j_decoded_texture_buffer_class, "decodeTimeMs", "J");
   j_texture_frame_delay_ms_field_ = GetFieldID(
       jni, j_decoded_texture_buffer_class, "frameDelayMs", "J");
 
   jclass j_decoded_output_buffer_class = FindClass(jni,
       "org/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer");
   j_info_index_field_ = GetFieldID(
       jni, j_decoded_output_buffer_class, "index", "I");
   j_info_offset_field_ = GetFieldID(
       jni, j_decoded_output_buffer_class, "offset", "I");
   j_info_size_field_ = GetFieldID(
       jni, j_decoded_output_buffer_class, "size", "I");
-  j_info_timestamp_ms_field_ = GetFieldID(
+  j_info_size_field_ = GetFieldID(
+      jni, j_decoded_output_buffer_class, "size", "I");

perkj_webrtc 2016/02/01 09:20:53: duplicate set

AlexG 2016/02/01 20:13:35: Done.

+  j_presentation_timestamp_ms_field_ = GetFieldID(
+      jni, j_decoded_output_buffer_class, "presentationTimeStampMs", "J");
+  j_timestamp_ms_field_ = GetFieldID(
       jni, j_decoded_output_buffer_class, "timeStampMs", "J");
-  j_info_ntp_timestamp_ms_field_ = GetFieldID(
+  j_ntp_timestamp_ms_field_ = GetFieldID(
       jni, j_decoded_output_buffer_class, "ntpTimeStampMs", "J");
   j_byte_buffer_decode_time_ms_field_ = GetFieldID(
       jni, j_decoded_output_buffer_class, "decodeTimeMs", "J");
 
   CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed";
   use_surface_ = (render_egl_context_ != NULL);
   ALOGD << "MediaCodecVideoDecoder ctor. Use surface: " << use_surface_;
   memset(&codec_, 0, sizeof(codec_));
   AllowBlockingCalls();
 }
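
Note on the lookups above: FindClass, GetFieldID, GetIntField and GetLongField are thin wrappers over the standard JNI API, so every field registered in the constructor must exist on the Java side with exactly the name and type signature used here ("I" for int, "J" for long, "[F" for float[]). A minimal raw-JNI sketch of the lookup for the new field, with an illustrative helper name that is not part of this CL:

  #include <jni.h>

  // Illustrative only: raw-JNI equivalent of the wrapped lookup for the new
  // presentationTimeStampMs field ("J" is the JNI signature for a Java long).
  static jlong ReadPresentationTimestampMs(JNIEnv* jni, jobject decoded_buffer) {
    jclass buffer_class = jni->FindClass(
        "org/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer");
    jfieldID pts_field =
        jni->GetFieldID(buffer_class, "presentationTimeStampMs", "J");
    return jni->GetLongField(decoded_buffer, pts_field);
  }
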
(...skipping 278 matching lines...)
       reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer));
   RTC_CHECK(buffer) << "Indirect buffer??";
   int64_t buffer_capacity = jni->GetDirectBufferCapacity(j_input_buffer);
   if (CheckException(jni) || buffer_capacity < inputImage._length) {
     ALOGE << "Input frame size "<< inputImage._length <<
         " is bigger than buffer size " << buffer_capacity;
     return ProcessHWErrorOnCodecThread();
   }
   jlong presentation_timestamp_us =
       (frames_received_ * 1000000) / codec_.maxFramerate;
+  memcpy(buffer, inputImage._buffer, inputImage._length);
+
   if (frames_decoded_ < kMaxDecodedLogFrames) {
     ALOGD << "Decoder frame in # " << frames_received_ << ". Type: "
         << inputImage._frameType << ". Buffer # " <<
-        j_input_buffer_index << ". pTS: "
+        j_input_buffer_index << ". TS: "
         << (int)(presentation_timestamp_us / 1000)
-        << ". TS: " << inputImage._timeStamp
         << ". Size: " << inputImage._length;
   }
-  memcpy(buffer, inputImage._buffer, inputImage._length);
 
   // Save input image timestamps for later output.
   frames_received_++;
   current_bytes_ += inputImage._length;
 
   // Feed input to decoder.
   bool success = jni->CallBooleanMethod(
       *j_media_codec_video_decoder_,
       j_queue_input_buffer_method_,
       j_input_buffer_index,
(...skipping 40 matching lines...)
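
One clarification on the "Decoder frame in" logging in the chunk above: the presentation timestamp is derived from the frame counter and the configured maximum frame rate, via presentation_timestamp_us = (frames_received_ * 1000000) / codec_.maxFramerate. As a worked example with illustrative numbers, at maxFramerate = 30 the 90th frame gets 90 * 1000000 / 30 = 3,000,000 us, which the log prints as 3000 ms.
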
   // Get decoded video frame properties.
   int color_format = GetIntField(jni, *j_media_codec_video_decoder_,
       j_color_format_field_);
   int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_);
   int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_);
   int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_);
   int slice_height = GetIntField(jni, *j_media_codec_video_decoder_,
       j_slice_height_field_);
 
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> frame_buffer;
+  int64_t presentation_timestamps_ms = 0;
   int64_t output_timestamps_ms = 0;
   int64_t output_ntp_timestamps_ms = 0;
   int decode_time_ms = 0;
   int64_t frame_delayed_ms = 0;
   if (use_surface_) {
     // Extract data from Java DecodedTextureBuffer.
     const int texture_id =
         GetIntField(jni, j_decoder_output_buffer, j_texture_id_field_);
     if (texture_id != 0) {  // |texture_id| == 0 represents a dropped frame.
       const jfloatArray j_transform_matrix =
           reinterpret_cast<jfloatArray>(GetObjectField(
               jni, j_decoder_output_buffer, j_transform_matrix_field_));
-      const int64_t timestamp_us =
-          GetLongField(jni, j_decoder_output_buffer,
-                       j_texture_timestamp_ms_field_);
-      output_timestamps_ms = GetLongField(jni, j_decoder_output_buffer,
-                                          j_texture_timestamp_ms_field_);
-      output_ntp_timestamps_ms =
-          GetLongField(jni, j_decoder_output_buffer,
-                       j_texture_ntp_timestamp_ms_field_);
-      decode_time_ms = GetLongField(jni, j_decoder_output_buffer,
-                                    j_texture_decode_time_ms_field_);
-      frame_delayed_ms = GetLongField(jni, j_decoder_output_buffer,
-                                      j_texture_frame_delay_ms_field_);
+      const int64_t timestamp_us = GetLongField(
+          jni, j_decoder_output_buffer, j_texture_timestamp_ms_field_);
+      presentation_timestamps_ms = GetLongField(
+          jni, j_decoder_output_buffer,
+          j_texture_presentation_timestamp_ms_field_);
+      output_timestamps_ms = GetLongField(
+          jni, j_decoder_output_buffer, j_texture_timestamp_ms_field_);
+      output_ntp_timestamps_ms = GetLongField(
+          jni, j_decoder_output_buffer, j_texture_ntp_timestamp_ms_field_);
+      decode_time_ms = GetLongField(
+          jni, j_decoder_output_buffer, j_texture_decode_time_ms_field_);
+      frame_delayed_ms = GetLongField(
+          jni, j_decoder_output_buffer, j_texture_frame_delay_ms_field_);
 
       // Create webrtc::VideoFrameBuffer with native texture handle.
       frame_buffer = surface_texture_helper_->CreateTextureFrame(
           width, height, NativeHandleImpl(jni, texture_id, j_transform_matrix));
     }
   } else {
     // Extract data from Java ByteBuffer and create output yuv420 frame -
     // for non surface decoding only.
-    const int output_buffer_index =
-        GetIntField(jni, j_decoder_output_buffer, j_info_index_field_);
-    const int output_buffer_offset =
-        GetIntField(jni, j_decoder_output_buffer, j_info_offset_field_);
-    const int output_buffer_size =
-        GetIntField(jni, j_decoder_output_buffer, j_info_size_field_);
-    output_timestamps_ms = GetLongField(jni, j_decoder_output_buffer,
-                                        j_info_timestamp_ms_field_);
-    output_ntp_timestamps_ms =
-        GetLongField(jni, j_decoder_output_buffer,
-                     j_info_ntp_timestamp_ms_field_);
+    const int output_buffer_index = GetIntField(
+        jni, j_decoder_output_buffer, j_info_index_field_);
+    const int output_buffer_offset = GetIntField(
+        jni, j_decoder_output_buffer, j_info_offset_field_);
+    const int output_buffer_size = GetIntField(
+        jni, j_decoder_output_buffer, j_info_size_field_);
+    presentation_timestamps_ms = GetLongField(
+        jni, j_decoder_output_buffer, j_presentation_timestamp_ms_field_);
+    output_timestamps_ms = GetLongField(
+        jni, j_decoder_output_buffer, j_timestamp_ms_field_);
+    output_ntp_timestamps_ms = GetLongField(
+        jni, j_decoder_output_buffer, j_ntp_timestamp_ms_field_);
 
     decode_time_ms = GetLongField(jni, j_decoder_output_buffer,
                                   j_byte_buffer_decode_time_ms_field_);
 
     if (output_buffer_size < width * height * 3 / 2) {
       ALOGE << "Insufficient output buffer size: " << output_buffer_size;
       return false;
     }
     jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField(
         jni, *j_media_codec_video_decoder_, j_output_buffers_field_));
(...skipping 51 matching lines...)
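
For context on the output_buffer_size check in the ByteBuffer branch above: a decoded I420 frame occupies width * height luma bytes plus two quarter-size chroma planes, i.e. width * height * 3 / 2 bytes in total. With illustrative dimensions of 640x480 that is 640 * 480 * 3 / 2 = 460,800 bytes; a smaller buffer is treated as an error and the function returns false.
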
       return false;
     }
   }
   VideoFrame decoded_frame(frame_buffer, 0, 0, webrtc::kVideoRotation_0);
   decoded_frame.set_timestamp(output_timestamps_ms);
   decoded_frame.set_ntp_time_ms(output_ntp_timestamps_ms);
 
   if (frames_decoded_ < kMaxDecodedLogFrames) {
     ALOGD << "Decoder frame out # " << frames_decoded_ << ". " << width <<
         " x " << height << ". " << stride << " x " << slice_height <<
-        ". Color: " << color_format << ". TS:" << decoded_frame.timestamp() <<
+        ". Color: " << color_format << ". TS: " << presentation_timestamps_ms <<
         ". DecTime: " << (int)decode_time_ms <<
         ". DelayTime: " << (int)frame_delayed_ms;
   }
 
   // Calculate and print decoding statistics - every 3 seconds.
   frames_decoded_++;
   current_frames_++;
   current_decoding_time_ms_ += decode_time_ms;
   int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
   if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
(...skipping 131 matching lines...)
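
With this change the "Decoder frame out" log in the chunk above reports the decoder's presentation timestamp (presentation_timestamps_ms) instead of decoded_frame.timestamp(). With purely illustrative values, a logged line would look like:

  Decoder frame out # 0. 640 x 480. 640 x 480. Color: 19. TS: 33. DecTime: 12. DelayTime: 0

where Color 19 corresponds to MediaCodec's COLOR_FormatYUV420Planar.
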
   ALOGD << "Destroy video decoder.";
   delete decoder;
 }
 
 const char* MediaCodecVideoDecoder::ImplementationName() const {
   return "MediaCodec";
 }
 
 }  // namespace webrtc_jni