Chromium Code Reviews

Unified diff: talk/app/webrtc/java/jni/androidmediadecoder_jni.cc

Issue 1653523003: Extra logging for HW codec. (Closed) Base URL: https://chromium.googlesource.com/external/webrtc@master
Patch Set: Address comments (created 4 years, 10 months ago)
 /*
  * libjingle
  * Copyright 2015 Google Inc.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions are met:
  *
  * 1. Redistributions of source code must retain the above copyright notice,
  *    this list of conditions and the following disclaimer.
  * 2. Redistributions in binary form must reproduce the above copyright notice,
(...skipping 149 matching lines...)
   jfieldID j_input_buffers_field_;
   jfieldID j_output_buffers_field_;
   jfieldID j_color_format_field_;
   jfieldID j_width_field_;
   jfieldID j_height_field_;
   jfieldID j_stride_field_;
   jfieldID j_slice_height_field_;
   // MediaCodecVideoDecoder.DecodedTextureBuffer fields.
   jfieldID j_texture_id_field_;
   jfieldID j_transform_matrix_field_;
+  jfieldID j_texture_presentation_timestamp_ms_field_;
   jfieldID j_texture_timestamp_ms_field_;
   jfieldID j_texture_ntp_timestamp_ms_field_;
   jfieldID j_texture_decode_time_ms_field_;
   jfieldID j_texture_frame_delay_ms_field_;
   // MediaCodecVideoDecoder.DecodedOutputBuffer fields.
   jfieldID j_info_index_field_;
   jfieldID j_info_offset_field_;
   jfieldID j_info_size_field_;
-  jfieldID j_info_timestamp_ms_field_;
-  jfieldID j_info_ntp_timestamp_ms_field_;
+  jfieldID j_presentation_timestamp_ms_field_;
+  jfieldID j_timestamp_ms_field_;
+  jfieldID j_ntp_timestamp_ms_field_;
   jfieldID j_byte_buffer_decode_time_ms_field_;

   // Global references; must be deleted in Release().
   std::vector<jobject> input_buffers_;
 };

 MediaCodecVideoDecoder::MediaCodecVideoDecoder(
     JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context) :
     codecType_(codecType),
     render_egl_context_(render_egl_context),
(...skipping 51 matching lines...)
       jni, *j_media_codec_video_decoder_class_, "stride", "I");
   j_slice_height_field_ = GetFieldID(
       jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I");

   jclass j_decoded_texture_buffer_class = FindClass(jni,
       "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer");
   j_texture_id_field_ = GetFieldID(
       jni, j_decoded_texture_buffer_class, "textureID", "I");
   j_transform_matrix_field_ = GetFieldID(
       jni, j_decoded_texture_buffer_class, "transformMatrix", "[F");
+  j_texture_presentation_timestamp_ms_field_ = GetFieldID(
+      jni, j_decoded_texture_buffer_class, "presentationTimeStampMs", "J");
   j_texture_timestamp_ms_field_ = GetFieldID(
       jni, j_decoded_texture_buffer_class, "timeStampMs", "J");
   j_texture_ntp_timestamp_ms_field_ = GetFieldID(
       jni, j_decoded_texture_buffer_class, "ntpTimeStampMs", "J");
   j_texture_decode_time_ms_field_ = GetFieldID(
       jni, j_decoded_texture_buffer_class, "decodeTimeMs", "J");
   j_texture_frame_delay_ms_field_ = GetFieldID(
       jni, j_decoded_texture_buffer_class, "frameDelayMs", "J");

   jclass j_decoded_output_buffer_class = FindClass(jni,
       "org/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer");
   j_info_index_field_ = GetFieldID(
       jni, j_decoded_output_buffer_class, "index", "I");
   j_info_offset_field_ = GetFieldID(
       jni, j_decoded_output_buffer_class, "offset", "I");
   j_info_size_field_ = GetFieldID(
       jni, j_decoded_output_buffer_class, "size", "I");
-  j_info_timestamp_ms_field_ = GetFieldID(
+  j_presentation_timestamp_ms_field_ = GetFieldID(
+      jni, j_decoded_output_buffer_class, "presentationTimeStampMs", "J");
+  j_timestamp_ms_field_ = GetFieldID(
       jni, j_decoded_output_buffer_class, "timeStampMs", "J");
-  j_info_ntp_timestamp_ms_field_ = GetFieldID(
+  j_ntp_timestamp_ms_field_ = GetFieldID(
       jni, j_decoded_output_buffer_class, "ntpTimeStampMs", "J");
   j_byte_buffer_decode_time_ms_field_ = GetFieldID(
       jni, j_decoded_output_buffer_class, "decodeTimeMs", "J");

   CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed";
   use_surface_ = (render_egl_context_ != NULL);
   ALOGD << "MediaCodecVideoDecoder ctor. Use surface: " << use_surface_;
   memset(&codec_, 0, sizeof(codec_));
   AllowBlockingCalls();
 }
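Note on the JNI pattern used throughout this constructor: each Java field is resolved once by name and type signature ("J" for a Java long) and cached as a jfieldID for later reads on the codec thread. The sketch below shows the raw-JNI equivalent for the new presentationTimeStampMs field; it is an illustration only, the function name is made up, and the patch itself goes through WebRTC's FindClass/GetFieldID/GetLongField helpers, which add exception checking.

#include <jni.h>

// Illustration only (not part of the patch): raw-JNI equivalent of the
// helper-based lookup above. |env| and |buffer_object| are assumed to be
// supplied by the calling JNI context; the function name is hypothetical.
jlong ReadPresentationTimestampMs(JNIEnv* env, jobject buffer_object) {
  jclass buffer_class = env->FindClass(
      "org/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer");
  // "J" is the JNI type signature for a Java long.
  jfieldID pts_field =
      env->GetFieldID(buffer_class, "presentationTimeStampMs", "J");
  return env->GetLongField(buffer_object, pts_field);
}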
(...skipping 278 matching lines...)
       reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer));
   RTC_CHECK(buffer) << "Indirect buffer??";
   int64_t buffer_capacity = jni->GetDirectBufferCapacity(j_input_buffer);
   if (CheckException(jni) || buffer_capacity < inputImage._length) {
     ALOGE << "Input frame size "<< inputImage._length <<
         " is bigger than buffer size " << buffer_capacity;
     return ProcessHWErrorOnCodecThread();
   }
   jlong presentation_timestamp_us =
       (frames_received_ * 1000000) / codec_.maxFramerate;
+  memcpy(buffer, inputImage._buffer, inputImage._length);
+
   if (frames_decoded_ < kMaxDecodedLogFrames) {
-    ALOGD << "Decoder frame in # " << frames_received_ << ". Type: "
-        << inputImage._frameType << ". Buffer # " <<
-        j_input_buffer_index << ". pTS: "
-        << (int)(presentation_timestamp_us / 1000)
-        << ". TS: " << inputImage._timeStamp
-        << ". Size: " << inputImage._length;
+    ALOGD << "Decoder frame in # " << frames_received_ <<
+        ". Type: " << inputImage._frameType <<
+        ". Buffer # " << j_input_buffer_index <<
+        ". TS: " << (int)(presentation_timestamp_us / 1000) <<
+        ". Size: " << inputImage._length;
   }
-  memcpy(buffer, inputImage._buffer, inputImage._length);

   // Save input image timestamps for later output.
   frames_received_++;
   current_bytes_ += inputImage._length;

   // Feed input to decoder.
   bool success = jni->CallBooleanMethod(
       *j_media_codec_video_decoder_,
       j_queue_input_buffer_method_,
       j_input_buffer_index,
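A quick worked example of the "TS" value logged in the hunk above: presentation_timestamp_us is derived only from the running frame count and the configured maximum framerate, so at 30 fps the 90th received frame gets 3,000,000 us, printed as 3000 ms after the division by 1000. A standalone sketch with made-up numbers (not part of the patch):

#include <cstdint>
#include <cstdio>

// Standalone sketch of the presentation timestamp arithmetic in the hunk
// above, using hypothetical values (frame #90 at 30 fps).
int main() {
  const int64_t frames_received = 90;
  const int max_framerate = 30;
  const int64_t presentation_timestamp_us =
      (frames_received * 1000000) / max_framerate;  // 3,000,000 us
  std::printf("TS: %d ms\n",
              static_cast<int>(presentation_timestamp_us / 1000));  // 3000 ms
  return 0;
}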
(...skipping 40 matching lines...)
   // Get decoded video frame properties.
   int color_format = GetIntField(jni, *j_media_codec_video_decoder_,
       j_color_format_field_);
   int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_);
   int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_);
   int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_);
   int slice_height = GetIntField(jni, *j_media_codec_video_decoder_,
       j_slice_height_field_);

   rtc::scoped_refptr<webrtc::VideoFrameBuffer> frame_buffer;
+  int64_t presentation_timestamps_ms = 0;
   int64_t output_timestamps_ms = 0;
   int64_t output_ntp_timestamps_ms = 0;
   int decode_time_ms = 0;
   int64_t frame_delayed_ms = 0;
   if (use_surface_) {
     // Extract data from Java DecodedTextureBuffer.
     const int texture_id =
         GetIntField(jni, j_decoder_output_buffer, j_texture_id_field_);
     if (texture_id != 0) {  // |texture_id| == 0 represents a dropped frame.
       const jfloatArray j_transform_matrix =
           reinterpret_cast<jfloatArray>(GetObjectField(
               jni, j_decoder_output_buffer, j_transform_matrix_field_));
-      const int64_t timestamp_us =
-          GetLongField(jni, j_decoder_output_buffer,
-                       j_texture_timestamp_ms_field_);
-      output_timestamps_ms = GetLongField(jni, j_decoder_output_buffer,
-                                          j_texture_timestamp_ms_field_);
-      output_ntp_timestamps_ms =
-          GetLongField(jni, j_decoder_output_buffer,
-                       j_texture_ntp_timestamp_ms_field_);
-      decode_time_ms = GetLongField(jni, j_decoder_output_buffer,
-                                    j_texture_decode_time_ms_field_);
-      frame_delayed_ms = GetLongField(jni, j_decoder_output_buffer,
-                                      j_texture_frame_delay_ms_field_);
+      const int64_t timestamp_us = GetLongField(
+          jni, j_decoder_output_buffer, j_texture_timestamp_ms_field_);
+      presentation_timestamps_ms = GetLongField(
+          jni, j_decoder_output_buffer,
+          j_texture_presentation_timestamp_ms_field_);
+      output_timestamps_ms = GetLongField(
+          jni, j_decoder_output_buffer, j_texture_timestamp_ms_field_);
+      output_ntp_timestamps_ms = GetLongField(
+          jni, j_decoder_output_buffer, j_texture_ntp_timestamp_ms_field_);
+      decode_time_ms = GetLongField(
+          jni, j_decoder_output_buffer, j_texture_decode_time_ms_field_);
+      frame_delayed_ms = GetLongField(
+          jni, j_decoder_output_buffer, j_texture_frame_delay_ms_field_);

       // Create webrtc::VideoFrameBuffer with native texture handle.
       frame_buffer = surface_texture_helper_->CreateTextureFrame(
           width, height, NativeHandleImpl(jni, texture_id, j_transform_matrix));
     }
   } else {
     // Extract data from Java ByteBuffer and create output yuv420 frame -
     // for non surface decoding only.
-    const int output_buffer_index =
-        GetIntField(jni, j_decoder_output_buffer, j_info_index_field_);
-    const int output_buffer_offset =
-        GetIntField(jni, j_decoder_output_buffer, j_info_offset_field_);
-    const int output_buffer_size =
-        GetIntField(jni, j_decoder_output_buffer, j_info_size_field_);
-    output_timestamps_ms = GetLongField(jni, j_decoder_output_buffer,
-                                        j_info_timestamp_ms_field_);
-    output_ntp_timestamps_ms =
-        GetLongField(jni, j_decoder_output_buffer,
-                     j_info_ntp_timestamp_ms_field_);
+    const int output_buffer_index = GetIntField(
+        jni, j_decoder_output_buffer, j_info_index_field_);
+    const int output_buffer_offset = GetIntField(
+        jni, j_decoder_output_buffer, j_info_offset_field_);
+    const int output_buffer_size = GetIntField(
+        jni, j_decoder_output_buffer, j_info_size_field_);
+    presentation_timestamps_ms = GetLongField(
+        jni, j_decoder_output_buffer, j_presentation_timestamp_ms_field_);
+    output_timestamps_ms = GetLongField(
+        jni, j_decoder_output_buffer, j_timestamp_ms_field_);
+    output_ntp_timestamps_ms = GetLongField(
+        jni, j_decoder_output_buffer, j_ntp_timestamp_ms_field_);

     decode_time_ms = GetLongField(jni, j_decoder_output_buffer,
                                   j_byte_buffer_decode_time_ms_field_);

     if (output_buffer_size < width * height * 3 / 2) {
       ALOGE << "Insufficient output buffer size: " << output_buffer_size;
       return false;
     }
     jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField(
         jni, *j_media_codec_video_decoder_, j_output_buffers_field_));
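For reference, the width * height * 3 / 2 lower bound a few lines above comes from the I420 (YUV420) layout the non-surface path produces: one full-resolution luma plane plus two quarter-resolution chroma planes, i.e. 1.5 bytes per pixel. A minimal sketch with hypothetical dimensions (not part of the patch):

#include <cstdio>

// Minimum I420 buffer size: Y (w*h) + U (w*h/4) + V (w*h/4) = w*h*3/2 bytes.
// The 640x480 dimensions are a made-up example.
int main() {
  const int width = 640;
  const int height = 480;
  const int min_size = width * height * 3 / 2;  // 460800 bytes
  std::printf("Minimum I420 buffer for %dx%d: %d bytes\n",
              width, height, min_size);
  return 0;
}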
(...skipping 49 matching lines...)
     if (CheckException(jni)) {
       ALOGE << "returnDecodedOutputBuffer error";
       return false;
     }
   }
   VideoFrame decoded_frame(frame_buffer, 0, 0, webrtc::kVideoRotation_0);
   decoded_frame.set_timestamp(output_timestamps_ms);
   decoded_frame.set_ntp_time_ms(output_ntp_timestamps_ms);

   if (frames_decoded_ < kMaxDecodedLogFrames) {
-    ALOGD << "Decoder frame out # " << frames_decoded_ << ". " << width <<
-        " x " << height << ". " << stride << " x " << slice_height <<
-        ". Color: " << color_format << ". TS:" << decoded_frame.timestamp() <<
+    ALOGD << "Decoder frame out # " << frames_decoded_ <<
+        ". " << width << " x " << height <<
+        ". " << stride << " x " << slice_height <<
+        ". Color: " << color_format <<
+        ". TS: " << presentation_timestamps_ms <<
         ". DecTime: " << (int)decode_time_ms <<
         ". DelayTime: " << (int)frame_delayed_ms;
   }

   // Calculate and print decoding statistics - every 3 seconds.
   frames_decoded_++;
   current_frames_++;
   current_decoding_time_ms_ += decode_time_ms;
   int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
   if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
       current_frames_ > 0) {
-    ALOGD << "Decoded frames: " << frames_decoded_ << ". Received frames: "
-        << frames_received_ << ". Bitrate: " <<
-        (current_bytes_ * 8 / statistic_time_ms) << " kbps, fps: " <<
-        ((current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms)
-        << ". decTime: " << (current_decoding_time_ms_ / current_frames_) <<
+    int current_bitrate = current_bytes_ * 8 / statistic_time_ms;
+    int current_fps =
+        (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms;
+    ALOGD << "Decoded frames: " << frames_decoded_ <<
+        ". Received frames: " << frames_received_ <<
+        ". Bitrate: " << current_bitrate << " kbps" <<
+        ". Fps: " << current_fps <<
+        ". DecTime: " << (current_decoding_time_ms_ / current_frames_) <<
         " for last " << statistic_time_ms << " ms.";
     start_time_ms_ = GetCurrentTimeMs();
     current_frames_ = 0;
     current_bytes_ = 0;
     current_decoding_time_ms_ = 0;
   }

   // |.IsZeroSize())| returns true when a frame has been dropped.
   if (!decoded_frame.IsZeroSize()) {
     // Callback - output decoded frame.
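The statistics introduced above are straightforward: with statistic_time_ms measured in milliseconds, current_bytes_ * 8 / statistic_time_ms is bits per millisecond, which is numerically the same as kbps, and adding statistic_time_ms / 2 before the integer division rounds the fps to the nearest whole number. A standalone sketch with made-up sample values (not part of the patch):

#include <cstdio>

// Standalone sketch of the bitrate/fps arithmetic in the statistics hunk,
// using hypothetical sample values.
int main() {
  const int current_bytes = 375000;    // bytes received over the interval
  const int current_frames = 91;       // frames decoded over the interval
  const int statistic_time_ms = 3000;  // ~3 second statistics interval
  // bits per millisecond == kilobits per second.
  const int current_bitrate = current_bytes * 8 / statistic_time_ms;  // 1000 kbps
  // Adding half the interval before dividing rounds to the nearest fps.
  const int current_fps =
      (current_frames * 1000 + statistic_time_ms / 2) / statistic_time_ms;  // 30
  std::printf("Bitrate: %d kbps, Fps: %d\n", current_bitrate, current_fps);
  return 0;
}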
(...skipping 115 matching lines...)
   ALOGD << "Destroy video decoder.";
   delete decoder;
 }

 const char* MediaCodecVideoDecoder::ImplementationName() const {
   return "MediaCodec";
 }

 }  // namespace webrtc_jni
