Chromium Code Reviews

Diff: talk/app/webrtc/java/jni/androidmediaencoder_jni.cc

Issue 1653523003: Extra logging for HW codec. (Closed) Base URL: https://chromium.googlesource.com/external/webrtc@master
Patch Set: Address comments Created 4 years, 10 months ago
/*
 * libjingle
 * Copyright 2015 Google Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice,
(...skipping 170 matching lines...)
  jlong GetOutputBufferInfoPresentationTimestampUs(
      JNIEnv* jni, jobject j_output_buffer_info);

  // Deliver any outputs pending in the MediaCodec to our |callback_| and return
  // true on success.
  bool DeliverPendingOutputs(JNIEnv* jni);

  // Search for H.264 start codes.
  int32_t NextNaluPosition(uint8_t *buffer, size_t buffer_size);

+ // Displays encoder statistics.
+ void LogStatistics(bool force_log);
+
  // Type of video codec.
  VideoCodecType codecType_;

  // Valid all the time since RegisterEncodeCompleteCallback() Invoke()s to
  // |codec_thread_| synchronously.
  webrtc::EncodedImageCallback* callback_;

  // State that is constant for the lifetime of this object once the ctor
  // returns.
  scoped_ptr<Thread> codec_thread_;  // Thread on which to operate MediaCodec.
(...skipping 25 matching lines...)
  enum libyuv::FourCC encoder_fourcc_;  // Encoder color space format.
  int last_set_bitrate_kbps_;  // Last-requested bitrate in kbps.
  int last_set_fps_;  // Last-requested frame rate.
  int64_t current_timestamp_us_;  // Current frame timestamps in us.
  int frames_received_;  // Number of frames received by encoder.
  int frames_encoded_;  // Number of frames encoded by encoder.
  int frames_dropped_media_encoder_;  // Number of frames dropped by encoder.
  // Number of dropped frames caused by full queue.
  int consecutive_full_queue_frame_drops_;
  int frames_in_queue_;  // Number of frames in encoder queue.
- int64_t start_time_ms_;  // Start time for statistics.
+ int64_t stat_start_time_ms_;  // Start time for statistics.
  int current_frames_;  // Number of frames in the current statistics interval.
  int current_bytes_;  // Encoded bytes in the current statistics interval.
  int current_acc_qp_;  // Accumulated QP in the current statistics interval.
  int current_encoding_time_ms_;  // Overall encoding time in the current second
  int64_t last_input_timestamp_ms_;  // Timestamp of last received yuv frame.
  int64_t last_output_timestamp_ms_;  // Timestamp of last encoded frame.
  std::vector<int32_t> timestamps_;  // Video frames timestamp queue.
  std::vector<int64_t> render_times_ms_;  // Video frames render time queue.
  std::vector<int64_t> frame_rtc_times_ms_;  // Time when video frame is sent to
                                             // encoder input.
(...skipping 260 matching lines...)
  height_ = height;
  last_set_bitrate_kbps_ = kbps;
  last_set_fps_ = (fps < MAX_VIDEO_FPS) ? fps : MAX_VIDEO_FPS;
  yuv_size_ = width_ * height_ * 3 / 2;
  frames_received_ = 0;
  frames_encoded_ = 0;
  frames_dropped_media_encoder_ = 0;
  consecutive_full_queue_frame_drops_ = 0;
  frames_in_queue_ = 0;
  current_timestamp_us_ = 0;
- start_time_ms_ = GetCurrentTimeMs();
+ stat_start_time_ms_ = GetCurrentTimeMs();
  current_frames_ = 0;
  current_bytes_ = 0;
  current_acc_qp_ = 0;
  current_encoding_time_ms_ = 0;
  last_input_timestamp_ms_ = -1;
  last_output_timestamp_ms_ = -1;
  output_timestamp_ = 0;
  output_render_time_ms_ = 0;
  timestamps_.clear();
  render_times_ms_.clear();
(...skipping 67 matching lines...)
    const std::vector<webrtc::FrameType>* frame_types) {
  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);

  if (!inited_) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }

  bool send_key_frame = false;
- if (codecType_ == kVideoCodecVP8 && codec_mode_ == webrtc::kRealtimeVideo) {
+ if (codec_mode_ == webrtc::kRealtimeVideo) {
    ++frames_received_since_last_key_;
    int64_t now_ms = GetCurrentTimeMs();
    if (last_frame_received_ms_ != -1 &&
        (now_ms - last_frame_received_ms_) > kFrameDiffThresholdMs) {
      // Add limit to prevent triggering a key for every frame for very low
      // framerates (e.g. if frame diff > kFrameDiffThresholdMs).
      if (frames_received_since_last_key_ > kMinKeyFrameInterval) {
        ALOGD << "Send key, frame diff: " << (now_ms - last_frame_received_ms_);
        send_key_frame = true;
      }
      frames_received_since_last_key_ = 0;
    }
    last_frame_received_ms_ = now_ms;
  }

  frames_received_++;
  if (!DeliverPendingOutputs(jni)) {
    if (!ResetCodecOnCodecThread())
      return WEBRTC_VIDEO_CODEC_ERROR;
  }
  if (frames_encoded_ < kMaxEncodedLogFrames) {
-   ALOGD << "Encoder frame in # " << (frames_received_ - 1) << ". TS: " <<
-       (int)(current_timestamp_us_ / 1000) << ". Q: " << frames_in_queue_ <<
-       ". Fps: " << last_set_fps_ << ". Kbps: " << last_set_bitrate_kbps_;
+   ALOGD << "Encoder frame in # " << (frames_received_ - 1) <<
+       ". TS: " << (int)(current_timestamp_us_ / 1000) <<
+       ". Q: " << frames_in_queue_ <<
+       ". Fps: " << last_set_fps_ <<
+       ". Kbps: " << last_set_bitrate_kbps_;
  }

  if (drop_next_input_frame_) {
    ALOGW << "Encoder drop frame - failed callback.";
    drop_next_input_frame_ = false;
    current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
    frames_dropped_media_encoder_++;
    OnDroppedFrame();
    return WEBRTC_VIDEO_CODEC_OK;
  }

  RTC_CHECK(frame_types->size() == 1) << "Unexpected stream count";

  // Check if we accumulated too many frames in encoder input buffers
  // or the encoder latency exceeds 70 ms and drop frame if so.
  if (frames_in_queue_ > 0 && last_input_timestamp_ms_ >= 0) {
    int encoder_latency_ms = last_input_timestamp_ms_ -
        last_output_timestamp_ms_;
    if (frames_in_queue_ > MAX_ENCODER_Q_SIZE ||
        encoder_latency_ms > MAX_ENCODER_LATENCY_MS) {
-     ALOGD << "Drop frame - encoder is behind by " << encoder_latency_ms <<
-         " ms. Q size: " << frames_in_queue_ << ". Consecutive drops: " <<
-         consecutive_full_queue_frame_drops_;
+     ALOGD << "Drop frame - encoder is behind by " << encoder_latency_ms <<
+         " ms. Q size: " << frames_in_queue_ << ". TS: " <<
+         (int)(current_timestamp_us_ / 1000) << ". Fps: " << last_set_fps_ <<
+         ". Consecutive drops: " << consecutive_full_queue_frame_drops_ ;
      current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
      consecutive_full_queue_frame_drops_++;
      if (consecutive_full_queue_frame_drops_ >=
          ENCODER_STALL_FRAMEDROP_THRESHOLD) {
        ALOGE << "Encoder got stuck. Reset.";
        ResetCodecOnCodecThread();
        return WEBRTC_VIDEO_CODEC_ERROR;
      }
      frames_dropped_media_encoder_++;
      OnDroppedFrame();
(...skipping 88 matching lines...)
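For readers tracing the two gates above: this CL only extends the logging, the decision logic itself is unchanged, so the rules reduce to two small predicates. The sketch below is a standalone illustration under assumed names (KeyFrameGate, ShouldDropFrame) and is not part of the CL; the parameters stand in for kFrameDiffThresholdMs, kMinKeyFrameInterval, MAX_ENCODER_Q_SIZE and MAX_ENCODER_LATENCY_MS.

#include <cstdint>

// Hypothetical helper mirroring the key-frame trigger above: after a long gap
// between input frames, request a key frame, but only if enough frames were
// received since the last key frame (avoids key-framing every frame at very
// low frame rates).
struct KeyFrameGate {
  int64_t last_frame_received_ms = -1;
  int frames_since_last_key = 0;

  bool OnFrame(int64_t now_ms,
               int64_t frame_diff_threshold_ms,
               int min_key_frame_interval) {
    ++frames_since_last_key;
    bool send_key_frame = false;
    if (last_frame_received_ms != -1 &&
        now_ms - last_frame_received_ms > frame_diff_threshold_ms) {
      if (frames_since_last_key > min_key_frame_interval)
        send_key_frame = true;
      frames_since_last_key = 0;
    }
    last_frame_received_ms = now_ms;
    return send_key_frame;
  }
};

// Hypothetical helper mirroring the drop rule above: drop the input frame when
// the encoder queue is too deep or the input-to-output latency exceeds the
// limit (70 ms in this file).
bool ShouldDropFrame(int frames_in_queue,
                     int64_t last_input_timestamp_ms,
                     int64_t last_output_timestamp_ms,
                     int max_queue_size,
                     int max_latency_ms) {
  if (frames_in_queue <= 0 || last_input_timestamp_ms < 0)
    return false;
  const int64_t encoder_latency_ms =
      last_input_timestamp_ms - last_output_timestamp_ms;
  return frames_in_queue > max_queue_size ||
         encoder_latency_ms > max_latency_ms;
}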
  const bool is_texture_frame = frame.native_handle() != nullptr;
  const bool reconfigure_due_to_format = is_texture_frame != use_surface_;
  const bool reconfigure_due_to_size =
      frame.width() != width_ || frame.height() != height_;

  if (reconfigure_due_to_format) {
    ALOGD << "Reconfigure encoder due to format change. "
          << (use_surface_ ?
              "Reconfiguring to encode from byte buffer." :
              "Reconfiguring to encode from texture.");
+   LogStatistics(true);
  }
  if (reconfigure_due_to_size) {
-   ALOGD << "Reconfigure encoder due to frame resolution change from "
+   ALOGW << "Reconfigure encoder due to frame resolution change from "
          << width_ << " x " << height_ << " to " << frame.width() << " x "
          << frame.height();
+   LogStatistics(true);
    width_ = frame.width();
    height_ = frame.height();
  }

  if (!reconfigure_due_to_format && !reconfigure_due_to_size)
    return true;

  ReleaseOnCodecThread();

  return InitEncodeOnCodecThread(width_, height_, 0, 0 , is_texture_frame) ==
(...skipping 171 matching lines...)
      frames_in_queue_--;
    }

    // Extract payload.
    size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer);
    uint8_t* payload = reinterpret_cast<uint8_t*>(
        jni->GetDirectBufferAddress(j_output_buffer));
    CHECK_EXCEPTION(jni);

    if (frames_encoded_ < kMaxEncodedLogFrames) {
-     ALOGD << "Encoder frame out # " << frames_encoded_ << ". Key: " <<
-         key_frame << ". Size: " << payload_size << ". TS: " <<
-         (int)last_output_timestamp_ms_ << ". Latency: " <<
-         (int)(last_input_timestamp_ms_ - last_output_timestamp_ms_) <<
+     int current_latency =
+         (int)(last_input_timestamp_ms_ - last_output_timestamp_ms_);
+     ALOGD << "Encoder frame out # " << frames_encoded_ <<
+         ". Key: " << key_frame <<
+         ". Size: " << payload_size <<
+         ". TS: " << (int)last_output_timestamp_ms_ <<
+         ". Latency: " << current_latency <<
          ". EncTime: " << frame_encoding_time_ms;
    }

    // Callback - return encoded frame.
    int32_t callback_status = 0;
    if (callback_) {
      scoped_ptr<webrtc::EncodedImage> image(
          new webrtc::EncodedImage(payload, payload_size, payload_size));
      image->_encodedWidth = width_;
      image->_encodedHeight = height_;
(...skipping 111 matching lines...)
    if (!success) {
      ResetCodecOnCodecThread();
      return false;
    }

    // Calculate and print encoding statistics - every 3 seconds.
    frames_encoded_++;
    current_frames_++;
    current_bytes_ += payload_size;
    current_encoding_time_ms_ += frame_encoding_time_ms;
-   int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
-   if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
-       current_frames_ > 0) {
-     ALOGD << "Encoded frames: " << frames_encoded_ << ". Bitrate: " <<
-         (current_bytes_ * 8 / statistic_time_ms) <<
-         ", target: " << last_set_bitrate_kbps_ << " kbps, fps: " <<
-         ((current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms)
-         << ", encTime: " <<
-         (current_encoding_time_ms_ / current_frames_) << ". QP: " <<
-         (current_acc_qp_ / current_frames_) << " for last " <<
-         statistic_time_ms << " ms.";
-     start_time_ms_ = GetCurrentTimeMs();
-     current_frames_ = 0;
-     current_bytes_ = 0;
-     current_acc_qp_ = 0;
-     current_encoding_time_ms_ = 0;
-   }
+   LogStatistics(false);

    if (callback_status > 0) {
      drop_next_input_frame_ = true;
      // Theoretically could handle callback_status<0 here, but unclear what
      // that would mean for us.
    }
  }
-
  return true;
}
+
+void MediaCodecVideoEncoder::LogStatistics(bool force_log) {
+  int statistic_time_ms = GetCurrentTimeMs() - stat_start_time_ms_;
+  if ((statistic_time_ms >= kMediaCodecStatisticsIntervalMs || force_log) &&
+      current_frames_ > 0 && statistic_time_ms > 0) {
+    int current_bitrate = current_bytes_ * 8 / statistic_time_ms;
+    int current_fps =
+        (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms;
+    ALOGD << "Encoded frames: " << frames_encoded_ <<
+        ". Bitrate: " << current_bitrate <<
+        ", target: " << last_set_bitrate_kbps_ << " kbps" <<
+        ", fps: " << current_fps <<
+        ", encTime: " << (current_encoding_time_ms_ / current_frames_) <<
+        ". QP: " << (current_acc_qp_ / current_frames_) <<
+        " for last " << statistic_time_ms << " ms.";
+    stat_start_time_ms_ = GetCurrentTimeMs();
+    current_frames_ = 0;
+    current_bytes_ = 0;
+    current_acc_qp_ = 0;
+    current_encoding_time_ms_ = 0;
+  }
+}
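A note on the arithmetic LogStatistics inherits from the old inline block: current_bytes_ * 8 converts bytes to bits, and dividing by the interval length in milliseconds yields kbit/s directly, while the fps expression adds half the interval before dividing so the result rounds to the nearest integer instead of truncating. A minimal standalone check with hypothetical helper names (not part of the CL):

#include <cassert>

int IntervalBitrateKbps(int bytes, int interval_ms) {
  return bytes * 8 / interval_ms;  // bits / ms == kbit/s
}

int IntervalFps(int frames, int interval_ms) {
  return (frames * 1000 + interval_ms / 2) / interval_ms;  // rounded, not truncated
}

int main() {
  // 375000 bytes and 90 frames over a 3000 ms interval:
  assert(IntervalBitrateKbps(375000, 3000) == 1000);  // ~1000 kbps
  assert(IntervalFps(90, 3000) == 30);
  // 89 frames over 3000 ms rounds up to 30; plain truncation would give 29.
  assert(IntervalFps(89, 3000) == 30);
  return 0;
}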

int32_t MediaCodecVideoEncoder::NextNaluPosition(
    uint8_t *buffer, size_t buffer_size) {
  if (buffer_size < H264_SC_LENGTH) {
    return -1;
  }
  uint8_t *head = buffer;
  // Set end buffer pointer to 4 bytes before actual buffer end so we can
  // access head[1], head[2] and head[3] in a loop without buffer overrun.
(...skipping 109 matching lines...)
}

void MediaCodecVideoEncoderFactory::DestroyVideoEncoder(
    webrtc::VideoEncoder* encoder) {
  ALOGD << "Destroy video encoder.";
  delete encoder;
}

}  // namespace webrtc_jni
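For context on NextNaluPosition, which this CL does not change: it scans the encoded payload for H.264 Annex-B start codes, keeping the scan pointer H264_SC_LENGTH bytes away from the end of the buffer so the look-ahead reads stay in bounds. Below is a simplified standalone sketch of the same idea; it uses a hypothetical free function and matches only the three-byte 00 00 01 form, so it is not a drop-in replacement for the member function.

#include <cstddef>
#include <cstdint>

// Returns the offset of the next 00 00 01 start code, or -1 if none is found.
int32_t FindStartCode(const uint8_t* buffer, size_t buffer_size) {
  if (buffer_size < 4)
    return -1;
  const uint8_t* head = buffer;
  // Stop 4 bytes before the end so head[1]..head[3] stay in bounds.
  const uint8_t* end = buffer + buffer_size - 4;
  while (head < end) {
    if (head[0]) {        // A start code must begin with a zero byte.
      head++;
      continue;
    }
    if (head[1]) {        // Neither head nor head + 1 can start 00 00 01.
      head += 2;
      continue;
    }
    if (head[2] != 1) {   // Conservative single-byte advance.
      head++;
      continue;
    }
    return static_cast<int32_t>(head - buffer);
  }
  return -1;
}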
