| OLD | NEW | 
|---|---|
| 1 /* | 1 /* | 
| 2  * libjingle | 2  * libjingle | 
| 3  * Copyright 2015 Google Inc. | 3  * Copyright 2015 Google Inc. | 
| 4  * | 4  * | 
| 5  * Redistribution and use in source and binary forms, with or without | 5  * Redistribution and use in source and binary forms, with or without | 
| 6  * modification, are permitted provided that the following conditions are met: | 6  * modification, are permitted provided that the following conditions are met: | 
| 7  * | 7  * | 
| 8  *  1. Redistributions of source code must retain the above copyright notice, | 8  *  1. Redistributions of source code must retain the above copyright notice, | 
| 9  *     this list of conditions and the following disclaimer. | 9  *     this list of conditions and the following disclaimer. | 
| 10  *  2. Redistributions in binary form must reproduce the above copyright notice, | 10  *  2. Redistributions in binary form must reproduce the above copyright notice, | 
| (...skipping 17 matching lines...) |
| 28 | 28 | 
| 29 #include <vector> | 29 #include <vector> | 
| 30 | 30 | 
| 31 #include "talk/app/webrtc/java/jni/androidmediadecoder_jni.h" | 31 #include "talk/app/webrtc/java/jni/androidmediadecoder_jni.h" | 
| 32 #include "talk/app/webrtc/java/jni/androidmediacodeccommon.h" | 32 #include "talk/app/webrtc/java/jni/androidmediacodeccommon.h" | 
| 33 #include "talk/app/webrtc/java/jni/classreferenceholder.h" | 33 #include "talk/app/webrtc/java/jni/classreferenceholder.h" | 
| 34 #include "talk/app/webrtc/java/jni/native_handle_impl.h" | 34 #include "talk/app/webrtc/java/jni/native_handle_impl.h" | 
| 35 #include "webrtc/base/bind.h" | 35 #include "webrtc/base/bind.h" | 
| 36 #include "webrtc/base/checks.h" | 36 #include "webrtc/base/checks.h" | 
| 37 #include "webrtc/base/logging.h" | 37 #include "webrtc/base/logging.h" | 
|  | 38 #include "webrtc/base/scoped_ref_ptr.h" | 
| 38 #include "webrtc/base/thread.h" | 39 #include "webrtc/base/thread.h" | 
|  | 40 #include "webrtc/common_video/interface/i420_buffer_pool.h" | 
| 39 #include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h" | 41 #include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h" | 
| 40 #include "webrtc/system_wrappers/interface/logcat_trace_context.h" | 42 #include "webrtc/system_wrappers/interface/logcat_trace_context.h" | 
| 41 #include "webrtc/system_wrappers/interface/tick_util.h" | 43 #include "webrtc/system_wrappers/interface/tick_util.h" | 
| 42 #include "third_party/libyuv/include/libyuv/convert.h" | 44 #include "third_party/libyuv/include/libyuv/convert.h" | 
| 43 #include "third_party/libyuv/include/libyuv/convert_from.h" | 45 #include "third_party/libyuv/include/libyuv/convert_from.h" | 
| 44 #include "third_party/libyuv/include/libyuv/video_common.h" | 46 #include "third_party/libyuv/include/libyuv/video_common.h" | 
| 45 | 47 | 
| 46 using rtc::Bind; | 48 using rtc::Bind; | 
| 47 using rtc::Thread; | 49 using rtc::Thread; | 
| 48 using rtc::ThreadManager; | 50 using rtc::ThreadManager; | 
| (...skipping 50 matching lines...) |
| 99   int32_t ProcessHWErrorOnCodecThread(); | 101   int32_t ProcessHWErrorOnCodecThread(); | 
| 100 | 102 | 
| 101   // Type of video codec. | 103   // Type of video codec. | 
| 102   VideoCodecType codecType_; | 104   VideoCodecType codecType_; | 
| 103 | 105 | 
| 104   bool key_frame_required_; | 106   bool key_frame_required_; | 
| 105   bool inited_; | 107   bool inited_; | 
| 106   bool sw_fallback_required_; | 108   bool sw_fallback_required_; | 
| 107   bool use_surface_; | 109   bool use_surface_; | 
| 108   VideoCodec codec_; | 110   VideoCodec codec_; | 
| 109   VideoFrame decoded_image_; | 111   webrtc::I420BufferPool decoded_frame_pool_; | 
| 110   NativeHandleImpl native_handle_; | 112   NativeHandleImpl native_handle_; | 
| 111   DecodedImageCallback* callback_; | 113   DecodedImageCallback* callback_; | 
| 112   int frames_received_;  // Number of frames received by decoder. | 114   int frames_received_;  // Number of frames received by decoder. | 
| 113   int frames_decoded_;  // Number of frames decoded by decoder. | 115   int frames_decoded_;  // Number of frames decoded by decoder. | 
| 114   int64_t start_time_ms_;  // Start time for statistics. | 116   int64_t start_time_ms_;  // Start time for statistics. | 
| 115   int current_frames_;  // Number of frames in the current statistics interval. | 117   int current_frames_;  // Number of frames in the current statistics interval. | 
| 116   int current_bytes_;  // Encoded bytes in the current statistics interval. | 118   int current_bytes_;  // Encoded bytes in the current statistics interval. | 
| 117   int current_decoding_time_ms_;  // Overall decoding time in the current second | 119   int current_decoding_time_ms_;  // Overall decoding time in the current second | 
| 118   uint32_t max_pending_frames_;  // Maximum number of pending input frames | 120   uint32_t max_pending_frames_;  // Maximum number of pending input frames | 
| 119   std::vector<int32_t> timestamps_; | 121   std::vector<int32_t> timestamps_; | 
| 120   std::vector<int64_t> ntp_times_ms_; | 122   std::vector<int64_t> ntp_times_ms_; | 
| 121   std::vector<int64_t> frame_rtc_times_ms_;  // Time when video frame is sent to | 123   std::vector<int64_t> frame_rtc_times_ms_;  // Time when video frame is sent to | 
| 122                                              // decoder input. | 124                                              // decoder input. | 
| 123   int32_t output_timestamp_;  // Last output frame timestamp from timestamps_ Q. |  | 
| 124   int64_t output_ntp_time_ms_; // Last output frame ntp time from |  | 
| 125                                // ntp_times_ms_ queue. |  | 
| 126 | 125 | 
| 127   // State that is constant for the lifetime of this object once the ctor | 126   // State that is constant for the lifetime of this object once the ctor | 
| 128   // returns. | 127   // returns. | 
| 129   scoped_ptr<Thread> codec_thread_;  // Thread on which to operate MediaCodec. | 128   scoped_ptr<Thread> codec_thread_;  // Thread on which to operate MediaCodec. | 
| 130   ScopedGlobalRef<jclass> j_media_codec_video_decoder_class_; | 129   ScopedGlobalRef<jclass> j_media_codec_video_decoder_class_; | 
| 131   ScopedGlobalRef<jobject> j_media_codec_video_decoder_; | 130   ScopedGlobalRef<jobject> j_media_codec_video_decoder_; | 
| 132   jmethodID j_init_decode_method_; | 131   jmethodID j_init_decode_method_; | 
| 133   jmethodID j_release_method_; | 132   jmethodID j_release_method_; | 
| 134   jmethodID j_dequeue_input_buffer_method_; | 133   jmethodID j_dequeue_input_buffer_method_; | 
| 135   jmethodID j_queue_input_buffer_method_; | 134   jmethodID j_queue_input_buffer_method_; | 
| (...skipping 188 matching lines...) |
| 324     case kVideoCodecH264: | 323     case kVideoCodecH264: | 
| 325       max_pending_frames_ = kMaxPendingFramesH264; | 324       max_pending_frames_ = kMaxPendingFramesH264; | 
| 326       break; | 325       break; | 
| 327     default: | 326     default: | 
| 328       max_pending_frames_ = 0; | 327       max_pending_frames_ = 0; | 
| 329   } | 328   } | 
| 330   start_time_ms_ = GetCurrentTimeMs(); | 329   start_time_ms_ = GetCurrentTimeMs(); | 
| 331   current_frames_ = 0; | 330   current_frames_ = 0; | 
| 332   current_bytes_ = 0; | 331   current_bytes_ = 0; | 
| 333   current_decoding_time_ms_ = 0; | 332   current_decoding_time_ms_ = 0; | 
| 334   output_timestamp_ = 0; |  | 
| 335   output_ntp_time_ms_ = 0; |  | 
| 336   timestamps_.clear(); | 333   timestamps_.clear(); | 
| 337   ntp_times_ms_.clear(); | 334   ntp_times_ms_.clear(); | 
| 338   frame_rtc_times_ms_.clear(); | 335   frame_rtc_times_ms_.clear(); | 
| 339 | 336 | 
| 340   jobjectArray input_buffers = (jobjectArray)GetObjectField( | 337   jobjectArray input_buffers = (jobjectArray)GetObjectField( | 
| 341       jni, *j_media_codec_video_decoder_, j_input_buffers_field_); | 338       jni, *j_media_codec_video_decoder_, j_input_buffers_field_); | 
| 342   size_t num_input_buffers = jni->GetArrayLength(input_buffers); | 339   size_t num_input_buffers = jni->GetArrayLength(input_buffers); | 
| 343   input_buffers_.resize(num_input_buffers); | 340   input_buffers_.resize(num_input_buffers); | 
| 344   for (size_t i = 0; i < num_input_buffers; ++i) { | 341   for (size_t i = 0; i < num_input_buffers; ++i) { | 
| 345     input_buffers_[i] = | 342     input_buffers_[i] = | 
| (...skipping 247 matching lines...) |
| 593   int color_format = GetIntField(jni, *j_media_codec_video_decoder_, | 590   int color_format = GetIntField(jni, *j_media_codec_video_decoder_, | 
| 594       j_color_format_field_); | 591       j_color_format_field_); | 
| 595   int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_); | 592   int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_); | 
| 596   int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_); | 593   int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_); | 
| 597   int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_); | 594   int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_); | 
| 598   int slice_height = GetIntField(jni, *j_media_codec_video_decoder_, | 595   int slice_height = GetIntField(jni, *j_media_codec_video_decoder_, | 
| 599       j_slice_height_field_); | 596       j_slice_height_field_); | 
| 600   int texture_id = GetIntField(jni, *j_media_codec_video_decoder_, | 597   int texture_id = GetIntField(jni, *j_media_codec_video_decoder_, | 
| 601       j_textureID_field_); | 598       j_textureID_field_); | 
| 602 | 599 | 
| 603   // Extract data from Java ByteBuffer and create output yuv420 frame - | 600   rtc::scoped_refptr<webrtc::VideoFrameBuffer> frame_buffer; | 
| 604   // for non surface decoding only. | 601   if (use_surface_) { | 
| 605   if (!use_surface_) { | 602     native_handle_.SetTextureObject(surface_texture_, texture_id); | 
|  | 603     frame_buffer = new rtc::RefCountedObject<JniNativeHandleBuffer>( | 
|  | 604         &native_handle_, width, height); | 
|  | 605   } else { | 
|  | 606     // Extract data from Java ByteBuffer and create output yuv420 frame - | 
|  | 607     // for non surface decoding only. | 
| 606     if (output_buffer_size < width * height * 3 / 2) { | 608     if (output_buffer_size < width * height * 3 / 2) { | 
| 607       ALOGE("Insufficient output buffer size: %d", output_buffer_size); | 609       ALOGE("Insufficient output buffer size: %d", output_buffer_size); | 
| 608       return false; | 610       return false; | 
| 609     } | 611     } | 
| 610     jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField( | 612     jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField( | 
| 611         jni, *j_media_codec_video_decoder_, j_output_buffers_field_)); | 613         jni, *j_media_codec_video_decoder_, j_output_buffers_field_)); | 
| 612     jobject output_buffer = | 614     jobject output_buffer = | 
| 613         jni->GetObjectArrayElement(output_buffers, output_buffer_index); | 615         jni->GetObjectArrayElement(output_buffers, output_buffer_index); | 
| 614     uint8_t* payload = reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress( | 616     uint8_t* payload = reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress( | 
| 615         output_buffer)); | 617         output_buffer)); | 
| 616     if (CheckException(jni)) { | 618     if (CheckException(jni)) { | 
| 617       return false; | 619       return false; | 
| 618     } | 620     } | 
| 619     payload += output_buffer_offset; | 621     payload += output_buffer_offset; | 
| 620 | 622 | 
| 621     // Create yuv420 frame. | 623     // Create yuv420 frame. | 
|  | 624     frame_buffer = decoded_frame_pool_.CreateBuffer(width, height); | 
| 622     if (color_format == COLOR_FormatYUV420Planar) { | 625     if (color_format == COLOR_FormatYUV420Planar) { | 
| 623       decoded_image_.CreateFrame( | 626       RTC_CHECK_EQ(0, stride % 2); | 
| 624           payload, | 627       RTC_CHECK_EQ(0, slice_height % 2); | 
| 625           payload + (stride * slice_height), | 628       const int uv_stride = stride / 2; | 
| 626           payload + (5 * stride * slice_height / 4), | 629       const int u_slice_height = slice_height / 2; | 
| 627           width, height, | 630       const uint8_t* y_ptr = payload; | 
| 628           stride, stride / 2, stride / 2); | 631       const uint8_t* u_ptr = y_ptr + stride * slice_height; | 
|  | 632       const uint8_t* v_ptr = u_ptr + uv_stride * u_slice_height; | 
|  | 633       libyuv::I420Copy(y_ptr, stride, | 
|  | 634                        u_ptr, uv_stride, | 
|  | 635                        v_ptr, uv_stride, | 
|  | 636                        frame_buffer->MutableData(webrtc::kYPlane), | 
|  | 637                        frame_buffer->stride(webrtc::kYPlane), | 
|  | 638                        frame_buffer->MutableData(webrtc::kUPlane), | 
|  | 639                        frame_buffer->stride(webrtc::kUPlane), | 
|  | 640                        frame_buffer->MutableData(webrtc::kVPlane), | 
|  | 641                        frame_buffer->stride(webrtc::kVPlane), | 
|  | 642                        width, height); | 
| 629     } else { | 643     } else { | 
| 630       // All other supported formats are nv12. | 644       // All other supported formats are nv12. | 
| 631       decoded_image_.CreateEmptyFrame(width, height, width, | 645       const uint8_t* y_ptr = payload; | 
| 632           width / 2, width / 2); | 646       const uint8_t* uv_ptr = y_ptr + stride * slice_height; | 
| 633       libyuv::NV12ToI420( | 647       libyuv::NV12ToI420( | 
| 634           payload, stride, | 648           y_ptr, stride, | 
| 635           payload + stride * slice_height, stride, | 649           uv_ptr, stride, | 
| 636           decoded_image_.buffer(webrtc::kYPlane), | 650           frame_buffer->MutableData(webrtc::kYPlane), | 
| 637           decoded_image_.stride(webrtc::kYPlane), | 651           frame_buffer->stride(webrtc::kYPlane), | 
| 638           decoded_image_.buffer(webrtc::kUPlane), | 652           frame_buffer->MutableData(webrtc::kUPlane), | 
| 639           decoded_image_.stride(webrtc::kUPlane), | 653           frame_buffer->stride(webrtc::kUPlane), | 
| 640           decoded_image_.buffer(webrtc::kVPlane), | 654           frame_buffer->MutableData(webrtc::kVPlane), | 
| 641           decoded_image_.stride(webrtc::kVPlane), | 655           frame_buffer->stride(webrtc::kVPlane), | 
| 642           width, height); | 656           width, height); | 
| 643     } | 657     } | 
| 644   } | 658   } | 
|  | 659   VideoFrame decoded_frame(frame_buffer, 0, 0, webrtc::kVideoRotation_0); | 
| 645 | 660 | 
| 646   // Get frame timestamps from a queue. | 661   // Get frame timestamps from a queue. | 
| 647   if (timestamps_.size() > 0) { | 662   if (timestamps_.size() > 0) { | 
| 648     output_timestamp_ = timestamps_.front(); | 663     decoded_frame.set_timestamp(timestamps_.front()); | 
| 649     timestamps_.erase(timestamps_.begin()); | 664     timestamps_.erase(timestamps_.begin()); | 
| 650   } | 665   } | 
| 651   if (ntp_times_ms_.size() > 0) { | 666   if (ntp_times_ms_.size() > 0) { | 
| 652     output_ntp_time_ms_ = ntp_times_ms_.front(); | 667     decoded_frame.set_ntp_time_ms(ntp_times_ms_.front()); | 
| 653     ntp_times_ms_.erase(ntp_times_ms_.begin()); | 668     ntp_times_ms_.erase(ntp_times_ms_.begin()); | 
| 654   } | 669   } | 
| 655   int64_t frame_decoding_time_ms = 0; | 670   int64_t frame_decoding_time_ms = 0; | 
| 656   if (frame_rtc_times_ms_.size() > 0) { | 671   if (frame_rtc_times_ms_.size() > 0) { | 
| 657     frame_decoding_time_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front(); | 672     frame_decoding_time_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front(); | 
| 658     frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin()); | 673     frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin()); | 
| 659   } | 674   } | 
| 660   ALOGV("Decoder frame out # %d. %d x %d. %d x %d. Color: 0x%x. TS: %ld." | 675   ALOGV("Decoder frame out # %d. %d x %d. %d x %d. Color: 0x%x. TS: %ld." | 
| 661       " DecTime: %lld", frames_decoded_, width, height, stride, slice_height, | 676       " DecTime: %lld", frames_decoded_, width, height, stride, slice_height, | 
| 662       color_format, output_timestamps_ms, frame_decoding_time_ms); | 677       color_format, output_timestamps_ms, frame_decoding_time_ms); | 
| (...skipping 19 matching lines...) |
| 682         current_bytes_ * 8 / statistic_time_ms, | 697         current_bytes_ * 8 / statistic_time_ms, | 
| 683         (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms, | 698         (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms, | 
| 684         current_decoding_time_ms_ / current_frames_, statistic_time_ms); | 699         current_decoding_time_ms_ / current_frames_, statistic_time_ms); | 
| 685     start_time_ms_ = GetCurrentTimeMs(); | 700     start_time_ms_ = GetCurrentTimeMs(); | 
| 686     current_frames_ = 0; | 701     current_frames_ = 0; | 
| 687     current_bytes_ = 0; | 702     current_bytes_ = 0; | 
| 688     current_decoding_time_ms_ = 0; | 703     current_decoding_time_ms_ = 0; | 
| 689   } | 704   } | 
| 690 | 705 | 
| 691   // Callback - output decoded frame. | 706   // Callback - output decoded frame. | 
| 692   int32_t callback_status = WEBRTC_VIDEO_CODEC_OK; | 707   const int32_t callback_status = callback_->Decoded(decoded_frame); | 
| 693   if (use_surface_) { |  | 
| 694     native_handle_.SetTextureObject(surface_texture_, texture_id); |  | 
| 695     VideoFrame texture_image(new rtc::RefCountedObject<JniNativeHandleBuffer>( |  | 
| 696                                  &native_handle_, width, height), |  | 
| 697                              output_timestamp_, 0, webrtc::kVideoRotation_0); |  | 
| 698     texture_image.set_ntp_time_ms(output_ntp_time_ms_); |  | 
| 699     callback_status = callback_->Decoded(texture_image); |  | 
| 700   } else { |  | 
| 701     decoded_image_.set_timestamp(output_timestamp_); |  | 
| 702     decoded_image_.set_ntp_time_ms(output_ntp_time_ms_); |  | 
| 703     callback_status = callback_->Decoded(decoded_image_); |  | 
| 704   } |  | 
| 705   if (callback_status > 0) { | 708   if (callback_status > 0) { | 
| 706     ALOGE("callback error"); | 709     ALOGE("callback error"); | 
| 707   } | 710   } | 
| 708 | 711 | 
| 709   return true; | 712   return true; | 
| 710 } | 713 } | 
| 711 | 714 | 
| 712 int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback( | 715 int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback( | 
| 713     DecodedImageCallback* callback) { | 716     DecodedImageCallback* callback) { | 
| 714   callback_ = callback; | 717   callback_ = callback; | 
| (...skipping 111 matching lines...) |
| 826   return NULL; | 829   return NULL; | 
| 827 } | 830 } | 
| 828 | 831 | 
| 829 void MediaCodecVideoDecoderFactory::DestroyVideoDecoder( | 832 void MediaCodecVideoDecoderFactory::DestroyVideoDecoder( | 
| 830     webrtc::VideoDecoder* decoder) { | 833     webrtc::VideoDecoder* decoder) { | 
| 831   delete decoder; | 834   delete decoder; | 
| 832 } | 835 } | 
| 833 | 836 | 
| 834 }  // namespace webrtc_jni | 837 }  // namespace webrtc_jni | 
| 835 | 838 | 
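
The core of this CL, pulled out of the diff for reviewers: the long-lived `decoded_image_` member is replaced by per-frame buffers from `webrtc::I420BufferPool`, MediaCodec's byte-buffer output is converted with libyuv, and the result is wrapped in a `VideoFrame` that carries its own timestamp and NTP time, which is what lets the `output_timestamp_` and `output_ntp_time_ms_` members go away. Below is a minimal sketch of the new NV12 path, using only the calls the CL itself makes; the helper name `WrapNv12Output` is hypothetical, for illustration only.

```cpp
#include "third_party/libyuv/include/libyuv/convert.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/common_video/interface/i420_buffer_pool.h"

// Hypothetical helper (not part of the CL): copy one NV12 MediaCodec output
// buffer into a pooled I420 buffer, mirroring what DeliverPendingOutputs()
// now does in the non-surface branch.
rtc::scoped_refptr<webrtc::VideoFrameBuffer> WrapNv12Output(
    webrtc::I420BufferPool* pool,
    const uint8_t* payload,  // Already advanced by output_buffer_offset.
    int width, int height, int stride, int slice_height) {
  // The pool recycles allocations across frames, so each Decoded()
  // callback gets its own buffer instead of sharing one member frame.
  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
      pool->CreateBuffer(width, height);
  const uint8_t* y_ptr = payload;
  const uint8_t* uv_ptr = y_ptr + stride * slice_height;  // Interleaved UV.
  libyuv::NV12ToI420(y_ptr, stride,
                     uv_ptr, stride,
                     buffer->MutableData(webrtc::kYPlane),
                     buffer->stride(webrtc::kYPlane),
                     buffer->MutableData(webrtc::kUPlane),
                     buffer->stride(webrtc::kUPlane),
                     buffer->MutableData(webrtc::kVPlane),
                     buffer->stride(webrtc::kVPlane),
                     width, height);
  return buffer;
}
```

The pooled buffer then feeds `VideoFrame decoded_frame(frame_buffer, 0, 0, webrtc::kVideoRotation_0)`, and the RTP and NTP timestamps are popped from the `timestamps_` and `ntp_times_ms_` queues directly onto that frame before the single `callback_->Decoded(decoded_frame)` call that now serves both the surface and byte-buffer paths.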