OLD | NEW |
1 /* | 1 /* |
2 * libjingle | 2 * libjingle |
3 * Copyright 2015 Google Inc. | 3 * Copyright 2015 Google Inc. |
4 * | 4 * |
5 * Redistribution and use in source and binary forms, with or without | 5 * Redistribution and use in source and binary forms, with or without |
6 * modification, are permitted provided that the following conditions are met: | 6 * modification, are permitted provided that the following conditions are met: |
7 * | 7 * |
8 * 1. Redistributions of source code must retain the above copyright notice, | 8 * 1. Redistributions of source code must retain the above copyright notice, |
9 * this list of conditions and the following disclaimer. | 9 * this list of conditions and the following disclaimer. |
10 * 2. Redistributions in binary form must reproduce the above copyright notice, | 10 * 2. Redistributions in binary form must reproduce the above copyright notice, |
(...skipping 11 matching lines...) Expand all Loading... |
22 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, | 22 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, |
23 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR | 23 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR |
24 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF | 24 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF |
25 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 25 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
26 * | 26 * |
27 */ | 27 */ |
28 | 28 |
29 #include "talk/app/webrtc/java/jni/androidmediaencoder_jni.h" | 29 #include "talk/app/webrtc/java/jni/androidmediaencoder_jni.h" |
30 #include "talk/app/webrtc/java/jni/classreferenceholder.h" | 30 #include "talk/app/webrtc/java/jni/classreferenceholder.h" |
31 #include "talk/app/webrtc/java/jni/androidmediacodeccommon.h" | 31 #include "talk/app/webrtc/java/jni/androidmediacodeccommon.h" |
| 32 #include "talk/app/webrtc/java/jni/native_handle_impl.h" |
32 #include "webrtc/base/bind.h" | 33 #include "webrtc/base/bind.h" |
33 #include "webrtc/base/checks.h" | 34 #include "webrtc/base/checks.h" |
34 #include "webrtc/base/logging.h" | 35 #include "webrtc/base/logging.h" |
35 #include "webrtc/base/thread.h" | 36 #include "webrtc/base/thread.h" |
36 #include "webrtc/base/thread_checker.h" | 37 #include "webrtc/base/thread_checker.h" |
37 #include "webrtc/modules/rtp_rtcp/source/h264_bitstream_parser.h" | 38 #include "webrtc/modules/rtp_rtcp/source/h264_bitstream_parser.h" |
38 #include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h" | 39 #include "webrtc/modules/video_coding/include/video_codec_interface.h" |
39 #include "webrtc/modules/video_coding/utility/quality_scaler.h" | 40 #include "webrtc/modules/video_coding/utility/quality_scaler.h" |
40 #include "webrtc/modules/video_coding/utility/vp8_header_parser.h" | 41 #include "webrtc/modules/video_coding/utility/vp8_header_parser.h" |
41 #include "webrtc/system_wrappers/include/field_trial.h" | 42 #include "webrtc/system_wrappers/include/field_trial.h" |
42 #include "webrtc/system_wrappers/include/logcat_trace_context.h" | 43 #include "webrtc/system_wrappers/include/logcat_trace_context.h" |
43 #include "third_party/libyuv/include/libyuv/convert.h" | 44 #include "third_party/libyuv/include/libyuv/convert.h" |
44 #include "third_party/libyuv/include/libyuv/convert_from.h" | 45 #include "third_party/libyuv/include/libyuv/convert_from.h" |
45 #include "third_party/libyuv/include/libyuv/video_common.h" | 46 #include "third_party/libyuv/include/libyuv/video_common.h" |
46 | 47 |
47 using rtc::Bind; | 48 using rtc::Bind; |
48 using rtc::Thread; | 49 using rtc::Thread; |
(...skipping 26 matching lines...) Expand all Loading... |
75 // Android's MediaCodec SDK API behind the scenes to implement (hopefully) | 76 // Android's MediaCodec SDK API behind the scenes to implement (hopefully) |
76 // HW-backed video encode. This C++ class is implemented as a very thin shim, | 77 // HW-backed video encode. This C++ class is implemented as a very thin shim, |
77 // delegating all of the interesting work to org.webrtc.MediaCodecVideoEncoder. | 78 // delegating all of the interesting work to org.webrtc.MediaCodecVideoEncoder. |
78 // MediaCodecVideoEncoder is created, operated, and destroyed on a single | 79 // MediaCodecVideoEncoder is created, operated, and destroyed on a single |
79 // thread, currently the libjingle Worker thread. | 80 // thread, currently the libjingle Worker thread. |
80 class MediaCodecVideoEncoder : public webrtc::VideoEncoder, | 81 class MediaCodecVideoEncoder : public webrtc::VideoEncoder, |
81 public rtc::MessageHandler { | 82 public rtc::MessageHandler { |
82 public: | 83 public: |
83 virtual ~MediaCodecVideoEncoder(); | 84 virtual ~MediaCodecVideoEncoder(); |
84 MediaCodecVideoEncoder(JNIEnv* jni, | 85 MediaCodecVideoEncoder(JNIEnv* jni, |
85 VideoCodecType codecType); | 86 VideoCodecType codecType, |
| 87 jobject egl_context); |
86 | 88 |
87 // webrtc::VideoEncoder implementation. Everything trampolines to | 89 // webrtc::VideoEncoder implementation. Everything trampolines to |
88 // |codec_thread_| for execution. | 90 // |codec_thread_| for execution. |
89 int32_t InitEncode(const webrtc::VideoCodec* codec_settings, | 91 int32_t InitEncode(const webrtc::VideoCodec* codec_settings, |
90 int32_t /* number_of_cores */, | 92 int32_t /* number_of_cores */, |
91 size_t /* max_payload_size */) override; | 93 size_t /* max_payload_size */) override; |
92 int32_t Encode(const webrtc::VideoFrame& input_image, | 94 int32_t Encode(const webrtc::VideoFrame& input_image, |
93 const webrtc::CodecSpecificInfo* /* codec_specific_info */, | 95 const webrtc::CodecSpecificInfo* /* codec_specific_info */, |
94 const std::vector<webrtc::FrameType>* frame_types) override; | 96 const std::vector<webrtc::FrameType>* frame_types) override; |
95 int32_t RegisterEncodeCompleteCallback( | 97 int32_t RegisterEncodeCompleteCallback( |
96 webrtc::EncodedImageCallback* callback) override; | 98 webrtc::EncodedImageCallback* callback) override; |
97 int32_t Release() override; | 99 int32_t Release() override; |
98 int32_t SetChannelParameters(uint32_t /* packet_loss */, | 100 int32_t SetChannelParameters(uint32_t /* packet_loss */, |
99 int64_t /* rtt */) override; | 101 int64_t /* rtt */) override; |
100 int32_t SetRates(uint32_t new_bit_rate, uint32_t frame_rate) override; | 102 int32_t SetRates(uint32_t new_bit_rate, uint32_t frame_rate) override; |
101 | 103 |
102 // rtc::MessageHandler implementation. | 104 // rtc::MessageHandler implementation. |
103 void OnMessage(rtc::Message* msg) override; | 105 void OnMessage(rtc::Message* msg) override; |
104 | 106 |
105 void OnDroppedFrame() override; | 107 void OnDroppedFrame() override; |
106 | 108 |
107 int GetTargetFramerate() override; | 109 int GetTargetFramerate() override; |
108 | 110 |
| 111 bool SupportsNativeHandle() const override { return true; } |
| 112 |
109 private: | 113 private: |
110 // ResetCodecOnCodecThread() calls ReleaseOnCodecThread() and | 114 // ResetCodecOnCodecThread() calls ReleaseOnCodecThread() and |
111 // InitEncodeOnCodecThread() in an attempt to restore the codec to an | 115 // InitEncodeOnCodecThread() in an attempt to restore the codec to an |
112 // operable state. Necessary after all manner of OMX-layer errors. | 116 // operable state. Necessary after all manner of OMX-layer errors. |
113 bool ResetCodecOnCodecThread(); | 117 bool ResetCodecOnCodecThread(); |
114 | 118 |
115 // Implementation of webrtc::VideoEncoder methods above, all running on the | 119 // Implementation of webrtc::VideoEncoder methods above, all running on the |
116 // codec thread exclusively. | 120 // codec thread exclusively. |
117 // | 121 // |
118 // If width==0 then this is assumed to be a re-initialization and the | 122 // If width==0 then this is assumed to be a re-initialization and the |
119 // previously-current values are reused instead of the passed parameters | 123 // previously-current values are reused instead of the passed parameters |
120 // (makes it easier to reason about thread-safety). | 124 // (makes it easier to reason about thread-safety). |
121 int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps); | 125 int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps, |
122 // Reconfigure to match |frame| in width, height. Returns false if | 126 bool use_surface); |
123 // reconfiguring fails. | 127 // Reconfigure to match |frame| in width, height. Also reconfigures the |
| 128 // encoder if |frame| is a texture/byte buffer and the encoder is initialized |
| 129 // for byte buffer/texture. Returns false if reconfiguring fails. |
124 bool MaybeReconfigureEncoderOnCodecThread(const webrtc::VideoFrame& frame); | 130 bool MaybeReconfigureEncoderOnCodecThread(const webrtc::VideoFrame& frame); |
125 int32_t EncodeOnCodecThread( | 131 int32_t EncodeOnCodecThread( |
126 const webrtc::VideoFrame& input_image, | 132 const webrtc::VideoFrame& input_image, |
127 const std::vector<webrtc::FrameType>* frame_types); | 133 const std::vector<webrtc::FrameType>* frame_types); |
128 bool EncodeByteBufferOnCodecThread(JNIEnv* jni, | 134 bool EncodeByteBufferOnCodecThread(JNIEnv* jni, |
129 bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index); | 135 bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index); |
| 136 bool EncodeTextureOnCodecThread(JNIEnv* jni, |
| 137 bool key_frame, const webrtc::VideoFrame& frame); |
130 | 138 |
131 int32_t RegisterEncodeCompleteCallbackOnCodecThread( | 139 int32_t RegisterEncodeCompleteCallbackOnCodecThread( |
132 webrtc::EncodedImageCallback* callback); | 140 webrtc::EncodedImageCallback* callback); |
133 int32_t ReleaseOnCodecThread(); | 141 int32_t ReleaseOnCodecThread(); |
134 int32_t SetRatesOnCodecThread(uint32_t new_bit_rate, uint32_t frame_rate); | 142 int32_t SetRatesOnCodecThread(uint32_t new_bit_rate, uint32_t frame_rate); |
135 | 143 |
136 // Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members. | 144 // Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members. |
137 int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info); | 145 int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info); |
138 jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info); | 146 jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info); |
139 bool GetOutputBufferInfoIsKeyFrame(JNIEnv* jni, jobject j_output_buffer_info); | 147 bool GetOutputBufferInfoIsKeyFrame(JNIEnv* jni, jobject j_output_buffer_info); |
(...skipping 17 matching lines...) Expand all Loading... |
157 // State that is constant for the lifetime of this object once the ctor | 165 // State that is constant for the lifetime of this object once the ctor |
158 // returns. | 166 // returns. |
159 scoped_ptr<Thread> codec_thread_; // Thread on which to operate MediaCodec. | 167 scoped_ptr<Thread> codec_thread_; // Thread on which to operate MediaCodec. |
160 rtc::ThreadChecker codec_thread_checker_; | 168 rtc::ThreadChecker codec_thread_checker_; |
161 ScopedGlobalRef<jclass> j_media_codec_video_encoder_class_; | 169 ScopedGlobalRef<jclass> j_media_codec_video_encoder_class_; |
162 ScopedGlobalRef<jobject> j_media_codec_video_encoder_; | 170 ScopedGlobalRef<jobject> j_media_codec_video_encoder_; |
163 jmethodID j_init_encode_method_; | 171 jmethodID j_init_encode_method_; |
164 jmethodID j_get_input_buffers_method_; | 172 jmethodID j_get_input_buffers_method_; |
165 jmethodID j_dequeue_input_buffer_method_; | 173 jmethodID j_dequeue_input_buffer_method_; |
166 jmethodID j_encode_buffer_method_; | 174 jmethodID j_encode_buffer_method_; |
| 175 jmethodID j_encode_texture_method_; |
167 jmethodID j_release_method_; | 176 jmethodID j_release_method_; |
168 jmethodID j_set_rates_method_; | 177 jmethodID j_set_rates_method_; |
169 jmethodID j_dequeue_output_buffer_method_; | 178 jmethodID j_dequeue_output_buffer_method_; |
170 jmethodID j_release_output_buffer_method_; | 179 jmethodID j_release_output_buffer_method_; |
171 jfieldID j_color_format_field_; | 180 jfieldID j_color_format_field_; |
172 jfieldID j_info_index_field_; | 181 jfieldID j_info_index_field_; |
173 jfieldID j_info_buffer_field_; | 182 jfieldID j_info_buffer_field_; |
174 jfieldID j_info_is_key_frame_field_; | 183 jfieldID j_info_is_key_frame_field_; |
175 jfieldID j_info_presentation_timestamp_us_field_; | 184 jfieldID j_info_presentation_timestamp_us_field_; |
176 | 185 |
177 // State that is valid only between InitEncode() and the next Release(). | 186 // State that is valid only between InitEncode() and the next Release(). |
178 // Touched only on codec_thread_ so no explicit synchronization necessary. | 187 // Touched only on codec_thread_ so no explicit synchronization necessary. |
179 int width_; // Frame width in pixels. | 188 int width_; // Frame width in pixels. |
180 int height_; // Frame height in pixels. | 189 int height_; // Frame height in pixels. |
181 bool inited_; | 190 bool inited_; |
| 191 bool use_surface_; |
182 uint16_t picture_id_; | 192 uint16_t picture_id_; |
183 enum libyuv::FourCC encoder_fourcc_; // Encoder color space format. | 193 enum libyuv::FourCC encoder_fourcc_; // Encoder color space format. |
184 int last_set_bitrate_kbps_; // Last-requested bitrate in kbps. | 194 int last_set_bitrate_kbps_; // Last-requested bitrate in kbps. |
185 int last_set_fps_; // Last-requested frame rate. | 195 int last_set_fps_; // Last-requested frame rate. |
186 int64_t current_timestamp_us_; // Current frame timestamps in us. | 196 int64_t current_timestamp_us_; // Current frame timestamps in us. |
187 int frames_received_; // Number of frames received by encoder. | 197 int frames_received_; // Number of frames received by encoder. |
188 int frames_encoded_; // Number of frames encoded by encoder. | 198 int frames_encoded_; // Number of frames encoded by encoder. |
189 int frames_dropped_; // Number of frames dropped by encoder. | 199 int frames_dropped_; // Number of frames dropped by encoder. |
190 int frames_in_queue_; // Number of frames in encoder queue. | 200 int frames_in_queue_; // Number of frames in encoder queue. |
191 int64_t start_time_ms_; // Start time for statistics. | 201 int64_t start_time_ms_; // Start time for statistics. |
(...skipping 21 matching lines...) Expand all Loading... |
213 bool scale_; | 223 bool scale_; |
214 | 224 |
215 // H264 bitstream parser, used to extract QP from encoded bitstreams. | 225 // H264 bitstream parser, used to extract QP from encoded bitstreams. |
216 webrtc::H264BitstreamParser h264_bitstream_parser_; | 226 webrtc::H264BitstreamParser h264_bitstream_parser_; |
217 | 227 |
218 // VP9 variables to populate codec specific structure. | 228 // VP9 variables to populate codec specific structure. |
219 webrtc::GofInfoVP9 gof_; // Contains each frame's temporal information for | 229 webrtc::GofInfoVP9 gof_; // Contains each frame's temporal information for |
220 // non-flexible VP9 mode. | 230 // non-flexible VP9 mode. |
221 uint8_t tl0_pic_idx_; | 231 uint8_t tl0_pic_idx_; |
222 size_t gof_idx_; | 232 size_t gof_idx_; |
| 233 |
| 234 // EGL context - owned by factory, should not be allocated/destroyed |
| 235 // by MediaCodecVideoEncoder. |
| 236 jobject egl_context_; |
223 }; | 237 }; |
224 | 238 |
225 MediaCodecVideoEncoder::~MediaCodecVideoEncoder() { | 239 MediaCodecVideoEncoder::~MediaCodecVideoEncoder() { |
226 // Call Release() to ensure no more callbacks to us after we are deleted. | 240 // Call Release() to ensure no more callbacks to us after we are deleted. |
227 Release(); | 241 Release(); |
228 } | 242 } |
229 | 243 |
230 MediaCodecVideoEncoder::MediaCodecVideoEncoder( | 244 MediaCodecVideoEncoder::MediaCodecVideoEncoder( |
231 JNIEnv* jni, VideoCodecType codecType) : | 245 JNIEnv* jni, VideoCodecType codecType, jobject egl_context) : |
232 codecType_(codecType), | 246 codecType_(codecType), |
233 callback_(NULL), | 247 callback_(NULL), |
234 inited_(false), | 248 inited_(false), |
| 249 use_surface_(false), |
235 picture_id_(0), | 250 picture_id_(0), |
236 codec_thread_(new Thread()), | 251 codec_thread_(new Thread()), |
237 j_media_codec_video_encoder_class_( | 252 j_media_codec_video_encoder_class_( |
238 jni, | 253 jni, |
239 FindClass(jni, "org/webrtc/MediaCodecVideoEncoder")), | 254 FindClass(jni, "org/webrtc/MediaCodecVideoEncoder")), |
240 j_media_codec_video_encoder_( | 255 j_media_codec_video_encoder_( |
241 jni, | 256 jni, |
242 jni->NewObject(*j_media_codec_video_encoder_class_, | 257 jni->NewObject(*j_media_codec_video_encoder_class_, |
243 GetMethodID(jni, | 258 GetMethodID(jni, |
244 *j_media_codec_video_encoder_class_, | 259 *j_media_codec_video_encoder_class_, |
245 "<init>", | 260 "<init>", |
246 "()V"))) { | 261 "()V"))), |
| 262 egl_context_(egl_context) { |
247 ScopedLocalRefFrame local_ref_frame(jni); | 263 ScopedLocalRefFrame local_ref_frame(jni); |
248 // It would be nice to avoid spinning up a new thread per MediaCodec, and | 264 // It would be nice to avoid spinning up a new thread per MediaCodec, and |
249 // instead re-use e.g. the PeerConnectionFactory's |worker_thread_|, but bug | 265 // instead re-use e.g. the PeerConnectionFactory's |worker_thread_|, but bug |
250 // 2732 means that deadlocks abound. This class synchronously trampolines | 266 // 2732 means that deadlocks abound. This class synchronously trampolines |
251 // to |codec_thread_|, so if anything else can be coming to _us_ from | 267 // to |codec_thread_|, so if anything else can be coming to _us_ from |
252 // |codec_thread_|, or from any thread holding the |_sendCritSect| described | 268 // |codec_thread_|, or from any thread holding the |_sendCritSect| described |
253 // in the bug, we have a problem. For now work around that with a dedicated | 269 // in the bug, we have a problem. For now work around that with a dedicated |
254 // thread. | 270 // thread. |
255 codec_thread_->SetName("MediaCodecVideoEncoder", NULL); | 271 codec_thread_->SetName("MediaCodecVideoEncoder", NULL); |
256 RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoEncoder"; | 272 RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoEncoder"; |
257 codec_thread_checker_.DetachFromThread(); | 273 codec_thread_checker_.DetachFromThread(); |
258 jclass j_output_buffer_info_class = | 274 jclass j_output_buffer_info_class = |
259 FindClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo"); | 275 FindClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo"); |
260 j_init_encode_method_ = GetMethodID( | 276 j_init_encode_method_ = GetMethodID( |
261 jni, | 277 jni, |
262 *j_media_codec_video_encoder_class_, | 278 *j_media_codec_video_encoder_class_, |
263 "initEncode", | 279 "initEncode", |
264 "(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;IIII)Z"); | 280 "(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;" |
| 281 "IIIILjavax/microedition/khronos/egl/EGLContext;)Z"); |
265 j_get_input_buffers_method_ = GetMethodID( | 282 j_get_input_buffers_method_ = GetMethodID( |
266 jni, | 283 jni, |
267 *j_media_codec_video_encoder_class_, | 284 *j_media_codec_video_encoder_class_, |
268 "getInputBuffers", | 285 "getInputBuffers", |
269 "()[Ljava/nio/ByteBuffer;"); | 286 "()[Ljava/nio/ByteBuffer;"); |
270 j_dequeue_input_buffer_method_ = GetMethodID( | 287 j_dequeue_input_buffer_method_ = GetMethodID( |
271 jni, *j_media_codec_video_encoder_class_, "dequeueInputBuffer", "()I"); | 288 jni, *j_media_codec_video_encoder_class_, "dequeueInputBuffer", "()I"); |
272 j_encode_buffer_method_ = GetMethodID( | 289 j_encode_buffer_method_ = GetMethodID( |
273 jni, *j_media_codec_video_encoder_class_, "encodeBuffer", "(ZIIJ)Z"); | 290 jni, *j_media_codec_video_encoder_class_, "encodeBuffer", "(ZIIJ)Z"); |
| 291 j_encode_texture_method_ = GetMethodID( |
| 292 jni, *j_media_codec_video_encoder_class_, "encodeTexture", |
| 293 "(ZI[FJ)Z"); |
274 j_release_method_ = | 294 j_release_method_ = |
275 GetMethodID(jni, *j_media_codec_video_encoder_class_, "release", "()V"); | 295 GetMethodID(jni, *j_media_codec_video_encoder_class_, "release", "()V"); |
276 j_set_rates_method_ = GetMethodID( | 296 j_set_rates_method_ = GetMethodID( |
277 jni, *j_media_codec_video_encoder_class_, "setRates", "(II)Z"); | 297 jni, *j_media_codec_video_encoder_class_, "setRates", "(II)Z"); |
278 j_dequeue_output_buffer_method_ = GetMethodID( | 298 j_dequeue_output_buffer_method_ = GetMethodID( |
279 jni, | 299 jni, |
280 *j_media_codec_video_encoder_class_, | 300 *j_media_codec_video_encoder_class_, |
281 "dequeueOutputBuffer", | 301 "dequeueOutputBuffer", |
282 "()Lorg/webrtc/MediaCodecVideoEncoder$OutputBufferInfo;"); | 302 "()Lorg/webrtc/MediaCodecVideoEncoder$OutputBufferInfo;"); |
283 j_release_output_buffer_method_ = GetMethodID( | 303 j_release_output_buffer_method_ = GetMethodID( |
(...skipping 60 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
344 } | 364 } |
345 quality_scaler_.SetMinResolution(kMinWidth, kMinHeight); | 365 quality_scaler_.SetMinResolution(kMinWidth, kMinHeight); |
346 quality_scaler_.ReportFramerate(codec_settings->maxFramerate); | 366 quality_scaler_.ReportFramerate(codec_settings->maxFramerate); |
347 } | 367 } |
348 return codec_thread_->Invoke<int32_t>( | 368 return codec_thread_->Invoke<int32_t>( |
349 Bind(&MediaCodecVideoEncoder::InitEncodeOnCodecThread, | 369 Bind(&MediaCodecVideoEncoder::InitEncodeOnCodecThread, |
350 this, | 370 this, |
351 codec_settings->width, | 371 codec_settings->width, |
352 codec_settings->height, | 372 codec_settings->height, |
353 codec_settings->startBitrate, | 373 codec_settings->startBitrate, |
354 codec_settings->maxFramerate)); | 374 codec_settings->maxFramerate, |
| 375 false /* use_surface */)); |
355 } | 376 } |
356 | 377 |
357 int32_t MediaCodecVideoEncoder::Encode( | 378 int32_t MediaCodecVideoEncoder::Encode( |
358 const webrtc::VideoFrame& frame, | 379 const webrtc::VideoFrame& frame, |
359 const webrtc::CodecSpecificInfo* /* codec_specific_info */, | 380 const webrtc::CodecSpecificInfo* /* codec_specific_info */, |
360 const std::vector<webrtc::FrameType>* frame_types) { | 381 const std::vector<webrtc::FrameType>* frame_types) { |
361 return codec_thread_->Invoke<int32_t>(Bind( | 382 return codec_thread_->Invoke<int32_t>(Bind( |
362 &MediaCodecVideoEncoder::EncodeOnCodecThread, this, frame, frame_types)); | 383 &MediaCodecVideoEncoder::EncodeOnCodecThread, this, frame, frame_types)); |
363 } | 384 } |
364 | 385 |
(...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
410 // unclear how to signal such a failure to the app, so instead we stay silent | 431 // unclear how to signal such a failure to the app, so instead we stay silent |
411 // about it and let the next app-called API method reveal the borkedness. | 432 // about it and let the next app-called API method reveal the borkedness. |
412 DeliverPendingOutputs(jni); | 433 DeliverPendingOutputs(jni); |
413 codec_thread_->PostDelayed(kMediaCodecPollMs, this); | 434 codec_thread_->PostDelayed(kMediaCodecPollMs, this); |
414 } | 435 } |
415 | 436 |
416 bool MediaCodecVideoEncoder::ResetCodecOnCodecThread() { | 437 bool MediaCodecVideoEncoder::ResetCodecOnCodecThread() { |
417 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 438 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
418 ALOGE << "ResetOnCodecThread"; | 439 ALOGE << "ResetOnCodecThread"; |
419 if (ReleaseOnCodecThread() != WEBRTC_VIDEO_CODEC_OK || | 440 if (ReleaseOnCodecThread() != WEBRTC_VIDEO_CODEC_OK || |
420 InitEncodeOnCodecThread(width_, height_, 0, 0) | 441 InitEncodeOnCodecThread(width_, height_, 0, 0, false) != |
421 != WEBRTC_VIDEO_CODEC_OK) { | 442 WEBRTC_VIDEO_CODEC_OK) { |
422 // TODO(fischman): wouldn't it be nice if there was a way to gracefully | 443 // TODO(fischman): wouldn't it be nice if there was a way to gracefully |
423 // degrade to a SW encoder at this point? There isn't one AFAICT :( | 444 // degrade to a SW encoder at this point? There isn't one AFAICT :( |
424 // https://code.google.com/p/webrtc/issues/detail?id=2920 | 445 // https://code.google.com/p/webrtc/issues/detail?id=2920 |
425 return false; | 446 return false; |
426 } | 447 } |
427 return true; | 448 return true; |
428 } | 449 } |
429 | 450 |
430 int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread( | 451 int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread( |
431 int width, int height, int kbps, int fps) { | 452 int width, int height, int kbps, int fps, bool use_surface) { |
432 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 453 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
| 454 RTC_CHECK(!use_surface || egl_context_ != nullptr) << "EGL context not set."; |
433 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 455 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
434 ScopedLocalRefFrame local_ref_frame(jni); | 456 ScopedLocalRefFrame local_ref_frame(jni); |
435 | 457 |
436 ALOGD << "InitEncodeOnCodecThread Type: " << (int)codecType_ << ", " << | 458 ALOGD << "InitEncodeOnCodecThread Type: " << (int)codecType_ << ", " << |
437 width << " x " << height << ". Bitrate: " << kbps << | 459 width << " x " << height << ". Bitrate: " << kbps << |
438 " kbps. Fps: " << fps; | 460 " kbps. Fps: " << fps; |
439 if (kbps == 0) { | 461 if (kbps == 0) { |
440 kbps = last_set_bitrate_kbps_; | 462 kbps = last_set_bitrate_kbps_; |
441 } | 463 } |
442 if (fps == 0) { | 464 if (fps == 0) { |
(...skipping 15 matching lines...) Expand all Loading... |
458 current_bytes_ = 0; | 480 current_bytes_ = 0; |
459 current_encoding_time_ms_ = 0; | 481 current_encoding_time_ms_ = 0; |
460 last_input_timestamp_ms_ = -1; | 482 last_input_timestamp_ms_ = -1; |
461 last_output_timestamp_ms_ = -1; | 483 last_output_timestamp_ms_ = -1; |
462 output_timestamp_ = 0; | 484 output_timestamp_ = 0; |
463 output_render_time_ms_ = 0; | 485 output_render_time_ms_ = 0; |
464 timestamps_.clear(); | 486 timestamps_.clear(); |
465 render_times_ms_.clear(); | 487 render_times_ms_.clear(); |
466 frame_rtc_times_ms_.clear(); | 488 frame_rtc_times_ms_.clear(); |
467 drop_next_input_frame_ = false; | 489 drop_next_input_frame_ = false; |
| 490 use_surface_ = use_surface; |
468 picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF; | 491 picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF; |
469 gof_.SetGofInfoVP9(webrtc::TemporalStructureMode::kTemporalStructureMode1); | 492 gof_.SetGofInfoVP9(webrtc::TemporalStructureMode::kTemporalStructureMode1); |
470 tl0_pic_idx_ = static_cast<uint8_t>(rand()); | 493 tl0_pic_idx_ = static_cast<uint8_t>(rand()); |
471 gof_idx_ = 0; | 494 gof_idx_ = 0; |
472 | 495 |
473 // We enforce no extra stride/padding in the format creation step. | 496 // We enforce no extra stride/padding in the format creation step. |
474 jobject j_video_codec_enum = JavaEnumFromIndex( | 497 jobject j_video_codec_enum = JavaEnumFromIndex( |
475 jni, "MediaCodecVideoEncoder$VideoCodecType", codecType_); | 498 jni, "MediaCodecVideoEncoder$VideoCodecType", codecType_); |
476 const bool encode_status = jni->CallBooleanMethod( | 499 const bool encode_status = jni->CallBooleanMethod( |
477 *j_media_codec_video_encoder_, j_init_encode_method_, | 500 *j_media_codec_video_encoder_, j_init_encode_method_, |
478 j_video_codec_enum, width, height, kbps, fps); | 501 j_video_codec_enum, width, height, kbps, fps, |
| 502 (use_surface ? egl_context_ : nullptr)); |
479 if (!encode_status) { | 503 if (!encode_status) { |
480 ALOGE << "Failed to configure encoder."; | 504 ALOGE << "Failed to configure encoder."; |
481 return WEBRTC_VIDEO_CODEC_ERROR; | 505 return WEBRTC_VIDEO_CODEC_ERROR; |
482 } | 506 } |
483 CHECK_EXCEPTION(jni); | 507 CHECK_EXCEPTION(jni); |
484 | 508 |
485 jobjectArray input_buffers = reinterpret_cast<jobjectArray>( | 509 if (use_surface) { |
486 jni->CallObjectMethod(*j_media_codec_video_encoder_, | 510 scale_ = false; // TODO(perkj): Implement scaling when using textures. |
487 j_get_input_buffers_method_)); | 511 } else { |
488 CHECK_EXCEPTION(jni); | 512 jobjectArray input_buffers = reinterpret_cast<jobjectArray>( |
489 if (IsNull(jni, input_buffers)) { | 513 jni->CallObjectMethod(*j_media_codec_video_encoder_, |
490 return WEBRTC_VIDEO_CODEC_ERROR; | 514 j_get_input_buffers_method_)); |
| 515 CHECK_EXCEPTION(jni); |
| 516 if (IsNull(jni, input_buffers)) { |
| 517 return WEBRTC_VIDEO_CODEC_ERROR; |
| 518 } |
| 519 |
| 520 switch (GetIntField(jni, *j_media_codec_video_encoder_, |
| 521 j_color_format_field_)) { |
| 522 case COLOR_FormatYUV420Planar: |
| 523 encoder_fourcc_ = libyuv::FOURCC_YU12; |
| 524 break; |
| 525 case COLOR_FormatYUV420SemiPlanar: |
| 526 case COLOR_QCOM_FormatYUV420SemiPlanar: |
| 527 case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m: |
| 528 encoder_fourcc_ = libyuv::FOURCC_NV12; |
| 529 break; |
| 530 default: |
| 531 LOG(LS_ERROR) << "Wrong color format."; |
| 532 return WEBRTC_VIDEO_CODEC_ERROR; |
| 533 } |
| 534 size_t num_input_buffers = jni->GetArrayLength(input_buffers); |
| 535 RTC_CHECK(input_buffers_.empty()) |
| 536 << "Unexpected double InitEncode without Release"; |
| 537 input_buffers_.resize(num_input_buffers); |
| 538 for (size_t i = 0; i < num_input_buffers; ++i) { |
| 539 input_buffers_[i] = |
| 540 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i)); |
| 541 int64_t yuv_buffer_capacity = |
| 542 jni->GetDirectBufferCapacity(input_buffers_[i]); |
| 543 CHECK_EXCEPTION(jni); |
| 544 RTC_CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity"; |
| 545 } |
491 } | 546 } |
492 | 547 |
493 switch (GetIntField(jni, *j_media_codec_video_encoder_, | |
494 j_color_format_field_)) { | |
495 case COLOR_FormatYUV420Planar: | |
496 encoder_fourcc_ = libyuv::FOURCC_YU12; | |
497 break; | |
498 case COLOR_FormatYUV420SemiPlanar: | |
499 case COLOR_QCOM_FormatYUV420SemiPlanar: | |
500 case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m: | |
501 encoder_fourcc_ = libyuv::FOURCC_NV12; | |
502 break; | |
503 default: | |
504 LOG(LS_ERROR) << "Wrong color format."; | |
505 return WEBRTC_VIDEO_CODEC_ERROR; | |
506 } | |
507 size_t num_input_buffers = jni->GetArrayLength(input_buffers); | |
508 RTC_CHECK(input_buffers_.empty()) | |
509 << "Unexpected double InitEncode without Release"; | |
510 input_buffers_.resize(num_input_buffers); | |
511 for (size_t i = 0; i < num_input_buffers; ++i) { | |
512 input_buffers_[i] = | |
513 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i)); | |
514 int64_t yuv_buffer_capacity = | |
515 jni->GetDirectBufferCapacity(input_buffers_[i]); | |
516 CHECK_EXCEPTION(jni); | |
517 RTC_CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity"; | |
518 } | |
519 CHECK_EXCEPTION(jni); | |
520 | |
521 | |
522 inited_ = true; | 548 inited_ = true; |
523 codec_thread_->PostDelayed(kMediaCodecPollMs, this); | 549 codec_thread_->PostDelayed(kMediaCodecPollMs, this); |
524 return WEBRTC_VIDEO_CODEC_OK; | 550 return WEBRTC_VIDEO_CODEC_OK; |
525 } | 551 } |
526 | 552 |
527 int32_t MediaCodecVideoEncoder::EncodeOnCodecThread( | 553 int32_t MediaCodecVideoEncoder::EncodeOnCodecThread( |
528 const webrtc::VideoFrame& frame, | 554 const webrtc::VideoFrame& frame, |
529 const std::vector<webrtc::FrameType>* frame_types) { | 555 const std::vector<webrtc::FrameType>* frame_types) { |
530 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 556 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
531 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 557 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
568 if (frames_in_queue_ > 2 || encoder_latency_ms > 70) { | 594 if (frames_in_queue_ > 2 || encoder_latency_ms > 70) { |
569 ALOGD << "Drop frame - encoder is behind by " << encoder_latency_ms << | 595 ALOGD << "Drop frame - encoder is behind by " << encoder_latency_ms << |
570 " ms. Q size: " << frames_in_queue_; | 596 " ms. Q size: " << frames_in_queue_; |
571 frames_dropped_++; | 597 frames_dropped_++; |
572 // Report dropped frame to quality_scaler_. | 598 // Report dropped frame to quality_scaler_. |
573 OnDroppedFrame(); | 599 OnDroppedFrame(); |
574 return WEBRTC_VIDEO_CODEC_OK; | 600 return WEBRTC_VIDEO_CODEC_OK; |
575 } | 601 } |
576 } | 602 } |
577 | 603 |
578 int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_, | 604 const bool key_frame = frame_types->front() != webrtc::kVideoFrameDelta; |
579 j_dequeue_input_buffer_method_); | 605 bool encode_status = true; |
580 CHECK_EXCEPTION(jni); | 606 if (!input_frame.native_handle()) { |
581 if (j_input_buffer_index == -1) { | 607 int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_, |
582 // Video codec falls behind - no input buffer available. | 608 j_dequeue_input_buffer_method_); |
583 ALOGW << "Encoder drop frame - no input buffers available"; | 609 CHECK_EXCEPTION(jni); |
584 frames_dropped_++; | 610 if (j_input_buffer_index == -1) { |
585 // Report dropped frame to quality_scaler_. | 611 // Video codec falls behind - no input buffer available. |
586 OnDroppedFrame(); | 612 ALOGW << "Encoder drop frame - no input buffers available"; |
587 return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887. | 613 frames_dropped_++; |
| 614 // Report dropped frame to quality_scaler_. |
| 615 OnDroppedFrame(); |
| 616 return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887. |
| 617 } |
| 618 if (j_input_buffer_index == -2) { |
| 619 ResetCodecOnCodecThread(); |
| 620 return WEBRTC_VIDEO_CODEC_ERROR; |
| 621 } |
| 622 encode_status = EncodeByteBufferOnCodecThread(jni, key_frame, input_frame, |
| 623 j_input_buffer_index); |
| 624 } else { |
| 625 encode_status = EncodeTextureOnCodecThread(jni, key_frame, input_frame); |
588 } | 626 } |
589 if (j_input_buffer_index == -2) { | 627 |
| 628 if (!encode_status) { |
| 629 ALOGE << "Failed encode frame with timestamp: " << input_frame.timestamp(); |
590 ResetCodecOnCodecThread(); | 630 ResetCodecOnCodecThread(); |
591 return WEBRTC_VIDEO_CODEC_ERROR; | 631 return WEBRTC_VIDEO_CODEC_ERROR; |
592 } | 632 } |
593 | 633 |
594 last_input_timestamp_ms_ = | 634 last_input_timestamp_ms_ = |
595 current_timestamp_us_ / rtc::kNumMicrosecsPerMillisec; | 635 current_timestamp_us_ / rtc::kNumMicrosecsPerMillisec; |
596 frames_in_queue_++; | 636 frames_in_queue_++; |
597 | 637 |
598 // Save input image timestamps for later output | 638 // Save input image timestamps for later output |
599 timestamps_.push_back(input_frame.timestamp()); | 639 timestamps_.push_back(input_frame.timestamp()); |
600 render_times_ms_.push_back(input_frame.render_time_ms()); | 640 render_times_ms_.push_back(input_frame.render_time_ms()); |
601 frame_rtc_times_ms_.push_back(GetCurrentTimeMs()); | 641 frame_rtc_times_ms_.push_back(GetCurrentTimeMs()); |
602 | |
603 const bool key_frame = frame_types->front() != webrtc::kVideoFrameDelta; | |
604 const bool encode_status = | |
605 EncodeByteBufferOnCodecThread(jni, key_frame, input_frame, | |
606 j_input_buffer_index); | |
607 | |
608 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; | 642 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; |
609 | 643 |
610 if (!encode_status || !DeliverPendingOutputs(jni)) { | 644 if (!DeliverPendingOutputs(jni)) { |
611 ALOGE << "Failed deliver pending outputs."; | 645 ALOGE << "Failed deliver pending outputs."; |
612 ResetCodecOnCodecThread(); | 646 ResetCodecOnCodecThread(); |
613 return WEBRTC_VIDEO_CODEC_ERROR; | 647 return WEBRTC_VIDEO_CODEC_ERROR; |
614 } | 648 } |
615 return WEBRTC_VIDEO_CODEC_OK; | 649 return WEBRTC_VIDEO_CODEC_OK; |
616 } | 650 } |
617 | 651 |
618 bool MediaCodecVideoEncoder::MaybeReconfigureEncoderOnCodecThread( | 652 bool MediaCodecVideoEncoder::MaybeReconfigureEncoderOnCodecThread( |
619 const webrtc::VideoFrame& frame) { | 653 const webrtc::VideoFrame& frame) { |
620 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 654 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
621 | 655 |
| 656 const bool is_texture_frame = frame.native_handle() != nullptr; |
| 657 const bool reconfigure_due_to_format = is_texture_frame != use_surface_; |
622 const bool reconfigure_due_to_size = | 658 const bool reconfigure_due_to_size = |
623 frame.width() != width_ || frame.height() != height_; | 659 frame.width() != width_ || frame.height() != height_; |
624 | 660 |
| 661 if (reconfigure_due_to_format) { |
| 662 ALOGD << "Reconfigure encoder due to format change. " |
| 663 << (use_surface_ ? |
| 664 "Reconfiguring to encode from byte buffer." : |
| 665 "Reconfiguring to encode from texture."); |
| 666 } |
625 if (reconfigure_due_to_size) { | 667 if (reconfigure_due_to_size) { |
626 ALOGD << "Reconfigure encoder due to frame resolution change from " | 668 ALOGD << "Reconfigure encoder due to frame resolution change from " |
627 << width_ << " x " << height_ << " to " << frame.width() << " x " | 669 << width_ << " x " << height_ << " to " << frame.width() << " x " |
628 << frame.height(); | 670 << frame.height(); |
629 width_ = frame.width(); | 671 width_ = frame.width(); |
630 height_ = frame.height(); | 672 height_ = frame.height(); |
631 } | 673 } |
632 | 674 |
633 if (!reconfigure_due_to_size) | 675 if (!reconfigure_due_to_format && !reconfigure_due_to_size) |
634 return true; | 676 return true; |
635 | 677 |
636 ReleaseOnCodecThread(); | 678 ReleaseOnCodecThread(); |
637 | 679 |
638      return InitEncodeOnCodecThread(width_, height_, 0, 0) ==         | 680      return InitEncodeOnCodecThread(width_, height_, 0, 0, is_texture_frame) == |
639 WEBRTC_VIDEO_CODEC_OK; | 681 WEBRTC_VIDEO_CODEC_OK; |
640 } | 682 } |
641 | 683 |
642 bool MediaCodecVideoEncoder::EncodeByteBufferOnCodecThread(JNIEnv* jni, | 684 bool MediaCodecVideoEncoder::EncodeByteBufferOnCodecThread(JNIEnv* jni, |
643 bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index) { | 685 bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index) { |
644 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 686 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
| 687 RTC_CHECK(!use_surface_); |
645 | 688 |
646 ALOGV("Encoder frame in # %d. TS: %lld. Q: %d", | 689 ALOGV("Encoder frame in # %d. TS: %lld. Q: %d", |
647 frames_received_ - 1, current_timestamp_us_ / 1000, frames_in_queue_); | 690 frames_received_ - 1, current_timestamp_us_ / 1000, frames_in_queue_); |
648 | 691 |
649 jobject j_input_buffer = input_buffers_[input_buffer_index]; | 692 jobject j_input_buffer = input_buffers_[input_buffer_index]; |
650 uint8_t* yuv_buffer = | 693 uint8_t* yuv_buffer = |
651 reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer)); | 694 reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer)); |
652 CHECK_EXCEPTION(jni); | 695 CHECK_EXCEPTION(jni); |
653 RTC_CHECK(yuv_buffer) << "Indirect buffer??"; | 696 RTC_CHECK(yuv_buffer) << "Indirect buffer??"; |
654 RTC_CHECK(!libyuv::ConvertFromI420( | 697 RTC_CHECK(!libyuv::ConvertFromI420( |
655 frame.buffer(webrtc::kYPlane), frame.stride(webrtc::kYPlane), | 698 frame.buffer(webrtc::kYPlane), frame.stride(webrtc::kYPlane), |
656 frame.buffer(webrtc::kUPlane), frame.stride(webrtc::kUPlane), | 699 frame.buffer(webrtc::kUPlane), frame.stride(webrtc::kUPlane), |
657 frame.buffer(webrtc::kVPlane), frame.stride(webrtc::kVPlane), | 700 frame.buffer(webrtc::kVPlane), frame.stride(webrtc::kVPlane), |
658 yuv_buffer, width_, width_, height_, encoder_fourcc_)) | 701 yuv_buffer, width_, width_, height_, encoder_fourcc_)) |
659 << "ConvertFromI420 failed"; | 702 << "ConvertFromI420 failed"; |
660 | 703 |
661 bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_, | 704 bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_, |
662 j_encode_buffer_method_, | 705 j_encode_buffer_method_, |
663 key_frame, | 706 key_frame, |
664 input_buffer_index, | 707 input_buffer_index, |
665 yuv_size_, | 708 yuv_size_, |
666 current_timestamp_us_); | 709 current_timestamp_us_); |
667 CHECK_EXCEPTION(jni); | 710 CHECK_EXCEPTION(jni); |
668 return encode_status; | 711 return encode_status; |
669 } | 712 } |
670 | 713 |
| 714 bool MediaCodecVideoEncoder::EncodeTextureOnCodecThread(JNIEnv* jni, |
| 715 bool key_frame, const webrtc::VideoFrame& frame) { |
| 716 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
| 717 RTC_CHECK(use_surface_); |
| 718 NativeHandleImpl* handle = |
| 719 static_cast<NativeHandleImpl*>(frame.native_handle()); |
| 720 jfloatArray sampling_matrix = jni->NewFloatArray(16); |
| 721 jni->SetFloatArrayRegion(sampling_matrix, 0, 16, handle->sampling_matrix); |
| 722 |
| 723 bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_, |
| 724 j_encode_texture_method_, |
| 725 key_frame, |
| 726 handle->oes_texture_id, |
| 727 sampling_matrix, |
| 728 current_timestamp_us_); |
| 729 CHECK_EXCEPTION(jni); |
| 730 return encode_status; |
| 731 } |
| 732 |
671 int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread( | 733 int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread( |
672 webrtc::EncodedImageCallback* callback) { | 734 webrtc::EncodedImageCallback* callback) { |
673 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 735 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
674 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 736 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
675 ScopedLocalRefFrame local_ref_frame(jni); | 737 ScopedLocalRefFrame local_ref_frame(jni); |
676 callback_ = callback; | 738 callback_ = callback; |
677 return WEBRTC_VIDEO_CODEC_OK; | 739 return WEBRTC_VIDEO_CODEC_OK; |
678 } | 740 } |
679 | 741 |
680 int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() { | 742 int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() { |
681 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 743 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
682 if (!inited_) { | 744 if (!inited_) { |
683 return WEBRTC_VIDEO_CODEC_OK; | 745 return WEBRTC_VIDEO_CODEC_OK; |
684 } | 746 } |
685 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 747 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
686 ALOGD << "EncoderReleaseOnCodecThread: Frames received: " << | 748 ALOGD << "EncoderReleaseOnCodecThread: Frames received: " << |
687 frames_received_ << ". Encoded: " << frames_encoded_ << | 749 frames_received_ << ". Encoded: " << frames_encoded_ << |
688 ". Dropped: " << frames_dropped_; | 750 ". Dropped: " << frames_dropped_; |
689 ScopedLocalRefFrame local_ref_frame(jni); | 751 ScopedLocalRefFrame local_ref_frame(jni); |
690 for (size_t i = 0; i < input_buffers_.size(); ++i) | 752 for (size_t i = 0; i < input_buffers_.size(); ++i) |
691 jni->DeleteGlobalRef(input_buffers_[i]); | 753 jni->DeleteGlobalRef(input_buffers_[i]); |
692 input_buffers_.clear(); | 754 input_buffers_.clear(); |
693 jni->CallVoidMethod(*j_media_codec_video_encoder_, j_release_method_); | 755 jni->CallVoidMethod(*j_media_codec_video_encoder_, j_release_method_); |
694 CHECK_EXCEPTION(jni); | 756 CHECK_EXCEPTION(jni); |
695 rtc::MessageQueueManager::Clear(this); | 757 rtc::MessageQueueManager::Clear(this); |
696 inited_ = false; | 758 inited_ = false; |
| 759 use_surface_ = false; |
697 ALOGD << "EncoderReleaseOnCodecThread done."; | 760 ALOGD << "EncoderReleaseOnCodecThread done."; |
698 return WEBRTC_VIDEO_CODEC_OK; | 761 return WEBRTC_VIDEO_CODEC_OK; |
699 } | 762 } |
700 | 763 |
701 int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate, | 764 int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate, |
702 uint32_t frame_rate) { | 765 uint32_t frame_rate) { |
703 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); | 766 RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); |
704 if (last_set_bitrate_kbps_ == new_bit_rate && | 767 if (last_set_bitrate_kbps_ == new_bit_rate && |
705 last_set_fps_ == frame_rate) { | 768 last_set_fps_ == frame_rate) { |
706 return WEBRTC_VIDEO_CODEC_OK; | 769 return WEBRTC_VIDEO_CODEC_OK; |
(...skipping 277 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
984 | 1047 |
985 void MediaCodecVideoEncoder::OnDroppedFrame() { | 1048 void MediaCodecVideoEncoder::OnDroppedFrame() { |
986 if (scale_) | 1049 if (scale_) |
987 quality_scaler_.ReportDroppedFrame(); | 1050 quality_scaler_.ReportDroppedFrame(); |
988 } | 1051 } |
989 | 1052 |
990 int MediaCodecVideoEncoder::GetTargetFramerate() { | 1053 int MediaCodecVideoEncoder::GetTargetFramerate() { |
991 return scale_ ? quality_scaler_.GetTargetFramerate() : -1; | 1054 return scale_ ? quality_scaler_.GetTargetFramerate() : -1; |
992 } | 1055 } |
993 | 1056 |
994 MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() { | 1057 MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() |
 | 1058     : egl_context_(nullptr) { |
995 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 1059 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
996 ScopedLocalRefFrame local_ref_frame(jni); | 1060 ScopedLocalRefFrame local_ref_frame(jni); |
997 jclass j_encoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoEncoder"); | 1061 jclass j_encoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoEncoder"); |
998 supported_codecs_.clear(); | 1062 supported_codecs_.clear(); |
999 | 1063 |
1000 bool is_vp8_hw_supported = jni->CallStaticBooleanMethod( | 1064 bool is_vp8_hw_supported = jni->CallStaticBooleanMethod( |
1001 j_encoder_class, | 1065 j_encoder_class, |
1002 GetStaticMethodID(jni, j_encoder_class, "isVp8HwSupported", "()Z")); | 1066 GetStaticMethodID(jni, j_encoder_class, "isVp8HwSupported", "()Z")); |
1003 CHECK_EXCEPTION(jni); | 1067 CHECK_EXCEPTION(jni); |
1004 if (is_vp8_hw_supported) { | 1068 if (is_vp8_hw_supported) { |
(...skipping 18 matching lines...) Expand all Loading... |
1023 CHECK_EXCEPTION(jni); | 1087 CHECK_EXCEPTION(jni); |
1024 if (is_h264_hw_supported) { | 1088 if (is_h264_hw_supported) { |
1025 ALOGD << "H.264 HW Encoder supported."; | 1089 ALOGD << "H.264 HW Encoder supported."; |
1026 supported_codecs_.push_back(VideoCodec(kVideoCodecH264, "H264", | 1090 supported_codecs_.push_back(VideoCodec(kVideoCodecH264, "H264", |
1027 MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS)); | 1091 MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS)); |
1028 } | 1092 } |
1029 } | 1093 } |
1030 | 1094 |
1031 MediaCodecVideoEncoderFactory::~MediaCodecVideoEncoderFactory() {} | 1095 MediaCodecVideoEncoderFactory::~MediaCodecVideoEncoderFactory() {} |
1032 | 1096 |
| 1097 void MediaCodecVideoEncoderFactory::SetEGLContext( |
| 1098 JNIEnv* jni, jobject render_egl_context) { |
| 1099 ALOGD << "MediaCodecVideoEncoderFactory::SetEGLContext"; |
| 1100 if (egl_context_) { |
| 1101 jni->DeleteGlobalRef(egl_context_); |
| 1102 egl_context_ = NULL; |
| 1103 } |
| 1104 if (!IsNull(jni, render_egl_context)) { |
| 1105 egl_context_ = jni->NewGlobalRef(render_egl_context); |
| 1106 if (CheckException(jni)) { |
| 1107 ALOGE << "error calling NewGlobalRef for EGL Context."; |
| 1108 egl_context_ = NULL; |
| 1109 } else { |
| 1110 jclass j_egl_context_class = |
| 1111 FindClass(jni, "javax/microedition/khronos/egl/EGLContext"); |
| 1112 if (!jni->IsInstanceOf(egl_context_, j_egl_context_class)) { |
| 1113 ALOGE << "Wrong EGL Context."; |
| 1114 jni->DeleteGlobalRef(egl_context_); |
| 1115 egl_context_ = NULL; |
| 1116 } |
| 1117 } |
| 1118 } |
| 1119 if (egl_context_ == NULL) { |
 | 1120     ALOGW << "NULL VideoEncoder EGL context - HW surface encoding is disabled."; |
| 1121 } |
| 1122 } |
| 1123 |
1033 webrtc::VideoEncoder* MediaCodecVideoEncoderFactory::CreateVideoEncoder( | 1124 webrtc::VideoEncoder* MediaCodecVideoEncoderFactory::CreateVideoEncoder( |
1034 VideoCodecType type) { | 1125 VideoCodecType type) { |
1035 if (supported_codecs_.empty()) { | 1126 if (supported_codecs_.empty()) { |
1036 ALOGW << "No HW video encoder for type " << (int)type; | 1127 ALOGW << "No HW video encoder for type " << (int)type; |
1037 return NULL; | 1128 return NULL; |
1038 } | 1129 } |
1039 for (std::vector<VideoCodec>::const_iterator it = supported_codecs_.begin(); | 1130 for (std::vector<VideoCodec>::const_iterator it = supported_codecs_.begin(); |
1040 it != supported_codecs_.end(); ++it) { | 1131 it != supported_codecs_.end(); ++it) { |
1041 if (it->type == type) { | 1132 if (it->type == type) { |
1042 ALOGD << "Create HW video encoder for type " << (int)type << | 1133 ALOGD << "Create HW video encoder for type " << (int)type << |
1043 " (" << it->name << ")."; | 1134 " (" << it->name << ")."; |
1044 return new MediaCodecVideoEncoder(AttachCurrentThreadIfNeeded(), type); | 1135 return new MediaCodecVideoEncoder(AttachCurrentThreadIfNeeded(), type, |
| 1136 egl_context_); |
1045 } | 1137 } |
1046 } | 1138 } |
1047 ALOGW << "Can not find HW video encoder for type " << (int)type; | 1139 ALOGW << "Can not find HW video encoder for type " << (int)type; |
1048 return NULL; | 1140 return NULL; |
1049 } | 1141 } |
1050 | 1142 |
1051 const std::vector<MediaCodecVideoEncoderFactory::VideoCodec>& | 1143 const std::vector<MediaCodecVideoEncoderFactory::VideoCodec>& |
1052 MediaCodecVideoEncoderFactory::codecs() const { | 1144 MediaCodecVideoEncoderFactory::codecs() const { |
1053 return supported_codecs_; | 1145 return supported_codecs_; |
1054 } | 1146 } |
1055 | 1147 |
1056 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( | 1148 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( |
1057 webrtc::VideoEncoder* encoder) { | 1149 webrtc::VideoEncoder* encoder) { |
1058 ALOGD << "Destroy video encoder."; | 1150 ALOGD << "Destroy video encoder."; |
1059 delete encoder; | 1151 delete encoder; |
1060 } | 1152 } |
1061 | 1153 |
1062 } // namespace webrtc_jni | 1154 } // namespace webrtc_jni |
1063 | 1155 |
OLD | NEW |