Chromium Code Reviews

Side by Side Diff: webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc

Issue 3009613002: Android: Replace webrtc_jni namespace with nested jni namespace (Closed)
Patch Set: Rebase Created 3 years, 3 months ago
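The substance of this CL is a namespace move: the JNI glue previously lived in a flat top-level webrtc_jni namespace and pulled WebRTC types in with using webrtc::...; declarations; it now sits in a nested webrtc::jni namespace, where those types resolve unqualified and the using-declarations can simply be deleted. A minimal sketch of the before/after pattern (illustrative only; the stand-in type below is hypothetical, the real file relies on the WebRTC headers visible in the diff):

// Minimal sketch of the namespace change applied in this CL.
namespace webrtc {

// Stand-in for an existing WebRTC type such as webrtc::VideoFrame.
class VideoFrame {};

// Before this CL:
//
//   namespace webrtc_jni {
//   using webrtc::VideoFrame;             // explicit import required
//   void Encode(const VideoFrame& frame);
//   }  // namespace webrtc_jni
//
// After this CL the code is nested inside webrtc, so webrtc:: names are
// found without any using-declaration:
namespace jni {

void Encode(const VideoFrame& frame);  // VideoFrame is webrtc::VideoFrame

}  // namespace jni
}  // namespace webrtc

Call sites outside the namespace change from webrtc_jni::MediaCodecVideoEncoder to webrtc::jni::MediaCodecVideoEncoder accordingly; the diff below shows the mechanical application of this pattern to androidmediaencoder_jni.cc.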
OLD | NEW
1 /* 1 /*
2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
3 * 3 *
4 * Use of this source code is governed by a BSD-style license 4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source 5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found 6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may 7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree. 8 * be found in the AUTHORS file in the root of the source tree.
9 */ 9 */
10 10
(...skipping 32 matching lines...)
43 #include "webrtc/sdk/android/src/jni/androidmediacodeccommon.h" 43 #include "webrtc/sdk/android/src/jni/androidmediacodeccommon.h"
44 #include "webrtc/sdk/android/src/jni/classreferenceholder.h" 44 #include "webrtc/sdk/android/src/jni/classreferenceholder.h"
45 #include "webrtc/sdk/android/src/jni/jni_helpers.h" 45 #include "webrtc/sdk/android/src/jni/jni_helpers.h"
46 #include "webrtc/sdk/android/src/jni/native_handle_impl.h" 46 #include "webrtc/sdk/android/src/jni/native_handle_impl.h"
47 #include "webrtc/system_wrappers/include/field_trial.h" 47 #include "webrtc/system_wrappers/include/field_trial.h"
48 48
49 using rtc::Bind; 49 using rtc::Bind;
50 using rtc::Thread; 50 using rtc::Thread;
51 using rtc::ThreadManager; 51 using rtc::ThreadManager;
52 52
53 using webrtc::CodecSpecificInfo; 53 namespace webrtc {
54 using webrtc::EncodedImage; 54 namespace jni {
55 using webrtc::VideoFrame;
56 using webrtc::RTPFragmentationHeader;
57 using webrtc::VideoCodec;
58 using webrtc::VideoCodecType;
59 using webrtc::kVideoCodecH264;
60 using webrtc::kVideoCodecVP8;
61 using webrtc::kVideoCodecVP9;
62 using webrtc::QualityScaler;
63
64 namespace webrtc_jni {
65 55
66 // Maximum supported HW video encoder fps. 56 // Maximum supported HW video encoder fps.
67 #define MAX_VIDEO_FPS 30 57 #define MAX_VIDEO_FPS 30
68 // Maximum allowed fps value in SetRates() call. 58 // Maximum allowed fps value in SetRates() call.
69 #define MAX_ALLOWED_VIDEO_FPS 60 59 #define MAX_ALLOWED_VIDEO_FPS 60
70 // Maximum allowed frames in encoder input queue. 60 // Maximum allowed frames in encoder input queue.
71 #define MAX_ENCODER_Q_SIZE 2 61 #define MAX_ENCODER_Q_SIZE 2
72 // Maximum amount of dropped frames caused by full encoder queue - exceeding 62 // Maximum amount of dropped frames caused by full encoder queue - exceeding
73 // this threshold means that encoder probably got stuck and need to be reset. 63 // this threshold means that encoder probably got stuck and need to be reset.
74 #define ENCODER_STALL_FRAMEDROP_THRESHOLD 60 64 #define ENCODER_STALL_FRAMEDROP_THRESHOLD 60
(...skipping 11 matching lines...)
86 #define ALOGE LOG_TAG(rtc::LS_ERROR, TAG_ENCODER) 76 #define ALOGE LOG_TAG(rtc::LS_ERROR, TAG_ENCODER)
87 77
88 namespace { 78 namespace {
89 // Maximum time limit between incoming frames before requesting a key frame. 79 // Maximum time limit between incoming frames before requesting a key frame.
90 const size_t kFrameDiffThresholdMs = 350; 80 const size_t kFrameDiffThresholdMs = 350;
91 const int kMinKeyFrameInterval = 6; 81 const int kMinKeyFrameInterval = 6;
92 const char kH264HighProfileFieldTrial[] = "WebRTC-H264HighProfile"; 82 const char kH264HighProfileFieldTrial[] = "WebRTC-H264HighProfile";
93 const char kCustomQPThresholdsFieldTrial[] = "WebRTC-CustomQPThresholds"; 83 const char kCustomQPThresholdsFieldTrial[] = "WebRTC-CustomQPThresholds";
94 } // namespace 84 } // namespace
95 85
96 // MediaCodecVideoEncoder is a webrtc::VideoEncoder implementation that uses 86 // MediaCodecVideoEncoder is a VideoEncoder implementation that uses
97 // Android's MediaCodec SDK API behind the scenes to implement (hopefully) 87 // Android's MediaCodec SDK API behind the scenes to implement (hopefully)
98 // HW-backed video encode. This C++ class is implemented as a very thin shim, 88 // HW-backed video encode. This C++ class is implemented as a very thin shim,
99 // delegating all of the interesting work to org.webrtc.MediaCodecVideoEncoder. 89 // delegating all of the interesting work to org.webrtc.MediaCodecVideoEncoder.
100 // MediaCodecVideoEncoder must be operated on a single task queue, currently 90 // MediaCodecVideoEncoder must be operated on a single task queue, currently
101 // this is the encoder queue from ViE encoder. 91 // this is the encoder queue from ViE encoder.
102 class MediaCodecVideoEncoder : public webrtc::VideoEncoder { 92 class MediaCodecVideoEncoder : public VideoEncoder {
103 public: 93 public:
104 virtual ~MediaCodecVideoEncoder(); 94 virtual ~MediaCodecVideoEncoder();
105 MediaCodecVideoEncoder(JNIEnv* jni, 95 MediaCodecVideoEncoder(JNIEnv* jni,
106 const cricket::VideoCodec& codec, 96 const cricket::VideoCodec& codec,
107 jobject egl_context); 97 jobject egl_context);
108 98
109 // webrtc::VideoEncoder implementation. 99 // VideoEncoder implementation.
110 int32_t InitEncode(const webrtc::VideoCodec* codec_settings, 100 int32_t InitEncode(const VideoCodec* codec_settings,
111 int32_t /* number_of_cores */, 101 int32_t /* number_of_cores */,
112 size_t /* max_payload_size */) override; 102 size_t /* max_payload_size */) override;
113 int32_t Encode(const webrtc::VideoFrame& input_image, 103 int32_t Encode(const VideoFrame& input_image,
114 const webrtc::CodecSpecificInfo* /* codec_specific_info */, 104 const CodecSpecificInfo* /* codec_specific_info */,
115 const std::vector<webrtc::FrameType>* frame_types) override; 105 const std::vector<FrameType>* frame_types) override;
116 int32_t RegisterEncodeCompleteCallback( 106 int32_t RegisterEncodeCompleteCallback(
117 webrtc::EncodedImageCallback* callback) override; 107 EncodedImageCallback* callback) override;
118 int32_t Release() override; 108 int32_t Release() override;
119 int32_t SetChannelParameters(uint32_t /* packet_loss */, 109 int32_t SetChannelParameters(uint32_t /* packet_loss */,
120 int64_t /* rtt */) override; 110 int64_t /* rtt */) override;
121 int32_t SetRateAllocation(const webrtc::BitrateAllocation& rate_allocation, 111 int32_t SetRateAllocation(const BitrateAllocation& rate_allocation,
122 uint32_t frame_rate) override; 112 uint32_t frame_rate) override;
123 113
124 bool SupportsNativeHandle() const override { return egl_context_ != nullptr; } 114 bool SupportsNativeHandle() const override { return egl_context_ != nullptr; }
125 const char* ImplementationName() const override; 115 const char* ImplementationName() const override;
126 116
127 // Fills the input buffer with data from the buffers passed as parameters. 117 // Fills the input buffer with data from the buffers passed as parameters.
128 bool FillInputBuffer(JNIEnv* jni, 118 bool FillInputBuffer(JNIEnv* jni,
129 int input_buffer_index, 119 int input_buffer_index,
130 uint8_t const* buffer_y, 120 uint8_t const* buffer_y,
131 int stride_y, 121 int stride_y,
(...skipping 31 matching lines...)
163 // previously-current values are reused instead of the passed parameters 153 // previously-current values are reused instead of the passed parameters
164 // (makes it easier to reason about thread-safety). 154 // (makes it easier to reason about thread-safety).
165 int32_t InitEncodeInternal(int width, 155 int32_t InitEncodeInternal(int width,
166 int height, 156 int height,
167 int kbps, 157 int kbps,
168 int fps, 158 int fps,
169 bool use_surface); 159 bool use_surface);
170 // Reconfigure to match |frame| in width, height. Also reconfigures the 160 // Reconfigure to match |frame| in width, height. Also reconfigures the
171 // encoder if |frame| is a texture/byte buffer and the encoder is initialized 161 // encoder if |frame| is a texture/byte buffer and the encoder is initialized
172 // for byte buffer/texture. Returns false if reconfiguring fails. 162 // for byte buffer/texture. Returns false if reconfiguring fails.
173 bool MaybeReconfigureEncoder(JNIEnv* jni, const webrtc::VideoFrame& frame); 163 bool MaybeReconfigureEncoder(JNIEnv* jni, const VideoFrame& frame);
174 164
175 // Returns true if the frame is a texture frame and we should use surface 165 // Returns true if the frame is a texture frame and we should use surface
176 // based encoding. 166 // based encoding.
177 bool IsTextureFrame(JNIEnv* jni, const webrtc::VideoFrame& frame); 167 bool IsTextureFrame(JNIEnv* jni, const VideoFrame& frame);
178 168
179 bool EncodeByteBuffer(JNIEnv* jni, 169 bool EncodeByteBuffer(JNIEnv* jni,
180 bool key_frame, 170 bool key_frame,
181 const webrtc::VideoFrame& frame, 171 const VideoFrame& frame,
182 int input_buffer_index); 172 int input_buffer_index);
183 bool EncodeTexture(JNIEnv* jni, 173 bool EncodeTexture(JNIEnv* jni, bool key_frame, const VideoFrame& frame);
184 bool key_frame,
185 const webrtc::VideoFrame& frame);
186 // Encodes a new style org.webrtc.VideoFrame. Might be a I420 or a texture 174 // Encodes a new style org.webrtc.VideoFrame. Might be a I420 or a texture
187 // frame. 175 // frame.
188 bool EncodeJavaFrame(JNIEnv* jni, 176 bool EncodeJavaFrame(JNIEnv* jni,
189 bool key_frame, 177 bool key_frame,
190 jobject frame, 178 jobject frame,
191 int input_buffer_index); 179 int input_buffer_index);
192 180
193 // Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members. 181 // Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members.
194 int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info); 182 int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info);
195 jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info); 183 jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info);
(...skipping 16 matching lines...)
212 // Mutex for protecting inited_. It is only used for correctness checking on 200 // Mutex for protecting inited_. It is only used for correctness checking on
213 // debug build. It is used for checking that encoder has been released in the 201 // debug build. It is used for checking that encoder has been released in the
214 // destructor. Because this might happen on a different thread, we need a 202 // destructor. Because this might happen on a different thread, we need a
215 // mutex. 203 // mutex.
216 rtc::CriticalSection inited_crit_; 204 rtc::CriticalSection inited_crit_;
217 #endif 205 #endif
218 206
219 // Type of video codec. 207 // Type of video codec.
220 const cricket::VideoCodec codec_; 208 const cricket::VideoCodec codec_;
221 209
222 webrtc::EncodedImageCallback* callback_; 210 EncodedImageCallback* callback_;
223 211
224 // State that is constant for the lifetime of this object once the ctor 212 // State that is constant for the lifetime of this object once the ctor
225 // returns. 213 // returns.
226 rtc::SequencedTaskChecker encoder_queue_checker_; 214 rtc::SequencedTaskChecker encoder_queue_checker_;
227 ScopedGlobalRef<jclass> j_media_codec_video_encoder_class_; 215 ScopedGlobalRef<jclass> j_media_codec_video_encoder_class_;
228 ScopedGlobalRef<jobject> j_media_codec_video_encoder_; 216 ScopedGlobalRef<jobject> j_media_codec_video_encoder_;
229 jmethodID j_init_encode_method_; 217 jmethodID j_init_encode_method_;
230 jmethodID j_get_input_buffers_method_; 218 jmethodID j_get_input_buffers_method_;
231 jmethodID j_dequeue_input_buffer_method_; 219 jmethodID j_dequeue_input_buffer_method_;
232 jmethodID j_encode_buffer_method_; 220 jmethodID j_encode_buffer_method_;
(...skipping 33 matching lines...)
266 int current_encoding_time_ms_; // Overall encoding time in the current second 254 int current_encoding_time_ms_; // Overall encoding time in the current second
267 int64_t last_input_timestamp_ms_; // Timestamp of last received yuv frame. 255 int64_t last_input_timestamp_ms_; // Timestamp of last received yuv frame.
268 int64_t last_output_timestamp_ms_; // Timestamp of last encoded frame. 256 int64_t last_output_timestamp_ms_; // Timestamp of last encoded frame.
269 // Holds the task while the polling loop is paused. 257 // Holds the task while the polling loop is paused.
270 std::unique_ptr<rtc::QueuedTask> encode_task_; 258 std::unique_ptr<rtc::QueuedTask> encode_task_;
271 259
272 struct InputFrameInfo { 260 struct InputFrameInfo {
273 InputFrameInfo(int64_t encode_start_time, 261 InputFrameInfo(int64_t encode_start_time,
274 int32_t frame_timestamp, 262 int32_t frame_timestamp,
275 int64_t frame_render_time_ms, 263 int64_t frame_render_time_ms,
276 webrtc::VideoRotation rotation) 264 VideoRotation rotation)
277 : encode_start_time(encode_start_time), 265 : encode_start_time(encode_start_time),
278 frame_timestamp(frame_timestamp), 266 frame_timestamp(frame_timestamp),
279 frame_render_time_ms(frame_render_time_ms), 267 frame_render_time_ms(frame_render_time_ms),
280 rotation(rotation) {} 268 rotation(rotation) {}
281 // Time when video frame is sent to encoder input. 269 // Time when video frame is sent to encoder input.
282 const int64_t encode_start_time; 270 const int64_t encode_start_time;
283 271
284 // Input frame information. 272 // Input frame information.
285 const int32_t frame_timestamp; 273 const int32_t frame_timestamp;
286 const int64_t frame_render_time_ms; 274 const int64_t frame_render_time_ms;
287 const webrtc::VideoRotation rotation; 275 const VideoRotation rotation;
288 }; 276 };
289 std::list<InputFrameInfo> input_frame_infos_; 277 std::list<InputFrameInfo> input_frame_infos_;
290 int32_t output_timestamp_; // Last output frame timestamp from 278 int32_t output_timestamp_; // Last output frame timestamp from
291 // |input_frame_infos_|. 279 // |input_frame_infos_|.
292 int64_t output_render_time_ms_; // Last output frame render time from 280 int64_t output_render_time_ms_; // Last output frame render time from
293 // |input_frame_infos_|. 281 // |input_frame_infos_|.
294 webrtc::VideoRotation output_rotation_; // Last output frame rotation from 282 VideoRotation output_rotation_; // Last output frame rotation from
295 // |input_frame_infos_|. 283 // |input_frame_infos_|.
296 284
297 // Frame size in bytes fed to MediaCodec. 285 // Frame size in bytes fed to MediaCodec.
298 int yuv_size_; 286 int yuv_size_;
299 // True only when between a callback_->OnEncodedImage() call return a positive 287 // True only when between a callback_->OnEncodedImage() call return a positive
300 // value and the next Encode() call being ignored. 288 // value and the next Encode() call being ignored.
301 bool drop_next_input_frame_; 289 bool drop_next_input_frame_;
302 bool scale_; 290 bool scale_;
303 webrtc::H264::Profile profile_; 291 H264::Profile profile_;
304 // Global references; must be deleted in Release(). 292 // Global references; must be deleted in Release().
305 std::vector<jobject> input_buffers_; 293 std::vector<jobject> input_buffers_;
306 webrtc::H264BitstreamParser h264_bitstream_parser_; 294 H264BitstreamParser h264_bitstream_parser_;
307 295
308 // VP9 variables to populate codec specific structure. 296 // VP9 variables to populate codec specific structure.
309 webrtc::GofInfoVP9 gof_; // Contains each frame's temporal information for 297 GofInfoVP9 gof_; // Contains each frame's temporal information for
310 // non-flexible VP9 mode. 298 // non-flexible VP9 mode.
311 size_t gof_idx_; 299 size_t gof_idx_;
312 300
313 // EGL context - owned by factory, should not be allocated/destroyed 301 // EGL context - owned by factory, should not be allocated/destroyed
314 // by MediaCodecVideoEncoder. 302 // by MediaCodecVideoEncoder.
315 jobject egl_context_; 303 jobject egl_context_;
316 304
317 // Temporary fix for VP8. 305 // Temporary fix for VP8.
318 // Sends a key frame if frames are largely spaced apart (possibly 306 // Sends a key frame if frames are largely spaced apart (possibly
319 // corresponding to a large image change). 307 // corresponding to a large image change).
320 int64_t last_frame_received_ms_; 308 int64_t last_frame_received_ms_;
321 int frames_received_since_last_key_; 309 int frames_received_since_last_key_;
322 webrtc::VideoCodecMode codec_mode_; 310 VideoCodecMode codec_mode_;
323 311
324 // RTP state. 312 // RTP state.
325 uint16_t picture_id_; 313 uint16_t picture_id_;
326 uint8_t tl0_pic_idx_; 314 uint8_t tl0_pic_idx_;
327 315
328 bool sw_fallback_required_; 316 bool sw_fallback_required_;
329 317
330 // All other member variables should be before WeakPtrFactory. Valid only from 318 // All other member variables should be before WeakPtrFactory. Valid only from
331 // InitEncode to Release. 319 // InitEncode to Release.
332 std::unique_ptr<rtc::WeakPtrFactory<MediaCodecVideoEncoder>> weak_factory_; 320 std::unique_ptr<rtc::WeakPtrFactory<MediaCodecVideoEncoder>> weak_factory_;
(...skipping 72 matching lines...)
405 jni, j_output_buffer_info_class, "buffer", "Ljava/nio/ByteBuffer;"); 393 jni, j_output_buffer_info_class, "buffer", "Ljava/nio/ByteBuffer;");
406 j_info_is_key_frame_field_ = 394 j_info_is_key_frame_field_ =
407 GetFieldID(jni, j_output_buffer_info_class, "isKeyFrame", "Z"); 395 GetFieldID(jni, j_output_buffer_info_class, "isKeyFrame", "Z");
408 j_info_presentation_timestamp_us_field_ = GetFieldID( 396 j_info_presentation_timestamp_us_field_ = GetFieldID(
409 jni, j_output_buffer_info_class, "presentationTimestampUs", "J"); 397 jni, j_output_buffer_info_class, "presentationTimestampUs", "J");
410 if (CheckException(jni)) { 398 if (CheckException(jni)) {
411 ALOGW << "MediaCodecVideoEncoder ctor failed."; 399 ALOGW << "MediaCodecVideoEncoder ctor failed.";
412 ProcessHWError(true /* reset_if_fallback_unavailable */); 400 ProcessHWError(true /* reset_if_fallback_unavailable */);
413 } 401 }
414 402
415 webrtc::Random random(rtc::TimeMicros()); 403 Random random(rtc::TimeMicros());
416 picture_id_ = random.Rand<uint16_t>() & 0x7FFF; 404 picture_id_ = random.Rand<uint16_t>() & 0x7FFF;
417 tl0_pic_idx_ = random.Rand<uint8_t>(); 405 tl0_pic_idx_ = random.Rand<uint8_t>();
418 } 406 }
419 407
420 int32_t MediaCodecVideoEncoder::InitEncode( 408 int32_t MediaCodecVideoEncoder::InitEncode(const VideoCodec* codec_settings,
421 const webrtc::VideoCodec* codec_settings, 409 int32_t /* number_of_cores */,
422 int32_t /* number_of_cores */, 410 size_t /* max_payload_size */) {
423 size_t /* max_payload_size */) {
424 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_); 411 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_);
425 if (codec_settings == NULL) { 412 if (codec_settings == NULL) {
426 ALOGE << "NULL VideoCodec instance"; 413 ALOGE << "NULL VideoCodec instance";
427 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; 414 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
428 } 415 }
429 // Factory should guard against other codecs being used with us. 416 // Factory should guard against other codecs being used with us.
430 const VideoCodecType codec_type = GetCodecType(); 417 const VideoCodecType codec_type = GetCodecType();
431 RTC_CHECK(codec_settings->codecType == codec_type) 418 RTC_CHECK(codec_settings->codecType == codec_type)
432 << "Unsupported codec " << codec_settings->codecType << " for " 419 << "Unsupported codec " << codec_settings->codecType << " for "
433 << codec_type; 420 << codec_type;
(...skipping 11 matching lines...)
445 } else if (codec_type == kVideoCodecVP9) { 432 } else if (codec_type == kVideoCodecVP9) {
446 scale_ = codec_settings->VP9().automaticResizeOn; 433 scale_ = codec_settings->VP9().automaticResizeOn;
447 } else { 434 } else {
448 scale_ = true; 435 scale_ = true;
449 } 436 }
450 437
451 ALOGD << "InitEncode request: " << init_width << " x " << init_height; 438 ALOGD << "InitEncode request: " << init_width << " x " << init_height;
452 ALOGD << "Encoder automatic resize " << (scale_ ? "enabled" : "disabled"); 439 ALOGD << "Encoder automatic resize " << (scale_ ? "enabled" : "disabled");
453 440
454 // Check allowed H.264 profile 441 // Check allowed H.264 profile
455 profile_ = webrtc::H264::Profile::kProfileBaseline; 442 profile_ = H264::Profile::kProfileBaseline;
456 if (codec_type == kVideoCodecH264) { 443 if (codec_type == kVideoCodecH264) {
457 const rtc::Optional<webrtc::H264::ProfileLevelId> profile_level_id = 444 const rtc::Optional<H264::ProfileLevelId> profile_level_id =
458 webrtc::H264::ParseSdpProfileLevelId(codec_.params); 445 H264::ParseSdpProfileLevelId(codec_.params);
459 RTC_DCHECK(profile_level_id); 446 RTC_DCHECK(profile_level_id);
460 profile_ = profile_level_id->profile; 447 profile_ = profile_level_id->profile;
461 ALOGD << "H.264 profile: " << profile_; 448 ALOGD << "H.264 profile: " << profile_;
462 } 449 }
463 450
464 return InitEncodeInternal( 451 return InitEncodeInternal(
465 init_width, init_height, codec_settings->startBitrate, 452 init_width, init_height, codec_settings->startBitrate,
466 codec_settings->maxFramerate, codec_settings->expect_encode_from_texture); 453 codec_settings->maxFramerate, codec_settings->expect_encode_from_texture);
467 } 454 }
468 455
(...skipping 77 matching lines...)
546 return false; 533 return false;
547 } 534 }
548 535
549 int32_t MediaCodecVideoEncoder::ProcessHWErrorOnEncode() { 536 int32_t MediaCodecVideoEncoder::ProcessHWErrorOnEncode() {
550 ProcessHWError(true /* reset_if_fallback_unavailable */); 537 ProcessHWError(true /* reset_if_fallback_unavailable */);
551 return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE 538 return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE
552 : WEBRTC_VIDEO_CODEC_ERROR; 539 : WEBRTC_VIDEO_CODEC_ERROR;
553 } 540 }
554 541
555 VideoCodecType MediaCodecVideoEncoder::GetCodecType() const { 542 VideoCodecType MediaCodecVideoEncoder::GetCodecType() const {
556 return webrtc::PayloadStringToCodecType(codec_.name); 543 return PayloadStringToCodecType(codec_.name);
557 } 544 }
558 545
559 int32_t MediaCodecVideoEncoder::InitEncodeInternal(int width, 546 int32_t MediaCodecVideoEncoder::InitEncodeInternal(int width,
560 int height, 547 int height,
561 int kbps, 548 int kbps,
562 int fps, 549 int fps,
563 bool use_surface) { 550 bool use_surface) {
564 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_); 551 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_);
565 if (sw_fallback_required_) { 552 if (sw_fallback_required_) {
566 return WEBRTC_VIDEO_CODEC_OK; 553 return WEBRTC_VIDEO_CODEC_OK;
(...skipping 28 matching lines...)
595 current_bytes_ = 0; 582 current_bytes_ = 0;
596 current_acc_qp_ = 0; 583 current_acc_qp_ = 0;
597 current_encoding_time_ms_ = 0; 584 current_encoding_time_ms_ = 0;
598 last_input_timestamp_ms_ = -1; 585 last_input_timestamp_ms_ = -1;
599 last_output_timestamp_ms_ = -1; 586 last_output_timestamp_ms_ = -1;
600 output_timestamp_ = 0; 587 output_timestamp_ = 0;
601 output_render_time_ms_ = 0; 588 output_render_time_ms_ = 0;
602 input_frame_infos_.clear(); 589 input_frame_infos_.clear();
603 drop_next_input_frame_ = false; 590 drop_next_input_frame_ = false;
604 use_surface_ = use_surface; 591 use_surface_ = use_surface;
605 gof_.SetGofInfoVP9(webrtc::TemporalStructureMode::kTemporalStructureMode1); 592 gof_.SetGofInfoVP9(TemporalStructureMode::kTemporalStructureMode1);
606 gof_idx_ = 0; 593 gof_idx_ = 0;
607 last_frame_received_ms_ = -1; 594 last_frame_received_ms_ = -1;
608 frames_received_since_last_key_ = kMinKeyFrameInterval; 595 frames_received_since_last_key_ = kMinKeyFrameInterval;
609 596
610 // We enforce no extra stride/padding in the format creation step. 597 // We enforce no extra stride/padding in the format creation step.
611 jobject j_video_codec_enum = JavaEnumFromIndexAndClassName( 598 jobject j_video_codec_enum = JavaEnumFromIndexAndClassName(
612 jni, "MediaCodecVideoEncoder$VideoCodecType", codec_type); 599 jni, "MediaCodecVideoEncoder$VideoCodecType", codec_type);
613 const bool encode_status = jni->CallBooleanMethod( 600 const bool encode_status = jni->CallBooleanMethod(
614 *j_media_codec_video_encoder_, j_init_encode_method_, j_video_codec_enum, 601 *j_media_codec_video_encoder_, j_init_encode_method_, j_video_codec_enum,
615 profile_, width, height, kbps, fps, 602 profile_, width, height, kbps, fps,
(...skipping 63 matching lines...)
679 #endif 666 #endif
680 inited_ = true; 667 inited_ = true;
681 } 668 }
682 weak_factory_.reset(new rtc::WeakPtrFactory<MediaCodecVideoEncoder>(this)); 669 weak_factory_.reset(new rtc::WeakPtrFactory<MediaCodecVideoEncoder>(this));
683 encode_task_.reset(new EncodeTask(weak_factory_->GetWeakPtr())); 670 encode_task_.reset(new EncodeTask(weak_factory_->GetWeakPtr()));
684 671
685 return WEBRTC_VIDEO_CODEC_OK; 672 return WEBRTC_VIDEO_CODEC_OK;
686 } 673 }
687 674
688 int32_t MediaCodecVideoEncoder::Encode( 675 int32_t MediaCodecVideoEncoder::Encode(
689 const webrtc::VideoFrame& frame, 676 const VideoFrame& frame,
690 const webrtc::CodecSpecificInfo* /* codec_specific_info */, 677 const CodecSpecificInfo* /* codec_specific_info */,
691 const std::vector<webrtc::FrameType>* frame_types) { 678 const std::vector<FrameType>* frame_types) {
692 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_); 679 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_);
693 if (sw_fallback_required_) 680 if (sw_fallback_required_)
694 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; 681 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
695 JNIEnv* jni = AttachCurrentThreadIfNeeded(); 682 JNIEnv* jni = AttachCurrentThreadIfNeeded();
696 ScopedLocalRefFrame local_ref_frame(jni); 683 ScopedLocalRefFrame local_ref_frame(jni);
697 const int64_t frame_input_time_ms = rtc::TimeMillis(); 684 const int64_t frame_input_time_ms = rtc::TimeMillis();
698 685
699 if (!inited_) { 686 if (!inited_) {
700 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; 687 return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
701 } 688 }
702 689
703 bool send_key_frame = false; 690 bool send_key_frame = false;
704 if (codec_mode_ == webrtc::kRealtimeVideo) { 691 if (codec_mode_ == kRealtimeVideo) {
705 ++frames_received_since_last_key_; 692 ++frames_received_since_last_key_;
706 int64_t now_ms = rtc::TimeMillis(); 693 int64_t now_ms = rtc::TimeMillis();
707 if (last_frame_received_ms_ != -1 && 694 if (last_frame_received_ms_ != -1 &&
708 (now_ms - last_frame_received_ms_) > kFrameDiffThresholdMs) { 695 (now_ms - last_frame_received_ms_) > kFrameDiffThresholdMs) {
709 // Add limit to prevent triggering a key for every frame for very low 696 // Add limit to prevent triggering a key for every frame for very low
710 // framerates (e.g. if frame diff > kFrameDiffThresholdMs). 697 // framerates (e.g. if frame diff > kFrameDiffThresholdMs).
711 if (frames_received_since_last_key_ > kMinKeyFrameInterval) { 698 if (frames_received_since_last_key_ > kMinKeyFrameInterval) {
712 ALOGD << "Send key, frame diff: " << (now_ms - last_frame_received_ms_); 699 ALOGD << "Send key, frame diff: " << (now_ms - last_frame_received_ms_);
713 send_key_frame = true; 700 send_key_frame = true;
714 } 701 }
(...skipping 39 matching lines...)
754 if (consecutive_full_queue_frame_drops_ >= 741 if (consecutive_full_queue_frame_drops_ >=
755 ENCODER_STALL_FRAMEDROP_THRESHOLD) { 742 ENCODER_STALL_FRAMEDROP_THRESHOLD) {
756 ALOGE << "Encoder got stuck."; 743 ALOGE << "Encoder got stuck.";
757 return ProcessHWErrorOnEncode(); 744 return ProcessHWErrorOnEncode();
758 } 745 }
759 frames_dropped_media_encoder_++; 746 frames_dropped_media_encoder_++;
760 return WEBRTC_VIDEO_CODEC_OK; 747 return WEBRTC_VIDEO_CODEC_OK;
761 } 748 }
762 consecutive_full_queue_frame_drops_ = 0; 749 consecutive_full_queue_frame_drops_ = 0;
763 750
764 rtc::scoped_refptr<webrtc::VideoFrameBuffer> input_buffer( 751 rtc::scoped_refptr<VideoFrameBuffer> input_buffer(frame.video_frame_buffer());
765 frame.video_frame_buffer());
766 752
767 VideoFrame input_frame(input_buffer, frame.timestamp(), 753 VideoFrame input_frame(input_buffer, frame.timestamp(),
768 frame.render_time_ms(), frame.rotation()); 754 frame.render_time_ms(), frame.rotation());
769 755
770 if (!MaybeReconfigureEncoder(jni, input_frame)) { 756 if (!MaybeReconfigureEncoder(jni, input_frame)) {
771 ALOGE << "Failed to reconfigure encoder."; 757 ALOGE << "Failed to reconfigure encoder.";
772 return WEBRTC_VIDEO_CODEC_ERROR; 758 return WEBRTC_VIDEO_CODEC_ERROR;
773 } 759 }
774 760
775 const bool key_frame = 761 const bool key_frame =
776 frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame; 762 frame_types->front() != kVideoFrameDelta || send_key_frame;
777 bool encode_status = true; 763 bool encode_status = true;
778 764
779 int j_input_buffer_index = -1; 765 int j_input_buffer_index = -1;
780 if (!use_surface_) { 766 if (!use_surface_) {
781 j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_, 767 j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_,
782 j_dequeue_input_buffer_method_); 768 j_dequeue_input_buffer_method_);
783 if (CheckException(jni)) { 769 if (CheckException(jni)) {
 784 ALOGE << "Exception in dequeue input buffer."; 770 ALOGE << "Exception in dequeue input buffer.";
785 return ProcessHWErrorOnEncode(); 771 return ProcessHWErrorOnEncode();
786 } 772 }
787 if (j_input_buffer_index == -1) { 773 if (j_input_buffer_index == -1) {
788 // Video codec falls behind - no input buffer available. 774 // Video codec falls behind - no input buffer available.
789 ALOGW << "Encoder drop frame - no input buffers available"; 775 ALOGW << "Encoder drop frame - no input buffers available";
790 if (frames_received_ > 1) { 776 if (frames_received_ > 1) {
791 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_; 777 current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
792 frames_dropped_media_encoder_++; 778 frames_dropped_media_encoder_++;
793 } else { 779 } else {
794 // Input buffers are not ready after codec initialization, HW is still 780 // Input buffers are not ready after codec initialization, HW is still
 795 // allocating them - this is expected and should not result in drop 781 // allocating them - this is expected and should not result in drop
796 // frame report. 782 // frame report.
797 frames_received_ = 0; 783 frames_received_ = 0;
798 } 784 }
799 return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887. 785 return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887.
800 } else if (j_input_buffer_index == -2) { 786 } else if (j_input_buffer_index == -2) {
801 return ProcessHWErrorOnEncode(); 787 return ProcessHWErrorOnEncode();
802 } 788 }
803 } 789 }
804 790
805 if (input_frame.video_frame_buffer()->type() != 791 if (input_frame.video_frame_buffer()->type() !=
806 webrtc::VideoFrameBuffer::Type::kNative) { 792 VideoFrameBuffer::Type::kNative) {
807 encode_status = 793 encode_status =
808 EncodeByteBuffer(jni, key_frame, input_frame, j_input_buffer_index); 794 EncodeByteBuffer(jni, key_frame, input_frame, j_input_buffer_index);
809 } else { 795 } else {
810 AndroidVideoFrameBuffer* android_buffer = 796 AndroidVideoFrameBuffer* android_buffer =
811 static_cast<AndroidVideoFrameBuffer*>( 797 static_cast<AndroidVideoFrameBuffer*>(
812 input_frame.video_frame_buffer().get()); 798 input_frame.video_frame_buffer().get());
813 switch (android_buffer->android_type()) { 799 switch (android_buffer->android_type()) {
814 case AndroidVideoFrameBuffer::AndroidType::kTextureBuffer: 800 case AndroidVideoFrameBuffer::AndroidType::kTextureBuffer:
815 encode_status = EncodeTexture(jni, key_frame, input_frame); 801 encode_status = EncodeTexture(jni, key_frame, input_frame);
816 break; 802 break;
(...skipping 28 matching lines...)
845 rtc::TaskQueue::Current()->PostDelayedTask(std::move(encode_task_), 831 rtc::TaskQueue::Current()->PostDelayedTask(std::move(encode_task_),
846 kMediaCodecPollMs); 832 kMediaCodecPollMs);
847 } 833 }
848 834
849 if (!DeliverPendingOutputs(jni)) { 835 if (!DeliverPendingOutputs(jni)) {
850 return ProcessHWErrorOnEncode(); 836 return ProcessHWErrorOnEncode();
851 } 837 }
852 return WEBRTC_VIDEO_CODEC_OK; 838 return WEBRTC_VIDEO_CODEC_OK;
853 } 839 }
854 840
855 bool MediaCodecVideoEncoder::MaybeReconfigureEncoder( 841 bool MediaCodecVideoEncoder::MaybeReconfigureEncoder(JNIEnv* jni,
856 JNIEnv* jni, 842 const VideoFrame& frame) {
857 const webrtc::VideoFrame& frame) {
858 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_); 843 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_);
859 844
860 bool is_texture = IsTextureFrame(jni, frame); 845 bool is_texture = IsTextureFrame(jni, frame);
861 const bool reconfigure_due_to_format = is_texture != use_surface_; 846 const bool reconfigure_due_to_format = is_texture != use_surface_;
862 const bool reconfigure_due_to_size = 847 const bool reconfigure_due_to_size =
863 frame.width() != width_ || frame.height() != height_; 848 frame.width() != width_ || frame.height() != height_;
864 849
865 if (reconfigure_due_to_format) { 850 if (reconfigure_due_to_format) {
866 ALOGD << "Reconfigure encoder due to format change. " 851 ALOGD << "Reconfigure encoder due to format change. "
867 << (use_surface_ ? 852 << (use_surface_ ?
(...skipping 13 matching lines...)
881 if (!reconfigure_due_to_format && !reconfigure_due_to_size) 866 if (!reconfigure_due_to_format && !reconfigure_due_to_size)
882 return true; 867 return true;
883 868
884 Release(); 869 Release();
885 870
886 return InitEncodeInternal(width_, height_, 0, 0, is_texture) == 871 return InitEncodeInternal(width_, height_, 0, 0, is_texture) ==
887 WEBRTC_VIDEO_CODEC_OK; 872 WEBRTC_VIDEO_CODEC_OK;
888 } 873 }
889 874
890 bool MediaCodecVideoEncoder::IsTextureFrame(JNIEnv* jni, 875 bool MediaCodecVideoEncoder::IsTextureFrame(JNIEnv* jni,
891 const webrtc::VideoFrame& frame) { 876 const VideoFrame& frame) {
892 if (frame.video_frame_buffer()->type() != 877 if (frame.video_frame_buffer()->type() != VideoFrameBuffer::Type::kNative) {
893 webrtc::VideoFrameBuffer::Type::kNative) {
894 return false; 878 return false;
895 } 879 }
896 880
897 AndroidVideoFrameBuffer* android_buffer = 881 AndroidVideoFrameBuffer* android_buffer =
898 static_cast<AndroidVideoFrameBuffer*>(frame.video_frame_buffer().get()); 882 static_cast<AndroidVideoFrameBuffer*>(frame.video_frame_buffer().get());
899 switch (android_buffer->android_type()) { 883 switch (android_buffer->android_type()) {
900 case AndroidVideoFrameBuffer::AndroidType::kTextureBuffer: 884 case AndroidVideoFrameBuffer::AndroidType::kTextureBuffer:
901 return true; 885 return true;
902 case AndroidVideoFrameBuffer::AndroidType::kJavaBuffer: 886 case AndroidVideoFrameBuffer::AndroidType::kJavaBuffer:
903 return jni->IsInstanceOf(static_cast<AndroidVideoBuffer*>(android_buffer) 887 return jni->IsInstanceOf(static_cast<AndroidVideoBuffer*>(android_buffer)
904 ->video_frame_buffer(), 888 ->video_frame_buffer(),
905 *j_video_frame_texture_buffer_class_); 889 *j_video_frame_texture_buffer_class_);
906 default: 890 default:
907 RTC_NOTREACHED(); 891 RTC_NOTREACHED();
908 return false; 892 return false;
909 } 893 }
910 } 894 }
911 895
912 bool MediaCodecVideoEncoder::EncodeByteBuffer(JNIEnv* jni, 896 bool MediaCodecVideoEncoder::EncodeByteBuffer(JNIEnv* jni,
913 bool key_frame, 897 bool key_frame,
914 const webrtc::VideoFrame& frame, 898 const VideoFrame& frame,
915 int input_buffer_index) { 899 int input_buffer_index) {
916 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_); 900 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_);
917 RTC_CHECK(!use_surface_); 901 RTC_CHECK(!use_surface_);
918 902
919 rtc::scoped_refptr<webrtc::I420BufferInterface> i420_buffer = 903 rtc::scoped_refptr<I420BufferInterface> i420_buffer =
920 frame.video_frame_buffer()->ToI420(); 904 frame.video_frame_buffer()->ToI420();
921 if (!FillInputBuffer(jni, input_buffer_index, i420_buffer->DataY(), 905 if (!FillInputBuffer(jni, input_buffer_index, i420_buffer->DataY(),
922 i420_buffer->StrideY(), i420_buffer->DataU(), 906 i420_buffer->StrideY(), i420_buffer->DataU(),
923 i420_buffer->StrideU(), i420_buffer->DataV(), 907 i420_buffer->StrideU(), i420_buffer->DataV(),
924 i420_buffer->StrideV())) { 908 i420_buffer->StrideV())) {
925 return false; 909 return false;
926 } 910 }
927 bool encode_status = jni->CallBooleanMethod( 911 bool encode_status = jni->CallBooleanMethod(
928 *j_media_codec_video_encoder_, j_encode_buffer_method_, key_frame, 912 *j_media_codec_video_encoder_, j_encode_buffer_method_, key_frame,
929 input_buffer_index, yuv_size_, current_timestamp_us_); 913 input_buffer_index, yuv_size_, current_timestamp_us_);
(...skipping 25 matching lines...)
955 939
956 RTC_CHECK(!libyuv::ConvertFromI420(buffer_y, stride_y, buffer_u, stride_u, 940 RTC_CHECK(!libyuv::ConvertFromI420(buffer_y, stride_y, buffer_u, stride_u,
957 buffer_v, stride_v, yuv_buffer, width_, 941 buffer_v, stride_v, yuv_buffer, width_,
958 width_, height_, encoder_fourcc_)) 942 width_, height_, encoder_fourcc_))
959 << "ConvertFromI420 failed"; 943 << "ConvertFromI420 failed";
960 return true; 944 return true;
961 } 945 }
962 946
963 bool MediaCodecVideoEncoder::EncodeTexture(JNIEnv* jni, 947 bool MediaCodecVideoEncoder::EncodeTexture(JNIEnv* jni,
964 bool key_frame, 948 bool key_frame,
965 const webrtc::VideoFrame& frame) { 949 const VideoFrame& frame) {
966 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_); 950 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_);
967 RTC_CHECK(use_surface_); 951 RTC_CHECK(use_surface_);
968 NativeHandleImpl handle = 952 NativeHandleImpl handle =
969 static_cast<AndroidTextureBuffer*>(frame.video_frame_buffer().get()) 953 static_cast<AndroidTextureBuffer*>(frame.video_frame_buffer().get())
970 ->native_handle_impl(); 954 ->native_handle_impl();
971 955
972 jfloatArray sampling_matrix = handle.sampling_matrix.ToJava(jni); 956 jfloatArray sampling_matrix = handle.sampling_matrix.ToJava(jni);
973 bool encode_status = jni->CallBooleanMethod( 957 bool encode_status = jni->CallBooleanMethod(
974 *j_media_codec_video_encoder_, j_encode_texture_method_, key_frame, 958 *j_media_codec_video_encoder_, j_encode_texture_method_, key_frame,
975 handle.oes_texture_id, sampling_matrix, current_timestamp_us_); 959 handle.oes_texture_id, sampling_matrix, current_timestamp_us_);
(...skipping 14 matching lines...)
990 jlongFromPointer(this), key_frame, frame, input_buffer_index); 974 jlongFromPointer(this), key_frame, frame, input_buffer_index);
991 if (CheckException(jni)) { 975 if (CheckException(jni)) {
992 ALOGE << "Exception in encode frame."; 976 ALOGE << "Exception in encode frame.";
993 ProcessHWError(true /* reset_if_fallback_unavailable */); 977 ProcessHWError(true /* reset_if_fallback_unavailable */);
994 return false; 978 return false;
995 } 979 }
996 return encode_status; 980 return encode_status;
997 } 981 }
998 982
999 int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallback( 983 int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallback(
1000 webrtc::EncodedImageCallback* callback) { 984 EncodedImageCallback* callback) {
1001 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_); 985 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_);
1002 JNIEnv* jni = AttachCurrentThreadIfNeeded(); 986 JNIEnv* jni = AttachCurrentThreadIfNeeded();
1003 ScopedLocalRefFrame local_ref_frame(jni); 987 ScopedLocalRefFrame local_ref_frame(jni);
1004 callback_ = callback; 988 callback_ = callback;
1005 return WEBRTC_VIDEO_CODEC_OK; 989 return WEBRTC_VIDEO_CODEC_OK;
1006 } 990 }
1007 991
1008 int32_t MediaCodecVideoEncoder::Release() { 992 int32_t MediaCodecVideoEncoder::Release() {
1009 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_); 993 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_);
1010 if (!inited_) { 994 if (!inited_) {
(...skipping 20 matching lines...)
1031 rtc::CritScope lock(&inited_crit_); 1015 rtc::CritScope lock(&inited_crit_);
1032 #endif 1016 #endif
1033 inited_ = false; 1017 inited_ = false;
1034 } 1018 }
1035 use_surface_ = false; 1019 use_surface_ = false;
1036 ALOGD << "EncoderRelease done."; 1020 ALOGD << "EncoderRelease done.";
1037 return WEBRTC_VIDEO_CODEC_OK; 1021 return WEBRTC_VIDEO_CODEC_OK;
1038 } 1022 }
1039 1023
1040 int32_t MediaCodecVideoEncoder::SetRateAllocation( 1024 int32_t MediaCodecVideoEncoder::SetRateAllocation(
1041 const webrtc::BitrateAllocation& rate_allocation, 1025 const BitrateAllocation& rate_allocation,
1042 uint32_t frame_rate) { 1026 uint32_t frame_rate) {
1043 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_); 1027 RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_);
1044 const uint32_t new_bit_rate = rate_allocation.get_sum_kbps(); 1028 const uint32_t new_bit_rate = rate_allocation.get_sum_kbps();
1045 if (sw_fallback_required_) 1029 if (sw_fallback_required_)
1046 return WEBRTC_VIDEO_CODEC_OK; 1030 return WEBRTC_VIDEO_CODEC_OK;
1047 frame_rate = 1031 frame_rate =
1048 (frame_rate < MAX_ALLOWED_VIDEO_FPS) ? frame_rate : MAX_ALLOWED_VIDEO_FPS; 1032 (frame_rate < MAX_ALLOWED_VIDEO_FPS) ? frame_rate : MAX_ALLOWED_VIDEO_FPS;
1049 if (last_set_bitrate_kbps_ == new_bit_rate && last_set_fps_ == frame_rate) { 1033 if (last_set_bitrate_kbps_ == new_bit_rate && last_set_fps_ == frame_rate) {
1050 return WEBRTC_VIDEO_CODEC_OK; 1034 return WEBRTC_VIDEO_CODEC_OK;
1051 } 1035 }
(...skipping 88 matching lines...)
1140 uint8_t* payload = reinterpret_cast<uint8_t*>( 1124 uint8_t* payload = reinterpret_cast<uint8_t*>(
1141 jni->GetDirectBufferAddress(j_output_buffer)); 1125 jni->GetDirectBufferAddress(j_output_buffer));
1142 if (CheckException(jni)) { 1126 if (CheckException(jni)) {
1143 ALOGE << "Exception in get direct buffer address."; 1127 ALOGE << "Exception in get direct buffer address.";
1144 ProcessHWError(true /* reset_if_fallback_unavailable */); 1128 ProcessHWError(true /* reset_if_fallback_unavailable */);
1145 return WEBRTC_VIDEO_CODEC_ERROR; 1129 return WEBRTC_VIDEO_CODEC_ERROR;
1146 } 1130 }
1147 1131
1148 // Callback - return encoded frame. 1132 // Callback - return encoded frame.
1149 const VideoCodecType codec_type = GetCodecType(); 1133 const VideoCodecType codec_type = GetCodecType();
1150 webrtc::EncodedImageCallback::Result callback_result( 1134 EncodedImageCallback::Result callback_result(
1151 webrtc::EncodedImageCallback::Result::OK); 1135 EncodedImageCallback::Result::OK);
1152 if (callback_) { 1136 if (callback_) {
1153 std::unique_ptr<webrtc::EncodedImage> image( 1137 std::unique_ptr<EncodedImage> image(
1154 new webrtc::EncodedImage(payload, payload_size, payload_size)); 1138 new EncodedImage(payload, payload_size, payload_size));
1155 image->_encodedWidth = width_; 1139 image->_encodedWidth = width_;
1156 image->_encodedHeight = height_; 1140 image->_encodedHeight = height_;
1157 image->_timeStamp = output_timestamp_; 1141 image->_timeStamp = output_timestamp_;
1158 image->capture_time_ms_ = output_render_time_ms_; 1142 image->capture_time_ms_ = output_render_time_ms_;
1159 image->rotation_ = output_rotation_; 1143 image->rotation_ = output_rotation_;
1160 image->content_type_ = 1144 image->content_type_ = (codec_mode_ == VideoCodecMode::kScreensharing)
1161 (codec_mode_ == webrtc::VideoCodecMode::kScreensharing) 1145 ? VideoContentType::SCREENSHARE
1162 ? webrtc::VideoContentType::SCREENSHARE 1146 : VideoContentType::UNSPECIFIED;
1163 : webrtc::VideoContentType::UNSPECIFIED; 1147 image->timing_.flags = TimingFrameFlags::kInvalid;
1164 image->timing_.flags = webrtc::TimingFrameFlags::kInvalid; 1148 image->_frameType = (key_frame ? kVideoFrameKey : kVideoFrameDelta);
1165 image->_frameType =
1166 (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta);
1167 image->_completeFrame = true; 1149 image->_completeFrame = true;
1168 webrtc::CodecSpecificInfo info; 1150 CodecSpecificInfo info;
1169 memset(&info, 0, sizeof(info)); 1151 memset(&info, 0, sizeof(info));
1170 info.codecType = codec_type; 1152 info.codecType = codec_type;
1171 if (codec_type == kVideoCodecVP8) { 1153 if (codec_type == kVideoCodecVP8) {
1172 info.codecSpecific.VP8.pictureId = picture_id_; 1154 info.codecSpecific.VP8.pictureId = picture_id_;
1173 info.codecSpecific.VP8.nonReference = false; 1155 info.codecSpecific.VP8.nonReference = false;
1174 info.codecSpecific.VP8.simulcastIdx = 0; 1156 info.codecSpecific.VP8.simulcastIdx = 0;
1175 info.codecSpecific.VP8.temporalIdx = webrtc::kNoTemporalIdx; 1157 info.codecSpecific.VP8.temporalIdx = kNoTemporalIdx;
1176 info.codecSpecific.VP8.layerSync = false; 1158 info.codecSpecific.VP8.layerSync = false;
1177 info.codecSpecific.VP8.tl0PicIdx = webrtc::kNoTl0PicIdx; 1159 info.codecSpecific.VP8.tl0PicIdx = kNoTl0PicIdx;
1178 info.codecSpecific.VP8.keyIdx = webrtc::kNoKeyIdx; 1160 info.codecSpecific.VP8.keyIdx = kNoKeyIdx;
1179 } else if (codec_type == kVideoCodecVP9) { 1161 } else if (codec_type == kVideoCodecVP9) {
1180 if (key_frame) { 1162 if (key_frame) {
1181 gof_idx_ = 0; 1163 gof_idx_ = 0;
1182 } 1164 }
1183 info.codecSpecific.VP9.picture_id = picture_id_; 1165 info.codecSpecific.VP9.picture_id = picture_id_;
1184 info.codecSpecific.VP9.inter_pic_predicted = key_frame ? false : true; 1166 info.codecSpecific.VP9.inter_pic_predicted = key_frame ? false : true;
1185 info.codecSpecific.VP9.flexible_mode = false; 1167 info.codecSpecific.VP9.flexible_mode = false;
1186 info.codecSpecific.VP9.ss_data_available = key_frame ? true : false; 1168 info.codecSpecific.VP9.ss_data_available = key_frame ? true : false;
1187 info.codecSpecific.VP9.tl0_pic_idx = tl0_pic_idx_++; 1169 info.codecSpecific.VP9.tl0_pic_idx = tl0_pic_idx_++;
1188 info.codecSpecific.VP9.temporal_idx = webrtc::kNoTemporalIdx; 1170 info.codecSpecific.VP9.temporal_idx = kNoTemporalIdx;
1189 info.codecSpecific.VP9.spatial_idx = webrtc::kNoSpatialIdx; 1171 info.codecSpecific.VP9.spatial_idx = kNoSpatialIdx;
1190 info.codecSpecific.VP9.temporal_up_switch = true; 1172 info.codecSpecific.VP9.temporal_up_switch = true;
1191 info.codecSpecific.VP9.inter_layer_predicted = false; 1173 info.codecSpecific.VP9.inter_layer_predicted = false;
1192 info.codecSpecific.VP9.gof_idx = 1174 info.codecSpecific.VP9.gof_idx =
1193 static_cast<uint8_t>(gof_idx_++ % gof_.num_frames_in_gof); 1175 static_cast<uint8_t>(gof_idx_++ % gof_.num_frames_in_gof);
1194 info.codecSpecific.VP9.num_spatial_layers = 1; 1176 info.codecSpecific.VP9.num_spatial_layers = 1;
1195 info.codecSpecific.VP9.spatial_layer_resolution_present = false; 1177 info.codecSpecific.VP9.spatial_layer_resolution_present = false;
1196 if (info.codecSpecific.VP9.ss_data_available) { 1178 if (info.codecSpecific.VP9.ss_data_available) {
1197 info.codecSpecific.VP9.spatial_layer_resolution_present = true; 1179 info.codecSpecific.VP9.spatial_layer_resolution_present = true;
1198 info.codecSpecific.VP9.width[0] = width_; 1180 info.codecSpecific.VP9.width[0] = width_;
1199 info.codecSpecific.VP9.height[0] = height_; 1181 info.codecSpecific.VP9.height[0] = height_;
1200 info.codecSpecific.VP9.gof.CopyGofInfoVP9(gof_); 1182 info.codecSpecific.VP9.gof.CopyGofInfoVP9(gof_);
1201 } 1183 }
1202 } 1184 }
1203 picture_id_ = (picture_id_ + 1) & 0x7FFF; 1185 picture_id_ = (picture_id_ + 1) & 0x7FFF;
1204 1186
1205 // Generate a header describing a single fragment. 1187 // Generate a header describing a single fragment.
1206 webrtc::RTPFragmentationHeader header; 1188 RTPFragmentationHeader header;
1207 memset(&header, 0, sizeof(header)); 1189 memset(&header, 0, sizeof(header));
1208 if (codec_type == kVideoCodecVP8 || codec_type == kVideoCodecVP9) { 1190 if (codec_type == kVideoCodecVP8 || codec_type == kVideoCodecVP9) {
1209 header.VerifyAndAllocateFragmentationHeader(1); 1191 header.VerifyAndAllocateFragmentationHeader(1);
1210 header.fragmentationOffset[0] = 0; 1192 header.fragmentationOffset[0] = 0;
1211 header.fragmentationLength[0] = image->_length; 1193 header.fragmentationLength[0] = image->_length;
1212 header.fragmentationPlType[0] = 0; 1194 header.fragmentationPlType[0] = 0;
1213 header.fragmentationTimeDiff[0] = 0; 1195 header.fragmentationTimeDiff[0] = 0;
1214 if (codec_type == kVideoCodecVP8) { 1196 if (codec_type == kVideoCodecVP8) {
1215 int qp; 1197 int qp;
1216 if (webrtc::vp8::GetQp(payload, payload_size, &qp)) { 1198 if (vp8::GetQp(payload, payload_size, &qp)) {
1217 current_acc_qp_ += qp; 1199 current_acc_qp_ += qp;
1218 image->qp_ = qp; 1200 image->qp_ = qp;
1219 } 1201 }
1220 } else if (codec_type == kVideoCodecVP9) { 1202 } else if (codec_type == kVideoCodecVP9) {
1221 int qp; 1203 int qp;
1222 if (webrtc::vp9::GetQp(payload, payload_size, &qp)) { 1204 if (vp9::GetQp(payload, payload_size, &qp)) {
1223 current_acc_qp_ += qp; 1205 current_acc_qp_ += qp;
1224 image->qp_ = qp; 1206 image->qp_ = qp;
1225 } 1207 }
1226 } 1208 }
1227 } else if (codec_type == kVideoCodecH264) { 1209 } else if (codec_type == kVideoCodecH264) {
1228 h264_bitstream_parser_.ParseBitstream(payload, payload_size); 1210 h264_bitstream_parser_.ParseBitstream(payload, payload_size);
1229 int qp; 1211 int qp;
1230 if (h264_bitstream_parser_.GetLastSliceQp(&qp)) { 1212 if (h264_bitstream_parser_.GetLastSliceQp(&qp)) {
1231 current_acc_qp_ += qp; 1213 current_acc_qp_ += qp;
1232 image->qp_ = qp; 1214 image->qp_ = qp;
1233 } 1215 }
1234 // For H.264 search for start codes. 1216 // For H.264 search for start codes.
1235 const std::vector<webrtc::H264::NaluIndex> nalu_idxs = 1217 const std::vector<H264::NaluIndex> nalu_idxs =
1236 webrtc::H264::FindNaluIndices(payload, payload_size); 1218 H264::FindNaluIndices(payload, payload_size);
1237 if (nalu_idxs.empty()) { 1219 if (nalu_idxs.empty()) {
1238 ALOGE << "Start code is not found!"; 1220 ALOGE << "Start code is not found!";
1239 ALOGE << "Data:" << image->_buffer[0] << " " << image->_buffer[1] 1221 ALOGE << "Data:" << image->_buffer[0] << " " << image->_buffer[1]
1240 << " " << image->_buffer[2] << " " << image->_buffer[3] 1222 << " " << image->_buffer[2] << " " << image->_buffer[3]
1241 << " " << image->_buffer[4] << " " << image->_buffer[5]; 1223 << " " << image->_buffer[4] << " " << image->_buffer[5];
1242 ProcessHWError(true /* reset_if_fallback_unavailable */); 1224 ProcessHWError(true /* reset_if_fallback_unavailable */);
1243 return false; 1225 return false;
1244 } 1226 }
1245 header.VerifyAndAllocateFragmentationHeader(nalu_idxs.size()); 1227 header.VerifyAndAllocateFragmentationHeader(nalu_idxs.size());
1246 for (size_t i = 0; i < nalu_idxs.size(); i++) { 1228 for (size_t i = 0; i < nalu_idxs.size(); i++) {
(...skipping 62 matching lines...)
1309 ". QP: " << (current_acc_qp_ / current_frames_divider) << 1291 ". QP: " << (current_acc_qp_ / current_frames_divider) <<
1310 " for last " << statistic_time_ms << " ms."; 1292 " for last " << statistic_time_ms << " ms.";
1311 stat_start_time_ms_ = rtc::TimeMillis(); 1293 stat_start_time_ms_ = rtc::TimeMillis();
1312 current_frames_ = 0; 1294 current_frames_ = 0;
1313 current_bytes_ = 0; 1295 current_bytes_ = 0;
1314 current_acc_qp_ = 0; 1296 current_acc_qp_ = 0;
1315 current_encoding_time_ms_ = 0; 1297 current_encoding_time_ms_ = 0;
1316 } 1298 }
1317 } 1299 }
1318 1300
1319 webrtc::VideoEncoder::ScalingSettings 1301 VideoEncoder::ScalingSettings MediaCodecVideoEncoder::GetScalingSettings()
1320 MediaCodecVideoEncoder::GetScalingSettings() const { 1302 const {
1321 if (webrtc::field_trial::IsEnabled(kCustomQPThresholdsFieldTrial)) { 1303 if (field_trial::IsEnabled(kCustomQPThresholdsFieldTrial)) {
1322 const VideoCodecType codec_type = GetCodecType(); 1304 const VideoCodecType codec_type = GetCodecType();
1323 std::string experiment_string = 1305 std::string experiment_string =
1324 webrtc::field_trial::FindFullName(kCustomQPThresholdsFieldTrial); 1306 field_trial::FindFullName(kCustomQPThresholdsFieldTrial);
1325 ALOGD << "QP custom thresholds: " << experiment_string << " for codec " 1307 ALOGD << "QP custom thresholds: " << experiment_string << " for codec "
1326 << codec_type; 1308 << codec_type;
1327 int low_vp8_qp_threshold; 1309 int low_vp8_qp_threshold;
1328 int high_vp8_qp_threshold; 1310 int high_vp8_qp_threshold;
1329 int low_h264_qp_threshold; 1311 int low_h264_qp_threshold;
1330 int high_h264_qp_threshold; 1312 int high_h264_qp_threshold;
1331 int parsed_values = sscanf(experiment_string.c_str(), "Enabled-%u,%u,%u,%u", 1313 int parsed_values = sscanf(experiment_string.c_str(), "Enabled-%u,%u,%u,%u",
1332 &low_vp8_qp_threshold, &high_vp8_qp_threshold, 1314 &low_vp8_qp_threshold, &high_vp8_qp_threshold,
1333 &low_h264_qp_threshold, &high_h264_qp_threshold); 1315 &low_h264_qp_threshold, &high_h264_qp_threshold);
1334 if (parsed_values == 4) { 1316 if (parsed_values == 4) {
(...skipping 49 matching lines...)
1384 bool is_h264_high_profile_hw_supported = jni->CallStaticBooleanMethod( 1366 bool is_h264_high_profile_hw_supported = jni->CallStaticBooleanMethod(
1385 j_decoder_class, 1367 j_decoder_class,
1386 GetStaticMethodID(jni, j_decoder_class, "isH264HighProfileHwSupported", 1368 GetStaticMethodID(jni, j_decoder_class, "isH264HighProfileHwSupported",
1387 "()Z")); 1369 "()Z"));
1388 CHECK_EXCEPTION(jni); 1370 CHECK_EXCEPTION(jni);
1389 if (is_h264_high_profile_hw_supported) { 1371 if (is_h264_high_profile_hw_supported) {
1390 ALOGD << "H.264 High Profile HW Encoder supported."; 1372 ALOGD << "H.264 High Profile HW Encoder supported.";
1391 // TODO(magjed): Enumerate actual level instead of using hardcoded level 1373 // TODO(magjed): Enumerate actual level instead of using hardcoded level
1392 // 3.1. Level 3.1 is 1280x720@30fps which is enough for now. 1374 // 3.1. Level 3.1 is 1280x720@30fps which is enough for now.
1393 cricket::VideoCodec constrained_high(cricket::kH264CodecName); 1375 cricket::VideoCodec constrained_high(cricket::kH264CodecName);
1394 const webrtc::H264::ProfileLevelId constrained_high_profile( 1376 const H264::ProfileLevelId constrained_high_profile(
1395 webrtc::H264::kProfileConstrainedHigh, webrtc::H264::kLevel3_1); 1377 H264::kProfileConstrainedHigh, H264::kLevel3_1);
1396 constrained_high.SetParam( 1378 constrained_high.SetParam(
1397 cricket::kH264FmtpProfileLevelId, 1379 cricket::kH264FmtpProfileLevelId,
1398 *webrtc::H264::ProfileLevelIdToString(constrained_high_profile)); 1380 *H264::ProfileLevelIdToString(constrained_high_profile));
1399 constrained_high.SetParam(cricket::kH264FmtpLevelAsymmetryAllowed, "1"); 1381 constrained_high.SetParam(cricket::kH264FmtpLevelAsymmetryAllowed, "1");
1400 constrained_high.SetParam(cricket::kH264FmtpPacketizationMode, "1"); 1382 constrained_high.SetParam(cricket::kH264FmtpPacketizationMode, "1");
1401 supported_codecs_with_h264_hp_.push_back(constrained_high); 1383 supported_codecs_with_h264_hp_.push_back(constrained_high);
1402 } 1384 }
1403 1385
1404 bool is_h264_hw_supported = jni->CallStaticBooleanMethod( 1386 bool is_h264_hw_supported = jni->CallStaticBooleanMethod(
1405 j_encoder_class, 1387 j_encoder_class,
1406 GetStaticMethodID(jni, j_encoder_class, "isH264HwSupported", "()Z")); 1388 GetStaticMethodID(jni, j_encoder_class, "isH264HwSupported", "()Z"));
1407 CHECK_EXCEPTION(jni); 1389 CHECK_EXCEPTION(jni);
1408 if (is_h264_hw_supported) { 1390 if (is_h264_hw_supported) {
1409 ALOGD << "H.264 HW Encoder supported."; 1391 ALOGD << "H.264 HW Encoder supported.";
1410 // TODO(magjed): Push Constrained High profile as well when negotiation is 1392 // TODO(magjed): Push Constrained High profile as well when negotiation is
1411 // ready, http://crbug/webrtc/6337. We can negotiate Constrained High 1393 // ready, http://crbug/webrtc/6337. We can negotiate Constrained High
1412 // profile as long as we have decode support for it and still send Baseline 1394 // profile as long as we have decode support for it and still send Baseline
1413 // since Baseline is a subset of the High profile. 1395 // since Baseline is a subset of the High profile.
1414 cricket::VideoCodec constrained_baseline(cricket::kH264CodecName); 1396 cricket::VideoCodec constrained_baseline(cricket::kH264CodecName);
1415 const webrtc::H264::ProfileLevelId constrained_baseline_profile( 1397 const H264::ProfileLevelId constrained_baseline_profile(
1416 webrtc::H264::kProfileConstrainedBaseline, webrtc::H264::kLevel3_1); 1398 H264::kProfileConstrainedBaseline, H264::kLevel3_1);
1417 constrained_baseline.SetParam( 1399 constrained_baseline.SetParam(
1418 cricket::kH264FmtpProfileLevelId, 1400 cricket::kH264FmtpProfileLevelId,
1419 *webrtc::H264::ProfileLevelIdToString(constrained_baseline_profile)); 1401 *H264::ProfileLevelIdToString(constrained_baseline_profile));
1420 constrained_baseline.SetParam(cricket::kH264FmtpLevelAsymmetryAllowed, "1"); 1402 constrained_baseline.SetParam(cricket::kH264FmtpLevelAsymmetryAllowed, "1");
1421 constrained_baseline.SetParam(cricket::kH264FmtpPacketizationMode, "1"); 1403 constrained_baseline.SetParam(cricket::kH264FmtpPacketizationMode, "1");
1422 supported_codecs_.push_back(constrained_baseline); 1404 supported_codecs_.push_back(constrained_baseline);
1423 supported_codecs_with_h264_hp_.push_back(constrained_baseline); 1405 supported_codecs_with_h264_hp_.push_back(constrained_baseline);
1424 } 1406 }
1425 } 1407 }
1426 1408
1427 MediaCodecVideoEncoderFactory::~MediaCodecVideoEncoderFactory() { 1409 MediaCodecVideoEncoderFactory::~MediaCodecVideoEncoderFactory() {
1428 ALOGD << "MediaCodecVideoEncoderFactory dtor"; 1410 ALOGD << "MediaCodecVideoEncoderFactory dtor";
1429 if (egl_context_) { 1411 if (egl_context_) {
1430 JNIEnv* jni = AttachCurrentThreadIfNeeded(); 1412 JNIEnv* jni = AttachCurrentThreadIfNeeded();
1431 jni->DeleteGlobalRef(egl_context_); 1413 jni->DeleteGlobalRef(egl_context_);
1432 } 1414 }
1433 } 1415 }
1434 1416
1435 void MediaCodecVideoEncoderFactory::SetEGLContext( 1417 void MediaCodecVideoEncoderFactory::SetEGLContext(
1436 JNIEnv* jni, jobject egl_context) { 1418 JNIEnv* jni, jobject egl_context) {
1437 ALOGD << "MediaCodecVideoEncoderFactory::SetEGLContext"; 1419 ALOGD << "MediaCodecVideoEncoderFactory::SetEGLContext";
1438 if (egl_context_) { 1420 if (egl_context_) {
1439 jni->DeleteGlobalRef(egl_context_); 1421 jni->DeleteGlobalRef(egl_context_);
1440 egl_context_ = nullptr; 1422 egl_context_ = nullptr;
1441 } 1423 }
1442 egl_context_ = jni->NewGlobalRef(egl_context); 1424 egl_context_ = jni->NewGlobalRef(egl_context);
1443 if (CheckException(jni)) { 1425 if (CheckException(jni)) {
1444 ALOGE << "error calling NewGlobalRef for EGL Context."; 1426 ALOGE << "error calling NewGlobalRef for EGL Context.";
1445 } 1427 }
1446 } 1428 }
1447 1429
1448 webrtc::VideoEncoder* MediaCodecVideoEncoderFactory::CreateVideoEncoder( 1430 VideoEncoder* MediaCodecVideoEncoderFactory::CreateVideoEncoder(
1449 const cricket::VideoCodec& codec) { 1431 const cricket::VideoCodec& codec) {
1450 if (supported_codecs().empty()) { 1432 if (supported_codecs().empty()) {
1451 ALOGW << "No HW video encoder for codec " << codec.name; 1433 ALOGW << "No HW video encoder for codec " << codec.name;
1452 return nullptr; 1434 return nullptr;
1453 } 1435 }
1454 if (FindMatchingCodec(supported_codecs(), codec)) { 1436 if (FindMatchingCodec(supported_codecs(), codec)) {
1455 ALOGD << "Create HW video encoder for " << codec.name; 1437 ALOGD << "Create HW video encoder for " << codec.name;
1456 JNIEnv* jni = AttachCurrentThreadIfNeeded(); 1438 JNIEnv* jni = AttachCurrentThreadIfNeeded();
1457 ScopedLocalRefFrame local_ref_frame(jni); 1439 ScopedLocalRefFrame local_ref_frame(jni);
1458 return new MediaCodecVideoEncoder(jni, codec, egl_context_); 1440 return new MediaCodecVideoEncoder(jni, codec, egl_context_);
1459 } 1441 }
1460 ALOGW << "Can not find HW video encoder for type " << codec.name; 1442 ALOGW << "Can not find HW video encoder for type " << codec.name;
1461 return nullptr; 1443 return nullptr;
1462 } 1444 }
1463 1445
1464 const std::vector<cricket::VideoCodec>& 1446 const std::vector<cricket::VideoCodec>&
1465 MediaCodecVideoEncoderFactory::supported_codecs() const { 1447 MediaCodecVideoEncoderFactory::supported_codecs() const {
1466 if (webrtc::field_trial::IsEnabled(kH264HighProfileFieldTrial)) { 1448 if (field_trial::IsEnabled(kH264HighProfileFieldTrial)) {
1467 return supported_codecs_with_h264_hp_; 1449 return supported_codecs_with_h264_hp_;
1468 } else { 1450 } else {
1469 return supported_codecs_; 1451 return supported_codecs_;
1470 } 1452 }
1471 } 1453 }
1472 1454
1473 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( 1455 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder(VideoEncoder* encoder) {
1474 webrtc::VideoEncoder* encoder) {
1475 ALOGD << "Destroy video encoder."; 1456 ALOGD << "Destroy video encoder.";
1476 delete encoder; 1457 delete encoder;
1477 } 1458 }
1478 1459
1479 JNI_FUNCTION_DECLARATION(void, 1460 JNI_FUNCTION_DECLARATION(void,
1480 MediaCodecVideoEncoder_nativeFillBuffer, 1461 MediaCodecVideoEncoder_nativeFillBuffer,
1481 JNIEnv* jni, 1462 JNIEnv* jni,
1482 jclass, 1463 jclass,
1483 jlong native_encoder, 1464 jlong native_encoder,
1484 jint input_buffer, 1465 jint input_buffer,
1485 jobject j_buffer_y, 1466 jobject j_buffer_y,
1486 jint stride_y, 1467 jint stride_y,
1487 jobject j_buffer_u, 1468 jobject j_buffer_u,
1488 jint stride_u, 1469 jint stride_u,
1489 jobject j_buffer_v, 1470 jobject j_buffer_v,
1490 jint stride_v) { 1471 jint stride_v) {
1491 uint8_t* buffer_y = 1472 uint8_t* buffer_y =
1492 static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_buffer_y)); 1473 static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_buffer_y));
1493 uint8_t* buffer_u = 1474 uint8_t* buffer_u =
1494 static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_buffer_u)); 1475 static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_buffer_u));
1495 uint8_t* buffer_v = 1476 uint8_t* buffer_v =
1496 static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_buffer_v)); 1477 static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_buffer_v));
1497 1478
1498 reinterpret_cast<MediaCodecVideoEncoder*>(native_encoder) 1479 reinterpret_cast<MediaCodecVideoEncoder*>(native_encoder)
1499 ->FillInputBuffer(jni, input_buffer, buffer_y, stride_y, buffer_u, 1480 ->FillInputBuffer(jni, input_buffer, buffer_y, stride_y, buffer_u,
1500 stride_u, buffer_v, stride_v); 1481 stride_u, buffer_v, stride_v);
1501 } 1482 }
1502 1483
1503 } // namespace webrtc_jni 1484 } // namespace jni
1485 } // namespace webrtc
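For reviewers checking call sites: with the rename, code outside the namespace refers to these classes through their nested qualified names. A hedged sketch of an external call site, assuming the factory keeps the default constructor and the CreateVideoEncoder/DestroyVideoEncoder methods shown above, and that the class is declared in androidmediaencoder_jni.h (the companion header in this patch):

#include "webrtc/sdk/android/src/jni/androidmediaencoder_jni.h"

// Illustrative only: obtain a HW encoder through its new qualified name.
// CreateVideoEncoder() returns nullptr when no matching HW codec exists,
// mirroring the factory logic in the diff above.
webrtc::VideoEncoder* CreateHwEncoder(
    webrtc::jni::MediaCodecVideoEncoderFactory* factory,
    const cricket::VideoCodec& codec) {
  return factory->CreateVideoEncoder(codec);
}

void ReleaseHwEncoder(webrtc::jni::MediaCodecVideoEncoderFactory* factory,
                      webrtc::VideoEncoder* encoder) {
  factory->DestroyVideoEncoder(encoder);  // deletes the encoder instance
}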