OLD | NEW |
1 /* | 1 /* |
2 * libjingle | 2 * libjingle |
3 * Copyright 2015 Google Inc. | 3 * Copyright 2015 Google Inc. |
4 * | 4 * |
5 * Redistribution and use in source and binary forms, with or without | 5 * Redistribution and use in source and binary forms, with or without |
6 * modification, are permitted provided that the following conditions are met: | 6 * modification, are permitted provided that the following conditions are met: |
7 * | 7 * |
8 * 1. Redistributions of source code must retain the above copyright notice, | 8 * 1. Redistributions of source code must retain the above copyright notice, |
9 * this list of conditions and the following disclaimer. | 9 * this list of conditions and the following disclaimer. |
10 * 2. Redistributions in binary form must reproduce the above copyright notice, | 10 * 2. Redistributions in binary form must reproduce the above copyright notice, |
(...skipping 218 matching lines...)
229 "()V"))) { | 229 "()V"))) { |
230 ScopedLocalRefFrame local_ref_frame(jni); | 230 ScopedLocalRefFrame local_ref_frame(jni); |
231 // It would be nice to avoid spinning up a new thread per MediaCodec, and | 231 // It would be nice to avoid spinning up a new thread per MediaCodec, and |
232 // instead re-use e.g. the PeerConnectionFactory's |worker_thread_|, but bug | 232 // instead re-use e.g. the PeerConnectionFactory's |worker_thread_|, but bug |
233 // 2732 means that deadlocks abound. This class synchronously trampolines | 233 // 2732 means that deadlocks abound. This class synchronously trampolines |
234 // to |codec_thread_|, so if anything else can be coming to _us_ from | 234 // to |codec_thread_|, so if anything else can be coming to _us_ from |
235 // |codec_thread_|, or from any thread holding the |_sendCritSect| described | 235 // |codec_thread_|, or from any thread holding the |_sendCritSect| described |
236 // in the bug, we have a problem. For now work around that with a dedicated | 236 // in the bug, we have a problem. For now work around that with a dedicated |
237 // thread. | 237 // thread. |
238 codec_thread_->SetName("MediaCodecVideoEncoder", NULL); | 238 codec_thread_->SetName("MediaCodecVideoEncoder", NULL); |
239 CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoEncoder"; | 239 RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoEncoder"; |
240 | 240 |
241 jclass j_output_buffer_info_class = | 241 jclass j_output_buffer_info_class = |
242 FindClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo"); | 242 FindClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo"); |
243 j_init_encode_method_ = GetMethodID( | 243 j_init_encode_method_ = GetMethodID( |
244 jni, | 244 jni, |
245 *j_media_codec_video_encoder_class_, | 245 *j_media_codec_video_encoder_class_, |
246 "initEncode", | 246 "initEncode", |
247 "(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;IIII)" | 247 "(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;IIII)" |
248 "[Ljava/nio/ByteBuffer;"); | 248 "[Ljava/nio/ByteBuffer;"); |
249 j_dequeue_input_buffer_method_ = GetMethodID( | 249 j_dequeue_input_buffer_method_ = GetMethodID( |
(...skipping 35 matching lines...)
285 // QP is obtained from VP8-bitstream for HW, so the QP corresponds to the | 285 // QP is obtained from VP8-bitstream for HW, so the QP corresponds to the |
286 // (internal) range: [0, 127]. And we cannot change QP_max in HW, so it is | 286 // (internal) range: [0, 127]. And we cannot change QP_max in HW, so it is |
287 // always = 127. Note that in SW, QP is that of the user-level range [0, 63]. | 287 // always = 127. Note that in SW, QP is that of the user-level range [0, 63]. |
288 const int kMaxQP = 127; | 288 const int kMaxQP = 127; |
289 const int kLowQpThresholdDenominator = 3; | 289 const int kLowQpThresholdDenominator = 3; |
290 if (codec_settings == NULL) { | 290 if (codec_settings == NULL) { |
291 ALOGE("NULL VideoCodec instance"); | 291 ALOGE("NULL VideoCodec instance"); |
292 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 292 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
293 } | 293 } |
294 // Factory should guard against other codecs being used with us. | 294 // Factory should guard against other codecs being used with us. |
295 CHECK(codec_settings->codecType == codecType_) << "Unsupported codec " << | 295 RTC_CHECK(codec_settings->codecType == codecType_) |
296 codec_settings->codecType << " for " << codecType_; | 296 << "Unsupported codec " << codec_settings->codecType << " for " |
| 297 << codecType_; |
297 | 298 |
298 ALOGD("InitEncode request"); | 299 ALOGD("InitEncode request"); |
299 scale_ = false; | 300 scale_ = false; |
300 if (scale_ && codecType_ == kVideoCodecVP8) { | 301 if (scale_ && codecType_ == kVideoCodecVP8) { |
301 quality_scaler_->Init(kMaxQP / kLowQpThresholdDenominator, true); | 302 quality_scaler_->Init(kMaxQP / kLowQpThresholdDenominator, true); |
302 quality_scaler_->SetMinResolution(kMinWidth, kMinHeight); | 303 quality_scaler_->SetMinResolution(kMinWidth, kMinHeight); |
303 quality_scaler_->ReportFramerate(codec_settings->maxFramerate); | 304 quality_scaler_->ReportFramerate(codec_settings->maxFramerate); |
304 updated_framerate_ = codec_settings->maxFramerate; | 305 updated_framerate_ = codec_settings->maxFramerate; |
305 } else { | 306 } else { |
306 updated_framerate_ = -1; | 307 updated_framerate_ = -1; |
(...skipping 45 matching lines...)
352 new_bit_rate, | 353 new_bit_rate, |
353 frame_rate)); | 354 frame_rate)); |
354 } | 355 } |
355 | 356 |
356 void MediaCodecVideoEncoder::OnMessage(rtc::Message* msg) { | 357 void MediaCodecVideoEncoder::OnMessage(rtc::Message* msg) { |
357 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 358 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
358 ScopedLocalRefFrame local_ref_frame(jni); | 359 ScopedLocalRefFrame local_ref_frame(jni); |
359 | 360 |
360 // We only ever send one message to |this| directly (not through a Bind()'d | 361 // We only ever send one message to |this| directly (not through a Bind()'d |
361 // functor), so expect no ID/data. | 362 // functor), so expect no ID/data. |
362 CHECK(!msg->message_id) << "Unexpected message!"; | 363 RTC_CHECK(!msg->message_id) << "Unexpected message!"; |
363 CHECK(!msg->pdata) << "Unexpected message!"; | 364 RTC_CHECK(!msg->pdata) << "Unexpected message!"; |
364 CheckOnCodecThread(); | 365 CheckOnCodecThread(); |
365 if (!inited_) { | 366 if (!inited_) { |
366 return; | 367 return; |
367 } | 368 } |
368 | 369 |
369 // It would be nice to recover from a failure here if one happened, but it's | 370 // It would be nice to recover from a failure here if one happened, but it's |
370 // unclear how to signal such a failure to the app, so instead we stay silent | 371 // unclear how to signal such a failure to the app, so instead we stay silent |
371 // about it and let the next app-called API method reveal the borkedness. | 372 // about it and let the next app-called API method reveal the borkedness. |
372 DeliverPendingOutputs(jni); | 373 DeliverPendingOutputs(jni); |
373 codec_thread_->PostDelayed(kMediaCodecPollMs, this); | 374 codec_thread_->PostDelayed(kMediaCodecPollMs, this); |
374 } | 375 } |
375 | 376 |
376 void MediaCodecVideoEncoder::CheckOnCodecThread() { | 377 void MediaCodecVideoEncoder::CheckOnCodecThread() { |
377 CHECK(codec_thread_ == ThreadManager::Instance()->CurrentThread()) | 378 RTC_CHECK(codec_thread_ == ThreadManager::Instance()->CurrentThread()) |
378 << "Running on wrong thread!"; | 379 << "Running on wrong thread!"; |
379 } | 380 } |
380 | 381 |
381 void MediaCodecVideoEncoder::ResetCodec() { | 382 void MediaCodecVideoEncoder::ResetCodec() { |
382 ALOGE("ResetCodec"); | 383 ALOGE("ResetCodec"); |
383 if (Release() != WEBRTC_VIDEO_CODEC_OK || | 384 if (Release() != WEBRTC_VIDEO_CODEC_OK || |
384 codec_thread_->Invoke<int32_t>(Bind( | 385 codec_thread_->Invoke<int32_t>(Bind( |
385 &MediaCodecVideoEncoder::InitEncodeOnCodecThread, this, | 386 &MediaCodecVideoEncoder::InitEncodeOnCodecThread, this, |
386 width_, height_, 0, 0)) != WEBRTC_VIDEO_CODEC_OK) { | 387 width_, height_, 0, 0)) != WEBRTC_VIDEO_CODEC_OK) { |
387 // TODO(fischman): wouldn't it be nice if there was a way to gracefully | 388 // TODO(fischman): wouldn't it be nice if there was a way to gracefully |
(...skipping 65 matching lines...)
453 case COLOR_FormatYUV420SemiPlanar: | 454 case COLOR_FormatYUV420SemiPlanar: |
454 case COLOR_QCOM_FormatYUV420SemiPlanar: | 455 case COLOR_QCOM_FormatYUV420SemiPlanar: |
455 case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m: | 456 case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m: |
456 encoder_fourcc_ = libyuv::FOURCC_NV12; | 457 encoder_fourcc_ = libyuv::FOURCC_NV12; |
457 break; | 458 break; |
458 default: | 459 default: |
459 LOG(LS_ERROR) << "Wrong color format."; | 460 LOG(LS_ERROR) << "Wrong color format."; |
460 return WEBRTC_VIDEO_CODEC_ERROR; | 461 return WEBRTC_VIDEO_CODEC_ERROR; |
461 } | 462 } |
462 size_t num_input_buffers = jni->GetArrayLength(input_buffers); | 463 size_t num_input_buffers = jni->GetArrayLength(input_buffers); |
463 CHECK(input_buffers_.empty()) | 464 RTC_CHECK(input_buffers_.empty()) |
464 << "Unexpected double InitEncode without Release"; | 465 << "Unexpected double InitEncode without Release"; |
465 input_buffers_.resize(num_input_buffers); | 466 input_buffers_.resize(num_input_buffers); |
466 for (size_t i = 0; i < num_input_buffers; ++i) { | 467 for (size_t i = 0; i < num_input_buffers; ++i) { |
467 input_buffers_[i] = | 468 input_buffers_[i] = |
468 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i)); | 469 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i)); |
469 int64 yuv_buffer_capacity = | 470 int64 yuv_buffer_capacity = |
470 jni->GetDirectBufferCapacity(input_buffers_[i]); | 471 jni->GetDirectBufferCapacity(input_buffers_[i]); |
471 CHECK_EXCEPTION(jni); | 472 CHECK_EXCEPTION(jni); |
472 CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity"; | 473 RTC_CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity"; |
473 } | 474 } |
474 CHECK_EXCEPTION(jni); | 475 CHECK_EXCEPTION(jni); |
475 | 476 |
476 codec_thread_->PostDelayed(kMediaCodecPollMs, this); | 477 codec_thread_->PostDelayed(kMediaCodecPollMs, this); |
477 return WEBRTC_VIDEO_CODEC_OK; | 478 return WEBRTC_VIDEO_CODEC_OK; |
478 } | 479 } |
479 | 480 |
480 int32_t MediaCodecVideoEncoder::EncodeOnCodecThread( | 481 int32_t MediaCodecVideoEncoder::EncodeOnCodecThread( |
481 const webrtc::VideoFrame& frame, | 482 const webrtc::VideoFrame& frame, |
482 const std::vector<webrtc::VideoFrameType>* frame_types) { | 483 const std::vector<webrtc::VideoFrameType>* frame_types) { |
483 CheckOnCodecThread(); | 484 CheckOnCodecThread(); |
484 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 485 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
485 ScopedLocalRefFrame local_ref_frame(jni); | 486 ScopedLocalRefFrame local_ref_frame(jni); |
486 | 487 |
487 if (!inited_) { | 488 if (!inited_) { |
488 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 489 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
489 } | 490 } |
490 frames_received_++; | 491 frames_received_++; |
491 if (!DeliverPendingOutputs(jni)) { | 492 if (!DeliverPendingOutputs(jni)) { |
492 ResetCodec(); | 493 ResetCodec(); |
493 // Continue as if everything's fine. | 494 // Continue as if everything's fine. |
494 } | 495 } |
495 | 496 |
496 if (drop_next_input_frame_) { | 497 if (drop_next_input_frame_) { |
497 ALOGV("Encoder drop frame - failed callback."); | 498 ALOGV("Encoder drop frame - failed callback."); |
498 drop_next_input_frame_ = false; | 499 drop_next_input_frame_ = false; |
499 return WEBRTC_VIDEO_CODEC_OK; | 500 return WEBRTC_VIDEO_CODEC_OK; |
500 } | 501 } |
501 | 502 |
502 CHECK(frame_types->size() == 1) << "Unexpected stream count"; | 503 RTC_CHECK(frame_types->size() == 1) << "Unexpected stream count"; |
503 // Check framerate before spatial resolution change. | 504 // Check framerate before spatial resolution change. |
504 if (scale_ && codecType_ == kVideoCodecVP8) { | 505 if (scale_ && codecType_ == kVideoCodecVP8) { |
505 quality_scaler_->OnEncodeFrame(frame); | 506 quality_scaler_->OnEncodeFrame(frame); |
506 updated_framerate_ = quality_scaler_->GetTargetFramerate(); | 507 updated_framerate_ = quality_scaler_->GetTargetFramerate(); |
507 } | 508 } |
508 const VideoFrame& input_frame = (scale_ && codecType_ == kVideoCodecVP8) ? | 509 const VideoFrame& input_frame = (scale_ && codecType_ == kVideoCodecVP8) ? |
509 quality_scaler_->GetScaledFrame(frame) : frame; | 510 quality_scaler_->GetScaledFrame(frame) : frame; |
510 | 511 |
511 if (input_frame.width() != width_ || input_frame.height() != height_) { | 512 if (input_frame.width() != width_ || input_frame.height() != height_) { |
512 ALOGD("Frame resolution change from %d x %d to %d x %d", | 513 ALOGD("Frame resolution change from %d x %d to %d x %d", |
(...skipping 35 matching lines...)
548 return WEBRTC_VIDEO_CODEC_ERROR; | 549 return WEBRTC_VIDEO_CODEC_ERROR; |
549 } | 550 } |
550 | 551 |
551 ALOGV("Encoder frame in # %d. TS: %lld. Q: %d", | 552 ALOGV("Encoder frame in # %d. TS: %lld. Q: %d", |
552 frames_received_ - 1, current_timestamp_us_ / 1000, frames_in_queue_); | 553 frames_received_ - 1, current_timestamp_us_ / 1000, frames_in_queue_); |
553 | 554 |
554 jobject j_input_buffer = input_buffers_[j_input_buffer_index]; | 555 jobject j_input_buffer = input_buffers_[j_input_buffer_index]; |
555 uint8* yuv_buffer = | 556 uint8* yuv_buffer = |
556 reinterpret_cast<uint8*>(jni->GetDirectBufferAddress(j_input_buffer)); | 557 reinterpret_cast<uint8*>(jni->GetDirectBufferAddress(j_input_buffer)); |
557 CHECK_EXCEPTION(jni); | 558 CHECK_EXCEPTION(jni); |
558 CHECK(yuv_buffer) << "Indirect buffer??"; | 559 RTC_CHECK(yuv_buffer) << "Indirect buffer??"; |
559 CHECK(!libyuv::ConvertFromI420( | 560 RTC_CHECK(!libyuv::ConvertFromI420( |
560 input_frame.buffer(webrtc::kYPlane), | 561 input_frame.buffer(webrtc::kYPlane), input_frame.stride(webrtc::kYPlane), |
561 input_frame.stride(webrtc::kYPlane), | 562 input_frame.buffer(webrtc::kUPlane), input_frame.stride(webrtc::kUPlane), |
562 input_frame.buffer(webrtc::kUPlane), | 563 input_frame.buffer(webrtc::kVPlane), input_frame.stride(webrtc::kVPlane), |
563 input_frame.stride(webrtc::kUPlane), | 564 yuv_buffer, width_, width_, height_, encoder_fourcc_)) |
564 input_frame.buffer(webrtc::kVPlane), | |
565 input_frame.stride(webrtc::kVPlane), | |
566 yuv_buffer, width_, | |
567 width_, height_, | |
568 encoder_fourcc_)) | |
569 << "ConvertFromI420 failed"; | 565 << "ConvertFromI420 failed"; |
570 last_input_timestamp_ms_ = current_timestamp_us_ / 1000; | 566 last_input_timestamp_ms_ = current_timestamp_us_ / 1000; |
571 frames_in_queue_++; | 567 frames_in_queue_++; |
572 | 568 |
573 // Save input image timestamps for later output | 569 // Save input image timestamps for later output |
574 timestamps_.push_back(input_frame.timestamp()); | 570 timestamps_.push_back(input_frame.timestamp()); |
575 render_times_ms_.push_back(input_frame.render_time_ms()); | 571 render_times_ms_.push_back(input_frame.render_time_ms()); |
576 frame_rtc_times_ms_.push_back(GetCurrentTimeMs()); | 572 frame_rtc_times_ms_.push_back(GetCurrentTimeMs()); |
577 | 573 |
578 bool key_frame = frame_types->front() != webrtc::kDeltaFrame; | 574 bool key_frame = frame_types->front() != webrtc::kDeltaFrame; |
(...skipping 350 matching lines...)
929 } | 925 } |
930 | 926 |
931 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( | 927 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( |
932 webrtc::VideoEncoder* encoder) { | 928 webrtc::VideoEncoder* encoder) { |
933 ALOGD("Destroy video encoder."); | 929 ALOGD("Destroy video encoder."); |
934 delete encoder; | 930 delete encoder; |
935 } | 931 } |
936 | 932 |
937 } // namespace webrtc_jni | 933 } // namespace webrtc_jni |
938 | 934 |
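The substantive change in this diff is the rename of the CHECK family of assertion macros to RTC_CHECK. As a minimal sketch of how the renamed macros behave (not part of this CL; the header path, the Example() function, and its parameter are assumptions for illustration):

  #include "webrtc/base/checks.h"

  void Example(int frames_in_queue) {
    // Fatal if the condition is false; the streamed message is logged
    // before the process aborts, which is how the "Unexpected message!"
    // and "Insufficient capacity" strings above reach the log.
    RTC_CHECK(frames_in_queue >= 0) << "Unexpected queue size: "
                                    << frames_in_queue;
    // Comparison variants also log both operands on failure.
    RTC_CHECK_GE(frames_in_queue, 0);
  }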