OLD | NEW |
1 /* | 1 /* |
2 * libjingle | 2 * libjingle |
3 * Copyright 2015 Google Inc. | 3 * Copyright 2015 Google Inc. |
4 * | 4 * |
5 * Redistribution and use in source and binary forms, with or without | 5 * Redistribution and use in source and binary forms, with or without |
6 * modification, are permitted provided that the following conditions are met: | 6 * modification, are permitted provided that the following conditions are met: |
7 * | 7 * |
8 * 1. Redistributions of source code must retain the above copyright notice, | 8 * 1. Redistributions of source code must retain the above copyright notice, |
9 * this list of conditions and the following disclaimer. | 9 * this list of conditions and the following disclaimer. |
10 * 2. Redistributions in binary form must reproduce the above copyright notice, | 10 * 2. Redistributions in binary form must reproduce the above copyright notice, |
(...skipping 265 matching lines...)
276 << "Unsupported codec " << inst->codecType << " for " << codecType_; | 276 << "Unsupported codec " << inst->codecType << " for " << codecType_; |
277 | 277 |
278 if (sw_fallback_required_) { | 278 if (sw_fallback_required_) { |
279 ALOGE << "InitDecode() - fallback to SW decoder"; | 279 ALOGE << "InitDecode() - fallback to SW decoder"; |
280 return WEBRTC_VIDEO_CODEC_OK; | 280 return WEBRTC_VIDEO_CODEC_OK; |
281 } | 281 } |
282 // Save VideoCodec instance for later. | 282 // Save VideoCodec instance for later. |
283 if (&codec_ != inst) { | 283 if (&codec_ != inst) { |
284 codec_ = *inst; | 284 codec_ = *inst; |
285 } | 285 } |
286 codec_.maxFramerate = (codec_.maxFramerate >= 1) ? codec_.maxFramerate : 1; | 286 // If maxFramerate is not set then assume 30 fps. |
| 287 codec_.maxFramerate = (codec_.maxFramerate >= 1) ? codec_.maxFramerate : 30; |
287 | 288 |
288 // Call Java init. | 289 // Call Java init. |
289 return codec_thread_->Invoke<int32_t>( | 290 return codec_thread_->Invoke<int32_t>( |
290 Bind(&MediaCodecVideoDecoder::InitDecodeOnCodecThread, this)); | 291 Bind(&MediaCodecVideoDecoder::InitDecodeOnCodecThread, this)); |
291 } | 292 } |
292 | 293 |
293 int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() { | 294 int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() { |
294 CheckOnCodecThread(); | 295 CheckOnCodecThread(); |
295 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 296 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
296 ScopedLocalRefFrame local_ref_frame(jni); | 297 ScopedLocalRefFrame local_ref_frame(jni); |
(...skipping 44 matching lines...)
341 current_frames_ = 0; | 342 current_frames_ = 0; |
342 current_bytes_ = 0; | 343 current_bytes_ = 0; |
343 current_decoding_time_ms_ = 0; | 344 current_decoding_time_ms_ = 0; |
344 timestamps_.clear(); | 345 timestamps_.clear(); |
345 ntp_times_ms_.clear(); | 346 ntp_times_ms_.clear(); |
346 frame_rtc_times_ms_.clear(); | 347 frame_rtc_times_ms_.clear(); |
347 | 348 |
348 jobjectArray input_buffers = (jobjectArray)GetObjectField( | 349 jobjectArray input_buffers = (jobjectArray)GetObjectField( |
349 jni, *j_media_codec_video_decoder_, j_input_buffers_field_); | 350 jni, *j_media_codec_video_decoder_, j_input_buffers_field_); |
350 size_t num_input_buffers = jni->GetArrayLength(input_buffers); | 351 size_t num_input_buffers = jni->GetArrayLength(input_buffers); |
351 max_pending_frames_ = | 352 ALOGD << "Maximum amount of pending frames: " << max_pending_frames_; |
352 std::min(max_pending_frames_, static_cast<uint32_t>(num_input_buffers)); | |
353 input_buffers_.resize(num_input_buffers); | 353 input_buffers_.resize(num_input_buffers); |
354 for (size_t i = 0; i < num_input_buffers; ++i) { | 354 for (size_t i = 0; i < num_input_buffers; ++i) { |
355 input_buffers_[i] = | 355 input_buffers_[i] = |
356 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i)); | 356 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i)); |
357 if (CheckException(jni)) { | 357 if (CheckException(jni)) { |
358 ALOGE << "NewGlobalRef error - fallback to SW codec."; | 358 ALOGE << "NewGlobalRef error - fallback to SW codec."; |
359 sw_fallback_required_ = true; | 359 sw_fallback_required_ = true; |
360 return WEBRTC_VIDEO_CODEC_ERROR; | 360 return WEBRTC_VIDEO_CODEC_ERROR; |
361 } | 361 } |
362 } | 362 } |
(...skipping 17 matching lines...)
380 return codec_thread_->Invoke<int32_t>( | 380 return codec_thread_->Invoke<int32_t>( |
381 Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this)); | 381 Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this)); |
382 } | 382 } |
383 | 383 |
384 int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() { | 384 int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() { |
385 if (!inited_) { | 385 if (!inited_) { |
386 return WEBRTC_VIDEO_CODEC_OK; | 386 return WEBRTC_VIDEO_CODEC_OK; |
387 } | 387 } |
388 CheckOnCodecThread(); | 388 CheckOnCodecThread(); |
389 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 389 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
390 ALOGD << "DecoderReleaseOnCodecThread: Frames received: " << frames_received_; | 390 ALOGD << "DecoderReleaseOnCodecThread: Frames received: " << |
| 391 frames_received_ << ". Frames decoded: " << frames_decoded_; |
391 ScopedLocalRefFrame local_ref_frame(jni); | 392 ScopedLocalRefFrame local_ref_frame(jni); |
392 for (size_t i = 0; i < input_buffers_.size(); i++) { | 393 for (size_t i = 0; i < input_buffers_.size(); i++) { |
393 jni->DeleteGlobalRef(input_buffers_[i]); | 394 jni->DeleteGlobalRef(input_buffers_[i]); |
394 } | 395 } |
395 input_buffers_.clear(); | 396 input_buffers_.clear(); |
396 jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_); | 397 jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_); |
397 inited_ = false; | 398 inited_ = false; |
398 rtc::MessageQueueManager::Clear(this); | 399 rtc::MessageQueueManager::Clear(this); |
399 if (CheckException(jni)) { | 400 if (CheckException(jni)) { |
400 ALOGE << "Decoder release exception"; | 401 ALOGE << "Decoder release exception"; |
(...skipping 96 matching lines...)
497 CheckOnCodecThread(); | 498 CheckOnCodecThread(); |
498 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 499 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
499 ScopedLocalRefFrame local_ref_frame(jni); | 500 ScopedLocalRefFrame local_ref_frame(jni); |
500 | 501 |
501 // Try to drain the decoder and wait until output is not too | 502 // Try to drain the decoder and wait until output is not too |
502 // much behind the input. | 503 // much behind the input. |
503 if (frames_received_ > frames_decoded_ + max_pending_frames_) { | 504 if (frames_received_ > frames_decoded_ + max_pending_frames_) { |
504 ALOGV("Received: %d. Decoded: %d. Wait for output...", | 505 ALOGV("Received: %d. Decoded: %d. Wait for output...", |
505 frames_received_, frames_decoded_); | 506 frames_received_, frames_decoded_); |
506 if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs * 1000)) { | 507 if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs * 1000)) { |
507 ALOGE << "DeliverPendingOutputs error"; | 508 ALOGE << "DeliverPendingOutputs error. Frames received: " << |
| 509 frames_received_ << ". Frames decoded: " << frames_decoded_; |
508 return ProcessHWErrorOnCodecThread(); | 510 return ProcessHWErrorOnCodecThread(); |
509 } | 511 } |
510 if (frames_received_ > frames_decoded_ + max_pending_frames_) { | 512 if (frames_received_ > frames_decoded_ + max_pending_frames_) { |
511 ALOGE << "Output buffer dequeue timeout"; | 513 ALOGE << "Output buffer dequeue timeout. Frames received: " << |
| 514 frames_received_ << ". Frames decoded: " << frames_decoded_; |
512 return ProcessHWErrorOnCodecThread(); | 515 return ProcessHWErrorOnCodecThread(); |
513 } | 516 } |
514 } | 517 } |
515 | 518 |
516 // Get input buffer. | 519 // Get input buffer. |
517 int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_decoder_, | 520 int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_decoder_, |
518 j_dequeue_input_buffer_method_); | 521 j_dequeue_input_buffer_method_); |
519 if (CheckException(jni) || j_input_buffer_index < 0) { | 522 if (CheckException(jni) || j_input_buffer_index < 0) { |
520 ALOGE << "dequeueInputBuffer error"; | 523 ALOGE << "dequeueInputBuffer error"; |
521 return ProcessHWErrorOnCodecThread(); | 524 return ProcessHWErrorOnCodecThread(); |
522 } | 525 } |
523 | 526 |
524 // Copy encoded data to Java ByteBuffer. | 527 // Copy encoded data to Java ByteBuffer. |
525 jobject j_input_buffer = input_buffers_[j_input_buffer_index]; | 528 jobject j_input_buffer = input_buffers_[j_input_buffer_index]; |
526 uint8_t* buffer = | 529 uint8_t* buffer = |
527 reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer)); | 530 reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer)); |
528 RTC_CHECK(buffer) << "Indirect buffer??"; | 531 RTC_CHECK(buffer) << "Indirect buffer??"; |
529 int64_t buffer_capacity = jni->GetDirectBufferCapacity(j_input_buffer); | 532 int64_t buffer_capacity = jni->GetDirectBufferCapacity(j_input_buffer); |
530 if (CheckException(jni) || buffer_capacity < inputImage._length) { | 533 if (CheckException(jni) || buffer_capacity < inputImage._length) { |
531 ALOGE << "Input frame size "<< inputImage._length << | 534 ALOGE << "Input frame size "<< inputImage._length << |
532 " is bigger than buffer size " << buffer_capacity; | 535 " is bigger than buffer size " << buffer_capacity; |
533 return ProcessHWErrorOnCodecThread(); | 536 return ProcessHWErrorOnCodecThread(); |
534 } | 537 } |
535 jlong timestamp_us = (frames_received_ * 1000000) / codec_.maxFramerate; | 538 jlong timestamp_us = (frames_received_ * 1000000) / codec_.maxFramerate; |
536 ALOGV("Decoder frame in # %d. Type: %d. Buffer # %d. TS: %lld. Size: %d", | 539 if (frames_decoded_ < kMaxDecodedLogFrames) { |
537 frames_received_, inputImage._frameType, j_input_buffer_index, | 540 ALOGD << "Decoder frame in # " << frames_received_ << ". Type: " |
538 timestamp_us / 1000, inputImage._length); | 541 << inputImage._frameType << ". Buffer # " << |
| 542 j_input_buffer_index << ". TS: " << (int)(timestamp_us / 1000) |
| 543 << ". Size: " << inputImage._length; |
| 544 } |
539 memcpy(buffer, inputImage._buffer, inputImage._length); | 545 memcpy(buffer, inputImage._buffer, inputImage._length); |
540 | 546 |
541 // Save input image timestamps for later output. | 547 // Save input image timestamps for later output. |
542 frames_received_++; | 548 frames_received_++; |
543 current_bytes_ += inputImage._length; | 549 current_bytes_ += inputImage._length; |
544 timestamps_.push_back(inputImage._timeStamp); | 550 timestamps_.push_back(inputImage._timeStamp); |
545 ntp_times_ms_.push_back(inputImage.ntp_time_ms_); | 551 ntp_times_ms_.push_back(inputImage.ntp_time_ms_); |
546 frame_rtc_times_ms_.push_back(GetCurrentTimeMs()); | 552 frame_rtc_times_ms_.push_back(GetCurrentTimeMs()); |
547 | 553 |
548 // Feed input to decoder. | 554 // Feed input to decoder. |
(...skipping 141 matching lines...)
690 } | 696 } |
691 if (ntp_times_ms_.size() > 0) { | 697 if (ntp_times_ms_.size() > 0) { |
692 decoded_frame.set_ntp_time_ms(ntp_times_ms_.front()); | 698 decoded_frame.set_ntp_time_ms(ntp_times_ms_.front()); |
693 ntp_times_ms_.erase(ntp_times_ms_.begin()); | 699 ntp_times_ms_.erase(ntp_times_ms_.begin()); |
694 } | 700 } |
695 int64_t frame_decoding_time_ms = 0; | 701 int64_t frame_decoding_time_ms = 0; |
696 if (frame_rtc_times_ms_.size() > 0) { | 702 if (frame_rtc_times_ms_.size() > 0) { |
697 frame_decoding_time_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front(); | 703 frame_decoding_time_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front(); |
698 frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin()); | 704 frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin()); |
699 } | 705 } |
700 ALOGV("Decoder frame out # %d. %d x %d. %d x %d. Color: 0x%x. TS: %ld." | 706 if (frames_decoded_ < kMaxDecodedLogFrames) { |
701 " DecTime: %lld", frames_decoded_, width, height, stride, slice_height, | 707 ALOGD << "Decoder frame out # " << frames_decoded_ << ". " << width << |
702 color_format, output_timestamps_ms, frame_decoding_time_ms); | 708 " x " << height << ". " << stride << " x " << slice_height << |
| 709 ". Color: " << color_format << ". TS:" << (int)output_timestamps_ms << |
| 710 ". DecTime: " << (int)frame_decoding_time_ms; |
| 711 } |
703 | 712 |
704 // Calculate and print decoding statistics - every 3 seconds. | 713 // Calculate and print decoding statistics - every 3 seconds. |
705 frames_decoded_++; | 714 frames_decoded_++; |
706 current_frames_++; | 715 current_frames_++; |
707 current_decoding_time_ms_ += frame_decoding_time_ms; | 716 current_decoding_time_ms_ += frame_decoding_time_ms; |
708 int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_; | 717 int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_; |
709 if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs && | 718 if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs && |
710 current_frames_ > 0) { | 719 current_frames_ > 0) { |
711 ALOGD << "Decoded frames: " << frames_decoded_ << ". Bitrate: " << | 720 ALOGD << "Decoded frames: " << frames_decoded_ << ". Bitrate: " << |
712 (current_bytes_ * 8 / statistic_time_ms) << " kbps, fps: " << | 721 (current_bytes_ * 8 / statistic_time_ms) << " kbps, fps: " << |
(...skipping 134 matching lines...)
847 } | 856 } |
848 | 857 |
849 void MediaCodecVideoDecoderFactory::DestroyVideoDecoder( | 858 void MediaCodecVideoDecoderFactory::DestroyVideoDecoder( |
850 webrtc::VideoDecoder* decoder) { | 859 webrtc::VideoDecoder* decoder) { |
851 ALOGD << "Destroy video decoder."; | 860 ALOGD << "Destroy video decoder."; |
852 delete decoder; | 861 delete decoder; |
853 } | 862 } |
854 | 863 |
855 } // namespace webrtc_jni | 864 } // namespace webrtc_jni |
856 | 865 |
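Below is a minimal, self-contained C++ sketch (not part of the patch above) illustrating why the fallback framerate change on new line 287 matters: the decoder derives the MediaCodec presentation timestamp as (frames_received_ * 1000000) / codec_.maxFramerate, so the old fallback of 1 fps advanced the timestamp by a full second per frame, while the 30 fps fallback advances it by roughly 33 ms. The helper name TimestampUs is hypothetical, introduced only for this illustration.

// Sketch only: mirrors the timestamp computation from DecodeOnCodecThread,
// jlong timestamp_us = (frames_received_ * 1000000) / codec_.maxFramerate;
#include <cstdint>
#include <cstdio>

// Hypothetical stand-in for the per-frame timestamp derivation, taking the
// framerate value after the ">= 1 ? value : fallback" check has been applied.
int64_t TimestampUs(int64_t frames_received, int max_framerate) {
  return (frames_received * 1000000) / max_framerate;
}

int main() {
  for (int64_t frame = 0; frame < 3; ++frame) {
    // Compare the old 1 fps fallback with the new 30 fps fallback.
    std::printf("frame %lld: 1 fps fallback -> %lld us, 30 fps fallback -> %lld us\n",
                static_cast<long long>(frame),
                static_cast<long long>(TimestampUs(frame, 1)),
                static_cast<long long>(TimestampUs(frame, 30)));
  }
  return 0;
}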