| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 #include <algorithm> | 10 #include <algorithm> |
| (...skipping 2634 matching lines...) |
| 2645 EXPECT_EQ(1, metrics::NumSamples(video_prefix + "InputFramesPerSecond")); | 2645 EXPECT_EQ(1, metrics::NumSamples(video_prefix + "InputFramesPerSecond")); |
| 2646 EXPECT_EQ(1, metrics::NumSamples(video_prefix + "SentFramesPerSecond")); | 2646 EXPECT_EQ(1, metrics::NumSamples(video_prefix + "SentFramesPerSecond")); |
| 2647 EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.DecodedFramesPerSecond")); | 2647 EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.DecodedFramesPerSecond")); |
| 2648 EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.RenderFramesPerSecond")); | 2648 EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.RenderFramesPerSecond")); |
| 2649 | 2649 |
| 2650 EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.JitterBufferDelayInMs")); | 2650 EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.JitterBufferDelayInMs")); |
| 2651 EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.TargetDelayInMs")); | 2651 EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.TargetDelayInMs")); |
| 2652 EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.CurrentDelayInMs")); | 2652 EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.CurrentDelayInMs")); |
| 2653 EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.OnewayDelayInMs")); | 2653 EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.OnewayDelayInMs")); |
| 2654 | 2654 |
| 2655 EXPECT_EQ(1, metrics::NumSamples(video_prefix + "EndToEndDelayInMs")); | 2655 EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.EndToEndDelayInMs")); |
| 2656 EXPECT_EQ(1, metrics::NumSamples(video_prefix + "EndToEndDelayMaxInMs")); | |
| 2657 EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.RenderSqrtPixelsPerSecond")); | 2656 EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.RenderSqrtPixelsPerSecond")); |
| 2658 | 2657 |
| 2659 EXPECT_EQ(1, metrics::NumSamples(video_prefix + "EncodeTimeInMs")); | 2658 EXPECT_EQ(1, metrics::NumSamples(video_prefix + "EncodeTimeInMs")); |
| 2660 EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.DecodeTimeInMs")); | 2659 EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.DecodeTimeInMs")); |
| 2661 | 2660 |
| 2662 EXPECT_EQ(1, metrics::NumSamples(video_prefix + "NumberOfPauseEvents")); | 2661 EXPECT_EQ(1, metrics::NumSamples(video_prefix + "NumberOfPauseEvents")); |
| 2663 EXPECT_EQ(1, metrics::NumSamples(video_prefix + "PausedTimeInPercent")); | 2662 EXPECT_EQ(1, metrics::NumSamples(video_prefix + "PausedTimeInPercent")); |
| 2664 | 2663 |
| 2665 EXPECT_EQ(1, metrics::NumSamples(video_prefix + "BitrateSentInKbps")); | 2664 EXPECT_EQ(1, metrics::NumSamples(video_prefix + "BitrateSentInKbps")); |
| 2666 EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.BitrateReceivedInKbps")); | 2665 EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.BitrateReceivedInKbps")); |
| (...skipping 19 matching lines...) |
| 2686 | 2685 |
| 2687 int num_red_samples = use_red ? 1 : 0; | 2686 int num_red_samples = use_red ? 1 : 0; |
| 2688 EXPECT_EQ(num_red_samples, | 2687 EXPECT_EQ(num_red_samples, |
| 2689 metrics::NumSamples("WebRTC.Video.FecBitrateSentInKbps")); | 2688 metrics::NumSamples("WebRTC.Video.FecBitrateSentInKbps")); |
| 2690 EXPECT_EQ(num_red_samples, | 2689 EXPECT_EQ(num_red_samples, |
| 2691 metrics::NumSamples("WebRTC.Video.FecBitrateReceivedInKbps")); | 2690 metrics::NumSamples("WebRTC.Video.FecBitrateReceivedInKbps")); |
| 2692 EXPECT_EQ(num_red_samples, | 2691 EXPECT_EQ(num_red_samples, |
| 2693 metrics::NumSamples("WebRTC.Video.ReceivedFecPacketsInPercent")); | 2692 metrics::NumSamples("WebRTC.Video.ReceivedFecPacketsInPercent")); |
| 2694 } | 2693 } |
| 2695 | 2694 |
| 2696 TEST_F(EndToEndTest, ContentTypeSwitches) { | |
| 2697 class StatsObserver : public test::BaseTest, | |
| 2698 public rtc::VideoSinkInterface<VideoFrame> { | |
| 2699 public: | |
| 2700 StatsObserver() : BaseTest(kLongTimeoutMs), num_frames_received_(0) {} | |
| 2701 | |
| 2702 bool ShouldCreateReceivers() const override { return true; } | |
| 2703 | |
| 2704 void OnFrame(const VideoFrame& video_frame) override { | |
| 2705 // The RTT is needed to estimate |ntp_time_ms| which is used by | |
| 2706 // end-to-end delay stats. Therefore, start counting received frames once | |
| 2707 // |ntp_time_ms| is valid. | |
| 2708 if (video_frame.ntp_time_ms() > 0 && | |
| 2709 Clock::GetRealTimeClock()->CurrentNtpInMilliseconds() >= | |
| 2710 video_frame.ntp_time_ms()) { | |
| 2711 rtc::CritScope lock(&crit_); | |
| 2712 ++num_frames_received_; | |
| 2713 } | |
| 2714 } | |
| 2715 | |
| 2716 Action OnSendRtp(const uint8_t* packet, size_t length) override { | |
| 2717 if (MinNumberOfFramesReceived()) | |
| 2718 observation_complete_.Set(); | |
| 2719 return SEND_PACKET; | |
| 2720 } | |
| 2721 | |
| 2722 bool MinNumberOfFramesReceived() const { | |
| 2723 const int kMinRequiredHistogramSamples = 200; | |
| 2724 rtc::CritScope lock(&crit_); | |
| 2725 return num_frames_received_ > kMinRequiredHistogramSamples; | |
| 2726 } | |
| 2727 | |
| 2728 // May be called several times. | |
| 2729 void PerformTest() override { | |
| 2730 EXPECT_TRUE(Wait()) << "Timed out waiting for enough packets."; | |
| 2731 // Reset frame counter so next PerformTest() call will do something. | |
| 2732 { | |
| 2733 rtc::CritScope lock(&crit_); | |
| 2734 num_frames_received_ = 0; | |
| 2735 } | |
| 2736 } | |
| 2737 | |
| 2738 rtc::CriticalSection crit_; | |
| 2739 int num_frames_received_ GUARDED_BY(&crit_); | |
| 2740 } test; | |
| 2741 | |
| 2742 metrics::Reset(); | |
| 2743 | |
| 2744 Call::Config send_config(test.GetSenderCallConfig()); | |
| 2745 CreateSenderCall(send_config); | |
| 2746 Call::Config recv_config(test.GetReceiverCallConfig()); | |
| 2747 CreateReceiverCall(recv_config); | |
| 2748 receive_transport_.reset(test.CreateReceiveTransport()); | |
| 2749 send_transport_.reset(test.CreateSendTransport(sender_call_.get())); | |
| 2750 send_transport_->SetReceiver(receiver_call_->Receiver()); | |
| 2751 receive_transport_->SetReceiver(sender_call_->Receiver()); | |
| 2752 receiver_call_->SignalChannelNetworkState(MediaType::VIDEO, kNetworkUp); | |
| 2753 CreateSendConfig(1, 0, 0, send_transport_.get()); | |
| 2754 CreateMatchingReceiveConfigs(receive_transport_.get()); | |
| 2755 | |
| 2756 // Modify send and receive configs. | |
| 2757 video_send_config_.rtp.nack.rtp_history_ms = kNackRtpHistoryMs; | |
| 2758 video_receive_configs_[0].rtp.nack.rtp_history_ms = kNackRtpHistoryMs; | |
| 2759 video_receive_configs_[0].renderer = &test; | |
| 2760 // RTT needed for RemoteNtpTimeEstimator for the receive stream. | |
| 2761 video_receive_configs_[0].rtp.rtcp_xr.receiver_reference_time_report = true; | |
| 2762 // Start with realtime video. | |
| 2763 video_encoder_config_.content_type = | |
| 2764 VideoEncoderConfig::ContentType::kRealtimeVideo; | |
| 2765 // Second encoder config for the second part of the test uses screenshare | |
| 2766 VideoEncoderConfig encoder_config_with_screenshare_ = | |
| 2767 video_encoder_config_.Copy(); | |
| 2768 encoder_config_with_screenshare_.content_type = | |
| 2769 VideoEncoderConfig::ContentType::kScreen; | |
| 2770 | |
| 2771 CreateVideoStreams(); | |
| 2772 CreateFrameGeneratorCapturer(kDefaultFramerate, kDefaultWidth, | |
| 2773 kDefaultHeight); | |
| 2774 Start(); | |
| 2775 | |
| 2776 test.PerformTest(); | |
| 2777 | |
| 2778 // Replace old send stream. | |
| 2779 sender_call_->DestroyVideoSendStream(video_send_stream_); | |
| 2780 video_send_stream_ = sender_call_->CreateVideoSendStream( | |
| 2781 video_send_config_.Copy(), encoder_config_with_screenshare_.Copy()); | |
| 2782 video_send_stream_->SetSource( | |
| 2783 frame_generator_capturer_.get(), | |
| 2784 VideoSendStream::DegradationPreference::kBalanced); | |
| 2785 video_send_stream_->Start(); | |
| 2786 | |
| 2787 // Continue to run test but now with screenshare. | |
| 2788 test.PerformTest(); | |
| 2789 | |
| 2790 send_transport_->StopSending(); | |
| 2791 receive_transport_->StopSending(); | |
| 2792 Stop(); | |
| 2793 DestroyStreams(); | |
| 2794 DestroyCalls(); | |
| 2795 // Delete the call for Call stats to be reported. | |
| 2796 sender_call_.reset(); | |
| 2797 receiver_call_.reset(); | |
| 2798 | |
| 2799 // Verify that stats have been updated for both screenshare and video. | |
| 2800 EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.EndToEndDelayInMs")); | |
| 2801 EXPECT_EQ(1, | |
| 2802 metrics::NumSamples("WebRTC.Video.Screenshare.EndToEndDelayInMs")); | |
| 2803 EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.EndToEndDelayMaxInMs")); | |
| 2804 EXPECT_EQ( | |
| 2805 1, metrics::NumSamples("WebRTC.Video.Screenshare.EndToEndDelayMaxInMs")); | |
| 2806 } | |
| 2807 | |
| 2808 TEST_F(EndToEndTest, VerifyHistogramStatsWithRtx) { | 2695 TEST_F(EndToEndTest, VerifyHistogramStatsWithRtx) { |
| 2809 const bool kEnabledRtx = true; | 2696 const bool kEnabledRtx = true; |
| 2810 const bool kEnabledRed = false; | 2697 const bool kEnabledRed = false; |
| 2811 const bool kScreenshare = false; | 2698 const bool kScreenshare = false; |
| 2812 VerifyHistogramStats(kEnabledRtx, kEnabledRed, kScreenshare); | 2699 VerifyHistogramStats(kEnabledRtx, kEnabledRed, kScreenshare); |
| 2813 } | 2700 } |
| 2814 | 2701 |
| 2815 TEST_F(EndToEndTest, VerifyHistogramStatsWithRed) { | 2702 TEST_F(EndToEndTest, VerifyHistogramStatsWithRed) { |
| 2816 const bool kEnabledRtx = false; | 2703 const bool kEnabledRtx = false; |
| 2817 const bool kEnabledRed = true; | 2704 const bool kEnabledRed = true; |
| (...skipping 1645 matching lines...) |
| 4463 std::unique_ptr<VideoEncoder> encoder_; | 4350 std::unique_ptr<VideoEncoder> encoder_; |
| 4464 std::unique_ptr<VideoDecoder> decoder_; | 4351 std::unique_ptr<VideoDecoder> decoder_; |
| 4465 rtc::CriticalSection crit_; | 4352 rtc::CriticalSection crit_; |
| 4466 int recorded_frames_ GUARDED_BY(crit_); | 4353 int recorded_frames_ GUARDED_BY(crit_); |
| 4467 } test(this); | 4354 } test(this); |
| 4468 | 4355 |
| 4469 RunBaseTest(&test); | 4356 RunBaseTest(&test); |
| 4470 } | 4357 } |
| 4471 | 4358 |
| 4472 } // namespace webrtc | 4359 } // namespace webrtc |
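
For reference, the `video_prefix` used throughout the histogram checks above distinguishes screenshare streams from realtime-video streams. Below is a minimal sketch of how that prefix is presumably derived from the `kScreenshare` flag passed to `VerifyHistogramStats`; the helper name here is hypothetical and the exact wiring inside the real test may differ.

```cpp
// Illustrative sketch only: how the content-type-specific histogram prefix
// checked above is presumably chosen. HistogramPrefix() is a hypothetical
// helper; the actual test derives the prefix inside VerifyHistogramStats.
#include <string>

std::string HistogramPrefix(bool screenshare) {
  // Screenshare streams report under "WebRTC.Video.Screenshare.*",
  // realtime video streams under "WebRTC.Video.*".
  return screenshare ? "WebRTC.Video.Screenshare." : "WebRTC.Video.";
}

// Example usage, mirroring the pattern in the checks above:
//   const std::string video_prefix = HistogramPrefix(kScreenshare);
//   EXPECT_EQ(1, metrics::NumSamples(video_prefix + "EncodeTimeInMs"));
```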