| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 #include <stdio.h> | 10 #include <stdio.h> |
| (...skipping 733 matching lines...) |
| 744 std::deque<FrameComparison> comparisons_ GUARDED_BY(comparison_lock_); | 744 std::deque<FrameComparison> comparisons_ GUARDED_BY(comparison_lock_); |
| 745 rtc::Event done_; | 745 rtc::Event done_; |
| 746 }; | 746 }; |
| 747 | 747 |
| 748 VideoQualityTest::VideoQualityTest() : clock_(Clock::GetRealTimeClock()) {} | 748 VideoQualityTest::VideoQualityTest() : clock_(Clock::GetRealTimeClock()) {} |
| 749 | 749 |
| 750 void VideoQualityTest::TestBody() {} | 750 void VideoQualityTest::TestBody() {} |
| 751 | 751 |
| 752 std::string VideoQualityTest::GenerateGraphTitle() const { | 752 std::string VideoQualityTest::GenerateGraphTitle() const { |
| 753 std::stringstream ss; | 753 std::stringstream ss; |
| 754 ss << params_.common.codec; | 754 ss << params_.video.codec; |
| 755 ss << " (" << params_.common.target_bitrate_bps / 1000 << "kbps"; | 755 ss << " (" << params_.video.target_bitrate_bps / 1000 << "kbps"; |
| 756 ss << ", " << params_.common.fps << " FPS"; | 756 ss << ", " << params_.video.fps << " FPS"; |
| 757 if (params_.screenshare.scroll_duration) | 757 if (params_.screenshare.scroll_duration) |
| 758 ss << ", " << params_.screenshare.scroll_duration << "s scroll"; | 758 ss << ", " << params_.screenshare.scroll_duration << "s scroll"; |
| 759 if (params_.ss.streams.size() > 1) | 759 if (params_.ss.streams.size() > 1) |
| 760 ss << ", Stream #" << params_.ss.selected_stream; | 760 ss << ", Stream #" << params_.ss.selected_stream; |
| 761 if (params_.ss.num_spatial_layers > 1) | 761 if (params_.ss.num_spatial_layers > 1) |
| 762 ss << ", Layer #" << params_.ss.selected_sl; | 762 ss << ", Layer #" << params_.ss.selected_sl; |
| 763 ss << ")"; | 763 ss << ")"; |
| 764 return ss.str(); | 764 return ss.str(); |
| 765 } | 765 } |
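For context, a minimal illustration of the title this produces after the params_.common to params_.video rename; the parameter values below are assumed for the example, not taken from this CL:

```cpp
// Illustrative values only (assumed, not from this CL).
VideoQualityTest::Params params;
params.video.codec = "VP8";
params.video.target_bitrate_bps = 800000;
params.video.fps = 30;
// With a single stream, a single spatial layer and no screenshare scrolling,
// GenerateGraphTitle() would return: "VP8 (800kbps, 30 FPS)"
```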
| 766 | 766 |
| 767 void VideoQualityTest::CheckParams() { | 767 void VideoQualityTest::CheckParams() { |
| 768 // Add a default stream if none specified. | 768 // Add a default stream if none specified. |
| 769 if (params_.ss.streams.empty()) | 769 if (params_.ss.streams.empty()) |
| 770 params_.ss.streams.push_back(VideoQualityTest::DefaultVideoStream(params_)); | 770 params_.ss.streams.push_back(VideoQualityTest::DefaultVideoStream(params_)); |
| 771 if (params_.ss.num_spatial_layers == 0) | 771 if (params_.ss.num_spatial_layers == 0) |
| 772 params_.ss.num_spatial_layers = 1; | 772 params_.ss.num_spatial_layers = 1; |
| 773 | 773 |
| 774 if (params_.pipe.loss_percent != 0 || | 774 if (params_.pipe.loss_percent != 0 || |
| 775 params_.pipe.queue_length_packets != 0) { | 775 params_.pipe.queue_length_packets != 0) { |
| 776 // Since LayerFilteringTransport changes the sequence numbers, we can't | 776 // Since LayerFilteringTransport changes the sequence numbers, we can't |
| 777 // use that feature with packet loss, since the NACK request would end up | 777 // use that feature with packet loss, since the NACK request would end up |
| 778 // retransmitting the wrong packets. | 778 // retransmitting the wrong packets. |
| 779 RTC_CHECK(params_.ss.selected_sl == -1 || | 779 RTC_CHECK(params_.ss.selected_sl == -1 || |
| 780 params_.ss.selected_sl == params_.ss.num_spatial_layers - 1); | 780 params_.ss.selected_sl == params_.ss.num_spatial_layers - 1); |
| 781 RTC_CHECK(params_.common.selected_tl == -1 || | 781 RTC_CHECK(params_.video.selected_tl == -1 || |
| 782 params_.common.selected_tl == | 782 params_.video.selected_tl == |
| 783 params_.common.num_temporal_layers - 1); | 783 params_.video.num_temporal_layers - 1); |
| 784 } | 784 } |
| 785 | 785 |
| 786 // TODO(ivica): Should max_bitrate_bps == -1 represent inf max bitrate, as it | 786 // TODO(ivica): Should max_bitrate_bps == -1 represent inf max bitrate, as it |
| 787 // does in some parts of the code? | 787 // does in some parts of the code? |
| 788 RTC_CHECK_GE(params_.common.max_bitrate_bps, | 788 RTC_CHECK_GE(params_.video.max_bitrate_bps, params_.video.target_bitrate_bps); |
| 789 params_.common.target_bitrate_bps); | 789 RTC_CHECK_GE(params_.video.target_bitrate_bps, params_.video.min_bitrate_bps); |
| 790 RTC_CHECK_GE(params_.common.target_bitrate_bps, | 790 RTC_CHECK_LT(params_.video.selected_tl, params_.video.num_temporal_layers); |
| 791 params_.common.min_bitrate_bps); | |
| 792 RTC_CHECK_LT(params_.common.selected_tl, params_.common.num_temporal_layers); | |
| 793 RTC_CHECK_LT(params_.ss.selected_stream, params_.ss.streams.size()); | 791 RTC_CHECK_LT(params_.ss.selected_stream, params_.ss.streams.size()); |
| 794 for (const VideoStream& stream : params_.ss.streams) { | 792 for (const VideoStream& stream : params_.ss.streams) { |
| 795 RTC_CHECK_GE(stream.min_bitrate_bps, 0); | 793 RTC_CHECK_GE(stream.min_bitrate_bps, 0); |
| 796 RTC_CHECK_GE(stream.target_bitrate_bps, stream.min_bitrate_bps); | 794 RTC_CHECK_GE(stream.target_bitrate_bps, stream.min_bitrate_bps); |
| 797 RTC_CHECK_GE(stream.max_bitrate_bps, stream.target_bitrate_bps); | 795 RTC_CHECK_GE(stream.max_bitrate_bps, stream.target_bitrate_bps); |
| 798 RTC_CHECK_EQ(static_cast<int>(stream.temporal_layer_thresholds_bps.size()), | 796 RTC_CHECK_EQ(static_cast<int>(stream.temporal_layer_thresholds_bps.size()), |
| 799 params_.common.num_temporal_layers - 1); | 797 params_.video.num_temporal_layers - 1); |
| 800 } | 798 } |
| 801 // TODO(ivica): Should we check if the sum of all streams/layers is equal to | 799 // TODO(ivica): Should we check if the sum of all streams/layers is equal to |
| 802 // the total bitrate? We have to update them anyway in case the bitrate | 800 // the total bitrate? We have to update them anyway in case the bitrate |
| 803 // estimator changes the total bitrate. | 801 // estimator changes the total bitrate. |
| 804 RTC_CHECK_GE(params_.ss.num_spatial_layers, 1); | 802 RTC_CHECK_GE(params_.ss.num_spatial_layers, 1); |
| 805 RTC_CHECK_LE(params_.ss.selected_sl, params_.ss.num_spatial_layers); | 803 RTC_CHECK_LE(params_.ss.selected_sl, params_.ss.num_spatial_layers); |
| 806 RTC_CHECK(params_.ss.spatial_layers.empty() || | 804 RTC_CHECK(params_.ss.spatial_layers.empty() || |
| 807 params_.ss.spatial_layers.size() == | 805 params_.ss.spatial_layers.size() == |
| 808 static_cast<size_t>(params_.ss.num_spatial_layers)); | 806 static_cast<size_t>(params_.ss.num_spatial_layers)); |
| 809 if (params_.common.codec == "VP8") { | 807 if (params_.video.codec == "VP8") { |
| 810 RTC_CHECK_EQ(params_.ss.num_spatial_layers, 1); | 808 RTC_CHECK_EQ(params_.ss.num_spatial_layers, 1); |
| 811 } else if (params_.common.codec == "VP9") { | 809 } else if (params_.video.codec == "VP9") { |
| 812 RTC_CHECK_EQ(params_.ss.streams.size(), 1u); | 810 RTC_CHECK_EQ(params_.ss.streams.size(), 1u); |
| 813 } | 811 } |
| 814 } | 812 } |
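Since the whole CL is a rename of params_.common into narrower groups, here is a rough sketch of how the reorganized Params struct appears to be laid out, inferred only from the fields referenced in this file; the grouping, types and header path are assumptions, not the authoritative video_quality_test.h:

```cpp
// Sketch only: field names come from usages in this file; everything else is assumed.
#include <string>
#include "webrtc/call.h"  // for Call::Config::BitrateConfig (path assumed)

struct Params {
  struct CallConfig {
    bool send_side_bwe;
    Call::Config::BitrateConfig call_bitrate_config;
  } call;
  struct Video {
    std::string codec;  // "VP8", "VP9" or "H264"
    size_t width;
    size_t height;
    int fps;
    int min_bitrate_bps;
    int target_bitrate_bps;
    int max_bitrate_bps;
    int min_transmit_bps;
    int num_temporal_layers;
    int selected_tl;
    bool suspend_below_min_bitrate;
    bool fec;
    std::string clip_name;
  } video;
  struct Audio {
    bool enabled;
    bool sync_video;
  } audio;
  // The screenshare, analyzer, pipe, logs and ss groups keep their existing fields.
};
```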
| 815 | 813 |
| 816 // Static. | 814 // Static. |
| 817 std::vector<int> VideoQualityTest::ParseCSV(const std::string& str) { | 815 std::vector<int> VideoQualityTest::ParseCSV(const std::string& str) { |
| 818 // Parse comma separated nonnegative integers, where some elements may be | 816 // Parse comma separated nonnegative integers, where some elements may be |
| 819 // empty. The empty values are replaced with -1. | 817 // empty. The empty values are replaced with -1. |
| 820 // E.g. "10,-20,,30,40" --> {10, 20, -1, 30, 40} | 818 // E.g. "10,-20,,30,40" --> {10, 20, -1, 30, 40} |
| 821 // E.g. ",,10,,20," --> {-1, -1, 10, -1, 20, -1} | 819 // E.g. ",,10,,20," --> {-1, -1, 10, -1, 20, -1} |
| (...skipping 15 matching lines...) |
| 837 << "Unexpected non-number value."; | 835 << "Unexpected non-number value."; |
| 838 p += pos; | 836 p += pos; |
| 839 } | 837 } |
| 840 result.push_back(value); | 838 result.push_back(value); |
| 841 return result; | 839 return result; |
| 842 } | 840 } |
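Given the behavior documented above, a small usage sketch of the helper (the input string is illustrative; empty fields become -1):

```cpp
// Illustrative only: empty fields are parsed as -1, per the comment above.
std::vector<int> v = VideoQualityTest::ParseCSV("1280,720,30,,,");
// v == {1280, 720, 30, -1, -1, -1}
```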
| 843 | 841 |
| 844 // Static. | 842 // Static. |
| 845 VideoStream VideoQualityTest::DefaultVideoStream(const Params& params) { | 843 VideoStream VideoQualityTest::DefaultVideoStream(const Params& params) { |
| 846 VideoStream stream; | 844 VideoStream stream; |
| 847 stream.width = params.common.width; | 845 stream.width = params.video.width; |
| 848 stream.height = params.common.height; | 846 stream.height = params.video.height; |
| 849 stream.max_framerate = params.common.fps; | 847 stream.max_framerate = params.video.fps; |
| 850 stream.min_bitrate_bps = params.common.min_bitrate_bps; | 848 stream.min_bitrate_bps = params.video.min_bitrate_bps; |
| 851 stream.target_bitrate_bps = params.common.target_bitrate_bps; | 849 stream.target_bitrate_bps = params.video.target_bitrate_bps; |
| 852 stream.max_bitrate_bps = params.common.max_bitrate_bps; | 850 stream.max_bitrate_bps = params.video.max_bitrate_bps; |
| 853 stream.max_qp = 52; | 851 stream.max_qp = 52; |
| 854 if (params.common.num_temporal_layers == 2) | 852 if (params.video.num_temporal_layers == 2) |
| 855 stream.temporal_layer_thresholds_bps.push_back(stream.target_bitrate_bps); | 853 stream.temporal_layer_thresholds_bps.push_back(stream.target_bitrate_bps); |
| 856 return stream; | 854 return stream; |
| 857 } | 855 } |
| 858 | 856 |
| 859 // Static. | 857 // Static. |
| 860 void VideoQualityTest::FillScalabilitySettings( | 858 void VideoQualityTest::FillScalabilitySettings( |
| 861 Params* params, | 859 Params* params, |
| 862 const std::vector<std::string>& stream_descriptors, | 860 const std::vector<std::string>& stream_descriptors, |
| 863 size_t selected_stream, | 861 size_t selected_stream, |
| 864 int num_spatial_layers, | 862 int num_spatial_layers, |
| (...skipping 22 matching lines...) |
| 887 if (v[5] != -1) | 885 if (v[5] != -1) |
| 888 stream.max_bitrate_bps = v[5]; | 886 stream.max_bitrate_bps = v[5]; |
| 889 if (v.size() > 6 && v[6] != -1) | 887 if (v.size() > 6 && v[6] != -1) |
| 890 stream.max_qp = v[6]; | 888 stream.max_qp = v[6]; |
| 891 if (v.size() > 7) { | 889 if (v.size() > 7) { |
| 892 stream.temporal_layer_thresholds_bps.clear(); | 890 stream.temporal_layer_thresholds_bps.clear(); |
| 893 stream.temporal_layer_thresholds_bps.insert( | 891 stream.temporal_layer_thresholds_bps.insert( |
| 894 stream.temporal_layer_thresholds_bps.end(), v.begin() + 7, v.end()); | 892 stream.temporal_layer_thresholds_bps.end(), v.begin() + 7, v.end()); |
| 895 } else { | 893 } else { |
| 896 // Automatic TL thresholds for more than two layers not supported. | 894 // Automatic TL thresholds for more than two layers not supported. |
| 897 RTC_CHECK_LE(params->common.num_temporal_layers, 2); | 895 RTC_CHECK_LE(params->video.num_temporal_layers, 2); |
| 898 } | 896 } |
| 899 params->ss.streams.push_back(stream); | 897 params->ss.streams.push_back(stream); |
| 900 } | 898 } |
| 901 params->ss.selected_stream = selected_stream; | 899 params->ss.selected_stream = selected_stream; |
| 902 | 900 |
| 903 params->ss.num_spatial_layers = num_spatial_layers ? num_spatial_layers : 1; | 901 params->ss.num_spatial_layers = num_spatial_layers ? num_spatial_layers : 1; |
| 904 params->ss.selected_sl = selected_sl; | 902 params->ss.selected_sl = selected_sl; |
| 905 RTC_CHECK(params->ss.spatial_layers.empty()); | 903 RTC_CHECK(params->ss.spatial_layers.empty()); |
| 906 for (auto descriptor : sl_descriptors) { | 904 for (auto descriptor : sl_descriptors) { |
| 907 if (descriptor.empty()) | 905 if (descriptor.empty()) |
| (...skipping 11 matching lines...) |
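From the indices used in the loop above (v[5] = max_bitrate_bps, v[6] = max_qp, v[7...] = temporal-layer thresholds), a stream descriptor looks like a CSV in the same field order as DefaultVideoStream(). A hedged example follows; the meaning of the first five fields and the empty-descriptor fallback are inferred from the elided part of the loop, not shown in this excerpt:

```cpp
// Assumed field order (first five inferred): width,height,fps,min_bitrate_bps,
// target_bitrate_bps,max_bitrate_bps,max_qp,tl0_threshold_bps,...
// An empty descriptor presumably falls back to DefaultVideoStream(params).
std::vector<std::string> stream_descriptors = {
    "1280,720,30,200000,800000,2000000,52",  // fully specified stream
    ""                                       // use all defaults
};
```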
| 919 | 917 |
| 920 void VideoQualityTest::SetupCommon(Transport* send_transport, | 918 void VideoQualityTest::SetupCommon(Transport* send_transport, |
| 921 Transport* recv_transport) { | 919 Transport* recv_transport) { |
| 922 if (params_.logs) | 920 if (params_.logs) |
| 923 trace_to_stderr_.reset(new test::TraceToStderr); | 921 trace_to_stderr_.reset(new test::TraceToStderr); |
| 924 | 922 |
| 925 size_t num_streams = params_.ss.streams.size(); | 923 size_t num_streams = params_.ss.streams.size(); |
| 926 CreateSendConfig(num_streams, 0, send_transport); | 924 CreateSendConfig(num_streams, 0, send_transport); |
| 927 | 925 |
| 928 int payload_type; | 926 int payload_type; |
| 929 if (params_.common.codec == "H264") { | 927 if (params_.video.codec == "H264") { |
| 930 encoder_.reset(VideoEncoder::Create(VideoEncoder::kH264)); | 928 encoder_.reset(VideoEncoder::Create(VideoEncoder::kH264)); |
| 931 payload_type = kPayloadTypeH264; | 929 payload_type = kPayloadTypeH264; |
| 932 } else if (params_.common.codec == "VP8") { | 930 } else if (params_.video.codec == "VP8") { |
| 933 encoder_.reset(VideoEncoder::Create(VideoEncoder::kVp8)); | 931 encoder_.reset(VideoEncoder::Create(VideoEncoder::kVp8)); |
| 934 payload_type = kPayloadTypeVP8; | 932 payload_type = kPayloadTypeVP8; |
| 935 } else if (params_.common.codec == "VP9") { | 933 } else if (params_.video.codec == "VP9") { |
| 936 encoder_.reset(VideoEncoder::Create(VideoEncoder::kVp9)); | 934 encoder_.reset(VideoEncoder::Create(VideoEncoder::kVp9)); |
| 937 payload_type = kPayloadTypeVP9; | 935 payload_type = kPayloadTypeVP9; |
| 938 } else { | 936 } else { |
| 939 RTC_NOTREACHED() << "Codec not supported!"; | 937 RTC_NOTREACHED() << "Codec not supported!"; |
| 940 return; | 938 return; |
| 941 } | 939 } |
| 942 video_send_config_.encoder_settings.encoder = encoder_.get(); | 940 video_send_config_.encoder_settings.encoder = encoder_.get(); |
| 943 video_send_config_.encoder_settings.payload_name = params_.common.codec; | 941 video_send_config_.encoder_settings.payload_name = params_.video.codec; |
| 944 video_send_config_.encoder_settings.payload_type = payload_type; | 942 video_send_config_.encoder_settings.payload_type = payload_type; |
| 945 video_send_config_.rtp.nack.rtp_history_ms = kNackRtpHistoryMs; | 943 video_send_config_.rtp.nack.rtp_history_ms = kNackRtpHistoryMs; |
| 946 video_send_config_.rtp.rtx.payload_type = kSendRtxPayloadType; | 944 video_send_config_.rtp.rtx.payload_type = kSendRtxPayloadType; |
| 947 for (size_t i = 0; i < num_streams; ++i) | 945 for (size_t i = 0; i < num_streams; ++i) |
| 948 video_send_config_.rtp.rtx.ssrcs.push_back(kSendRtxSsrcs[i]); | 946 video_send_config_.rtp.rtx.ssrcs.push_back(kSendRtxSsrcs[i]); |
| 949 | 947 |
| 950 video_send_config_.rtp.extensions.clear(); | 948 video_send_config_.rtp.extensions.clear(); |
| 951 if (params_.common.send_side_bwe) { | 949 if (params_.call.send_side_bwe) { |
| 952 video_send_config_.rtp.extensions.push_back( | 950 video_send_config_.rtp.extensions.push_back( |
| 953 RtpExtension(RtpExtension::kTransportSequenceNumberUri, | 951 RtpExtension(RtpExtension::kTransportSequenceNumberUri, |
| 954 test::kTransportSequenceNumberExtensionId)); | 952 test::kTransportSequenceNumberExtensionId)); |
| 955 } else { | 953 } else { |
| 956 video_send_config_.rtp.extensions.push_back(RtpExtension( | 954 video_send_config_.rtp.extensions.push_back(RtpExtension( |
| 957 RtpExtension::kAbsSendTimeUri, test::kAbsSendTimeExtensionId)); | 955 RtpExtension::kAbsSendTimeUri, test::kAbsSendTimeExtensionId)); |
| 958 } | 956 } |
| 959 | 957 |
| 960 video_encoder_config_.min_transmit_bitrate_bps = | 958 video_encoder_config_.min_transmit_bitrate_bps = |
| 961 params_.common.min_transmit_bps; | 959 params_.video.min_transmit_bps; |
| 962 video_encoder_config_.streams = params_.ss.streams; | 960 video_encoder_config_.streams = params_.ss.streams; |
| 963 video_encoder_config_.spatial_layers = params_.ss.spatial_layers; | 961 video_encoder_config_.spatial_layers = params_.ss.spatial_layers; |
| 964 | 962 |
| 965 CreateMatchingReceiveConfigs(recv_transport); | 963 CreateMatchingReceiveConfigs(recv_transport); |
| 966 | 964 |
| 967 for (size_t i = 0; i < num_streams; ++i) { | 965 for (size_t i = 0; i < num_streams; ++i) { |
| 968 video_receive_configs_[i].rtp.nack.rtp_history_ms = kNackRtpHistoryMs; | 966 video_receive_configs_[i].rtp.nack.rtp_history_ms = kNackRtpHistoryMs; |
| 969 video_receive_configs_[i].rtp.rtx[payload_type].ssrc = kSendRtxSsrcs[i]; | 967 video_receive_configs_[i].rtp.rtx[payload_type].ssrc = kSendRtxSsrcs[i]; |
| 970 video_receive_configs_[i].rtp.rtx[payload_type].payload_type = | 968 video_receive_configs_[i].rtp.rtx[payload_type].payload_type = |
| 971 kSendRtxPayloadType; | 969 kSendRtxPayloadType; |
| 972 video_receive_configs_[i].rtp.transport_cc = params_.common.send_side_bwe; | 970 video_receive_configs_[i].rtp.transport_cc = params_.call.send_side_bwe; |
| 973 } | 971 } |
| 974 } | 972 } |
| 975 | 973 |
| 976 void VideoQualityTest::SetupScreenshare() { | 974 void VideoQualityTest::SetupScreenshare() { |
| 977 RTC_CHECK(params_.screenshare.enabled); | 975 RTC_CHECK(params_.screenshare.enabled); |
| 978 | 976 |
| 979 // Fill out codec settings. | 977 // Fill out codec settings. |
| 980 video_encoder_config_.content_type = VideoEncoderConfig::ContentType::kScreen; | 978 video_encoder_config_.content_type = VideoEncoderConfig::ContentType::kScreen; |
| 981 if (params_.common.codec == "VP8") { | 979 if (params_.video.codec == "VP8") { |
| 982 codec_settings_.VP8 = VideoEncoder::GetDefaultVp8Settings(); | 980 codec_settings_.VP8 = VideoEncoder::GetDefaultVp8Settings(); |
| 983 codec_settings_.VP8.denoisingOn = false; | 981 codec_settings_.VP8.denoisingOn = false; |
| 984 codec_settings_.VP8.frameDroppingOn = false; | 982 codec_settings_.VP8.frameDroppingOn = false; |
| 985 codec_settings_.VP8.numberOfTemporalLayers = | 983 codec_settings_.VP8.numberOfTemporalLayers = |
| 986 static_cast<unsigned char>(params_.common.num_temporal_layers); | 984 static_cast<unsigned char>(params_.video.num_temporal_layers); |
| 987 video_encoder_config_.encoder_specific_settings = &codec_settings_.VP8; | 985 video_encoder_config_.encoder_specific_settings = &codec_settings_.VP8; |
| 988 } else if (params_.common.codec == "VP9") { | 986 } else if (params_.video.codec == "VP9") { |
| 989 codec_settings_.VP9 = VideoEncoder::GetDefaultVp9Settings(); | 987 codec_settings_.VP9 = VideoEncoder::GetDefaultVp9Settings(); |
| 990 codec_settings_.VP9.denoisingOn = false; | 988 codec_settings_.VP9.denoisingOn = false; |
| 991 codec_settings_.VP9.frameDroppingOn = false; | 989 codec_settings_.VP9.frameDroppingOn = false; |
| 992 codec_settings_.VP9.numberOfTemporalLayers = | 990 codec_settings_.VP9.numberOfTemporalLayers = |
| 993 static_cast<unsigned char>(params_.common.num_temporal_layers); | 991 static_cast<unsigned char>(params_.video.num_temporal_layers); |
| 994 video_encoder_config_.encoder_specific_settings = &codec_settings_.VP9; | 992 video_encoder_config_.encoder_specific_settings = &codec_settings_.VP9; |
| 995 codec_settings_.VP9.numberOfSpatialLayers = | 993 codec_settings_.VP9.numberOfSpatialLayers = |
| 996 static_cast<unsigned char>(params_.ss.num_spatial_layers); | 994 static_cast<unsigned char>(params_.ss.num_spatial_layers); |
| 997 } | 995 } |
| 998 | 996 |
| 999 // Setup frame generator. | 997 // Setup frame generator. |
| 1000 const size_t kWidth = 1850; | 998 const size_t kWidth = 1850; |
| 1001 const size_t kHeight = 1110; | 999 const size_t kHeight = 1110; |
| 1002 std::vector<std::string> slides; | 1000 std::vector<std::string> slides; |
| 1003 slides.push_back(test::ResourcePath("web_screenshot_1850_1110", "yuv")); | 1001 slides.push_back(test::ResourcePath("web_screenshot_1850_1110", "yuv")); |
| 1004 slides.push_back(test::ResourcePath("presentation_1850_1110", "yuv")); | 1002 slides.push_back(test::ResourcePath("presentation_1850_1110", "yuv")); |
| 1005 slides.push_back(test::ResourcePath("photo_1850_1110", "yuv")); | 1003 slides.push_back(test::ResourcePath("photo_1850_1110", "yuv")); |
| 1006 slides.push_back(test::ResourcePath("difficult_photo_1850_1110", "yuv")); | 1004 slides.push_back(test::ResourcePath("difficult_photo_1850_1110", "yuv")); |
| 1007 | 1005 |
| 1008 if (params_.screenshare.scroll_duration == 0) { | 1006 if (params_.screenshare.scroll_duration == 0) { |
| 1009 // Cycle image every slide_change_interval seconds. | 1007 // Cycle image every slide_change_interval seconds. |
| 1010 frame_generator_.reset(test::FrameGenerator::CreateFromYuvFile( | 1008 frame_generator_.reset(test::FrameGenerator::CreateFromYuvFile( |
| 1011 slides, kWidth, kHeight, | 1009 slides, kWidth, kHeight, |
| 1012 params_.screenshare.slide_change_interval * params_.common.fps)); | 1010 params_.screenshare.slide_change_interval * params_.video.fps)); |
| 1013 } else { | 1011 } else { |
| 1014 RTC_CHECK_LE(params_.common.width, kWidth); | 1012 RTC_CHECK_LE(params_.video.width, kWidth); |
| 1015 RTC_CHECK_LE(params_.common.height, kHeight); | 1013 RTC_CHECK_LE(params_.video.height, kHeight); |
| 1016 RTC_CHECK_GT(params_.screenshare.slide_change_interval, 0); | 1014 RTC_CHECK_GT(params_.screenshare.slide_change_interval, 0); |
| 1017 const int kPauseDurationMs = (params_.screenshare.slide_change_interval - | 1015 const int kPauseDurationMs = (params_.screenshare.slide_change_interval - |
| 1018 params_.screenshare.scroll_duration) * | 1016 params_.screenshare.scroll_duration) * |
| 1019 1000; | 1017 1000; |
| 1020 RTC_CHECK_LE(params_.screenshare.scroll_duration, | 1018 RTC_CHECK_LE(params_.screenshare.scroll_duration, |
| 1021 params_.screenshare.slide_change_interval); | 1019 params_.screenshare.slide_change_interval); |
| 1022 | 1020 |
| 1023 frame_generator_.reset( | 1021 frame_generator_.reset( |
| 1024 test::FrameGenerator::CreateScrollingInputFromYuvFiles( | 1022 test::FrameGenerator::CreateScrollingInputFromYuvFiles( |
| 1025 clock_, slides, kWidth, kHeight, params_.common.width, | 1023 clock_, slides, kWidth, kHeight, params_.video.width, |
| 1026 params_.common.height, params_.screenshare.scroll_duration * 1000, | 1024 params_.video.height, params_.screenshare.scroll_duration * 1000, |
| 1027 kPauseDurationMs)); | 1025 kPauseDurationMs)); |
| 1028 } | 1026 } |
| 1029 } | 1027 } |
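A quick worked example of the scrolling branch above, with assumed values: each slide scrolls into view for scroll_duration seconds and then holds still for the remainder of slide_change_interval.

```cpp
// Worked example with assumed values (not from this CL):
const int slide_change_interval = 10;  // seconds each slide is shown
const int scroll_duration = 2;         // seconds spent scrolling into the slide
const int kPauseDurationMs =
    (slide_change_interval - scroll_duration) * 1000;  // == 8000 ms of static display
```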
| 1030 | 1028 |
| 1031 void VideoQualityTest::CreateCapturer(VideoCaptureInput* input) { | 1029 void VideoQualityTest::CreateCapturer(VideoCaptureInput* input) { |
| 1032 if (params_.screenshare.enabled) { | 1030 if (params_.screenshare.enabled) { |
| 1033 test::FrameGeneratorCapturer* frame_generator_capturer = | 1031 test::FrameGeneratorCapturer* frame_generator_capturer = |
| 1034 new test::FrameGeneratorCapturer( | 1032 new test::FrameGeneratorCapturer( |
| 1035 clock_, input, frame_generator_.release(), params_.common.fps); | 1033 clock_, input, frame_generator_.release(), params_.video.fps); |
| 1036 EXPECT_TRUE(frame_generator_capturer->Init()); | 1034 EXPECT_TRUE(frame_generator_capturer->Init()); |
| 1037 capturer_.reset(frame_generator_capturer); | 1035 capturer_.reset(frame_generator_capturer); |
| 1038 } else { | 1036 } else { |
| 1039 if (params_.video.clip_name.empty()) { | 1037 if (params_.video.clip_name.empty()) { |
| 1040 capturer_.reset(test::VideoCapturer::Create(input, params_.common.width, | 1038 capturer_.reset(test::VideoCapturer::Create(input, params_.video.width, |
| 1041 params_.common.height, | 1039 params_.video.height, |
| 1042 params_.common.fps, clock_)); | 1040 params_.video.fps, clock_)); |
| 1043 } else { | 1041 } else { |
| 1044 capturer_.reset(test::FrameGeneratorCapturer::CreateFromYuvFile( | 1042 capturer_.reset(test::FrameGeneratorCapturer::CreateFromYuvFile( |
| 1045 input, test::ResourcePath(params_.video.clip_name, "yuv"), | 1043 input, test::ResourcePath(params_.video.clip_name, "yuv"), |
| 1046 params_.common.width, params_.common.height, params_.common.fps, | 1044 params_.video.width, params_.video.height, params_.video.fps, |
| 1047 clock_)); | 1045 clock_)); |
| 1048 ASSERT_TRUE(capturer_) << "Could not create capturer for " | 1046 ASSERT_TRUE(capturer_) << "Could not create capturer for " |
| 1049 << params_.video.clip_name | 1047 << params_.video.clip_name |
| 1050 << ".yuv. Is this resource file present?"; | 1048 << ".yuv. Is this resource file present?"; |
| 1051 } | 1049 } |
| 1052 } | 1050 } |
| 1053 } | 1051 } |
| 1054 | 1052 |
| 1055 void VideoQualityTest::RunWithAnalyzer(const Params& params) { | 1053 void VideoQualityTest::RunWithAnalyzer(const Params& params) { |
| 1056 params_ = params; | 1054 params_ = params; |
| 1057 | 1055 |
| 1058 RTC_CHECK(!params_.audio); | 1056 RTC_CHECK(!params_.audio.enabled); |
| 1059 // TODO(ivica): Merge with RunWithRenderer and use a flag / argument to | 1057 // TODO(ivica): Merge with RunWithRenderer and use a flag / argument to |
| 1060 // differentiate between the analyzer and the renderer case. | 1058 // differentiate between the analyzer and the renderer case. |
| 1061 CheckParams(); | 1059 CheckParams(); |
| 1062 | 1060 |
| 1063 FILE* graph_data_output_file = nullptr; | 1061 FILE* graph_data_output_file = nullptr; |
| 1064 if (!params_.analyzer.graph_data_output_filename.empty()) { | 1062 if (!params_.analyzer.graph_data_output_filename.empty()) { |
| 1065 graph_data_output_file = | 1063 graph_data_output_file = |
| 1066 fopen(params_.analyzer.graph_data_output_filename.c_str(), "w"); | 1064 fopen(params_.analyzer.graph_data_output_filename.c_str(), "w"); |
| 1067 RTC_CHECK(graph_data_output_file) | 1065 RTC_CHECK(graph_data_output_file) |
| 1068 << "Can't open the file " << params_.analyzer.graph_data_output_filename | 1066 << "Can't open the file " << params_.analyzer.graph_data_output_filename |
| 1069 << "!"; | 1067 << "!"; |
| 1070 } | 1068 } |
| 1071 | 1069 |
| 1072 Call::Config call_config; | 1070 Call::Config call_config; |
| 1073 call_config.bitrate_config = params.common.call_bitrate_config; | 1071 call_config.bitrate_config = params_.call.call_bitrate_config; |
| 1074 CreateCalls(call_config, call_config); | 1072 CreateCalls(call_config, call_config); |
| 1075 | 1073 |
| 1076 test::LayerFilteringTransport send_transport( | 1074 test::LayerFilteringTransport send_transport( |
| 1077 params.pipe, sender_call_.get(), kPayloadTypeVP8, kPayloadTypeVP9, | 1075 params_.pipe, sender_call_.get(), kPayloadTypeVP8, kPayloadTypeVP9, |
| 1078 params.common.selected_tl, params_.ss.selected_sl); | 1076 params_.video.selected_tl, params_.ss.selected_sl); |
| 1079 test::DirectTransport recv_transport(params.pipe, receiver_call_.get()); | 1077 test::DirectTransport recv_transport(params_.pipe, receiver_call_.get()); |
| 1080 | 1078 |
| 1081 std::string graph_title = params_.analyzer.graph_title; | 1079 std::string graph_title = params_.analyzer.graph_title; |
| 1082 if (graph_title.empty()) | 1080 if (graph_title.empty()) |
| 1083 graph_title = VideoQualityTest::GenerateGraphTitle(); | 1081 graph_title = VideoQualityTest::GenerateGraphTitle(); |
| 1084 | 1082 |
| 1085 // In the case of different resolutions, the functions calculating PSNR and | 1083 // In the case of different resolutions, the functions calculating PSNR and |
| 1086 // SSIM return -1.0, instead of a positive value as usual. VideoAnalyzer | 1084 // SSIM return -1.0, instead of a positive value as usual. VideoAnalyzer |
| 1087 // aborts if the average psnr/ssim are below the given threshold, which is | 1085 // aborts if the average psnr/ssim are below the given threshold, which is |
| 1088 // 0.0 by default. Setting the thresholds to -1.1 prevents the unnecessary | 1086 // 0.0 by default. Setting the thresholds to -1.1 prevents the unnecessary |
| 1089 // abort. | 1087 // abort. |
| 1090 VideoStream& selected_stream = params_.ss.streams[params_.ss.selected_stream]; | 1088 VideoStream& selected_stream = params_.ss.streams[params_.ss.selected_stream]; |
| 1091 int selected_sl = params_.ss.selected_sl != -1 | 1089 int selected_sl = params_.ss.selected_sl != -1 |
| 1092 ? params_.ss.selected_sl | 1090 ? params_.ss.selected_sl |
| 1093 : params_.ss.num_spatial_layers - 1; | 1091 : params_.ss.num_spatial_layers - 1; |
| 1094 bool disable_quality_check = | 1092 bool disable_quality_check = |
| 1095 selected_stream.width != params_.common.width || | 1093 selected_stream.width != params_.video.width || |
| 1096 selected_stream.height != params_.common.height || | 1094 selected_stream.height != params_.video.height || |
| 1097 (!params_.ss.spatial_layers.empty() && | 1095 (!params_.ss.spatial_layers.empty() && |
| 1098 params_.ss.spatial_layers[selected_sl].scaling_factor_num != | 1096 params_.ss.spatial_layers[selected_sl].scaling_factor_num != |
| 1099 params_.ss.spatial_layers[selected_sl].scaling_factor_den); | 1097 params_.ss.spatial_layers[selected_sl].scaling_factor_den); |
| 1100 if (disable_quality_check) { | 1098 if (disable_quality_check) { |
| 1101 fprintf(stderr, | 1099 fprintf(stderr, |
| 1102 "Warning: Calculating PSNR and SSIM for downsized resolution " | 1100 "Warning: Calculating PSNR and SSIM for downsized resolution " |
| 1103 "not implemented yet! Skipping PSNR and SSIM calculations!"); | 1101 "not implemented yet! Skipping PSNR and SSIM calculations!"); |
| 1104 } | 1102 } |
| 1105 | 1103 |
| 1106 VideoAnalyzer analyzer( | 1104 VideoAnalyzer analyzer( |
| 1107 &send_transport, params_.analyzer.test_label, | 1105 &send_transport, params_.analyzer.test_label, |
| 1108 disable_quality_check ? -1.1 : params_.analyzer.avg_psnr_threshold, | 1106 disable_quality_check ? -1.1 : params_.analyzer.avg_psnr_threshold, |
| 1109 disable_quality_check ? -1.1 : params_.analyzer.avg_ssim_threshold, | 1107 disable_quality_check ? -1.1 : params_.analyzer.avg_ssim_threshold, |
| 1110 params_.analyzer.test_durations_secs * params_.common.fps, | 1108 params_.analyzer.test_durations_secs * params_.video.fps, |
| 1111 graph_data_output_file, graph_title, | 1109 graph_data_output_file, graph_title, |
| 1112 kVideoSendSsrcs[params_.ss.selected_stream]); | 1110 kVideoSendSsrcs[params_.ss.selected_stream]); |
| 1113 | 1111 |
| 1114 analyzer.SetReceiver(receiver_call_->Receiver()); | 1112 analyzer.SetReceiver(receiver_call_->Receiver()); |
| 1115 send_transport.SetReceiver(&analyzer); | 1113 send_transport.SetReceiver(&analyzer); |
| 1116 recv_transport.SetReceiver(sender_call_->Receiver()); | 1114 recv_transport.SetReceiver(sender_call_->Receiver()); |
| 1117 | 1115 |
| 1118 SetupCommon(&analyzer, &recv_transport); | 1116 SetupCommon(&analyzer, &recv_transport); |
| 1119 video_receive_configs_[params_.ss.selected_stream].renderer = &analyzer; | 1117 video_receive_configs_[params_.ss.selected_stream].renderer = &analyzer; |
| 1120 video_send_config_.pre_encode_callback = analyzer.pre_encode_proxy(); | 1118 video_send_config_.pre_encode_callback = analyzer.pre_encode_proxy(); |
| (...skipping 30 matching lines...) |
| 1151 | 1149 |
| 1152 if (graph_data_output_file) | 1150 if (graph_data_output_file) |
| 1153 fclose(graph_data_output_file); | 1151 fclose(graph_data_output_file); |
| 1154 } | 1152 } |
| 1155 | 1153 |
| 1156 void VideoQualityTest::RunWithRenderers(const Params& params) { | 1154 void VideoQualityTest::RunWithRenderers(const Params& params) { |
| 1157 params_ = params; | 1155 params_ = params; |
| 1158 CheckParams(); | 1156 CheckParams(); |
| 1159 | 1157 |
| 1160 std::unique_ptr<test::VideoRenderer> local_preview( | 1158 std::unique_ptr<test::VideoRenderer> local_preview( |
| 1161 test::VideoRenderer::Create("Local Preview", params_.common.width, | 1159 test::VideoRenderer::Create("Local Preview", params_.video.width, |
| 1162 params_.common.height)); | 1160 params_.video.height)); |
| 1163 size_t stream_id = params_.ss.selected_stream; | 1161 size_t stream_id = params_.ss.selected_stream; |
| 1164 std::string title = "Loopback Video"; | 1162 std::string title = "Loopback Video"; |
| 1165 if (params_.ss.streams.size() > 1) { | 1163 if (params_.ss.streams.size() > 1) { |
| 1166 std::ostringstream s; | 1164 std::ostringstream s; |
| 1167 s << stream_id; | 1165 s << stream_id; |
| 1168 title += " - Stream #" + s.str(); | 1166 title += " - Stream #" + s.str(); |
| 1169 } | 1167 } |
| 1170 | 1168 |
| 1171 std::unique_ptr<test::VideoRenderer> loopback_video( | 1169 std::unique_ptr<test::VideoRenderer> loopback_video( |
| 1172 test::VideoRenderer::Create(title.c_str(), | 1170 test::VideoRenderer::Create(title.c_str(), |
| 1173 params_.ss.streams[stream_id].width, | 1171 params_.ss.streams[stream_id].width, |
| 1174 params_.ss.streams[stream_id].height)); | 1172 params_.ss.streams[stream_id].height)); |
| 1175 | 1173 |
| 1176 // TODO(ivica): Remove bitrate_config and use the default Call::Config(), to | 1174 // TODO(ivica): Remove bitrate_config and use the default Call::Config(), to |
| 1177 // match the full stack tests. | 1175 // match the full stack tests. |
| 1178 Call::Config call_config; | 1176 Call::Config call_config; |
| 1179 call_config.bitrate_config = params_.common.call_bitrate_config; | 1177 call_config.bitrate_config = params_.call.call_bitrate_config; |
| 1180 | 1178 |
| 1181 ::VoiceEngineState voe; | 1179 ::VoiceEngineState voe; |
| 1182 if (params_.audio) { | 1180 if (params_.audio.enabled) { |
| 1183 CreateVoiceEngine(&voe, decoder_factory_); | 1181 CreateVoiceEngine(&voe, decoder_factory_); |
| 1184 AudioState::Config audio_state_config; | 1182 AudioState::Config audio_state_config; |
| 1185 audio_state_config.voice_engine = voe.voice_engine; | 1183 audio_state_config.voice_engine = voe.voice_engine; |
| 1186 call_config.audio_state = AudioState::Create(audio_state_config); | 1184 call_config.audio_state = AudioState::Create(audio_state_config); |
| 1187 } | 1185 } |
| 1188 | 1186 |
| 1189 std::unique_ptr<Call> call(Call::Create(call_config)); | 1187 std::unique_ptr<Call> call(Call::Create(call_config)); |
| 1190 | 1188 |
| 1191 test::LayerFilteringTransport transport( | 1189 test::LayerFilteringTransport transport( |
| 1192 params.pipe, call.get(), kPayloadTypeVP8, kPayloadTypeVP9, | 1190 params.pipe, call.get(), kPayloadTypeVP8, kPayloadTypeVP9, |
| 1193 params.common.selected_tl, params_.ss.selected_sl); | 1191 params.video.selected_tl, params_.ss.selected_sl); |
| 1194 // TODO(ivica): Use two calls to be able to merge with RunWithAnalyzer or at | 1192 // TODO(ivica): Use two calls to be able to merge with RunWithAnalyzer or at |
| 1195 // least share as much code as possible. That way this test would also match | 1193 // least share as much code as possible. That way this test would also match |
| 1196 // the full stack tests better. | 1194 // the full stack tests better. |
| 1197 transport.SetReceiver(call->Receiver()); | 1195 transport.SetReceiver(call->Receiver()); |
| 1198 | 1196 |
| 1199 SetupCommon(&transport, &transport); | 1197 SetupCommon(&transport, &transport); |
| 1200 | 1198 |
| 1201 video_send_config_.pre_encode_callback = local_preview.get(); | 1199 video_send_config_.pre_encode_callback = local_preview.get(); |
| 1202 video_receive_configs_[stream_id].renderer = loopback_video.get(); | 1200 video_receive_configs_[stream_id].renderer = loopback_video.get(); |
| 1203 if (params_.audio && params_.audio_video_sync) | 1201 if (params_.audio.enabled && params_.audio.sync_video) |
| 1204 video_receive_configs_[stream_id].sync_group = kSyncGroup; | 1202 video_receive_configs_[stream_id].sync_group = kSyncGroup; |
| 1205 | 1203 |
| 1206 video_send_config_.suspend_below_min_bitrate = | 1204 video_send_config_.suspend_below_min_bitrate = |
| 1207 params_.common.suspend_below_min_bitrate; | 1205 params_.video.suspend_below_min_bitrate; |
| 1208 | 1206 |
| 1209 if (params.common.fec) { | 1207 if (params.video.fec) { |
| 1210 video_send_config_.rtp.fec.red_payload_type = kRedPayloadType; | 1208 video_send_config_.rtp.fec.red_payload_type = kRedPayloadType; |
| 1211 video_send_config_.rtp.fec.ulpfec_payload_type = kUlpfecPayloadType; | 1209 video_send_config_.rtp.fec.ulpfec_payload_type = kUlpfecPayloadType; |
| 1212 video_receive_configs_[stream_id].rtp.fec.red_payload_type = | 1210 video_receive_configs_[stream_id].rtp.fec.red_payload_type = |
| 1213 kRedPayloadType; | 1211 kRedPayloadType; |
| 1214 video_receive_configs_[stream_id].rtp.fec.ulpfec_payload_type = | 1212 video_receive_configs_[stream_id].rtp.fec.ulpfec_payload_type = |
| 1215 kUlpfecPayloadType; | 1213 kUlpfecPayloadType; |
| 1216 } | 1214 } |
| 1217 | 1215 |
| 1218 if (params_.screenshare.enabled) | 1216 if (params_.screenshare.enabled) |
| 1219 SetupScreenshare(); | 1217 SetupScreenshare(); |
| 1220 | 1218 |
| 1221 video_send_stream_ = call->CreateVideoSendStream( | 1219 video_send_stream_ = call->CreateVideoSendStream( |
| 1222 video_send_config_.Copy(), video_encoder_config_.Copy()); | 1220 video_send_config_.Copy(), video_encoder_config_.Copy()); |
| 1223 VideoReceiveStream* video_receive_stream = | 1221 VideoReceiveStream* video_receive_stream = |
| 1224 call->CreateVideoReceiveStream(video_receive_configs_[stream_id].Copy()); | 1222 call->CreateVideoReceiveStream(video_receive_configs_[stream_id].Copy()); |
| 1225 CreateCapturer(video_send_stream_->Input()); | 1223 CreateCapturer(video_send_stream_->Input()); |
| 1226 | 1224 |
| 1227 AudioReceiveStream* audio_receive_stream = nullptr; | 1225 AudioReceiveStream* audio_receive_stream = nullptr; |
| 1228 if (params_.audio) { | 1226 if (params_.audio.enabled) { |
| 1229 audio_send_config_ = AudioSendStream::Config(&transport); | 1227 audio_send_config_ = AudioSendStream::Config(&transport); |
| 1230 audio_send_config_.voe_channel_id = voe.send_channel_id; | 1228 audio_send_config_.voe_channel_id = voe.send_channel_id; |
| 1231 audio_send_config_.rtp.ssrc = kAudioSendSsrc; | 1229 audio_send_config_.rtp.ssrc = kAudioSendSsrc; |
| 1232 | 1230 |
| 1233 // Add extension to enable audio send side BWE, and allow audio bit rate | 1231 // Add extension to enable audio send side BWE, and allow audio bit rate |
| 1234 // adaptation. | 1232 // adaptation. |
| 1235 audio_send_config_.rtp.extensions.clear(); | 1233 audio_send_config_.rtp.extensions.clear(); |
| 1236 if (params_.common.send_side_bwe) { | 1234 if (params_.call.send_side_bwe) { |
| 1237 audio_send_config_.rtp.extensions.push_back(webrtc::RtpExtension( | 1235 audio_send_config_.rtp.extensions.push_back(webrtc::RtpExtension( |
| 1238 webrtc::RtpExtension::kTransportSequenceNumberUri, | 1236 webrtc::RtpExtension::kTransportSequenceNumberUri, |
| 1239 test::kTransportSequenceNumberExtensionId)); | 1237 test::kTransportSequenceNumberExtensionId)); |
| 1240 audio_send_config_.min_bitrate_kbps = kOpusMinBitrate / 1000; | 1238 audio_send_config_.min_bitrate_kbps = kOpusMinBitrate / 1000; |
| 1241 audio_send_config_.max_bitrate_kbps = kOpusBitrateFb / 1000; | 1239 audio_send_config_.max_bitrate_kbps = kOpusBitrateFb / 1000; |
| 1242 } | 1240 } |
| 1243 | 1241 |
| 1244 audio_send_stream_ = call->CreateAudioSendStream(audio_send_config_); | 1242 audio_send_stream_ = call->CreateAudioSendStream(audio_send_config_); |
| 1245 | 1243 |
| 1246 AudioReceiveStream::Config audio_config; | 1244 AudioReceiveStream::Config audio_config; |
| 1247 audio_config.rtp.local_ssrc = kReceiverLocalAudioSsrc; | 1245 audio_config.rtp.local_ssrc = kReceiverLocalAudioSsrc; |
| 1248 audio_config.rtcp_send_transport = &transport; | 1246 audio_config.rtcp_send_transport = &transport; |
| 1249 audio_config.voe_channel_id = voe.receive_channel_id; | 1247 audio_config.voe_channel_id = voe.receive_channel_id; |
| 1250 audio_config.rtp.remote_ssrc = audio_send_config_.rtp.ssrc; | 1248 audio_config.rtp.remote_ssrc = audio_send_config_.rtp.ssrc; |
| 1251 audio_config.rtp.transport_cc = params_.common.send_side_bwe; | 1249 audio_config.rtp.transport_cc = params_.call.send_side_bwe; |
| 1252 audio_config.rtp.extensions = audio_send_config_.rtp.extensions; | 1250 audio_config.rtp.extensions = audio_send_config_.rtp.extensions; |
| 1253 audio_config.decoder_factory = decoder_factory_; | 1251 audio_config.decoder_factory = decoder_factory_; |
| 1254 if (params_.audio_video_sync) | 1252 if (params_.audio.sync_video) |
| 1255 audio_config.sync_group = kSyncGroup; | 1253 audio_config.sync_group = kSyncGroup; |
| 1256 | 1254 |
| 1257 audio_receive_stream = call->CreateAudioReceiveStream(audio_config); | 1255 audio_receive_stream = call->CreateAudioReceiveStream(audio_config); |
| 1258 | 1256 |
| 1259 const CodecInst kOpusInst = {120, "OPUS", 48000, 960, 2, 64000}; | 1257 const CodecInst kOpusInst = {120, "OPUS", 48000, 960, 2, 64000}; |
| 1260 EXPECT_EQ(0, voe.codec->SetSendCodec(voe.send_channel_id, kOpusInst)); | 1258 EXPECT_EQ(0, voe.codec->SetSendCodec(voe.send_channel_id, kOpusInst)); |
| 1261 } | 1259 } |
| 1262 | 1260 |
| 1263 // Start sending and receiving video. | 1261 // Start sending and receiving video. |
| 1264 video_receive_stream->Start(); | 1262 video_receive_stream->Start(); |
| 1265 video_send_stream_->Start(); | 1263 video_send_stream_->Start(); |
| 1266 capturer_->Start(); | 1264 capturer_->Start(); |
| 1267 | 1265 |
| 1268 if (params_.audio) { | 1266 if (params_.audio.enabled) { |
| 1269 // Start receiving audio. | 1267 // Start receiving audio. |
| 1270 audio_receive_stream->Start(); | 1268 audio_receive_stream->Start(); |
| 1271 EXPECT_EQ(0, voe.base->StartPlayout(voe.receive_channel_id)); | 1269 EXPECT_EQ(0, voe.base->StartPlayout(voe.receive_channel_id)); |
| 1272 EXPECT_EQ(0, voe.base->StartReceive(voe.receive_channel_id)); | 1270 EXPECT_EQ(0, voe.base->StartReceive(voe.receive_channel_id)); |
| 1273 | 1271 |
| 1274 // Start sending audio. | 1272 // Start sending audio. |
| 1275 audio_send_stream_->Start(); | 1273 audio_send_stream_->Start(); |
| 1276 EXPECT_EQ(0, voe.base->StartSend(voe.send_channel_id)); | 1274 EXPECT_EQ(0, voe.base->StartSend(voe.send_channel_id)); |
| 1277 } | 1275 } |
| 1278 | 1276 |
| 1279 test::PressEnterToContinue(); | 1277 test::PressEnterToContinue(); |
| 1280 | 1278 |
| 1281 if (params_.audio) { | 1279 if (params_.audio.enabled) { |
| 1282 // Stop sending audio. | 1280 // Stop sending audio. |
| 1283 EXPECT_EQ(0, voe.base->StopSend(voe.send_channel_id)); | 1281 EXPECT_EQ(0, voe.base->StopSend(voe.send_channel_id)); |
| 1284 audio_send_stream_->Stop(); | 1282 audio_send_stream_->Stop(); |
| 1285 | 1283 |
| 1286 // Stop receiving audio. | 1284 // Stop receiving audio. |
| 1287 EXPECT_EQ(0, voe.base->StopReceive(voe.receive_channel_id)); | 1285 EXPECT_EQ(0, voe.base->StopReceive(voe.receive_channel_id)); |
| 1288 EXPECT_EQ(0, voe.base->StopPlayout(voe.receive_channel_id)); | 1286 EXPECT_EQ(0, voe.base->StopPlayout(voe.receive_channel_id)); |
| 1289 audio_receive_stream->Stop(); | 1287 audio_receive_stream->Stop(); |
| 1290 } | 1288 } |
| 1291 | 1289 |
| 1292 // Stop receiving and sending video. | 1290 // Stop receiving and sending video. |
| 1293 capturer_->Stop(); | 1291 capturer_->Stop(); |
| 1294 video_send_stream_->Stop(); | 1292 video_send_stream_->Stop(); |
| 1295 video_receive_stream->Stop(); | 1293 video_receive_stream->Stop(); |
| 1296 | 1294 |
| 1297 call->DestroyVideoReceiveStream(video_receive_stream); | 1295 call->DestroyVideoReceiveStream(video_receive_stream); |
| 1298 call->DestroyVideoSendStream(video_send_stream_); | 1296 call->DestroyVideoSendStream(video_send_stream_); |
| 1299 | 1297 |
| 1300 if (params_.audio) { | 1298 if (params_.audio.enabled) { |
| 1301 call->DestroyAudioSendStream(audio_send_stream_); | 1299 call->DestroyAudioSendStream(audio_send_stream_); |
| 1302 call->DestroyAudioReceiveStream(audio_receive_stream); | 1300 call->DestroyAudioReceiveStream(audio_receive_stream); |
| 1303 } | 1301 } |
| 1304 | 1302 |
| 1305 transport.StopSending(); | 1303 transport.StopSending(); |
| 1306 if (params_.audio) | 1304 if (params_.audio.enabled) |
| 1307 DestroyVoiceEngine(&voe); | 1305 DestroyVoiceEngine(&voe); |
| 1308 } | 1306 } |
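Finally, a minimal caller-side sketch of the reorganized parameters, using only field names visible in this file; the defaults, the omitted groups and the exact header path are assumptions:

```cpp
// Sketch only: field names taken from usages in this file; values are illustrative.
#include "webrtc/video/video_quality_test.h"  // path assumed

VideoQualityTest::Params params;
params.call.send_side_bwe = true;
params.video.codec = "VP8";
params.video.width = 1280;
params.video.height = 720;
params.video.fps = 30;
params.video.min_bitrate_bps = 50000;
params.video.target_bitrate_bps = 800000;
params.video.max_bitrate_bps = 2000000;
params.video.num_temporal_layers = 2;
params.video.selected_tl = -1;   // -1: no temporal-layer filtering in the transport
params.audio.enabled = true;
params.audio.sync_video = true;  // pair the audio and video streams in kSyncGroup

VideoQualityTest test;
test.RunWithRenderers(params);
```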
| 1309 | 1307 |
| 1310 } // namespace webrtc | 1308 } // namespace webrtc |